Compare commits
11 Commits
development ... user/jbert
| Author | SHA1 | Date |
|---|---|---|
| Guillaume Hivert | a14037f518 | |
| Guillaume Hivert | 9d7f041508 | |
| Jordi Bertran de Balanda | 61da23c8d2 | |
| Jordi Bertran de Balanda | 65c94012c6 | |
| Jordi Bertran de Balanda | cb87471cc8 | |
| Jordi Bertran de Balanda | 57e4ecc0ee | |
| Jordi Bertran de Balanda | 7a390e684d | |
| Jordi Bertran de Balanda | bbcd33a14d | |
| Jordi Bertran de Balanda | 88311ffbd9 | |
| Jordi Bertran de Balanda | be1e0fc56f | |
| Jordi Bertran de Balanda | 9a1a56e7b9 | |
@@ -0,0 +1,7 @@
+{
+    "extends": ["scality"],
+    "plugins": ["jest"],
+    "env": {
+        "jest/globals": true
+    }
+}
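This first hunk adds what appears to be an ESLint configuration (the `extends`/`plugins`/`env` shape of an `.eslintrc`): it extends the shared `scality` config and enables the `jest` plugin. Declaring `"jest/globals": true` tells ESLint that Jest's injected globals exist, so a test file like the hypothetical sketch below lints cleanly without `no-undef` errors:

```js
// Hypothetical test file; `describe`, `test`, and `expect` are Jest
// globals that the `jest/globals` env declares for ESLint.
describe('sanity', () => {
    test('adds numbers', () => {
        expect(1 + 1).toBe(2);
    });
});
```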
@@ -0,0 +1,8 @@
+module.exports = {
+    presets: [
+        ['@babel/preset-env', { targets: { node: 'current' } }],
+        '@babel/preset-typescript',
+    ],
+    plugins: ['add-module-exports'],
+};
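The second new file is a Babel configuration (likely a `babel.config.js`, though the file name is not shown): `@babel/preset-env` targets the running Node version and `@babel/preset-typescript` strips TypeScript syntax, the usual setup for running TypeScript sources through `babel-jest`. A minimal sketch of what this config does, assuming `@babel/core` and both presets are installed:

```js
// Compile a TypeScript snippet with the presets from the new config.
const babel = require('@babel/core');

const { code } = babel.transformSync(
    'const greet = (name: string): string => `hello ${name}`;',
    {
        filename: 'greet.ts', // preset-typescript needs a .ts filename
        presets: [
            ['@babel/preset-env', { targets: { node: 'current' } }],
            '@babel/preset-typescript',
        ],
    },
);
console.log(code); // type annotations stripped, output targets current Node
```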
index.js (4 changed lines)
@@ -76,10 +76,10 @@ module.exports = {
         tagging: require('./lib/s3middleware/tagging'),
         checkDateModifiedHeaders:
             require('./lib/s3middleware/validateConditionalHeaders')
                 .checkDateModifiedHeaders,
         validateConditionalHeaders:
             require('./lib/s3middleware/validateConditionalHeaders')
                 .validateConditionalHeaders,
         MD5Sum: require('./lib/s3middleware/MD5Sum'),
         NullStream: require('./lib/s3middleware/nullStream'),
         objectUtils: require('./lib/s3middleware/objectUtils'),
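The `index.js` hunk lists the re-exported s3middleware helpers; both panes of the split view carry the same text, so the recorded change is whitespace-only. Assuming these keys sit under an `s3middleware` sub-object of the package's exports, as the require paths suggest, a consumer would reach them like this:

```js
// Hypothetical consumer; the nesting under `s3middleware` is assumed
// from the require paths, not shown in the hunk itself.
const { s3middleware } = require('arsenal');
const { checkDateModifiedHeaders, validateConditionalHeaders } = s3middleware;
```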
@@ -1,7 +1,7 @@
 'use strict'; // eslint-disable-line strict

 const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
     FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@@ -2,7 +2,7 @@

 const Extension = require('./Extension').default;
 const { inc, listingParamsMasterKeysV0ToV1,
     FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

@@ -66,7 +66,7 @@ class Delimiter extends Extension {
         this.continuationToken = parameters.continuationToken;
         this.alphabeticalOrder =
             typeof parameters.alphabeticalOrder !== 'undefined' ?
                 parameters.alphabeticalOrder : true;

         this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         // results
@@ -87,7 +87,7 @@ class Delimiter extends Extension {
             this[this.nextContinueMarker].startsWith(this.prefix || '')) {
             const nextDelimiterIndex =
                 this[this.nextContinueMarker].indexOf(this.delimiter,
                     this.prefix ? this.prefix.length : 0);
             this[this.nextContinueMarker] =
                 this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
                     this.delimiter.length);
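This hunk is the common-prefix rollup in the Delimiter listing extension: the continue marker is cut back to the first delimiter after the prefix, so a whole "directory" is skipped at once. A worked example of the same slicing, with illustrative values:

```js
// With prefix 'photos/' and delimiter '/', a marker deep inside a common
// prefix is truncated to that prefix, mirroring the hunk above.
const prefix = 'photos/';
const delimiter = '/';
let marker = 'photos/2021/january/img.jpg';
const nextDelimiterIndex = marker.indexOf(delimiter, prefix.length);
marker = marker.slice(0, nextDelimiterIndex + delimiter.length);
console.log(marker); // 'photos/2021/'
```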
@@ -194,7 +194,7 @@ class DelimiterVersions extends Delimiter {
             // length is the same so we can remove their prefix without
             // looking at the type of key
             return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
                 obj.value);
         }

     filterCommon(key, value) {
@@ -250,7 +250,7 @@ class DelimiterVersions extends Delimiter {
         }
         // skip to the same object key in both M and V range listings
         return [DbPrefixes.Master + skipV0,
             DbPrefixes.Version + skipV0];
     }

     /**
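In V0 key format, master keys and version keys live in separate prefixed ranges, so skipping past a listed prefix needs one skip value per range. A sketch with illustrative prefix bytes (the actual `DbPrefixes` values come from the versioning constants module and are not shown in this diff):

```js
// Illustrative only: build the paired skip keys the hunk above returns.
const DbPrefixes = { Master: '\x7fM', Version: '\x7fV' }; // assumed values
const skipV0 = 'photos/';
const skip = [DbPrefixes.Master + skipV0, DbPrefixes.Version + skipV0];
console.log(skip); // ['\x7fMphotos/', '\x7fVphotos/']
```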
@@ -1,7 +1,6 @@
 const ArrayUtils = require('./ArrayUtils');

 class SortedSet {
-
     constructor(obj) {
         if (obj) {
             this.keys = obj.keys;
@@ -91,7 +91,7 @@ class Vault {
                 requestContext: serializedRCsArr,
             },
             (err, userInfo) => vaultSignatureCb(err, userInfo,
-                params.log, callback)
+                params.log, callback),
         );
     }

@@ -146,7 +146,7 @@ class Vault {
                 requestContext: serializedRCs,
             },
             (err, userInfo) => vaultSignatureCb(err, userInfo,
-                params.log, callback, streamingV4Params)
+                params.log, callback, streamingV4Params),
         );
     }

@@ -232,28 +232,28 @@ class Vault {
      */
     getAccountIds(canonicalIDs, log, callback) {
         log.trace('getting accountIds from Vault based on canonicalIDs',
             { canonicalIDs });
         this.client.getAccountIds(canonicalIDs,
             { reqUid: log.getSerializedUids() },
             (err, info) => {
                 if (err) {
                     log.debug('received error message from vault',
                         { errorMessage: err });
                     return callback(err);
                 }
-            const infoFromVault = info.message.body;
-            log.trace('info received from vault', { infoFromVault });
-            const result = {};
-            /* If the accountId was not found in Vault, do not
-            send the canonicalID back to the API */
-            Object.keys(infoFromVault).forEach(key => {
-                if (infoFromVault[key] !== 'NotFound' &&
-                infoFromVault[key] !== 'WrongFormat') {
-                    result[key] = infoFromVault[key];
-                }
-            });
-            return callback(null, result);
+                const infoFromVault = info.message.body;
+                log.trace('info received from vault', { infoFromVault });
+                const result = {};
+                /* If the accountId was not found in Vault, do not
+                send the canonicalID back to the API */
+                Object.keys(infoFromVault).forEach(key => {
+                    if (infoFromVault[key] !== 'NotFound' &&
+                    infoFromVault[key] !== 'WrongFormat') {
+                        result[key] = infoFromVault[key];
+                    }
+                });
+                return callback(null, result);
             });
     }

     /** checkPolicies -- call Vault to evaluate policies
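This hunk re-indents the body of the `getAccountIds` callback; the statement sequence is identical on both sides. Functionally, entries that Vault reports as `NotFound` or `WrongFormat` are dropped before the result map reaches the caller. A hypothetical call site:

```js
// `vault` and `log` are assumed to exist; names are illustrative.
vault.getAccountIds(['canonId1', 'canonId2'], log, (err, result) => {
    if (err) {
        return log.error('getAccountIds failed', { error: err });
    }
    // result holds only the canonical IDs Vault resolved,
    // e.g. { canonId1: '123456789012' }
    return result;
});
```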
@@ -74,7 +74,7 @@ function extractParams(request, log, awsService, data) {
             version = 'v4';
         } else {
             log.trace('invalid authorization security header',
                 { header: authHeader });
             return { err: errors.AccessDenied };
         }
     } else if (data.Signature) {
@@ -89,7 +89,7 @@ function extractParams(request, log, awsService, data) {
     if (version !== null && method !== null) {
         if (!checkFunctions[version] || !checkFunctions[version][method]) {
             log.trace('invalid auth version or method',
                 { version, authMethod: method });
             return { err: errors.NotImplemented };
         }
         log.trace('identified auth method', { version, authMethod: method });
@@ -161,7 +161,7 @@ function doAuth(request, log, cb, awsService, requestContexts) {
  * @return {undefined}
  */
 function generateV4Headers(request, data, accessKey, secretKeyValue,
     awsService, proxyPath, sessionToken) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
     // get date without time
@@ -194,16 +194,16 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'host'
+            || headerName === 'host',
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
         awsService: service, proxyPath };
     const stringToSign = constructStringToSignV4(params);
     const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
         region,
         scopeDate,
         service);
     const signature = crypto.createHmac('sha256', signingKey)
         .update(stringToSign, 'binary').digest('hex');
     const authorizationHeader = `${algorithm} Credential=${accessKey}` +
@@ -29,7 +29,7 @@ class ChainBackend extends BaseBackend {
             typeof client.getCanonicalIds === 'function' &&
             typeof client.getEmailAddresses === 'function' &&
             typeof client.checkPolicies === 'function' &&
-            typeof client.healthcheck === 'function'
+            typeof client.healthcheck === 'function',
         ), 'invalid client: missing required auth backend methods');
         this._clients = clients;
     }
@@ -55,7 +55,7 @@ class ChainBackend extends BaseBackend {
             signatureFromRequest,
             accessKey,
             options,
-            done
+            done,
         ), callback);
     }

@@ -67,7 +67,7 @@ class ChainBackend extends BaseBackend {
             region,
             scopeDate,
             options,
-            done
+            done,
         ), callback);
     }

@@ -153,7 +153,7 @@ class ChainBackend extends BaseBackend {
             requestContextParams,
             userArn,
             options,
-            done
+            done,
         ), (err, res) => {
             if (err) {
                 return callback(err);
@@ -171,8 +171,8 @@ class ChainBackend extends BaseBackend {
             client.healthcheck(reqUid, (err, res) => done(null, {
                 error: !!err ? err : null,
                 status: res,
-            })
+            }),
         ), (err, res) => {
             if (err) {
                 return callback(err);
             }
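The recurring edit across the Vault and ChainBackend hunks is the same one-character change: a trailing comma after the last argument of a multi-line call. This is what ESLint's `comma-dangle` rule enforces in its `always-multiline` setting (the exact rule configuration is an assumption; the diff itself only shows the added commas). Schematically, with placeholder names:

```js
// Before: last argument of a multi-line call has no trailing comma.
someAsyncCall(
    argOne,
    argTwo,
    done
);

// After: trailing comma added, so appending an argument later
// touches one line instead of two.
someAsyncCall(
    argOne,
    argTwo,
    done,
);
```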
@@ -26,20 +26,20 @@ class AuthLoader {
             .required();

         const accountsJoi = joi.array()
             .items({
                 name: joi.string().required(),
                 email: joi.string().email().required(),
                 arn: joi.string().required(),
                 canonicalID: joi.string().required(),
                 shortid: joi.string().regex(/^[0-9]{12}$/).required(),
                 keys: this._joiKeysValidator,
                 // backward-compat
                 users: joi.array(),
             })
             .required()
             .unique('arn')
             .unique('email')
             .unique('canonicalID');
         this._joiValidator = joi.object({ accounts: accountsJoi });
     }

@@ -136,7 +136,7 @@ class AuthLoader {

     _validateData(authData, filePath) {
         const res = joi.validate(authData, this._joiValidator,
             { abortEarly: false });
         if (res.error) {
             this._dumpJoiErrors(res.error.details, filePath);
             return false;
@@ -156,7 +156,7 @@ class AuthLoader {
                 'master/conf/authdata.json). Also note that support ' +
                 'for account users has been dropped.',
                 { accountName: account.name, accountArn: account.arn,
                     filePath });
             arnError = true;
             return;
         }
@@ -167,7 +167,7 @@ class AuthLoader {
                 'https://github.com/scality/S3/blob/master/conf/' +
                 'authdata.json)',
                 { accountName: account.name, accountArn: account.arn,
                     filePath });
             arnError = true;
             return;
         }
@@ -176,8 +176,8 @@ class AuthLoader {
             this._log.error(
                 'authentication config validation error',
                 { reason: arnObj.error.description,
                     accountName: account.name, accountArn: account.arn,
                     filePath });
             arnError = true;
             return;
         }
@@ -185,8 +185,8 @@ class AuthLoader {
             this._log.error(
                 'authentication config validation error',
                 { reason: 'not an IAM account ARN',
                     accountName: account.name, accountArn: account.arn,
                     filePath });
             arnError = true;
             return;
         }
@@ -215,7 +215,7 @@ class AuthLoader {
                 logInfo.context = err.context;
             }
             this._log.error('authentication config validation error',
                 logInfo);
         });
     }
 }
@@ -41,7 +41,7 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
     // Build headerString
     return amzHeaders.reduce((headerStr, current) =>
         `${headerStr}${current[0]}:${current[1]}\n`,
     '');
 }

 module.exports = getCanonicalizedAmzHeaders;
@@ -22,9 +22,9 @@ function check(request, log, data) {
     timestamp = Date.parse(timestamp);
     if (!timestamp) {
         log.debug('missing or invalid date header',
             { method: 'auth/v2/headerAuthCheck.check' });
         return { err: errors.AccessDenied.
             customizeDescription('Authentication requires a valid Date or ' +
             'x-amz-date header') };
     }

@@ -42,12 +42,12 @@ function check(request, log, data) {

     if (expirationTime > currentTime + preSignedURLExpiry) {
         log.debug('expires parameter too far in future',
             { expires: request.query.Expires });
         return { err: errors.AccessDenied };
     }
     if (currentTime > expirationTime) {
         log.debug('current time exceeds expires time',
             { expires: request.query.Expires });
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;
@@ -88,14 +88,14 @@ function check(request, log, data, awsService) {
     }
     if (!timestamp) {
         log.debug('missing or invalid date header',
             { method: 'auth/v4/headerAuthCheck.check' });
         return { err: errors.AccessDenied.
             customizeDescription('Authentication requires a valid Date or ' +
             'x-amz-date header') };
     }

     const validationResult = validateCredentials(credentialsArr, timestamp,
         log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credentialsArr,
             timestamp, validationResult });
@@ -134,7 +134,7 @@ function check(request, log, data, awsService) {
     } catch (err) {
         log.debug('invalid proxy_path header', { proxyPath, err });
         return { err: errors.InvalidArgument.customizeDescription(
             'invalid proxy_path header') };
     }
 }

@@ -45,7 +45,7 @@ function check(request, log, data) {
     }

     const validationResult = validateCredentials(credential, timestamp,
         log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credential,
             timestamp, validationResult });
@@ -69,7 +69,7 @@ function check(request, log, data) {
     } catch (err) {
         log.debug('invalid proxy_path header', { proxyPath });
         return { err: errors.InvalidArgument.customizeDescription(
             'invalid proxy_path header') };
     }
 }

@@ -273,7 +273,7 @@ class V4Transform extends Transform {
                 }
                 // get next chunk
                 return callback();
-            }
+            },
         );
     }
 }
@@ -25,20 +25,20 @@ function validateCredentials(credentials, timestamp, log) {
         log.warn('accessKey provided is wrong format', { accessKey });
         return errors.InvalidArgument;
     }
     // The scope date (format YYYYMMDD) must be same date as the timestamp
     // on the request from the x-amz-date param (if queryAuthCheck)
     // or from the x-amz-date header or date header (if headerAuthCheck)
     // Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
     // http://docs.aws.amazon.com/AmazonS3/latest/API/
     // sigv4-query-string-auth.html
     // http://docs.aws.amazon.com/general/latest/gr/
     // sigv4-date-handling.html

     // convert timestamp to format of scopeDate YYYYMMDD
     const timestampDate = timestamp.split('T')[0];
     if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
         log.warn('scope date must be the same date as the timestamp date',
             { scopeDate, timestampDate });
         return errors.RequestTimeTooSkewed;
     }
     if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
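The scope-date check above compares the YYYYMMDD date embedded in the SigV4 credential scope against the date part of the request timestamp. A worked example with illustrative values:

```js
// SigV4 timestamps are ISO 8601 basic format: YYYYMMDDTHHMMSSZ.
const timestamp = '20240115T093000Z';
const timestampDate = timestamp.split('T')[0]; // '20240115'
// A credential scope date of '20240115' passes; '20240114' would be
// rejected with RequestTimeTooSkewed, per the hunk above.
```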
@@ -50,7 +50,7 @@ function validateCredentials(credentials, timestamp, log) {
     }
     if (requestType !== 'aws4_request') {
         log.warn('requestType contained in params is not aws4_request',
             { requestType });
         return errors.InvalidArgument;
     }
     return {};
@@ -68,7 +68,7 @@ function extractQueryParams(queryObj, log) {
     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
         log.warn('algorithm param incorrect',
             { algo: queryObj['X-Amz-Algorithm'] });
         return authParams;
     }

@@ -103,11 +103,11 @@ module.exports = {
     gcpTaggingPrefix: 'aws-tag-',
     productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
     legacyLocations: ['sproxyd', 'legacy'],
     // healthcheck default call from nginx is every 2 seconds
     // for external backends, don't call unless at least 1 minute
     // (60,000 milliseconds) since last call
     externalBackendHealthCheckInterval: 60000,
     // some of the available data backends (if called directly rather
     // than through the multiple backend gateway) need a key provided
     // as a string as first parameter of the get/delete methods.
     clientsRequireStringKey: { sproxyd: true, cdmi: true },
lib/db.js (10 changed lines)
@@ -64,12 +64,12 @@ class IndexTransaction {
     push(op) {
         if (this.closed) {
             throw propError('pushOnCommittedTransaction',
                 'can not add ops to already committed transaction');
         }

         if (op.type !== 'put' && op.type !== 'del') {
             throw propError('invalidTransactionVerb',
                 `unknown action type: ${op.type}`);
         }

         if (op.key === undefined) {
@@ -137,7 +137,7 @@ class IndexTransaction {
     addCondition(condition) {
         if (this.closed) {
             throw propError('pushOnCommittedTransaction',
                 'can not add conditions to already committed transaction');
         }
         if (condition === undefined || Object.keys(condition).length === 0) {
             throw propError('missingCondition', 'missing condition for conditional put');
@@ -159,12 +159,12 @@ class IndexTransaction {
     commit(cb) {
         if (this.closed) {
             return cb(propError('alreadyCommitted',
                 'transaction was already committed'));
         }

         if (this.operations.length === 0) {
             return cb(propError('emptyTransaction',
                 'tried to commit an empty transaction'));
         }

         this.closed = true;
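The `lib/db.js` hunks show `IndexTransaction`'s guard rails: ops must be `put` or `del`, nothing can be pushed after commit, and committing an empty or already-committed transaction fails through the callback. A hypothetical happy-path usage:

```js
// `db` and `log` are assumed; names are illustrative.
const transaction = new IndexTransaction(db);
transaction.push({ type: 'put', key: 'k1', value: 'v1' });
transaction.push({ type: 'del', key: 'k2' });
transaction.commit(err => {
    if (err) {
        return log.error('commit failed', { error: err });
    }
    // Any further transaction.push(...) now throws
    // 'pushOnCommittedTransaction'.
    return log.info('commit succeeded');
});
```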
@@ -76,11 +76,11 @@ function errorsGen() {
     const errorsObj = require('../errors/arsenalErrors.json');

     Object.keys(errorsObj)
         .filter(index => index !== '_comment')
         .forEach(index => {
             errors[index] = new ArsenalError(index, errorsObj[index].code,
                 errorsObj[index].description);
         });
     return errors;
 }

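`errorsGen()` turns a JSON map of error definitions into `ArsenalError` instances, skipping the `_comment` key. A self-contained sketch of the same pattern, with a stub class and a two-entry map standing in for `../errors/arsenalErrors.json`:

```js
// Stub with the assumed (type, code, description) constructor shape.
class ArsenalError extends Error {
    constructor(type, code, description) {
        super(type);
        this.code = code;
        this.description = description;
    }
}

const errorsObj = {
    _comment: 'skipped by the filter below',
    AccessDenied: { code: 403, description: 'Access Denied' },
    NoSuchKey: { code: 404, description: 'The specified key does not exist.' },
};

const errors = {};
Object.keys(errorsObj)
    .filter(index => index !== '_comment')
    .forEach(index => {
        errors[index] = new ArsenalError(index, errorsObj[index].code,
            errorsObj[index].description);
    });

console.log(errors.AccessDenied.code); // 403
```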
@@ -17,9 +17,9 @@ describe('decyrptSecret', () => {
 describe('parseServiceCredentials', () => {
     const conf = {
         users: [{ accessKey,
             accountType: 'service-clueso',
             secretKey,
             userName: 'Search Service Account' }],
     };
     const auth = JSON.stringify({ privateKey });

@@ -25,7 +25,7 @@ module.exports.once = function once(func) {
         state.res = func.apply(func, args);
     } else {
         debug('function already called:', func,
             'returning cached result:', state.res);
     }
     return state.res;
 };
@@ -17,7 +17,7 @@ class RedisClient {
                 method: 'RedisClient.constructor',
                 redisHost: config.host,
                 redisPort: config.port,
-            })
+            }),
         );
         return this;
     }
@@ -9,7 +9,6 @@ const StatsClient = require('./StatsClient');
  * rather than by seconds
  */
 class StatsModel extends StatsClient {
-
     /**
      * Utility method to convert 2d array rows to columns, and vice versa
      * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
@@ -2,8 +2,8 @@ const promClient = require('prom-client');

 const collectDefaultMetricsIntervalMs =
     process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
         Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
         10000;

 promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });

@@ -27,7 +27,7 @@ class ARN {

     static createFromString(arnStr) {
         const [arn, partition, service, region, accountId,
             resourceType, resource] = arnStr.split(':');

         if (arn !== 'arn') {
             return { error: errors.InvalidArgument.customizeDescription(
@@ -58,7 +58,7 @@ class ARN {
             'must be a 12-digit number or "*"') };
         }
         const fullResource = (resource !== undefined ?
             `${resourceType}:${resource}` : resourceType);
         return new ARN(partition, service, region, accountId, fullResource);
     }

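`createFromString` splits an ARN on `:` into at most seven fields and folds the optional seventh back into the resource. A worked example (output shape illustrative):

```js
// 'arn:aws:iam::123456789012:user/Alice'.split(':') yields six fields,
// so resourceType is 'user/Alice' and resource is undefined; fullResource
// therefore stays 'user/Alice'.
const arn = ARN.createFromString('arn:aws:iam::123456789012:user/Alice');
// With a seventh field, e.g. 'arn:aws:s3:::bucket:key', resourceType
// and resource are recombined as 'bucket:key'.
```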
@@ -98,7 +98,7 @@ class ARN {

     toString() {
         return ['arn', this.getPartition(), this.getService(),
             this.getRegion(), this.getAccountId(), this.getResource()]
             .join(':');
     }
 }
@@ -52,9 +52,9 @@ class BackendInfo {
      */
     static isRequestEndpointPresent(config, requestEndpoint, log) {
         if (Object.keys(config.restEndpoints).
             indexOf(requestEndpoint) < 0) {
             log.trace('requestEndpoint does not match config restEndpoints',
                 { requestEndpoint });
             return false;
         }
         return true;
@@ -70,10 +70,10 @@ class BackendInfo {
      */
     static isRequestEndpointValueValid(config, requestEndpoint, log) {
         if (Object.keys(config.locationConstraints).
             indexOf(config.restEndpoints[requestEndpoint]) < 0) {
             log.trace('the default locationConstraint for request' +
                 'Endpoint does not match any config locationConstraint',
                 { requestEndpoint });
             return false;
         }
         return true;
@@ -110,7 +110,7 @@ class BackendInfo {
      */
     static isValidRequestEndpointOrBackend(config, requestEndpoint, log) {
         if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
             log)) {
             return BackendInfo.isMemOrFileBackend(config, log);
         }
         return BackendInfo.isRequestEndpointValueValid(config, requestEndpoint,
@@ -132,7 +132,7 @@ class BackendInfo {
         bucketLocationConstraint, requestEndpoint, log) {
         if (objectLocationConstraint) {
             if (BackendInfo.isValidLocationConstraint(config,
                 objectLocationConstraint, log)) {
                 log.trace('objectLocationConstraint is valid');
                 return { isValid: true };
             }
@@ -143,7 +143,7 @@ class BackendInfo {
         }
         if (bucketLocationConstraint) {
             if (BackendInfo.isValidLocationConstraint(config,
                 bucketLocationConstraint, log)) {
                 log.trace('bucketLocationConstraint is valid');
                 return { isValid: true };
             }
@@ -159,7 +159,7 @@ class BackendInfo {
             return { isValid: true, legacyLocationConstraint };
         }
         if (!BackendInfo.isValidRequestEndpointOrBackend(config,
             requestEndpoint, log)) {
             return { isValid: false, description: 'Endpoint Location Error - ' +
                 `Your endpoint "${requestEndpoint}" is not in restEndpoints ` +
                 'in your config OR the default location constraint for request ' +
@@ -167,7 +167,7 @@ class BackendInfo {
                 'match any config locationConstraint - Please update.' };
         }
         if (BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
             log)) {
             return { isValid: true };
         }
         return { isValid: true, defaultedToDataBackend: true };
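Read together, the BackendInfo hunks trace a precedence chain for picking the backing location of a request (the enclosing method is not named in these hunks; its arguments are taken from the `@@` context line). A minimal, hypothetical sketch of the fall-through order, with validity checks omitted:

```js
// Illustrative only; function and argument names are assumptions.
function pickLocation(objectLC, bucketLC, legacyLC, endpointDefault) {
    if (objectLC) {
        return objectLC; // per-object constraint wins
    }
    if (bucketLC) {
        return bucketLC; // then the bucket's constraint
    }
    if (legacyLC) {
        return legacyLC; // then a legacy location, when one applies
    }
    return endpointDefault; // else the endpoint default / data backend
}
```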
@@ -69,13 +69,13 @@ class BucketInfo {
      * @param {object} [notificationConfiguration] - bucket notification configuration
      */
     constructor(name, owner, ownerDisplayName, creationDate,
         mdBucketModelVersion, acl, transient, deleted,
         serverSideEncryption, versioningConfiguration,
         locationConstraint, websiteConfiguration, cors,
         replicationConfiguration, lifecycleConfiguration,
         bucketPolicy, uid, readLocationConstraint, isNFS,
         ingestionConfig, azureInfo, objectLockEnabled,
         objectLockConfiguration, notificationConfiguration) {
         assert.strictEqual(typeof name, 'string');
         assert.strictEqual(typeof owner, 'string');
         assert.strictEqual(typeof ownerDisplayName, 'string');
|
||||||
if (serverSideEncryption) {
|
if (serverSideEncryption) {
|
||||||
assert.strictEqual(typeof serverSideEncryption, 'object');
|
assert.strictEqual(typeof serverSideEncryption, 'object');
|
||||||
const { cryptoScheme, algorithm, masterKeyId,
|
const { cryptoScheme, algorithm, masterKeyId,
|
||||||
configuredMasterKeyId, mandatory } = serverSideEncryption;
|
configuredMasterKeyId, mandatory } = serverSideEncryption;
|
||||||
assert.strictEqual(typeof cryptoScheme, 'number');
|
assert.strictEqual(typeof cryptoScheme, 'number');
|
||||||
assert.strictEqual(typeof algorithm, 'string');
|
assert.strictEqual(typeof algorithm, 'string');
|
||||||
assert.strictEqual(typeof masterKeyId, 'string');
|
assert.strictEqual(typeof masterKeyId, 'string');
|
||||||
|
|
|
@@ -381,7 +381,7 @@ class LifecycleConfiguration {
             if (!tags[i].Key || !tags[i].Value) {
                 tagObj.error =
                     errors.MissingRequiredParameter.customizeDescription(
                         'Tag XML does not contain both Key and Value');
                 break;
             }

@@ -929,7 +929,7 @@ class LifecycleConfiguration {
             const daysInt = parseInt(subExp.Days[0], 10);
             if (daysInt < 1) {
                 expObj.error = errors.InvalidArgument.customizeDescription(
                     'Expiration days is not a positive integer');
             } else {
                 expObj.days = daysInt;
             }
@@ -1125,10 +1125,10 @@ class LifecycleConfiguration {
             const { noncurrentDays, storageClass } = transition;
             xml.push(
                 `<${actionName}>`,
                 `<NoncurrentDays>${noncurrentDays}` +
                     '</NoncurrentDays>',
                 `<StorageClass>${storageClass}</StorageClass>`,
-                `</${actionName}>`
+                `</${actionName}>`,
             );
         });
         Action = xml.join('');
@@ -1146,9 +1146,9 @@ class LifecycleConfiguration {
             }
             xml.push(
                 `<${actionName}>`,
                 element,
                 `<StorageClass>${storageClass}</StorageClass>`,
-                `</${actionName}>`
+                `</${actionName}>`,
             );
         });
         Action = xml.join('');
@@ -27,7 +27,7 @@ const errors = require('../errors');
  * </NotificationConfiguration>
  */

 /**
  * Format of config:
  *
  * config = {
@@ -17,7 +17,7 @@ const errors = require('../errors');
  * </ObjectLockConfiguration>
  */

 /**
  * Format of config:
  *
  * config = {
@@ -10,7 +10,6 @@ const ObjectMDLocation = require('./ObjectMDLocation');
  * mpuPart metadata for example)
  */
 class ObjectMD {
-
     /**
      * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
      * reserved for internal use, users should call
@@ -149,7 +148,7 @@ class ObjectMD {

         Object.assign(this._data, objMd._data);
         Object.assign(this._data.replicationInfo,
             objMd._data.replicationInfo);
     }

     _updateFromParsedJSON(objMd) {
@@ -3,7 +3,6 @@
  * 'location' array
  */
 class ObjectMDLocation {
-
     /**
      * @constructor
      * @param {object} locationObj - single data location info
@@ -111,7 +111,7 @@ class RoundRobin {
     pickHost() {
         if (this.logger) {
             this.logger.debug('pick host',
                 { host: this.getCurrentHost() });
         }
         const curHost = this.getCurrentHost();
         ++this.pickCount;
@@ -163,7 +163,7 @@ class RoundRobin {
         }
         if (this.logger) {
             this.logger.debug('round robin host',
                 { newHost: this.getCurrentHost() });
         }
     }
 }
@@ -10,7 +10,6 @@ const { checkSupportIPv6 } = require('./utils');


 class Server {
-
     /**
      * @constructor
      *
@@ -431,16 +430,16 @@ class Server {
             // Setting no delay of the socket to the value configured
             sock.setNoDelay(this.isNoDelay());
             sock.on('error', err => this._logger.info(
                 'socket error - request rejected', { error: err }));
         });
         this._server.on('tlsClientError', (err, sock) =>
             this._onClientError(err, sock));
         this._server.on('clientError', (err, sock) =>
             this._onClientError(err, sock));
         this._server.on('checkContinue', (req, res) =>
             this._onCheckContinue(req, res));
         this._server.on('checkExpectation', (req, res) =>
             this._onCheckExpectation(req, res));
         this._server.on('listening', () => this._onListening());
         }
         this._server.listen(this._port, this._address);
@@ -72,8 +72,8 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
     if (rangeSpec.start < objectSize) {
         // test is false if end is undefined
         return { range: [rangeSpec.start,
             (rangeSpec.end < objectSize ?
                 rangeSpec.end : objectSize - 1)] };
     }
     return { error: errors.InvalidRange };
 }
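`getByteRangeFromSpec` clamps a parsed HTTP Range to the object's size: an end offset past the last byte is pulled back to `objectSize - 1`, and a start offset past the end of the object is an invalid range. Worked examples for a 1000-byte object:

```js
// Assumes a rangeSpec already parsed from a 'Range: bytes=...' header.
getByteRangeFromSpec({ start: 900, end: 1100 }, 1000);
// => { range: [900, 999] }  (end clamped to the last byte)
getByteRangeFromSpec({ start: 900, end: undefined }, 1000);
// => { range: [900, 999] }  (the end < objectSize test is false)
getByteRangeFromSpec({ start: 1200, end: 1300 }, 1000);
// => { error: errors.InvalidRange }
```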
@@ -90,8 +90,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::negotiateProtocolVersion',
                 { error,
                     vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         const majorVersions =
@@ -102,8 +102,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
             majorVersions.length !== minorVersions.length) {
             const error = _arsenalError('No suitable protocol version');
             logger.error('KMIP::negotiateProtocolVersion',
                 { error,
                     vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]);
@@ -126,8 +126,8 @@ function _mapExtensions(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::mapExtensions',
                 { error,
                     vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         const extensionNames = response.lookup(searchFilter.extensionName);
@@ -135,8 +135,8 @@ function _mapExtensions(client, logger, cb) {
         if (extensionNames.length !== extensionTags.length) {
             const error = _arsenalError('Inconsistent extension list');
             logger.error('KMIP::mapExtensions',
                 { error,
                     vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         extensionNames.forEach((extensionName, idx) => {
@@ -160,7 +160,7 @@ function _queryServerInformation(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.warn('KMIP::queryServerInformation',
                 { error });
             /* no error returned, caller can keep going */
             return cb();
         }
@@ -170,9 +170,9 @@ function _queryServerInformation(client, logger, cb) {
             JSON.stringify(response.lookup(searchFilter.serverInformation)[0]));

         logger.info('KMIP Server identified',
             { vendorIdentification: client.vendorIdentification,
                 serverInformation: client.serverInformation,
                 negotiatedProtocolVersion: client.kmip.protocolVersion });
         return cb();
     });
 }
|
@ -196,8 +196,8 @@ function _queryOperationsAndObjects(client, logger, cb) {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::queryOperationsAndObjects',
|
logger.error('KMIP::queryOperationsAndObjects',
|
||||||
{ error,
|
{ error,
|
||||||
vendorIdentification: client.vendorIdentification });
|
vendorIdentification: client.vendorIdentification });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
const supportedOperations = response.lookup(searchFilter.operation);
|
const supportedOperations = response.lookup(searchFilter.operation);
|
||||||
|
@@ -222,15 +222,15 @@ function _queryOperationsAndObjects(client, logger, cb) {
             logger.warn('KMIP::queryOperationsAndObjects: ' +
                 'The KMIP Server announces that it ' +
                 'does not support all of the required features',
                 { vendorIdentification: client.vendorIdentification,
                     serverInformation: client.serverInformation,
                     supportsEncrypt, supportsDecrypt,
                     supportsActivate, supportsRevoke,
                     supportsCreate, supportsDestroy,
                     supportsQuery, supportsSymmetricKeys });
         } else {
             logger.info('KMIP Server provides the necessary feature set',
                 { vendorIdentification: client.vendorIdentification });
         }
         return cb();
     });
@@ -264,8 +264,8 @@ class Client {
         this.vendorIdentification = '';
         this.serverInformation = [];
         this.kmip = new KMIP(CodecClass || TTLVCodec,
             TransportClass || TlsTransport,
             options);
         this.kmip.registerHandshakeFunction((logger, cb) => {
             this._kmipHandshake(logger, cb);
         });
@@ -322,8 +322,8 @@ class Client {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::_activateBucketKey',
                 { error,
                     serverInformation: this.serverInformation });
             return cb(error);
         }
         const uniqueIdentifier =
@@ -332,7 +332,7 @@ class Client {
             const error = _arsenalError(
                 'Server did not return the expected identifier');
             logger.error('KMIP::cipherDataKey',
                 { error, uniqueIdentifier });
             return cb(error);
         }
         return cb(null, keyIdentifier);
@@ -351,20 +351,20 @@ class Client {
         const attributes = [];
         if (!!this.options.bucketNameAttributeName) {
             attributes.push(KMIP.Attribute('TextString',
                 this.options.bucketNameAttributeName,
                 bucketName));
         }
         attributes.push(...[
             KMIP.Attribute('Enumeration', 'Cryptographic Algorithm',
                 CRYPTOGRAPHIC_ALGORITHM),
             KMIP.Attribute('Integer', 'Cryptographic Length',
                 CRYPTOGRAPHIC_LENGTH),
             KMIP.Attribute('Integer', 'Cryptographic Usage Mask',
                 this.kmip.encodeMask('Cryptographic Usage Mask',
                     CRYPTOGRAPHIC_USAGE_MASK))]);
         if (this.options.compoundCreateActivate) {
             attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
                 new Date(Date.UTC())));
         }

         return this.kmip.request(logger, 'Create', [
@ -374,8 +374,8 @@ class Client {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::createBucketKey',
|
logger.error('KMIP::createBucketKey',
|
||||||
{ error,
|
{ error,
|
||||||
serverInformation: this.serverInformation });
|
serverInformation: this.serverInformation });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
const createdObjectType =
|
const createdObjectType =
|
||||||
|
@ -386,7 +386,7 @@ class Client {
|
||||||
const error = _arsenalError(
|
const error = _arsenalError(
|
||||||
'Server created an object of wrong type');
|
'Server created an object of wrong type');
|
||||||
logger.error('KMIP::createBucketKey',
|
logger.error('KMIP::createBucketKey',
|
||||||
{ error, createdObjectType });
|
{ error, createdObjectType });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
if (!this.options.compoundCreateActivate) {
|
if (!this.options.compoundCreateActivate) {
|
||||||
|
@ -411,16 +411,16 @@ class Client {
|
||||||
KMIP.TextString('Unique Identifier', bucketKeyId),
|
KMIP.TextString('Unique Identifier', bucketKeyId),
|
||||||
KMIP.Structure('Revocation Reason', [
|
KMIP.Structure('Revocation Reason', [
|
||||||
KMIP.Enumeration('Revocation Reason Code',
|
KMIP.Enumeration('Revocation Reason Code',
|
||||||
'Cessation of Operation'),
|
'Cessation of Operation'),
|
||||||
KMIP.TextString('Revocation Message',
|
KMIP.TextString('Revocation Message',
|
||||||
'About to be deleted'),
|
'About to be deleted'),
|
||||||
]),
|
]),
|
||||||
], (err, response) => {
|
], (err, response) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::_revokeBucketKey',
|
logger.error('KMIP::_revokeBucketKey',
|
||||||
{ error,
|
{ error,
|
||||||
serverInformation: this.serverInformation });
|
serverInformation: this.serverInformation });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
const uniqueIdentifier =
|
const uniqueIdentifier =
|
||||||
|
@ -429,7 +429,7 @@ class Client {
|
||||||
const error = _arsenalError(
|
const error = _arsenalError(
|
||||||
'Server did not return the expected identifier');
|
'Server did not return the expected identifier');
|
||||||
logger.error('KMIP::_revokeBucketKey',
|
logger.error('KMIP::_revokeBucketKey',
|
||||||
{ error, uniqueIdentifier });
|
{ error, uniqueIdentifier });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
return cb();
|
return cb();
|
||||||
|
@ -448,8 +448,8 @@ class Client {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::destroyBucketKey: revocation failed',
|
logger.error('KMIP::destroyBucketKey: revocation failed',
|
||||||
{ error,
|
{ error,
|
||||||
serverInformation: this.serverInformation });
|
serverInformation: this.serverInformation });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
return this.kmip.request(logger, 'Destroy', [
|
return this.kmip.request(logger, 'Destroy', [
|
||||||
|
@ -458,8 +458,8 @@ class Client {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::destroyBucketKey',
|
logger.error('KMIP::destroyBucketKey',
|
||||||
{ error,
|
{ error,
|
||||||
serverInformation: this.serverInformation });
|
serverInformation: this.serverInformation });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
const uniqueIdentifier =
|
const uniqueIdentifier =
|
||||||
|
@ -468,7 +468,7 @@ class Client {
|
||||||
const error = _arsenalError(
|
const error = _arsenalError(
|
||||||
'Server did not return the expected identifier');
|
'Server did not return the expected identifier');
|
||||||
logger.error('KMIP::destroyBucketKey',
|
logger.error('KMIP::destroyBucketKey',
|
||||||
{ error, uniqueIdentifier });
|
{ error, uniqueIdentifier });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
return cb();
|
return cb();
|
||||||
|
@ -487,19 +487,19 @@ class Client {
|
||||||
* @callback called with (err, cipheredDataKey: Buffer)
|
* @callback called with (err, cipheredDataKey: Buffer)
|
||||||
*/
|
*/
|
||||||
cipherDataKey(cryptoScheme,
|
cipherDataKey(cryptoScheme,
|
||||||
masterKeyId,
|
masterKeyId,
|
||||||
plainTextDataKey,
|
plainTextDataKey,
|
||||||
logger,
|
logger,
|
||||||
cb) {
|
cb) {
|
||||||
return this.kmip.request(logger, 'Encrypt', [
|
return this.kmip.request(logger, 'Encrypt', [
|
||||||
KMIP.TextString('Unique Identifier', masterKeyId),
|
KMIP.TextString('Unique Identifier', masterKeyId),
|
||||||
KMIP.Structure('Cryptographic Parameters', [
|
KMIP.Structure('Cryptographic Parameters', [
|
||||||
KMIP.Enumeration('Block Cipher Mode',
|
KMIP.Enumeration('Block Cipher Mode',
|
||||||
CRYPTOGRAPHIC_CIPHER_MODE),
|
CRYPTOGRAPHIC_CIPHER_MODE),
|
||||||
KMIP.Enumeration('Padding Method',
|
KMIP.Enumeration('Padding Method',
|
||||||
CRYPTOGRAPHIC_PADDING_METHOD),
|
CRYPTOGRAPHIC_PADDING_METHOD),
|
||||||
KMIP.Enumeration('Cryptographic Algorithm',
|
KMIP.Enumeration('Cryptographic Algorithm',
|
||||||
CRYPTOGRAPHIC_ALGORITHM),
|
CRYPTOGRAPHIC_ALGORITHM),
|
||||||
]),
|
]),
|
||||||
KMIP.ByteString('Data', plainTextDataKey),
|
KMIP.ByteString('Data', plainTextDataKey),
|
||||||
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
|
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
|
||||||
|
@ -507,8 +507,8 @@ class Client {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::cipherDataKey',
|
logger.error('KMIP::cipherDataKey',
|
||||||
{ error,
|
{ error,
|
||||||
serverInformation: this.serverInformation });
|
serverInformation: this.serverInformation });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
const uniqueIdentifier =
|
const uniqueIdentifier =
|
||||||
|
@ -518,7 +518,7 @@ class Client {
|
||||||
const error = _arsenalError(
|
const error = _arsenalError(
|
||||||
'Server did not return the expected identifier');
|
'Server did not return the expected identifier');
|
||||||
logger.error('KMIP::cipherDataKey',
|
logger.error('KMIP::cipherDataKey',
|
||||||
{ error, uniqueIdentifier });
|
{ error, uniqueIdentifier });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
return cb(null, data);
|
return cb(null, data);
|
||||||
|
@ -536,19 +536,19 @@ class Client {
|
||||||
* @callback called with (err, plainTextDataKey: Buffer)
|
* @callback called with (err, plainTextDataKey: Buffer)
|
||||||
*/
|
*/
|
||||||
decipherDataKey(cryptoScheme,
|
decipherDataKey(cryptoScheme,
|
||||||
masterKeyId,
|
masterKeyId,
|
||||||
cipheredDataKey,
|
cipheredDataKey,
|
||||||
logger,
|
logger,
|
||||||
cb) {
|
cb) {
|
||||||
return this.kmip.request(logger, 'Decrypt', [
|
return this.kmip.request(logger, 'Decrypt', [
|
||||||
KMIP.TextString('Unique Identifier', masterKeyId),
|
KMIP.TextString('Unique Identifier', masterKeyId),
|
||||||
KMIP.Structure('Cryptographic Parameters', [
|
KMIP.Structure('Cryptographic Parameters', [
|
||||||
KMIP.Enumeration('Block Cipher Mode',
|
KMIP.Enumeration('Block Cipher Mode',
|
||||||
CRYPTOGRAPHIC_CIPHER_MODE),
|
CRYPTOGRAPHIC_CIPHER_MODE),
|
||||||
KMIP.Enumeration('Padding Method',
|
KMIP.Enumeration('Padding Method',
|
||||||
CRYPTOGRAPHIC_PADDING_METHOD),
|
CRYPTOGRAPHIC_PADDING_METHOD),
|
||||||
KMIP.Enumeration('Cryptographic Algorithm',
|
KMIP.Enumeration('Cryptographic Algorithm',
|
||||||
CRYPTOGRAPHIC_ALGORITHM),
|
CRYPTOGRAPHIC_ALGORITHM),
|
||||||
]),
|
]),
|
||||||
KMIP.ByteString('Data', cipheredDataKey),
|
KMIP.ByteString('Data', cipheredDataKey),
|
||||||
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
|
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
|
||||||
|
@ -556,8 +556,8 @@ class Client {
|
||||||
if (err) {
|
if (err) {
|
||||||
const error = _arsenalError(err);
|
const error = _arsenalError(err);
|
||||||
logger.error('KMIP::decipherDataKey',
|
logger.error('KMIP::decipherDataKey',
|
||||||
{ error,
|
{ error,
|
||||||
serverInformation: this.serverInformation });
|
serverInformation: this.serverInformation });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
const uniqueIdentifier =
|
const uniqueIdentifier =
|
||||||
|
@ -567,7 +567,7 @@ class Client {
|
||||||
const error = _arsenalError(
|
const error = _arsenalError(
|
||||||
'Server did not return the right identifier');
|
'Server did not return the right identifier');
|
||||||
logger.error('KMIP::decipherDataKey',
|
logger.error('KMIP::decipherDataKey',
|
||||||
{ error, uniqueIdentifier });
|
{ error, uniqueIdentifier });
|
||||||
return cb(error);
|
return cb(error);
|
||||||
}
|
}
|
||||||
return cb(null, data);
|
return cb(null, data);
|
||||||
|
|
|
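The hunks above touch every step of the KMIP bucket-key lifecycle (create/activate, encrypt, decrypt, revoke/destroy), all exposed as plain callback APIs. A minimal driver sketch; the `createBucketKey(bucketName, logger, cb)` signature, the in-scope `Client` binding, and the werelogs-style `logger` are assumptions not shown in this diff:

    const assert = require('assert');
    // `Client` is the class shown in this diff; `logger` a request logger.
    const client = new Client({ /* codec/transport options elided */ });

    client.createBucketKey('my-bucket', logger, (err, masterKeyId) => {
        assert.ifError(err);
        const dataKey = Buffer.alloc(32, 0x2a); // demo key material
        // cryptoScheme 1 is a placeholder value for this sketch
        client.cipherDataKey(1, masterKeyId, dataKey, logger,
            (err, ciphered) => {
                assert.ifError(err);
                client.decipherDataKey(1, masterKeyId, ciphered, logger,
                    (err, plain) => {
                        assert.ifError(err);
                        assert(plain.equals(dataKey)); // round-trip intact
                    });
            });
    });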
@@ -55,15 +55,15 @@ function TTLVCodec() {
            const property = {};
            if (!TypeDecoder[elementType]) {
                _throwError(logger,
                            'Unknown element type',
                            { funcName, elementTag, elementType });
            }
            const elementValue = value.slice(i + 8,
                                             i + 8 + elementLength);
            if (elementValue.length !== elementLength) {
                _throwError(logger, 'BUG: Wrong buffer size',
                            { funcName, elementLength,
                              bufferLength: elementValue.length });
            }
            property.type = TypeDecoder[elementType].name;
            property.value = TypeDecoder[elementType]

@@ -75,7 +75,7 @@ function TTLVCodec() {
            const tagInfo = TagDecoder[elementTag];
            if (!tagInfo) {
                logger.debug('Unknown element tag',
                             { funcName, elementTag });
                property.tag = elementTag;
                element['Unknown Tag'] = property;
            } else {

@@ -83,8 +83,8 @@ function TTLVCodec() {
                if (tagInfo.name === 'Attribute Name') {
                    if (property.type !== 'TextString') {
                        _throwError(logger,
                                    'Invalide type',
                                    { funcName, type: property.type });
                    }
                    diversion = property.value;
                }

@@ -114,8 +114,8 @@ function TTLVCodec() {
                }
                const itemResult =
                    TypeEncoder[itemType].encode(itemTagName,
                                                 itemValue,
                                                 itemDiversion);
                encodedValue = encodedValue
                    .concat(_ttlvPadVector(itemResult));
            });

@@ -133,9 +133,9 @@ function TTLVCodec() {
                const fixedLength = 4;
                if (fixedLength !== value.length) {
                    _throwError(logger,
                                'Length mismatch',
                                { funcName, fixedLength,
                                  bufferLength: value.length });
                }
                return value.readUInt32BE(0);
            },

@@ -156,16 +156,16 @@ function TTLVCodec() {
                const fixedLength = 8;
                if (fixedLength !== value.length) {
                    _throwError(logger,
                                'Length mismatch',
                                { funcName, fixedLength,
                                  bufferLength: value.length });
                }
                const longUInt = UINT32_MAX * value.readUInt32BE(0) +
                    value.readUInt32BE(4);
                if (longUInt > Number.MAX_SAFE_INTEGER) {
                    _throwError(logger,
                                '53-bit overflow',
                                { funcName, longUInt });
                }
                return longUInt;
            },

@@ -200,9 +200,9 @@ function TTLVCodec() {
                const fixedLength = 4;
                if (fixedLength !== value.length) {
                    _throwError(logger,
                                'Length mismatch',
                                { funcName, fixedLength,
                                  bufferLength: value.length });
                }
                const enumValue = value.toString('hex');
                const actualTag = diversion ? TagEncoder[diversion].value : tag;

@@ -211,10 +211,10 @@ function TTLVCodec() {
                    !enumInfo.enumeration ||
                    !enumInfo.enumeration[enumValue]) {
                    return { tag,
                             value: enumValue,
                             message: 'Unknown enumeration value',
                             diversion,
                    };
                }
                return enumInfo.enumeration[enumValue];
            },

@@ -227,7 +227,7 @@ function TTLVCodec() {
                const actualTag = diversion || tagName;
                const encodedValue =
                    Buffer.from(TagEncoder[actualTag].enumeration[value],
                                'hex');
                return _ttlvPadVector([tag, type, length, encodedValue]);
            },
        },

@@ -238,9 +238,9 @@ function TTLVCodec() {
                const fixedLength = 8;
                if (fixedLength !== value.length) {
                    _throwError(logger,
                                'Length mismatch',
                                { funcName, fixedLength,
                                  bufferLength: value.length });
                }
                const msUInt = value.readUInt32BE(0);
                const lsUInt = value.readUInt32BE(4);

@@ -267,7 +267,7 @@ function TTLVCodec() {
                const length = Buffer.alloc(4);
                length.writeUInt32BE(value.length);
                return _ttlvPadVector([tag, type, length,
                                       Buffer.from(value, 'utf8')]);
            },
        },
        '08': {

@@ -289,17 +289,17 @@ function TTLVCodec() {
                const fixedLength = 8;
                if (fixedLength !== value.length) {
                    _throwError(logger,
                                'Length mismatch',
                                { funcName, fixedLength,
                                  bufferLength: value.length });
                }
                const d = new Date(0);
                const utcSeconds = UINT32_MAX * value.readUInt32BE(0) +
                    value.readUInt32BE(4);
                if (utcSeconds > Number.MAX_SAFE_INTEGER) {
                    _throwError(logger,
                                '53-bit overflow',
                                { funcName, utcSeconds });
                }
                d.setUTCSeconds(utcSeconds);
                return d;

@@ -323,9 +323,9 @@ function TTLVCodec() {
                const fixedLength = 4;
                if (fixedLength !== value.length) {
                    _throwError(logger,
                                'Length mismatch',
                                { funcName, fixedLength,
                                  bufferLength: value.length });
                }
                return value.readInt32BE(0);
            },

@@ -415,8 +415,8 @@ function TTLVCodec() {
                throw Error(`Unknown Type '${type}'`);
            }
            const itemValue = TypeEncoder[type].encode(key,
                                                       item[key].value,
                                                       item[key].diversion);
            result = result.concat(_ttlvPadVector(itemValue));
        });
    });
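The 8-byte decoders above (Long Integer, Date-Time) rebuild a 64-bit big-endian value from two 32-bit reads, then refuse anything beyond Number.MAX_SAFE_INTEGER, since JS numbers only carry 53 bits of integer precision. A self-contained sketch of that trick; UINT32_MAX is assumed to hold 2^32, the weight of the high word, as the decoders' arithmetic implies:

    const UINT32_MAX = 4294967296; // 2^32

    function readUInt64BE53(buf) {
        // high word * 2^32 + low word; exact while below 2^53
        const longUInt = UINT32_MAX * buf.readUInt32BE(0) +
            buf.readUInt32BE(4);
        if (longUInt > Number.MAX_SAFE_INTEGER) {
            throw new Error('53-bit overflow');
        }
        return longUInt;
    }

    // 0x0000000100000000 === 2^32
    readUInt64BE53(Buffer.from('0000000100000000', 'hex')); // 4294967296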
@@ -275,11 +275,11 @@ class KMIP {
            KMIP.Structure('Request Header', [
                KMIP.Structure('Protocol Version', [
                    KMIP.Integer('Protocol Version Major',
                                 this.protocolVersion.major),
                    KMIP.Integer('Protocol Version Minor',
                                 this.protocolVersion.minor)]),
                KMIP.Integer('Maximum Response Size',
                             this.maximumResponseSize),
                KMIP.Integer('Batch Count', 1)]),
            KMIP.Structure('Batch Item', [
                KMIP.Enumeration('Operation', operation),

@@ -292,7 +292,7 @@ class KMIP {
            (err, conversation, rawResponse) => {
                if (err) {
                    logger.error('KMIP::request: Failed to send message',
                                 { error: err });
                    return cb(err);
                }
                const response = this._decodeMessage(logger, rawResponse);

@@ -311,16 +311,16 @@ class KMIP {
                    this.transport.abortPipeline(conversation);
                    const error = Error('Invalid batch item ID returned');
                    logger.error('KMIP::request: failed',
                                 { resultUniqueBatchItemID, uuid, error });
                    return cb(error);
                }
                if (performedOperation !== operation) {
                    this.transport.abortPipeline(conversation);
                    const error = Error('Operation mismatch',
                                        { got: performedOperation,
                                          expected: operation });
                    logger.error('KMIP::request: Operation mismatch',
                                 { error });
                    return cb(error);
                }
                if (resultStatus !== 'Success') {

@@ -331,19 +331,17 @@ class KMIP {
                        response.lookup(
                            'Response Message/Batch Item/Result Message')[0];
                    const error = Error('KMIP request failure',
                                        { resultStatus,
                                          resultReason,
                                          resultMessage });
                    logger.error('KMIP::request: request failed',
                                 { error, resultStatus,
                                   resultReason, resultMessage });
                    return cb(error);
                }
                return cb(null, response);
            });
    }

}

@@ -86,8 +86,8 @@ class TransportTemplate {
            const deferedRequest = this.deferedRequests.shift();
            process.nextTick(() => {
                this.send(logger,
                          deferedRequest.encodedMessage,
                          deferedRequest.cb);
            });
        } else if (this.callbackPipeline.length === 0 &&
                   this.deferedRequests.length === 0 &&
@@ -26,7 +26,7 @@ function sendError(res, log, error, optMessage) {
            httpCode: error.code,
            errorType: error.message,
            error: message,
-        }
+        },
    );
    res.writeHead(error.code);
    res.end(JSON.stringify({

@@ -19,7 +19,7 @@ function setContentRange(response, byteRange, objectSize) {
    const [start, end] = byteRange;
    assert(start !== undefined && end !== undefined);
    response.setHeader('Content-Range',
                       `bytes ${start}-${end}/${objectSize}`);
}

function sendError(res, log, error, optMessage) {

@@ -45,7 +45,6 @@ function sendError(res, log, error, optMessage) {
 * start() to start listening to the configured port.
 */
class RESTServer extends httpServer {
-
    /**
     * @constructor
     * @param {Object} params - constructor params

@@ -227,7 +226,7 @@ class RESTServer extends httpServer {
                return sendError(res, log, err);
            }
            log.debug('sending back 200/206 response with contents',
                      { key: pathInfo.key });
            setContentLength(res, contentLength);
            res.setHeader('Accept-Ranges', 'bytes');
            if (byteRange) {

@@ -265,7 +264,7 @@ class RESTServer extends httpServer {
                return sendError(res, log, err);
            }
            log.debug('sending back 204 response to DELETE',
                      { key: pathInfo.key });
            res.writeHead(204);
            return res.end(() => {
                log.debug('DELETE response sent', { key: pathInfo.key });
@@ -19,7 +19,7 @@ function explodePath(path) {
        return {
            service: pathMatch[1],
            key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
                pathMatch[3] : undefined),
        };
    }
    throw errors.InvalidURI.customizeDescription('malformed URI');
@@ -17,7 +17,6 @@ const rpc = require('./rpc.js');
 * RPC client object accessing the sub-level transparently.
 */
class LevelDbClient extends rpc.BaseClient {
-
    /**
     * @constructor
     *

@@ -78,7 +77,6 @@ class LevelDbClient extends rpc.BaseClient {
 * env.subDb (env is passed as first parameter of received RPC calls).
 */
class LevelDbService extends rpc.BaseService {
-
    /**
     * @constructor
     *
@@ -37,7 +37,6 @@ let streamRPCJSONObj;
 * an error occurred).
 */
class BaseClient extends EventEmitter {
-
    /**
     * @constructor
     *

@@ -54,7 +53,7 @@ class BaseClient extends EventEmitter {
     */
    constructor(params) {
        const { url, logger, callTimeoutMs,
            streamMaxPendingAck, streamAckTimeoutMs } = params;
        assert(url);
        assert(logger);

@@ -82,11 +81,11 @@ class BaseClient extends EventEmitter {
    _call(remoteCall, args, cb) {
        const wrapCb = (err, data) => {
            cb(reconstructError(err),
                this.socketStreams.decodeStreams(data));
        };
        this.logger.debug('remote call', { remoteCall, args });
        this.socket.emit('call', remoteCall,
            this.socketStreams.encodeStreams(args), wrapCb);
        return undefined;
    }

@@ -113,8 +112,8 @@ class BaseClient extends EventEmitter {
            throw new Error(`argument cb=${cb} is not a callback`);
        }
        async.timeout(this._call.bind(this), timeoutMs,
            `operation ${remoteCall} timed out`)(remoteCall,
            args, cb);
        return undefined;
    }

@@ -142,7 +141,7 @@ class BaseClient extends EventEmitter {
        const url = this.url;
        this.socket.on('error', err => {
            this.logger.warn('connectivity error to the RPC service',
                { url, error: err });
        });
        this.socket.on('connect', () => {
            this.emit('connect');

@@ -156,7 +155,7 @@ class BaseClient extends EventEmitter {
        this.getManifest((err, manifest) => {
            if (err) {
                this.logger.error('Error fetching manifest from RPC server',
                    { error: err });
            } else {
                manifest.api.forEach(apiItem => {
                    this.createCall(apiItem.name);

@@ -251,7 +250,6 @@ class BaseClient extends EventEmitter {
 *
 */
class BaseService {
-
    /**
     * @constructor
     *

@@ -497,7 +495,7 @@ function RPCServer(params) {

    conn.on('error', err => {
        log.error('error on socket.io connection',
            { namespace: service.namespace, error: err });
    });
    conn.on('call', (remoteCall, args, cb) => {
        const decodedArgs = streamsSocket.decodeStreams(args);

@@ -647,8 +645,8 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
    // primitive types
    if (obj === undefined) {
        wstream.write('null'); // if undefined elements are present in
                               // arrays, convert them to JSON null
                               // objects
    } else {
        wstream.write(JSON.stringify(obj));
    }
@@ -16,7 +16,7 @@ class SIOOutputStream extends stream.Writable {
    constructor(socket, streamId, maxPendingAck, ackTimeoutMs) {
        super({ objectMode: true });
        this._initOutputStream(socket, streamId, maxPendingAck,
            ackTimeoutMs);
    }

    _initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) {

@@ -194,7 +194,7 @@ class SIOStreamSocket {
        this.socket.on('stream-data', (payload, cb) => {
            const { streamId, data } = payload;
            log.debug('received \'stream-data\' event',
                { streamId, size: data.length });
            const stream = this.remoteStreams[streamId];
            if (!stream) {
                log.debug('no such remote stream registered', { streamId });

@@ -280,15 +280,15 @@ class SIOStreamSocket {
        let transportStream;
        if (isReadStream) {
            transportStream = new SIOOutputStream(this, streamId,
                this.maxPendingAck,
                this.ackTimeoutMs);
        } else {
            transportStream = new SIOInputStream(this, streamId);
        }
        this.localStreams[streamId] = arg;
        arg.once('close', () => {
            log.debug('stream closed, removing from local streams',
                { streamId });
            delete this.localStreams[streamId];
        });
        arg.on('error', error => {

@@ -350,8 +350,8 @@ class SIOStreamSocket {
            stream = new SIOInputStream(this, streamId);
        } else if (arg.writable) {
            stream = new SIOOutputStream(this, streamId,
                this.maxPendingAck,
                this.ackTimeoutMs);
        } else {
            throw new Error('can\'t decode stream neither readable ' +
                'nor writable');

@@ -360,14 +360,14 @@ class SIOStreamSocket {
        if (arg.readable) {
            stream.once('close', () => {
                log.debug('stream closed, removing from remote streams',
                    { streamId });
                delete this.remoteStreams[streamId];
            });
        }
        if (arg.writable) {
            stream.once('finish', () => {
                log.debug('stream finished, removing from remote streams',
                    { streamId });
                delete this.remoteStreams[streamId];
            });
        }

@@ -399,7 +399,7 @@ class SIOStreamSocket {

    _write(streamId, data, cb) {
        this.logger.debug('emit \'stream-data\' event',
            { streamId, size: data.length });
        this.socket.emit('stream-data', { streamId, data }, cb);
    }
@@ -50,7 +50,7 @@ evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
            requestResourceArr, true);
        if (arnSegmentsMatch) {
            log.trace('policy resource is applicable to request',
                { requestResource: resource, policyResource });
            return true;
        }
        continue;

@@ -224,21 +224,21 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
        // in policy, move on to next statement
        if (currentStatement.NotResource &&
            evaluators.isResourceApplicable(requestContext,
                currentStatement.NotResource, log)) {
            continue;
        }
        // If affirmative action is in policy and request action is not
        // applicable, move on to next statement
        if (currentStatement.Action &&
            !evaluators.isActionApplicable(requestContext.getAction(),
                currentStatement.Action, log)) {
            continue;
        }
        // If NotAction is in policy and action matches NotAction in policy,
        // move on to next statement
        if (currentStatement.NotAction &&
            evaluators.isActionApplicable(requestContext.getAction(),
                currentStatement.NotAction, log)) {
            continue;
        }
        const conditionEval = currentStatement.Condition ?
@@ -39,11 +39,11 @@ conditions.findConditionKey = (key, requestContext) => {
    // (see Boolean Condition Operators).
    // Note: This key is only present if MFA was used. So, the following
    // will not work:
    // "Condition" :
    //     { "Bool" : { "aws:MultiFactorAuthPresent" : false } }
    // Instead use:
    // "Condition" :
    //     { "Null" : { "aws:MultiFactorAuthPresent" : true } }
    map.set('aws:MultiFactorAuthPresent',
        requestContext.getMultiFactorAuthPresent());
    // aws:MultiFactorAuthAge – Used to check how many seconds since

@@ -166,8 +166,8 @@ conditions.findConditionKey = (key, requestContext) => {
    // so evaluation should be skipped
    map.set('s3:RequestObjectTagKeys',
        requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
            ? getTagKeys(requestContext.getRequestObjTags())
            : undefined);
    return map.get(key);
};

@@ -191,7 +191,7 @@ function convertSpecialChars(string) {
        return map[char];
    }
    return string.replace(/(\$\{\*\})|(\$\{\?\})|(\$\{\$\})/g,
        characterMap);
}

/**

@@ -425,10 +425,10 @@ conditions.convertConditionOperator = operator => {
            return !operatorMap.ArnLike(key, value);
        },
        Null: function nullOperator(key, value) {
            // Null is used to check if a condition key is present.
            // The policy statement value should be either true (the key doesn't
            // exist - it is null) or false (the key exists and its value is
            // not null).
            if ((key === undefined || key === null)
                && value[0] === 'true' ||
                (key !== undefined && key !== null)
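The Null operator above is cut off mid-expression by the hunk boundary. A compact sketch of the truth table it implements; the final `value[0] === 'false'` clause is an assumption based on the comment, not shown in this diff:

    // `key` is the resolved condition key (undefined/null when absent),
    // `value` the policy's string values.
    const nullOperator = (key, value) =>
        ((key === undefined || key === null) && value[0] === 'true') ||
        ((key !== undefined && key !== null) && value[0] === 'false');

    nullOperator(undefined, ['true']);  // true: key absent, as required
    nullOperator('120', ['false']);     // true: key present, as required
    nullOperator(undefined, ['false']); // false: key required but absent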
@@ -51,10 +51,10 @@ wildcards.handleWildcardInResource = arn => {
    // Wildcards can be part of the resource ARN.
    // Wildcards do NOT span segments of the ARN (separated by ":")

    // Example: all elements in specific bucket:
    // "Resource": "arn:aws:s3:::my_corporate_bucket/*"
    // ARN format:
    // arn:partition:service:region:namespace:relative-id
    const arnArr = arn.split(':');
    return arnArr.map(portion => wildcards.handleWildcards(portion));
};
@@ -6,7 +6,6 @@ const crypto = require('crypto');
 * data through a stream
 */
class MD5Sum extends Transform {
-
    /**
     * @constructor
     */

@@ -40,7 +39,6 @@ class MD5Sum extends Transform {
        this.emit('hashed');
        callback(null);
    }
-
}

module.exports = MD5Sum;
@@ -73,7 +73,7 @@ class ResultsCollector extends EventEmitter {
 * @property {Error} [results[].error] - error returned by Azure putting subpart
 * @property {number} results[].subPartIndex - index of the subpart
 */
/**
 * "error" event
 * @event ResultCollector#error
 * @type {(Error|undefined)} error - error returned by Azure last subpart

@@ -94,7 +94,7 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
        azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));

azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
    const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
        = params;
    const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);

@@ -107,31 +107,31 @@ log, cb) => {
    request.pipe(passThrough);
    return errorWrapperFn('uploadPart', 'createBlockFromStream',
        [blockId, bucketName, objectKey, passThrough, size, options,
            (err, result) => {
                if (err) {
                    log.error('Error from Azure data backend uploadPart',
                        { error: err.message, dataStoreName });
                    if (err.code === 'ContainerNotFound') {
                        return cb(errors.NoSuchBucket);
                    }
                    if (err.code === 'InvalidMd5') {
                        return cb(errors.InvalidDigest);
                    }
                    if (err.code === 'Md5Mismatch') {
                        return cb(errors.BadDigest);
                    }
                    return cb(errors.InternalError.customizeDescription(
-                        `Error returned from Azure: ${err.message}`)
+                        `Error returned from Azure: ${err.message}`),
                    );
                }
                const md5 = result.headers['content-md5'] || '';
                const eTag = objectUtils.getHexMD5(md5);
                return cb(null, eTag, size);
            }], log, cb);
};

azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
    const { uploadId, partNumber, bucketName, objectKey } = partParams;
    const subPartSize = azureMpuUtils.getSubPartSize(
        subPartInfo, subPartIndex);

@@ -140,11 +140,11 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
    resultsCollector.pushOp();
    errorWrapperFn('uploadPart', 'createBlockFromStream',
        [subPartId, bucketName, objectKey, subPartStream, subPartSize,
        {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};

azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
    const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
    const resultsCollector = new ResultsCollector();
    const hashedStream = new MD5Sum();
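The handler above maps three Azure error codes onto Arsenal errors and falls back to a customized InternalError. The same decision written as a table-driven sketch; the lookup object is hypothetical, while the codes and target errors are exactly the ones named in the diff:

    const azureToS3Error = {
        ContainerNotFound: errors.NoSuchBucket,
        InvalidMd5: errors.InvalidDigest,
        Md5Mismatch: errors.BadDigest,
    };

    function translateAzureError(err) {
        // fall back to InternalError for anything unrecognized
        return azureToS3Error[err.code] ||
            errors.InternalError.customizeDescription(
                `Error returned from Azure: ${err.message}`);
    }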
@@ -31,9 +31,9 @@ convertMethods.listMultipartUploads = xmlParams => {
    const l = xmlParams.list;

    xml.push('<?xml version="1.0" encoding="UTF-8"?>',
        '<ListMultipartUploadsResult ' +
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
-        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
+        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
    );

    // For certain XML elements, if it is `undefined`, AWS returns either an

@@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
    });

    xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
-        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
+        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
    );

    l.Uploads.forEach(upload => {

@@ -69,29 +69,29 @@ convertMethods.listMultipartUploads = xmlParams => {
        }

        xml.push('<Upload>',
            `<Key>${escapeForXml(key)}</Key>`,
            `<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
            '<Initiator>',
            `<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
                '</DisplayName>',
            '</Initiator>',
            '<Owner>',
            `<ID>${escapeForXml(val.Owner.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
                '</DisplayName>',
            '</Owner>',
            `<StorageClass>${escapeForXml(val.StorageClass)}` +
                '</StorageClass>',
            `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
-            '</Upload>'
+            '</Upload>',
        );
    });

    l.CommonPrefixes.forEach(prefix => {
        xml.push('<CommonPrefixes>',
            `<Prefix>${escapeForXml(prefix)}</Prefix>`,
-            '</CommonPrefixes>'
+            '</CommonPrefixes>',
        );
    });

@@ -5,7 +5,6 @@ const Readable = require('stream').Readable;
 * This class is used to produce zeros filled buffers for a reader consumption
 */
class NullStream extends Readable {
-
    /**
     * Construct a new zeros filled buffers producer that will
     * produce as much bytes as specified by the range parameter, or the size

@@ -32,8 +31,8 @@ class NullStream extends Readable {
    _read(size) {
        const toRead = Math.min(size, this.bytesToRead);
        const buffer = toRead > 0
            ? Buffer.alloc(toRead, 0)
            : null;
        this.bytesToRead -= toRead;
        this.push(buffer);
    }
@@ -110,7 +110,7 @@ function generateMpuPartStorageInfo(filteredPartList) {
 * and extraPartLocations
 */
function validateAndFilterMpuParts(storedParts, jsonList, mpuOverviewKey,
splitter, log) {
    let storedPartsCopy = [];
    const filteredPartsObj = {};
    filteredPartsObj.partList = [];
@@ -4,11 +4,11 @@ const errors = require('../errors');
const escapeForXml = require('./escapeForXml');

const errorInvalidArgument = errors.InvalidArgument
    .customizeDescription('The header \'x-amz-tagging\' shall be ' +
    'encoded as UTF-8 then URLEncoded URL query parameters without ' +
    'tag name duplicates.');
const errorBadRequestLimit50 = errors.BadRequest
    .customizeDescription('Object tags cannot be greater than 50');

/*
    Format of xml request:

@@ -38,7 +38,7 @@ const _validator = {
            result.Tagging.TagSet &&
            result.Tagging.TagSet.length === 1 &&
            (
                result.Tagging.TagSet[0] === '' ||
                result.Tagging.TagSet[0] &&
                Object.keys(result.Tagging.TagSet[0]).length === 1 &&
                result.Tagging.TagSet[0].Tag &&

@@ -155,7 +155,7 @@ function parseTagXml(xml, log, cb) {
function convertToXml(objectTags) {
    const xml = [];
    xml.push('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
        '<Tagging> <TagSet>');
    if (objectTags && Object.keys(objectTags).length > 0) {
        Object.keys(objectTags).forEach(key => {
            xml.push(`<Tag><Key>${escapeForXml(key)}</Key>` +
@ -43,7 +43,7 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
|
||||||
log.debug('empty bucket name', { method: 'routes' });
|
log.debug('empty bucket name', { method: 'routes' });
|
||||||
return (method !== 'OPTIONS') ?
|
return (method !== 'OPTIONS') ?
|
||||||
errors.MethodNotAllowed : errors.AccessForbidden
|
errors.MethodNotAllowed : errors.AccessForbidden
|
||||||
.customizeDescription('CORSResponse: Bucket not found');
|
.customizeDescription('CORSResponse: Bucket not found');
|
||||||
}
|
}
|
||||||
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
|
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
|
||||||
blacklistedPrefixes.bucket) === false) {
|
blacklistedPrefixes.bucket) === false) {
|
||||||
|
@@ -92,7 +92,7 @@ function checkTypes(req, res, params, logger, s3config) {
        'bad routes param: internalHandlers must be an object');
    if (params.statsClient) {
        assert.strictEqual(typeof params.statsClient, 'object',
            'bad routes param: statsClient must be an object');
    }
    assert(Array.isArray(params.allEndpoints),
        'bad routes param: allEndpoints must be an array');

@@ -100,13 +100,13 @@ function checkTypes(req, res, params, logger, s3config) {
        'bad routes param: allEndpoints must have at least one endpoint');
    params.allEndpoints.forEach(endpoint => {
        assert.strictEqual(typeof endpoint, 'string',
            'bad routes param: each item in allEndpoints must be a string');
    });
    assert(Array.isArray(params.websiteEndpoints),
        'bad routes param: allEndpoints must be an array');
    params.websiteEndpoints.forEach(endpoint => {
        assert.strictEqual(typeof endpoint, 'string',
            'bad routes param: each item in websiteEndpoints must be a string');
    });
    assert.strictEqual(typeof params.blacklistedPrefixes, 'object',
        'bad routes param: blacklistedPrefixes must be an object');

@@ -114,13 +114,13 @@ function checkTypes(req, res, params, logger, s3config) {
        'bad routes param: blacklistedPrefixes.bucket must be an array');
    params.blacklistedPrefixes.bucket.forEach(pre => {
        assert.strictEqual(typeof pre, 'string',
            'bad routes param: each blacklisted bucket prefix must be a string');
    });
    assert(Array.isArray(params.blacklistedPrefixes.object),
        'bad routes param: blacklistedPrefixes.object must be an array');
    params.blacklistedPrefixes.object.forEach(pre => {
        assert.strictEqual(typeof pre, 'string',
            'bad routes param: each blacklisted object prefix must be a string');
    });
    assert.strictEqual(typeof params.dataRetrievalParams, 'object',
        'bad routes param: dataRetrievalParams must be a defined object');
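The checks above are plain assert-on-shape validations; a minimal illustration of a params object that would pass them (values are placeholders):

const assert = require('assert');

// Hypothetical params shaped to satisfy the routes-param checks above.
const params = {
    allEndpoints: ['s3.example.com'],
    websiteEndpoints: ['s3-website.example.com'],
    blacklistedPrefixes: { bucket: ['mpu'], object: ['mpu'] },
    dataRetrievalParams: {},
};
assert(Array.isArray(params.allEndpoints),
    'bad routes param: allEndpoints must be an array');
params.allEndpoints.forEach(endpoint =>
    assert.strictEqual(typeof endpoint, 'string',
        'bad routes param: each item in allEndpoints must be a string'));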
@@ -179,8 +179,8 @@ function routes(req, res, params, logger, s3config) {
        reqUids = undefined;
    }
    const log = (reqUids !== undefined ?
        logger.newRequestLoggerFromSerializedUids(reqUids) :
        logger.newRequestLogger());

    if (!req.url.startsWith('/_/healthcheck') &&
        !req.url.startsWith('/_/report')) {

@@ -216,7 +216,7 @@ function routes(req, res, params, logger, s3config) {
        return routesUtils.responseXMLBody(
            errors.InvalidURI.customizeDescription('Could not parse the ' +
                'specified URI. Check your restEndpoints configuration.'),
            undefined, res, log);
    }

    log.addDefaultFields({

@@ -238,7 +238,7 @@ function routes(req, res, params, logger, s3config) {

    if (bucketOrKeyError) {
        log.trace('error with bucket or key value',
            { error: bucketOrKeyError });
        return routesUtils.responseXMLBody(bucketOrKeyError, null, res, log);
    }
@@ -7,7 +7,7 @@ function routeDELETE(request, response, api, log, statsClient) {
    if (request.query.uploadId) {
        if (request.objectKey === undefined) {
            return routesUtils.responseNoBody(
                errors.InvalidRequest.customizeDescription('A key must be ' +
                    'specified'), null, response, 200, log);
        }
        api.callApiMethod('multipartDelete', request, response, log,
@@ -19,77 +19,77 @@ function routeDELETE(request, response, api, log, statsClient) {
    } else if (request.objectKey === undefined) {
        if (request.query.website !== undefined) {
            return api.callApiMethod('bucketDeleteWebsite', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.cors !== undefined) {
            return api.callApiMethod('bucketDeleteCors', request, response,
                log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.replication !== undefined) {
            return api.callApiMethod('bucketDeleteReplication', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.lifecycle !== undefined) {
            return api.callApiMethod('bucketDeleteLifecycle', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.policy !== undefined) {
            return api.callApiMethod('bucketDeletePolicy', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.encryption !== undefined) {
            return api.callApiMethod('bucketDeleteEncryption', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        }
        api.callApiMethod('bucketDelete', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders, response,
                    204, log);
            });
    } else {
        if (request.query.tagging !== undefined) {
            return api.callApiMethod('objectDeleteTagging', request,
                response, log, (err, resHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 204, log);
                });
        }
        api.callApiMethod('objectDelete', request, response, log,
            (err, corsHeaders) => {
                /*
                 * Since AWS expects a 204 regardless of the existence of
                 * the object, the errors NoSuchKey and NoSuchVersion should not
                 * be sent back as a response.
                 */
                if (err && !err.NoSuchKey && !err.NoSuchVersion) {
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, null, log);
                }
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(null, corsHeaders, response,
                    204, log);
            });
    }
    return undefined;
}
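The inline comment above encodes AWS's DELETE semantics; a sketch of what a caller sees (client wiring is hypothetical):

// Hypothetical aws-sdk v2 style call against a gateway running this route.
// Deleting a missing key still succeeds with a 204, because NoSuchKey and
// NoSuchVersion are swallowed before the response is written.
s3.deleteObject({ Bucket: 'demo', Key: 'no-such-key' }, (err, data) => {
    // err is null here; the route mapped the missing key to a plain 204.
});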
@@ -2,7 +2,7 @@ const errors = require('../../errors');
const routesUtils = require('../routesUtils');

function routerGET(request, response, api, log, statsClient,
    dataRetrievalParams) {
    log.debug('routing request', { method: 'routerGET' });
    if (request.bucketName === undefined && request.objectKey !== undefined) {
        routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
@@ -17,18 +17,18 @@ dataRetrievalParams) {
        // GET bucket ACL
        if (request.query.acl !== undefined) {
            api.callApiMethod('bucketGetACL', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.replication !== undefined) {
            api.callApiMethod('bucketGetReplication', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.cors !== undefined) {
            api.callApiMethod('bucketGetCors', request, response, log,
                (err, xml, corsHeaders) => {

@@ -70,7 +70,7 @@ dataRetrievalParams) {
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.policy !== undefined) {
            api.callApiMethod('bucketGetPolicy', request, response, log,

@@ -95,11 +95,11 @@ dataRetrievalParams) {
                });
        } else if (request.query.encryption !== undefined) {
            api.callApiMethod('bucketGetEncryption', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response,
                        log, corsHeaders);
                });
        } else if (request.query.search !== undefined) {
            api.callApiMethod('metadataSearch', request, response, log,
                (err, xml, corsHeaders) => {
@@ -21,11 +21,11 @@ function routeOPTIONS(request, response, api, log, statsClient) {
    }

    return api.callApiMethod('corsPreflight', request, response, log,
        (err, resHeaders) => {
            routesUtils.statsReport500(err, statsClient);
            return routesUtils.responseNoBody(err, resHeaders, response, 200,
                log);
        });
}

module.exports = routeOPTIONS;
@@ -27,28 +27,28 @@ function routePOST(request, response, api, log) {
    if (request.query.uploads !== undefined) {
        return api.callApiMethod('initiateMultipartUpload', request,
            response, log, (err, result, corsHeaders) =>
            routesUtils.responseXMLBody(err, result, response, log,
                corsHeaders));
    }

    // POST complete multipart upload
    if (request.query.uploadId !== undefined) {
        return api.callApiMethod('completeMultipartUpload', request,
            response, log, (err, result, resHeaders) =>
            routesUtils.responseXMLBody(err, result, response, log,
                resHeaders));
    }

    // POST multiObjectDelete
    if (request.query.delete !== undefined) {
        return api.callApiMethod('multiObjectDelete', request, response,
            log, (err, xml, corsHeaders) =>
            routesUtils.responseXMLBody(err, xml, response, log,
                corsHeaders));
    }

    return routesUtils.responseNoBody(errors.NotImplemented, null, response,
        200, log);
}
/* eslint-enable no-param-reassign */
module.exports = routePOST;
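Each POST variant above is selected purely by which query flag is present; the same dispatch idea in a compact, self-contained form (the table below is illustrative, not this router's actual structure):

// Illustrative dispatch table keyed on the first matching query flag.
const postHandlers = {
    uploads: 'initiateMultipartUpload',
    uploadId: 'completeMultipartUpload',
    delete: 'multiObjectDelete',
};

function pickPostHandler(query) {
    const flag = Object.keys(postHandlers)
        .find(k => query[k] !== undefined);
    return flag ? postHandlers[flag] : null; // null maps to NotImplemented
}

pickPostHandler({ uploads: '' }); // 'initiateMultipartUpload'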
@@ -14,16 +14,16 @@ function routePUT(request, response, api, log, statsClient) {
        || contentLength < 0)) || contentLength === '') {
        log.debug('invalid content-length header');
        return routesUtils.responseNoBody(
            errors.BadRequest, null, response, null, log);
    }
    // PUT bucket ACL
    if (request.query.acl !== undefined) {
        api.callApiMethod('bucketPutACL', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders,
                    response, 200, log);
            });
    } else if (request.query.versioning !== undefined) {
        api.callApiMethod('bucketPutVersioning', request, response, log,
            (err, corsHeaders) => {
@@ -82,11 +82,11 @@ function routePUT(request, response, api, log, statsClient) {
            });
    } else if (request.query.encryption !== undefined) {
        api.callApiMethod('bucketPutEncryption', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders,
                    response, 200, log);
            });
    } else {
        // PUT bucket
        return api.callApiMethod('bucketPut', request, response, log,

@@ -110,7 +110,7 @@ function routePUT(request, response, api, log, statsClient) {
                method: 'routePUT',
            });
            return routesUtils
                .responseNoBody(errors.InvalidDigest, null, response, 200, log);
        }
        if (request.headers['content-md5']) {
            request.contentMD5 = request.headers['content-md5'];
@@ -126,17 +126,17 @@ function routePUT(request, response, api, log, statsClient) {
            });
            return routesUtils
                .responseNoBody(errors.InvalidDigest, null, response, 200,
                    log);
        }
    }
    if (request.query.partNumber) {
        if (request.headers['x-amz-copy-source']) {
            api.callApiMethod('objectPutCopyPart', request, response, log,
                (err, xml, additionalHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        additionalHeaders);
                });
        } else {
            api.callApiMethod('objectPutPart', request, response, log,
                (err, calculatedHash, corsHeaders) => {

@@ -202,11 +202,11 @@ function routePUT(request, response, api, log, statsClient) {
            contentLength: request.parsedContentLength,
        });
        api.callApiMethod('objectPut', request, response, log,
            (err, resHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, resHeaders,
                    response, 200, log);
            });
    }
    return undefined;
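The content-md5 branches above reject a payload whose digest disagrees with the client-sent header; a minimal sketch of that comparison (the helper name is illustrative):

const crypto = require('crypto');

// Illustrative check: Content-MD5 is the base64 MD5 digest of the body.
function md5Matches(payloadBuffer, contentMd5Header) {
    const computed = crypto.createHash('md5')
        .update(payloadBuffer)
        .digest('base64');
    return computed === contentMd5Header;
}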
@@ -14,7 +14,7 @@ function routerWebsite(request, response, api, log, statsClient,
    if (request.method === 'GET') {
        return api.callApiMethod('websiteGet', request, response, log,
            (err, userErrorPageFailure, dataGetInfo, resMetaHeaders,
                redirectInfo, key) => {
                routesUtils.statsReport500(err, statsClient);
                // request being redirected
                if (redirectInfo) {
@@ -43,21 +43,21 @@ function routerWebsite(request, response, api, log, statsClient,
    }
    if (request.method === 'HEAD') {
        return api.callApiMethod('websiteHead', request, response, log,
            (err, resMetaHeaders, redirectInfo, key) => {
                routesUtils.statsReport500(err, statsClient);
                if (redirectInfo) {
                    return routesUtils.redirectRequest(redirectInfo,
                        key, request.connection.encrypted,
                        response, request.headers.host, resMetaHeaders, log);
                }
                // could redirect on err so check for redirectInfo first
                if (err) {
                    return routesUtils.errorHeaderResponse(err, response,
                        resMetaHeaders, log);
                }
                return routesUtils.responseContentHeaders(err, {}, resMetaHeaders,
                    response, log);
            });
    }
    return undefined;
}
@@ -28,7 +28,7 @@ function setCommonResponseHeaders(headers, response, log) {
            } catch (e) {
                log.debug('header can not be added ' +
                    'to the response', { header: headers[key],
                        error: e.stack, method: 'setCommonResponseHeaders' });
            }
        }
    });
@@ -71,7 +71,7 @@ const XMLResponseBackend = {
     * @return {object} response - response object with additional headers
     */
    okResponse: function okXMLResponse(xml, response, log,
        additionalHeaders) {
        const bytesSent = Buffer.byteLength(xml);
        log.trace('sending success xml response');
        log.addDefaultFields({

@@ -118,7 +118,7 @@ const XMLResponseBackend = {
            `<Message>${errCode.description}</Message>`,
            '<Resource></Resource>',
            `<RequestId>${log.getSerializedUids()}</RequestId>`,
-           '</Error>'
+           '</Error>',
        );
        const xmlStr = xml.join('');
        const bytesSent = Buffer.byteLength(xmlStr);
@@ -148,7 +148,7 @@ const JSONResponseBackend = {
     * @return {object} response - response object with additional headers
     */
    okResponse: function okJSONResponse(json, response, log,
        additionalHeaders) {
        const bytesSent = Buffer.byteLength(json);
        log.trace('sending success json response');
        log.addDefaultFields({

@@ -166,7 +166,7 @@ const JSONResponseBackend = {
    },

    errorResponse: function errorJSONResponse(errCode, response, log,
        corsHeaders) {
        log.trace('sending error json response', { errCode });
        /*
        {
@@ -368,27 +368,27 @@ function retrieveData(locations, retrieveDataParams, response, log) {
            currentStream = readable;
            return readable.pipe(response, { end: false });
        }), err => {
            currentStream = null;
            if (err) {
                log.debug('abort response due to error', {
                    error: err.code, errMsg: err.message });
            }
            // call end for all cases (error/success) per node.js docs
            // recommendation
            response.end();
-       }
+       },
    );
}

function _responseBody(responseBackend, errCode, payload, response, log,
    additionalHeaders) {
    if (errCode && !response.headersSent) {
        return responseBackend.errorResponse(errCode, response, log,
            additionalHeaders);
    }
    if (!response.headersSent) {
        return responseBackend.okResponse(payload, response, log,
            additionalHeaders);
    }
    return undefined;
}
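The callback above pipes each data location with { end: false } and ends the response exactly once, whether the chain succeeded or aborted. A self-contained sketch of that pattern over generic readables (this is not the retrieveData implementation itself):

const { Readable } = require('stream');

// Illustrative: pipe several readables into one writable in order,
// calling destination.end() exactly once for success or error.
function pipeSequentially(sources, destination, done) {
    let finished = false;
    const finish = err => {
        if (finished) {
            return;
        }
        finished = true;
        destination.end(); // single end() covers both outcomes
        done(err || null);
    };
    const next = i => {
        if (i >= sources.length) {
            return finish(null);
        }
        const source = Readable.from(sources[i]);
        source.once('error', finish);
        source.once('end', () => next(i + 1));
        source.pipe(destination, { end: false });
    };
    next(0);
}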
@@ -397,8 +397,8 @@ function _computeContentLengthFromLocation(dataLocations) {
    return dataLocations.reduce(
        (sum, location) => (sum !== undefined &&
            (typeof location.size === 'number' || typeof location.size === 'string') ?
            sum + Number.parseInt(location.size, 10) :
            undefined), 0);
}

function _contentLengthMatchesLocations(contentLength, dataLocations) {
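The reduce above poisons its accumulator with undefined as soon as one location lacks a usable size, so a partial total is never reported as a content length. For example:

// Same reduce shape, shown standalone.
const sizeOf = locs => locs.reduce(
    (sum, loc) => (sum !== undefined &&
        (typeof loc.size === 'number' || typeof loc.size === 'string') ?
        sum + Number.parseInt(loc.size, 10) :
        undefined), 0);

sizeOf([{ size: 10 }, { size: '20' }]); // 30
sizeOf([{ size: 10 }, {}]);             // undefined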
@@ -419,7 +419,7 @@ const routesUtils = {
     */
    responseXMLBody(errCode, xml, response, log, additionalHeaders) {
        return _responseBody(XMLResponseBackend, errCode, xml, response,
            log, additionalHeaders);
    },

    /**

@@ -433,7 +433,7 @@ const routesUtils = {
     */
    responseJSONBody(errCode, json, response, log, additionalHeaders) {
        return _responseBody(JSONResponseBackend, errCode, json, response,
            log, additionalHeaders);
    },

    /**

@@ -448,7 +448,7 @@ const routesUtils = {
    responseNoBody(errCode, resHeaders, response, httpCode = 200, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (!response.headersSent) {
            return okHeaderResponse(resHeaders, response, httpCode, log);
@@ -466,10 +466,10 @@ const routesUtils = {
     * @return {object} - router's response object
     */
    responseContentHeaders(errCode, overrideParams, resHeaders, response,
        log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (!response.headersSent) {
            // Undefined added as an argument since need to send range to

@@ -504,7 +504,7 @@ const routesUtils = {
        retrieveDataParams, response, range, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (dataLocations !== null && !response.headersSent) {
            // sanity check of content length against individual data
@@ -512,13 +512,13 @@ const routesUtils = {
            const contentLength = resHeaders && resHeaders['Content-Length'];
            if (contentLength !== undefined &&
                !_contentLengthMatchesLocations(contentLength,
                    dataLocations)) {
                log.error('logic error: total length of fetched data ' +
                    'locations does not match returned content-length',
                    { contentLength, dataLocations });
                return XMLResponseBackend.errorResponse(errors.InternalError,
                    response, log,
                    resHeaders);
            }
        }
        if (!response.headersSent) {
@@ -591,7 +591,7 @@ const routesUtils = {
            `<h1>${err.code} ${response.statusMessage}</h1>`,
            '<ul>',
            `<li>Code: ${err.message}</li>`,
-           `<li>Message: ${err.description}</li>`
+           `<li>Message: ${err.description}</li>`,
        );

        if (!userErrorPageFailure && bucketName) {

@@ -601,7 +601,7 @@ const routesUtils = {
            `<li>RequestId: ${log.getSerializedUids()}</li>`,
            // AWS response contains HostId here.
            // TODO: consider adding
-           '</ul>'
+           '</ul>',
        );
        if (userErrorPageFailure) {
            html.push(

@@ -611,13 +611,13 @@ const routesUtils = {
                '<ul>',
                `<li>Code: ${err.message}</li>`,
                `<li>Message: ${err.description}</li>`,
-               '</ul>'
+               '</ul>',
            );
        }
        html.push(
            '<hr/>',
            '</body>',
-           '</html>'
+           '</html>',
        );

        return response.end(html.join(''), 'utf8', () => {
@@ -839,7 +839,7 @@ const routesUtils = {
                    // most specific potential hostname
                    bucketName =
                        potentialBucketName.length < bucketName.length ?
                            potentialBucketName : bucketName;
                }
            }
        }

@@ -847,7 +847,7 @@ const routesUtils = {
            return bucketName;
        }
        throw new Error(
-           `bad request: hostname ${host} is not in valid endpoints`
+           `bad request: hostname ${host} is not in valid endpoints`,
        );
    },
File diff suppressed because it is too large
@@ -30,7 +30,7 @@ function parseLC(config, vault) {
        if (locationObj.details.connector.sproxyd) {
            clients[location] = new Sproxy({
                bootstrap: locationObj.details.connector
                    .sproxyd.bootstrap,
                // Might be undefined which is ok since there is a default
                // set in sproxydclient if chordCos is undefined
                chordCos: locationObj.details.connector.sproxyd.chordCos,

@@ -60,7 +60,7 @@ function parseLC(config, vault) {
            // keepalive config
            const httpAgentConfig =
                config.externalBackends[locationObj.type].httpAgent;
            // max sockets is infinity by default and expressed as null
            if (httpAgentConfig.maxSockets === null) {
                httpAgentConfig.maxSockets = undefined;
            }
@@ -131,7 +131,7 @@ function parseLC(config, vault) {
            const azureStorageEndpoint = config.getAzureEndpoint(location);
            const proxyParams =
                backendUtils.proxyCompareUrl(azureStorageEndpoint) ?
                    {} : config.outboundProxy;
            const azureStorageCredentials =
                config.getAzureStorageCredentials(location);
            clients[location] = new AzureClient({
@@ -52,26 +52,26 @@ class MultipleBackendGateway {
            }
        }
        return client.put(writeStream, size, keyContext, reqUids,
            (err, key, dataStoreVersionId, dataStoreSize, dataStoreMD5) => {
                const log = createLogger(reqUids);
                log.debug('put to location', { controllingLocationConstraint });
                if (err) {
                    log.error('error from datastore',
                        { error: err, dataStoreType: client.clientType });
                    return callback(errors.ServiceUnavailable);
                }
                const dataRetrievalInfo = {
                    key,
                    dataStoreName: controllingLocationConstraint,
                    dataStoreType: client.clientType,
                    dataStoreVersionId,
                    dataStoreSize,
                    dataStoreMD5,
                };
                return callback(null, dataRetrievalInfo);
            // sproxyd accepts keyschema, send as null so sproxyd generates key
            // send metadata as param for AzureClient in Arsenal
            }, null, this.metadata);
    }

    head(objectGetInfoArr, reqUids, callback) {
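The dataRetrievalInfo object assembled above is the locator the gateway hands back to its caller; a hedged sketch of consuming it (gateway, saveLocation, and the surrounding variables are placeholders, not this module's API):

// Hypothetical caller persisting the locator returned by put().
gateway.put(stream, size, keyContext, reqUids, (err, dataRetrievalInfo) => {
    if (err) {
        return log.error('put failed', { error: err });
    }
    // Fields produced by the hunk above: key, dataStoreName, dataStoreType,
    // dataStoreVersionId, dataStoreSize, dataStoreMD5.
    return saveLocation(dataRetrievalInfo);
});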
@@ -166,14 +166,14 @@ class MultipleBackendGateway {
        }, () => {
            async.parallel([
                next => checkExternalBackend(
                    this.clients, awsArray, 'aws_s3', flightCheckOnStartUp,
                    externalBackendHealthCheckInterval, next),
                next => checkExternalBackend(
                    this.clients, azureArray, 'azure', flightCheckOnStartUp,
                    externalBackendHealthCheckInterval, next),
                next => checkExternalBackend(
                    this.clients, gcpArray, 'gcp', flightCheckOnStartUp,
                    externalBackendHealthCheckInterval, next),
            ], (errNull, externalResp) => {
                const externalLocResults = [];
                externalResp.forEach(resp => externalLocResults.push(...resp));
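For readers unfamiliar with the async.parallel shape used here, a self-contained sketch of fanning out independent checks and flattening their array results (backend names are illustrative):

const async = require('async');

// Illustrative: run three independent checks, then merge their arrays.
async.parallel([
    next => next(null, [{ aws_s3: 'ok' }]),
    next => next(null, [{ azure: 'ok' }]),
    next => next(null, [{ gcp: 'ok' }]),
], (err, results) => {
    const flattened = [];
    results.forEach(resp => flattened.push(...resp));
    // flattened: [{ aws_s3: 'ok' }, { azure: 'ok' }, { gcp: 'ok' }]
});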
@@ -185,19 +185,19 @@ class MultipleBackendGateway {
    }

    createMPU(key, metaHeaders, bucketName, websiteRedirectHeader,
        location, contentType, cacheControl, contentDisposition,
        contentEncoding, tagging, log, cb) {
        const client = this.clients[location];
        if (client.clientType === 'aws_s3' || client.clientType === 'gcp') {
            return client.createMPU(key, metaHeaders, bucketName,
                websiteRedirectHeader, contentType, cacheControl,
                contentDisposition, contentEncoding, tagging, log, cb);
        }
        return cb();
    }

    uploadPart(request, streamingV4Params, stream, size, location, key,
        uploadId, partNumber, bucketName, log, cb) {
        const client = this.clients[location];

        if (client.uploadPart) {
@@ -206,29 +206,29 @@ class MultipleBackendGateway {
                    return cb(err);
                }
                return client.uploadPart(request, streamingV4Params, stream,
                    size, key, uploadId, partNumber, bucketName, log,
                    (err, partInfo) => {
                        if (err) {
                            // if error putting part, counter should be decremented
                            return this.locStorageCheckFn(location, -size, log,
                                error => {
                                    if (error) {
                                        log.error('Error decrementing location ' +
                                            'metric following object PUT failure',
                                            { error: error.message });
                                    }
                                    return cb(err);
                                });
                        }
                        return cb(null, partInfo);
                    });
            });
        }
        return cb();
    }

    listParts(key, uploadId, location, bucketName, partNumberMarker, maxParts,
        log, cb) {
        const client = this.clients[location];

        if (client.listParts) {
@@ -239,7 +239,7 @@ class MultipleBackendGateway {
    }

    completeMPU(key, uploadId, location, jsonList, mdInfo, bucketName,
        userMetadata, contentSettings, tagging, log, cb) {
        const client = this.clients[location];
        if (client.completeMPU) {
            const args = [jsonList, mdInfo, key, uploadId, bucketName];
@@ -291,40 +291,40 @@ class MultipleBackendGateway {
    // NOTE: using copyObject only if copying object from one external
    // backend to the same external backend
    copyObject(request, destLocationConstraintName, externalSourceKey,
        sourceLocationConstraintName, storeMetadataParams, config, log, cb) {
        const client = this.clients[destLocationConstraintName];
        if (client.copyObject) {
            return this.locStorageCheckFn(destLocationConstraintName,
                storeMetadataParams.size, log, err => {
                    if (err) {
                        cb(err);
                    }
                    return client.copyObject(request, destLocationConstraintName,
                        externalSourceKey, sourceLocationConstraintName,
                        storeMetadataParams, config, log,
                        (err, key, dataStoreVersionId) => {
                            const dataRetrievalInfo = {
                                key,
                                dataStoreName: destLocationConstraintName,
                                dataStoreType: client.clientType,
                                dataStoreVersionId,
                            };
                            if (err) {
                                // if error copying obj, counter should be decremented
                                return this.locStorageCheckFn(
                                    destLocationConstraintName, -storeMetadataParams.size,
                                    log, error => {
                                        if (error) {
                                            log.error('Error decrementing location ' +
                                                'metric following object PUT failure',
                                                { error: error.message });
                                        }
                                        return cb(err);
                                    });
                            }
                            return cb(null, dataRetrievalInfo);
                        });
                });
        }
        return cb(errors.NotImplemented
            .customizeDescription('Can not copy object from ' +
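One subtlety worth noting in the hunk above: the quota pre-check calls cb(err) without returning, so the copy is still attempted after the error is reported. The usual guard shape that avoids the double path looks like this (a standalone sketch with stub functions, not the change this diff makes):

// Illustrative guard pattern: report the error once and stop.
function checkedCopy(check, copy, cb) {
    check(err => {
        if (err) {
            return cb(err); // the return prevents falling through to copy()
        }
        return copy(cb);
    });
}

// Example wiring: the failing check short-circuits the copy.
checkedCopy(
    done => done(new Error('quota exceeded')),
    done => done(null, 'copied'),
    (err, res) => { /* err is set; copy() never ran */ },
);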
@@ -332,15 +332,15 @@ class MultipleBackendGateway {
    }

    uploadPartCopy(request, location, awsSourceKey,
        sourceLocationConstraintName, config, log, cb) {
        const client = this.clients[location];
        if (client.uploadPartCopy) {
            return client.uploadPartCopy(request, awsSourceKey,
                sourceLocationConstraintName, config,
                log, cb);
        }
        return cb(errors.NotImplemented.customizeDescription(
            'Can not copy object from ' +
            `${client.clientType} to ${client.clientType}`));
    }

@@ -348,7 +348,7 @@ class MultipleBackendGateway {
        const client = this.clients[location];
        if (client.protectAzureBlocks) {
            return client.protectAzureBlocks(this.metadata, bucketName,
                objectKey, location, log, cb);
        }
        return cb();
    }
@@ -11,7 +11,7 @@ const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =

const missingVerIdInternalError = errors.InternalError.customizeDescription(
    'Invalid state. Please ensure versioning is enabled ' +
-   'in AWS for the location constraint and try again.'
+   'in AWS for the location constraint and try again.',
);

class AwsClient {
@@ -36,35 +36,35 @@ class AwsClient {
        // this request implicitly updates the endpoint for the location
        // the following code explcitly sets it to avoid surprises
        this._client.getBucketLocation({ Bucket: this._awsBucketName },
            (err, res) => {
                if (err && err.code !== 'AuthorizationHeaderMalformed') {
                    this._logger.error('error during setup', {
                        error: err,
                        method: 'AwsClient.setup',
                    });
                    return cb(err);
                }
                let region;
                if (err && err.code === 'AuthorizationHeaderMalformed') {
                    // set regional endpoint
                    region = err.region;
                } else if (res) {
                    region = res.LocationConstraint;
                }
                this._client.config.update({ region });

                const isAWS = this._s3Params.endpoint.endsWith('amazonaws.com');
                if (region && isAWS) {
                    const endpoint = `s3.${region}.amazonaws.com`;
                    this._logger.debug('setting regional endpoint', {
                        method: 'AwsClient.setup',
                        region,
                        endpoint,
                    });
                    this._client.endpoint = new AWS.Endpoint(endpoint);
                }
                return cb();
            });
    }

    _createAwsKey(requestBucketName, requestObjectKey,
@@ -84,23 +84,23 @@ class AwsClient {

    put(stream, size, keyContext, reqUids, callback) {
        const awsKey = this._createAwsKey(keyContext.bucketName,
            keyContext.objectKey, this._bucketMatch);
        const metaHeaders = trimXMetaPrefix(keyContext.metaHeaders);
        const log = createLogger(reqUids);

        const putCb = (err, data) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                       `${this.type}: ${err.message}`)
+                       `${this.type}: ${err.message}`),
                );
            }
            if (!data.VersionId && this._supportsVersioning) {
                logHelper(log, 'error', 'missing version id for data ' +
                    'backend object', missingVerIdInternalError,
                    this._dataStoreName, this.clientType);
                return callback(missingVerIdInternalError);
            }
            const dataStoreVersionId = data.VersionId;
@@ -180,8 +180,8 @@ class AwsClient {
            Range: range ? `bytes=${range[0]}-${range[1]}` : null,
        }).on('success', response => {
            log.trace(`${this.type} GET request response headers`,
                { responseHeaders: response.httpResponse.headers,
                    backendType: this.clientType });
        });
        const stream = request.createReadStream();

@@ -202,8 +202,8 @@ class AwsClient {
                logLevel = 'error';
            }
            logHelper(log, logLevel,
                `error streaming data from ${this.type}`,
                err, this._dataStoreName, this.clientType);
        });
        // Always call the callback asynchronously: the caller may
        // destroy the stream with destroy(), which MUST be

@@ -232,8 +232,8 @@ class AwsClient {
                return callback();
            }
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
            );
        }
        return callback();
@ -243,39 +243,39 @@ class AwsClient {
|
||||||
healthcheck(location, callback) {
|
healthcheck(location, callback) {
|
||||||
const awsResp = {};
|
const awsResp = {};
|
||||||
this._client.headBucket({ Bucket: this._awsBucketName },
|
this._client.headBucket({ Bucket: this._awsBucketName },
|
||||||
err => {
|
err => {
|
||||||
/* eslint-disable no-param-reassign */
|
/* eslint-disable no-param-reassign */
|
||||||
if (err) {
|
|
||||||
awsResp[location] = { error: err, external: true };
|
|
||||||
return callback(null, awsResp);
|
|
||||||
}
|
|
||||||
if (!this._supportsVersioning) {
|
|
||||||
awsResp[location] = {
|
|
||||||
message: 'Congrats! You own the bucket',
|
|
||||||
};
|
|
||||||
return callback(null, awsResp);
|
|
||||||
}
|
|
||||||
return this._client.getBucketVersioning({
|
|
||||||
Bucket: this._awsBucketName },
|
|
||||||
(err, data) => {
|
|
||||||
if (err) {
|
if (err) {
|
||||||
awsResp[location] = { error: err, external: true };
|
awsResp[location] = { error: err, external: true };
|
||||||
} else if (!data.Status ||
|
return callback(null, awsResp);
|
||||||
data.Status === 'Suspended') {
|
}
|
||||||
|
if (!this._supportsVersioning) {
|
||||||
awsResp[location] = {
|
awsResp[location] = {
|
||||||
versioningStatus: data.Status,
|
|
||||||
error: 'Versioning must be enabled',
|
|
||||||
external: true,
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
awsResp[location] = {
|
|
||||||
versioningStatus: data.Status,
|
|
||||||
message: 'Congrats! You own the bucket',
|
message: 'Congrats! You own the bucket',
|
||||||
};
|
};
|
||||||
|
return callback(null, awsResp);
|
||||||
}
|
}
|
||||||
return callback(null, awsResp);
|
return this._client.getBucketVersioning({
|
||||||
|
Bucket: this._awsBucketName },
|
||||||
|
(err, data) => {
|
||||||
|
if (err) {
|
||||||
|
awsResp[location] = { error: err, external: true };
|
||||||
|
} else if (!data.Status ||
|
||||||
|
data.Status === 'Suspended') {
|
||||||
|
awsResp[location] = {
|
||||||
|
versioningStatus: data.Status,
|
||||||
|
error: 'Versioning must be enabled',
|
||||||
|
external: true,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
awsResp[location] = {
|
||||||
|
versioningStatus: data.Status,
|
||||||
|
message: 'Congrats! You own the bucket',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return callback(null, awsResp);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
|
createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
|
||||||
|
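This hunk re-indents `healthcheck` without changing its logic: both columns of the compare view carry the same statements. The flow is `headBucket` to prove the bucket is reachable, then `getBucketVersioning`, which must report `Enabled` for a versioned backend, otherwise the location is flagged unhealthy. A standalone sketch of the same decision flow, assuming aws-sdk v2 and an illustrative bucket name, with the non-versioned shortcut omitted:

// Hypothetical sketch of the healthcheck decision flow, using aws-sdk v2.
const AWS = require('aws-sdk');

const s3 = new AWS.S3({ region: 'us-east-1' });
const bucket = 'my-backend-bucket'; // assumed name

function checkLocation(location, callback) {
    const resp = {};
    s3.headBucket({ Bucket: bucket }, err => {
        if (err) {
            resp[location] = { error: err, external: true };
            return callback(null, resp);
        }
        return s3.getBucketVersioning({ Bucket: bucket }, (err, data) => {
            if (err) {
                resp[location] = { error: err, external: true };
            } else if (!data.Status || data.Status === 'Suspended') {
                resp[location] = {
                    versioningStatus: data.Status,
                    error: 'Versioning must be enabled',
                    external: true,
                };
            } else {
                resp[location] = { versioningStatus: data.Status };
            }
            return callback(null, resp);
        });
    });
}

// checkLocation('aws-backend', (err, resp) => console.log(resp));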
@@ -304,10 +304,10 @@ class AwsClient {
        return this._client.createMultipartUpload(params, (err, mpuResObj) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
                );
            }
            return callback(null, mpuResObj);

@@ -315,7 +315,7 @@ class AwsClient {
    }

    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
    partNumber, bucketName, log, callback) {
        let hashedStream = stream;
        if (request) {
            const partStream = prepareStream(request, streamingV4Params,

@@ -335,7 +335,7 @@ class AwsClient {
                'on uploadPart', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
                );
            }
            // Because we manually add quotes to ETag later, remove quotes here

@@ -352,7 +352,7 @@ class AwsClient {
    }

    listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log,
    callback) {
        const awsBucket = this._awsBucketName;
        const awsKey = this._createAwsKey(bucketName, key, this._bucketMatch);
        const params = { Bucket: awsBucket, Key: awsKey, UploadId: uploadId,

@@ -360,10 +360,10 @@ class AwsClient {
        return this._client.listParts(params, (err, partList) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend on listPart',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
                );
            }
            // build storedParts object to mimic Scality S3 backend returns

@@ -424,47 +424,47 @@ class AwsClient {
        };
        const completeObjData = { key: awsKey };
        return this._client.completeMultipartUpload(mpuParams,
            (err, completeMpuRes) => {
                if (err) {
                    if (mpuError[err.code]) {
                        logHelper(log, 'trace', 'err from data backend on ' +
                        'completeMPU', err, this._dataStoreName, this.clientType);
                        return callback(errors[err.code]);
                    }
                    logHelper(log, 'error', 'err from data backend on ' +
                    'completeMPU', err, this._dataStoreName, this.clientType);
                    return callback(errors.ServiceUnavailable
                        .customizeDescription('Error returned from ' +
-                       `${this.type}: ${err.message}`)
+                       `${this.type}: ${err.message}`),
                    );
                }
                if (!completeMpuRes.VersionId && this._supportsVersioning) {
                    logHelper(log, 'error', 'missing version id for data ' +
                    'backend object', missingVerIdInternalError,
                    this._dataStoreName, this.clientType);
                    return callback(missingVerIdInternalError);
                }
                // need to get content length of new object to store
                // in our metadata
                return this._client.headObject({ Bucket: awsBucket, Key: awsKey },
                    (err, objHeaders) => {
                        if (err) {
                            logHelper(log, 'trace', 'err from data backend on ' +
                            'headObject', err, this._dataStoreName, this.clientType);
                            return callback(errors.ServiceUnavailable
                                .customizeDescription('Error returned from ' +
-                               `${this.type}: ${err.message}`)
+                               `${this.type}: ${err.message}`),
                            );
                        }
                        // remove quotes from eTag because they're added later
                        completeObjData.eTag = completeMpuRes.ETag
                            .substring(1, completeMpuRes.ETag.length - 1);
                        completeObjData.dataStoreVersionId = completeMpuRes.VersionId;
                        completeObjData.contentLength =
                            Number.parseInt(objHeaders.ContentLength, 10);
                        return callback(null, completeObjData);
                    });
            });
    }

    abortMPU(key, uploadId, bucketName, log, callback) {
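`completeMPU` stores the ETag without the surrounding double quotes that S3 returns, because quotes are re-added when the value is served back. A minimal sketch of that normalization (the function name is illustrative, not from the change):

// Hypothetical sketch: strip the double quotes S3 puts around ETags.
function stripETagQuotes(eTag) {
    // '"a54357aff0632cce46d942af68356b38"' -> 'a54357aff0632cce46d942af68356b38'
    if (eTag.startsWith('"') && eTag.endsWith('"')) {
        return eTag.substring(1, eTag.length - 1);
    }
    return eTag;
}

console.log(stripETagQuotes('"a54357aff0632cce46d942af68356b38"'));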
@@ -480,8 +480,8 @@ class AwsClient {
                'using the same uploadId.', err, this._dataStoreName,
                this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
                );
            }
            return callback();

@@ -507,10 +507,10 @@ class AwsClient {
            if (err) {
                logHelper(log, 'error', 'error from data backend on ' +
                'putObjectTagging', err,
                this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
                );
            }
            return callback();

@@ -532,19 +532,19 @@ class AwsClient {
                'deleteObjectTagging', err,
                this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `${this.type}: ${err.message}`)
+                   `${this.type}: ${err.message}`),
                );
            }
            return callback();
        });
    }

    copyObject(request, destLocationConstraintName, sourceKey,
    sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
        const destBucketName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destAwsKey = this._createAwsKey(destBucketName, destObjectKey,
            this._bucketMatch);

        const sourceAwsBucketName =
            config.getAwsBucketName(sourceLocationConstraintName);

@@ -569,15 +569,15 @@ class AwsClient {
                `${sourceAwsBucketName} ${this.type} bucket`, err,
                this._dataStoreName, this.clientType);
                return callback(errors.AccessDenied
                    .customizeDescription('Error: Unable to access ' +
-                   `${sourceAwsBucketName} ${this.type} bucket`)
+                   `${sourceAwsBucketName} ${this.type} bucket`),
                );
            }
            logHelper(log, 'error', 'error from data backend on ' +
            'copyObject', err, this._dataStoreName, this.clientType);
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
-               `${this.type}: ${err.message}`)
+               `${this.type}: ${err.message}`),
            );
        }
        if (!copyResult.VersionId && this._supportsVersioning) {

@@ -590,7 +590,7 @@ class AwsClient {
            if (err || !data.VersionId) {
                logHelper(log, 'error', 'missing version id for data ' +
                'backend object', missingVerIdInternalError,
                this._dataStoreName, this.clientType);
                return callback(missingVerIdInternalError);
            }
            return callback(null, destAwsKey, data.VersionId);

@@ -600,11 +600,11 @@ class AwsClient {
        });
    }

    uploadPartCopy(request, awsSourceKey, sourceLocationConstraintName,
    config, log, callback) {
        const destBucketName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destAwsKey = this._createAwsKey(destBucketName, destObjectKey,
            this._bucketMatch);

        const sourceAwsBucketName =
            config.getAwsBucketName(sourceLocationConstraintName);

@@ -628,15 +628,15 @@ class AwsClient {
                `${sourceAwsBucketName} AWS bucket`, err,
                this._dataStoreName, this.clientType);
                return callback(errors.AccessDenied
                    .customizeDescription('Error: Unable to access ' +
-                   `${sourceAwsBucketName} AWS bucket`)
+                   `${sourceAwsBucketName} AWS bucket`),
                );
            }
            logHelper(log, 'error', 'error from data backend on ' +
            'uploadPartCopy', err, this._dataStoreName, this.clientType);
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
-               `${this.type}: ${err.message}`)
+               `${this.type}: ${err.message}`),
            );
        }
        const eTag = removeQuotes(res.CopyPartResult.ETag);
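Every backend call in this file repeats the same wrap-and-forward block: log via `logHelper`, then surface a `ServiceUnavailable` whose description embeds the backend type and the original message. A hedged sketch of that recurring shape as a standalone helper (the names and the minimal `errors` stand-in are illustrative, not part of the change):

// Hypothetical sketch of the recurring error-wrapping pattern.
// `errors` mimics the error objects just enough for the example.
const errors = {
    ServiceUnavailable: {
        customizeDescription(description) {
            const err = new Error(description);
            err.code = 503;
            return err;
        },
    },
};

function wrapBackendError(type, err) {
    return errors.ServiceUnavailable
        .customizeDescription(`Error returned from ${type}: ${err.message}`);
}

console.log(wrapBackendError('AWS', new Error('socket hang up')).message);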
@@ -51,7 +51,7 @@ class AzureClient {
        if (log) {
            log.error('error thrown by Azure Storage Client Library',
                { error: err.message, stack: err.stack, s3Method,
                    azureMethod, dataStoreName: this._dataStoreName });
        }
        cb(error.customizeDescription('Error from Azure ' +
            `method: ${azureMethod} on ${s3Method} S3 call: ` +

@@ -82,7 +82,7 @@ class AzureClient {
    // same key name. If it does, do not allow put or delete because Azure
    // will delete all blocks with same key name
    protectAzureBlocks(metadata, bucketName, objectKey, dataStoreName,
    log, cb) {
        const mpuBucketName = `${constants.mpuBucketPrefix}${bucketName}`;
        const splitter = constants.splitter;
        const listingParams = {

@@ -93,23 +93,23 @@ class AzureClient {
        };

        return metadata.listMultipartUploads(mpuBucketName, listingParams,
            log, (err, mpuList) => {
                if (err && !err.NoSuchBucket) {
                    log.error('Error listing MPUs for Azure delete',
                        { error: err, dataStoreName });
                    return cb(errors.ServiceUnavailable);
                }
                if (mpuList && mpuList.Uploads && mpuList.Uploads.length > 0) {
                    const error = errors.MPUinProgress;
                    log.error('Error: cannot put/delete object to Azure with ' +
                        'same key name as ongoing MPU on Azure',
                        { error, dataStoreName });
                    return cb(error);
                }
                // If listMultipartUploads returns a NoSuchBucket error or the
                // mpu list is empty, there are no conflicting MPUs, so continue
                return cb();
            });
    }

    toObjectGetInfo(objectKey, bucketName) {
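`protectAzureBlocks` refuses a put or delete while a multipart upload is in flight for the same key, because committing or deleting an Azure block blob discards uncommitted blocks that share the name. A minimal sketch of the guard's decision, detached from the metadata layer (the listing function is a stand-in for `metadata.listMultipartUploads`):

// Hypothetical sketch: refuse writes when an MPU is already in progress
// for the key. listOngoingMpus stands in for the metadata listing call.
function guardAzureKey(listOngoingMpus, objectKey, cb) {
    listOngoingMpus(objectKey, (err, mpuList) => {
        if (err && !err.NoSuchBucket) {
            return cb(new Error('ServiceUnavailable'));
        }
        if (mpuList && mpuList.Uploads && mpuList.Uploads.length > 0) {
            return cb(new Error('MPUinProgress'));
        }
        return cb(); // no conflicting MPU: the write may proceed
    });
}

// Usage: a listing that reports one ongoing upload blocks the write.
guardAzureKey((key, done) => done(null, { Uploads: [{ key }] }),
    'photos/cat.png', err => console.log(err && err.message));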
@@ -123,52 +123,52 @@ class AzureClient {
        const log = createLogger(reqUids);
        // before blob is put, make sure there is no ongoing MPU with same key
        this.protectAzureBlocks(metadata, keyContext.bucketName,
            keyContext.objectKey, this._dataStoreName, log, err => {
                // if error returned, there is ongoing MPU, so do not put
                if (err) {
                    return callback(err.customizeDescription(
                        `Error putting object to Azure: ${err.message}`));
                }
                const azureKey = this._createAzureKey(keyContext.bucketName,
                    keyContext.objectKey, this._bucketMatch);
                const options = {
                    metadata: translateAzureMetaHeaders(keyContext.metaHeaders,
                        keyContext.tagging),
                    contentSettings: {
                        contentType: keyContext.contentType || undefined,
                        cacheControl: keyContext.cacheControl || undefined,
                        contentDisposition: keyContext.contentDisposition ||
                        undefined,
                        contentEncoding: keyContext.contentEncoding || undefined,
                    },
                };
                if (size === 0) {
                    return this._errorWrapper('put', 'createBlockBlobFromText',
                        [this._azureContainerName, azureKey, '', options,
                            err => {
                                if (err) {
                                    logHelper(log, 'error', 'err from Azure PUT data ' +
                                        'backend', err, this._dataStoreName);
                                    return callback(errors.ServiceUnavailable
                                        .customizeDescription('Error returned from ' +
                                        `Azure: ${err.message}`));
                                }
                                return callback(null, azureKey);
                            }], log, callback);
                }
                return this._errorWrapper('put', 'createBlockBlobFromStream',
                    [this._azureContainerName, azureKey, stream, size, options,
                        err => {
                            if (err) {
                                logHelper(log, 'error', 'err from Azure PUT data ' +
                                    'backend', err, this._dataStoreName);
                                return callback(errors.ServiceUnavailable
                                    .customizeDescription('Error returned from ' +
                                    `Azure: ${err.message}`));
                            }
                            return callback(null, azureKey);
                        }], log, callback);
            });
    }

    head(objectGetInfo, reqUids, callback) {

@@ -176,25 +176,25 @@ class AzureClient {
        const { key, azureStreamingOptions } = objectGetInfo;
        return this._errorWrapper('head', 'getBlobProperties',
            [this._azureContainerName, key, azureStreamingOptions,
                (err, data) => {
                    if (err) {
                        let logLevel;
                        let retError;
                        if (err.code === 'NotFound') {
                            logLevel = 'info';
                            retError = errors.LocationNotFound;
                        } else {
                            logLevel = 'error';
                            retError = errors.ServiceUnavailable
                                .customizeDescription(
                                    `Error returned from Azure: ${err.message}`);
                        }
                        logHelper(log, logLevel, 'err from Azure HEAD data backend',
                            err, this._dataStoreName);
                        return callback(retError);
                    }
                    return callback(null, data);
                }], log, callback);
    }

    get(objectGetInfo, range, reqUids, callback) {
@@ -213,14 +213,14 @@ class AzureClient {
        }
        this._errorWrapper('get', 'getBlobToStream',
            [this._azureContainerName, key, response, streamingOptions,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err from Azure GET data backend',
                            err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable);
                    }
                    return callback(null, response);
                }], log, callback);
    }

    delete(objectGetInfo, reqUids, callback) {

@@ -234,20 +234,20 @@ class AzureClient {
        }
        return this._errorWrapper('delete', 'deleteBlobIfExists',
            [this._azureContainerName, key, options,
                err => {
                    if (err && err.statusCode === 412) {
                        return callback(errors.PreconditionFailed);
                    }
                    if (err) {
                        const log = createLogger(reqUids);
                        logHelper(log, 'error', 'error deleting object from ' +
                            'Azure datastore', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable
                            .customizeDescription('Error returned from ' +
                            `Azure: ${err.message}`));
                    }
                    return callback();
                }], log, callback);
    }

    healthcheck(location, callback, flightCheckOnStartUp) {

@@ -271,7 +271,7 @@ class AzureClient {
    }

    uploadPart(request, streamingV4Params, partStream, size, key, uploadId,
    partNumber, bucket, log, callback) {
        const azureKey = this._createAzureKey(bucket, key, this._bucketMatch);
        const params = { bucketName: this._azureContainerName,
            partNumber, size, objectKey: azureKey, uploadId };

@@ -299,27 +299,27 @@ class AzureClient {
        if (size <= azureMpuUtils.maxSubPartSize) {
            const errorWrapperFn = this._errorWrapper.bind(this);
            return azureMpuUtils.putSinglePart(errorWrapperFn,
                stream, params, this._dataStoreName, log, (err, dataStoreETag) => {
                    if (err) {
                        return callback(err);
                    }
                    dataRetrievalInfo.dataStoreETag = dataStoreETag;
                    return callback(null, dataRetrievalInfo);
                });
        }
        const errorWrapperFn = this._errorWrapper.bind(this);
        return azureMpuUtils.putSubParts(errorWrapperFn, stream,
            params, this._dataStoreName, log, (err, dataStoreETag) => {
                if (err) {
                    return callback(err);
                }
                dataRetrievalInfo.dataStoreETag = dataStoreETag;
                return callback(null, dataRetrievalInfo);
            });
    }

    completeMPU(jsonList, mdInfo, key, uploadId, bucket, metaHeaders,
    contentSettings, tagging, log, callback) {
        const azureKey = this._createAzureKey(bucket, key, this._bucketMatch);
        const commitList = {
            UncommittedBlocks: jsonList.uncommittedBlocks || [],
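The size branch above picks `putSinglePart` when the part fits in one Azure block and `putSubParts` when it must be split into several blocks. A minimal sketch of the dispatch (the 100 MB value mirrors the classic Azure block ceiling, but treat the constant as an assumption; the two workers are stubs):

// Hypothetical sketch of the size-based dispatch for Azure MPU parts.
const MAX_SUB_PART_SIZE = 100 * 1024 * 1024; // assumed block ceiling

function uploadAzurePart(size, putSinglePart, putSubParts, done) {
    if (size <= MAX_SUB_PART_SIZE) {
        // small enough: one Azure block per S3 part
        return putSinglePart(done);
    }
    // too big for one block: split the part into sub-parts
    return putSubParts(done);
}

uploadAzurePart(5 * 1024 * 1024,
    done => done(null, 'single'),
    done => done(null, 'sub-parts'),
    (err, how) => console.log(how)); // -> 'single'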
@@ -345,20 +345,20 @@ class AzureClient {
        };
        return this._errorWrapper('completeMPU', 'commitBlocks',
            [this._azureContainerName, azureKey, commitList, options,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err completing MPU on Azure ' +
                            'datastore', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable
                            .customizeDescription('Error returned from ' +
                            `Azure: ${err.message}`));
                    }
                    const completeObjData = {
                        key: azureKey,
                        filteredPartsObj,
                    };
                    return callback(null, completeObjData);
                }], log, callback);
    }

    objectPutTagging(key, bucket, objectMD, log, callback) {

@@ -367,14 +367,14 @@ class AzureClient {
        azureMD.tags = JSON.stringify(objectMD.tags);
        this._errorWrapper('objectPutTagging', 'setBlobMetadata',
            [this._azureContainerName, azureKey, azureMD,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err putting object tags to ' +
                            'Azure backend', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable);
                    }
                    return callback();
                }], log, callback);
    }

    objectDeleteTagging(key, bucketName, objectMD, log, callback) {

@@ -382,27 +382,27 @@ class AzureClient {
        const azureMD = this._getMetaHeaders(objectMD);
        this._errorWrapper('objectDeleteTagging', 'setBlobMetadata',
            [this._azureContainerName, azureKey, azureMD,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err putting object tags to ' +
                            'Azure backend', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable);
                    }
                    return callback();
                }], log, callback);
    }

    copyObject(request, destLocationConstraintName, sourceKey,
    sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
        const destContainerName = request.bucketName;
        const destObjectKey = request.objectKey;

        const destAzureKey = this._createAzureKey(destContainerName,
            destObjectKey, this._bucketMatch);

        const sourceContainerName =
            config.locationConstraints[sourceLocationConstraintName]
            .details.azureContainerName;

        let options;
        if (storeMetadataParams.metaHeaders) {

@@ -413,7 +413,7 @@ class AzureClient {
        this._errorWrapper('copyObject', 'startCopyBlob',
            [`${this._azureStorageEndpoint}` +
            `${sourceContainerName}/${sourceKey}`,
            this._azureContainerName, destAzureKey, options,
            (err, res) => {
                if (err) {
                    if (err.code === 'CannotVerifyCopySource') {
@@ -421,36 +421,36 @@ class AzureClient {
                        `${sourceContainerName} Azure Container`, err,
                        this._dataStoreName);
                        return callback(errors.AccessDenied
                            .customizeDescription('Error: Unable to access ' +
-                           `${sourceContainerName} Azure Container`)
+                           `${sourceContainerName} Azure Container`),
                        );
                    }
                    logHelper(log, 'error', 'error from data backend on ' +
                    'copyObject', err, this._dataStoreName);
                    return callback(errors.ServiceUnavailable
                        .customizeDescription('Error returned from ' +
-                       `AWS: ${err.message}`)
+                       `AWS: ${err.message}`),
                    );
                }
                if (res.copy.status === 'pending') {
                    logHelper(log, 'error', 'Azure copy status is pending',
                        err, this._dataStoreName);
                    const copyId = res.copy.id;
                    this._client.abortCopyBlob(this._azureContainerName,
                        destAzureKey, copyId, err => {
                            if (err) {
                                logHelper(log, 'error', 'error from data backend ' +
                                    'on abortCopyBlob', err, this._dataStoreName);
                                return callback(errors.ServiceUnavailable
                                    .customizeDescription('Error returned from ' +
-                                   `AWS on abortCopyBlob: ${err.message}`)
+                                   `AWS on abortCopyBlob: ${err.message}`),
                                );
                            }
                            return callback(errors.InvalidObjectState
                                .customizeDescription('Error: Azure copy status was ' +
-                               'pending. It has been aborted successfully')
+                               'pending. It has been aborted successfully'),
                            );
                        });
                }
                return callback(null, destAzureKey);
            }], log, callback);
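When `startCopyBlob` reports a `pending` server-side copy, the client aborts it with the returned copy id and surfaces `InvalidObjectState` rather than leaving a half-copied blob behind. A minimal sketch of that abort-on-pending flow (the `client` object is a stand-in for the Azure storage SDK used by the backend):

// Hypothetical sketch: abort a server-side copy that is still pending.
// `client` stands in for the Azure storage client used by the backend.
function finishCopy(client, container, destKey, res, callback) {
    if (res.copy.status !== 'pending') {
        return callback(null, destKey); // copy completed synchronously
    }
    return client.abortCopyBlob(container, destKey, res.copy.id, err => {
        if (err) {
            return callback(err); // the abort itself failed
        }
        return callback(new Error('InvalidObjectState: copy was pending ' +
            'and has been aborted'));
    });
}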
@@ -24,7 +24,7 @@ class MpuHelper {
        const handleFunc = (fnName, params, retry, callback) => {
            const timeout = backoff.duration();
            return setTimeout((params, cb) =>
                this.service[fnName](params, cb), timeout, params,
            (err, res) => {
                if (err) {
                    if (err.statusCode === 429 || err.code === 429) {

@@ -90,53 +90,53 @@ class MpuHelper {
    splitMerge(params, partList, level, callback) {
        // create composition of slices from the partList array
        return async.mapLimit(eachSlice.call(partList, 32),
            this.service._maxConcurrent,
            (infoParts, cb) => {
                const mpuPartList = infoParts.Parts.map(item =>
                    ({ PartName: item.PartName }));
                const partNumber = infoParts.PartNumber;
                const tmpKey =
                    createMpuKey(params.Key, params.UploadId, partNumber, level);
                const mergedObject = { PartName: tmpKey };
                if (mpuPartList.length < 2) {
                    logger.trace(
                        'splitMerge: parts are fewer than 2, copy instead');
                    // else just perform a copy
                    const copyParams = {
                        Bucket: params.MPU,
                        Key: tmpKey,
                        CopySource: `${params.MPU}/${mpuPartList[0].PartName}`,
                    };
                    return this.service.copyObject(copyParams, (err, res) => {
                        if (err) {
                            logHelper(logger, 'error',
                                'error in splitMerge - copyObject', err);
                            return cb(err);
                        }
                        mergedObject.VersionId = res.VersionId;
                        mergedObject.ETag = res.ETag;
                        return cb(null, mergedObject);
                    });
                }
                const composeParams = {
                    Bucket: params.MPU,
                    Key: tmpKey,
                    MultipartUpload: { Parts: mpuPartList },
                };
                return this.retryCompose(composeParams, (err, res) => {
                    if (err) {
                        return cb(err);
                    }
                    mergedObject.VersionId = res.VersionId;
                    mergedObject.ETag = res.ETag;
                    return cb(null, mergedObject);
                });
            }, (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, res.length);
            });
    }

    /**
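`splitMerge` groups parts into slices of 32 because GCP's compose operation accepts at most 32 source objects per call; a single-part slice is copied instead of composed. A minimal sketch of the slicing step (`eachSlice` is re-implemented here just for the example):

// Hypothetical sketch: slice a part list into groups of at most 32,
// matching GCP compose's per-call source limit.
function sliceParts(partList, sliceSize = 32) {
    const slices = [];
    for (let i = 0; i < partList.length; i += sliceSize) {
        slices.push(partList.slice(i, i + sliceSize));
    }
    return slices;
}

const parts = Array.from({ length: 70 }, (_, i) => ({ PartName: `part-${i + 1}` }));
console.log(sliceParts(parts).map(s => s.length)); // -> [ 32, 32, 6 ]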
@@ -284,7 +284,7 @@ class MpuHelper {
                    if (err) {
                        logHelper(logger, 'error', 'error in ' +
                        'createMultipartUpload - final copyObject',
                        err);
                        return next(err);
                    }
                    const mpuResult = {

@@ -301,7 +301,7 @@ class MpuHelper {
                    if (err) {
                        logHelper(logger, 'error', 'error in ' +
                        'createMultipartUpload - final head object',
                        err);
                        return next(err);
                    }
                    mpuResult.ContentLength = res.ContentLength;
@@ -70,7 +70,7 @@ class GcpManagedUpload {
        if (this.body instanceof stream) {
            assert.strictEqual(typeof this.totalBytes, 'number',
                errors.MissingContentLength.customizeDescription(
                    'If body is a stream, ContentLength must be provided'));
        } else {
            if (typeof this.body === 'string') {
                this.body = Buffer.from(this.body);

@@ -156,13 +156,13 @@ class GcpManagedUpload {
            .map(item =>
                Object.assign(item, { ETag: this.parts[item.PartNumber] }));
        return this.service.completeMultipartUpload(params,
            (err, res) => {
                if (err) {
                    return this.cleanUp(err);
                }
                this.completed = true;
                return this.callback(null, res);
            });
    }

    /**

@@ -187,16 +187,16 @@ class GcpManagedUpload {
        if (this.body instanceof stream) {
            // stream type
            this.body.on('error', err => this.cleanUp(err))
                .on('readable', () => this.chunkStream())
                .on('end', () => {
                    this.isDoneChunking = true;
                    this.chunkStream();

                    if (this.isDoneChunking && this.uploadedParts >= 1 &&
                        this.uploadedParts === this.totalParts) {
                        this.completeUpload();
                    }
                });
        }
        return undefined;
    }
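The `end` handler flushes a final chunk and then completes the upload only once every chunked part has actually been acknowledged, so an in-flight part cannot race the completion call. A reduced sketch of that bookkeeping (the class is illustrative, not from the source):

// Hypothetical sketch of the completion bookkeeping: finish only when
// chunking is done AND all issued parts have been acknowledged.
class UploadTracker {
    constructor(totalParts, complete) {
        this.totalParts = totalParts;
        this.uploadedParts = 0;
        this.isDoneChunking = false;
        this.complete = complete;
    }

    onPartUploaded() {
        this.uploadedParts += 1;
        this.maybeComplete();
    }

    onEndOfStream() {
        this.isDoneChunking = true;
        this.maybeComplete();
    }

    maybeComplete() {
        if (this.isDoneChunking && this.uploadedParts >= 1 &&
            this.uploadedParts === this.totalParts) {
            this.complete();
        }
    }
}

const t = new UploadTracker(2, () => console.log('complete'));
t.onPartUploaded();
t.onEndOfStream();
t.onPartUploaded(); // -> 'complete'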
@@ -251,22 +251,22 @@ class GcpManagedUpload {
                });
            },
            next => async.eachLimit(this.slicedParts, this.queueSize,
                (uploadPart, done) => {
                    const params = {
                        Bucket: this.mpuBucket,
                        Key: this.params.Key,
                        UploadId: this.uploadId,
                        Body: uploadPart.Body,
                        PartNumber: uploadPart.PartNumber,
                    };
                    this.service.uploadPart(params, (err, res) => {
                        if (!err) {
                            this.parts[uploadPart.PartNumber] = res.ETag;
                            this.uploadedParts++;
                        }
                        return done(err);
                    });
                }, next),
        ], err => {
            if (err) {
                return this.cleanUp(new Error(
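`async.eachLimit` keeps at most `queueSize` part uploads in flight at once, recording each returned ETag by part number so the final complete call can reference them. A self-contained sketch with the `async` library (the uploadPart worker is a stub):

// Hypothetical sketch: bounded-concurrency part uploads with async.eachLimit.
const async = require('async');

const parts = [1, 2, 3, 4, 5].map(n => ({ PartNumber: n, Body: `chunk-${n}` }));
const etags = {};
const queueSize = 2; // at most two uploads in flight

async.eachLimit(parts, queueSize, (part, done) => {
    // uploadPart stub: pretend the backend returns an ETag per part
    setImmediate(() => {
        etags[part.PartNumber] = `"etag-${part.PartNumber}"`;
        done();
    });
}, err => {
    if (err) {
        throw err;
    }
    console.log(etags); // ETags keyed by part number, ready for completion
});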
@@ -239,7 +239,7 @@ module.exports = {
        const requestId = resp.httpResponse.headers ?
            resp.httpResponse.headers['x-guploader-uploadid'] : null;
        if (resp.error) {
            // eslint-disable-next-line no-param-reassign
            resp.error.requestId = resp.requestId || requestId;
        }
    },
@@ -120,10 +120,10 @@ class GcpClient extends AwsClient {
        return this._client.createMultipartUpload(params, (err, mpuResObj) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `GCP: ${err.message}`)
+                   `GCP: ${err.message}`),
                );
            }
            return callback(null, mpuResObj);

@@ -162,32 +162,32 @@ class GcpClient extends AwsClient {
        };
        const completeObjData = { key: gcpKey };
        return this._client.completeMultipartUpload(mpuParams,
            (err, completeMpuRes) => {
                if (err) {
                    logHelper(log, 'error', 'err from data backend on ' +
                    'completeMPU', err, this._dataStoreName, this.clientType);
                    return callback(errors.ServiceUnavailable
                        .customizeDescription('Error returned from ' +
-                       `GCP: ${err.message}`)
+                       `GCP: ${err.message}`),
                    );
                }
                if (!completeMpuRes.VersionId) {
                    logHelper(log, 'error', 'missing version id for data ' +
                    'backend object', missingVerIdInternalError,
                    this._dataStoreName, this.clientType);
                    return callback(missingVerIdInternalError);
                }
                // remove quotes from eTag because they're added later
                completeObjData.eTag = removeQuotes(completeMpuRes.ETag);
                completeObjData.dataStoreVersionId = completeMpuRes.VersionId;
                completeObjData.contentLength =
                    Number.parseInt(completeMpuRes.ContentLength, 10);
                return callback(null, completeObjData);
            });
    }

    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
    partNumber, bucketName, log, callback) {
        let hashedStream = stream;
        if (request) {
            const partStream = prepareStream(request, streamingV4Params,
@@ -209,8 +209,8 @@ class GcpClient extends AwsClient {
                logHelper(log, 'error', 'err from data backend ' +
                'on uploadPart', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `GCP: ${err.message}`)
+                   `GCP: ${err.message}`),
                );
            }
            // remove quotes from eTag because they're added later

@@ -226,11 +226,11 @@ class GcpClient extends AwsClient {
    }

    uploadPartCopy(request, gcpSourceKey, sourceLocationConstraintName, config,
    log, callback) {
        const destBucketName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destGcpKey = this._createGcpKey(destBucketName, destObjectKey,
            this._bucketMatch);

        const sourceGcpBucketName =
            config.getGcpBucketNames(sourceLocationConstraintName).bucketName;

@@ -241,8 +241,8 @@ class GcpClient extends AwsClient {

        if (copySourceRange) {
            return callback(errors.NotImplemented
                .customizeDescription('Error returned from ' +
-               `${this.clientType}: copySourceRange not implemented`)
+               `${this.clientType}: copySourceRange not implemented`),
            );
        }

@@ -260,15 +260,15 @@ class GcpClient extends AwsClient {
                `${sourceGcpBucketName} GCP bucket`, err,
                this._dataStoreName, this.clientType);
                return callback(errors.AccessDenied
                    .customizeDescription('Error: Unable to access ' +
-                   `${sourceGcpBucketName} GCP bucket`)
+                   `${sourceGcpBucketName} GCP bucket`),
                );
            }
            logHelper(log, 'error', 'error from data backend on ' +
            'uploadPartCopy', err, this._dataStoreName);
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
-               `GCP: ${err.message}`)
+               `GCP: ${err.message}`),
            );
        }
        // remove quotes from eTag because they're added later

@@ -290,8 +290,8 @@ class GcpClient extends AwsClient {
                logHelper(log, 'error', 'err from data backend ' +
                'on abortMPU', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
-                   `GCP: ${err.message}`)
+                   `GCP: ${err.message}`),
                );
            }
            return callback();
@@ -45,7 +45,7 @@ class PfsClient {
            }
            return callback(null, keyContext.objectKey, '',
                keyContext.metaHeaders['x-amz-meta-size'],
-               md5
+               md5,
            );
        }
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
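In the PFS backend the data already lives on the filesystem, so `put` can acknowledge immediately: the callback returns the object key, an empty data-store version id, the size carried in the `x-amz-meta-size` header, and the md5; no bytes are written by this client. A reduced sketch of that callback contract (argument order follows the hunk above; the surrounding wiring is assumed):

// Hypothetical sketch of the PFS put acknowledgement: no data transfer,
// just echo back key, empty version id, size header, and md5.
function pfsPut(keyContext, md5, callback) {
    return callback(null, keyContext.objectKey, '',
        keyContext.metaHeaders['x-amz-meta-size'],
        md5,
    );
}

pfsPut({ objectKey: 'a/b.txt', metaHeaders: { 'x-amz-meta-size': '42' } },
    'd41d8cd98f00b204e9800998ecf8427e',
    (err, key, versionId, size, md5) => console.log(key, size, md5));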
@@ -72,7 +72,7 @@ class PfsClient {
        this._restClient.delete(key, reqUids, err => {
            if (err) {
                logHelper(log, 'error', 'err from data backend', err,
                    this._dataStoreName, this.clientType);
                return callback(err);
            }
            return callback();
|
@ -86,61 +86,61 @@ class PfsClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
|
createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
|
||||||
cacheControl, contentDisposition, contentEncoding, log, callback) {
|
cacheControl, contentDisposition, contentEncoding, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
uploadPart(request, streamingV4Params, stream, size, key, uploadId,
|
uploadPart(request, streamingV4Params, stream, size, key, uploadId,
|
||||||
partNumber, bucketName, log, callback) {
|
partNumber, bucketName, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log,
|
listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log,
|
||||||
callback) {
|
callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
completeMPU(jsonList, mdInfo, key, uploadId, bucketName, log, callback) {
|
completeMPU(jsonList, mdInfo, key, uploadId, bucketName, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
abortMPU(key, uploadId, bucketName, log, callback) {
|
abortMPU(key, uploadId, bucketName, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
objectPutTagging(key, bucket, objectMD, log, callback) {
|
objectPutTagging(key, bucket, objectMD, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
objectDeleteTagging(key, bucketName, objectMD, log, callback) {
|
objectDeleteTagging(key, bucketName, objectMD, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
copyObject(request, destLocationConstraintName, sourceKey,
|
copyObject(request, destLocationConstraintName, sourceKey,
|
||||||
sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
|
sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
|
|
||||||
uploadPartCopy(request, awsSourceKey, sourceLocationConstraintName,
|
uploadPartCopy(request, awsSourceKey, sourceLocationConstraintName,
|
||||||
config, log, callback) {
|
config, log, callback) {
|
||||||
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
return callback(errors.NotImplemented);
|
return callback(errors.NotImplemented);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
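The large PfsClient hunk above is all context: every MPU and tagging entry point is the same stub that logs and fails with NotImplemented. A compact sketch of that pattern, with logHelper and errors as stand-ins for Arsenal's helpers (names here are illustrative, not the library's exports):

const errors = { NotImplemented: new Error('NotImplemented') };

// Stand-in for Arsenal's logHelper(log, level, msg, err, ...) helper.
function logHelper(log, level, msg, err, dataStoreName, clientType) {
    log[level]({ msg, error: err.message, dataStoreName, clientType });
}

class StubBackend {
    constructor(dataStoreName, clientType) {
        this._dataStoreName = dataStoreName;
        this.clientType = clientType;
    }

    // Every unsupported operation follows this exact shape above.
    createMPU(key, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
}

const log = { error: entry => console.error(entry) };
new StubBackend('pfs-store', 'PFS').createMPU('key', log, err => {
    console.log('caller sees:', err.message);
});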
@@ -94,7 +94,7 @@ const utils = {
      * same account since Azure copy outside of an account is async
      */
     externalBackendCopy(config, locationConstraintSrc, locationConstraintDest,
         sourceBucketMD, destBucketMD) {
         const sourceBucketName = sourceBucketMD.getName();
         const destBucketName = destBucketMD.getName();
         const isSameBucket = sourceBucketName === destBucketName;
@@ -111,11 +111,11 @@ const utils = {
             sourceLocationConstraintType === 'gcp' ||
             (sourceLocationConstraintType === 'azure' &&
             config.isSameAzureAccount(locationConstraintSrc,
                 locationConstraintDest)));
     },
 
     checkExternalBackend(clients, locations, type, flightCheckOnStartUp,
         externalBackendHealthCheckInterval, cb) {
         const checkStatus = backendHealth[type] || {};
         if (locations.length === 0) {
             return process.nextTick(cb, null, []);
@@ -35,7 +35,6 @@ const FOLDER_HASH = 3511;
  * directory hash structure under the configured dataPath.
  */
 class DataFileStore {
-
     /**
      * @constructor
      * @param {Object} dataConfig - configuration of the file backend
@@ -78,7 +77,7 @@ class DataFileStore {
         fs.access(this.dataPath, fs.F_OK | fs.R_OK | fs.W_OK, err => {
             if (err) {
                 this.logger.error('Data path is not readable or writable',
                     { error: err });
                 return callback(err);
             }
             if (this.isPassthrough) {
@@ -86,7 +85,7 @@ class DataFileStore {
             }
             // Create FOLDER_HASH subdirectories
             const subDirs = Array.from({ length: FOLDER_HASH },
                 (v, k) => (k).toString());
             this.logger.info(`pre-creating ${subDirs.length} subdirs...`);
             if (!this.noSync) {
                 storageUtils.setDirSyncFlag(this.dataPath, this.logger);
@@ -103,7 +102,7 @@ class DataFileStore {
                 err => {
                     if (err) {
                         this.logger.error('Error creating subdirs',
                             { error: err });
                         return callback(err);
                     }
                     this.logger.info('data file store init complete, ' +
@@ -167,7 +166,7 @@ class DataFileStore {
         fs.open(filePath, 'wx', (err, fd) => {
             if (err) {
                 log.error('error opening filePath',
                     { method: 'put', key, filePath, error: err });
                 return callback(errors.InternalError.customizeDescription(
                     `filesystem error: open() returned ${err.code}`));
             }
@@ -181,7 +180,7 @@ class DataFileStore {
             fileStream.on('finish', () => {
                 function ok() {
                     log.debug('finished writing data',
                         { method: 'put', key, filePath });
                     return cbOnce(null, key);
                 }
                 if (this.noSync) {
@@ -243,7 +242,7 @@ class DataFileStore {
                 return undefined;
             }).on('error', err => {
                 log.error('error streaming data on write',
                     { method: 'put', key, filePath, error: err });
                 // destroying the write stream forces a close(fd)
                 fileStream.destroy();
                 return cbOnce(errors.InternalError.customizeDescription(
@@ -287,7 +286,7 @@ class DataFileStore {
                     return callback(errors.ObjNotFound);
                 }
                 log.error('error on \'stat\' of file',
                     { key, filePath, error: err });
                 return callback(errors.InternalError.customizeDescription(
                     `filesystem error: stat() returned ${err.code}`));
             }
@@ -323,34 +322,34 @@ class DataFileStore {
             readStreamOptions.end = byteRange[1];
         }
         log.debug('opening readStream to get data',
             { method: 'get', key, filePath, byteRange });
         const cbOnce = jsutil.once(callback);
         const rs = fs.createReadStream(filePath, readStreamOptions)
             .on('error', err => {
                 if (err.code === 'ENOENT') {
                     return cbOnce(errors.ObjNotFound);
+                }
+                log.error('error retrieving file',
+                    { method: 'DataFileStore.get', key, filePath,
+                        error: err });
+                return cbOnce(
+                    errors.InternalError.customizeDescription(
+                        `filesystem read error: ${err.code}`));
+            })
+            .on('open', () => { cbOnce(null, rs); })
+            .on('end', () => {
+                if (this.noCache) {
+                    releasePageCacheSync(filePath, rs.fd, log);
+                }
+                fs.close(rs.fd, err => {
+                    if (err) {
+                        log.error('unable to close file descriptor', {
+                            method: 'DataFileStore.get', key, filePath,
+                            error: err,
+                        });
                 }
-                log.error('error retrieving file',
-                    { method: 'DataFileStore.get', key, filePath,
-                      error: err });
-                return cbOnce(
-                    errors.InternalError.customizeDescription(
-                    `filesystem read error: ${err.code}`));
-            })
-            .on('open', () => { cbOnce(null, rs); })
-            .on('end', () => {
-                if (this.noCache) {
-                    releasePageCacheSync(filePath, rs.fd, log);
-                }
-                fs.close(rs.fd, err => {
-                    if (err) {
-                        log.error('unable to close file descriptor', {
-                            method: 'DataFileStore.get', key, filePath,
-                            error: err,
-                        });
-                    }
-                });
             });
+        });
     }
 
     /**
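DataFileStore.get() above funnels every outcome of the read stream through cbOnce = jsutil.once(callback), so a late 'error' event cannot fire the callback a second time after 'open' already did. A minimal once() wrapper showing the behavior relied on (Arsenal ships its own as jsutil.once; this stand-in only mirrors it):

// Stand-in for Arsenal's jsutil.once: wrap a callback so only the first
// invocation goes through, as relied on by DataFileStore.get() above.
function once(fn) {
    let called = false;
    return (...args) => {
        if (called) {
            return undefined;
        }
        called = true;
        return fn(...args);
    };
}

const cbOnce = once((err, stream) => {
    console.log(err ? `failed: ${err.message}` : 'stream ready');
});

cbOnce(null, {});            // -> "stream ready"
cbOnce(new Error('late'));   // ignored: callback already fired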
@@ -12,7 +12,7 @@ function releasePageCacheSync(filePath, fd, log) {
     const ret = posixFadvise(fd, 0, 0, 4);
     if (ret !== 0) {
         log.warning(
             `error fadv_dontneed ${filePath} returned ${ret}`);
     }
 }
 
@@ -37,16 +37,16 @@ const backend = {
                 }
                 cursor += data.length;
             })
             .on('end', () => {
                 if (exceeded) {
                     log.error('data stream exceed announced size',
                         { size, overflow: cursor });
                     callback(errors.InternalError);
                 } else {
                     ds[count] = { value, keyContext };
                     callback(null, count++);
                 }
             });
     },
 
     get: function getMem(objectGetInfo, range, reqUids, callback) {
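The in-memory backend hunk above counts bytes as they stream in and fails the put when the data overruns the announced size, instead of silently truncating or over-storing. A self-contained sketch of that guard; the stream contents and sizes are invented for illustration:

const { Readable } = require('stream');

// Buffer a stream, failing if it exceeds the announced size, as in the
// in-memory backend's put path above (sizes here are illustrative).
function bufferWithLimit(stream, size, callback) {
    const chunks = [];
    let cursor = 0;
    let exceeded = false;
    stream.on('data', data => {
        if (cursor + data.length > size) {
            exceeded = true;
        }
        cursor += data.length;
        chunks.push(data);
    }).on('end', () => {
        if (exceeded) {
            return callback(new Error(
                `stream exceeded announced size: ${cursor} > ${size}`));
        }
        return callback(null, Buffer.concat(chunks));
    });
}

bufferWithLimit(Readable.from([Buffer.from('hello')]), 3, err => {
    console.log(err.message); // stream exceeded announced size: 5 > 3
});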
@@ -181,25 +181,25 @@ class MetadataWrapper {
         const value = typeof objVal.getValue === 'function' ?
             objVal.getValue() : objVal;
         this.client.putObject(bucketName, objName, value, params, log,
             (err, data) => {
                 if (err) {
                     log.debug('error from metadata', { implName: this.implName,
                         error: err });
                     return cb(err);
                 }
                 if (data) {
                     log.debug('object version successfully put in metadata',
                         { version: data });
                 } else {
                     log.debug('object successfully put in metadata');
                 }
                 return cb(err, data);
             });
     }
 
     getBucketAndObjectMD(bucketName, objName, params, log, cb) {
         log.debug('getting bucket and object from metadata',
             { database: bucketName, object: objName });
         this.client.getBucketAndObject(bucketName, objName, params, log,
             (err, data) => {
                 if (err) {
@@ -208,7 +208,7 @@ class MetadataWrapper {
                     return cb(err);
                 }
                 log.debug('bucket and object retrieved from metadata',
                     { database: bucketName, object: objName });
                 return cb(err, data);
             });
     }
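The put path above accepts either a plain metadata object or a model object exposing getValue(), distinguished with a typeof check rather than an instanceof. A small sketch of that duck-typing; ObjectMDLike is a hypothetical model, not Arsenal's ObjectMD class:

// Duck-typing used by MetadataWrapper above: accept either a plain object
// or a model exposing getValue(). ObjectMDLike is a hypothetical model.
class ObjectMDLike {
    constructor(data) { this._data = data; }
    getValue() { return this._data; }
}

function toValue(objVal) {
    return typeof objVal.getValue === 'function' ?
        objVal.getValue() : objVal;
}

console.log(toValue({ size: 10 }));                   // plain object as-is
console.log(toValue(new ObjectMDLike({ size: 10 }))); // unwrapped model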
@@ -5,7 +5,7 @@ const BucketInfo = require('../../../models/BucketInfo');
 class BucketClientInterface {
     constructor(params, bucketclient, logger) {
         assert(params.bucketdBootstrap.length > 0,
             'bucketd bootstrap list is empty');
         const bootstrap = params.bucketdBootstrap;
         const log = params.bucketdLog;
         if (params.https) {
@@ -29,7 +29,7 @@ class BucketClientInterface {
 
     createBucket(bucketName, bucketMD, log, cb) {
         this.client.createBucket(bucketName, log.getSerializedUids(),
             bucketMD.serialize(), cb);
         return null;
     }
 
@@ -57,17 +57,17 @@ class BucketClientInterface {
 
     getRaftBuckets(raftId, log, cb) {
         return this.client.getRaftBuckets(raftId, log.getSerializedUids(),
             (err, data) => {
                 if (err) {
                     return cb(err);
                 }
                 return cb(null, JSON.parse(data));
             });
     }
 
     putBucketAttributes(bucketName, bucketMD, log, cb) {
         this.client.putBucketAttributes(bucketName, log.getSerializedUids(),
             bucketMD.serialize(), cb);
         return null;
     }
 
@@ -95,7 +95,7 @@ class BucketClientInterface {
 
     deleteObject(bucketName, objName, params, log, cb) {
         this.client.deleteObject(bucketName, objName, log.getSerializedUids(),
             cb, params);
         return null;
     }
 
@@ -183,8 +183,8 @@ class BucketClientInterface {
             reason.msg = undefined;
             respBody[implName] = {
                 code: 200,
                 message, // Provide interpreted reason msg
                 body: reason, // Provide analysis data
             };
             if (failure) {
                 // Setting the `error` field is how the healthCheck
@@ -30,7 +30,6 @@ class ListRecordStream extends stream.Transform {
  * @classdesc Proxy object to access raft log API
  */
 class LogConsumer {
-
     /**
      * @constructor
      *
@@ -97,14 +96,14 @@ class LogConsumer {
                 if (err.code === 404) {
                     // no such raft session, log and ignore
                     this.logger.warn('raft session does not exist yet',
                         { raftId: this.raftSession });
                     return cbOnce(null, { info: { start: null,
                         end: null } });
                 }
                 if (err.code === 416) {
                     // requested range not satisfiable
                     this.logger.debug('no new log record to process',
                         { raftId: this.raftSession });
                     return cbOnce(null, { info: { start: null,
                         end: null } });
                 }
@@ -116,7 +115,7 @@ class LogConsumer {
             // is emitted
             recordStream.on('error', err => {
                 this.logger.error('error receiving raft log',
                     { error: err.message });
                 return cbOnce(errors.InternalError);
             });
             const jsonResponse = stream.pipe(jsonStream.parse('log.*'));
@@ -127,7 +126,7 @@ class LogConsumer {
                     // remove temporary listener
                     recordStream.removeAllListeners('error');
                     return cbOnce(null, { info: header.info,
                         log: recordStream });
                 })
                 .on('error', err => recordStream.emit('error', err));
             return undefined;
@@ -14,13 +14,13 @@ const _operatorType1 = joi.string().valid(
     '$gt',
     '$gte',
     '$lt',
-    '$lte'
+    '$lte',
 );
 
 // supports strings, numbers, and boolean
 const _operatorType2 = joi.string().valid(
     '$eq',
-    '$ne'
+    '$ne',
 );
 
 const _valueType1 = joi.alternatives([
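The only change in the two validator hunks above is a trailing comma after the last valid() argument, the same mechanical edit repeated throughout this branch. It is the style an ESLint comma-dangle rule enforces on multiline calls and literals; a plausible configuration fragment follows (an assumption for illustration, not quoted from eslint-config-scality):

// Hypothetical .eslintrc.js fragment enforcing the trailing commas added
// throughout this branch (assumed, not quoted from the shared config).
module.exports = {
    rules: {
        'comma-dangle': ['error', {
            arrays: 'always-multiline',
            objects: 'always-multiline',
            imports: 'always-multiline',
            exports: 'always-multiline',
            functions: 'always-multiline',
        }],
    },
};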
@@ -10,14 +10,13 @@ const list = require('../../../algos/list/exportAlgos');
 const MetadataFileClient = require('./MetadataFileClient');
 const versionSep =
     require('../../../versioning/constants')
         .VersioningConstants.VersionId.Separator;
 
 const METASTORE = '__metastore';
 
 const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes
 
 class BucketFileInterface {
-
     /**
      * @constructor
      * @param {object} [params] - constructor params
@@ -65,7 +64,7 @@ class BucketFileInterface {
                 if (err) {
                     this.logger.fatal('error writing usersBucket ' +
                         'attributes to metadata',
                         { error: err });
                     throw (errors.InternalError);
                 }
                 return done();
@@ -104,8 +103,8 @@ class BucketFileInterface {
             }
             this.lastItemScanTime = null;
             this.putBucketAttributes(bucketName,
                 bucketMD,
                 log, cb);
             return undefined;
         });
     }
@@ -191,7 +190,7 @@ class BucketFileInterface {
                     errorStack: err.stack,
                 };
                 log.error('error deleting bucket',
                     logObj);
                 return cb(errors.InternalError);
             }
             this.lastItemScanTime = null;
@@ -392,16 +391,16 @@ class BucketFileInterface {
             cbDone = true;
             async.eachSeries(res.bucketList, (bucket, cb) => {
                 this.getBucketAttributes(bucket.name, log,
                     (err, bucketInfo) => {
                         if (err) {
                             return cb(err);
                         }
                         /* eslint-disable no-param-reassign */
                         bucket.location =
                             bucketInfo.getLocationConstraint();
                         /* eslint-enable no-param-reassign */
                         return cb();
                     });
             }, err => {
                 if (!err) {
                     this.lastItemScanTime = Date.now();
@@ -8,7 +8,6 @@ const { RecordLogProxy } = require('./RecordLog.js');
 const werelogs = require('werelogs');
 
 class MetadataFileClient {
-
     /**
      * Construct a metadata client
      *
@@ -86,7 +85,7 @@ class MetadataFileClient {
         logProxy.connect(err => {
             if (err) {
                 this.logger.error('error connecting to record log service',
                     { url, error: err.stack });
                 return done(err);
             }
             this.logger.info('connected to record log service', { url });
@@ -25,7 +25,6 @@ const SYNC_OPTIONS = { sync: true };
 const SUBLEVEL_SEP = '::';
 
 class MetadataFileServer {
-
     /**
      * Construct a metadata server
      *
@@ -218,7 +217,7 @@ class MetadataFileServer {
                     });
                 } else {
                     this.rootDb.batch(ops, SYNC_OPTIONS,
                         err => callback(err));
                 }
             },
         };
@@ -235,17 +234,17 @@ class MetadataFileServer {
             put: (env, key, value, options, cb) => {
                 const dbName = env.subLevel.join(SUBLEVEL_SEP);
                 vrp.put({ db: dbName, key, value, options },
                     env.requestLogger, cb);
             },
             del: (env, key, options, cb) => {
                 const dbName = env.subLevel.join(SUBLEVEL_SEP);
                 vrp.del({ db: dbName, key, options },
                     env.requestLogger, cb);
             },
             get: (env, key, options, cb) => {
                 const dbName = env.subLevel.join(SUBLEVEL_SEP);
                 vrp.get({ db: dbName, key, options },
                     env.requestLogger, cb);
             },
             getDiskUsage: (env, cb) => diskusage.check(this.path, cb),
         });
@@ -18,7 +18,6 @@ const DEFAULT_RECORD_LOG_NAME = 's3-recordlog';
  * object.
  */
 class RecordLogProxy extends rpc.BaseClient {
-
     constructor(params) {
         super(params);
 
@@ -102,7 +101,6 @@ class ListRecordStream extends stream.Transform {
  * updates can be transactional with each other.
  */
 class RecordLogService extends rpc.BaseService {
-
     /**
      * @constructor
      *
@@ -274,12 +272,12 @@ class RecordLogService extends rpc.BaseService {
                 limit: _params.limit,
             };
             const userStream = new ListRecordStream(endSeq,
                 _params.limit);
             const dbStream =
                 openLog.logDb.createReadStream(queryParams);
             dbStream.pipe(userStream);
             dbStream.once('error',
                 err => userStream.emit('error', err));
             userStream.once('error', err => {
                 userStream.removeAllListeners('info');
                 cb(err);
@@ -8,8 +8,8 @@ function markerFilterMPU(allMarkers, array) {
     // in the array that is alphabetically after keyMarker
     const firstUnfilteredIndex = array.findIndex(
         item => (uploadIdMarker && item.key === keyMarker ?
             item.uploadId > uploadIdMarker :
             item.key > keyMarker));
     return firstUnfilteredIndex !== -1 ? array.slice(firstUnfilteredIndex) : [];
 }
 
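markerFilterMPU above skips every listing entry up to and including the (keyMarker, uploadIdMarker) position, then returns the rest. A worked example with invented data; the destructuring of allMarkers is assumed from the surrounding context, since the hunk starts mid-function:

// Worked example of the marker filter above, with invented data.
function markerFilterMPU(allMarkers, array) {
    const { keyMarker, uploadIdMarker } = allMarkers;
    const firstUnfilteredIndex = array.findIndex(
        item => (uploadIdMarker && item.key === keyMarker ?
            item.uploadId > uploadIdMarker :
            item.key > keyMarker));
    return firstUnfilteredIndex !== -1 ? array.slice(firstUnfilteredIndex) : [];
}

const uploads = [
    { key: 'a', uploadId: '1' },
    { key: 'b', uploadId: '1' },
    { key: 'b', uploadId: '2' },
    { key: 'c', uploadId: '1' },
];
// Resume after upload ('b', '1'): keeps ('b', '2') and ('c', '1').
console.log(markerFilterMPU({ keyMarker: 'b', uploadIdMarker: '1' }, uploads));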
@@ -182,7 +182,7 @@ const metastore = {
         if (params && params.versionId) {
             const baseKey = inc(formatVersionKey(objName, ''));
             const vobjName = formatVersionKey(objName,
                 params.versionId);
             metadata.keyMaps.get(bucketName).delete(vobjName);
             const mst = metadata.keyMaps.get(bucketName).get(objName);
             if (mst.versionId === params.versionId) {
@@ -79,9 +79,9 @@ class ListRecordStream extends stream.Readable {
                 'did not encounter the last saved offset in oplog, ' +
                 'resuming processing right after the latest record ' +
                 'to date; some entries may have been skipped', {
                     lastSavedID: this._lastSavedID,
                     latestRecordID: this._latestOplogID,
                 });
             this._unpublishedListing = true;
         }
         ++this._skipCount;
@@ -110,7 +110,7 @@ class ListRecordStream extends stream.Readable {
             }
             entry = {
                 type: 'put', // updates overwrite the whole metadata,
                              // so they are considered as puts
                 key: itemObj.o2._id,
                 // updated value may be either stored directly in 'o'
                 // attribute or in '$set' attribute (supposedly when
@@ -132,7 +132,7 @@ class ListRecordStream extends stream.Readable {
             }
             const streamObject = {
                 timestamp: new Date((itemObj.ts ?
                     itemObj.ts.toNumber() * 1000 : 0)),
                 db: dbName,
                 entries: [entry],
             };
@@ -9,7 +9,6 @@ const MongoUtils = require('./utils');
  * @classdesc Class to consume mongo oplog
  */
 class LogConsumer {
-
     /**
      * @constructor
      *
@@ -43,7 +42,7 @@ class LogConsumer {
             (err, client) => {
                 if (err) {
                     this._logger.error('Unable to connect to MongoDB',
                         { error: err });
                     return done(err);
                 }
                 this._logger.info('connected to mongodb');
@@ -50,7 +50,7 @@ const VID_SEP = require('../../../versioning/constants')
 function generateVersionId(replicationGroupId) {
     // generate a unique number for each member of the nodejs cluster
     return genVID(`${process.pid}.${uidCounter++}`,
         replicationGroupId);
 }
 
 function formatVersionKey(key, versionId) {
@@ -168,7 +168,7 @@ class MongoClientInterface {
                 if (err) {
                     this.logger.fatal('error writing usersBucket ' +
                         'attributes to metastore',
                         { error: err });
                     throw (errors.InternalError);
                 }
                 return cb();
@@ -185,7 +185,7 @@ class MongoClientInterface {
     getCollection(name) {
         /* mongo has a problem with .. in collection names */
         const newName = (name === constants.usersBucket) ?
             USERSBUCKET : name;
         return this.db.collection(newName);
     }
 
@@ -209,7 +209,7 @@ class MongoClientInterface {
             if (err) {
                 log.error(
                     'createBucket: error creating bucket',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             this.lastItemScanTime = null;
@@ -222,7 +222,7 @@ class MongoClientInterface {
             if (err) {
                 log.error(
                     'createBucket: error creating bucket',
                     { error: err });
                 return cb(errors.InternalError);
             }
             if (this.shardCollections) {
@@ -282,12 +282,12 @@ class MongoClientInterface {
             if (err) {
                 if (err === errors.NoSuchKey) {
                     return cb(null,
                         { bucket:
                             BucketInfo.fromObj(bucket).serialize(),
                         });
                 }
                 log.error('getObject: error getting object',
                     { error: err.message });
                 return cb(err);
             }
             return cb(null, {
@@ -317,7 +317,7 @@ class MongoClientInterface {
             if (err) {
                 log.error(
                     'putBucketAttributes: error putting bucket attributes',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             return cb();
@@ -334,12 +334,12 @@ class MongoClientInterface {
         }, {}, (err, result) => {
             if (err) {
                 log.error('deleteBucketStep2: error deleting bucket',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             if (result.ok !== 1) {
                 log.error('deleteBucketStep2: failed deleting bucket',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             return cb(null);
@@ -364,7 +364,7 @@ class MongoClientInterface {
                 return this.deleteBucketStep2(bucketName, log, cb);
             }
             log.error('deleteBucket: error deleting bucket',
                 { error: err.message });
             return cb(errors.InternalError);
         }
         return this.deleteBucketStep2(bucketName, log, err => {
@@ -419,7 +419,7 @@ class MongoClientInterface {
                             $gt: objVal.versionId,
                         },
                     },
                 ],
             },
             update: {
                 $set: { _id: objName, value: objVal },
@@ -494,7 +494,7 @@ class MongoClientInterface {
             if (err) {
                 log.error(
                     'putObjectVerCase2: error putting object version',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             return cb(null, `{"versionId": "${objVal.versionId}"}`);
@@ -538,7 +538,7 @@ class MongoClientInterface {
             updateOne: {
                 // eslint-disable-next-line
                 filter: {
-                    _id: objName,
+                    '_id': objName,
                     'value.versionId': params.versionId,
                 },
                 update: {
@@ -555,7 +555,7 @@ class MongoClientInterface {
             if (err) {
                 log.error(
                     'putObjectVerCase3: error putting object version',
                     { error: err.message });
                 if (err.code === 11000) {
                     // We want duplicate key error logged however in
                     // case of the race condition mentioned above, the
@@ -601,13 +601,13 @@ class MongoClientInterface {
         this.getLatestVersion(c, objName, log, (err, mstObjVal) => {
             if (err) {
                 log.error('getLatestVersion: getting latest version',
                     { error: err.message });
                 return cb(err);
             }
             MongoUtils.serialize(mstObjVal);
             // eslint-disable-next-line
             c.update({
-                _id: objName,
+                '_id': objName,
                 'value.versionId': {
                     // We break the semantic correctness here with
                     // $gte instead of $gt because we do not have
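The two '_id' hunks above (and the similar ones further down) quote the key so that every key in these Mongo filter literals is quoted consistently; 'value.versionId' must already be quoted because of the dot. That is what ESLint's quote-props rule does in consistent-as-needed mode. A sketch of the before/after, with the rule configuration given as an assumption about the shared config, not quoted from it:

// ESLint quote-props in "consistent-as-needed" mode: if one key in a
// literal needs quotes, all keys in that literal get them.
// Before (inconsistent: 'value.versionId' needs quotes, _id is bare):
const before = { _id: 'objName', 'value.versionId': 'v1' };
// After (consistent: every key quoted):
const after = { '_id': 'objName', 'value.versionId': 'v1' };

// Assumed equivalent rule configuration:
// 'quote-props': ['error', 'consistent-as-needed']
console.log(before, after);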
@@ -673,16 +673,16 @@ class MongoClientInterface {
         const c = this.getCollection(bucketName);
         if (params && params.versioning && !params.versionId) {
             return this.putObjectVerCase1(c, bucketName, objName, objVal,
                 params, log, cb);
         } else if (params && params.versionId === '') {
             return this.putObjectVerCase2(c, bucketName, objName, objVal,
                 params, log, cb);
         } else if (params && params.versionId && !params.repairMaster) {
             return this.putObjectVerCase3(c, bucketName, objName, objVal,
                 params, log, cb);
         } else if (params && params.versionId && params.repairMaster) {
             return this.putObjectVerCase4(c, bucketName, objName, objVal,
                 params, log, cb);
         }
         return this.putObjectNoVer(c, bucketName, objName, objVal,
             params, log, cb);
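putObject above dispatches to one of five write paths purely from the shape of params. A condensed sketch of that mapping, useful when reading the Ver-case methods earlier in the file; the branch comments only restate the conditions, since the hunk does not show what each case does internally:

// Mapping of `params` to the write paths dispatched above.
function pickPutCase(params) {
    if (params && params.versioning && !params.versionId) {
        return 'putObjectVerCase1'; // versioning on, no explicit versionId
    } else if (params && params.versionId === '') {
        return 'putObjectVerCase2'; // empty-string versionId
    } else if (params && params.versionId && !params.repairMaster) {
        return 'putObjectVerCase3'; // explicit versionId
    } else if (params && params.versionId && params.repairMaster) {
        return 'putObjectVerCase4'; // explicit versionId + repairMaster
    }
    return 'putObjectNoVer';        // no versioning params at all
}

console.log(pickPutCase({ versioning: true }));                    // case 1
console.log(pickPutCase({ versionId: '' }));                       // case 2
console.log(pickPutCase({ versionId: 'v1' }));                     // case 3
console.log(pickPutCase({ versionId: 'v1', repairMaster: true })); // case 4
console.log(pickPutCase(undefined));                               // no ver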
@@ -699,7 +699,7 @@ class MongoClientInterface {
         }, {}, (err, doc) => {
             if (err) {
                 log.error('findOne: error getting object',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             if (!doc) {
@@ -709,7 +709,7 @@ class MongoClientInterface {
                 this.getLatestVersion(c, objName, log, (err, value) => {
                     if (err) {
                         log.error('getLatestVersion: getting latest version',
                             { error: err.message });
                         return cb(err);
                     }
                     return cb(null, value);
@@ -760,7 +760,7 @@ class MongoClientInterface {
         MongoUtils.serialize(objVal);
         // eslint-disable-next-line
         c.findOneAndReplace({
-            _id: objName,
+            '_id': objName,
             'value.isPHD': true,
             'value.versionId': mst.versionId,
         }, {
@@ -771,12 +771,12 @@ class MongoClientInterface {
         }, (err, result) => {
             if (err) {
                 log.error('repair: error trying to repair value',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             if (result.ok !== 1) {
                 log.error('repair: failed trying to repair value',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             return cb(null);
@@ -791,7 +791,7 @@ class MongoClientInterface {
             this.getLatestVersion(c, objName, log, (err, value) => {
                 if (err) {
                     log.error('async-repair: getting latest version',
                         { error: err.message });
                     return undefined;
                 }
                 this.repair(c, bucketName, objName, value, mst, log, err => {
@@ -822,7 +822,7 @@ class MongoClientInterface {
         // version:
         // eslint-disable-next-line
         c.findOneAndDelete({
-            _id: objName,
+            '_id': objName,
             'value.isPHD': true,
             'value.versionId': mst.versionId,
         }, {}, err => {
@@ -839,7 +839,7 @@ class MongoClientInterface {
                     return undefined;
                 }
                 log.error('getLatestVersion: error getting latest version',
                     { error: err.message });
                 return cb(err);
             }
             // We have other versions available so repair:
@@ -883,7 +883,7 @@ class MongoClientInterface {
             if (err) {
                 log.error(
                     'deleteObjectVerMaster: error deleting object',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             return this.deleteOrRepairPHD(c, bucketName, objName, mst, log, cb);
@@ -927,7 +927,7 @@ class MongoClientInterface {
         }, {}, (err, mst) => {
             if (err) {
                 log.error('deleteObjectVer: error deleting versioned object',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             if (!mst) {
@@ -936,15 +936,15 @@ class MongoClientInterface {
             if (mst.value.isPHD ||
                 mst.value.versionId === params.versionId) {
                 return this.deleteObjectVerMaster(c, bucketName, objName,
                     params, log, err => {
                         if (err) {
                             return cb(err);
                         }
                         return cb();
                     });
             }
             return this.deleteObjectVerNotMaster(c, bucketName, objName,
                 params, log, cb);
         });
     }
 
@@ -975,10 +975,10 @@ class MongoClientInterface {
         const c = this.getCollection(bucketName);
         if (params && params.versionId) {
             return this.deleteObjectVer(c, bucketName, objName,
                 params, log, cb);
         }
         return this.deleteObjectNoVer(c, bucketName, objName,
             params, log, cb);
     }
 
     internalListObject(bucketName, params, extension, log, cb) {
@@ -1041,7 +1041,7 @@ class MongoClientInterface {
         const internalParams = extension.genMDParams();
         internalParams.mongifiedSearch = params.mongifiedSearch;
         return this.internalListObject(bucketName, internalParams, extension,
             log, cb);
     }
 
     listMultipartUploads(bucketName, params, log, cb) {
@@ -1050,7 +1050,7 @@ class MongoClientInterface {
         const internalParams = extension.genMDParams();
         internalParams.mongifiedSearch = params.mongifiedSearch;
         return this.internalListObject(bucketName, internalParams, extension,
             log, cb);
     }
 
     checkHealth(implName, log, cb) {
@@ -1074,7 +1074,7 @@ class MongoClientInterface {
         }, {}, (err, doc) => {
             if (err) {
                 log.error('readUUID: error reading UUID',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             if (!doc) {
@@ -1096,7 +1096,7 @@ class MongoClientInterface {
                     return cb(errors.KeyAlreadyExists);
                 }
                 log.error('writeUUIDIfNotExists: error writing UUID',
                     { error: err.message });
                 return cb(errors.InternalError);
             }
             // FIXME: shoud we check for result.ok === 1 ?
@@ -1114,7 +1114,7 @@ class MongoClientInterface {
             if (err) {
                 if (err === errors.InternalError) {
                     log.error('getUUID: error getting UUID',
                         { error: err.message });
                     return cb(err);
                 }
                 return this.readUUID(log, cb);
@@ -1129,7 +1129,7 @@ class MongoClientInterface {
         // For Kub/cluster deployments there should be a more sophisticated
         // way for guessing free space.
         diskusage.check(this.path !== undefined ?
             this.path : '/', cb);
     }
 
     readCountItems(log, cb) {
@@ -1616,7 +1616,7 @@ class MongoClientInterface {
                 const retResult = this._handleResults(collRes, isVer);
                 retResult.stalled = stalledCount;
                 return callback(null, retResult);
-            }
+            },
         );
     }
 
@@ -1654,7 +1654,7 @@ class MongoClientInterface {
         const filter = { _id: objName };
         try {
             MongoUtils.translateConditions(0, 'value', filter,
                 params.conditions);
         } catch (err) {
             log.error('error creating mongodb filter', {
                 error: reshapeExceptionError(err),
@@ -1700,7 +1700,7 @@ class MongoClientInterface {
         const filter = { _id: objName };
         try {
             MongoUtils.translateConditions(0, 'value', filter,
                 params.conditions);
         } catch (err) {
             log.error('error creating mongodb filter', {
                 error: reshapeExceptionError(err),
@@ -8,8 +8,8 @@ function escape(obj) {
     const _obj = {};
     Object.keys(obj).forEach(prop => {
         const _prop = prop.
             replace(/\$/g, '\uFF04').
             replace(/\./g, '\uFF0E');
         _obj[_prop] = obj[prop];
     });
     return _obj;
@@ -19,8 +19,8 @@ function unescape(obj) {
     const _obj = {};
     Object.keys(obj).forEach(prop => {
         const _prop = prop.
             replace(/\uFF04/g, '$').
             replace(/\uFF0E/g, '.');
         _obj[_prop] = obj[prop];
     });
     return _obj;
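escape() and unescape() above swap '$' and '.' for their fullwidth counterparts (\uFF04, \uFF0E) and back, so arbitrary metadata keys become legal MongoDB field names. Both functions are shown in full in the hunks, so a quick round-trip check is easy to run:

// Round-trip of the escape()/unescape() pair above: '$' and '.' become
// fullwidth characters so keys are legal MongoDB field names.
function escape(obj) {
    const _obj = {};
    Object.keys(obj).forEach(prop => {
        const _prop = prop.
            replace(/\$/g, '\uFF04').
            replace(/\./g, '\uFF0E');
        _obj[_prop] = obj[prop];
    });
    return _obj;
}

function unescape(obj) {
    const _obj = {};
    Object.keys(obj).forEach(prop => {
        const _prop = prop.
            replace(/\uFF04/g, '$').
            replace(/\uFF0E/g, '.');
        _obj[_prop] = obj[prop];
    });
    return _obj;
}

const original = { 'x-amz-meta.user': 1, '$set': 2 };
const escaped = escape(original);
console.log(Object.keys(escaped)); // fullwidth '＄' and '．' in the keys
console.log(unescape(escaped));    // deep-equal to `original`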
@ -35,15 +35,15 @@ class BucketdRoutes {
|
||||||
bucketName, logger, (err, data) => {
|
bucketName, logger, (err, data) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
logger.error('Failed to get bucket attributes',
|
logger.error('Failed to get bucket attributes',
|
||||||
{ bucket: bucketName, error: err });
|
{ bucket: bucketName, error: err });
|
||||||
return sendResponse(req, res, logger, err);
|
return sendResponse(req, res, logger, err);
|
||||||
}
|
}
|
||||||
if (data === undefined) {
|
if (data === undefined) {
|
||||||
return sendResponse(req, res, logger,
|
return sendResponse(req, res, logger,
|
||||||
errors.NoSuchBucket);
|
errors.NoSuchBucket);
|
||||||
}
|
}
|
||||||
return sendResponse(req, res, logger, null,
|
return sendResponse(req, res, logger, null,
|
||||||
BucketInfo.fromObj(data).serialize());
|
BucketInfo.fromObj(data).serialize());
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -112,7 +112,7 @@ class BucketdRoutes {
        return this._metadataWrapper.checkHealth(logger, (err, resp) => {
            if (err) {
                logger.error('Failed the health check',
                    { error: err, method: '_checkHealth' });
                return sendResponse(req, res, logger, err);
            }
            return sendResponse(req, res, logger, undefined, resp);
@@ -122,8 +122,8 @@ class BucketdRoutes {
    _createRequestLogger(req) {
        const uids = req.headers['x-scal-request-uids'];
        const logger = uids === undefined ?
            this._logger.newRequestLogger() :
            this._logger.newRequestLoggerFromSerializedUids(uids);
        logger.trace('new request', { method: req.method, url: req.url });
        return logger;
    }
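The x-scal-request-uids header lets a caller hand its trace identifiers to the proxy, so log entries on both sides share one request uid chain. A sketch of the client side of that contract, assuming the werelogs-style API used above and assuming getSerializedUids() exists on the request logger as the producer of the header value:

const http = require('http');

// Hypothetical helper: forward a request while propagating tracing.
function forwardWithTracing(reqLogger, path, callback) {
    const options = {
        host: 'localhost',      // assumed proxy address
        port: 9000,             // assumed proxy port
        path,
        headers: { 'x-scal-request-uids': reqLogger.getSerializedUids() },
    };
    // The proxy reads the header and calls
    // newRequestLoggerFromSerializedUids(), continuing the uid chain.
    http.get(options, res => callback(null, res)).on('error', callback);
}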
@@ -150,7 +150,7 @@ class BucketdRoutes {
    _attributesRoutes(req, res, uriComponents, logger) {
        if (uriComponents.bucketName === undefined) {
            logger.error('Missing bucket name for attributes route',
                { uriComponents });
            return sendResponse(req, res, logger, errors.BadRequest);
        }
        switch (req.method) {
@@ -195,26 +195,26 @@ class BucketdRoutes {
    _bucketRoutes(req, res, uriComponents, logger) {
        if (uriComponents.bucketName === undefined) {
            logger.error('Missing bucket name for bucket route',
                { uriComponents });
            return sendResponse(req, res, logger, errors.BadRequest);
        }
        switch (req.method) {
        case 'GET':
            return this._listObject(req, res,
                uriComponents.bucketName,
                uriComponents.options,
                logger);
        case 'DELETE':
            return this._deleteBucket(req, res,
                uriComponents.bucketName, logger);
        case 'POST':
            return getRequestBody(logger, req, (err, body) => {
                if (err) {
                    return sendResponse(req, res, logger, err);
                }
                return this._createBucket(req, res,
                    uriComponents.bucketName,
                    body, logger);
            });
        default:
            return sendResponse(req, res, logger, errors.RouteNotFound);
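Seen from a client, these routes map plain HTTP verbs to metadata operations: GET lists a bucket, POST creates one, DELETE removes it. A hedged sketch of raw calls against them; the /default/bucket/<name> path shape is an assumption inferred from uriComponents, not confirmed by this diff:

const http = require('http');

// GET -> _listObject: listing parameters travel in the query string.
http.get('http://localhost:9000/default/bucket/mybucket?prefix=photos/',
    res => res.pipe(process.stdout));

// POST -> _createBucket: serialized bucket attributes go in the body.
const req = http.request({
    method: 'POST',
    host: 'localhost',
    port: 9000,
    path: '/default/bucket/mybucket',
}, res => console.log('create status:', res.statusCode));
req.end(JSON.stringify({ /* serialized BucketInfo, elided */ }));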
@@ -233,30 +233,30 @@ class BucketdRoutes {
    _objectRoutes(req, res, uriComponents, logger) {
        if (uriComponents.bucketName === undefined) {
            logger.error('Missing bucket name for object route',
                { uriComponents });
            return sendResponse(req, res, logger, errors.BadRequest);
        }
        switch (req.method) {
        case 'GET':
            return this._getObject(req, res,
                uriComponents.bucketName,
                uriComponents.objectName,
                uriComponents.options,
                logger);
        case 'DELETE':
            return this._deleteObject(req, res,
                uriComponents.bucketName,
                uriComponents.objectName,
                uriComponents.options,
                logger);
        case 'POST':
            return getRequestBody(logger, req, (err, body) =>
                this._putObject(req, res,
                    uriComponents.bucketName,
                    uriComponents.objectName,
                    body,
                    uriComponents.options,
                    logger));
        default:
            return sendResponse(req, res, logger, errors.RouteNotFound);
        }
@@ -274,16 +274,16 @@ class BucketdRoutes {
    _parallelRoutes(req, res, uriComponents, logger) {
        if (uriComponents.bucketName === undefined) {
            logger.error('Missing bucket name for parallel route',
                { uriComponents });
            return sendResponse(req, res, logger, errors.BadRequest);
        }
        switch (req.method) {
        case 'GET':
            return this._getBucketAndObjectMD(req, res,
                uriComponents.bucketName,
                uriComponents.objectName,
                uriComponents.options,
                logger);
        default:
            return sendResponse(req, res, logger, errors.RouteNotFound);
        }
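The parallel route exists so a caller can fetch bucket attributes and object metadata in one round trip instead of two sequential requests. A sketch of what such a combined response might look like; the field names here are an assumption, not taken from this diff:

// Assumed response shape of _getBucketAndObjectMD: two independently
// serialized payloads that the caller JSON.parses separately.
const parallelResponse = {
    bucket: '{"name":"mybucket","owner":"...","acl":{}}', // serialized BucketInfo
    obj: '{"content-length":1024,"versionId":"..."}',     // serialized object MD
};
console.log(JSON.parse(parallelResponse.bucket).name); // 'mybucket'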
@@ -307,7 +307,7 @@ class BucketdRoutes {
            return sendResponse(req, res, logger, errors.NotImplemented);
        case 'metadataInformation':
            return sendResponse(req, res, logger, undefined,
                '{"metadataVersion":2}');
        case 'parallel':
            logger.trace(`${uriComponents.context} operation`);
            if (uriComponents.objectName) {
@@ -11,7 +11,6 @@ const requiresOneWorker = {
};

class Server {

    /**
     * Create a new Metadata Proxy Server instance
     *
@@ -31,7 +30,7 @@ class Server {
        if (requiresOneWorker[metadataWrapper.implName] &&
            this._configuration.workers !== 1) {
            logger.warn('This metadata backend requires only one worker',
                { metadataBackend: metadataWrapper.implName });
            this._configuration.workers = 1;
        }
        this._logger = logger;
@@ -79,7 +78,7 @@ class Server {
            });
        } else {
            this._httpServer = new HttpServer(this._configuration.port,
                this._logger);
            if (this._configuration.bindAddress) {
                this._httpServer.setBindAddress(
                    this._configuration.bindAddress);
@@ -67,8 +67,8 @@ function getURIComponents(uri, logger) {
        };
    } catch (ex) {
        logger.error('Invalid URI: failed to parse',
            { uri, error: ex, errorStack: ex.stack,
                message: ex.message });
        return null;
    }
}
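For orientation, this is the parsed structure the routing methods above consume. The bucketName, objectName, options, and context fields appear directly in the route code; the concrete URI below and the namespace field are hypothetical, and the try/catch is presumably guarding the URI decoding step:

// Hypothetical parse of '/default/bucket/mybucket/some%2Fkey?versionId=abc':
const uriComponents = {
    namespace: 'default',          // assumed leading path segment
    context: 'bucket',             // selects _bucketRoutes/_objectRoutes/...
    bucketName: 'mybucket',
    objectName: 'some/key',        // percent-decoded, which can throw
    options: { versionId: 'abc' }, // parsed query string
};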
@@ -60,8 +60,8 @@ class TestMatrix {
        this.elementsToSpecialize = elementsToSpecialize;
        this.callback = callback;
        this.description = typeof description === 'undefined'
            ? ''
            : description;
        return this;
    }

@@ -158,15 +158,15 @@ class TestMatrix {
        const callFunction = (matrixFather, matrixChild, callback,
            description) => {
            const result = Object.keys(matrixChild.params)
                .every(currentKey =>
                    Object.prototype.toString.call(
-                        matrixChild.params[currentKey]
+                        matrixChild.params[currentKey],
                    ).indexOf('Array') === -1);

            if (result === true) {
                describe(matrixChild.serialize(), () => {
                    it(description,
                        done => callback(matrixChild, done));
                });
            } else {
                describe(matrixChild.serialize(), () => {
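The every(...) check above decides whether a matrix entry is fully specialized: any param still holding an array needs further expansion into child matrices, while all-scalar params can run as a concrete it() case. The detection idiom, standalone (Object.prototype.toString is used instead of Array.isArray, but both classify arrays the same way):

const fullySpecialized = params =>
    Object.keys(params).every(key =>
        Object.prototype.toString.call(params[key]).indexOf('Array') === -1);

fullySpecialized({ auth: 'v4', acl: 'private' });         // true  -> run it()
fullySpecialized({ auth: ['v2', 'v4'], acl: 'private' }); // false -> specialize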
@@ -247,7 +247,7 @@ function decode(str) {
}

module.exports = { generateVersionId, getInfVid,
    hexEncode, hexDecode,
    base62Encode, base62Decode,
    encode, decode,
    ENC_TYPE_HEX, ENC_TYPE_BASE62 };
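This module pairs a version-ID generator with two wire encodings (hex and base62, selected by ENC_TYPE_HEX / ENC_TYPE_BASE62). A hedged usage sketch; the argument shape of generateVersionId is assumed, and the module path is omitted because the diff does not show it:

// const { generateVersionId, encode, decode } = require('<versionID module>');
//
// const vid = generateVersionId('', 'PARIS'); // raw, monotonically ordered id
// const wire = encode(vid);                   // string safe to expose to clients
// const back = decode(wire);                  // restores the raw id
// // decode(encode(v)) === v is the invariant callers rely on.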
@@ -84,7 +84,7 @@ class VersioningRequestProcessor {
                return callback(null, data);
            }
            logger.debug('master version is a PHD, getting the latest version',
                { db, key });
            // otherwise, need to search for the latest version
            return this.getByListing(request, logger, callback);
        });
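A PHD ("placeholder") master is written when the true latest version is temporarily unknown, typically right after the current master was deleted; the read path above detects it and falls back to listing the version keys. A sketch of what such a value conceptually looks like; the real format lives in the Version class and may differ:

const phdMaster = JSON.stringify({ isPHD: true, versionId: '98451042366223999999' });
// Minimal detection, mirroring the check implied above: a PHD value
// carries no usable object data, so the reader must go list versions.
const isPHD = value => {
    try { return JSON.parse(value).isPHD === true; } catch (e) { return false; }
};
// isPHD(phdMaster) === true  ->  getByListing() finds the latest real version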
@@ -187,7 +187,7 @@ class VersioningRequestProcessor {
                    return entry.callback(err, value);
                }
                return this.wgm.get(entry.request, entry.logger,
                    entry.callback);
            });
            delete this.queue[cacheKey];
        }
@@ -267,19 +267,19 @@ class VersioningRequestProcessor {
                return callback(err);
            }
            return this.writeCache.batch({ db, array, options },
                logger, err => callback(err, `{"versionId":"${vid}"}`));
        };

        if (versionId) {
            return this.processVersionSpecificPut(request, logger,
                versioningCb);
        }
        if (versioning) {
            return this.processNewVersionPut(request, logger, versioningCb);
        }
        // no versioning or versioning configuration off
        return this.writeCache.batch({ db, array: [{ key, value }] },
            logger, callback);
    }

    /**
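The versioningCb above receives a prepared array of writes and commits them as one atomic batch, then reports the new versionId back to the caller. For a new-version PUT, that array plausibly holds two entries: the version-specific key and a refresh of the master key. The key layout and separator below are assumptions, not shown in this hunk:

const VID_SEP = '\0'; // hypothetical key/versionId separator
const key = 'photo.jpg';
const vid = '98451042366223999999';
const value = '{"content-length":1024}';
const array = [
    { key: `${key}${VID_SEP}${vid}`, value }, // version-specific key
    { key, value },                           // master key, latest wins
];
// this.writeCache.batch({ db, array }, logger,
//     err => callback(err, `{"versionId":"${vid}"}`));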
@@ -353,7 +353,7 @@ class VersioningRequestProcessor {
        if (!(options && options.versionId)) {
            return this.writeCache.batch({ db,
                array: [{ key, type: 'del' }] },
            logger, callback);
        }
        // version specific DELETE
        return this.processVersionSpecificDelete(request, logger,
@@ -399,7 +399,7 @@ class VersioningRequestProcessor {
                const cacheKey = formatCacheKey(db, key);
                clearTimeout(this.repairing[cacheKey]);
                this.repairing[cacheKey] = setTimeout(() =>
                    this.getByListing(request, logger, () => {}), 15000);
            }
            return callback(null, ops, versionId);
        });
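The clearTimeout/setTimeout pair above is a per-key debounce: repeated repair triggers for the same db/key collapse into a single getByListing call 15 seconds after the last trigger, keeping repair traffic bounded under churn. The pattern, standalone:

const pending = {};
// Each new trigger for the same key cancels the pending timer, so
// only the last one fires after the quiet period.
function scheduleRepair(cacheKey, repairFn, delayMs = 15000) {
    clearTimeout(pending[cacheKey]);
    pending[cacheKey] = setTimeout(() => {
        delete pending[cacheKey];
        repairFn();
    }, delayMs);
}
// scheduleRepair('db/key', () => getByListing(request, logger, () => {}));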