Compare commits: development ... ft/save/GC

27 commits
Author | SHA1 | Date |
---|---|---|
Alexander Chan | bb41d0ff0a | |
Alexander Chan | ed60723bb4 | |
Bennett Buchanan | 660f2d7ed8 | |
Alexander Chan | 585e39487a | |
Bennett Buchanan | 2fa427a89f | |
Bennett Buchanan | 084ff933f7 | |
Alexander Chan | 8f29f1ba16 | |
Rahul Padigela | 469eb70a8b | |
Rahul Padigela | 9bd2b9c4ff | |
Alexander Chan | 821effd87b | |
Bennett Buchanan | 96a331bb78 | |
Alexander Chan | b11ce87f9e | |
Rahul Padigela | 80bfb61527 | |
Alexander Chan | 3a020b5099 | |
Bennett Buchanan | 1894c4272e | |
Alexander Chan | 03c63ee41c | |
Bennett Buchanan | 2af2310a85 | |
Alexander Chan | bb9ef7a26f | |
Bennett Buchanan | ef998a041f | |
Alexander Chan | 3bf001f9e1 | |
Bennett Buchanan | 173436bf56 | |
Alexander Chan | 7c2f210171 | |
ironman-machine | c999d8acf1 | |
Alexander Chan | 1be83ceacb | |
Alexander Chan | 83debbcc6c | |
ironman-machine | df18e651e9 | |
Alexander Chan | 5819501dc0 | |
constants.js (10 changes)

```diff
@@ -39,6 +39,8 @@ const constants = {
     // once the multipart upload is complete.
     mpuBucketPrefix: 'mpuShadowBucket',
     blacklistedPrefixes: { bucket: [], object: [] },
+    // GCP Object Tagging Prefix
+    gcpTaggingPrefix: 'aws-tag-',
     // PublicId is used as the canonicalID for a request that contains
     // no authentication information. Requestor can access
     // only public resources
@@ -64,6 +66,10 @@ const constants = {
     // http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
     minimumAllowedPartSize: 5242880,
 
+    // AWS sets a maximum total parts limit
+    // https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadUploadPart.html
+    maximumAllowedPartCount: 10000,
+
     // Max size on put part or copy part is 5GB. For functional
     // testing use 110 MB as max
     maximumAllowedPartSize: process.env.MPU_TESTING === 'yes' ? 110100480 :
@@ -115,8 +121,8 @@ const constants = {
     // for external backends, don't call unless at least 1 minute
     // (60,000 milliseconds) since last call
     externalBackendHealthCheckInterval: 60000,
-    versioningNotImplBackends: { azure: true },
-    mpuMDStoredExternallyBackend: { aws_s3: true },
+    versioningNotImplBackends: { azure: true, gcp: true },
+    mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
     /* eslint-enable camelcase */
     mpuMDStoredOnS3Backend: { azure: true },
     azureAccountNameRegex: /^[a-z0-9]{3,24}$/,
```
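These capability maps are what the guards elsewhere in this diff consult. A minimal standalone sketch (simplified; `locationType` and the message wording are taken from the hunks below, the function itself is hypothetical) of how `versioningNotImplBackends` gates a versioned PUT:

```js
const { versioningNotImplBackends } = require('../constants');

// Hypothetical simplified guard: reject a versioned PUT when the target
// location's backend type (e.g. 'azure' or 'gcp') has no versioning support.
function checkVersioningSupport(locationType, bucketVersioningEnabled) {
    if (bucketVersioningEnabled && versioningNotImplBackends[locationType]) {
        return new Error('We do not currently support putting a versioned ' +
            'object to a location-constraint of type Azure or GCP.');
    }
    return null; // supported
}
```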
```diff
@@ -59,6 +59,7 @@ function restEndpointsAssert(restEndpoints, locationConstraints) {
 function gcpLocationConstraintAssert(location, locationObj) {
     const {
         gcpEndpoint,
+        jsonEndpoint,
         bucketName,
         mpuBucketName,
         overflowBucketName,
@@ -82,6 +83,7 @@ function gcpLocationConstraintAssert(location, locationObj) {
         serviceCredentials.serviceKey;
     const stringFields = [
         gcpEndpoint,
+        jsonEndpoint,
         bucketName,
         mpuBucketName,
         overflowBucketName,
@@ -988,8 +990,15 @@ class Config extends EventEmitter {
             process.env[`${locationConstraint}_GCP_SERVICE_KEYFILE`];
         const serviceEmailFromEnv =
             process.env[`${locationConstraint}_GCP_SERVICE_EMAIL`];
-        const serviceKeyFromEnv =
+        let serviceKeyFromEnv =
            process.env[`${locationConstraint}_GCP_SERVICE_KEY`];
+        // the environment variable is a RSA private key.
+        // when read directly, the newline '\n' will
+        // be interpreted as '\\n', so the following check will
+        // fix this by replacing '\\n' with the actual newline char '\n'
+        if (typeof serviceKeyFromEnv === 'string') {
+            serviceKeyFromEnv = serviceKeyFromEnv.replace(/\\n/g, '\n');
+        }
         const serviceScopeFromEnv =
             process.env[`${locationConstraint}_GCP_SERVICE_SCOPE`];
         return {
```
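To see why the replacement above is needed: shells and env files usually deliver the two characters backslash-`n` rather than a real newline, which breaks PEM parsing. A minimal standalone sketch (the variable name is hypothetical, not a CloudServer env var):

```js
// Simulate a PEM key exported through an env file: "\n" arrives as the two
// characters '\' + 'n', which a PEM parser cannot handle.
process.env.MY_GCP_SERVICE_KEY =
    '-----BEGIN RSA PRIVATE KEY-----\\nMIIEpAIB...\\n-----END RSA PRIVATE KEY-----';

let key = process.env.MY_GCP_SERVICE_KEY;
if (typeof key === 'string') {
    key = key.replace(/\\n/g, '\n'); // restore real newlines
}
console.log(key.split('\n').length); // 3 lines, as a PEM parser expects
```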
```diff
@@ -17,7 +17,7 @@ const validateWebsiteHeader = require('./websiteServing')
 const { externalBackends, versioningNotImplBackends } = constants;
 
 const externalVersioningErrorMessage = 'We do not currently support putting ' +
-    'a versioned object to a location-constraint of type Azure.';
+    'a versioned object to a location-constraint of type Azure or GCP.';
 
 
 function _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
@@ -238,4 +238,5 @@ splitter, log) {
 module.exports = {
     generateMpuPartStorageInfo,
     validateAndFilterMpuParts,
+    createAggregateETag,
 };
```
```diff
@@ -11,7 +11,7 @@ const versioningNotImplBackends =
 const { config } = require('../Config');
 
 const externalVersioningErrorMessage = 'We do not currently support putting ' +
-    'a versioned object to a location-constraint of type Azure.';
+    'a versioned object to a location-constraint of type Azure or GCP.';
 
 /**
  * Format of xml request:
```
```diff
@@ -16,7 +16,7 @@ const { config } = require('../Config');
 const multipleBackendGateway = require('../data/multipleBackendGateway');
 
 const externalVersioningErrorMessage = 'We do not currently support putting ' +
-    'a versioned object to a location-constraint of type Azure.';
+    'a versioned object to a location-constraint of type Azure or GCP.';
 
 /*
    Sample xml response:
```
```diff
@@ -25,7 +25,7 @@ const versionIdUtils = versioning.VersionID;
 const locationHeader = constants.objectLocationConstraintHeader;
 const versioningNotImplBackends = constants.versioningNotImplBackends;
 const externalVersioningErrorMessage = 'We do not currently support putting ' +
-    'a versioned object to a location-constraint of type AWS or Azure.';
+    'a versioned object to a location-constraint of type AWS or Azure or GCP.';
 
 /**
  * Preps metadata to be saved (based on copy or replace request header)
```
```diff
@@ -201,7 +201,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
                 // if data backend handles MPU, skip to end of waterfall
                 return next(skipError, destinationBucket,
                     partInfo.dataStoreETag);
-            } else if (partInfo && partInfo.dataStoreType === 'azure') {
+            } else if (partInfo &&
+                ['azure', 'gcp'].includes(partInfo.dataStoreType)) {
                 return next(null, destinationBucket,
                     objectLocationConstraint, cipherBundle, splitter,
                     partInfo);
@@ -250,7 +251,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
         (destinationBucket, objectLocationConstraint, cipherBundle,
             partKey, prevObjectSize, oldLocations, partInfo, next) => {
             // NOTE: set oldLocations to null so we do not batchDelete for now
-            if (partInfo && partInfo.dataStoreType === 'azure') {
+            if (partInfo &&
+                ['azure', 'gcp'].includes(partInfo.dataStoreType)) {
                 // skip to storing metadata
                 return next(null, destinationBucket, partInfo,
                     partInfo.dataStoreETag,
```
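The two hunks above generalize a single-backend equality check into a membership test, so further backends can be added by growing the list. A standalone sketch of the equivalent predicate:

```js
// Before: handles exactly one external backend.
const wasExternalMpuBackend = partInfo && partInfo.dataStoreType === 'azure';

// After: membership test, extensible by appending to the array.
const handledBackends = ['azure', 'gcp'];
const isExternalMpuBackend =
    partInfo && handledBackends.includes(partInfo.dataStoreType);
```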
```diff
@@ -15,6 +15,7 @@ const missingVerIdInternalError = errors.InternalError.customizeDescription(
 class AwsClient {
     constructor(config) {
         this.clientType = 'aws_s3';
+        this.type = 'AWS';
         this._s3Params = config.s3Params;
         this._awsBucketName = config.bucketName;
         this._bucketMatch = config.bucketMatch;
@@ -39,16 +40,16 @@ class AwsClient {
         const putCb = (err, data) => {
             if (err) {
                 logHelper(log, 'error', 'err from data backend',
-                    err, this._dataStoreName);
+                    err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             if (!data.VersionId) {
                 logHelper(log, 'error', 'missing version id for data ' +
                     'backend object', missingVerIdInternalError,
-                    this._dataStoreName);
+                    this._dataStoreName, this.clientType);
                 return callback(missingVerIdInternalError);
             }
             const dataStoreVersionId = data.VersionId;
@@ -106,14 +107,15 @@ class AwsClient {
             if (err.code === 'NotFound') {
                 const error = errors.ServiceUnavailable
                     .customizeDescription(
-                    'Unexpected error from AWS: "NotFound". Data on AWS ' +
+                    `Unexpected error from ${this.type}: ` +
+                    `"NotFound". Data on ${this.type} ` +
                     'may have been altered outside of CloudServer.'
                 );
                 return callback(error);
             }
             return callback(errors.ServiceUnavailable
                 .customizeDescription('Error returned from ' +
-                `AWS: ${err.message}`)
+                `${this.type}: ${err.message}`)
             );
         }
         return callback();
@@ -128,12 +130,14 @@ class AwsClient {
             VersionId: dataStoreVersionId,
             Range: range ? `bytes=${range[0]}-${range[1]}` : null,
         }).on('success', response => {
-            log.trace('AWS GET request response headers',
-                { responseHeaders: response.httpResponse.headers });
+            log.trace(`${this.type} GET request response headers`,
+                { responseHeaders: response.httpResponse.headers,
+                    backendType: this.clientType });
         });
         const stream = request.createReadStream().on('error', err => {
-            logHelper(log, 'error', 'error streaming data from AWS',
-                err, this._dataStoreName);
+            logHelper(log, 'error',
+                `error streaming data from ${this.type}`,
+                err, this._dataStoreName, this.clientType);
             return callback(err);
         });
         return callback(null, stream);
@@ -151,7 +155,7 @@ class AwsClient {
         return this._client.deleteObject(params, err => {
             if (err) {
                 logHelper(log, 'error', 'error deleting object from ' +
-                    'datastore', err, this._dataStoreName);
+                    'datastore', err, this._dataStoreName, this.clientType);
                 if (err.code === 'NoSuchVersion') {
                     // data may have been deleted directly from the AWS backend
                     // don't want to retry the delete and errors are not
@@ -160,7 +164,7 @@ class AwsClient {
                 }
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             return callback();
@@ -224,10 +228,10 @@ class AwsClient {
         return this._client.createMultipartUpload(params, (err, mpuResObj) => {
             if (err) {
                 logHelper(log, 'error', 'err from data backend',
-                    err, this._dataStoreName);
+                    err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             return callback(null, mpuResObj);
@@ -252,10 +256,10 @@ class AwsClient {
         return this._client.uploadPart(params, (err, partResObj) => {
             if (err) {
                 logHelper(log, 'error', 'err from data backend ' +
-                    'on uploadPart', err, this._dataStoreName);
+                    'on uploadPart', err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             // Because we manually add quotes to ETag later, remove quotes here
@@ -280,10 +284,10 @@ class AwsClient {
         return this._client.listParts(params, (err, partList) => {
             if (err) {
                 logHelper(log, 'error', 'err from data backend on listPart',
-                    err, this._dataStoreName);
+                    err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             // build storedParts object to mimic Scality S3 backend returns
@@ -348,20 +352,20 @@ class AwsClient {
             if (err) {
                 if (mpuError[err.code]) {
                     logHelper(log, 'trace', 'err from data backend on ' +
-                        'completeMPU', err, this._dataStoreName);
+                        'completeMPU', err, this._dataStoreName, this.clientType);
                     return callback(errors[err.code]);
                 }
                 logHelper(log, 'error', 'err from data backend on ' +
-                    'completeMPU', err, this._dataStoreName);
+                    'completeMPU', err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             if (!completeMpuRes.VersionId) {
                 logHelper(log, 'error', 'missing version id for data ' +
                     'backend object', missingVerIdInternalError,
-                    this._dataStoreName);
+                    this._dataStoreName, this.clientType);
                 return callback(missingVerIdInternalError);
             }
             // need to get content length of new object to store
@@ -370,10 +374,10 @@ class AwsClient {
             (err, objHeaders) => {
                 if (err) {
                     logHelper(log, 'trace', 'err from data backend on ' +
-                        'headObject', err, this._dataStoreName);
+                        'headObject', err, this._dataStoreName, this.clientType);
                     return callback(errors.ServiceUnavailable
                         .customizeDescription('Error returned from ' +
-                        `AWS: ${err.message}`)
+                        `${this.type}: ${err.message}`)
                     );
                 }
                 // remove quotes from eTag because they're added later
@@ -396,10 +400,11 @@ class AwsClient {
             if (err) {
                 logHelper(log, 'error', 'There was an error aborting ' +
                     'the MPU on AWS S3. You should abort directly on AWS S3 ' +
-                    'using the same uploadId.', err, this._dataStoreName);
+                    'using the same uploadId.', err, this._dataStoreName,
+                    this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             return callback();
@@ -424,10 +429,11 @@ class AwsClient {
         return this._client.putObjectTagging(tagParams, err => {
             if (err) {
                 logHelper(log, 'error', 'error from data backend on ' +
-                    'putObjectTagging', err, this._dataStoreName);
+                    'putObjectTagging', err,
+                    this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             return callback();
@@ -446,10 +452,11 @@ class AwsClient {
         return this._client.deleteObjectTagging(tagParams, err => {
             if (err) {
                 logHelper(log, 'error', 'error from data backend on ' +
-                    'deleteObjectTagging', err, this._dataStoreName);
+                    'deleteObjectTagging', err,
+                    this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             return callback();
@@ -482,24 +489,24 @@ class AwsClient {
             if (err) {
                 if (err.code === 'AccessDenied') {
                     logHelper(log, 'error', 'Unable to access ' +
-                        `${sourceAwsBucketName} AWS bucket`, err,
-                        this._dataStoreName);
+                        `${sourceAwsBucketName} ${this.type} bucket`, err,
+                        this._dataStoreName, this.clientType);
                     return callback(errors.AccessDenied
                         .customizeDescription('Error: Unable to access ' +
-                        `${sourceAwsBucketName} AWS bucket`)
+                        `${sourceAwsBucketName} ${this.type} bucket`)
                     );
                 }
                 logHelper(log, 'error', 'error from data backend on ' +
-                    'copyObject', err, this._dataStoreName);
+                    'copyObject', err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             if (!copyResult.VersionId) {
                 logHelper(log, 'error', 'missing version id for data ' +
                     'backend object', missingVerIdInternalError,
-                    this._dataStoreName);
+                    this._dataStoreName, this.clientType);
                 return callback(missingVerIdInternalError);
             }
             return callback(null, destAwsKey, copyResult.VersionId);
@@ -532,17 +539,17 @@ class AwsClient {
                 if (err.code === 'AccessDenied') {
                     logHelper(log, 'error', 'Unable to access ' +
                         `${sourceAwsBucketName} AWS bucket`, err,
-                        this._dataStoreName);
+                        this._dataStoreName, this.clientType);
                     return callback(errors.AccessDenied
                         .customizeDescription('Error: Unable to access ' +
                         `${sourceAwsBucketName} AWS bucket`)
                     );
                 }
                 logHelper(log, 'error', 'error from data backend on ' +
-                    'uploadPartCopy', err, this._dataStoreName);
+                    'uploadPartCopy', err, this._dataStoreName, this.clientType);
                 return callback(errors.ServiceUnavailable
                     .customizeDescription('Error returned from ' +
-                    `AWS: ${err.message}`)
+                    `${this.type}: ${err.message}`)
                 );
             }
             const eTag = removeQuotes(res.CopyPartResult.ETag);
```
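Replacing the hard-coded 'AWS' strings with `this.type` and threading `this.clientType` through `logHelper` lets another client reuse these code paths. A hypothetical sketch of how a subclass could set those fields (this class is not part of the diff shown; it only illustrates the point of the change):

```js
const AwsClient = require('./AwsClient');

// Hypothetical subclass, for illustration only.
class GcpClientSketch extends AwsClient {
    constructor(config) {
        super(config);
        this.clientType = 'gcp'; // appears in structured log fields
        this.type = 'GCP';       // appears in user-facing error messages
    }
}
// An error surfaced through the shared callbacks above would now read
// "Error returned from GCP: ..." instead of "Error returned from AWS: ...".
```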
```diff
@@ -0,0 +1,35 @@
+const { errors } = require('arsenal');
+const MpuHelper = require('./mpuHelper');
+const { createMpuKey, logger } = require('../GcpUtils');
+const { logHelper } = require('../../utils');
+
+/**
+ * abortMPU - remove all objects of a GCP Multipart Upload
+ * @param {object} params - abortMPU params
+ * @param {string} params.Bucket - bucket name
+ * @param {string} params.MPU - mpu bucket name
+ * @param {string} params.Overflow - overflow bucket name
+ * @param {string} params.Key - object key
+ * @param {number} params.UploadId - MPU upload id
+ * @param {function} callback - callback function to call
+ * @return {undefined}
+ */
+function abortMPU(params, callback) {
+    if (!params || !params.Key || !params.UploadId ||
+        !params.Bucket || !params.MPU || !params.Overflow) {
+        const error = errors.InvalidRequest
+            .customizeDescription('Missing required parameter');
+        logHelper(logger, 'error', 'error in abortMultipartUpload', error);
+        return callback(error);
+    }
+    const mpuHelper = new MpuHelper(this);
+    const delParams = {
+        Bucket: params.Bucket,
+        MPU: params.MPU,
+        Overflow: params.Overflow,
+        Prefix: createMpuKey(params.Key, params.UploadId),
+    };
+    return mpuHelper.removeParts(delParams, callback);
+}
+
+module.exports = abortMPU;
```
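abortMPU delegates all cleanup to `MpuHelper.removeParts` using the upload's key prefix. A hedged usage sketch (the client variable and bucket names are assumptions; the exposed method name comes from the index.js mapping later in this diff):

```js
// Hypothetical invocation through a GCP client instance.
gcpClient.abortMultipartUpload({
    Bucket: 'main-bucket',       // final object bucket (assumed name)
    MPU: 'mpu-shadow-bucket',    // staging bucket holding the parts
    Overflow: 'overflow-bucket', // staging bucket for compose overflow
    Key: 'big-object',
    UploadId: 'abc123',
}, err => {
    if (err) {
        // some parts may remain; the caller can retry with the same UploadId
    }
});
```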
```diff
@@ -0,0 +1,84 @@
+const async = require('async');
+const { errors } = require('arsenal');
+const MpuHelper = require('./mpuHelper');
+const { createMpuList, createMpuKey, logger } = require('../GcpUtils');
+const { logHelper } = require('../../utils');
+
+/**
+ * completeMPU - merges a list of parts into a single object
+ * @param {object} params - completeMPU params
+ * @param {string} params.Bucket - bucket name
+ * @param {string} params.MPU - mpu bucket name
+ * @param {string} params.Overflow - overflow bucket name
+ * @param {string} params.Key - object key
+ * @param {number} params.UploadId - MPU upload id
+ * @param {Object} params.MultipartUpload - MPU upload object
+ * @param {Object[]} params.MultipartUpload.Parts - a list of parts to merge
+ * @param {function} callback - callback function to call with MPU result
+ * @return {undefined}
+ */
+function completeMPU(params, callback) {
+    if (!params || !params.MultipartUpload ||
+        !params.MultipartUpload.Parts || !params.UploadId ||
+        !params.Bucket || !params.Key) {
+        const error = errors.InvalidRequest
+            .customizeDescription('Missing required parameter');
+        logHelper(logger, 'error', 'error in completeMultipartUpload', error);
+        return callback(error);
+    }
+    const partList = params.MultipartUpload.Parts;
+    // verify that the part list is in order
+    if (params.MultipartUpload.Parts.length === 0) {
+        const error = errors.InvalidRequest
+            .customizeDescription('You must specify at least one part');
+        logHelper(logger, 'error', 'error in completeMultipartUpload', error);
+        return callback(error);
+    }
+    for (let ind = 1; ind < partList.length; ++ind) {
+        if (partList[ind - 1].PartNumber >= partList[ind].PartNumber) {
+            logHelper(logger, 'error', 'error in completeMultipartUpload',
+                errors.InvalidPartOrder);
+            return callback(errors.InvalidPartOrder);
+        }
+    }
+    const mpuHelper = new MpuHelper(this); // this === GcpClient
+    return async.waterfall([
+        next => {
+            // first compose: in mpu bucket
+            // max 10,000 => 313 parts
+            // max component count per object 32
+            logger.trace('completeMultipartUpload: compose round 1',
+                { partCount: partList.length });
+            mpuHelper.splitMerge(params, partList, 'mpu1', next);
+        },
+        (numParts, next) => {
+            // second compose: in mpu bucket
+            // max 313 => 10 parts
+            // max component count per object 1024
+            logger.trace('completeMultipartUpload: compose round 2',
+                { partCount: numParts });
+            const parts = createMpuList(params, 'mpu1', numParts);
+            if (parts.length !== numParts) {
+                return next(errors.InternalError);
+            }
+            return mpuHelper.splitMerge(params, parts, 'mpu2', next);
+        },
+        (numParts, next) => mpuHelper.copyToOverflow(numParts, params, next),
+        (numParts, next) => mpuHelper.composeOverflow(numParts, params, next),
+        (result, next) => mpuHelper.generateMpuResult(result, partList, next),
+        (result, aggregateETag, next) =>
+            mpuHelper.copyToMain(result, aggregateETag, params, next),
+        (mpuResult, next) => {
+            const delParams = {
+                Bucket: params.Bucket,
+                MPU: params.MPU,
+                Overflow: params.Overflow,
+                Prefix: createMpuKey(params.Key, params.UploadId),
+            };
+            return mpuHelper.removeParts(delParams,
+                err => next(err, mpuResult));
+        },
+    ], callback);
+}
+
+module.exports = completeMPU;
```
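The two compose rounds follow from GCP's limit of 32 source components per compose call, noted in the comments above. Worked numbers for the maximum case, as a standalone sketch:

```js
// GCP compose accepts at most 32 source objects per call.
const COMPOSE_FANIN = 32;
const roundsFor = parts => {
    const r1 = Math.ceil(parts / COMPOSE_FANIN); // objects after round 1
    const r2 = Math.ceil(r1 / COMPOSE_FANIN);    // objects after round 2
    return [r1, r2];
};
console.log(roundsFor(10000)); // [313, 10] — matches the comments above:
// 10,000 parts -> 313 intermediate objects -> 10 objects, which the
// overflow phase can then merge into one final object.
```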
```diff
@@ -0,0 +1,89 @@
+const async = require('async');
+const request = require('request');
+const { errors } = require('arsenal');
+
+const { getSourceInfo, jsonRespCheck } = require('../GcpUtils');
+
+/**
+ * copyObject - minimum required functionality to perform object copy
+ * for GCP Backend
+ * @param {object} params - update metadata params
+ * @param {string} params.Bucket - bucket name
+ * @param {string} params.Key - object key
+ * @param {string} params.CopySource - source object
+ * @param {function} callback - callback function to call with the copy object
+ * result
+ * @return {undefined}
+ */
+function copyObject(params, callback) {
+    const { CopySource } = params;
+    if (!CopySource || typeof CopySource !== 'string') {
+        return callback(errors.InvalidArgument);
+    }
+    const { sourceBucket, sourceObject } = getSourceInfo(CopySource);
+    if (!sourceBucket || !sourceObject) {
+        return callback(errors.InvalidArgument);
+    }
+    return async.waterfall([
+        next => this.getToken((err, res) => next(err, res)),
+        (token, next) => {
+            if (sourceBucket !== params.Bucket) {
+                return this.rewriteObject(params, next);
+            }
+            const uri = '/storage/v1' +
+                `/b/${encodeURIComponent(sourceBucket)}` +
+                `/o/${encodeURIComponent(sourceObject)}` +
+                '/copyTo' +
+                `/b/${encodeURIComponent(params.Bucket)}` +
+                `/o/${encodeURIComponent(params.Key)}`;
+            return request({
+                method: 'POST',
+                baseUrl: this.config.jsonEndpoint,
+                proxy: this.config.proxy,
+                uri,
+                auth: { bearer: token } },
+            (err, resp, body) =>
+                jsonRespCheck(err, resp, body, 'copyObject', next));
+        },
+        (result, next) => {
+            // if metadata directive is REPLACE then perform a metadata update
+            // otherwise default to COPY
+            const hasEtagMpu = result.metadata && result.metadata['scal-ETag'];
+            if (params.MetadataDirective === 'REPLACE' || hasEtagMpu) {
+                const cleanMetadata = Object.assign({}, result.metadata);
+                delete cleanMetadata['scal-ETag'];
+                const updateParams = {
+                    Bucket: params.Bucket,
+                    Key: params.Key,
+                    Metadata: params.Metadata || cleanMetadata || {},
+                    VersionId: result.generation,
+                };
+                return this.updateMetadata(updateParams, next);
+            }
+            return next(null, result);
+        },
+    ], (err, result) => {
+        if (err) {
+            return callback(err);
+        }
+        const md5Hash = result.md5Hash ?
+            Buffer.from(result.md5Hash, 'base64').toString('hex') : undefined;
+        const resObj = { CopyObjectResult: {} };
+        if (md5Hash !== undefined) {
+            resObj.CopyObjectResult.ETag = md5Hash;
+        }
+        if (result.updated !== undefined) {
+            resObj.CopyObjectResult.LastModified = result.updated;
+        }
+        if (result.size !== undefined && !isNaN(result.size) &&
+        (typeof result.size === 'string' || typeof result.size === 'number')) {
+            resObj.ContentLength = parseInt(result.size, 10);
+        }
+        if (result.generation !== undefined) {
+            resObj.VersionId = result.generation;
+        }
+        return callback(null, resObj);
+    });
+}
+
+module.exports = copyObject;
```
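Same-bucket copies go through the GCS JSON API `copyTo` endpoint, while cross-bucket copies fall back to `rewriteObject`. A standalone sketch of the URI the code above builds (values illustrative):

```js
// URI construction for a same-bucket copy, as in the waterfall above.
const sourceBucket = 'photos';
const sourceObject = 'albums/a b.jpg';
const destKey = 'albums/copy.jpg';
const uri = '/storage/v1' +
    `/b/${encodeURIComponent(sourceBucket)}` +
    `/o/${encodeURIComponent(sourceObject)}` +
    '/copyTo' +
    `/b/${encodeURIComponent(sourceBucket)}` +
    `/o/${encodeURIComponent(destKey)}`;
console.log(uri);
// => /storage/v1/b/photos/o/albums%2Fa%20b.jpg/copyTo/b/photos/o/albums%2Fcopy.jpg
```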
```diff
@@ -0,0 +1,51 @@
+const uuid = require('uuid/v4');
+const { errors } = require('arsenal');
+const { createMpuKey, logger, getPutTagsMetadata } = require('../GcpUtils');
+const { logHelper } = require('../../utils');
+
+/**
+ * createMPU - creates a MPU upload on GCP (sets a 0-byte object placeholder
+ * for the final composed object)
+ * @param {object} params - createMPU param
+ * @param {string} params.Bucket - bucket name
+ * @param {string} params.Key - object key
+ * @param {string} params.Metadata - object Metadata
+ * @param {string} params.ContentType - Content-Type header
+ * @param {string} params.CacheControl - Cache-Control header
+ * @param {string} params.ContentDisposition - Content-Disposition header
+ * @param {string} params.ContentEncoding - Content-Encoding header
+ * @param {function} callback - callback function to call with the generated
+ * upload-id for MPU operations
+ * @return {undefined}
+ */
+function createMPU(params, callback) {
+    // As google cloud does not have a create MPU function,
+    // create an empty 'init' object that will temporarily store the
+    // object metadata and return an upload ID to mimic an AWS MPU
+    if (!params || !params.Bucket || !params.Key) {
+        const error = errors.InvalidRequest
+            .customizeDescription('Missing required parameter');
+        logHelper(logger, 'error', 'error in createMultipartUpload', error);
+        return callback(error);
+    }
+    const uploadId = uuid().replace(/-/g, '');
+    const mpuParams = {
+        Bucket: params.Bucket,
+        Key: createMpuKey(params.Key, uploadId, 'init'),
+        Metadata: params.Metadata,
+        ContentType: params.ContentType,
+        CacheControl: params.CacheControl,
+        ContentDisposition: params.ContentDisposition,
+        ContentEncoding: params.ContentEncoding,
+    };
+    mpuParams.Metadata = getPutTagsMetadata(mpuParams.Metadata, params.Tagging);
+    return this.putObject(mpuParams, err => {
+        if (err) {
+            logHelper(logger, 'error', 'error in createMPU - putObject', err);
+            return callback(err);
+        }
+        return callback(null, { UploadId: uploadId });
+    });
+}
+
+module.exports = createMPU;
```
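Because GCS has no native MPU-initiate call, the upload id is simply a dash-stripped UUID attached to an 'init' placeholder object. A hedged usage sketch (the client variable and bucket name are assumptions; the exposed method name comes from index.js below):

```js
// Hypothetical call through a GCP client instance.
gcpClient.createMultipartUpload({
    Bucket: 'mpu-shadow-bucket', // bucket receiving the 'init' placeholder
    Key: 'big-object',
    ContentType: 'application/octet-stream',
}, (err, data) => {
    if (!err) {
        // data.UploadId is a uuid/v4 with dashes stripped, e.g.
        // '110ec58aa4f24ede9ac2bfacaf7c52a9' (32 hex chars)
        console.log(data.UploadId);
    }
});
```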
```diff
@@ -0,0 +1,73 @@
+const async = require('async');
+const request = require('request');
+const uuid = require('uuid/v4');
+const { errors } = require('arsenal');
+
+const { jsonRespCheck } = require('../GcpUtils');
+
+function formBatchRequest(bucket, deleteList) {
+    let retBody = '';
+    const boundary = uuid().replace(/-/g, '');
+
+    deleteList.forEach(object => {
+        // add boundary
+        retBody += `--${boundary}\r\n`;
+        // add req headers
+        retBody += `Content-Type: application/http\r\n`;
+        retBody += '\r\n';
+        const key = object.Key;
+        const versionId = object.VersionId;
+        let path = `/storage/v1/b/${bucket}/o/${encodeURIComponent(key)}`;
+        if (versionId) path += `?generation=${versionId}`;
+        retBody += `DELETE ${path} HTTP/1.1\r\n`;
+        retBody += '\r\n';
+    });
+    retBody += `--${boundary}\r\n`;
+    return { body: retBody, boundary };
+}
+
+/**
+ * deleteObjects - delete a list of objects
+ * @param {object} params - deleteObjects parameters
+ * @param {string} params.Bucket - bucket location
+ * @param {object} params.Delete - delete config object
+ * @param {object[]} params.Delete.Objects - a list of objects to be deleted
+ * @param {string} params.Delete.Objects[].Key - object key
+ * @param {string} params.Delete.Objects[].VersionId - object version Id, if
+ * not given the master version will be archived
+ * @param {function} callback - callback function to call when a batch response
+ * is returned
+ * @return {undefined}
+ */
+function deleteObjects(params, callback) {
+    if (!params || !params.Delete || !params.Delete.Objects) {
+        return callback(errors.MalformedXML);
+    }
+    return async.waterfall([
+        next => this.getToken((err, res) => next(err, res)),
+        (token, next) => {
+            const { body, boundary } =
+                formBatchRequest(params.Bucket, params.Delete.Objects, token);
+            request({
+                method: 'POST',
+                baseUrl: this.config.jsonEndpoint,
+                proxy: this.config.proxy,
+                uri: '/batch',
+                headers: {
+                    'Content-Type': `multipart/mixed; boundary=${boundary}`,
+                },
+                body,
+                auth: { bearer: token },
+            },
+            // batch response is a string of http bodies
+            // attempt to parse response body
+            // if a body element can be transformed into an object,
+            // then check if the response is an error object
+            // TO-DO: maybe check individual batch op responses
+            (err, resp, body) =>
+                jsonRespCheck(err, resp, body, 'deleteObjects', next));
+        },
+    ], callback);
+}
+
+module.exports = deleteObjects;
```
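formBatchRequest emits one `multipart/mixed` part per key; callers slice the delete list into groups of 1000 before invoking it (see `createDelSlices` in mpuHelper.js below). A worked example of the generated body, derived directly from the code above (boundary value illustrative):

```js
const { body, boundary } = formBatchRequest('photos', [
    { Key: 'a.jpg' },
    { Key: 'b.jpg', VersionId: '1520000000000000' },
]);
// body contains, for each object, a boundary plus an embedded HTTP request:
// --<boundary>\r\n
// Content-Type: application/http\r\n
// \r\n
// DELETE /storage/v1/b/photos/o/a.jpg HTTP/1.1\r\n
// \r\n
// --<boundary>\r\n
// Content-Type: application/http\r\n
// \r\n
// DELETE /storage/v1/b/photos/o/b.jpg?generation=1520000000000000 HTTP/1.1\r\n
// \r\n
// --<boundary>\r\n
```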
```diff
@@ -0,0 +1,24 @@
+const async = require('async');
+
+const { stripTags } = require('../GcpUtils');
+
+function deleteObjectTagging(params, callback) {
+    return async.waterfall([
+        next => this.headObject({
+            Bucket: params.Bucket,
+            Key: params.Key,
+            VersionId: params.VersionId,
+        }, next),
+        (resObj, next) => {
+            const completeMD = stripTags(resObj.Metadata);
+            return next(null, completeMD);
+        },
+        (completeMD, next) => this.updateMetadata({
+            Bucket: params.Bucket,
+            Key: params.Key,
+            VersionId: params.VersionId,
+            Metadata: completeMD,
+        }, next),
+    ], callback);
+}
+module.exports = deleteObjectTagging;
```
```diff
@@ -0,0 +1,26 @@
+const async = require('async');
+
+const { retrieveTags } = require('../GcpUtils');
+
+function getObjectTagging(params, callback) {
+    return async.waterfall([
+        next => {
+            const headParams = {
+                Bucket: params.Bucket,
+                Key: params.Key,
+                VersionId: params.VersionId,
+            };
+            this.headObject(headParams, next);
+        },
+        (resObj, next) => {
+            const TagSet = retrieveTags(resObj.Metadata);
+            const retObj = {
+                VersionId: resObj.VersionId,
+                TagSet,
+            };
+            return next(null, retObj);
+        },
+    ], callback);
+}
+
+module.exports = getObjectTagging;
```
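The tagging trio (put/get/delete) rides tags along as object metadata, presumably keyed with the `gcpTaggingPrefix` ('aws-tag-') added to constants.js above. `processTagSet`, `retrieveTags`, and `stripTags` live in GcpUtils, which this diff does not show, so the following round-trip sketch is an assumption about their behavior, not their actual implementation:

```js
const gcpTaggingPrefix = 'aws-tag-'; // from constants.js above

// Assumed shape: each S3 tag {Key, Value} becomes a metadata entry
// named `${gcpTaggingPrefix}${Key}` on the GCP object.
const tagSetToMetadata = tagSet =>
    tagSet.reduce((md, tag) =>
        Object.assign(md, { [`${gcpTaggingPrefix}${tag.Key}`]: tag.Value }),
        {});

const metadataToTagSet = metadata =>
    Object.keys(metadata)
        .filter(k => k.startsWith(gcpTaggingPrefix))
        .map(k => ({ Key: k.slice(gcpTaggingPrefix.length),
            Value: metadata[k] }));

// round trip
const tags = [{ Key: 'env', Value: 'dev' }];
console.log(metadataToTagSet(tagSetToMetadata(tags)));
// => [ { Key: 'env', Value: 'dev' } ]
```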
```diff
@@ -0,0 +1,19 @@
+module.exports = {
+    // JSON functions
+    copyObject: require('./copyObject'),
+    updateMetadata: require('./updateMetadata'),
+    deleteObjects: require('./deleteObjects'),
+    rewriteObject: require('./rewriteObject'),
+    // mpu functions
+    abortMultipartUpload: require('./abortMPU'),
+    completeMultipartUpload: require('./completeMPU'),
+    createMultipartUpload: require('./createMPU'),
+    listParts: require('./listParts'),
+    uploadPart: require('./uploadPart'),
+    uploadPartCopy: require('./uploadPartCopy'),
+    // object tagging
+    putObject: require('./putObject'),
+    putObjectTagging: require('./putTagging'),
+    getObjectTagging: require('./getTagging'),
+    deleteObjectTagging: require('./deleteTagging'),
+};
```
```diff
@@ -0,0 +1,41 @@
+const { errors } = require('arsenal');
+const { createMpuKey, logger } = require('../GcpUtils');
+const { logHelper } = require('../../utils');
+
+/**
+ * listParts - list uploaded MPU parts
+ * @param {object} params - listParts param
+ * @param {string} params.Bucket - bucket name
+ * @param {string} params.Key - object key
+ * @param {string} params.UploadId - MPU upload id
+ * @param {function} callback - callback function to call with the list of parts
+ * @return {undefined}
+ */
+function listParts(params, callback) {
+    if (!params || !params.UploadId || !params.Bucket || !params.Key) {
+        const error = errors.InvalidRequest
+            .customizeDescription('Missing required parameter');
+        logHelper(logger, 'error', 'error in listParts', error);
+        return callback(error);
+    }
+    if (params.PartNumberMarker && params.PartNumberMarker < 0) {
+        return callback(errors.InvalidArgument);
+    }
+    const mpuParams = {
+        Bucket: params.Bucket,
+        Prefix: createMpuKey(params.Key, params.UploadId, 'parts'),
+        Marker: createMpuKey(params.Key, params.UploadId,
+            params.PartNumberMarker, 'parts'),
+        MaxKeys: params.MaxParts,
+    };
+    return this.listObjects(mpuParams, (err, res) => {
+        if (err) {
+            logHelper(logger, 'error',
+                'error in listParts - listObjects', err);
+            return callback(err);
+        }
+        return callback(null, res);
+    });
+}
+
+module.exports = listParts;
```
@ -0,0 +1,355 @@
|
||||||
|
const async = require('async');
|
||||||
|
const Backoff = require('backo');
|
||||||
|
const { errors } = require('arsenal');
|
||||||
|
const { eachSlice, createMpuKey, createMpuList, logger } =
|
||||||
|
require('../GcpUtils');
|
||||||
|
const { logHelper } = require('../../utils');
|
||||||
|
const { createAggregateETag } =
|
||||||
|
require('../../../../api/apiUtils/object/processMpuParts');
|
||||||
|
|
||||||
|
const BACKOFF_PARAMS = { min: 1000, max: 300000, jitter: 0.1, factor: 1.5 };
|
||||||
|
|
||||||
|
class MpuHelper {
|
||||||
|
constructor(service, options = {}) {
|
||||||
|
this.service = service;
|
||||||
|
this.backoffParams = {
|
||||||
|
min: options.min || BACKOFF_PARAMS.min,
|
||||||
|
max: options.max || BACKOFF_PARAMS.max,
|
||||||
|
jitter: options.jitter || BACKOFF_PARAMS.jitter,
|
||||||
|
factor: options.factor || BACKOFF_PARAMS.factor,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* createDelSlices - creates a list of lists of objects to be deleted via
|
||||||
|
* the a batch operation for MPU. Because batch operation has a limit of
|
||||||
|
* 1000 op per batch, this function creates the list of lists to be process.
|
||||||
|
* @param {object[]} list - a list of objects given to be sliced
|
||||||
|
* @return {object[]} - a list of lists of object to be deleted
|
||||||
|
*/
|
||||||
|
createDelSlices(list) {
|
||||||
|
const retSlice = [];
|
||||||
|
for (let ind = 0; ind < list.length; ind += 1000) {
|
||||||
|
retSlice.push(list.slice(ind, ind + 1000));
|
||||||
|
}
|
||||||
|
return retSlice;
|
||||||
|
}
|
||||||
|
|
||||||
|
_retry(fnName, params, callback) {
|
||||||
|
const backoff = new Backoff(this.backoffParams);
|
||||||
|
const handleFunc = (fnName, params, retry, callback) => {
|
||||||
|
const timeout = backoff.duration();
|
||||||
|
return setTimeout((params, cb) =>
|
||||||
|
this.service[fnName](params, cb), timeout, params,
|
||||||
|
(err, res) => {
|
||||||
|
if (err) {
|
||||||
|
if (err.statusCode === 429 || err.code === 429) {
|
||||||
|
if (fnName === 'composeObject') {
|
||||||
|
logger.trace('composeObject: slow down request',
|
||||||
|
{ retryCount: retry, timeout });
|
||||||
|
} else if (fnName === 'copyObject') {
|
||||||
|
logger.trace('copyObject: slow down request',
|
||||||
|
{ retryCount: retry, timeout });
|
||||||
|
}
|
||||||
|
return handleFunc(
|
||||||
|
fnName, params, retry + 1, callback);
|
||||||
|
}
|
||||||
|
logHelper(logger, 'error', `${fnName} failed`, err);
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
backoff.reset();
|
||||||
|
return callback(null, res);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
handleFunc(fnName, params, 0, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* retryCompose - exponential backoff retry implementation for the compose
|
||||||
|
* operation
|
||||||
|
* @param {object} params - compose object params
|
||||||
|
* @param {function} callback - callback function to call with the result
|
||||||
|
* of the compose operation
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
retryCompose(params, callback) {
|
||||||
|
this._retry('composeObject', params, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* retryCopy - exponential backoff retry implementation for the copy
|
||||||
|
* operation
|
||||||
|
* @param {object} params - copy object params
|
||||||
|
* @param {function} callback - callback function to call with the result
|
||||||
|
* of the copy operation
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
retryCopy(params, callback) {
|
||||||
|
this._retry('copyObject', params, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* splitMerge - breaks down the MPU list of parts to be compose on GCP;
|
||||||
|
* splits partList into chunks of 32 objects, the limit of each compose
|
||||||
|
* operation.
|
||||||
|
* @param {object} params - complete MPU params
|
||||||
|
* @param {string} params.Bucket - bucket name
|
||||||
|
* @param {string} params.MPU - mpu bucket name
|
||||||
|
* @param {string} params.Overflow - overflow bucket name
|
||||||
|
* @param {string} params.Key - object key
|
||||||
|
* @param {string} params.UploadId - MPU upload id
|
||||||
|
* @param {object[]} partList - list of parts for complete multipart upload
|
||||||
|
* @param {string} level - the phase name of the MPU process
|
||||||
|
* @param {function} callback - the callback function to call
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
splitMerge(params, partList, level, callback) {
|
||||||
|
// create composition of slices from the partList array
|
||||||
|
return async.mapLimit(eachSlice.call(partList, 32),
|
||||||
|
this.service._maxConcurrent,
|
||||||
|
(infoParts, cb) => {
|
||||||
|
const mpuPartList = infoParts.Parts.map(item =>
|
||||||
|
({ PartName: item.PartName }));
|
||||||
|
const partNumber = infoParts.PartNumber;
|
||||||
|
const tmpKey =
|
||||||
|
createMpuKey(params.Key, params.UploadId, partNumber, level);
|
||||||
|
const mergedObject = { PartName: tmpKey };
|
||||||
|
if (mpuPartList.length < 2) {
|
||||||
|
logger.trace(
|
||||||
|
'splitMerge: parts are fewer than 2, copy instead');
|
||||||
|
// else just perform a copy
|
||||||
|
const copyParams = {
|
||||||
|
Bucket: params.MPU,
|
||||||
|
Key: tmpKey,
|
||||||
|
CopySource: `${params.MPU}/${mpuPartList[0].PartName}`,
|
||||||
|
};
|
||||||
|
return this.service.copyObject(copyParams, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
logHelper(logger, 'error',
|
||||||
|
'error in splitMerge - copyObject', err);
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
mergedObject.VersionId = res.VersionId;
|
||||||
|
mergedObject.ETag = res.ETag;
|
||||||
|
return cb(null, mergedObject);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
const composeParams = {
|
||||||
|
Bucket: params.MPU,
|
||||||
|
Key: tmpKey,
|
||||||
|
MultipartUpload: { Parts: mpuPartList },
|
||||||
|
};
|
||||||
|
return this.retryCompose(composeParams, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
mergedObject.VersionId = res.VersionId;
|
||||||
|
mergedObject.ETag = res.ETag;
|
||||||
|
return cb(null, mergedObject);
|
||||||
|
});
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return callback(null, res.length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* removeParts - remove all objects created to perform a multipart upload
|
||||||
|
* @param {object} params - remove parts params
|
||||||
|
* @param {string} params.Bucket - bucket name
|
||||||
|
* @param {string} params.MPU - mpu bucket name
|
||||||
|
* @param {string} params.Overflow - overflow bucket name
|
||||||
|
* @param {string} params.Key - object key
|
||||||
|
* @param {string} params.UploadId - MPU upload id
|
||||||
|
* @param {function} callback - callback function to call
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
removeParts(params, callback) {
|
||||||
|
const _getObjectVersions = (bucketType, callback) => {
|
||||||
|
logger.trace(`remove all parts ${bucketType} bucket`);
|
||||||
|
let partList = [];
|
||||||
|
let isTruncated = true;
|
||||||
|
let nextMarker;
|
||||||
|
const bucket = params[bucketType];
|
||||||
|
return async.whilst(() => isTruncated, next => {
|
||||||
|
const listParams = {
|
||||||
|
Bucket: bucket,
|
||||||
|
Prefix: params.Prefix,
|
||||||
|
Marker: nextMarker,
|
||||||
|
};
|
||||||
|
return this.service.listVersions(listParams, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
logHelper(logger, 'error', 'error in ' +
|
||||||
|
`removeParts - listVersions ${bucketType}`, err);
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
nextMarker = res.NextMarker;
|
||||||
|
isTruncated = res.IsTruncated;
|
||||||
|
partList = partList.concat(res.Versions);
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
}, err => callback(err, partList));
|
||||||
|
};
|
||||||
|
|
||||||
|
        const _deleteObjects = (bucketType, partsList, callback) => {
            logger.trace(`successfully listed ${bucketType} parts`, {
                objectCount: partsList.length,
            });
            const delSlices = this.createDelSlices(partsList);
            const bucket = params[bucketType];
            return async.each(delSlices, (list, next) => {
                const delParams = {
                    Bucket: bucket,
                    Delete: { Objects: list },
                };
                return this.service.deleteObjects(delParams, err => {
                    if (err) {
                        logHelper(logger, 'error',
                            `error deleting ${bucketType} object`, err);
                    }
                    return next(err);
                });
            }, err => callback(err));
        };

        return async.parallel([
            done => async.waterfall([
                next => _getObjectVersions('MPU', next),
                (parts, next) => _deleteObjects('MPU', parts, next),
            ], err => done(err)),
            done => async.waterfall([
                next => _getObjectVersions('Overflow', next),
                (parts, next) => _deleteObjects('Overflow', parts, next),
            ], err => done(err)),
        ], err => callback(err));
    }

    copyToOverflow(numParts, params, callback) {
        // copy phase: in overflow bucket
        // resetting component count by moving item between
        // different region/class buckets
        logger.trace('completeMultipartUpload: copy to overflow',
            { partCount: numParts });
        const parts = createMpuList(params, 'mpu2', numParts);
        if (parts.length !== numParts) {
            return callback(errors.InternalError);
        }
        return async.eachLimit(parts, 10, (infoParts, cb) => {
            const partName = infoParts.PartName;
            const partNumber = infoParts.PartNumber;
            const overflowKey = createMpuKey(
                params.Key, params.UploadId, partNumber, 'overflow');
            const rewriteParams = {
                Bucket: params.Overflow,
                Key: overflowKey,
                CopySource: `${params.MPU}/${partName}`,
            };
            logger.trace('rewrite object', { rewriteParams });
            this.service.rewriteObject(rewriteParams, cb);
        }, err => {
            if (err) {
                logHelper(logger, 'error', 'error in ' +
                    'createMultipartUpload - rewriteObject', err);
                return callback(err);
            }
            return callback(null, numParts);
        });
    }

    composeOverflow(numParts, params, callback) {
        // final compose: in overflow bucket
        // number of parts to compose <= 10
        // perform final compose in overflow bucket
        logger.trace('completeMultipartUpload: overflow compose');
        const parts = createMpuList(params, 'overflow', numParts);
        const partList = parts.map(item => (
            { PartName: item.PartName }));
        if (partList.length < 2) {
            logger.trace(
                'fewer than 2 parts in overflow, skip to copy phase');
            return callback(null, partList[0].PartName);
        }
        const composeParams = {
            Bucket: params.Overflow,
            Key: createMpuKey(params.Key, params.UploadId, 'final'),
            MultipartUpload: { Parts: partList },
        };
        return this.retryCompose(composeParams, err => {
            if (err) {
                return callback(err);
            }
            return callback(null, null);
        });
    }

    /*
     * Create MPU Aggregate ETag
     */
    generateMpuResult(res, partList, callback) {
        const concatETag = partList.reduce((prev, curr) =>
            prev + curr.ETag.substring(1, curr.ETag.length - 1), '');
        const aggregateETag = createAggregateETag(concatETag, partList);
        return callback(null, res, aggregateETag);
    }

    copyToMain(res, aggregateETag, params, callback) {
        // move object from overflow bucket into the main bucket
        // retrieve initial metadata then compose the object
        const copySource = res ||
            createMpuKey(params.Key, params.UploadId, 'final');
        return async.waterfall([
            next => {
                // retrieve metadata from init object in mpu bucket
                const headParams = {
                    Bucket: params.MPU,
                    Key: createMpuKey(params.Key, params.UploadId,
                        'init'),
                };
                logger.trace('retrieving object metadata');
                return this.service.headObject(headParams, (err, res) => {
                    if (err) {
                        logHelper(logger, 'error',
                            'error in createMultipartUpload - headObject',
                            err);
                        return next(err);
                    }
                    return next(null, res.Metadata);
                });
            },
            (metadata, next) => {
                // copy the final object into the main bucket
                const copyMetadata = Object.assign({}, metadata);
                copyMetadata['scal-ETag'] = aggregateETag;
                const copyParams = {
                    Bucket: params.Bucket,
                    Key: params.Key,
                    Metadata: copyMetadata,
                    MetadataDirective: 'REPLACE',
                    CopySource: `${params.Overflow}/${copySource}`,
                };
                logger.trace('copyParams', { copyParams });
                this.retryCopy(copyParams, (err, res) => {
                    if (err) {
                        logHelper(logger, 'error', 'error in ' +
                            'createMultipartUpload - final copyObject',
                            err);
                        return next(err);
                    }
                    const mpuResult = {
                        Bucket: params.Bucket,
                        Key: params.Key,
                        VersionId: res.VersionId,
                        ContentLength: res.ContentLength,
                        ETag: `"${aggregateETag}"`,
                    };
                    return next(null, mpuResult);
                });
            },
        ], callback);
    }
}

module.exports = MpuHelper;
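
Reviewer note: taken together, copyToOverflow, composeOverflow, generateMpuResult, and copyToMain form the tail of the complete-MPU pipeline. A minimal sketch of how a caller might chain them with async.waterfall; the helper instance, part list, and params here are placeholders, not the actual call site in GcpApis:

const async = require('async');

// Hypothetical wiring of the helper phases; the real call site lives in
// the completeMultipartUpload API and may pass additional arguments.
function finishMpu(helper, numParts, partList, params, callback) {
    async.waterfall([
        next => helper.copyToOverflow(numParts, params, next),
        (count, next) => helper.composeOverflow(count, params, next),
        (res, next) => helper.generateMpuResult(res, partList, next),
        (res, etag, next) => helper.copyToMain(res, etag, params, next),
    ], callback);
}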
@ -0,0 +1,11 @@
const { getPutTagsMetadata } = require('../GcpUtils');

function putObject(params, callback) {
    const putParams = Object.assign({}, params);
    putParams.Metadata = getPutTagsMetadata(putParams.Metadata, params.Tagging);
    delete putParams.Tagging;
    // error handling will be done by the actual putObject request
    return this.putObjectReq(putParams, callback);
}

module.exports = putObject;
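
For illustration: getPutTagsMetadata (defined in GcpUtils later in this diff) folds an S3-style Tagging query string into GCP custom metadata under the gcpTaggingPrefix ('aws-tag-', see constants.js). A rough sketch of the transformation, with hypothetical values:

// Hypothetical input params for the putObject wrapper above.
const params = {
    Bucket: 'main-bucket',
    Key: 'photo.jpg',
    Metadata: { color: 'blue' },
    Tagging: 'project=demo&owner=alice',
};
// After getPutTagsMetadata, the metadata sent to GCP would look like:
// {
//     color: 'blue',
//     'aws-tag-project': 'demo',
//     'aws-tag-owner': 'alice',
// }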
@ -0,0 +1,33 @@
const async = require('async');
const { errors } = require('arsenal');

const { processTagSet } = require('../GcpUtils');

function putObjectTagging(params, callback) {
    if (!params.Tagging || !params.Tagging.TagSet) {
        return callback(errors.MissingParameter);
    }
    const tagRes = processTagSet(params.Tagging.TagSet);
    if (tagRes instanceof Error) {
        return callback(tagRes);
    }
    return async.waterfall([
        next => this.headObject({
            Bucket: params.Bucket,
            Key: params.Key,
            VersionId: params.VersionId,
        }, next),
        (resObj, next) => {
            const completeMD = Object.assign({}, resObj.Metadata, tagRes);
            return next(null, completeMD);
        },
        (completeMD, next) => this.updateMetadata({
            Bucket: params.Bucket,
            Key: params.Key,
            VersionId: params.VersionId,
            Metadata: completeMD,
        }, next),
    ], callback);
}

module.exports = putObjectTagging;
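
A hedged usage sketch: since putObjectTagging is mixed into the GCP client via GcpApis, a caller passes an S3-style TagSet, which processTagSet converts to prefixed metadata before updateMetadata writes it back. All names and values below are placeholders:

// Assumes gcpClient is a configured GCP service instance from this diff.
gcpClient.putObjectTagging({
    Bucket: 'main-bucket',
    Key: 'photo.jpg',
    VersionId: '1513190000000000', // hypothetical GCP generation number
    Tagging: { TagSet: [{ Key: 'project', Value: 'demo' }] },
}, (err, res) => {
    if (err) {
        // MissingParameter if TagSet is absent, InvalidTag on bad keys/values
        return console.error(err);
    }
    return console.log('tags stored as aws-tag-* metadata', res);
});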
@ -0,0 +1,77 @@
const async = require('async');
const request = require('request');
const { errors } = require('arsenal');

const { getSourceInfo, jsonRespCheck } = require('../GcpUtils');

/**
 * rewriteObject - copy an object between buckets of different storage classes
 * or regions, as copyObject has inconsistent results when performed on large
 * objects across different buckets
 * @param {object} params - JSON request parameters
 * @param {string} params.SourceBucket - copy source bucket
 * @param {string} params.SourceObject - copy source object
 * @param {string} params.SourceVersionId - specify source version
 * @param {string} params.DestinationBucket - copy destination bucket
 * @param {string} params.DestinationObject - copy destination object
 * @param {string} params.RewriteToken - token to pick up where a previous
 * rewrite left off
 * @param {function} callback - callback function to call with object rewrite
 * results
 * @return {undefined}
 */
function rewriteObject(params, callback) {
    const { CopySource } = params;
    if (!CopySource) {
        return callback(errors.MissingParameter);
    }
    if (typeof CopySource !== 'string') {
        return callback(errors.InvalidArgument);
    }
    const { sourceBucket, sourceObject } = getSourceInfo(CopySource);
    if (!sourceBucket || !sourceObject) {
        return callback(errors.InvalidArgument);
    }
    return async.waterfall([
        next => this.getToken((err, res) => next(err, res)),
        (token, next) => {
            const uri = '/storage/v1' +
                `/b/${encodeURIComponent(sourceBucket)}` +
                `/o/${encodeURIComponent(sourceObject)}` +
                '/rewriteTo' +
                `/b/${encodeURIComponent(params.Bucket)}` +
                `/o/${encodeURIComponent(params.Key)}`;
            const qs = {
                sourceGeneration: params.SourceVersionId,
                rewriteToken: params.RewriteToken,
            };
            let rewriteDone = false;
            return async.whilst(() => !rewriteDone, done => {
                request({
                    method: 'POST',
                    baseUrl: this.config.jsonEndpoint,
                    proxy: this.config.proxy,
                    uri,
                    qs,
                    auth: { bearer: token } },
                (err, resp, body) =>
                    jsonRespCheck(err, resp, body, 'rewriteObject',
                        (err, res) => {
                            if (err) {
                                return done(err);
                            }
                            rewriteDone = res.done;
                            qs.rewriteToken = res.rewriteToken;
                            return done(null, res);
                        }));
            }, (err, result) => {
                if (err) {
                    return next(err);
                }
                return next(null, result.resource);
            });
        },
    ], callback);
}

module.exports = rewriteObject;
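
GCP's rewrite endpoint may return done: false with a rewriteToken for large objects; the async.whilst loop above keeps re-posting with the token until the copy completes. A usage sketch with placeholder names:

// Assumes gcpClient mixes in rewriteObject via GcpApis.
gcpClient.rewriteObject({
    Bucket: 'overflow-bucket',
    Key: 'photo.jpg-uploadId/overflow/00001',
    CopySource: 'mpu-bucket/photo.jpg-uploadId/mpu2/00001',
}, (err, resource) => {
    if (err) {
        return console.error('rewrite failed', err);
    }
    // resource is the rewritten object's JSON API representation
    return console.log(resource);
});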
@ -0,0 +1,44 @@
const async = require('async');
const request = require('request');

const { jsonRespCheck } = require('../GcpUtils');

/**
 * updateMetadata - update the metadata of an object. Only used when
 * changes to an object's metadata should not affect the version id.
 * Examples: objectTagging, in which creation/deletion of metadata is
 * required for GCP, and copyObject.
 * @param {object} params - update metadata params
 * @param {string} params.Bucket - bucket name
 * @param {string} params.Key - object key
 * @param {string} params.VersionId - object version id
 * @param {function} callback - callback function to call with the object result
 * @return {undefined}
 */
function updateMetadata(params, callback) {
    async.waterfall([
        next => this.getToken((err, res) => next(err, res)),
        (token, next) => {
            const uri = '/storage/v1' +
                `/b/${encodeURIComponent(params.Bucket)}` +
                `/o/${encodeURIComponent(params.Key)}`;
            const body = {
                acl: {},
                metadata: params.Metadata,
                generation: params.VersionId,
            };
            request({
                method: 'PUT',
                baseUrl: this.config.jsonEndpoint,
                proxy: this.config.proxy,
                uri,
                body,
                json: true,
                auth: { bearer: token } },
            (err, resp, body) =>
                jsonRespCheck(err, resp, body, 'updateMetadata', next));
        },
    ], callback);
}

module.exports = updateMetadata;
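
Because this PUT-style update appears to replace the object's full metadata map, callers merge the existing metadata first (as putObjectTagging above does). A minimal sketch, with placeholder values:

// Assumes gcpClient mixes in updateMetadata via GcpApis. Passing the
// object's generation as VersionId keeps the version id stable.
gcpClient.updateMetadata({
    Bucket: 'main-bucket',
    Key: 'photo.jpg',
    VersionId: '1513190000000000',
    Metadata: { color: 'blue', 'aws-tag-project': 'demo' },
}, err => {
    if (err) {
        console.error('metadata update failed', err);
    }
});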
@ -0,0 +1,43 @@
const { errors } = require('arsenal');
const { getPartNumber, createMpuKey, logger } = require('../GcpUtils');
const { logHelper } = require('../../utils');

/**
 * uploadPart - upload part
 * @param {object} params - upload part params
 * @param {string} params.Bucket - bucket name
 * @param {string} params.Key - object key
 * @param {function} callback - callback function to call
 * @return {undefined}
 */
function uploadPart(params, callback) {
    if (!params || !params.UploadId || !params.Bucket || !params.Key) {
        const error = errors.InvalidRequest
            .customizeDescription('Missing required parameter');
        logHelper(logger, 'error', 'error in uploadPart', error);
        return callback(error);
    }
    const partNumber = getPartNumber(params.PartNumber);
    if (!partNumber) {
        const error = errors.InvalidArgument
            .customizeDescription('PartNumber is not a number');
        logHelper(logger, 'error', 'error in uploadPart', error);
        return callback(error);
    }
    const mpuParams = {
        Bucket: params.Bucket,
        Key: createMpuKey(params.Key, params.UploadId, partNumber),
        Body: params.Body,
        ContentLength: params.ContentLength,
    };
    return this.putObject(mpuParams, (err, res) => {
        if (err) {
            logHelper(logger, 'error',
                'error in uploadPart - putObject', err);
            return callback(err);
        }
        return callback(null, res);
    });
}

module.exports = uploadPart;
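
Given the key layout produced by createMpuKey (defined in GcpUtils later in this diff), a part upload lands in the MPU bucket under a padded, sortable name. A sketch with hypothetical values (gcpClient, buf, and callback are placeholders):

// createMpuKey('photo.jpg', 'abc123', 7) === 'photo.jpg-abc123/parts/00007'
// so the call below puts buf into 'mpu-bucket' under that padded key:
gcpClient.uploadPart({
    Bucket: 'mpu-bucket',
    Key: 'photo.jpg',
    UploadId: 'abc123',
    PartNumber: 7,
    Body: buf,
    ContentLength: buf.length,
}, callback);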
@ -0,0 +1,37 @@
const { errors } = require('arsenal');
const { getPartNumber, createMpuKey, logger } = require('../GcpUtils');
const { logHelper } = require('../../utils');

/**
 * uploadPartCopy - upload part copy
 * @param {object} params - upload part copy params
 * @param {string} params.Bucket - bucket name
 * @param {string} params.Key - object key
 * @param {string} params.CopySource - source object to copy
 * @param {function} callback - callback function to call
 * @return {undefined}
 */
function uploadPartCopy(params, callback) {
    if (!params || !params.UploadId || !params.Bucket || !params.Key ||
        !params.CopySource) {
        const error = errors.InvalidRequest
            .customizeDescription('Missing required parameter');
        logHelper(logger, 'error', 'error in uploadPartCopy', error);
        return callback(error);
    }
    const partNumber = getPartNumber(params.PartNumber);
    if (!partNumber) {
        const error = errors.InvalidArgument
            .customizeDescription('PartNumber is not a number');
        logHelper(logger, 'error', 'error in uploadPartCopy', error);
        return callback(error);
    }
    const mpuParams = {
        Bucket: params.Bucket,
        Key: createMpuKey(params.Key, params.UploadId, partNumber),
        CopySource: params.CopySource,
    };
    return this.copyObject(mpuParams, callback);
}

module.exports = uploadPartCopy;
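
uploadPartCopy mirrors uploadPart but sources the bytes with copyObject; the destination key uses the same parts layout. A sketch with placeholder names:

// Assumes gcpClient mixes in uploadPartCopy via GcpApis.
gcpClient.uploadPartCopy({
    Bucket: 'mpu-bucket',
    Key: 'photo.jpg',
    UploadId: 'abc123',
    PartNumber: 2,
    CopySource: 'main-bucket/existing-object',
}, (err, res) => {
    // on success, res carries the copy result for the new part object
});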
@ -0,0 +1,409 @@
const async = require('async');
const assert = require('assert');
const stream = require('stream');
const { errors } = require('arsenal');
const { minimumAllowedPartSize, maximumAllowedPartCount } =
    require('../../../../constants');
const { createMpuList, logger } = require('./GcpUtils');
const { logHelper } = require('../utils');


function sliceFn(body, size) {
    const array = [];
    let partNumber = 1;
    for (let ind = 0; ind < body.length; ind += size) {
        array.push({
            Body: body.slice(ind, ind + size),
            PartNumber: partNumber++,
        });
    }
    return array;
}

class GcpManagedUpload {
    /**
     * GcpManagedUpload - class to mimic the upload method in AWS-SDK
     * To-Do: implement retry on failure like S3's upload
     * @param {GcpService} service - client object
     * @param {object} params - upload params
     * @param {string} params.Bucket - bucket name
     * @param {string} params.MPU - mpu bucket name
     * @param {string} params.Overflow - overflow bucket name
     * @param {string} params.Key - object key
     * @param {object} options - config settings for the GcpManagedUpload object
     * @param {number} options.partSize - set object chunk size
     * @param {number} options.queueSize - set the number of concurrent uploads
     * @return {object} - return a GcpManagedUpload object
     */
    constructor(service, params, options = {}) {
        this.service = service;
        this.params = params;
        this.mainBucket =
            this.params.Bucket || this.service.config.mainBucket;
        this.mpuBucket =
            this.params.MPU || this.service.config.mpuBucket;
        this.overflowBucket =
            this.params.Overflow || this.service.config.overflowBucket;

        this.partSize = minimumAllowedPartSize;
        this.queueSize = options.queueSize || 4;
        this.validateBody();
        this.setPartSize();
        // multipart information
        this.parts = {};
        this.uploadedParts = 0;
        this.activeParts = 0;
        this.partBuffers = [];
        this.partQueue = [];
        this.partBufferLength = 0;
        this.totalChunkedBytes = 0;
        this.partNumber = 0;
    }

    /**
     * validateBody - validate that body contains data to upload. If body is
     * not of type stream, it must then be of either string or buffer. If
     * string, convert to a Buffer type and split into chunks if body is
     * large enough
     * @return {undefined}
     */
    validateBody() {
        this.body = this.params.Body;
        assert(this.body, errors.MissingRequestBodyError.customizeDescription(
            'Missing request body'));
        this.totalBytes = this.params.ContentLength;
        if (this.body instanceof stream) {
            assert.strictEqual(typeof this.totalBytes, 'number',
                errors.MissingContentLength.customizeDescription(
                    'If body is a stream, ContentLength must be provided'));
        } else {
            if (typeof this.body === 'string') {
                this.body = Buffer.from(this.body);
            }
            this.totalBytes = this.body.byteLength;
            assert(this.totalBytes, errors.InternalError.customizeDescription(
                'Unable to perform upload'));
        }
    }

    setPartSize() {
        const newPartSize =
            Math.ceil(this.totalBytes / maximumAllowedPartCount);
        if (newPartSize > this.partSize) this.partSize = newPartSize;
        this.totalParts = Math.ceil(this.totalBytes / this.partSize);
        if (this.body instanceof Buffer && this.totalParts > 1) {
            this.slicedParts = sliceFn(this.body, this.partSize);
        }
    }

    /**
     * cleanUp - function that is called if GcpManagedUpload fails at any
     * point, to perform clean up of used resources. Ends the request by
     * calling an internal callback function
     * @param {Error} err - Error object
     * @return {undefined}
     */
    cleanUp(err) {
        // is only called when an error happens
        if (this.failed || this.completed) {
            return undefined;
        }
        this.failed = true;
        if (this.uploadId) {
            // if the MPU was successfully created
            return this.abortMPU(mpuErr => {
                if (mpuErr) {
                    logHelper(logger, 'error',
                        'GcpManagedUpload: abortMPU failed in cleanup');
                }
                return this.callback(err);
            });
        }
        return this.callback(err);
    }

    /**
     * abortMPU - function that is called to remove a multipart upload
     * @param {function} callback - callback function to call to complete the
     * upload
     * @return {undefined}
     */
    abortMPU(callback) {
        const params = {
            Bucket: this.mainBucket,
            MPU: this.mpuBucket,
            Overflow: this.overflowBucket,
            UploadId: this.uploadId,
            Key: this.params.Key,
        };
        this.service.abortMultipartUpload(params, callback);
    }

    /**
     * completeUpload - function that is called to complete a multipart
     * upload
     * @return {undefined}
     */
    completeUpload() {
        if (this.failed || this.completed) {
            return undefined;
        }
        const params = {
            Bucket: this.mainBucket,
            MPU: this.mpuBucket,
            Overflow: this.overflowBucket,
            Key: this.params.Key,
            UploadId: this.uploadId,
            MultipartUpload: {},
        };
        params.MultipartUpload.Parts =
            createMpuList(params, 'parts', this.uploadedParts)
                .map(item =>
                    Object.assign(item, { ETag: this.parts[item.PartNumber] }));
        return this.service.completeMultipartUpload(params,
            (err, res) => {
                if (err) {
                    return this.cleanUp(err);
                }
                this.completed = true;
                return this.callback(null, res);
            });
    }

    /**
     * send - function that is called to execute the method request
     * @param {function} callback - callback function to be called and stored
     * at the completion of the method
     * @return {undefined}
     */
    send(callback) {
        if (this.called || this.callback) {
            return undefined;
        }
        this.failed = false;
        this.called = true;
        this.callback = callback;
        if (this.totalBytes <= this.partSize) {
            return this.uploadSingle();
        }
        if (this.slicedParts) {
            return this.uploadBufferSlices();
        }
        if (this.body instanceof stream) {
            // stream type
            this.body.on('error', err => this.cleanUp(err))
                .on('readable', () => this.chunkStream())
                .on('end', () => {
                    this.isDoneChunking = true;
                    this.chunkStream();

                    if (this.isDoneChunking && this.uploadedParts >= 1 &&
                        this.uploadedParts === this.totalParts) {
                        this.completeUpload();
                    }
                });
        }
        return undefined;
    }

    /**
     * uploadSingle - perform a regular put object upload if the object is
     * small enough
     * @return {undefined}
     */
    uploadSingle() {
        if (this.failed || this.completed) {
            return undefined;
        }
        // use putObject to upload the single part object
        const params = Object.assign({}, this.params);
        params.Bucket = this.mainBucket;
        delete params.MPU;
        delete params.Overflow;
        return this.service.putObject(params, (err, res) => {
            if (err) {
                return this.cleanUp(err);
            }
            // return results from a putObject request
            this.completed = true;
            return this.callback(null, res);
        });
    }

    /**
     * uploadBufferSlices - perform a multipart upload for body of type string
     * or Buffer.
     * @return {undefined}
     */
    uploadBufferSlices() {
        if (this.failed || this.completed) {
            return undefined;
        }
        if (this.slicedParts.length <= 1 && this.totalParts) {
            // there is only one part
            return this.uploadSingle();
        }
        // multiple slices
        return async.series([
            // createMultipartUpload
            next => {
                const params = this.params;
                params.Bucket = this.mpuBucket;
                this.service.createMultipartUpload(params, (err, res) => {
                    if (!err) {
                        this.uploadId = res.UploadId;
                    }
                    return next(err);
                });
            },
            next => async.eachLimit(this.slicedParts, this.queueSize,
                (uploadPart, done) => {
                    const params = {
                        Bucket: this.mpuBucket,
                        Key: this.params.Key,
                        UploadId: this.uploadId,
                        Body: uploadPart.Body,
                        PartNumber: uploadPart.PartNumber,
                    };
                    this.service.uploadPart(params, (err, res) => {
                        if (!err) {
                            this.parts[uploadPart.PartNumber] = res.ETag;
                            this.uploadedParts++;
                        }
                        return done(err);
                    });
                }, next),
        ], err => {
            if (err) {
                return this.cleanUp(new Error(
                    'GcpManagedUpload: unable to complete upload'));
            }
            return this.completeUpload();
        });
    }

    /**
     * chunkStream - read stream up until the max chunk size then call an
     * uploadPart method on that chunk. If more than the chunk size has been
     * read, move the extra bytes into a queue for the next read.
     * @return {undefined}
     */
    chunkStream() {
        const buf = this.body.read(this.partSize - this.partBufferLength) ||
            this.body.read();

        if (buf) {
            this.partBuffers.push(buf);
            this.partBufferLength += buf.length;
            this.totalChunkedBytes += buf.length;
        }

        let pbuf;
        if (this.partBufferLength >= this.partSize) {
            pbuf = Buffer.concat(this.partBuffers);
            this.partBuffers = [];
            this.partBufferLength = 0;

            if (pbuf.length > this.partSize) {
                const rest = pbuf.slice(this.partSize);
                this.partBuffers.push(rest);
                this.partBufferLength += rest.length;
                pbuf = pbuf.slice(0, this.partSize);
            }
            this.processChunk(pbuf);
        }

        // when chunking the last part
        if (this.isDoneChunking && !this.completeChunking) {
            this.completeChunking = true;
            pbuf = Buffer.concat(this.partBuffers);
            this.partBuffers = [];
            this.partBufferLength = 0;
            if (pbuf.length > 0) {
                this.processChunk(pbuf);
            } else {
                if (this.uploadedParts === 0) {
                    // this is a 0-byte object
                    this.uploadSingle();
                }
            }
        }

        this.body.read(0);
    }

    /**
     * processChunk - create a multipart request if one does not exist;
     * otherwise, call uploadChunk to upload a chunk
     * @param {Buffer} chunk - bytes to be uploaded
     * @return {undefined}
     */
    processChunk(chunk) {
        const partNumber = ++this.partNumber;
        if (!this.uploadId) {
            // if multipart upload does not exist
            if (!this.multipartReq) {
                const params = this.params;
                params.Bucket = this.mpuBucket;
                this.multipartReq =
                    this.service.createMultipartUpload(params, (err, res) => {
                        if (err) {
                            return this.cleanUp(err);
                        }
                        this.uploadId = res.UploadId;
                        this.uploadChunk(chunk, partNumber);
                        if (this.partQueue.length > 0) {
                            this.partQueue.forEach(
                                part => this.uploadChunk(...part));
                        }
                        return undefined;
                    });
            } else {
                this.partQueue.push([chunk, partNumber]);
            }
        } else {
            // queues chunks for upload
            this.uploadChunk(chunk, partNumber);
            this.activeParts++;
            if (this.activeParts < this.queueSize) {
                this.chunkStream();
            }
        }
    }

    /**
     * uploadChunk - perform the partUpload
     * @param {Buffer} chunk - bytes to be uploaded
     * @param {number} partNumber - upload object part number
     * @return {undefined}
     */
    uploadChunk(chunk, partNumber) {
        if (this.failed || this.completed) {
            return undefined;
        }
        const params = {
            Bucket: this.mpuBucket,
            Key: this.params.Key,
            UploadId: this.uploadId,
            PartNumber: partNumber,
            Body: chunk,
            ContentLength: chunk.length,
        };
        return this.service.uploadPart(params, (err, res) => {
            if (err) {
                return this.cleanUp(err);
            }
            this.parts[partNumber] = res.ETag;
            this.uploadedParts++;
            this.activeParts--;
            if (this.totalParts === this.uploadedParts &&
                this.isDoneChunking) {
                return this.completeUpload();
            }
            return this.chunkStream();
        });
    }
}

module.exports = GcpManagedUpload;
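
As a worked example of setPartSize: for a 100 GiB stream, ceil(107374182400 / 10000) is about 10.74 MB, which exceeds the 5 MiB minimumAllowedPartSize, so partSize grows until the body fits within 10,000 parts. A hedged usage sketch, mirroring the upload() wrapper added in the GcpService diff that follows (names are placeholders):

// Assumes `gcp` is a configured GCP service instance and `readStream`
// is a readable stream of known length.
const uploader = new GcpManagedUpload(gcp, {
    Bucket: 'main-bucket',
    MPU: 'mpu-bucket',
    Overflow: 'overflow-bucket',
    Key: 'backup.tar',
    Body: readStream,
    ContentLength: 107374182400, // required when Body is a stream
});
uploader.send((err, res) => {
    if (err) {
        return console.error('upload failed', err);
    }
    return console.log('upload complete', res);
});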
@ -5,7 +5,16 @@ const { errors } = require('arsenal');
const Service = AWS.Service;

const GcpSigner = require('./GcpSigner');
const GcpApis = require('./GcpApis');
const GcpManagedUpload = require('./GcpManagedUpload');

/**
 * genAuth - create a google authorizer for generating request tokens
 * @param {object} authParams - params that contain the credentials for
 * generating the authorizer
 * @param {function} callback - callback function to call with the authorizer
 * @return {undefined}
 */
function genAuth(authParams, callback) {
    async.tryEach([
        function authKeyFile(next) {

@ -29,9 +38,17 @@ function genAuth(authParams, callback) {
AWS.apiLoader.services.gcp = {};
const GCP = Service.defineService('gcp', ['2017-11-01'], {
    _maxConcurrent: 5,
    _maxRetries: 5,
    _jsonAuth: null,
    _authParams: null,

    /**
     * getToken - generate a token for authorizing JSON API requests
     * @param {function} callback - callback function to call with the
     * generated token
     * @return {undefined}
     */
    getToken(callback) {
        if (this._jsonAuth) {
            return this._jsonAuth.getToken(callback);

@ -63,11 +80,27 @@ const GCP = Service.defineService('gcp', ['2017-11-01'], {
        }
    },

    upload(params, options, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: upload not implemented'));
    },
    // Implemented APIs
    // Bucket API
    getBucket(params, callback) {
        return this.listObjects(params, callback);
    },

    // Object APIs
    upload(params, callback) {
        try {
            const uploader = new GcpManagedUpload(this, params);
            return uploader.send(callback);
        } catch (err) {
            return callback(err);
        }
    },

    putObjectCopy(params, callback) {
        return this.copyObject(params, callback);
    },

    // TO-DO: Implement the following APIs
    // Service API
    listBuckets(params, callback) {
        return callback(errors.NotImplemented

@ -85,19 +118,14 @@ const GCP = Service.defineService('gcp', ['2017-11-01'], {
            .customizeDescription('GCP: deleteBucket not implemented'));
    },

    headBucket(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: headBucket not implemented'));
    },

    listObjects(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: listObjects not implemented'));
    },

    listObjectVersions(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: listObjecVersions not implemented'));
            .customizeDescription('GCP: listObjectVersions not implemented'));
    },

    createBucket(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: createBucket not implemented'));
    },

    putBucket(params, callback) {

@ -146,36 +174,6 @@ const GCP = Service.defineService('gcp', ['2017-11-01'], {
    },

    // Object APIs
    headObject(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: headObject not implemented'));
    },

    putObject(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: putObject not implemented'));
    },

    getObject(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: getObject not implemented'));
    },

    deleteObject(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: deleteObject not implemented'));
    },

    deleteObjects(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: deleteObjects not implemented'));
    },

    copyObject(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: copyObject not implemented'));
    },

    putObjectTagging(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: putObjectTagging not implemented'));

@ -195,45 +193,10 @@ const GCP = Service.defineService('gcp', ['2017-11-01'], {
        return callback(errors.NotImplemented
            .customizeDescription('GCP: getObjectAcl not implemented'));
    },

    // Multipart upload
    abortMultipartUpload(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription(
                'GCP: abortMultipartUpload not implemented'));
    },

    createMultipartUpload(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription(
                'GCP: createMultipartUpload not implemented'));
    },

    completeMultipartUpload(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription(
                'GCP: completeMultipartUpload not implemented'));
    },

    uploadPart(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription(
                'GCP: uploadPart not implemented'));
    },

    uploadPartCopy(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription(
                'GCP: uploadPartCopy not implemented'));
    },

    listParts(params, callback) {
        return callback(errors.NotImplemented
            .customizeDescription(
                'GCP: listParts not implemented'));
    },
});

Object.assign(GCP.prototype, GcpApis);

Object.defineProperty(AWS.apiLoader.services.gcp, '2017-11-01', {
    get: function get() {
        const model = require('./gcp-2017-11-01.api.json');
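
With GcpApis mixed into GCP.prototype, the remaining stubs above are progressively replaced by real implementations, and callers use the service like any AWS SDK client. A sketch, assuming the client is constructed like other AWS.Service subclasses (endpoint and credential wiring here are placeholders, not the project's actual config path):

const gcpClient = new GCP({ endpoint: 'https://storage.googleapis.com' });
gcpClient.upload({
    Bucket: 'main-bucket',
    MPU: 'mpu-bucket',
    Overflow: 'overflow-bucket',
    Key: 'photo.jpg',
    Body: 'hello world',
}, (err, res) => {
    // small bodies short-circuit to a single putObject inside
    // GcpManagedUpload; larger ones go through the MPU path
});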
@ -1 +1,205 @@
module.exports = {};
const werelogs = require('werelogs');
const { errors, s3middleware } = require('arsenal');

const _config = require('../../../Config').config;
const { logHelper } = require('../utils');
const { gcpTaggingPrefix } = require('../../../../constants');

werelogs.configure({
    level: _config.log.logLevel,
    dump: _config.log.dumpLevel,
});

const logger = new werelogs.Logger('gcpUtil');

class JsonError extends Error {
    constructor(type, code, desc) {
        super(type);
        this.code = code;
        this.description = desc;
        this[type] = true;
    }
}

function jsonRespCheck(err, resp, body, method, callback) {
    if (err) {
        logHelper(logger, 'error',
            `${method}: error in json method`,
            errors.InternalError.customizeDescription('json method failed'));
        return callback(errors.InternalError
            .customizeDescription('error in JSON Request'));
    }
    if (resp.statusCode >= 300) {
        return callback(
            new JsonError(resp.statusMessage, resp.statusCode));
    }
    let res;
    try {
        res = body && typeof body === 'string' ?
            JSON.parse(body) : body;
    } catch (error) { res = undefined; }
    if (res && res.error && res.error.code >= 300) {
        return callback(
            new JsonError(res.error.message, res.error.code));
    }
    return callback(null, res);
}

function eachSlice(size) {
    this.array = [];
    let partNumber = 1;
    for (let ind = 0; ind < this.length; ind += size) {
        this.array.push({
            Parts: this.slice(ind, ind + size),
            PartNumber: partNumber++,
        });
    }
    return this.array;
}

function getSourceInfo(CopySource) {
    const source =
        CopySource.startsWith('/') ? CopySource.slice(1) : CopySource;
    const sourceArray = source.split(/\/(.+)/);
    const sourceBucket = sourceArray[0];
    const sourceObject = sourceArray[1];
    return { sourceBucket, sourceObject };
}

function getPaddedPartNumber(number) {
    return `000000${number}`.substr(-5);
}

function getPartNumber(number) {
    if (isNaN(number)) {
        return undefined;
    }
    if (typeof number === 'string') {
        return parseInt(number, 10);
    }
    return number;
}

function createMpuKey(key, uploadId, partNumberArg, fileNameArg) {
    let partNumber = partNumberArg;
    let fileName = fileNameArg;

    if (typeof partNumber === 'string' && fileName === undefined) {
        fileName = partNumber;
        partNumber = null;
    }
    const paddedNumber = getPaddedPartNumber(partNumber);
    if (fileName && typeof fileName === 'string') {
        // if partNumber is given, return a "full file path"
        // else return a "directory path"
        return partNumber ? `${key}-${uploadId}/${fileName}/${paddedNumber}` :
            `${key}-${uploadId}/${fileName}`;
    }
    if (partNumber && typeof partNumber === 'number') {
        // filename wasn't passed as an argument. Create default
        return `${key}-${uploadId}/parts/${paddedNumber}`;
    }
    // returns a "directory path"
    return `${key}-${uploadId}/`;
}

function createMpuList(params, level, size) {
    // populate and return a parts list for compose
    const retList = [];
    for (let i = 1; i <= size; ++i) {
        retList.push({
            PartName: createMpuKey(params.Key, params.UploadId, i, level),
            PartNumber: i,
        });
    }
    return retList;
}

function processTagSet(tagSet = []) {
    if (tagSet.length > 10) {
        return errors.BadRequest
            .customizeDescription('Object tags cannot be greater than 10');
    }
    let error = undefined;
    const tagAsMeta = {};
    const taggingDict = {};
    tagSet.every(tag => {
        const { Key: key, Value: value } = tag;
        if (key.length > 128) {
            error = errors.InvalidTag
                .customizeDescription(
                    'The TagKey you have provided is invalid');
            return false;
        }
        if (value.length > 256) {
            error = errors.InvalidTag
                .customizeDescription(
                    'The TagValue you have provided is invalid');
            return false;
        }
        if (taggingDict[key]) {
            error = errors.InvalidTag
                .customizeDescription(
                    'Cannot provide multiple Tags with the same key');
            return false;
        }
        tagAsMeta[`${gcpTaggingPrefix}${key}`] = value;
        taggingDict[key] = true;
        return true;
    });
    if (error) {
        return error;
    }
    return tagAsMeta;
}

function stripTags(metadata = {}) {
    const retMD = Object.assign({}, metadata);
    Object.keys(retMD).forEach(key => {
        if (key.startsWith(gcpTaggingPrefix)) {
            delete retMD[key];
        }
    });
    return retMD;
}

function retrieveTags(metadata = {}) {
    const retTagSet = [];
    Object.keys(metadata).forEach(key => {
        if (key.startsWith(gcpTaggingPrefix)) {
            retTagSet.push({
                Key: key.slice(gcpTaggingPrefix.length),
                Value: metadata[key],
            });
        }
    });
    return retTagSet;
}

function getPutTagsMetadata(metadata, tagging = '') {
    let retMetadata = metadata || {};
    retMetadata = stripTags(retMetadata);
    const tagObj = s3middleware.tagging.parseTagFromQuery(tagging);
    Object.keys(tagObj).forEach(header => {
        const prefixed = `${gcpTaggingPrefix}${header}`.toLowerCase();
        retMetadata[prefixed] = tagObj[header];
    });
    return retMetadata;
}

module.exports = {
    // functions
    eachSlice,
    createMpuKey,
    createMpuList,
    getSourceInfo,
    jsonRespCheck,
    processTagSet,
    stripTags,
    retrieveTags,
    getPutTagsMetadata,
    getPartNumber,
    // util objects
    JsonError,
    logger,
};
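
To make the key scheme concrete, createMpuKey produces the following shapes (all values hypothetical):

createMpuKey('photo.jpg', 'abc123', 7, 'parts');
// => 'photo.jpg-abc123/parts/00007'
createMpuKey('photo.jpg', 'abc123', 'init');
// => 'photo.jpg-abc123/init' (a string third arg is treated as fileName)
createMpuKey('photo.jpg', 'abc123', 3);
// => 'photo.jpg-abc123/parts/00003' (default 'parts' location)
createMpuKey('photo.jpg', 'abc123');
// => 'photo.jpg-abc123/' (directory-style prefix)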
@ -12,6 +12,803 @@
    "timestampFormat": "rfc822",
    "uid": "gcp-2017-11-01"
  },
  "operations": {},
  "shapes": {}
}
  "operations": {
    "HeadBucket": {
      "http": {
        "method": "HEAD",
        "requestUri": "/{Bucket}"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "MetaVersionId": {
            "location": "header",
            "locationName": "x-goog-metageneration"
          }
        }
      }
    },
    "listObjects": {
      "http": {
        "method": "GET",
        "requestUri": "/{Bucket}"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "Delimiter": {
            "location": "querystring",
            "locationName": "delimiter"
          },
          "Marker": {
            "location": "querystring",
            "locationName": "marker"
          },
          "MaxKeys": {
            "location": "querystring",
            "locationName": "max-keys",
            "type": "integer"
          },
          "Prefix": {
            "location": "querystring",
            "locationName": "prefix"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "IsTruncated": {
            "type": "boolean"
          },
          "Marker": {},
          "NextMarker": {},
          "Contents": {
            "shape": "ContentsShape"
          },
          "Name": {},
          "Prefix": {},
          "Delimiter": {},
          "MaxKeys": {
            "type": "integer"
          },
          "CommonPrefixes": {
            "shape": "CommonPrefixShape"
          }
        }
      }
    },
    "listVersions": {
      "http": {
        "method": "GET",
        "requestUri": "/{Bucket}?versions"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "Delimiter": {
            "location": "querystring",
            "locationName": "delimiter"
          },
          "Marker": {
            "location": "querystring",
            "locationName": "marker"
          },
          "MaxKeys": {
            "location": "querystring",
            "locationName": "max-keys",
            "type": "integer"
          },
          "Prefix": {
            "location": "querystring",
            "locationName": "prefix"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "IsTruncated": {
            "type": "boolean"
          },
          "Marker": {},
          "NextMarker": {},
          "Versions": {
            "locationName": "Version",
            "shape": "ContentsShape"
          },
          "Name": {},
          "Prefix": {},
          "Delimiter": {},
          "MaxKeys": {
            "type": "integer"
          },
          "CommonPrefixes": {
            "shape": "CommonPrefixShape"
          }
        }
      }
    },
    "PutBucketVersioning": {
      "http": {
        "method": "PUT",
        "requestUri": "/{Bucket}?versioning"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket",
          "VersioningConfiguration"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "ContentMD5": {
            "location": "header",
            "locationName": "Content-MD5"
          },
          "VersioningConfiguration": {
            "locationName": "VersioningConfiguration",
            "type": "structure",
            "members": {
              "Status": {}
            }
          }
        },
        "payload": "VersioningConfiguration"
      }
    },
    "GetBucketVersioning": {
      "http": {
        "method": "GET",
        "requestUri": "/{Bucket}?versioning"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "Status": {}
        }
      }
    },
    "HeadObject": {
      "http": {
        "method": "HEAD",
        "requestUri": "/{Bucket}/{Key+}"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket",
          "Key"
        ],
        "members": {
          "Date": {
            "location": "header",
            "locationName": "Date",
            "type": "timestamp"
          },
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "IfMatch": {
            "location": "header",
            "locationName": "If-Match"
          },
          "IfModifiedSince": {
            "location": "header",
            "locationName": "If-Modified-Since",
            "type": "timestamp"
          },
          "IfNoneMatch": {
            "location": "header",
            "locationName": "If-None-Match"
          },
          "IfUnmodifiedSince": {
            "location": "header",
            "locationName": "If-Unmodified-Since",
            "type": "timestamp"
          },
          "Key": {
            "location": "uri",
            "locationName": "Key"
          },
          "Range": {
            "location": "header",
            "locationName": "Range"
          },
          "VersionId": {
            "location": "querystring",
            "locationName": "generation"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "Date": {
            "location": "header",
            "locationName": "Date",
            "type": "timestamp"
          },
          "AcceptRanges": {
            "location": "header",
            "locationName": "accept-ranges"
          },
          "Expiration": {
            "location": "header",
            "locationName": "x-goog-expiration"
          },
          "LastModified": {
            "location": "header",
            "locationName": "Last-Modified",
            "type": "timestamp"
          },
          "ContentLength": {
            "location": "header",
            "locationName": "Content-Length",
            "type": "long"
          },
          "ContentHash": {
            "location": "header",
            "locationName": "x-goog-hash"
          },
          "ETag": {
            "location": "header",
            "locationName": "ETag"
          },
          "VersionId": {
            "location": "header",
            "locationName": "x-goog-generation"
          },
          "MetaVersionId": {
            "location": "header",
            "locationName": "x-goog-metageneration"
          },
          "CacheControl": {
            "location": "header",
            "locationName": "Cache-Control"
          },
          "ContentDisposition": {
            "location": "header",
            "locationName": "Content-Disposition"
          },
          "ContentEncoding": {
            "location": "header",
            "locationName": "Content-Encoding"
          },
          "ContentLanguage": {
            "location": "header",
            "locationName": "Content-Language"
          },
          "ContentType": {
            "location": "header",
            "locationName": "Content-Type"
          },
          "Expires": {
            "location": "header",
            "locationName": "Expires",
            "type": "timestamp"
          },
          "Metadata": {
            "shape": "MetadataShape",
            "location": "headers",
            "locationName": "x-goog-meta-"
          },
          "StorageClass": {
            "location": "headers",
            "locationName": "x-goog-storage-class"
          }
        }
      }
    },
    "PutObjectReq": {
      "http": {
        "method": "PUT",
        "requestUri": "/{Bucket}/{Key+}"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket",
          "Key"
        ],
        "members": {
          "Date": {
            "location": "header",
            "locationName": "Date",
            "type": "timestamp"
          },
          "ACL": {
            "location": "header",
            "locationName": "x-goog-acl"
          },
          "Body": {
            "streaming": true,
            "type": "blob"
          },
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "CacheControl": {
            "location": "header",
            "locationName": "Cache-Control"
          },
          "ContentDisposition": {
            "location": "header",
            "locationName": "Content-Disposition"
          },
          "ContentEncoding": {
            "location": "header",
            "locationName": "Content-Encoding"
          },
          "ContentLanguage": {
            "location": "header",
            "locationName": "Content-Language"
          },
          "ContentLength": {
            "location": "header",
            "locationName": "Content-Length",
            "type": "long"
          },
          "ContentMD5": {
            "location": "header",
            "locationName": "Content-MD5"
          },
          "ContentType": {
            "location": "header",
            "locationName": "Content-Type"
          },
          "Expires": {
            "location": "header",
            "locationName": "Expires",
            "type": "timestamp"
          },
          "Key": {
            "location": "uri",
            "locationName": "Key"
          },
          "Metadata": {
            "shape": "MetadataShape",
            "location": "headers",
            "locationName": "x-goog-meta-"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        },
        "payload": "Body"
      },
      "output": {
        "type": "structure",
        "members": {
          "Expiration": {
            "location": "header",
            "locationName": "x-goog-expiration"
          },
          "ETag": {
            "location": "header",
            "locationName": "ETag"
          },
          "ContentHash": {
            "location": "header",
            "locationName": "x-goog-hash"
          },
          "VersionId": {
            "location": "header",
            "locationName": "x-goog-generation"
          },
          "MetaVersionId": {
            "location": "header",
            "locationName": "x-goog-metageneration"
          }
        }
      }
    },
    "GetObject": {
      "http": {
        "method": "GET",
        "requestUri": "/{Bucket}/{Key+}"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket",
          "Key"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "IfMatch": {
            "location": "header",
            "locationName": "If-Match"
          },
          "IfModifiedSince": {
            "location": "header",
            "locationName": "If-Modified-Since",
            "type": "timestamp"
          },
          "IfNoneMatch": {
            "location": "header",
            "locationName": "If-None-Match"
          },
          "IfUnmodifiedSince": {
            "location": "header",
            "locationName": "If-Unmodified-Since",
            "type": "timestamp"
          },
          "Key": {
            "location": "uri",
            "locationName": "Key"
          },
          "Range": {
            "location": "header",
            "locationName": "Range"
          },
          "ResponseCacheControl": {
            "location": "querystring",
            "locationName": "response-cache-control"
          },
          "ResponseContentDisposition": {
            "location": "querystring",
            "locationName": "response-content-disposition"
          },
          "ResponseContentEncoding": {
            "location": "querystring",
            "locationName": "response-content-encoding"
          },
          "ResponseContentLanguage": {
            "location": "querystring",
            "locationName": "response-content-language"
          },
          "ResponseContentType": {
            "location": "querystring",
            "locationName": "response-content-type"
          },
          "ResponseExpires": {
            "location": "querystring",
            "locationName": "response-expires",
            "type": "timestamp"
          },
          "VersionId": {
            "location": "querystring",
            "locationName": "generation"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "Body": {
            "streaming": true,
            "type": "blob"
          },
          "AcceptRanges": {
            "location": "header",
            "locationName": "accept-ranges"
          },
          "Expiration": {
            "location": "header",
            "locationName": "x-goog-expiration"
          },
          "LastModified": {
            "location": "header",
            "locationName": "Last-Modified",
            "type": "timestamp"
          },
          "ContentLength": {
            "location": "header",
            "locationName": "Content-Length",
            "type": "long"
          },
          "ETag": {
            "location": "header",
            "locationName": "ETag"
          },
          "VersionId": {
            "location": "header",
            "locationName": "x-goog-generation"
          },
          "MetaVersionId": {
            "location": "header",
            "locationName": "x-goog-metageneration"
          },
          "CacheControl": {
            "location": "header",
            "locationName": "Cache-Control"
          },
          "ContentDisposition": {
            "location": "header",
            "locationName": "Content-Disposition"
          },
          "ContentEncoding": {
            "location": "header",
            "locationName": "Content-Encoding"
          },
          "ContentLanguage": {
            "location": "header",
            "locationName": "Content-Language"
          },
          "ContentRange": {
            "location": "header",
            "locationName": "Content-Range"
          },
          "ContentType": {
            "location": "header",
            "locationName": "Content-Type"
          },
          "ContentHash": {
            "location": "header",
            "locationName": "x-goog-hash"
          },
          "Expires": {
            "location": "header",
            "locationName": "Expires",
            "type": "timestamp"
          },
          "WebsiteRedirectLocation": {
            "location": "header",
            "locationName": "x-goog-website-redirect-location"
          },
          "ServerSideEncryption": {
            "location": "header",
            "locationName": "x-goog-server-side-encryption"
          },
          "Metadata": {
            "shape": "MetadataShape",
            "location": "headers",
            "locationName": "x-goog-meta-"
          },
          "StorageClass": {
            "location": "header",
            "locationName": "x-goog-storage-class"
          }
        },
        "payload": "Body"
      }
    },
    "DeleteObject": {
      "http": {
        "method": "DELETE",
        "requestUri": "/{Bucket}/{Key+}"
      },
      "input": {
        "type": "structure",
        "required": [
          "Bucket",
          "Key"
        ],
        "members": {
          "Bucket": {
            "location": "uri",
            "locationName": "Bucket"
          },
          "Key": {
            "location": "uri",
            "locationName": "Key"
          },
          "VersionId": {
            "location": "querystring",
            "locationName": "generation"
          },
          "ProjectId": {
            "location": "header",
            "locationName": "x-goog-project-id"
          }
        }
      },
      "output": {
        "type": "structure",
        "members": {
          "VersionId": {
            "location": "header",
            "locationName": "x-goog-generation"
          }
        }
      }
    },
|
"ComposeObject": {
|
||||||
|
"http": {
|
||||||
|
"method": "PUT",
|
||||||
|
"requestUri": "/{Bucket}/{Key+}?compose"
|
||||||
|
},
|
||||||
|
"input": {
|
||||||
|
"type": "structure",
|
||||||
|
"required": [
|
||||||
|
"Bucket",
|
||||||
|
"Key"
|
||||||
|
],
|
||||||
|
"members": {
|
||||||
|
"Bucket": {
|
||||||
|
"location": "uri",
|
||||||
|
"locationName": "Bucket"
|
||||||
|
},
|
||||||
|
"Source": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "x-goog-copy-source"
|
||||||
|
},
|
||||||
|
"Key": {
|
||||||
|
"location": "uri",
|
||||||
|
"locationName": "Key"
|
||||||
|
},
|
||||||
|
"MetadataDirective": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "x-goog-metadata-directive"
|
||||||
|
},
|
||||||
|
"ContentDisposition": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "Content-Disposition"
|
||||||
|
},
|
||||||
|
"Content-Encoding": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "Content-Encoding"
|
||||||
|
},
|
||||||
|
"MultipartUpload": {
|
||||||
|
"locationName": "ComposeRequest",
|
||||||
|
"type": "structure",
|
||||||
|
"members": {
|
||||||
|
"Parts": {
|
||||||
|
"locationName": "Component",
|
||||||
|
"type": "list",
|
||||||
|
"member": {
|
||||||
|
"type": "structure",
|
||||||
|
"members": {
|
||||||
|
"PartName": {
|
||||||
|
"locationName": "Name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flattened": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"Metadata": {
|
||||||
|
"shape": "MetadataShape",
|
||||||
|
"location": "headers",
|
||||||
|
"locationName": "x-goog-meta-"
|
||||||
|
},
|
||||||
|
"ProjectId": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "x-goog-project-id"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"payload": "MultipartUpload"
|
||||||
|
},
|
||||||
|
"output": {
|
||||||
|
"type": "structure",
|
||||||
|
"members": {
|
||||||
|
"Expiration": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "x-goog-expiration"
|
||||||
|
},
|
||||||
|
"ETag": {},
|
||||||
|
"VersioId": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "x-goog-generation"
|
||||||
|
},
|
||||||
|
"MetaVersionId": {
|
||||||
|
"location": "header",
|
||||||
|
"locationName": "x-goog-metageneration"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"shapes": {
|
||||||
|
"MetadataShape": {
|
||||||
|
"type": "map",
|
||||||
|
"key": {},
|
||||||
|
"value": {}
|
||||||
|
},
|
||||||
|
"OwnerShape": {
|
||||||
|
"locationName": "Owner",
|
||||||
|
"type": "structure",
|
||||||
|
"members": {
|
||||||
|
"ID": {},
|
||||||
|
"Name": {}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ContentsShape": {
|
||||||
|
"type": "list",
|
||||||
|
"member": {
|
||||||
|
"type": "structure",
|
||||||
|
"members": {
|
||||||
|
"Key": {},
|
||||||
|
"LastModified": {
|
||||||
|
"type": "timestamp"
|
||||||
|
},
|
||||||
|
"ETag": {},
|
||||||
|
"Size": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"StorageClass": {},
|
||||||
|
"Owner": {
|
||||||
|
"shape": "OwnerShape"
|
||||||
|
},
|
||||||
|
"VersionId": {
|
||||||
|
"locationName": "Generation"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flattened": true
|
||||||
|
},
|
||||||
|
"CommonPrefixShape": {
|
||||||
|
"type": "list",
|
||||||
|
"member": {
|
||||||
|
"type": "structure",
|
||||||
|
"members": {
|
||||||
|
"Prefix": {}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flattened": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
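The model above follows the AWS SDK for JavaScript's JSON service-description format, with `x-goog-*` header names standing in for their `x-amz-*` equivalents. A minimal sketch of how such a model could be bound to a client class with aws-sdk v2 (the api version string and file path here are illustrative assumptions, not necessarily what this PR uses):

const AWS = require('aws-sdk');

// Register a custom 'gcp' service backed by the JSON model above, so the
// standard AWS.Request/AWS.Response machinery drives GCP's XML API.
const GCP = AWS.Service.defineService('gcp', ['2017-11-01']);
Object.defineProperty(AWS.apiLoader.services.gcp, '2017-11-01', {
    get: function get() {
        // Hypothetical file name for the model shown above.
        return require('./gcp-2017-11-01.api.json');
    },
    enumerable: true,
    configurable: true,
});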
@ -1,5 +1,17 @@
const async = require('async');
const { errors, s3middleware } = require('arsenal');
const MD5Sum = s3middleware.MD5Sum;

const { GCP, GcpUtils } = require('./GCP');
const { createMpuKey } = GcpUtils;
const AwsClient = require('./AwsClient');
const { prepareStream } = require('../../api/apiUtils/object/prepareStream');
const { logHelper, removeQuotes } = require('./utils');
const { config } = require('../../Config');

const missingVerIdInternalError = errors.InternalError.customizeDescription(
    'Invalid state. Please ensure versioning is enabled ' +
    'in GCP for the location constraint and try again.');

/**
 * Class representing a Google Cloud Storage backend object
@ -23,16 +35,267 @@ class GcpClient extends AwsClient {
    constructor(config) {
        super(config);
        this.clientType = 'gcp';
        this.type = 'GCP';
        this._gcpBucketName = config.bucketName;
        this._mpuBucketName = config.mpuBucket;
        this._overflowBucketName = config.overflowBucket;
        this._createGcpKey = this._createAwsKey.bind(this);
        this._gcpParams = Object.assign(this._s3Params, {
            mainBucket: this._gcpBucketName,
            mpuBucket: this._mpuBucketName,
            overflowBucket: this._overflowBucketName,
            jsonEndpoint: config.jsonEndpoint,
            proxy: config.proxy,
            authParams: config.authParams,
        });
        this._client = new GCP(this._gcpParams);
        this.listParts = undefined;
    }

    /**
     * healthcheck - the gcp health requires checking multiple buckets:
     * main, mpu, and overflow buckets
     * @param {string} location - location name
     * @param {function} callback - callback function to call with the bucket
     * statuses
     * @return {undefined}
     */
    healthcheck(location, callback) {
        const checkBucketHealth = (bucket, cb) => {
            let bucketResp;
            this._client.headBucket({ Bucket: bucket }, err => {
                if (err) {
                    bucketResp = {
                        gcpBucket: bucket,
                        error: err,
                        external: true };
                    return cb(null, bucketResp);
                }
                bucketResp = {
                    gcpBucket: bucket,
                    message: 'Congrats! You own the bucket',
                };
                return cb(null, bucketResp);
            });
        };
        const bucketList = [
            this._gcpBucketName,
            this._mpuBucketName,
            this._overflowBucketName,
        ];
        async.map(bucketList, checkBucketHealth, (err, results) => {
            const gcpResp = {};
            gcpResp[location] = {
                buckets: [],
            };
            if (err) {
                // err should always be undefined
                return callback(errors.InternalFailure
                    .customizeDescription('Unable to perform health check'));
            }
            results.forEach(bucketResp => {
                if (bucketResp.error) {
                    gcpResp[location].error = true;
                }
                gcpResp[location].buckets.push(bucketResp);
            });
            return callback(null, gcpResp);
        });
    }

    createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
        cacheControl, contentDisposition, contentEncoding, log, callback) {
        const metaHeadersTrimmed = {};
        Object.keys(metaHeaders).forEach(header => {
            if (header.startsWith('x-amz-meta-')) {
                const headerKey = header.substring(11);
                metaHeadersTrimmed[headerKey] = metaHeaders[header];
            }
        });
        Object.assign(metaHeaders, metaHeadersTrimmed);
        const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch);
        const params = {
            Bucket: this._mpuBucketName,
            Key: gcpKey,
            Metadata: metaHeaders,
            ContentType: contentType,
            CacheControl: cacheControl,
            ContentDisposition: contentDisposition,
            ContentEncoding: contentEncoding,
        };
        return this._client.createMultipartUpload(params, (err, mpuResObj) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `GCP: ${err.message}`)
                );
            }
            return callback(null, mpuResObj);
        });
    }

    completeMPU(jsonList, mdInfo, key, uploadId, bucketName, log, callback) {
        const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch);
        const partArray = [];
        const partList = jsonList.Part;
        for (let i = 0; i < partList.length; ++i) {
            const partObj = partList[i];
            if (!partObj.PartNumber || !partObj.ETag) {
                return callback(errors.MalformedXML);
            }
            const number = partObj.PartNumber[0];
            const partNumber = typeof number === 'string' ?
                parseInt(number, 10) : number;
            const partParams = {
                PartName: createMpuKey(gcpKey, uploadId, partNumber),
                PartNumber: partNumber,
                ETag: partObj.ETag[0],
            };
            partArray.push(partParams);
        }
        const mpuParams = {
            Bucket: this._gcpBucketName,
            MPU: this._mpuBucketName,
            Overflow: this._overflowBucketName,
            Key: gcpKey,
            UploadId: uploadId,
            MultipartUpload: { Parts: partArray },
        };
        const completeObjData = { key: gcpKey };
        return this._client.completeMultipartUpload(mpuParams,
        (err, completeMpuRes) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend on ' +
                'completeMPU', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `GCP: ${err.message}`)
                );
            }
            if (!completeMpuRes.VersionId) {
                logHelper(log, 'error', 'missing version id for data ' +
                'backend object', missingVerIdInternalError,
                    this._dataStoreName, this.clientType);
                return callback(missingVerIdInternalError);
            }
            completeObjData.eTag = removeQuotes(completeMpuRes.ETag);
            completeObjData.dataStoreVersionId = completeMpuRes.VersionId;
            completeObjData.contentLength = completeMpuRes.ContentLength;
            return callback(null, completeObjData);
        });
    }

    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
        partNumber, bucketName, log, callback) {
        let hashedStream = stream;
        if (request) {
            const partStream = prepareStream(request, streamingV4Params,
                log, callback);
            hashedStream = new MD5Sum();
            partStream.pipe(hashedStream);
        }

        const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch);
        const params = {
            Bucket: this._mpuBucketName,
            Key: gcpKey,
            UploadId: uploadId,
            Body: hashedStream,
            ContentLength: size,
            PartNumber: partNumber };
        return this._client.uploadPart(params, (err, partResObj) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend ' +
                'on uploadPart', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `GCP: ${err.message}`)
                );
            }
            const noQuotesETag = removeQuotes(partResObj.ETag);
            const dataRetrievalInfo = {
                key: gcpKey,
                dataStoreType: 'gcp',
                dataStoreName: this._dataStoreName,
                dataStoreETag: noQuotesETag,
            };
            return callback(null, dataRetrievalInfo);
        });
    }

    uploadPartCopy(request, gcpSourceKey, sourceLocationConstraintName, log,
        callback) {
        const destBucketName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destGcpKey = this._createGcpKey(destBucketName, destObjectKey,
            this._bucketMatch);

        const sourceGcpBucketName =
            config.getGcpBucketNames(sourceLocationConstraintName).bucketName;

        const uploadId = request.query.uploadId;
        const partNumber = request.query.partNumber;
        const copySourceRange = request.headers['x-amz-copy-source-range'];

        if (copySourceRange) {
            return callback(errors.NotImplemented
                .customizeDescription('Error returned from ' +
                `${this.clientType}: copySourceRange not implemented`)
            );
        }

        const params = {
            Bucket: this._mpuBucketName,
            CopySource: `${sourceGcpBucketName}/${gcpSourceKey}`,
            Key: destGcpKey,
            UploadId: uploadId,
            PartNumber: partNumber,
        };
        return this._client.uploadPartCopy(params, (err, res) => {
            if (err) {
                if (err.code === 'AccessDenied') {
                    logHelper(log, 'error', 'Unable to access ' +
                    `${sourceGcpBucketName} GCP bucket`, err,
                        this._dataStoreName, this.clientType);
                    return callback(errors.AccessDenied
                        .customizeDescription('Error: Unable to access ' +
                        `${sourceGcpBucketName} GCP bucket`)
                    );
                }
                logHelper(log, 'error', 'error from data backend on ' +
                'uploadPartCopy', err, this._dataStoreName);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `GCP: ${err.message}`)
                );
            }
            const eTag = removeQuotes(res.CopyObjectResult.ETag);
            return callback(null, eTag);
        });
    }

    abortMPU(key, uploadId, bucketName, log, callback) {
        const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch);
        const getParams = {
            Bucket: this._gcpBucketName,
            MPU: this._mpuBucketName,
            Overflow: this._overflowBucketName,
            Key: gcpKey,
            UploadId: uploadId,
        };
        return this._client.abortMultipartUpload(getParams, err => {
            if (err) {
                logHelper(log, 'error', 'err from data backend ' +
                'on abortMPU', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `GCP: ${err.message}`)
                );
            }
            return callback();
        });
    }
}
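completeMPU above maps S3's part list onto GCP compose components by deriving each part's object name with createMpuKey. The real helper lives in ./GCP; a hypothetical re-implementation of the naming scheme it implies (the actual key layout may differ):

// Illustration only: parts of an MPU live in the MPU shadow bucket under
// a prefix derived from the object key and upload ID, so that listing,
// composing, and aborting can address them as a group.
function createMpuKey(key, uploadId, partNumberOrPhase) {
    return `${key}-${uploadId}/${partNumberOrPhase}`;
}

// e.g. the 'init' marker object checked by the functional tests below:
// createMpuKey('somekey', 'abc123', 'init') => 'somekey-abc123/init'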
|
@ -2,26 +2,35 @@ const async = require('async');
|
||||||
const constants = require('../../../constants');
|
const constants = require('../../../constants');
|
||||||
const { config } = require('../../../lib/Config');
|
const { config } = require('../../../lib/Config');
|
||||||
|
|
||||||
const awsHealth = {
|
/* eslint-disable camelcase */
|
||||||
response: undefined,
|
const backendHealth = {
|
||||||
time: 0,
|
aws_s3: {
|
||||||
};
|
response: undefined,
|
||||||
const azureHealth = {
|
time: 0,
|
||||||
response: undefined,
|
},
|
||||||
time: 0,
|
azure: {
|
||||||
|
response: undefined,
|
||||||
|
time: 0,
|
||||||
|
},
|
||||||
|
gcp: {
|
||||||
|
reponse: undefined,
|
||||||
|
time: 0,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
/* eslint-enable camelcase */
|
||||||
|
|
||||||
const utils = {
|
const utils = {
|
||||||
logHelper(log, level, description, error, dataStoreName) {
|
logHelper(log, level, description, error, dataStoreName, backendType) {
|
||||||
log[level](description, { error: error.message,
|
log[level](description, { error: error.message,
|
||||||
errorName: error.name, dataStoreName });
|
errorName: error.name, dataStoreName, backendType });
|
||||||
},
|
},
|
||||||
// take off the 'x-amz-meta-'
|
// take off the 'x-amz-meta-'
|
||||||
trimXMetaPrefix(obj) {
|
trimXMetaPrefix(obj) {
|
||||||
const newObj = {};
|
const newObj = {};
|
||||||
Object.keys(obj).forEach(key => {
|
const metaObj = obj || {};
|
||||||
|
Object.keys(metaObj).forEach(key => {
|
||||||
const newKey = key.substring(11);
|
const newKey = key.substring(11);
|
||||||
newObj[newKey] = obj[key];
|
newObj[newKey] = metaObj[key];
|
||||||
});
|
});
|
||||||
return newObj;
|
return newObj;
|
||||||
},
|
},
|
||||||
|
@ -53,8 +62,8 @@ const utils = {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* externalBackendCopy - Server side copy should only be allowed:
|
* externalBackendCopy - Server side copy should only be allowed:
|
||||||
* 1) if source object and destination object are both on aws or both
|
* 1) if source object and destination object are both on aws, both
|
||||||
* on azure.
|
* on azure, or both on gcp
|
||||||
* 2) if azure to azure, must be the same storage account since Azure
|
* 2) if azure to azure, must be the same storage account since Azure
|
||||||
* copy outside of an account is async
|
* copy outside of an account is async
|
||||||
* 3) if the source bucket is not an encrypted bucket and the
|
* 3) if the source bucket is not an encrypted bucket and the
|
||||||
|
@ -84,6 +93,7 @@ const utils = {
|
||||||
config.getLocationConstraintType(locationConstraintDest);
|
config.getLocationConstraintType(locationConstraintDest);
|
||||||
return locationTypeMatch && (isSameBucket || bucketsNotEncrypted) &&
|
return locationTypeMatch && (isSameBucket || bucketsNotEncrypted) &&
|
||||||
(sourceLocationConstraintType === 'aws_s3' ||
|
(sourceLocationConstraintType === 'aws_s3' ||
|
||||||
|
sourceLocationConstraintType === 'gcp' ||
|
||||||
(sourceLocationConstraintType === 'azure' &&
|
(sourceLocationConstraintType === 'azure' &&
|
||||||
config.isSameAzureAccount(locationConstraintSrc,
|
config.isSameAzureAccount(locationConstraintSrc,
|
||||||
locationConstraintDest)));
|
locationConstraintDest)));
|
||||||
|
@ -91,7 +101,7 @@ const utils = {
|
||||||
|
|
||||||
checkExternalBackend(clients, locations, type, flightCheckOnStartUp,
|
checkExternalBackend(clients, locations, type, flightCheckOnStartUp,
|
||||||
externalBackendHealthCheckInterval, cb) {
|
externalBackendHealthCheckInterval, cb) {
|
||||||
const checkStatus = type === 'aws_s3' ? awsHealth : azureHealth;
|
const checkStatus = backendHealth[type] || {};
|
||||||
if (locations.length === 0) {
|
if (locations.length === 0) {
|
||||||
return process.nextTick(cb, null, []);
|
return process.nextTick(cb, null, []);
|
||||||
}
|
}
|
||||||
|
|
|
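checkExternalBackend consults the per-type entry in backendHealth so that repeated healthchecks within externalBackendHealthCheckInterval reuse the last response instead of hitting the external backend again. A condensed sketch of that caching pattern (the helper name is illustrative, not an export of this module):

function cachedHealthCheck(checkStatus, interval, runCheck, cb) {
    const now = Date.now();
    if (checkStatus.response && now - checkStatus.time < interval) {
        // Fresh enough: reuse the cached response.
        return process.nextTick(cb, null, checkStatus.response);
    }
    return runCheck((err, response) => {
        if (err) {
            return cb(err);
        }
        // Record the result so the next call inside the window is free.
        checkStatus.response = response;
        checkStatus.time = now;
        return cb(null, response);
    });
}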
@ -92,6 +92,10 @@ function parseLC() {
            dataStoreName: location,
        };
        if (locationObj.type === 'gcp') {
            clientConfig.jsonEndpoint = locationObj.details.proxy ?
                `${protocol}://${locationObj.details.jsonEndpoint}` :
                `https://${locationObj.details.jsonEndpoint}`;
            clientConfig.proxy = locationObj.details.proxy;
            clientConfig.overflowBucket =
                locationObj.details.overflowBucketName;
            clientConfig.mpuBucket = locationObj.details.mpuBucketName;
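For reference, a GCP location constraint entry feeding the details read above might look like this sketch (bucket names and endpoints are placeholders; the exact schema is defined elsewhere in the config):

const exampleLocationConfig = {
    'gcp-location': {
        type: 'gcp',
        details: {
            gcpEndpoint: 'storage.googleapis.com',
            jsonEndpoint: 'www.googleapis.com',
            bucketName: 'zenko-gcp-bucket',           // main data bucket
            mpuBucketName: 'zenko-gcp-mpu',           // MPU shadow bucket
            overflowBucketName: 'zenko-gcp-overflow', // overflow bucket
            bucketMatch: true,
        },
    },
};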
@ -121,6 +121,7 @@ const multipleBackendGateway = {
        const multBackendResp = {};
        const awsArray = [];
        const azureArray = [];
        const gcpArray = [];
        async.each(Object.keys(clients), (location, cb) => {
            const client = clients[location];
            if (client.clientType === 'scality') {
@ -139,6 +140,9 @@ const multipleBackendGateway = {
            } else if (client.clientType === 'azure') {
                azureArray.push(location);
                return cb();
            } else if (client.clientType === 'gcp') {
                gcpArray.push(location);
                return cb();
            }
            // if backend type isn't 'scality' or 'aws_s3', it will be
            // 'mem' or 'file', for which the default response is 200 OK
@ -152,6 +156,9 @@ const multipleBackendGateway = {
            next => checkExternalBackend(
                clients, azureArray, 'azure', flightCheckOnStartUp,
                externalBackendHealthCheckInterval, next),
            next => checkExternalBackend(
                clients, gcpArray, 'gcp', flightCheckOnStartUp,
                externalBackendHealthCheckInterval, next),
        ], (errNull, externalResp) => {
            const externalLocResults = [];
            externalResp.forEach(resp => externalLocResults.push(...resp));
@ -166,7 +173,7 @@ const multipleBackendGateway = {
        location, contentType, cacheControl, contentDisposition,
        contentEncoding, log, cb) => {
        const client = clients[location];
        if (client.clientType === 'aws_s3' || client.clientType === 'gcp') {
            return client.createMPU(key, metaHeaders, bucketName,
                websiteRedirectHeader, contentType, cacheControl,
                contentDisposition, contentEncoding, log, cb);
@ -218,10 +225,18 @@ const multipleBackendGateway = {

    abortMPU: (key, uploadId, location, bucketName, log, cb) => {
        const client = clients[location];
        const skipDataDelete = true;
        if (client.clientType === 'azure') {
            return cb(null, skipDataDelete);
        }
        if (client.clientType === 'gcp') {
            return client.abortMPU(key, uploadId, bucketName, log, err => {
                if (err) {
                    return cb(err);
                }
                return cb(null, skipDataDelete);
            });
        }
        if (client.abortMPU) {
            return client.abortMPU(key, uploadId, bucketName, log, err => {
                if (err) {
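With the gcp task added to the async.parallel list, GCP locations are health-checked through the same path as AWS and Azure ones. An illustrative direct call, using the signature shown above (the location name is made up):

checkExternalBackend(clients, ['gcp-location'], 'gcp',
    false, // flightCheckOnStartUp
    60000, // externalBackendHealthCheckInterval, in milliseconds
    (err, results) => {
        if (err) {
            return process.stderr.write(`gcp healthcheck failed: ${err}\n`);
        }
        return process.stdout.write(
            `gcp healthcheck: ${JSON.stringify(results)}\n`);
    });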
@ -569,6 +569,18 @@ const data = {
                };
                locations.push(partResult);
                return cb();
            } else if (partInfo &&
                partInfo.dataStoreType === 'gcp') {
                const partResult = {
                    key: partInfo.key,
                    dataStoreName: partInfo.dataStoreName,
                    dataStoreETag: partInfo.dataStoreETag,
                    size: numberPartSize,
                    partNumber: partInfo.partNumber,
                };
                locations.push(partResult);
                return cb();
            }
            return cb(skipError);
        });
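For a GCP part, partInfo carries the same descriptor fields the AWS branch uses. An illustrative entry (all values, including the key layout, are made up for this sketch):

const examplePartInfo = {
    key: 'somekey-<uploadId>/1',   // hypothetical MPU part key
    dataStoreName: 'gcp-location',
    dataStoreType: 'gcp',
    dataStoreETag: '1:be747eb4b75517bf6b3cf7c5fbb62f3a', // assumed '<partNumber>:<md5>' format
    partNumber: 1,
};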
@ -680,8 +692,15 @@
        config.getLocationConstraintType(sourceLocationConstraintName) ===
            'aws_s3';

        const locationTypeMatchGCP =
            config.backends.data === 'multiple' &&
            config.getLocationConstraintType(sourceLocationConstraintName) ===
            config.getLocationConstraintType(destLocationConstraintName) &&
            config.getLocationConstraintType(sourceLocationConstraintName) ===
            'gcp';

        // NOTE: using multipleBackendGateway.uploadPartCopy only if copying
        // from AWS to AWS or from GCP to GCP

        if (locationTypeMatchAWS && dataLocator.length === 1) {
            const awsSourceKey = dataLocator[0].key;
@ -696,6 +715,19 @@
            });
        }

        if (locationTypeMatchGCP && dataLocator.length === 1) {
            const gcpSourceKey = dataLocator[0].key;
            return multipleBackendGateway.uploadPartCopy(request,
                destLocationConstraintName, gcpSourceKey,
                sourceLocationConstraintName, log, (error, eTag) => {
                    if (error) {
                        return callback(error);
                    }
                    return callback(null, eTag,
                        lastModified, serverSideEncryption);
                });
        }

        const backendInfo = new BackendInfo(destLocationConstraintName);

        // totalHash will be sent through the RelayMD5Sum transform streams
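The GCP fast path mirrors the AWS one: it is taken only when both location constraints resolve to the same single gcp backend and the object has exactly one data location. Restated as a predicate (illustrative helper, not part of the module):

function isGcpToGcpCopy(config, srcLocation, destLocation, dataLocator) {
    const srcType = config.getLocationConstraintType(srcLocation);
    const destType = config.getLocationConstraintType(destLocation);
    // Server-side copy is only safe when the data never has to leave GCP.
    return config.backends.data === 'multiple' &&
        srcType === destType &&
        srcType === 'gcp' &&
        dataLocator.length === 1;
}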
@ -22,10 +22,11 @@
    "aws-sdk": "2.28.0",
    "arsenal": "scality/Arsenal#rel/7.4",
    "async": "~2.5.0",
    "azure-storage": "^2.1.0",
    "backo": "^1.1.0",
    "bucketclient": "scality/bucketclient#rel/7.4",
    "commander": "^2.9.0",
    "google-auto-auth": "^0.9.1",
    "mongodb": "^2.2.31",
    "node-uuid": "^1.4.3",
    "npm-run-all": "~4.0.2",
@ -17,15 +17,6 @@ aws_access_key_id = $AWS_ACCESS_KEY_ID_GOOGLE_2
aws_secret_access_key = $AWS_SECRET_ACCESS_KEY_GOOGLE_2
EOF
-
-mkdir ${HOME}/.gcp #create directory for GCP service credential
-cat >>${HOME}/.gcp/servicekey <<EOF
-{
-    "type": "service_account",
-    "private_key": "$GOOGLE_SERVICE_KEY",
-    "client_email": "$GOOGLE_SERVICE_EMAIL"
-}
-EOF

MYPWD=$(pwd)

killandsleep () {
@ -0,0 +1,96 @@
const async = require('async');
const assert = require('assert');

const withV4 = require('../../support/withV4');
const BucketUtility = require('../../../lib/utility/bucket-util');
const { describeSkipIfNotMultiple, gcpClient, gcpBucketMPU, gcpLocation } =
    require('../utils');
const { createMpuKey } =
    require('../../../../../../lib/data/external/GCP').GcpUtils;

const bucket = 'buckettestmultiplebackendinitmpu-gcp';
const keyName = `somekey-${Date.now()}`;

const skipIfNotMultipleorIfProxy = process.env.CI_PROXY === 'true' ?
    describe.skip : describeSkipIfNotMultiple;

let s3;
let bucketUtil;

skipIfNotMultipleorIfProxy('Initiate MPU to GCP', () => {
    withV4(sigCfg => {
        beforeEach(() => {
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });

        afterEach(() => {
            process.stdout.write('Emptying bucket\n');
            return bucketUtil.empty(bucket)
            .then(() => {
                process.stdout.write('Deleting bucket\n');
                return bucketUtil.deleteOne(bucket);
            })
            .catch(err => {
                process.stdout.write(`Error in afterEach: ${err}\n`);
                throw err;
            });
        });
        describe('Basic test: ', () => {
            beforeEach(done =>
                s3.createBucket({ Bucket: bucket,
                    CreateBucketConfiguration: {
                        LocationConstraint: gcpLocation,
                    },
                }, done));
            afterEach(function afterEachF(done) {
                const params = {
                    Bucket: bucket,
                    Key: keyName,
                    UploadId: this.currentTest.uploadId,
                };
                s3.abortMultipartUpload(params, done);
            });
            it('should create MPU and list in-progress multipart uploads',
            function ifF(done) {
                const params = {
                    Bucket: bucket,
                    Key: keyName,
                    Metadata: { 'scal-location-constraint': gcpLocation },
                };
                async.waterfall([
                    next => s3.createMultipartUpload(params, (err, res) => {
                        this.test.uploadId = res.UploadId;
                        assert(this.test.uploadId);
                        assert.strictEqual(res.Bucket, bucket);
                        assert.strictEqual(res.Key, keyName);
                        next(err);
                    }),
                    next => s3.listMultipartUploads(
                        { Bucket: bucket }, (err, res) => {
                            assert.strictEqual(res.NextKeyMarker, keyName);
                            assert.strictEqual(res.NextUploadIdMarker,
                                this.test.uploadId);
                            assert.strictEqual(res.Uploads[0].Key, keyName);
                            assert.strictEqual(res.Uploads[0].UploadId,
                                this.test.uploadId);
                            next(err);
                        }),
                    next => {
                        const mpuKey =
                            createMpuKey(keyName, this.test.uploadId, 'init');
                        const params = {
                            Bucket: gcpBucketMPU,
                            Key: mpuKey,
                        };
                        gcpClient.getObject(params, err => {
                            assert.ifError(err,
                                `Expected success, but got err ${err}`);
                            next();
                        });
                    },
                ], done);
            });
        });
    });
});
@ -0,0 +1,98 @@
const assert = require('assert');

const withV4 = require('../../support/withV4');
const BucketUtility = require('../../../lib/utility/bucket-util');
const { describeSkipIfNotMultiple, gcpLocation }
    = require('../utils');

const bucket = 'buckettestmultiplebackendlistparts-gcp';
const firstPartSize = 10;
const bodyFirstPart = Buffer.alloc(firstPartSize);
const secondPartSize = 15;
const bodySecondPart = Buffer.alloc(secondPartSize);
const skipIfNotMultipleorIfProxy = process.env.CI_PROXY === 'true' ?
    describe.skip : describeSkipIfNotMultiple;

let bucketUtil;
let s3;

skipIfNotMultipleorIfProxy('List parts of MPU on GCP data backend', () => {
    withV4(sigCfg => {
        beforeEach(function beforeEachFn() {
            this.currentTest.key = `somekey-${Date.now()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
            return s3.createBucketAsync({ Bucket: bucket })
            .then(() => s3.createMultipartUploadAsync({
                Bucket: bucket, Key: this.currentTest.key,
                Metadata: { 'scal-location-constraint': gcpLocation } }))
            .then(res => {
                this.currentTest.uploadId = res.UploadId;
                return s3.uploadPartAsync({ Bucket: bucket,
                    Key: this.currentTest.key, PartNumber: 1,
                    UploadId: this.currentTest.uploadId, Body: bodyFirstPart });
            }).then(res => {
                this.currentTest.firstEtag = res.ETag;
            }).then(() => s3.uploadPartAsync({ Bucket: bucket,
                Key: this.currentTest.key, PartNumber: 2,
                UploadId: this.currentTest.uploadId, Body: bodySecondPart })
            ).then(res => {
                this.currentTest.secondEtag = res.ETag;
            })
            .catch(err => {
                process.stdout.write(`Error in beforeEach: ${err}\n`);
                throw err;
            });
        });

        afterEach(function afterEachFn() {
            process.stdout.write('Emptying bucket');
            return s3.abortMultipartUploadAsync({
                Bucket: bucket, Key: this.currentTest.key,
                UploadId: this.currentTest.uploadId,
            })
            .then(() => bucketUtil.empty(bucket))
            .then(() => {
                process.stdout.write('Deleting bucket');
                return bucketUtil.deleteOne(bucket);
            })
            .catch(err => {
                process.stdout.write('Error in afterEach');
                throw err;
            });
        });

        it('should list both parts', function itFn(done) {
            s3.listParts({
                Bucket: bucket,
                Key: this.test.key,
                UploadId: this.test.uploadId },
            (err, data) => {
                assert.equal(err, null, `Err listing parts: ${err}`);
                assert.strictEqual(data.Parts.length, 2);
                assert.strictEqual(data.Parts[0].PartNumber, 1);
                assert.strictEqual(data.Parts[0].Size, firstPartSize);
                assert.strictEqual(data.Parts[0].ETag, this.test.firstEtag);
                assert.strictEqual(data.Parts[1].PartNumber, 2);
                assert.strictEqual(data.Parts[1].Size, secondPartSize);
                assert.strictEqual(data.Parts[1].ETag, this.test.secondEtag);
                done();
            });
        });

        it('should only list the second part', function itFn(done) {
            s3.listParts({
                Bucket: bucket,
                Key: this.test.key,
                PartNumberMarker: 1,
                UploadId: this.test.uploadId },
            (err, data) => {
                assert.equal(err, null, `Err listing parts: ${err}`);
                assert.strictEqual(data.Parts[0].PartNumber, 2);
                assert.strictEqual(data.Parts[0].Size, secondPartSize);
                assert.strictEqual(data.Parts[0].ETag, this.test.secondEtag);
                done();
            });
        });
    });
});
@ -0,0 +1,195 @@
const assert = require('assert');
const async = require('async');

const withV4 = require('../../support/withV4');
const BucketUtility = require('../../../lib/utility/bucket-util');
const { describeSkipIfNotMultiple, gcpClient, gcpBucket, gcpBucketMPU,
    gcpLocation, uniqName } = require('../utils');

const keyObject = 'abortgcp';
const bucket = 'buckettestmultiplebackendabortmpu-gcp';
const body = Buffer.from('I am a body', 'utf8');
const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
const gcpTimeout = 5000;
const skipIfNotMultipleorIfProxy = process.env.CI_PROXY === 'true' ?
    describe.skip : describeSkipIfNotMultiple;

let bucketUtil;
let s3;

function checkMPUList(bucket, key, uploadId, cb) {
    const params = {
        Bucket: bucket,
        Key: key,
        UploadId: uploadId,
    };
    gcpClient.listParts(params, (err, res) => {
        assert.ifError(err,
            `Expected success, but got err ${err}`);
        assert.deepStrictEqual(res.Contents, [],
            'Expected 0 parts, listed some');
        cb();
    });
}

skipIfNotMultipleorIfProxy('Abort MPU on GCP data backend', function
describeFn() {
    this.timeout(180000);
    withV4(sigCfg => {
        beforeEach(function beforeFn() {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });

        describe('with bucket location header', () => {
            beforeEach(function beforeEachFn(done) {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
                        this.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

            afterEach(done => s3.deleteBucket({ Bucket: bucket },
                done));

            it('should abort a MPU with 0 parts', function itFn(done) {
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => s3.abortMultipartUpload(params, () => next()),
                    next => setTimeout(() => checkMPUList(
                        gcpBucketMPU, this.test.key, this.test.uploadId, next),
                        gcpTimeout),
                ], done);
            });

            it('should abort a MPU with uploaded parts', function itFn(done) {
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => {
                        async.times(2, (n, cb) => {
                            const params = {
                                Bucket: bucket,
                                Key: this.test.key,
                                UploadId: this.test.uploadId,
                                Body: body,
                                PartNumber: n + 1,
                            };
                            s3.uploadPart(params, (err, res) => {
                                assert.ifError(err,
                                    `Expected success, but got err ${err}`);
                                assert.strictEqual(
                                    res.ETag, `"${correctMD5}"`);
                                cb();
                            });
                        }, () => next());
                    },
                    next => s3.abortMultipartUpload(params, () => next()),
                    next => setTimeout(() => checkMPUList(
                        gcpBucketMPU, this.test.key, this.test.uploadId, next),
                        gcpTimeout),
                ], done);
            });
        });

        describe('with previously existing object with same key', () => {
            beforeEach(function beforeEachFn(done) {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => {
                        s3.putObject({
                            Bucket: bucket,
                            Key: this.currentTest.key,
                            Metadata: {
                                'scal-location-constraint': gcpLocation },
                            Body: body,
                        }, err => {
                            assert.ifError(err,
                                `Expected success, got error: ${err}`);
                            return next();
                        });
                    },
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
                        this.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

            afterEach(() => {
                process.stdout.write('Emptying bucket\n');
                return bucketUtil.empty(bucket)
                .then(() => {
                    process.stdout.write('Deleting bucket\n');
                    return bucketUtil.deleteOne(bucket);
                })
                .catch(err => {
                    process.stdout.write('Error emptying/deleting bucket: ' +
                        `${err}\n`);
                    throw err;
                });
            });

            it('should abort MPU without deleting existing object',
            function itFn(done) {
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => {
                        const body = Buffer.alloc(10);
                        const partParams = Object.assign(
                            { PartNumber: 1, Body: body }, params);
                        s3.uploadPart(partParams, err => {
                            assert.ifError(err,
                                `Expected success, got error: ${err}`);
                            return next();
                        });
                    },
                    next => s3.abortMultipartUpload(params, () => next()),
                    next => setTimeout(() => {
                        const params = {
                            Bucket: gcpBucket,
                            Key: this.test.key,
                        };
                        gcpClient.getObject(params, (err, res) => {
                            assert.ifError(err,
                                `Expected success, got error: ${err}`);
                            assert.strictEqual(res.ETag, `"${correctMD5}"`);
                            next();
                        });
                    }, gcpTimeout),
                ], done);
            });
        });
    });
});
@ -0,0 +1,249 @@
const assert = require('assert');
const async = require('async');

const withV4 = require('../../support/withV4');
const BucketUtility = require('../../../lib/utility/bucket-util');
const { describeSkipIfNotMultiple, fileLocation, awsS3, awsLocation, awsBucket,
    gcpClient, gcpBucket, gcpLocation, gcpLocationMismatch } =
    require('../utils');

const bucket = 'buckettestmultiplebackendcompletempu-gcp';
const smallBody = Buffer.from('I am a body', 'utf8');
const bigBody = Buffer.alloc(10485760);
const s3MD5 = 'bfb875032e51cbe2a60c5b6b99a2153f-2';
const expectedContentLength = '10485771';
const gcpTimeout = 5000;
const skipIfNotMultipleorIfProxy = process.env.CI_PROXY === 'true' ?
    describe.skip : describeSkipIfNotMultiple;

let s3;
let bucketUtil;

function getCheck(key, bucketMatch, cb) {
    let gcpKey = key;
    s3.getObject({ Bucket: bucket, Key: gcpKey },
        (err, s3Res) => {
            assert.equal(err, null, `Err getting object from S3: ${err}`);
            assert.strictEqual(s3Res.ETag, `"${s3MD5}"`);

            if (!bucketMatch) {
                gcpKey = `${bucket}/${gcpKey}`;
            }
            const params = { Bucket: gcpBucket, Key: gcpKey };
            gcpClient.getObject(params, (err, gcpRes) => {
                assert.equal(err, null, `Err getting object from GCP: ${err}`);
                assert.strictEqual(expectedContentLength, gcpRes.ContentLength);
                cb();
            });
        });
}

function mpuSetup(key, location, cb) {
    const partArray = [];
    async.waterfall([
        next => {
            const params = {
                Bucket: bucket,
                Key: key,
                Metadata: { 'scal-location-constraint': location },
            };
            s3.createMultipartUpload(params, (err, res) => {
                const uploadId = res.UploadId;
                assert(uploadId);
                assert.strictEqual(res.Bucket, bucket);
                assert.strictEqual(res.Key, key);
                next(err, uploadId);
            });
        },
        (uploadId, next) => {
            const partParams = {
                Bucket: bucket,
                Key: key,
                PartNumber: 1,
                UploadId: uploadId,
                Body: smallBody,
            };
            s3.uploadPart(partParams, (err, res) => {
                partArray.push({ ETag: res.ETag, PartNumber: 1 });
                next(err, uploadId);
            });
        },
        (uploadId, next) => {
            const partParams = {
                Bucket: bucket,
                Key: key,
                PartNumber: 2,
                UploadId: uploadId,
                Body: bigBody,
            };
            s3.uploadPart(partParams, (err, res) => {
                partArray.push({ ETag: res.ETag, PartNumber: 2 });
                next(err, uploadId);
            });
        },
    ], (err, uploadId) => {
        process.stdout.write('Created MPU and put two parts\n');
        assert.equal(err, null, `Err setting up MPU: ${err}`);
        cb(uploadId, partArray);
    });
}

skipIfNotMultipleorIfProxy('Complete MPU API for GCP data backend',
function testSuite() {
    this.timeout(150000);
    withV4(sigCfg => {
        beforeEach(function beFn() {
            this.currentTest.key = `somekey-${Date.now()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
            this.currentTest.awsClient = awsS3;
            return s3.createBucketAsync({ Bucket: bucket })
            .catch(err => {
                process.stdout.write(`Error creating bucket: ${err}\n`);
                throw err;
            });
        });

        afterEach(() => {
            process.stdout.write('Emptying bucket\n');
            return bucketUtil.empty(bucket)
            .then(() => {
                process.stdout.write('Deleting bucket\n');
                return bucketUtil.deleteOne(bucket);
            })
            .catch(err => {
                process.stdout.write(`Error in afterEach: ${err}\n`);
                throw err;
            });
        });

        it('should complete an MPU on GCP', function itFn(done) {
            mpuSetup(this.test.key, gcpLocation, (uploadId, partArray) => {
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: uploadId,
                    MultipartUpload: { Parts: partArray },
                };
                setTimeout(() => {
                    s3.completeMultipartUpload(params, err => {
                        assert.equal(err, null,
                            `Err completing MPU: ${err}`);
                        getCheck(this.test.key, true, done);
                    });
                }, gcpTimeout);
            });
        });

        it('should complete an MPU on GCP with bucketMatch=false',
        function itFn(done) {
            mpuSetup(this.test.key, gcpLocationMismatch,
            (uploadId, partArray) => {
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: uploadId,
                    MultipartUpload: { Parts: partArray },
                };
                setTimeout(() => {
                    s3.completeMultipartUpload(params, err => {
                        assert.equal(err, null,
                            `Err completing MPU: ${err}`);
                        getCheck(this.test.key, false, done);
                    });
                }, gcpTimeout);
            });
        });

        it('should complete an MPU on GCP with same key as object put ' +
        'to file', function itFn(done) {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: bucket,
                Key: this.test.key,
                Body: body,
                Metadata: { 'scal-location-constraint': fileLocation } },
            err => {
                assert.equal(err, null, `Err putting object to file: ${err}`);
                mpuSetup(this.test.key, gcpLocation,
                (uploadId, partArray) => {
                    const params = {
                        Bucket: bucket,
                        Key: this.test.key,
                        UploadId: uploadId,
                        MultipartUpload: { Parts: partArray },
                    };
                    setTimeout(() => {
                        s3.completeMultipartUpload(params, err => {
                            assert.equal(err, null,
                                `Err completing MPU: ${err}`);
                            getCheck(this.test.key, true, done);
                        });
                    }, gcpTimeout);
                });
            });
        });

        it('should complete an MPU on GCP with same key as object put ' +
        'to GCP', function itFn(done) {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: bucket,
                Key: this.test.key,
                Body: body,
                Metadata: { 'scal-location-constraint': gcpLocation } },
            err => {
                assert.equal(err, null, `Err putting object to GCP: ${err}`);
                mpuSetup(this.test.key, gcpLocation,
                (uploadId, partArray) => {
                    const params = {
                        Bucket: bucket,
                        Key: this.test.key,
                        UploadId: uploadId,
                        MultipartUpload: { Parts: partArray },
                    };
                    setTimeout(() => {
                        s3.completeMultipartUpload(params, err => {
                            assert.equal(err, null,
                                `Err completing MPU: ${err}`);
                            getCheck(this.test.key, true, done);
                        });
                    }, gcpTimeout);
                });
            });
        });

        it('should complete an MPU on GCP with same key as object put ' +
        'to AWS', function itFn(done) {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: bucket,
                Key: this.test.key,
                Body: body,
                Metadata: { 'scal-location-constraint': awsLocation } },
            err => {
                assert.equal(err, null, `Err putting object to AWS: ${err}`);
                mpuSetup(this.test.key, gcpLocation,
                (uploadId, partArray) => {
                    const params = {
                        Bucket: bucket,
                        Key: this.test.key,
                        UploadId: uploadId,
                        MultipartUpload: { Parts: partArray },
                    };
                    s3.completeMultipartUpload(params, err => {
                        assert.equal(err, null, `Err completing MPU: ${err}`);
                        // make sure object is gone from AWS
                        setTimeout(() => {
                            this.test.awsClient.getObject({ Bucket: awsBucket,
                                Key: this.test.key }, err => {
                                assert.strictEqual(err.code, 'NoSuchKey');
                                getCheck(this.test.key, true, done);
                            });
                        }, gcpTimeout);
                    });
                });
            });
        });
    });
});
@@ -0,0 +1,340 @@
const assert = require('assert');
const async = require('async');

const withV4 = require('../../support/withV4');
const BucketUtility = require('../../../lib/utility/bucket-util');
const { describeSkipIfNotMultiple, gcpClient, gcpBucket, gcpBucketMPU,
    gcpLocation, gcpLocationMismatch, uniqName } = require('../utils');
const { createMpuKey } =
    require('../../../../../../lib/data/external/GCP').GcpUtils;

const keyObject = 'putgcp';
const bucket = 'buckettestmultiplebackendputpart-gcp';
const body = Buffer.from('I am a body', 'utf8');
const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
const emptyMD5 = 'd41d8cd98f00b204e9800998ecf8427e';
const skipIfNotMultipleOrIfProxy = process.env.CI_PROXY === 'true' ?
    describe.skip : describeSkipIfNotMultiple;

let bucketUtil;
let s3;

function checkMPUResult(bucket, key, uploadId, objCount, expected, cb) {
    const params = {
        Bucket: bucket,
        Key: key,
        UploadId: uploadId,
    };
    gcpClient.listParts(params, (err, res) => {
        assert.ifError(err, `Expected success, but got err ${err}`);
        assert(res && res.Contents && res.Contents.length === objCount);
        res.Contents.forEach(part => {
            assert.strictEqual(part.ETag, `"${expected}"`);
        });
        cb();
    });
}

skipIfNotMultipleOrIfProxy('MultipleBackend put part to GCP',
function describeFn() {
    this.timeout(180000);
    withV4(sigCfg => {
        beforeEach(function beforeFn() {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });

        describe('with bucket location header', () => {
            beforeEach(function beforeEachFn(done) {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
                        this.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

            afterEach(function afterEachFn(done) {
                async.waterfall([
                    next => s3.abortMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        UploadId: this.currentTest.uploadId,
                    }, err => next(err)),
                    next => s3.deleteBucket({ Bucket: bucket },
                        err => next(err)),
                ], err => {
                    assert.equal(err, null, `Error aborting MPU: ${err}`);
                    done();
                });
            });

            it('should put 0-byte part to GCP', function itFn(done) {
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: this.test.uploadId,
                    PartNumber: 1,
                };
                async.waterfall([
                    next => s3.uploadPart(params, (err, res) => {
                        assert.ifError(err,
                            `Expected success, but got err ${err}`);
                        assert.strictEqual(res.ETag, `"${emptyMD5}"`);
                        next();
                    }),
                    next => {
                        const mpuKey =
                            createMpuKey(this.test.key, this.test.uploadId, 1);
                        const getParams = {
                            Bucket: gcpBucketMPU,
                            Key: mpuKey,
                        };
                        gcpClient.getObject(getParams, (err, res) => {
                            assert.ifError(err,
                                `Expected success, but got err ${err}`);
                            assert.strictEqual(res.ETag, `"${emptyMD5}"`);
                            next();
                        });
                    },
                ], done);
            });

            it('should put 2 parts to GCP', function itFn(done) {
                async.waterfall([
                    next => {
                        async.times(2, (n, cb) => {
                            const params = {
                                Bucket: bucket,
                                Key: this.test.key,
                                UploadId: this.test.uploadId,
                                Body: body,
                                PartNumber: n + 1,
                            };
                            s3.uploadPart(params, (err, res) => {
                                assert.ifError(err,
                                    `Expected success, but got err ${err}`);
                                assert.strictEqual(res.ETag,
                                    `"${correctMD5}"`);
                                cb();
                            });
                        }, () => next());
                    },
                    next => checkMPUResult(
                        gcpBucketMPU, this.test.key, this.test.uploadId,
                        2, correctMD5, next),
                ], done);
            });

            it('should put the same part twice', function itFn(done) {
                async.waterfall([
                    next => {
                        const partBody = ['', body];
                        const partMD5 = [emptyMD5, correctMD5];
                        async.timesSeries(2, (n, cb) => {
                            const params = {
                                Bucket: bucket,
                                Key: this.test.key,
                                UploadId: this.test.uploadId,
                                Body: partBody[n],
                                PartNumber: 1,
                            };
                            s3.uploadPart(params, (err, res) => {
                                assert.ifError(err,
                                    `Expected success, but got err ${err}`);
                                assert.strictEqual(res.ETag,
                                    `"${partMD5[n]}"`);
                                cb();
                            });
                        }, () => next());
                    },
                    next => checkMPUResult(
                        gcpBucketMPU, this.test.key, this.test.uploadId,
                        1, correctMD5, next),
                ], done);
            });
        });

        describe('with same key as preexisting part', () => {
            beforeEach(function beforeEachFn(done) {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => {
                        s3.putObject({
                            Bucket: bucket,
                            Key: this.currentTest.key,
                            Metadata: {
                                'scal-location-constraint': gcpLocation },
                            Body: body,
                        }, err => {
                            assert.equal(err, null, 'Err putting object to ' +
                                `GCP: ${err}`);
                            return next();
                        });
                    },
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
                        this.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

            afterEach(function afterEachFn(done) {
                async.waterfall([
                    next => {
                        process.stdout.write('Aborting multipart upload\n');
                        s3.abortMultipartUpload({
                            Bucket: bucket,
                            Key: this.currentTest.key,
                            UploadId: this.currentTest.uploadId },
                            err => next(err));
                    },
                    next => {
                        process.stdout.write('Deleting object\n');
                        s3.deleteObject({
                            Bucket: bucket,
                            Key: this.currentTest.key },
                            err => next(err));
                    },
                    next => {
                        process.stdout.write('Deleting bucket\n');
                        s3.deleteBucket({
                            Bucket: bucket },
                            err => next(err));
                    },
                ], err => {
                    assert.equal(err, null, `Err in afterEach: ${err}`);
                    done();
                });
            });

            it('should put a part without overwriting existing object',
            function itFn(done) {
                const body = Buffer.alloc(20);
                s3.uploadPart({
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: this.test.uploadId,
                    PartNumber: 1,
                    Body: body,
                }, err => {
                    assert.strictEqual(err, null, 'Err putting part to ' +
                        `GCP: ${err}`);
                    gcpClient.getObject({
                        Bucket: gcpBucket,
                        Key: this.test.key,
                    }, (err, res) => {
                        assert.ifError(err,
                            `Expected success, but got err ${err}`);
                        assert.strictEqual(res.ETag, `"${correctMD5}"`);
                        done();
                    });
                });
            });
        });
    });
});

describeSkipIfNotMultiple('MultipleBackend put part to GCP location with ' +
'bucketMatch set to false', function describeF() {
    this.timeout(80000);
    withV4(sigCfg => {
        beforeEach(function beforeFn() {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });
        describe('with bucket location header', () => {
            beforeEach(function beforeEachFn(done) {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        Metadata: { 'scal-location-constraint':
                            gcpLocationMismatch },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
                        this.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

            afterEach(function afterEachFn(done) {
                async.waterfall([
                    next => s3.abortMultipartUpload({
                        Bucket: bucket,
                        Key: this.currentTest.key,
                        UploadId: this.currentTest.uploadId,
                    }, err => next(err)),
                    next => s3.deleteBucket({ Bucket: bucket },
                        err => next(err)),
                ], err => {
                    assert.equal(err, null, `Error aborting MPU: ${err}`);
                    done();
                });
            });

            it('should put part to GCP location with bucketMatch' +
            ' set to false', function itFn(done) {
                const body20 = Buffer.alloc(20);
                const params = {
                    Bucket: bucket,
                    Key: this.test.key,
                    UploadId: this.test.uploadId,
                    PartNumber: 1,
                    Body: body20,
                };
                const eTagExpected =
                    '"441018525208457705bf09a8ee3c1093"';
                async.waterfall([
                    next => s3.uploadPart(params, (err, res) => {
                        assert.strictEqual(res.ETag, eTagExpected);
                        next(err);
                    }),
                    next => {
                        const key =
                            createMpuKey(this.test.key, this.test.uploadId, 1);
                        const mpuKey = `${bucket}/${key}`;
                        const getParams = {
                            Bucket: gcpBucketMPU,
                            Key: mpuKey,
                        };
                        gcpClient.getObject(getParams, (err, res) => {
                            assert.ifError(err,
                                `Expected success, but got err ${err}`);
                            assert.strictEqual(res.ETag, eTagExpected);
                            next();
                        });
                    },
                ], done);
            });
        });
    });
});
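One detail worth calling out from the bucketMatch test above: with a `gcpLocationMismatch` location, the MPU part lands in the GCP MPU bucket under a key that is additionally prefixed with the source bucket name. A minimal sketch of that lookup pattern, assuming the names imported in the file above (`createMpuKey`, `gcpClient`, `gcpBucketMPU`); the exact key format `createMpuKey` produces lives in `lib/data/external/GCP` and is not assumed here:

```js
// Sketch only: the `${bucket}/` prefix is the behavior the
// bucketMatch=false test asserts; createMpuKey is treated as opaque.
function getMismatchPartETag(bucket, key, uploadId, partNumber, cb) {
    const partKey = `${bucket}/${createMpuKey(key, uploadId, partNumber)}`;
    gcpClient.getObject({ Bucket: gcpBucketMPU, Key: partKey },
        (err, res) => cb(err, res && res.ETag));
}
```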
@@ -0,0 +1,742 @@
const async = require('async');
const assert = require('assert');

const { config } = require('../../../../../../lib/Config');
const withV4 = require('../../support/withV4');
const BucketUtility = require('../../../lib/utility/bucket-util');
const { describeSkipIfNotMultiple, uniqName, gcpBucketMPU,
    gcpClient, gcpLocation, gcpLocationMismatch, memLocation,
    awsLocation, awsS3, getOwnerInfo } = require('../utils');
const skipIfNotMultipleOrIfProxy = process.env.CI_PROXY === 'true' ?
    describe.skip : describeSkipIfNotMultiple;

const bucket = 'buckettestmultiplebackendpartcopy-gcp';

const memBucketName = 'membucketnameputcopypartgcp';
const awsBucketName = 'awsbucketnameputcopypartgcp';

const normalBodySize = 11;
const normalBody = Buffer.from('I am a body', 'utf8');
const normalMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';

const sixBytesMD5 = 'c978a461602f0372b5f970157927f723';

const oneKb = 1024;
const oneKbBody = Buffer.alloc(oneKb);
const oneKbMD5 = '0f343b0931126a20f133d67c2b018a3b';

const fiveMB = 5 * 1024 * 1024;
const fiveMbBody = Buffer.alloc(fiveMB);
const fiveMbMD5 = '5f363e0e58a95f06cbe9bbc662c5dfb6';

const oneHundredAndFiveMB = 105 * 1024 * 1024;
const oneHundredAndFiveMbBody = Buffer.alloc(oneHundredAndFiveMB);
const oneHundredAndFiveMbMD5 = 'a9b59b0a5fe1ffed0b23fad2498c4dac';

const keyObjectGcp = 'objectputcopypartgcp';
const keyObjectMemory = 'objectputcopypartMemory';
const keyObjectAWS = 'objectputcopypartAWS';

const { ownerID, ownerDisplayName } = getOwnerInfo('account1');

const result = {
    Bucket: '',
    Key: '',
    UploadId: '',
    MaxParts: 1000,
    IsTruncated: false,
    Parts: [],
    Initiator:
        { ID: ownerID,
          DisplayName: ownerDisplayName },
    Owner:
        { DisplayName: ownerDisplayName,
          ID: ownerID },
    StorageClass: 'STANDARD',
};

let s3;
let bucketUtil;

function assertCopyPart(infos, cb) {
    const { bucketName, keyName, uploadId, md5, totalSize } = infos;
    const resultCopy = JSON.parse(JSON.stringify(result));
    resultCopy.Bucket = bucketName;
    resultCopy.Key = keyName;
    resultCopy.UploadId = uploadId;
    async.waterfall([
        next => s3.listParts({
            Bucket: bucketName,
            Key: keyName,
            UploadId: uploadId,
        }, (err, res) => {
            assert.ifError(err, 'listParts: Expected success,' +
                ` got error: ${err}`);
            resultCopy.Parts =
                [{ PartNumber: 1,
                   LastModified: res.Parts[0].LastModified,
                   ETag: `"${md5}"`,
                   Size: totalSize }];
            assert.deepStrictEqual(res, resultCopy);
            next();
        }),
        next => gcpClient.listParts({
            Bucket: gcpBucketMPU,
            Key: keyName,
            UploadId: uploadId,
        }, (err, res) => {
            assert.ifError(err, 'GCP listParts: Expected success,' +
                ` got error: ${err}`);
            assert.strictEqual(res.Contents[0].ETag, `"${md5}"`);
            next();
        }),
    ], cb);
}

skipIfNotMultipleOrIfProxy('Put Copy Part to GCP', function describeFn() {
    this.timeout(800000);
    withV4(sigCfg => {
        beforeEach(done => {
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
            s3.createBucket({ Bucket: bucket,
                CreateBucketConfiguration: {
                    LocationConstraint: gcpLocation,
                },
            }, done);
        });

        afterEach(() => {
            process.stdout.write('Emptying bucket\n');
            return bucketUtil.empty(bucket)
            .then(() => bucketUtil.empty(memBucketName))
            .then(() => {
                process.stdout.write(`Deleting bucket ${bucket}\n`);
                return bucketUtil.deleteOne(bucket);
            })
            .then(() => {
                process.stdout.write(`Deleting bucket ${memBucketName}\n`);
                return bucketUtil.deleteOne(memBucketName);
            })
            .catch(err => {
                process.stdout.write(`Error in afterEach: ${err}\n`);
                throw err;
            });
        });

        describe('Basic test: ', () => {
            beforeEach(function beforeFn(done) {
                this.currentTest.keyNameNormalGcp =
                    `normalgcp${uniqName(keyObjectGcp)}`;
                this.currentTest.keyNameNormalGcpMismatch =
                    `normalgcpmismatch${uniqName(keyObjectGcp)}`;

                this.currentTest.keyNameFiveMbGcp =
                    `fivembgcp${uniqName(keyObjectGcp)}`;
                this.currentTest.keyNameFiveMbMem =
                    `fivembmem${uniqName(keyObjectMemory)}`;

                this.currentTest.mpuKeyNameGcp =
                    `mpukeyname${uniqName(keyObjectGcp)}`;
                this.currentTest.mpuKeyNameMem =
                    `mpukeyname${uniqName(keyObjectMemory)}`;
                this.currentTest.mpuKeyNameAWS =
                    `mpukeyname${uniqName(keyObjectAWS)}`;
                const paramsGcp = {
                    Bucket: bucket,
                    Key: this.currentTest.mpuKeyNameGcp,
                    Metadata: { 'scal-location-constraint': gcpLocation },
                };
                const paramsMem = {
                    Bucket: memBucketName,
                    Key: this.currentTest.mpuKeyNameMem,
                    Metadata: { 'scal-location-constraint': memLocation },
                };
                const paramsAWS = {
                    Bucket: memBucketName,
                    Key: this.currentTest.mpuKeyNameAWS,
                    Metadata: { 'scal-location-constraint': awsLocation },
                };
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => s3.createBucket({ Bucket: memBucketName },
                        err => next(err)),
                    next => s3.putObject({
                        Bucket: bucket,
                        Key: this.currentTest.keyNameNormalGcp,
                        Body: normalBody,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, err => next(err)),
                    next => s3.putObject({
                        Bucket: bucket,
                        Key: this.currentTest.keyNameNormalGcpMismatch,
                        Body: normalBody,
                        Metadata: { 'scal-location-constraint':
                            gcpLocationMismatch },
                    }, err => next(err)),
                    next => s3.putObject({
                        Bucket: bucket,
                        Key: this.currentTest.keyNameFiveMbGcp,
                        Body: fiveMbBody,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, err => next(err)),
                    next => s3.putObject({
                        Bucket: bucket,
                        Key: this.currentTest.keyNameFiveMbMem,
                        Body: fiveMbBody,
                        Metadata: { 'scal-location-constraint': memLocation },
                    }, err => next(err)),
                    next => s3.createMultipartUpload(paramsGcp,
                        (err, res) => {
                            assert.ifError(err, 'createMultipartUpload ' +
                                `on gcp: Expected success, got error: ${err}`);
                            this.currentTest.uploadId = res.UploadId;
                            next();
                        }),
                    next => s3.createMultipartUpload(paramsMem,
                        (err, res) => {
                            assert.ifError(err, 'createMultipartUpload ' +
                                `in memory: Expected success, got error: ${err}`);
                            this.currentTest.uploadIdMem = res.UploadId;
                            next();
                        }),
                    next => s3.createMultipartUpload(paramsAWS,
                        (err, res) => {
                            assert.ifError(err, 'createMultipartUpload ' +
                                `on AWS: Expected success, got error: ${err}`);
                            this.currentTest.uploadIdAWS = res.UploadId;
                            next();
                        }),
                ], done);
            });

            afterEach(function afterFn(done) {
                const paramsGcp = {
                    Bucket: bucket,
                    Key: this.currentTest.mpuKeyNameGcp,
                    UploadId: this.currentTest.uploadId,
                };
                const paramsMem = {
                    Bucket: memBucketName,
                    Key: this.currentTest.mpuKeyNameMem,
                    UploadId: this.currentTest.uploadIdMem,
                };
                const paramsAWS = {
                    Bucket: memBucketName,
                    Key: this.currentTest.mpuKeyNameAWS,
                    UploadId: this.currentTest.uploadIdAWS,
                };
                async.waterfall([
                    next => s3.abortMultipartUpload(paramsGcp,
                        err => next(err)),
                    next => s3.abortMultipartUpload(paramsMem,
                        err => next(err)),
                    next => s3.abortMultipartUpload(paramsAWS,
                        err => next(err)),
                ], done);
            });

            it('should copy small part from GCP to MPU with GCP location',
            function itFn(done) {
                const params = {
                    Bucket: bucket,
                    CopySource:
                        `${bucket}/${this.test.keyNameNormalGcp}`,
                    Key: this.test.mpuKeyNameGcp,
                    PartNumber: 1,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${normalMD5}"`);
                        next(err);
                    }),
                    next => {
                        const infos = {
                            bucketName: bucket,
                            keyName: this.test.mpuKeyNameGcp,
                            uploadId: this.test.uploadId,
                            md5: normalMD5,
                            totalSize: normalBodySize,
                        };
                        assertCopyPart(infos, next);
                    },
                ], done);
            });

            it('should copy small part from GCP with bucketMatch=false to ' +
            'MPU with GCP location',
            function itFn(done) {
                const params = {
                    Bucket: bucket,
                    CopySource:
                        `${bucket}/${this.test.keyNameNormalGcpMismatch}`,
                    Key: this.test.mpuKeyNameGcp,
                    PartNumber: 1,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${normalMD5}"`);
                        next(err);
                    }),
                    next => {
                        const infos = {
                            bucketName: bucket,
                            keyName: this.test.mpuKeyNameGcp,
                            uploadId: this.test.uploadId,
                            md5: normalMD5,
                            totalSize: normalBodySize,
                        };
                        assertCopyPart(infos, next);
                    },
                ], done);
            });

            it('should copy 5 Mb part from GCP to MPU with GCP location',
            function itFn(done) {
                const params = {
                    Bucket: bucket,
                    CopySource:
                        `${bucket}/${this.test.keyNameFiveMbGcp}`,
                    Key: this.test.mpuKeyNameGcp,
                    PartNumber: 1,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
                        next(err);
                    }),
                    next => {
                        const infos = {
                            bucketName: bucket,
                            keyName: this.test.mpuKeyNameGcp,
                            uploadId: this.test.uploadId,
                            md5: fiveMbMD5,
                            totalSize: fiveMB,
                        };
                        assertCopyPart(infos, next);
                    },
                ], done);
            });

            it('should copy part from GCP to MPU with memory location',
            function itFn(done) {
                const params = {
                    Bucket: memBucketName,
                    CopySource:
                        `${bucket}/${this.test.keyNameNormalGcp}`,
                    Key: this.test.mpuKeyNameMem,
                    PartNumber: 1,
                    UploadId: this.test.uploadIdMem,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${normalMD5}"`);
                        next(err);
                    }),
                    next => {
                        s3.listParts({
                            Bucket: memBucketName,
                            Key: this.test.mpuKeyNameMem,
                            UploadId: this.test.uploadIdMem,
                        }, (err, res) => {
                            assert.ifError(err,
                                'listParts: Expected success,' +
                                ` got error: ${err}`);
                            const resultCopy =
                                JSON.parse(JSON.stringify(result));
                            resultCopy.Bucket = memBucketName;
                            resultCopy.Key = this.test.mpuKeyNameMem;
                            resultCopy.UploadId = this.test.uploadIdMem;
                            resultCopy.Parts =
                                [{ PartNumber: 1,
                                   LastModified: res.Parts[0].LastModified,
                                   ETag: `"${normalMD5}"`,
                                   Size: normalBodySize }];
                            assert.deepStrictEqual(res, resultCopy);
                            next();
                        });
                    },
                ], done);
            });

            it('should copy part from GCP to MPU with AWS location',
            function itFn(done) {
                const params = {
                    Bucket: memBucketName,
                    CopySource:
                        `${bucket}/${this.test.keyNameNormalGcp}`,
                    Key: this.test.mpuKeyNameAWS,
                    PartNumber: 1,
                    UploadId: this.test.uploadIdAWS,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${normalMD5}"`);
                        next(err);
                    }),
                    next => {
                        const awsBucket =
                            config.locationConstraints[awsLocation]
                            .details.bucketName;
                        awsS3.listParts({
                            Bucket: awsBucket,
                            Key: this.test.mpuKeyNameAWS,
                            UploadId: this.test.uploadIdAWS,
                        }, (err, res) => {
                            assert.ifError(err,
                                'listParts: Expected success,' +
                                ` got error: ${err}`);
                            assert.strictEqual(res.Bucket, awsBucket);
                            assert.strictEqual(res.Key,
                                this.test.mpuKeyNameAWS);
                            assert.strictEqual(res.UploadId,
                                this.test.uploadIdAWS);
                            assert.strictEqual(res.Parts.length, 1);
                            assert.strictEqual(res.Parts[0].PartNumber, 1);
                            assert.strictEqual(res.Parts[0].ETag,
                                `"${normalMD5}"`);
                            assert.strictEqual(res.Parts[0].Size,
                                normalBodySize);
                            next();
                        });
                    },
                ], done);
            });

            it('should copy part from GCP object with range to MPU ' +
            'with AWS location', function itFn(done) {
                const params = {
                    Bucket: memBucketName,
                    CopySource:
                        `${bucket}/${this.test.keyNameNormalGcp}`,
                    Key: this.test.mpuKeyNameAWS,
                    CopySourceRange: 'bytes=0-5',
                    PartNumber: 1,
                    UploadId: this.test.uploadIdAWS,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${sixBytesMD5}"`);
                        next(err);
                    }),
                    next => {
                        const awsBucket =
                            config.locationConstraints[awsLocation]
                            .details.bucketName;
                        awsS3.listParts({
                            Bucket: awsBucket,
                            Key: this.test.mpuKeyNameAWS,
                            UploadId: this.test.uploadIdAWS,
                        }, (err, res) => {
                            assert.ifError(err,
                                'listParts: Expected success,' +
                                ` got error: ${err}`);
                            assert.strictEqual(res.Bucket, awsBucket);
                            assert.strictEqual(res.Key,
                                this.test.mpuKeyNameAWS);
                            assert.strictEqual(res.UploadId,
                                this.test.uploadIdAWS);
                            assert.strictEqual(res.Parts.length, 1);
                            assert.strictEqual(res.Parts[0].PartNumber, 1);
                            assert.strictEqual(res.Parts[0].ETag,
                                `"${sixBytesMD5}"`);
                            assert.strictEqual(res.Parts[0].Size, 6);
                            next();
                        });
                    },
                ], done);
            });

            it('should copy 5 Mb part from a memory location to MPU with ' +
            'GCP location',
            function itFn(done) {
                const params = {
                    Bucket: bucket,
                    CopySource:
                        `${bucket}/${this.test.keyNameFiveMbMem}`,
                    Key: this.test.mpuKeyNameGcp,
                    PartNumber: 1,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(params, (err, res) => {
                        assert.ifError(err, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
                        next(err);
                    }),
                    next => {
                        const infos = {
                            bucketName: bucket,
                            keyName: this.test.mpuKeyNameGcp,
                            uploadId: this.test.uploadId,
                            md5: fiveMbMD5,
                            totalSize: fiveMB,
                        };
                        assertCopyPart(infos, next);
                    },
                ], done);
            });

            describe('with large object', () => {
                beforeEach(function beF(done) {
                    this.currentTest.keyNameLargeGcp =
                        `largegcp${uniqName(keyObjectGcp)}`;
                    s3.putObject({
                        Bucket: bucket,
                        Key: this.currentTest.keyNameLargeGcp,
                        Body: oneHundredAndFiveMbBody,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, done);
                });

                it('should copy 105 Mb part from GCP to MPU with GCP location',
                function itFn(done) {
                    const params = {
                        Bucket: bucket,
                        CopySource:
                            `${bucket}/${this.test.keyNameLargeGcp}`,
                        Key: this.test.mpuKeyNameGcp,
                        PartNumber: 1,
                        UploadId: this.test.uploadId,
                    };
                    async.waterfall([
                        next => s3.uploadPartCopy(params, (err, res) => {
                            assert.ifError(err, 'uploadPartCopy: Expected ' +
                                `success, got error: ${err}`);
                            assert.strictEqual(res.ETag,
                                `"${oneHundredAndFiveMbMD5}"`);
                            next(err);
                        }),
                        next => {
                            const infos = {
                                bucketName: bucket,
                                keyName: this.test.mpuKeyNameGcp,
                                uploadId: this.test.uploadId,
                                md5: oneHundredAndFiveMbMD5,
                                totalSize: oneHundredAndFiveMB,
                            };
                            assertCopyPart(infos, next);
                        },
                    ], done);
                });
            });

            describe('with existing part', () => {
                beforeEach(function beF(done) {
                    const params = {
                        Body: oneKbBody,
                        Bucket: bucket,
                        Key: this.currentTest.mpuKeyNameGcp,
                        PartNumber: 1,
                        UploadId: this.currentTest.uploadId,
                    };
                    s3.uploadPart(params, done);
                });
                it('should copy part from GCP to GCP with existing ' +
                'parts', function itFn(done) {
                    const resultCopy = JSON.parse(JSON.stringify(result));
                    const params = {
                        Bucket: bucket,
                        CopySource:
                            `${bucket}/${this.test.keyNameNormalGcp}`,
                        Key: this.test.mpuKeyNameGcp,
                        PartNumber: 2,
                        UploadId: this.test.uploadId,
                    };
                    async.waterfall([
                        next => s3.uploadPartCopy(params, (err, res) => {
                            assert.ifError(err,
                                'uploadPartCopy: Expected success, got ' +
                                `error: ${err}`);
                            assert.strictEqual(res.ETag, `"${normalMD5}"`);
                            next(err);
                        }),
                        next => s3.listParts({
                            Bucket: bucket,
                            Key: this.test.mpuKeyNameGcp,
                            UploadId: this.test.uploadId,
                        }, (err, res) => {
                            assert.ifError(err, 'listParts: Expected ' +
                                `success, got error: ${err}`);
                            resultCopy.Bucket = bucket;
                            resultCopy.Key = this.test.mpuKeyNameGcp;
                            resultCopy.UploadId = this.test.uploadId;
                            resultCopy.Parts =
                                [{ PartNumber: 1,
                                   LastModified: res.Parts[0].LastModified,
                                   ETag: `"${oneKbMD5}"`,
                                   Size: oneKb },
                                 { PartNumber: 2,
                                   LastModified: res.Parts[1].LastModified,
                                   ETag: `"${normalMD5}"`,
                                   Size: 11 },
                                ];
                            assert.deepStrictEqual(res, resultCopy);
                            next();
                        }),
                        next => gcpClient.listParts({
                            Bucket: gcpBucketMPU,
                            Key: this.test.mpuKeyNameGcp,
                            UploadId: this.test.uploadId,
                        }, (err, res) => {
                            assert.ifError(err, 'GCP listParts: Expected ' +
                                `success, got error: ${err}`);
                            assert.strictEqual(
                                res.Contents[0].ETag, `"${oneKbMD5}"`);
                            assert.strictEqual(
                                res.Contents[1].ETag, `"${normalMD5}"`);
                            next();
                        }),
                    ], done);
                });
            });
        });
    });
});

skipIfNotMultipleOrIfProxy('Put Copy Part to GCP with complete MPU',
function describeF() {
    this.timeout(800000);
    withV4(sigCfg => {
        beforeEach(() => {
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });

        afterEach(() => {
            process.stdout.write('Emptying bucket\n');
            return bucketUtil.empty(bucket)
            .then(() => {
                process.stdout.write('Deleting bucket\n');
                return bucketUtil.deleteOne(bucket);
            })
            .then(() => {
                process.stdout.write('Emptying bucket awsBucketName\n');
                return bucketUtil.empty(awsBucketName);
            })
            .then(() => {
                process.stdout.write('Deleting bucket awsBucketName\n');
                return bucketUtil.deleteOne(awsBucketName);
            })
            .catch(err => {
                process.stdout.write(`Error in afterEach: ${err}\n`);
                throw err;
            });
        });
        describe('Basic test with complete MPU from AWS to GCP location: ',
        () => {
            beforeEach(function beF(done) {
                this.currentTest.keyNameAws =
                    `onehundredandfivembgcp${uniqName(keyObjectAWS)}`;
                this.currentTest.mpuKeyNameGcp =
                    `mpukeyname${uniqName(keyObjectGcp)}`;

                const createMpuParams = {
                    Bucket: bucket,
                    Key: this.currentTest.mpuKeyNameGcp,
                    Metadata: { 'scal-location-constraint': gcpLocation },
                };
                async.waterfall([
                    next => s3.createBucket({ Bucket: awsBucketName },
                        err => next(err)),
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => s3.putObject({
                        Bucket: awsBucketName,
                        Key: this.currentTest.keyNameAws,
                        Body: fiveMbBody,
                        Metadata: { 'scal-location-constraint': awsLocation },
                    }, err => next(err)),
                    next => s3.createMultipartUpload(createMpuParams,
                        (err, res) => {
                            assert.equal(err, null, 'createMultipartUpload: ' +
                                `Expected success, got error: ${err}`);
                            this.currentTest.uploadId = res.UploadId;
                            next();
                        }),
                ], done);
            });

            it('should copy two 5 MB parts from AWS to MPU with GCP ' +
            'location', function itFn(done) {
                const uploadParams = {
                    Bucket: bucket,
                    CopySource:
                        `${awsBucketName}/` +
                        `${this.test.keyNameAws}`,
                    Key: this.test.mpuKeyNameGcp,
                    PartNumber: 1,
                    UploadId: this.test.uploadId,
                };
                const uploadParams2 = {
                    Bucket: bucket,
                    CopySource:
                        `${awsBucketName}/` +
                        `${this.test.keyNameAws}`,
                    Key: this.test.mpuKeyNameGcp,
                    PartNumber: 2,
                    UploadId: this.test.uploadId,
                };
                async.waterfall([
                    next => s3.uploadPartCopy(uploadParams, (err, res) => {
                        assert.equal(err, null, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
                        next(err);
                    }),
                    next => s3.uploadPartCopy(uploadParams2, (err, res) => {
                        assert.equal(err, null, 'uploadPartCopy: Expected ' +
                            `success, got error: ${err}`);
                        assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
                        next(err);
                    }),
                    next => {
                        const completeMpuParams = {
                            Bucket: bucket,
                            Key: this.test.mpuKeyNameGcp,
                            MultipartUpload: {
                                Parts: [
                                    {
                                        ETag: `"${fiveMbMD5}"`,
                                        PartNumber: 1,
                                    },
                                    {
                                        ETag: `"${fiveMbMD5}"`,
                                        PartNumber: 2,
                                    },
                                ],
                            },
                            UploadId: this.test.uploadId,
                        };
                        s3.completeMultipartUpload(completeMpuParams,
                            (err, res) => {
                                assert.equal(err, null,
                                    'completeMultipartUpload:' +
                                    ` Expected success, got error: ${err}`);
                                assert.strictEqual(res.Bucket, bucket);
                                assert.strictEqual(res.Key,
                                    this.test.mpuKeyNameGcp);
                                next();
                            });
                    },
                ], done);
            });
        });
    });
});
@ -6,6 +6,8 @@ const AWS = require('aws-sdk');
|
||||||
const async = require('async');
|
const async = require('async');
|
||||||
const azure = require('azure-storage');
|
const azure = require('azure-storage');
|
||||||
|
|
||||||
|
const { GCP } = require('../../../../../lib/data/external/GCP');
|
||||||
|
|
||||||
const { getRealAwsConfig } = require('../support/awsConfig');
|
const { getRealAwsConfig } = require('../support/awsConfig');
|
||||||
const { config } = require('../../../../../lib/Config');
|
const { config } = require('../../../../../lib/Config');
|
||||||
const authdata = require('../../../../../conf/authdata.json');
|
const authdata = require('../../../../../conf/authdata.json');
|
||||||
|
@ -20,6 +22,9 @@ const azureLocation = 'azurebackend';
|
||||||
const azureLocation2 = 'azurebackend2';
|
const azureLocation2 = 'azurebackend2';
|
||||||
const azureLocationMismatch = 'azurebackendmismatch';
|
const azureLocationMismatch = 'azurebackendmismatch';
|
||||||
const azureLocationNonExistContainer = 'azurenonexistcontainer';
|
const azureLocationNonExistContainer = 'azurenonexistcontainer';
|
||||||
|
const gcpLocation = 'gcpbackend';
|
||||||
|
const gcpLocation2 = 'gcpbackend2';
|
||||||
|
const gcpLocationMismatch = 'gcpbackendmismatch';
|
||||||
const versioningEnabled = { Status: 'Enabled' };
|
const versioningEnabled = { Status: 'Enabled' };
|
||||||
const versioningSuspended = { Status: 'Suspended' };
|
const versioningSuspended = { Status: 'Suspended' };
|
||||||
const awsFirstTimeout = 10000;
|
const awsFirstTimeout = 10000;
|
||||||
|
@ -28,11 +33,24 @@ let describeSkipIfNotMultiple = describe.skip;
|
||||||
let awsS3;
|
let awsS3;
|
||||||
let awsBucket;
|
let awsBucket;
|
||||||
|
|
||||||
|
let gcpClient;
|
||||||
|
let gcpBucket;
|
||||||
|
let gcpBucketMPU;
|
||||||
|
let gcpBucketOverflow;
|
||||||
|
|
||||||
if (config.backends.data === 'multiple') {
|
if (config.backends.data === 'multiple') {
|
||||||
describeSkipIfNotMultiple = describe;
|
describeSkipIfNotMultiple = describe;
|
||||||
const awsConfig = getRealAwsConfig(awsLocation);
|
const awsConfig = getRealAwsConfig(awsLocation);
|
||||||
awsS3 = new AWS.S3(awsConfig);
|
awsS3 = new AWS.S3(awsConfig);
|
||||||
awsBucket = config.locationConstraints[awsLocation].details.bucketName;
|
awsBucket = config.locationConstraints[awsLocation].details.bucketName;
|
||||||
|
|
||||||
|
const gcpConfig = getRealAwsConfig(gcpLocation);
|
||||||
|
gcpClient = new GCP(gcpConfig);
|
||||||
|
gcpBucket = config.locationConstraints[gcpLocation].details.bucketName;
|
||||||
|
gcpBucketMPU =
|
||||||
|
config.locationConstraints[gcpLocation].details.mpuBucketName;
|
||||||
|
gcpBucketOverflow =
|
||||||
|
config.locationConstraints[gcpLocation].details.overflowBucketName;
|
||||||
}
|
}
|
||||||
|
|
||||||
function _assertErrorResult(err, expectedError, desc) {
|
function _assertErrorResult(err, expectedError, desc) {
|
||||||
|
@ -49,6 +67,10 @@ const utils = {
|
||||||
describeSkipIfNotMultiple,
|
describeSkipIfNotMultiple,
|
||||||
awsS3,
|
awsS3,
|
||||||
awsBucket,
|
awsBucket,
|
||||||
|
gcpClient,
|
||||||
|
gcpBucket,
|
||||||
|
gcpBucketMPU,
|
||||||
|
gcpBucketOverflow,
|
||||||
fileLocation,
|
fileLocation,
|
||||||
memLocation,
|
memLocation,
|
||||||
awsLocation,
|
awsLocation,
|
||||||
|
@ -59,6 +81,9 @@ const utils = {
|
||||||
azureLocation2,
|
azureLocation2,
|
||||||
azureLocationMismatch,
|
azureLocationMismatch,
|
||||||
azureLocationNonExistContainer,
|
azureLocationNonExistContainer,
|
||||||
|
gcpLocation,
|
||||||
|
gcpLocation2,
|
||||||
|
gcpLocationMismatch,
|
||||||
};
|
};
|
||||||
|
|
||||||
utils.getOwnerInfo = account => {
|
utils.getOwnerInfo = account => {
|
||||||
|
|
|
@ -17,15 +17,23 @@ function getAwsCredentials(profile, credFile) {
|
||||||
return new AWS.SharedIniFileCredentials({ profile, filename });
|
return new AWS.SharedIniFileCredentials({ profile, filename });
|
||||||
}
|
}
|
||||||
|
|
||||||
function getRealAwsConfig(awsLocation) {
|
function getRealAwsConfig(location) {
|
||||||
const { awsEndpoint, gcpEndpoint,
|
const { awsEndpoint, gcpEndpoint, jsonEndpoint,
|
||||||
credentialsProfile, credentials: locCredentials } =
|
credentialsProfile, credentials: locCredentials,
|
||||||
config.locationConstraints[awsLocation].details;
|
bucketName, mpuBucketName, overflowBucketName } =
|
||||||
|
config.locationConstraints[location].details;
|
||||||
const params = {
|
const params = {
|
||||||
endpoint: gcpEndpoint ?
|
endpoint: gcpEndpoint ?
|
||||||
`https://${gcpEndpoint}` : `https://${awsEndpoint}`,
|
`https://${gcpEndpoint}` : `https://${awsEndpoint}`,
|
||||||
signatureVersion: 'v4',
|
signatureVersion: 'v4',
|
||||||
};
|
};
|
||||||
|
if (config.locationConstraints[location].type === 'gcp') {
|
||||||
|
params.mainBucket = bucketName;
|
||||||
|
params.mpuBucket = mpuBucketName;
|
||||||
|
params.overflowBucket = overflowBucketName;
|
||||||
|
params.jsonEndpoint = `https://${jsonEndpoint}`;
|
||||||
|
params.authParams = config.getGcpServiceParams(location);
|
||||||
|
}
|
||||||
if (credentialsProfile) {
|
if (credentialsProfile) {
|
||||||
const credentials = getAwsCredentials(credentialsProfile,
|
const credentials = getAwsCredentials(credentialsProfile,
|
||||||
'/.aws/credentials');
|
'/.aws/credentials');
|
||||||
|
|
|
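Taken together with the utils changes above, the intended call pattern for the GCP suites looks like the sketch below. The `'gcpbackend'` location name and the `details` keys (`bucketName`, `mpuBucketName`, `overflowBucketName`, `gcpEndpoint`, `jsonEndpoint`) come from this diff; the concrete values in any real `locationConfig.json` are deployment-specific.

```js
const { GCP } = require('../../../../../lib/data/external/GCP');
const { getRealAwsConfig } = require('../support/awsConfig');

// For a location whose type is 'gcp', the returned params also carry
// mainBucket, mpuBucket, overflowBucket, jsonEndpoint and authParams,
// which the GCP wrapper consumes alongside the usual endpoint settings.
const config = getRealAwsConfig('gcpbackend');
const gcpClient = new GCP(config);
```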
@@ -0,0 +1,5 @@
This directory contains GCP API functional tests.

These tests verify that the GCP API implementation located in
`lib/data/external/GCP` behaves as intended: correct success responses,
correct error responses, and edge case handling.
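As a sketch of the pattern the suites below follow (helper names exactly as imported in those files; the bucket name and the headBucket call are illustrative), each test provisions a bucket out-of-band with `gcpRequestRetry`, exercises one call on the GCP wrapper, and asserts on the response:

```js
const assert = require('assert');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const config = getRealAwsConfig('gcpbackend');
const gcpClient = new GCP(config);

describe('GCP: example suite shape', () => {
    const bucket = `somebucket-${Date.now()}`;
    // create the bucket directly against GCP, bypassing the wrapper
    before(done => gcpRequestRetry({
        method: 'PUT',
        bucket,
        authCredentials: config.credentials,
    }, 0, done));
    after(done => gcpRequestRetry({
        method: 'DELETE',
        bucket,
        authCredentials: config.credentials,
    }, 0, done));

    it('should head the bucket', done => {
        gcpClient.headBucket({ Bucket: bucket }, err => {
            assert.equal(err, null, `Expected success, got ${err}`);
            done();
        });
    });
});
```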
@@ -0,0 +1,169 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');
const { listingHardLimit } = require('../../../../../../constants');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;
const smallSize = 20;
const bigSize = listingHardLimit + 1;
const config = getRealAwsConfig(credentialOne);
const gcpClient = new GCP(config);

function populateBucket(createdObjects, callback) {
    process.stdout.write(
        `Putting ${createdObjects.length} objects into bucket\n`);
    async.mapLimit(createdObjects, 10,
        (object, moveOn) => {
            makeGcpRequest({
                method: 'PUT',
                bucket: bucketName,
                objectKey: object,
                authCredentials: config.credentials,
            }, err => moveOn(err));
        }, err => {
            if (err) {
                process.stdout
                    .write(`err putting objects ${err.code}`);
            }
            return callback(err);
        });
}

function removeObjects(createdObjects, callback) {
    process.stdout.write(
        `Deleting ${createdObjects.length} objects from bucket\n`);
    async.mapLimit(createdObjects, 10,
        (object, moveOn) => {
            makeGcpRequest({
                method: 'DELETE',
                bucket: bucketName,
                objectKey: object,
                authCredentials: config.credentials,
            }, err => moveOn(err));
        }, err => {
            if (err) {
                process.stdout
                    .write(`err deleting objects ${err.code}`);
            }
            return callback(err);
        });
}

describe('GCP: GET Bucket', function testSuite() {
    this.timeout(180000);

    before(done => {
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    describe('without existing bucket', () => {
        it('should return 404 and NoSuchBucket', done => {
            const badBucketName = `nonexistingbucket-${Date.now()}`;
            gcpClient.getBucket({
                Bucket: badBucketName,
            }, err => {
                assert(err);
                assert.strictEqual(err.statusCode, 404);
                assert.strictEqual(err.code, 'NoSuchBucket');
                return done();
            });
        });
    });

    describe('with existing bucket', () => {
        describe('with less than listingHardLimit number of objects', () => {
            const createdObjects = Array.from(
                Array(smallSize).keys()).map(i => `someObject-${i}`);

            before(done => populateBucket(createdObjects, done));

            after(done => removeObjects(createdObjects, done));

            it(`should list all ${smallSize} created objects`, done => {
                gcpClient.listObjects({
                    Bucket: bucketName,
                }, (err, res) => {
                    assert.equal(err, null, `Expected success, but got ${err}`);
                    assert.strictEqual(res.Contents.length, smallSize);
                    return done();
                });
            });

            describe('with MaxKeys at 10', () => {
                it('should list MaxKeys number of objects', done => {
                    gcpClient.listObjects({
                        Bucket: bucketName,
                        MaxKeys: 10,
                    }, (err, res) => {
                        assert.equal(err, null,
                            `Expected success, but got ${err}`);
                        assert.strictEqual(res.Contents.length, 10);
                        return done();
                    });
                });
            });
        });

        describe('with more than listingHardLimit number of objects', () => {
            const createdObjects = Array.from(
                Array(bigSize).keys()).map(i => `someObject-${i}`);

            before(done => populateBucket(createdObjects, done));

            after(done => removeObjects(createdObjects, done));

            it('should list at most 1000 of the objects created', done => {
                gcpClient.listObjects({
                    Bucket: bucketName,
                }, (err, res) => {
                    assert.equal(err, null, `Expected success, but got ${err}`);
                    assert.strictEqual(res.Contents.length,
                        listingHardLimit);
                    return done();
                });
            });

            describe('with MaxKeys at 1001', () => {
                it('should list at most 1000, ignoring MaxKeys', done => {
                    gcpClient.listObjects({
                        Bucket: bucketName,
                        MaxKeys: 1001,
                    }, (err, res) => {
                        assert.equal(err, null,
                            `Expected success, but got ${err}`);
                        assert.strictEqual(res.Contents.length,
                            listingHardLimit);
                        return done();
                    });
                });
            });
        });
    });
});
@@ -0,0 +1,103 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const verEnabledObj = { Status: 'Enabled' };
const verDisabledObj = { Status: 'Suspended' };
const xmlEnable =
    '<?xml version="1.0" encoding="UTF-8"?>' +
    '<VersioningConfiguration>' +
    '<Status>Enabled</Status>' +
    '</VersioningConfiguration>';
const xmlDisable =
    '<?xml version="1.0" encoding="UTF-8"?>' +
    '<VersioningConfiguration>' +
    '<Status>Suspended</Status>' +
    '</VersioningConfiguration>';

describe('GCP: GET Bucket Versioning', () => {
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    beforeEach(function beforeFn(done) {
        this.currentTest.bucketName = `somebucket-${Date.now()}`;
        gcpRequestRetry({
            method: 'PUT',
            bucket: this.currentTest.bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    afterEach(function afterFn(done) {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: this.currentTest.bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    it('should verify bucket versioning is enabled', function testFn(done) {
        return async.waterfall([
            next => makeGcpRequest({
                method: 'PUT',
                bucket: this.test.bucketName,
                authCredentials: config.credentials,
                queryObj: { versioning: {} },
                requestBody: xmlEnable,
            }, err => {
                if (err) {
                    process.stdout.write(`err in setting versioning ${err}`);
                }
                return next(err);
            }),
            next => gcpClient.getBucketVersioning({
                Bucket: this.test.bucketName,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got err ${err}`);
                assert.deepStrictEqual(res, verEnabledObj);
                return next();
            }),
        ], err => done(err));
    });

    it('should verify bucket versioning is disabled', function testFn(done) {
        return async.waterfall([
            next => makeGcpRequest({
                method: 'PUT',
                bucket: this.test.bucketName,
                authCredentials: config.credentials,
                queryObj: { versioning: {} },
                requestBody: xmlDisable,
            }, err => {
                if (err) {
                    process.stdout.write(`err in setting versioning ${err}`);
                }
                return next(err);
            }),
            next => gcpClient.getBucketVersioning({
                Bucket: this.test.bucketName,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got err ${err}`);
                assert.deepStrictEqual(res, verDisabledObj);
                return next();
            }),
        ], err => done(err));
    });
});
@@ -0,0 +1,74 @@
const assert = require('assert');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';

describe('GCP: HEAD Bucket', () => {
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    describe('without existing bucket', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.bucketName = `somebucket-${Date.now()}`;
            return done();
        });

        it('should return 404', function testFn(done) {
            gcpClient.headBucket({
                Bucket: this.test.bucketName,
            }, err => {
                assert(err);
                assert.strictEqual(err.statusCode, 404);
                return done();
            });
        });
    });

    describe('with existing bucket', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.bucketName = `somebucket-${Date.now()}`;
            process.stdout
                .write(`Creating test bucket ${this.currentTest.bucketName}\n`);
            gcpRequestRetry({
                method: 'PUT',
                bucket: this.currentTest.bucketName,
                authCredentials: config.credentials,
            }, 0, (err, res) => {
                if (err) {
                    return done(err);
                }
                this.currentTest.bucketObj = {
                    MetaVersionId: res.headers['x-goog-metageneration'],
                };
                return done();
            });
        });

        afterEach(function afterFn(done) {
            gcpRequestRetry({
                method: 'DELETE',
                bucket: this.currentTest.bucketName,
                authCredentials: config.credentials,
            }, 0, err => {
                if (err) {
                    process.stdout
                        .write(`err deleting bucket: ${err.code}\n`);
                }
                return done(err);
            });
        });

        it('should get bucket information', function testFn(done) {
            gcpClient.headBucket({
                Bucket: this.test.bucketName,
            }, (err, res) => {
                assert.equal(err, null, `Expected success, but got ${err}`);
                assert.deepStrictEqual(this.test.bucketObj, res);
                return done();
            });
        });
    });
});
@@ -0,0 +1,109 @@
const assert = require('assert');
const async = require('async');
const xml2js = require('xml2js');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const verEnabledObj = { VersioningConfiguration: { Status: ['Enabled'] } };
const verDisabledObj = { VersioningConfiguration: { Status: ['Suspended'] } };

function resParseAndAssert(xml, compareObj, callback) {
    return xml2js.parseString(xml, (err, res) => {
        if (err) {
            process.stdout.write(`err in parsing response ${err}\n`);
            return callback(err);
        }
        assert.deepStrictEqual(res, compareObj);
        return callback();
    });
}

describe('GCP: PUT Bucket Versioning', () => {
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    beforeEach(function beforeFn(done) {
        this.currentTest.bucketName = `somebucket-${Date.now()}`;
        gcpRequestRetry({
            method: 'PUT',
            bucket: this.currentTest.bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    afterEach(function afterFn(done) {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: this.currentTest.bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    it('should enable bucket versioning', function testFn(done) {
        return async.waterfall([
            next => gcpClient.putBucketVersioning({
                Bucket: this.test.bucketName,
                VersioningConfiguration: {
                    Status: 'Enabled',
                },
            }, err => {
                assert.equal(err, null,
                    `Expected success, but got err ${err}`);
                return next();
            }),
            next => makeGcpRequest({
                method: 'GET',
                bucket: this.test.bucketName,
                authCredentials: config.credentials,
                queryObj: { versioning: {} },
            }, (err, res) => {
                if (err) {
                    process.stdout.write(`err in retrieving bucket ${err}`);
                    return next(err);
                }
                return resParseAndAssert(res.body, verEnabledObj, next);
            }),
        ], err => done(err));
    });

    it('should disable bucket versioning', function testFn(done) {
        return async.waterfall([
            next => gcpClient.putBucketVersioning({
                Bucket: this.test.bucketName,
                VersioningConfiguration: {
                    Status: 'Suspended',
                },
            }, err => {
                assert.equal(err, null,
                    `Expected success, but got err ${err}`);
                return next();
            }),
            next => makeGcpRequest({
                method: 'GET',
                bucket: this.test.bucketName,
                authCredentials: config.credentials,
                queryObj: { versioning: {} },
            }, (err, res) => {
                if (err) {
                    process.stdout.write(`err in retrieving bucket ${err}`);
                    return next(err);
                }
                return resParseAndAssert(res.body, verDisabledObj, next);
            }),
        ], err => done(err));
    });
});
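These versioning tests compare the raw GET `?versioning` response body against `verEnabledObj` and `verDisabledObj` after running it through xml2js. The array-wrapped `Status: ['Enabled']` shape comes from xml2js's default explicit-array behavior; a minimal, standalone sketch of what `parseString` produces for such a body (illustrative only, not part of the test suite):

const xml2js = require('xml2js');

const body = '<VersioningConfiguration><Status>Enabled</Status>' +
    '</VersioningConfiguration>';
xml2js.parseString(body, (err, res) => {
    // element text is wrapped in an array by default, which is why the
    // expected objects above use Status: ['Enabled'] / ['Suspended']
    console.log(JSON.stringify(res));
    // {"VersioningConfiguration":{"Status":["Enabled"]}}
});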
@@ -0,0 +1,202 @@
const assert = require('assert');
const async = require('async');
const { GCP, GcpUtils } = require('../../../../../../lib/data/external/GCP');
const { gcpRequestRetry, setBucketClass, gcpMpuSetup } =
    require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketNames = {
    main: {
        Name: `somebucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
    mpu: {
        Name: `mpubucket-${Date.now()}`,
        Type: 'REGIONAL',
    },
    overflow: {
        Name: `overflowbucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
};
const numParts = 10000;
const partSize = 10;

const smallMD5 = '583c466f3f31d97b361adc60caea72f5-1';
const bigMD5 = '0bd3785d5d1e3c90988917837bbf57fc-10000';

function gcpMpuSetupWrapper(params, callback) {
    gcpMpuSetup(params, (err, result) => {
        assert.ifError(err, `Unable to setup MPU test, error ${err}`);
        const { uploadId, etagList } = result;
        this.currentTest.uploadId = uploadId;
        this.currentTest.etagList = etagList;
        return callback();
    });
}

describe('GCP: Complete MPU', function testSuite() {
    this.timeout(600000);
    let config;
    let gcpClient;

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        async.eachSeries(bucketNames,
            (bucket, next) => gcpRequestRetry({
                method: 'PUT',
                bucket: bucket.Name,
                authCredentials: config.credentials,
                requestBody: setBucketClass(bucket.Type),
            }, 0, err => {
                if (err) {
                    process.stdout.write(`err in creating bucket ${err}\n`);
                }
                return next(err);
            }),
            done);
    });

    after(done => {
        async.eachSeries(bucketNames,
            (bucket, next) => gcpClient.listObjects({
                Bucket: bucket.Name,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got error ${err}`);
                async.map(res.Contents, (object, moveOn) => {
                    const deleteParams = {
                        Bucket: bucket.Name,
                        Key: object.Key,
                    };
                    gcpClient.deleteObject(
                        deleteParams, err => moveOn(err));
                }, err => {
                    assert.equal(err, null,
                        `Expected success, but got error ${err}`);
                    gcpRequestRetry({
                        method: 'DELETE',
                        bucket: bucket.Name,
                        authCredentials: config.credentials,
                    }, 0, err => {
                        if (err) {
                            process.stdout.write(
                                `err in deleting bucket ${err}\n`);
                        }
                        return next(err);
                    });
                });
            }),
            done);
    });

    describe('when MPU has 0 parts', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            gcpMpuSetupWrapper.call(this, {
                gcpClient,
                bucketNames,
                key: this.currentTest.key,
                partCount: 0, partSize,
            }, done);
        });

        it('should return error if 0 parts are given in MPU complete',
        function testFn(done) {
            const params = {
                Bucket: bucketNames.main.Name,
                MPU: bucketNames.mpu.Name,
                Overflow: bucketNames.overflow.Name,
                Key: this.test.key,
                UploadId: this.test.uploadId,
                MultipartUpload: { Parts: [] },
            };
            gcpClient.completeMultipartUpload(params, err => {
                assert(err);
                assert.strictEqual(err.code, 400);
                return done();
            });
        });
    });

    describe('when MPU has 1 uploaded part', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            gcpMpuSetupWrapper.call(this, {
                gcpClient,
                bucketNames,
                key: this.currentTest.key,
                partCount: 1, partSize,
            }, done);
        });

        it('should successfully complete MPU',
        function testFn(done) {
            const parts = GcpUtils.createMpuList({
                Key: this.test.key,
                UploadId: this.test.uploadId,
            }, 'parts', 1).map(item => {
                Object.assign(item, {
                    ETag: this.test.etagList[item.PartNumber - 1],
                });
                return item;
            });
            const params = {
                Bucket: bucketNames.main.Name,
                MPU: bucketNames.mpu.Name,
                Overflow: bucketNames.overflow.Name,
                Key: this.test.key,
                UploadId: this.test.uploadId,
                MultipartUpload: { Parts: parts },
            };
            gcpClient.completeMultipartUpload(params, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got error ${err}`);
                assert.strictEqual(res.ETag, `"${smallMD5}"`);
                return done();
            });
        });
    });

    describe('when MPU has 10k uploaded parts', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            gcpMpuSetupWrapper.call(this, {
                gcpClient,
                bucketNames,
                key: this.currentTest.key,
                partCount: numParts, partSize,
            }, done);
        });

        it('should successfully complete MPU',
        function testFn(done) {
            const parts = GcpUtils.createMpuList({
                Key: this.test.key,
                UploadId: this.test.uploadId,
            }, 'parts', numParts).map(item => {
                Object.assign(item, {
                    ETag: this.test.etagList[item.PartNumber - 1],
                });
                return item;
            });
            const params = {
                Bucket: bucketNames.main.Name,
                MPU: bucketNames.mpu.Name,
                Overflow: bucketNames.overflow.Name,
                Key: this.test.key,
                UploadId: this.test.uploadId,
                MultipartUpload: { Parts: parts },
            };
            gcpClient.completeMultipartUpload(params, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got error ${err}`);
                assert.strictEqual(res.ETag, `"${bigMD5}"`);
                return done();
            });
        });
    });
});
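The expected ETags above (`...72f5-1`, `...57fc-10000`) follow the S3 multipart convention: the hex MD5 of the concatenated binary MD5 digests of each part, suffixed with the part count. A minimal sketch of that computation, assuming zero-filled parts as in `gcpMpuSetup`; `computeMpuETag` is a hypothetical helper, not part of the test suite:

const crypto = require('crypto');

// S3-style multipart ETag: md5(md5(part1) + ... + md5(partN)) + '-N'
function computeMpuETag(partBuffers) {
    const digests = Buffer.concat(partBuffers.map(part =>
        crypto.createHash('md5').update(part).digest()));
    const finalMD5 = crypto.createHash('md5').update(digests).digest('hex');
    return `${finalMD5}-${partBuffers.length}`;
}

// e.g. the 10k-part case above uploads parts of `partSize` zero bytes:
// computeMpuETag(Array.from({ length: 10000 }, () => Buffer.alloc(10)))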
@@ -0,0 +1,180 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;

describe('GCP: COPY Object', function testSuite() {
    this.timeout(180000);
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    before(done => {
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    describe('without existing object in bucket', () => {
        it('should return 404 and \'Not Found\'', done => {
            const missingObject = `nonexistingkey-${Date.now()}`;
            const someKey = `somekey-${Date.now()}`;
            gcpClient.copyObject({
                Bucket: bucketName,
                Key: someKey,
                CopySource: `/${bucketName}/${missingObject}`,
            }, err => {
                assert(err);
                assert.strictEqual(err.code, 404);
                assert.strictEqual(err.message, 'Not Found');
                return done();
            });
        });
    });

    describe('with existing object in bucket', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            this.currentTest.copyKey = `copykey-${Date.now()}`;
            this.currentTest.initValue = `${Date.now()}`;
            makeGcpRequest({
                method: 'PUT',
                bucket: bucketName,
                objectKey: this.currentTest.copyKey,
                headers: {
                    'x-goog-meta-value': this.currentTest.initValue,
                },
                authCredentials: config.credentials,
            }, (err, res) => {
                if (err) {
                    process.stdout.write(`err in creating object ${err}\n`);
                }
                this.currentTest.contentHash = res.headers['x-goog-hash'];
                return done(err);
            });
        });

        afterEach(function afterFn(done) {
            async.parallel([
                next => makeGcpRequest({
                    method: 'DELETE',
                    bucket: bucketName,
                    objectKey: this.currentTest.key,
                    authCredentials: config.credentials,
                }, err => {
                    if (err) {
                        process.stdout.write(`err in deleting object ${err}\n`);
                    }
                    return next(err);
                }),
                next => makeGcpRequest({
                    method: 'DELETE',
                    bucket: bucketName,
                    objectKey: this.currentTest.copyKey,
                    authCredentials: config.credentials,
                }, err => {
                    if (err) {
                        process.stdout
                            .write(`err in deleting copy object ${err}\n`);
                    }
                    return next(err);
                }),
            ], done);
        });

        it('should successfully copy with REPLACE directive',
        function testFn(done) {
            const newValue = `${Date.now()}`;
            async.waterfall([
                next => gcpClient.copyObject({
                    Bucket: bucketName,
                    Key: this.test.key,
                    CopySource: `/${bucketName}/${this.test.copyKey}`,
                    MetadataDirective: 'REPLACE',
                    Metadata: {
                        value: newValue,
                    },
                }, err => {
                    assert.equal(err, null,
                        `Expected success, but got error ${err}`);
                    return next();
                }),
                next => makeGcpRequest({
                    method: 'HEAD',
                    bucket: bucketName,
                    objectKey: this.test.key,
                    authCredentials: config.credentials,
                }, (err, res) => {
                    if (err) {
                        process.stdout
                            .write(`err in retrieving object ${err}\n`);
                        return next(err);
                    }
                    assert.strictEqual(this.test.contentHash,
                        res.headers['x-goog-hash']);
                    assert.notStrictEqual(res.headers['x-goog-meta-value'],
                        this.test.initValue);
                    return next();
                }),
            ], done);
        });

        it('should successfully copy with COPY directive',
        function testFn(done) {
            async.waterfall([
                next => gcpClient.copyObject({
                    Bucket: bucketName,
                    Key: this.test.key,
                    CopySource: `/${bucketName}/${this.test.copyKey}`,
                    MetadataDirective: 'COPY',
                }, err => {
                    assert.equal(err, null,
                        `Expected success, but got error ${err}`);
                    return next();
                }),
                next => makeGcpRequest({
                    method: 'HEAD',
                    bucket: bucketName,
                    objectKey: this.test.key,
                    authCredentials: config.credentials,
                }, (err, res) => {
                    if (err) {
                        process.stdout
                            .write(`err in retrieving object ${err}\n`);
                        return next(err);
                    }
                    assert.strictEqual(this.test.contentHash,
                        res.headers['x-goog-hash']);
                    assert.strictEqual(res.headers['x-goog-meta-value'],
                        this.test.initValue);
                    return next();
                }),
            ], done);
        });
    });
});
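Note how the two directive tests pair their assertions: both copies must leave the payload untouched, so `x-goog-hash` is asserted equal to the source's hash in each case, while only the REPLACE test expects the `x-goog-meta-value` header to differ from `initValue` and the COPY test expects the original value to carry through unchanged.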
@@ -0,0 +1,98 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;
const objectKey = `somekey-${Date.now()}`;
const badObjectKey = `nonexistingkey-${Date.now()}`;

describe('GCP: DELETE Object', function testSuite() {
    this.timeout(30000);
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    before(done => {
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    describe('with existing object in bucket', () => {
        beforeEach(done => {
            makeGcpRequest({
                method: 'PUT',
                bucket: bucketName,
                objectKey,
                authCredentials: config.credentials,
            }, err => {
                if (err) {
                    process.stdout.write(`err in creating object ${err}\n`);
                }
                return done(err);
            });
        });

        it('should successfully delete object', done => {
            async.waterfall([
                next => gcpClient.deleteObject({
                    Bucket: bucketName,
                    Key: objectKey,
                }, err => {
                    assert.equal(err, null,
                        `Expected success, got error ${err}`);
                    return next();
                }),
                next => makeGcpRequest({
                    method: 'GET',
                    bucket: bucketName,
                    objectKey,
                    authCredentials: config.credentials,
                }, err => {
                    assert(err);
                    assert.strictEqual(err.statusCode, 404);
                    assert.strictEqual(err.code, 'NoSuchKey');
                    return next();
                }),
            ], err => done(err));
        });
    });

    describe('without existing object in bucket', () => {
        it('should return 404 and NoSuchKey', done => {
            gcpClient.deleteObject({
                Bucket: bucketName,
                Key: badObjectKey,
            }, err => {
                assert(err);
                assert.strictEqual(err.statusCode, 404);
                assert.strictEqual(err.code, 'NoSuchKey');
                return done();
            });
        });
    });
});
@@ -0,0 +1,179 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { gcpRequestRetry, setBucketClass, gcpMpuSetup } =
    require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketNames = {
    main: {
        Name: `somebucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
    mpu: {
        Name: `mpubucket-${Date.now()}`,
        Type: 'REGIONAL',
    },
    overflow: {
        Name: `overflowbucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
};
const numParts = 10;
const partSize = 10;

function gcpMpuSetupWrapper(params, callback) {
    gcpMpuSetup(params, (err, result) => {
        assert.equal(err, null,
            `Unable to setup MPU test, error ${err}`);
        const { uploadId, etagList } = result;
        this.currentTest.uploadId = uploadId;
        this.currentTest.etagList = etagList;
        return callback();
    });
}

describe('GCP: Abort MPU', function testSuite() {
    this.timeout(30000);
    let config;
    let gcpClient;

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        async.eachSeries(bucketNames,
            (bucket, next) => gcpRequestRetry({
                method: 'PUT',
                bucket: bucket.Name,
                authCredentials: config.credentials,
                requestBody: setBucketClass(bucket.Type),
            }, 0, err => {
                if (err) {
                    process.stdout.write(`err in creating bucket ${err}\n`);
                }
                return next(err);
            }),
            done);
    });

    after(done => {
        async.eachSeries(bucketNames,
            (bucket, next) => gcpClient.listObjects({
                Bucket: bucket.Name,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got error ${err}`);
                async.map(res.Contents, (object, moveOn) => {
                    const deleteParams = {
                        Bucket: bucket.Name,
                        Key: object.Key,
                    };
                    gcpClient.deleteObject(
                        deleteParams, err => moveOn(err));
                }, err => {
                    assert.equal(err, null,
                        `Expected success, but got error ${err}`);
                    gcpRequestRetry({
                        method: 'DELETE',
                        bucket: bucket.Name,
                        authCredentials: config.credentials,
                    }, 0, err => {
                        if (err) {
                            process.stdout.write(
                                `err in deleting bucket ${err}\n`);
                        }
                        return next(err);
                    });
                });
            }),
            done);
    });

    describe('when MPU has 0 parts', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            gcpMpuSetupWrapper.call(this, {
                gcpClient,
                bucketNames,
                key: this.currentTest.key,
                partCount: 0, partSize,
            }, done);
        });

        it('should abort MPU with 0 parts', function testFn(done) {
            return async.waterfall([
                next => {
                    const params = {
                        Bucket: bucketNames.main.Name,
                        MPU: bucketNames.mpu.Name,
                        Overflow: bucketNames.overflow.Name,
                        Key: this.test.key,
                        UploadId: this.test.uploadId,
                    };
                    gcpClient.abortMultipartUpload(params, err => {
                        assert.equal(err, null,
                            `Expected success, but got error ${err}`);
                        return next();
                    });
                },
                next => {
                    const keyName =
                        `${this.test.key}-${this.test.uploadId}/init`;
                    gcpClient.headObject({
                        Bucket: bucketNames.mpu.Name,
                        Key: keyName,
                    }, err => {
                        assert(err);
                        assert.strictEqual(err.code, 404);
                        return next();
                    });
                },
            ], done);
        });
    });

    describe('when MPU is incomplete', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            gcpMpuSetupWrapper.call(this, {
                gcpClient,
                bucketNames,
                key: this.currentTest.key,
                partCount: numParts, partSize,
            }, done);
        });

        it('should abort incomplete MPU', function testFn(done) {
            return async.waterfall([
                next => {
                    const params = {
                        Bucket: bucketNames.main.Name,
                        MPU: bucketNames.mpu.Name,
                        Overflow: bucketNames.overflow.Name,
                        Key: this.test.key,
                        UploadId: this.test.uploadId,
                    };
                    gcpClient.abortMultipartUpload(params, err => {
                        assert.equal(err, null,
                            `Expected success, but got error ${err}`);
                        return next();
                    });
                },
                next => {
                    const keyName =
                        `${this.test.key}-${this.test.uploadId}/init`;
                    gcpClient.headObject({
                        Bucket: bucketNames.mpu.Name,
                        Key: keyName,
                    }, err => {
                        assert(err);
                        assert.strictEqual(err.code, 404);
                        return next();
                    });
                },
            ], err => done(err));
        });
    });
});
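Both abort tests verify the same invariant: while an upload is in flight, the backend keeps an init marker object named `<key>-<uploadId>/init` in the MPU bucket (the same object the Initiate MPU tests below read metadata back from), so a successful abort is observable simply as a 404 on that key.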
@@ -0,0 +1,148 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry, genDelTagObj } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');
const { gcpTaggingPrefix } = require('../../../../../../constants');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;
const gcpTagPrefix = `x-goog-meta-${gcpTaggingPrefix}`;
let config;
let gcpClient;

function assertObjectMetaTag(params, callback) {
    return makeGcpRequest({
        method: 'HEAD',
        bucket: params.bucket,
        objectKey: params.key,
        authCredentials: config.credentials,
        headers: {
            'x-goog-generation': params.versionId,
        },
    }, (err, res) => {
        if (err) {
            process.stdout.write(`err in retrieving object ${err}`);
            return callback(err);
        }
        const resObj = res.headers;
        const tagRes = {};
        Object.keys(resObj).forEach(
            header => {
                if (header.startsWith(gcpTagPrefix)) {
                    tagRes[header] = resObj[header];
                    delete resObj[header];
                }
            });
        const metaRes = {};
        Object.keys(resObj).forEach(
            header => {
                if (header.startsWith('x-goog-meta-')) {
                    metaRes[header] = resObj[header];
                    delete resObj[header];
                }
            });
        assert.deepStrictEqual(params.tag, tagRes);
        assert.deepStrictEqual(params.meta, metaRes);
        return callback();
    });
}

describe('GCP: DELETE Object Tagging', function testSuite() {
    this.timeout(30000);

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}`);
            }
            return done(err);
        });
    });

    beforeEach(function beforeFn(done) {
        this.currentTest.key = `somekey-${Date.now()}`;
        this.currentTest.specialKey = `veryspecial-${Date.now()}`;
        const { headers, expectedTagObj, expectedMetaObj } =
            genDelTagObj(10, gcpTagPrefix);
        this.currentTest.expectedTagObj = expectedTagObj;
        this.currentTest.expectedMetaObj = expectedMetaObj;
        makeGcpRequest({
            method: 'PUT',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
            headers,
        }, (err, res) => {
            if (err) {
                process.stdout.write(`err in creating object ${err}`);
                return done(err);
            }
            this.currentTest.versionId = res.headers['x-goog-generation'];
            return done();
        });
    });

    afterEach(function afterFn(done) {
        makeGcpRequest({
            method: 'DELETE',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
        }, err => {
            if (err) {
                process.stdout.write(`err in deleting object ${err}`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}`);
            }
            return done(err);
        });
    });

    it('should successfully delete object tags', function testFn(done) {
        async.waterfall([
            next => assertObjectMetaTag({
                bucket: bucketName,
                key: this.test.key,
                versionId: this.test.versionId,
                meta: this.test.expectedMetaObj,
                tag: this.test.expectedTagObj,
            }, next),
            next => gcpClient.deleteObjectTagging({
                Bucket: bucketName,
                Key: this.test.key,
                VersionId: this.test.versionId,
            }, err => {
                assert.equal(err, null,
                    `Expected success, got error ${err}`);
                return next();
            }),
            next => assertObjectMetaTag({
                bucket: bucketName,
                key: this.test.key,
                versionId: this.test.versionId,
                meta: this.test.expectedMetaObj,
                tag: {},
            }, next),
        ], done);
    });
});
@@ -0,0 +1,104 @@
const assert = require('assert');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;

describe('GCP: GET Object', function testSuite() {
    this.timeout(30000);
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    before(done => {
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    describe('with existing object in bucket', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            makeGcpRequest({
                method: 'PUT',
                bucket: bucketName,
                objectKey: this.currentTest.key,
                authCredentials: config.credentials,
            }, (err, res) => {
                if (err) {
                    process.stdout.write(`err in creating object ${err}\n`);
                    return done(err);
                }
                this.currentTest.uploadId =
                    res.headers['x-goog-generation'];
                this.currentTest.ETag = res.headers.etag;
                return done();
            });
        });

        afterEach(function afterFn(done) {
            makeGcpRequest({
                method: 'DELETE',
                bucket: bucketName,
                objectKey: this.currentTest.key,
                authCredentials: config.credentials,
            }, err => {
                if (err) {
                    process.stdout.write(`err in deleting object ${err}\n`);
                }
                return done(err);
            });
        });

        it('should successfully retrieve object', function testFn(done) {
            gcpClient.getObject({
                Bucket: bucketName,
                Key: this.test.key,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, got error ${err}`);
                assert.strictEqual(res.ETag, this.test.ETag);
                assert.strictEqual(res.VersionId, this.test.uploadId);
                return done();
            });
        });
    });

    describe('without existing object in bucket', () => {
        it('should return 404 and NoSuchKey', done => {
            const badObjectKey = `nonexistingkey-${Date.now()}`;
            gcpClient.getObject({
                Bucket: bucketName,
                Key: badObjectKey,
            }, err => {
                assert(err);
                assert.strictEqual(err.statusCode, 404);
                assert.strictEqual(err.code, 'NoSuchKey');
                return done();
            });
        });
    });
});
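These GET tests (and the HEAD tests below) treat GCP's `x-goog-generation` response header as the object's version id: the value captured when the object is PUT is asserted to round-trip as `res.VersionId` on the SDK response.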
@@ -0,0 +1,94 @@
const assert = require('assert');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry, genGetTagObj } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');
const { gcpTaggingPrefix } = require('../../../../../../constants');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;
const gcpTagPrefix = `x-goog-meta-${gcpTaggingPrefix}`;
const tagSize = 10;

describe('GCP: GET Object Tagging', () => {
    let config;
    let gcpClient;

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}`);
            }
            return done(err);
        });
    });

    beforeEach(function beforeFn(done) {
        this.currentTest.key = `somekey-${Date.now()}`;
        this.currentTest.specialKey = `veryspecial-${Date.now()}`;
        const { tagHeader, expectedTagObj } =
            genGetTagObj(tagSize, gcpTagPrefix);
        this.currentTest.tagObj = expectedTagObj;
        makeGcpRequest({
            method: 'PUT',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
            headers: tagHeader,
        }, (err, res) => {
            if (err) {
                process.stdout.write(`err in creating object ${err}`);
                return done(err);
            }
            this.currentTest.versionId = res.headers['x-goog-generation'];
            return done();
        });
    });

    afterEach(function afterFn(done) {
        makeGcpRequest({
            method: 'DELETE',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
        }, err => {
            if (err) {
                process.stdout.write(`err in deleting object ${err}`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}`);
            }
            return done(err);
        });
    });

    it('should successfully get object tags', function testFn(done) {
        gcpClient.getObjectTagging({
            Bucket: bucketName,
            Key: this.test.key,
            VersionId: this.test.versionId,
        }, (err, res) => {
            assert.equal(err, null,
                `Expected success, got error ${err}`);
            assert.deepStrictEqual(res.TagSet, this.test.tagObj);
            return done();
        });
    });
});
@@ -0,0 +1,103 @@
const assert = require('assert');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;

describe('GCP: HEAD Object', function testSuite() {
    this.timeout(30000);
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    before(done => {
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    describe('with existing object in bucket', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            makeGcpRequest({
                method: 'PUT',
                bucket: bucketName,
                objectKey: this.currentTest.key,
                authCredentials: config.credentials,
            }, (err, res) => {
                if (err) {
                    process.stdout.write(`err in creating object ${err}\n`);
                    return done(err);
                }
                this.currentTest.uploadId =
                    res.headers['x-goog-generation'];
                this.currentTest.ETag = res.headers.etag;
                return done();
            });
        });

        afterEach(function afterFn(done) {
            makeGcpRequest({
                method: 'DELETE',
                bucket: bucketName,
                objectKey: this.currentTest.key,
                authCredentials: config.credentials,
            }, err => {
                if (err) {
                    process.stdout.write(`err in deleting object ${err}\n`);
                }
                return done(err);
            });
        });

        it('should successfully retrieve object', function testFn(done) {
            gcpClient.headObject({
                Bucket: bucketName,
                Key: this.test.key,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, got error ${err}`);
                assert.strictEqual(res.ETag, this.test.ETag);
                assert.strictEqual(res.VersionId, this.test.uploadId);
                return done();
            });
        });
    });

    describe('without existing object in bucket', () => {
        it('should return 404', done => {
            const badObjectkey = `nonexistingkey-${Date.now()}`;
            gcpClient.headObject({
                Bucket: bucketName,
                Key: badObjectkey,
            }, err => {
                assert(err);
                assert.strictEqual(err.statusCode, 404);
                return done();
            });
        });
    });
});
@@ -0,0 +1,109 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry, setBucketClass } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketNames = {
    main: {
        Name: `somebucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
    mpu: {
        Name: `mpubucket-${Date.now()}`,
        Type: 'REGIONAL',
    },
    overflow: {
        Name: `overflowbucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
};

describe('GCP: Initiate MPU', function testSuite() {
    this.timeout(180000);
    let config;
    let gcpClient;

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        async.eachSeries(bucketNames,
            (bucket, next) => gcpRequestRetry({
                method: 'PUT',
                bucket: bucket.Name,
                authCredentials: config.credentials,
                requestBody: setBucketClass(bucket.Type),
            }, 0, err => {
                if (err) {
                    process.stdout.write(`err in creating bucket ${err}\n`);
                }
                return next(err);
            }),
            done);
    });

    after(done => {
        async.eachSeries(bucketNames,
            (bucket, next) => gcpRequestRetry({
                method: 'DELETE',
                bucket: bucket.Name,
                authCredentials: config.credentials,
            }, 0, err => {
                if (err) {
                    process.stdout.write(`err in deleting bucket ${err}\n`);
                }
                return next(err);
            }),
            done);
    });

    it('Should create a multipart upload object', done => {
        const keyName = `somekey-${Date.now()}`;
        const specialKey = `special-${Date.now()}`;
        async.waterfall([
            next => gcpClient.createMultipartUpload({
                Bucket: bucketNames.mpu.Name,
                Key: keyName,
                Metadata: {
                    special: specialKey,
                },
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got err ${err}`);
                return next(null, res.UploadId);
            }),
            (uploadId, next) => {
                const mpuInitKey = `${keyName}-${uploadId}/init`;
                makeGcpRequest({
                    method: 'GET',
                    bucket: bucketNames.mpu.Name,
                    objectKey: mpuInitKey,
                    authCredentials: config.credentials,
                }, (err, res) => {
                    if (err) {
                        process.stdout
                            .write(`err in retrieving object ${err}`);
                        return next(err);
                    }
                    assert.strictEqual(res.headers['x-goog-meta-special'],
                        specialKey);
                    return next(null, uploadId);
                });
            },
            (uploadId, next) => gcpClient.abortMultipartUpload({
                Bucket: bucketNames.main.Name,
                MPU: bucketNames.mpu.Name,
                Overflow: bucketNames.overflow.Name,
                UploadId: uploadId,
                Key: keyName,
            }, err => {
                assert.equal(err, null,
                    `Expected success, but got err ${err}`);
                return next();
            }),
        ], done);
    });
});
@@ -0,0 +1,112 @@
const assert = require('assert');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;

describe('GCP: PUT Object', function testSuite() {
    this.timeout(30000);
    const config = getRealAwsConfig(credentialOne);
    const gcpClient = new GCP(config);

    before(done => {
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}\n`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}\n`);
            }
            return done(err);
        });
    });

    afterEach(function afterFn(done) {
        makeGcpRequest({
            method: 'DELETE',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
        }, err => {
            if (err) {
                process.stdout.write(`err in deleting object ${err}\n`);
            }
            return done(err);
        });
    });

    describe('with existing object in bucket', () => {
        beforeEach(function beforeFn(done) {
            this.currentTest.key = `somekey-${Date.now()}`;
            gcpRequestRetry({
                method: 'PUT',
                bucket: bucketName,
                objectKey: this.currentTest.key,
                authCredentials: config.credentials,
            }, 0, (err, res) => {
                if (err) {
                    process.stdout.write(`err in putting object ${err}\n`);
                    return done(err);
                }
                this.currentTest.uploadId =
                    res.headers['x-goog-generation'];
                return done();
            });
        });

        it('should overwrite object', function testFn(done) {
            gcpClient.putObject({
                Bucket: bucketName,
                Key: this.test.key,
            }, (err, res) => {
                assert.notStrictEqual(res.VersionId, this.test.uploadId);
                return done();
            });
        });
    });

    describe('without existing object in bucket', () => {
        it('should successfully put object', function testFn(done) {
            this.test.key = `somekey-${Date.now()}`;
            gcpClient.putObject({
                Bucket: bucketName,
                Key: this.test.key,
            }, (err, putRes) => {
                assert.equal(err, null,
                    `Expected success, got error ${err}`);
                makeGcpRequest({
                    method: 'GET',
                    bucket: bucketName,
                    objectKey: this.test.key,
                    authCredentials: config.credentials,
                }, (err, getRes) => {
                    if (err) {
                        process.stdout.write(`err in getting bucket ${err}\n`);
                        return done(err);
                    }
                    assert.strictEqual(getRes.headers['x-goog-generation'],
                        putRes.VersionId);
                    return done();
                });
            });
        });
    });
});
@@ -0,0 +1,191 @@
const assert = require('assert');
const async = require('async');
const { GCP } = require('../../../../../../lib/data/external/GCP');
const { makeGcpRequest } = require('../../../utils/makeRequest');
const { gcpRequestRetry, genPutTagObj } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');
const { gcpTaggingPrefix } = require('../../../../../../constants');

const credentialOne = 'gcpbackend';
const bucketName = `somebucket-${Date.now()}`;
const gcpTagPrefix = `x-goog-meta-${gcpTaggingPrefix}`;

describe('GCP: PUT Object Tagging', () => {
    let config;
    let gcpClient;

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        gcpRequestRetry({
            method: 'PUT',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in creating bucket ${err}`);
            }
            return done(err);
        });
    });

    beforeEach(function beforeFn(done) {
        this.currentTest.key = `somekey-${Date.now()}`;
        this.currentTest.specialKey = `veryspecial-${Date.now()}`;
        makeGcpRequest({
            method: 'PUT',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
        }, (err, res) => {
            if (err) {
                process.stdout.write(`err in creating object ${err}`);
                return done(err);
            }
            this.currentTest.versionId = res.headers['x-goog-generation'];
            return done();
        });
    });

    afterEach(function afterFn(done) {
        makeGcpRequest({
            method: 'DELETE',
            bucket: bucketName,
            objectKey: this.currentTest.key,
            authCredentials: config.credentials,
        }, err => {
            if (err) {
                process.stdout.write(`err in deleting object ${err}`);
            }
            return done(err);
        });
    });

    after(done => {
        gcpRequestRetry({
            method: 'DELETE',
            bucket: bucketName,
            authCredentials: config.credentials,
        }, 0, err => {
            if (err) {
                process.stdout.write(`err in deleting bucket ${err}`);
            }
            return done(err);
        });
    });

    it('should successfully put object tags', function testFn(done) {
        async.waterfall([
            next => gcpClient.putObjectTagging({
                Bucket: bucketName,
                Key: this.test.key,
                VersionId: this.test.versionId,
                Tagging: {
                    TagSet: [
                        {
                            Key: this.test.specialKey,
                            Value: this.test.specialKey,
                        },
                    ],
                },
            }, err => {
                assert.equal(err, null,
                    `Expected success, got error ${err}`);
                return next();
            }),
            next => makeGcpRequest({
                method: 'HEAD',
                bucket: bucketName,
                objectKey: this.test.key,
                authCredentials: config.credentials,
                headers: {
                    'x-goog-generation': this.test.versionId,
                },
            }, (err, res) => {
                if (err) {
                    process.stdout.write(`err in retrieving object ${err}`);
                    return next(err);
                }
                const toCompare =
                    res.headers[`${gcpTagPrefix}${this.test.specialKey}`];
                assert.strictEqual(toCompare, this.test.specialKey);
                return next();
            }),
        ], done);
    });

    describe('when tagging parameter is incorrect', () => {
        it('should return 400 and BadRequest if more than ' +
        '10 tags are given', function testFun(done) {
            return gcpClient.putObjectTagging({
                Bucket: bucketName,
                Key: this.test.key,
                VersionId: this.test.versionId,
                Tagging: {
                    TagSet: genPutTagObj(11),
                },
            }, err => {
                assert(err);
                assert.strictEqual(err.code, 400);
                assert.strictEqual(err.message, 'BadRequest');
                return done();
            });
        });

        it('should return 400 and InvalidTag if given duplicate keys',
        function testFn(done) {
            return gcpClient.putObjectTagging({
                Bucket: bucketName,
                Key: this.test.key,
                VersionId: this.test.versionId,
                Tagging: {
                    TagSet: genPutTagObj(10, true),
                },
            }, err => {
                assert(err);
                assert.strictEqual(err.code, 400);
                assert.strictEqual(err.message, 'InvalidTag');
                return done();
            });
        });

        it('should return 400 and InvalidTag if given invalid key',
        function testFn(done) {
            return gcpClient.putObjectTagging({
                Bucket: bucketName,
                Key: this.test.key,
                VersionId: this.test.versionId,
                Tagging: {
                    TagSet: [
                        { Key: Buffer.alloc(129, 'a'), Value: 'bad tag' },
                    ],
                },
            }, err => {
                assert(err);
                assert.strictEqual(err.code, 400);
                assert.strictEqual(err.message, 'InvalidTag');
                return done();
            });
        });

        it('should return 400 and InvalidTag if given invalid value',
        function testFn(done) {
            return gcpClient.putObjectTagging({
                Bucket: bucketName,
                Key: this.test.key,
                VersionId: this.test.versionId,
                Tagging: {
                    TagSet: [
                        { Key: 'badtag', Value: Buffer.alloc(257, 'a') },
                    ],
                },
            }, err => {
                assert(err);
                assert.strictEqual(err.code, 400);
                assert.strictEqual(err.message, 'InvalidTag');
                return done();
            });
        });
    });
});
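The tagging tests above never read a separate tagging resource; they assert on custom-metadata headers built from `gcpTagPrefix`. A minimal sketch of the TagSet-to-header mapping they imply (it relies on the `gcpTaggingPrefix` constant; `tagSetToHeaders` is a hypothetical helper, not the backend's actual function):

const { gcpTaggingPrefix } = require('../../../../../../constants');

// each S3 tag becomes a GCP custom-metadata header:
//   x-goog-meta-<gcpTaggingPrefix><Key>: <Value>
function tagSetToHeaders(tagSet) {
    const headers = {};
    tagSet.forEach(tag => {
        headers[`x-goog-meta-${gcpTaggingPrefix}${tag.Key}`] = tag.Value;
    });
    return headers;
}

// tagSetToHeaders([{ Key: 'color', Value: 'blue' }])
// => with the default prefix: { 'x-goog-meta-aws-tag-color': 'blue' }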
@ -0,0 +1,116 @@
|
||||||
|
const assert = require('assert');
|
||||||
|
const async = require('async');
|
||||||
|
const { GCP } = require('../../../../../../lib/data/external/GCP');
|
||||||
|
const { gcpRequestRetry, setBucketClass } = require('../../../utils/gcpUtils');
const { getRealAwsConfig } =
    require('../../../../aws-node-sdk/test/support/awsConfig');

const credentialOne = 'gcpbackend';
const bucketNames = {
    main: {
        Name: `somebucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
    mpu: {
        Name: `mpubucket-${Date.now()}`,
        Type: 'REGIONAL',
    },
    overflow: {
        Name: `overflowbucket-${Date.now()}`,
        Type: 'MULTI_REGIONAL',
    },
};

const body = Buffer.from('I am a body', 'utf8');
const bigBody = Buffer.alloc(10485760);
const smallMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
const bigMD5 = 'a7d414b9133d6483d9a1c4e04e856e3b-2';

describe('GCP: Upload Object', function testSuite() {
    this.timeout(600000);
    let config;
    let gcpClient;

    before(done => {
        config = getRealAwsConfig(credentialOne);
        gcpClient = new GCP(config);
        async.eachSeries(bucketNames,
            (bucket, next) => gcpRequestRetry({
                method: 'PUT',
                bucket: bucket.Name,
                authCredentials: config.credentials,
                requestBody: setBucketClass(bucket.Type),
            }, 0, err => {
                if (err) {
                    process.stdout.write(`err in creating bucket ${err}\n`);
                }
                return next(err);
            }),
            err => done(err));
    });

    after(done => {
        async.eachSeries(bucketNames,
            (bucket, next) => gcpClient.listObjects({
                Bucket: bucket.Name,
            }, (err, res) => {
                assert.equal(err, null,
                    `Expected success, but got error ${err}`);
                async.map(res.Contents, (object, moveOn) => {
                    const deleteParams = {
                        Bucket: bucket.Name,
                        Key: object.Key,
                    };
                    gcpClient.deleteObject(
                        deleteParams, err => moveOn(err));
                }, err => {
                    assert.equal(err, null,
                        `Expected success, but got error ${err}`);
                    gcpRequestRetry({
                        method: 'DELETE',
                        bucket: bucket.Name,
                        authCredentials: config.credentials,
                    }, 0, err => {
                        if (err) {
                            process.stdout.write(
                                `err in deleting bucket ${err}\n`);
                        }
                        return next(err);
                    });
                });
            }),
            err => done(err));
    });

    it('should put an object to GCP', done => {
        const key = `somekey-${Date.now()}`;
        gcpClient.upload({
            Bucket: bucketNames.main.Name,
            MPU: bucketNames.mpu.Name,
            Overflow: bucketNames.overflow.Name,
            Key: key,
            Body: body,
        }, (err, res) => {
            assert.equal(err, null,
                `Expected success, got error ${err}`);
            assert.strictEqual(res.ETag, `"${smallMD5}"`);
            return done();
        });
    });

    it('should put a large object to GCP', done => {
        const key = `somekey-${Date.now()}`;
        gcpClient.upload({
            Bucket: bucketNames.main.Name,
            MPU: bucketNames.mpu.Name,
            Overflow: bucketNames.overflow.Name,
            Key: key,
            Body: bigBody,
        }, (err, res) => {
            assert.equal(err, null,
                `Expected success, got error ${err}`);
            assert.strictEqual(res.ETag, `"${bigMD5}"`);
            return done();
        });
    });
});
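The expected ETag for the 10 MiB body ends in `-2`, the S3-style multipart ETag convention: an MD5 over the concatenated binary MD5 digests of each part, suffixed with the part count. A minimal sketch of that computation (the helper name `computeMpuETag` is ours, purely illustrative):

const crypto = require('crypto');

// S3-style multipart ETag: MD5 of the concatenated binary MD5
// digests of each part, suffixed with the part count.
function computeMpuETag(partBuffers) {
    const partDigests = partBuffers.map(part =>
        crypto.createHash('md5').update(part).digest());
    const finalHex = crypto.createHash('md5')
        .update(Buffer.concat(partDigests)).digest('hex');
    return `${finalHex}-${partBuffers.length}`;
}

// A 10 MiB zero-filled body uploaded as two 5 MiB parts yields an
// ETag of the form '<hex>-2', which is why bigMD5 ends in '-2'.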
@@ -1,15 +1,11 @@
+const async = require('async');
+const assert = require('assert');
+
 const { makeGcpRequest } = require('./makeRequest');

 function gcpRequestRetry(params, retry, callback) {
-    const retryTimeout = {
-        0: 0,
-        1: 1000,
-        2: 2000,
-        3: 4000,
-        4: 8000,
-    };
     const maxRetries = 4;
-    const timeout = retryTimeout[retry];
+    const timeout = Math.pow(2, retry) * 1000;
     return setTimeout(makeGcpRequest, timeout, params, (err, res) => {
         if (err) {
             if (retry <= maxRetries && err.statusCode === 429) {
@@ -21,6 +17,133 @@ function gcpRequestRetry(params, retry, callback) {
     });
 }
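The retry delay lookup table is replaced with its closed form, with one behavioral difference worth noting: the table delayed the first retry by 0 ms, while `Math.pow(2, retry) * 1000` starts at 1 s and doubles up to 16 s. A quick check:

// Delays produced by the new closed form for retry indexes 0-4:
for (let retry = 0; retry <= 4; retry++) {
    process.stdout.write(`retry ${retry}: ${Math.pow(2, retry) * 1000}ms\n`);
}
// retry 0: 1000ms ... retry 4: 16000ms
// (the old table ran 0ms through 8000ms for the same indexes)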
+
+function gcpClientRetry(fn, params, callback, retry = 0) {
+    const maxRetries = 4;
+    const timeout = Math.pow(2, retry) * 1000;
+    return setTimeout(fn, timeout, params, (err, res) => {
+        if (err) {
+            if (retry <= maxRetries && err.statusCode === 429) {
+                return gcpClientRetry(fn, params, callback, retry + 1);
+            }
+            return callback(err);
+        }
+        return callback(null, res);
+    });
+}
+
+// mpu test helpers
+function gcpMpuSetup(params, callback) {
+    const { gcpClient, bucketNames, key, partCount, partSize } = params;
+    return async.waterfall([
+        next => gcpClient.createMultipartUpload({
+            Bucket: bucketNames.mpu.Name,
+            Key: key,
+        }, (err, res) => {
+            assert.equal(err, null,
+                `Expected success, but got error ${err}`);
+            return next(null, res.UploadId);
+        }),
+        (uploadId, next) => {
+            if (partCount <= 0) {
+                return next('SkipPutPart', { uploadId });
+            }
+            const arrayData = Array.from(Array(partCount).keys());
+            const etagList = Array(partCount);
+            let count = 0;
+            return async.eachLimit(arrayData, 10,
+            (info, moveOn) => {
+                gcpClient.uploadPart({
+                    Bucket: bucketNames.mpu.Name,
+                    Key: key,
+                    UploadId: uploadId,
+                    PartNumber: info + 1,
+                    Body: Buffer.alloc(partSize),
+                    ContentLength: partSize,
+                }, (err, res) => {
+                    if (err) {
+                        return moveOn(err);
+                    }
+                    if (!(++count % 500)) {
+                        process.stdout.write(`Uploaded Parts: ${count}\n`);
+                    }
+                    etagList[info] = res.ETag;
+                    return moveOn(null);
+                });
+            }, err => {
+                next(err, { uploadId, etagList });
+            });
+        },
+    ], (err, result) => {
+        if (err) {
+            if (err === 'SkipPutPart') {
+                return callback(null, result);
+            }
+            return callback(err);
+        }
+        return callback(null, result);
+    });
+}
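A minimal usage sketch of `gcpMpuSetup`, assuming `gcpClient` and `bucketNames` are initialized as in the upload tests above:

// Create an MPU and upload two 5 MiB parts, then inspect the
// uploadId and the collected part ETags.
gcpMpuSetup({
    gcpClient,
    bucketNames,
    key: `somekey-${Date.now()}`,
    partCount: 2,
    partSize: 5 * 1024 * 1024,
}, (err, result) => {
    assert.ifError(err);
    process.stdout.write(
        `upload ${result.uploadId}: ${result.etagList.length} parts\n`);
});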
+
+function genPutTagObj(size, duplicate) {
+    const retTagSet = [];
+    Array.from(Array(size).keys()).forEach(ind => {
+        retTagSet.push({
+            Key: duplicate ? 'dupeKey' : `key${ind}`,
+            Value: `Value${ind}`,
+        });
+    });
+    return retTagSet;
+}
+
+function genGetTagObj(size, tagPrefix) {
+    const retObj = {};
+    const expectedTagObj = [];
+    for (let i = 1; i <= size; ++i) {
+        retObj[`${tagPrefix}testtag${i}`] = `testtag${i}`;
+        expectedTagObj.push({
+            Key: `testtag${i}`,
+            Value: `testtag${i}`,
+        });
+    }
+    return { tagHeader: retObj, expectedTagObj };
+}
+
+function genDelTagObj(size, tagPrefix) {
+    const headers = {};
+    const expectedTagObj = {};
+    const expectedMetaObj = {};
+    for (let i = 1; i <= size; ++i) {
+        headers[`${tagPrefix}testtag${i}`] = `testtag${i}`;
+        expectedTagObj[`${tagPrefix}testtag${i}`] = `testtag${i}`;
+        headers[`x-goog-meta-testmeta${i}`] = `testmeta${i}`;
+        expectedMetaObj[`x-goog-meta-testmeta${i}`] = `testmeta${i}`;
+    }
+    return { headers, expectedTagObj, expectedMetaObj };
+}
+
+/*
+    <CreateBucketConfiguration>
+        <LocationConstraint><location></LocationConstraint>
+        <StorageClass><storage class></StorageClass>
+    </CreateBucketConfiguration>
+*/
+const regionalLoc = 'us-west1';
+const multiRegionalLoc = 'us';
+function setBucketClass(storageClass) {
+    const locationConstraint =
+        storageClass === 'REGIONAL' ? regionalLoc : multiRegionalLoc;
+    return '<CreateBucketConfiguration>' +
+        `<LocationConstraint>${locationConstraint}</LocationConstraint>` +
+        `<StorageClass>${storageClass}</StorageClass>` +
+        '</CreateBucketConfiguration>';
+}
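For reference, `setBucketClass('REGIONAL')` returns the following request body (whitespace added here for readability):

// <CreateBucketConfiguration>
//     <LocationConstraint>us-west1</LocationConstraint>
//     <StorageClass>REGIONAL</StorageClass>
// </CreateBucketConfiguration>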
 module.exports = {
     gcpRequestRetry,
+    gcpClientRetry,
+    setBucketClass,
+    gcpMpuSetup,
+    genPutTagObj,
+    genGetTagObj,
+    genDelTagObj,
 };
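A sketch of how `gcpClientRetry` can wrap a throttled client call; the `listObjects` call and the bucket name here are illustrative assumptions, not code from this PR:

// Retry an SDK-style call on HTTP 429 with exponential backoff.
gcpClientRetry(gcpClient.listObjects.bind(gcpClient),
    { Bucket: 'some-existing-bucket' },
    (err, res) => {
        if (err) {
            return process.stdout.write(`listing failed: ${err}\n`);
        }
        return process.stdout.write(`found ${res.Contents.length} keys\n`);
    });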
@@ -114,6 +114,7 @@
         "legacyAwsBehavior": true,
         "details": {
             "gcpEndpoint": "storage.googleapis.com",
+            "jsonEndpoint": "www.googleapis.com",
             "bucketName": "zenko-gcp-bucket",
             "mpuBucketName": "zenko-gcp-mpu",
             "overflowBucketName": "zenko-gcp-overflow",
@@ -122,7 +123,7 @@
             "serviceCredentials": {
                 "scopes": "https://www.googleapis.com/auth/cloud-platform",
                 "serviceEmail": "fake001",
-                "servieKey": "fake001"
+                "serviceKey": "fake001"
             }
         }
     },
@@ -131,15 +132,16 @@
         "legacyAwsBehavior": true,
         "details": {
             "gcpEndpoint": "storage.googleapis.com",
+            "jsonEndpoint": "www.googleapis.com",
             "bucketName": "zenko-gcp-bucket-2",
-            "mpuBucketName": "zenko-gcp-mpu",
-            "overflowBucketName": "zenko-gcp-overflow",
+            "mpuBucketName": "zenko-gcp-mpu-2",
+            "overflowBucketName": "zenko-gcp-overflow-2",
             "bucketMatch": true,
             "credentialsProfile": "google_2",
             "serviceCredentials": {
                 "scopes": "https://www.googleapis.com/auth/cloud-platform",
                 "serviceEmail": "fake002",
-                "servieKey": "fake002"
+                "serviceKey": "fake002"
             }
         }
     },
@@ -148,6 +150,7 @@
         "legacyAwsBehavior": true,
         "details": {
             "gcpEndpoint": "storage.googleapis.com",
+            "jsonEndpoint": "www.googleapis.com",
             "bucketName": "zenko-gcp-bucket",
             "mpuBucketName": "zenko-gcp-mpu",
             "overflowBucketName": "zenko-gcp-overflow",
@@ -156,44 +159,7 @@
             "serviceCredentials": {
                 "scopes": "https://www.googleapis.com/auth/cloud-platform",
                 "serviceEmail": "fake001",
-                "servieKey": "fake001"
+                "serviceKey": "fake001"
-            }
-        }
-    },
-    "gcpbackendproxy": {
-        "type": "gcp",
-        "legacyAwsBehavior": true,
-        "details": {
-            "proxy": "https://proxy.server",
-            "https": true,
-            "gcpEndpoint": "storage.googleapis.com",
-            "bucketName": "zenko-gcp-bucket",
-            "mpuBucketName": "zenko-gcp-mpu",
-            "overflowBucketName": "zenko-gcp-overflow",
-            "bucketMatch": false,
-            "credentialsProfile": "google",
-            "serviceCredentials": {
-                "scopes": "https://www.googleapis.com/auth/cloud-platform",
-                "serviceEmail": "fake001",
-                "servieKey": "fake001"
-            }
-        }
-    },
-    "gcpbackendnoproxy": {
-        "type": "gcp",
-        "legacyAwsBehavior": true,
-        "details": {
-            "https": false,
-            "gcpEndpoint": "storage.googleapis.com",
-            "bucketName": "zenko-gcp-bucket",
-            "mpuBucketName": "zenko-gcp-mpu",
-            "overflowBucketName": "zenko-gcp-overflow",
-            "bucketMatch": false,
-            "credentialsProfile": "google",
-            "serviceCredentials": {
-                "scopes": "https://www.googleapis.com/auth/cloud-platform",
-                "serviceEmail": "fake001",
-                "servieKey": "fake001"
-            }
-        }
-    },
             }
         }
     }
 }
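A hedged sketch of how a test might read one of these location entries; the file path and the `gcpbackend` entry name are assumptions based on the credential profile used in the upload tests, not confirmed by this diff:

// Illustrative only: pull the endpoints for one GCP location entry.
const locationConfig = require('./locationConfig.json'); // assumed path
const gcpDetails = locationConfig.gcpbackend.details; // assumed entry name
process.stdout.write(
    `${gcpDetails.gcpEndpoint} / ${gcpDetails.jsonEndpoint}\n`);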
@@ -0,0 +1,58 @@
const assert = require('assert');
const { errors } = require('arsenal');
const { JsonError, jsonRespCheck } =
    require('../../../lib/data/external/GCP').GcpUtils;

const error = errors.InternalError.customizeDescription(
    'error in JSON Request');
const errorResp = { statusMessage: 'unit test error', statusCode: 500 };
const errorBody = JSON.stringify({
    error: {
        code: 500,
        message: 'unit test error',
    },
});
const retError = new JsonError(errorResp.statusMessage, errorResp.statusCode);
const successResp = { statusCode: 200 };
const successObj = { Value: 'Success' };

describe('GcpUtils JSON API Helper Functions:', () => {
    it('should return InternalError if a request error is set', done => {
        jsonRespCheck(error, {}, 'Some body value', 'unitTest', err => {
            assert.deepStrictEqual(err, error);
            done();
        });
    });

    it('should return resp error if resp code is >= 300', done => {
        jsonRespCheck(null, errorResp, 'some body value', 'unitTest', err => {
            assert.deepStrictEqual(err, retError);
            done();
        });
    });

    it('should return error if body is a json error value', done => {
        jsonRespCheck(null, {}, errorBody, 'unitTest', err => {
            assert.deepStrictEqual(err, retError);
            done();
        });
    });

    it('should return success obj', done => {
        jsonRespCheck(null, successResp, JSON.stringify(successObj), 'unitTest',
        (err, res) => {
            assert.ifError(err, `Expected success, but got error ${err}`);
            assert.deepStrictEqual(res, successObj);
            done();
        });
    });

    it('should return no result if success resp but body is invalid', done => {
        jsonRespCheck(null, successResp, 'invalid body string', 'unitTest',
        (err, res) => {
            assert.ifError(err, `Expected success, but got error ${err}`);
            assert.strictEqual(res, undefined);
            done();
        });
    });
});
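Taken together, the five cases pin down the contract of `jsonRespCheck`. A behavioral sketch consistent with them (our inference, not the PR's actual implementation in GcpUtils):

// Inferred behavior of jsonRespCheck, following the tests above.
function jsonRespCheckSketch(err, resp, body, method, callback) {
    if (err) {
        // request-level error -> customized InternalError
        return callback(error);
    }
    if (resp.statusCode >= 300) {
        // HTTP-level error -> JsonError built from the response
        return callback(new JsonError(resp.statusMessage, resp.statusCode));
    }
    let parsed;
    try {
        parsed = JSON.parse(body);
    } catch (e) {
        // unparseable body on a 2xx response -> success with no result
        return callback();
    }
    if (parsed.error) {
        // JSON error payload -> JsonError built from the body
        return callback(
            new JsonError(parsed.error.message, parsed.error.code));
    }
    return callback(null, parsed);
}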
@@ -0,0 +1,65 @@
const assert = require('assert');
const uuid = require('uuid/v4');
const { createMpuKey, createMpuList } =
    require('../../../lib/data/external/GCP').GcpUtils;

const key = `somekey${Date.now()}`;
const uploadId = uuid().replace(/-/g, '');
const phase = 'createMpulist';
const size = 2;
const correctMpuList = [
    { PartName: `${key}-${uploadId}/${phase}/00001`, PartNumber: 1 },
    { PartName: `${key}-${uploadId}/${phase}/00002`, PartNumber: 2 },
];

describe('GcpUtils MPU Helper Functions:', () => {
    describe('createMpuKey', () => {
        const tests = [
            {
                it: 'if phase and part number are given',
                input: { phase: 'test', partNumber: 1 },
                output: `${key}-${uploadId}/test/00001`,
            },
            {
                it: 'if only phase is given',
                input: { phase: 'test' },
                output: `${key}-${uploadId}/test`,
            },
            {
                it: 'if part number is given',
                input: { partNumber: 1 },
                output: `${key}-${uploadId}/parts/00001`,
            },
            {
                it: 'if phase and part number are not given',
                input: {},
                output: `${key}-${uploadId}/`,
            },
        ];
        tests.forEach(test => {
            it(test.it, () => {
                const { partNumber, phase } = test.input;
                assert.strictEqual(createMpuKey(
                    key, uploadId, partNumber, phase), test.output);
            });
        });
    });

    describe('createMpuList', () => {
        const tests = [
            {
                it: 'should create valid mpu list',
                input: { phase, size },
                output: correctMpuList,
            },
        ];
        tests.forEach(test => {
            it(test.it, () => {
                const { phase, size } = test.input;
                assert.deepStrictEqual(createMpuList(
                    { Key: key, UploadId: uploadId }, phase, size),
                    test.output);
            });
        });
    });
});
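The four `createMpuKey` cases imply a simple key scheme: `<key>-<uploadId>/<phase>/<zero-padded part number>`, with `parts` as the default phase. One implementation consistent with them (a sketch, not the PR's code):

// Sketch of a createMpuKey consistent with the expected outputs.
function createMpuKeySketch(key, uploadId, partNumber, phase) {
    if (partNumber && phase) {
        return `${key}-${uploadId}/${phase}/${padLeft(partNumber)}`;
    }
    if (partNumber) {
        return `${key}-${uploadId}/parts/${padLeft(partNumber)}`;
    }
    if (phase) {
        return `${key}-${uploadId}/${phase}`;
    }
    return `${key}-${uploadId}/`;
}

// Zero-pad a part number to five digits, e.g. 1 -> '00001'.
function padLeft(partNumber) {
    return `0000${partNumber}`.slice(-5);
}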
@@ -0,0 +1,156 @@
const assert = require('assert');
const { errors } = require('arsenal');
const { gcpTaggingPrefix } = require('../../../constants');
const { genPutTagObj } =
    require('../../../tests/functional/raw-node/utils/gcpUtils');
const { processTagSet, stripTags, retrieveTags, getPutTagsMetadata } =
    require('../../../lib/data/external/GCP').GcpUtils;

const maxTagSize = 10;
const validTagSet = genPutTagObj(2);
const validTagObj = {};
validTagObj[`${gcpTaggingPrefix}key0`] = 'Value0';
validTagObj[`${gcpTaggingPrefix}key1`] = 'Value1';
const tagQuery = 'key0=Value0&key1=Value1';
const invalidSizeTagSet = genPutTagObj(maxTagSize + 1);
const invalidDuplicateTagSet = genPutTagObj(maxTagSize, true);
const invalidKeyTagSet = [{ Key: Buffer.alloc(129, 'a'), Value: 'value' }];
const invalidValueTagSet = [{ Key: 'key', Value: Buffer.alloc(257, 'a') }];
const onlyMetadata = {
    metadata1: 'metadatavalue1',
    metadata2: 'metadatavalue2',
};
const tagMetadata = Object.assign({}, validTagObj, onlyMetadata);
const oldTagMetadata = {};
oldTagMetadata[`${gcpTaggingPrefix}Old`] = 'OldValue0';
const withPriorTags = Object.assign({}, onlyMetadata, oldTagMetadata);

describe('GcpUtils Tagging Helper Functions:', () => {
    describe('processTagSet', () => {
        const tests = [
            {
                it: 'should return tag object as metadata for valid tag set',
                input: validTagSet,
                output: validTagObj,
            },
            {
                it: 'should return error for invalid tag set size',
                input: invalidSizeTagSet,
                output: errors.BadRequest.customizeDescription(
                    'Object tags cannot be greater than 10'),
            },
            {
                it: 'should return error for duplicate tag keys',
                input: invalidDuplicateTagSet,
                output: errors.InvalidTag.customizeDescription(
                    'Cannot provide multiple Tags with the same key'),
            },
            {
                it: 'should return error for invalid "key" value',
                input: invalidKeyTagSet,
                output: errors.InvalidTag.customizeDescription(
                    'The TagKey you have provided is invalid'),
            },
            {
                it: 'should return error for invalid "value" value',
                input: invalidValueTagSet,
                output: errors.InvalidTag.customizeDescription(
                    'The TagValue you have provided is invalid'),
            },
            {
                it: 'should return empty tag object when input is undefined',
                input: undefined,
                output: {},
            },
        ];
        tests.forEach(test => {
            it(test.it, () => {
                assert.deepStrictEqual(processTagSet(test.input), test.output);
            });
        });
    });

    describe('stripTags', () => {
        const tests = [
            {
                it: 'should return metadata without tags',
                input: tagMetadata,
                output: onlyMetadata,
            },
            {
                it: 'should return empty object if metadata only has tags',
                input: validTagObj,
                output: {},
            },
            {
                it: 'should return empty object if input is undefined',
                input: undefined,
                output: {},
            },
        ];
        tests.forEach(test => {
            it(test.it, () => {
                assert.deepStrictEqual(stripTags(test.input), test.output);
            });
        });
    });

    describe('retrieveTags', () => {
        const tests = [
            {
                it: 'should return tagSet from given input metadata',
                input: tagMetadata,
                output: validTagSet,
            },
            {
                it: 'should return empty when metadata does not have tags',
                input: onlyMetadata,
                output: [],
            },
            {
                it: 'should return empty if input is undefined',
                input: undefined,
                output: [],
            },
        ];
        tests.forEach(test => {
            it(test.it, () => {
                assert.deepStrictEqual(retrieveTags(test.input), test.output);
            });
        });
    });

    describe('getPutTagsMetadata', () => {
        const tests = [
            {
                it: 'should return correct object when' +
                    ' given a tag query string and a metadata obj',
                input: { metadata: Object.assign({}, onlyMetadata), tagQuery },
                output: tagMetadata,
            },
            {
                it: 'should return correct object when given only query string',
                input: { tagQuery },
                output: validTagObj,
            },
            {
                it: 'should return correct object when only metadata is given',
                input: { metadata: onlyMetadata },
                output: onlyMetadata,
            },
            {
                it: 'should return metadata with correct tag properties ' +
                    'if given metadata with prior tags and a query string',
                input: { metadata: Object.assign({}, withPriorTags), tagQuery },
                output: tagMetadata,
            },
        ];
        tests.forEach(test => {
            it(test.it, () => {
                const { metadata, tagQuery } = test.input;
                assert.deepStrictEqual(
                    getPutTagsMetadata(metadata, tagQuery), test.output);
            });
        });
    });
});
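These fixtures exercise a round trip: tags become metadata keys carrying the GCP tagging prefix ('aws-tag-'), and the retrieval helpers reverse it. A sketch of `stripTags` consistent with its three test cases (our inference, not the actual GcpUtils implementation):

// Drop every metadata key that carries the GCP tagging prefix.
function stripTagsSketch(metadata = {}) {
    const stripped = {};
    Object.keys(metadata).forEach(k => {
        if (!k.startsWith(gcpTaggingPrefix)) {
            stripped[k] = metadata[k];
        }
    });
    return stripped;
}
// stripTagsSketch(tagMetadata) -> onlyMetadata;
// stripTagsSketch(validTagObj) -> {}; stripTagsSketch(undefined) -> {}.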