Compare commits
8 Commits
developmen...improvemen

Author | SHA1 | Date |
---|---|---|
Jordi Bertran de Balanda | 1923937dc6 | |
Jordi Bertran de Balanda | ecc7513412 | |
Jordi Bertran de Balanda | e38c4290e6 | |
Jordi Bertran de Balanda | aff7e77f73 | |
Jordi Bertran de Balanda | 3b3282704e | |
Jordi Bertran de Balanda | 223d19b0a8 | |
Jordi Bertran de Balanda | 46b65f39e5 | |
Jordi Bertran de Balanda | 884132e693 | |
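
Most of these commits migrate error handling to the newer arsenal error API: direct property checks such as `err.NoSuchKey` and identity comparisons such as `err === errors.NoSuchKey` become lookups on the error's `is` map (`err.is.NoSuchKey`), and deep requires under `arsenal/lib/` move to the package's exported namespaces. The sketch below is not from this repository; it only illustrates the two check styles, assuming an arsenal build (like the commit pinned in package.json further down) whose error instances expose an `is` map.

```js
// Hypothetical helper names; only the check styles mirror the diffs below.
const { errors } = require('arsenal');

// Old style: each error exposes its own code as a truthy property,
// and callers sometimes compare against the shared errors.<Code> instance.
function isNoSuchKeyOld(err) {
    return Boolean(err && (err.NoSuchKey || err === errors.NoSuchKey));
}

// New style: every arsenal error carries an `is` map keyed by error code.
function isNoSuchKeyNew(err) {
    return Boolean(err && err.is.NoSuchKey);
}

console.log(isNoSuchKeyOld(errors.NoSuchKey)); // true on older arsenal releases
console.log(isNoSuchKeyNew(errors.NoSuchKey)); // true once the `is` map is available
```
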
@@ -33,7 +33,7 @@ RUN cd /tmp \
&& rm -rf /tmp/Python-$PY_VERSION.tgz

RUN yarn cache clean \
- && yarn install --production --ignore-optional --ignore-engines --network-concurrency 1 \
+ && yarn install --production --ignore-optional --ignore-engines --network-concurrency 1 \
&& apt-get autoremove --purge -y python git build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& yarn cache clean \

@@ -49,7 +49,7 @@ function updateRequestContexts(request, requestContexts, apiMethod, log, cb) {
return metadata.getObjectMD(bucketName, objectKey, { versionId: reqVersionId }, log,
(err, objMD) => {
if (err) {
- if (err.NoSuchKey) {
+ if (err.is.NoSuchKey) {
return next();
}
log.trace('error getting request object tags');

@@ -22,7 +22,7 @@ function addToUsersBucket(canonicalID, bucketName, log, cb) {

// Get new format usersBucket to see if it exists
return metadata.getBucket(usersBucket, log, (err, usersBucketAttrs) => {
- if (err && !err.NoSuchBucket && !err.BucketAlreadyExists) {
+ if (err && !err.is.NoSuchBucket && !err.is.BucketAlreadyExists) {
return cb(err);
}
const splitter = usersBucketAttrs ?
@@ -36,7 +36,7 @@ function addToUsersBucket(canonicalID, bucketName, log, cb) {
usersBucket : oldUsersBucket;
return metadata.putObjectMD(usersBucketBeingCalled, key,
omVal, {}, log, err => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
// There must be no usersBucket so createBucket
// one using the new format
log.trace('users bucket does not exist, ' +
@@ -57,8 +57,7 @@ function addToUsersBucket(canonicalID, bucketName, log, cb) {
// error with respect
// to the usersBucket.
if (err &&
- err !==
- errors.BucketAlreadyExists) {
+ !err.is.BucketAlreadyExists) {
log.error('error from metadata', {
error: err,
});
@@ -206,7 +205,7 @@ function createBucket(authInfo, bucketName, headers,
},
getAnyExistingBucketInfo: function getAnyExistingBucketInfo(callback) {
metadata.getBucket(bucketName, log, (err, data) => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
return callback(null, 'NoBucketYet');
}
if (err) {

@@ -16,7 +16,7 @@ function _deleteMPUbucket(destinationBucketName, log, cb) {
`${mpuBucketPrefix}${destinationBucketName}`;
return metadata.deleteBucket(mpuBucketName, log, err => {
// If the mpu bucket does not exist, just move on
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
return cb();
}
return cb(err);
@@ -90,7 +90,7 @@ function deleteBucket(authInfo, bucketMD, bucketName, canonicalID, log, cb) {
log, (err, objectsListRes) => {
// If no shadow bucket ever created, no ongoing MPU's, so
// continue with deletion
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
return next();
}
if (err) {

@@ -11,16 +11,16 @@ function deleteUserBucketEntry(bucketName, canonicalID, log, cb) {
metadata.deleteObjectMD(usersBucket, keyForUserBucket, {}, log, error => {
// If the object representing the bucket is not in the
// users bucket just continue
- if (error && error.NoSuchKey) {
+ if (error && error.is.NoSuchKey) {
return cb(null);
// BACKWARDS COMPATIBILITY: Remove this once no longer
// have old user bucket format
- } else if (error && error.NoSuchBucket) {
+ } else if (error && error.is.NoSuchBucket) {
const keyForUserBucket2 = createKeyForUserBucket(canonicalID,
oldSplitter, bucketName);
return metadata.deleteObjectMD(oldUsersBucket, keyForUserBucket2,
{}, log, error => {
- if (error && !error.NoSuchKey) {
+ if (error && !error.is.NoSuchKey) {
log.error('from metadata while deleting user bucket',
{ error });
return cb(error);

@@ -2,7 +2,7 @@ const { errors } = require('arsenal');
const {
parseRangeSpec,
parseRange,
- } = require('arsenal/lib/network/http/utils');
+ } = require('arsenal').network.http.utils;

const constants = require('../../../../constants');
const setPartRanges = require('./setPartRanges');

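The hunk above also switches from requiring arsenal's HTTP range helpers by file path to using the namespace the package itself exports. A minimal, hedged sketch of the new access path (it assumes the pinned arsenal version exposes `network.http.utils` from its root export; the `typeof` checks are only there to make the snippet self-contained):

```js
// Before (reaches into arsenal's source tree):
// const { parseRange, parseRangeSpec } = require('arsenal/lib/network/http/utils');

// After (uses the package's exported namespace):
const { parseRange, parseRangeSpec } = require('arsenal').network.http.utils;

console.log(typeof parseRange, typeof parseRangeSpec); // expected: 'function' 'function'
```
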
@@ -292,7 +292,7 @@ function versioningPreprocessing(bucketName, bucketMD, objectKey, objMD,
// it's possible there was a concurrent request to
// delete the null version, so proceed with putting a
// new version
- if (err === errors.NoSuchKey) {
+ if (err === errors.is.NoSuchKey) {
return next(null, options);
}
return next(errors.InternalError);

@@ -341,7 +341,7 @@ function completeMultipartUpload(authInfo, request, log, callback) {
if (err) {
// TODO: check AWS error when user requested a specific
// version before any versions have been put
- const logLvl = err === errors.BadRequest ?
+ const logLvl = err.is.BadRequest ?
'debug' : 'error';
log[logLvl]('error getting versioning info', {
error: err,

@@ -210,10 +210,10 @@ function getObjMetadataAndDelete(authInfo, canonicalID, request,
(versionId, callback) => metadataGetObject(bucketName, entry.key,
versionId, log, (err, objMD) => {
// if general error from metadata return error
- if (err && !err.NoSuchKey) {
+ if (err && !err.is.NoSuchKey) {
return callback(err);
}
- if (err && err.NoSuchKey) {
+ if (err && err.is.NoSuchKey) {
const verCfg = bucket.getVersioningConfiguration();
// To adhere to AWS behavior, create a delete marker
// if trying to delete an object that does not exist
@@ -386,11 +386,11 @@ function multiObjectDelete(authInfo, request, log, callback) {
return vault.checkPolicies(requestContextParams, authInfo.getArn(),
log, (err, authorizationResults) => {
// there were no policies so received a blanket AccessDenied
- if (err && err.AccessDenied) {
+ if (err && err.is.AccessDenied) {
objects.forEach(entry => {
errorResults.push({
entry,
- error: errors.AccessDenied });
+ error: errors.is.AccessDenied });
});
// send empty array for inPlay
return next(null, quietSetting, errorResults, []);

@@ -1,5 +1,3 @@
- const { errors } = require('arsenal');
-
const abortMultipartUpload = require('./apiUtils/object/abortMultipartUpload');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const isLegacyAWSBehavior = require('../utilities/legacyAWSBehavior');
@@ -29,10 +27,10 @@ function multipartDelete(authInfo, request, log, callback) {
request.method, destinationBucket);
const location = destinationBucket ?
destinationBucket.getLocationConstraint() : null;
- if (err && err !== errors.NoSuchUpload) {
+ if (err && !err.is.NoSuchUpload) {
return callback(err, corsHeaders);
}
- if (err === errors.NoSuchUpload && isLegacyAWSBehavior(location)) {
+ if (err && err.is.NoSuchUpload && isLegacyAWSBehavior(location)) {
log.trace('did not find valid mpu with uploadId', {
method: 'multipartDelete',
uploadId,

@@ -1,5 +1,5 @@
const { errors, s3middleware } = require('arsenal');
- const { parseRange } = require('arsenal/lib/network/http/utils');
+ const { parseRange } = require('arsenal').network.http.utils;

const data = require('../data/wrapper');

@@ -1,6 +1,6 @@
const { errors, s3middleware } = require('arsenal');
+ const { parseRange } = require('arsenal').network.http.utils;
const validateHeaders = s3middleware.validateConditionalHeaders;
- const { parseRange } = require('arsenal/lib/network/http/utils');

const { decodeVersionId } = require('./apiUtils/object/versioning');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -182,7 +182,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
sourceLocationConstraintName, next) {
return metadata.getBucket(mpuBucketName, log,
(err, mpuBucket) => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
return next(errors.NoSuchUpload);
}
if (err) {
@@ -211,7 +211,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
return metadata.getObjectMD(mpuBucketName, mpuOverviewKey,
null, log, (err, res) => {
if (err) {
- if (err.NoSuchKey) {
+ if (err.is.NoSuchKey) {
return next(errors.NoSuchUpload);
}
log.error('error getting overview object from ' +
@@ -263,7 +263,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
metadata.getObjectMD(mpuBucketName, partKey, {}, log,
(err, result) => {
// If there is nothing being overwritten just move on
- if (err && !err.NoSuchKey) {
+ if (err && !err.is.NoSuchKey) {
log.debug('error getting current part (if any)',
{ error: err });
return next(err);

@@ -94,7 +94,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// Get the destination bucket.
next => metadata.getBucket(bucketName, log,
(err, destinationBucket) => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
return next(errors.NoSuchBucket, destinationBucket);
}
if (err) {
@@ -142,7 +142,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
(destinationBucket, cipherBundle, next) =>
metadata.getBucket(mpuBucketName, log,
(err, mpuBucket) => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
return next(errors.NoSuchUpload, destinationBucket);
}
if (err) {
@@ -252,7 +252,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
return metadata.getObjectMD(mpuBucketName, partKey, {}, log,
(err, res) => {
// If there is no object with the same key, continue.
- if (err && !err.NoSuchKey) {
+ if (err && !err.is.NoSuchKey) {
log.error('error getting current part (if any)', {
error: err,
method: 'objectPutPart::metadata.getObjectMD',

@@ -147,7 +147,7 @@ function websiteGet(request, log, callback) {
'bucketGet', constants.publicId, null, log, request);
// if index object does not exist and bucket is private AWS
// returns 403 - AccessDenied error.
- if (err === errors.NoSuchKey && !bucketAuthorized) {
+ if (err.is.NoSuchKey && !bucketAuthorized) {
returnErr = errors.AccessDenied;
}
return _errorActions(returnErr,

@@ -107,7 +107,7 @@ function websiteHead(request, log, callback) {
'bucketGet', constants.publicId, null, log, request);
// if index object does not exist and bucket is private AWS
// returns 403 - AccessDenied error.
- if (err === errors.NoSuchKey && !bucketAuthorized) {
+ if (err.is.NoSuchKey && !bucketAuthorized) {
returnErr = errors.AccessDenied;
}
return _errorActions(returnErr, routingRules,

@@ -84,7 +84,7 @@ class AzureClient {

return metadata.listMultipartUploads(mpuBucketName, listingParams,
log, (err, mpuList) => {
- if (err && !err.NoSuchBucket) {
+ if (err && !err.is.NoSuchBucket) {
log.error('Error listing MPUs for Azure delete',
{ error: err, dataStoreName });
return cb(errors.ServiceUnavailable);

@@ -85,7 +85,7 @@ function _retryDelete(objectGetInfo, log, count, cb) {
}
return client.delete(objectGetInfo, log.getSerializedUids(), err => {
if (err) {
- if (err.ObjNotFound) {
+ if (err.is.ObjNotFound) {
log.info('no such key in datastore',
{ objectGetInfo, implName, moreRetries: 'no' });
return cb(err);
@@ -264,7 +264,7 @@ const data = {
return;
}
_retryDelete(clientGetInfo, log, 0, err => {
- if (err && !err.ObjNotFound) {
+ if (err && !err.is.ObjNotFound) {
log.error('delete error from datastore',
{ error: err, key: objectGetInfo.key, moreRetries: 'no' });
}

@@ -49,7 +49,7 @@ class BucketClientInterface {
getBucketAndObject(bucketName, objName, params, log, cb) {
this.client.getBucketAndObject(bucketName, objName,
log.getSerializedUids(), (err, data) => {
- if (err && (!err.NoSuchKey && !err.ObjNotFound)) {
+ if (err && (!err.is.NoSuchKey && !err.is.ObjNotFound)) {
return cb(err);
}
return cb(null, JSON.parse(data));

@@ -84,7 +84,7 @@ class BucketFileInterface {

createBucket(bucketName, bucketMD, log, cb) {
this.getBucketAttributes(bucketName, log, err => {
- if (err && err !== errors.NoSuchBucket) {
+ if (err && !err.is.NoSuchBucket) {
return cb(err);
}
if (err === undefined) {
@@ -102,7 +102,7 @@ class BucketFileInterface {
.withRequestLogger(log)
.get(bucketName, {}, (err, data) => {
if (err) {
- if (err.ObjNotFound) {
+ if (err.is.ObjNotFound) {
return cb(errors.NoSuchBucket);
}
const logObj = {
@@ -126,7 +126,7 @@ class BucketFileInterface {
db.withRequestLogger(log)
.get(objName, params, (err, objAttr) => {
if (err) {
- if (err.ObjNotFound) {
+ if (err.is.ObjNotFound) {
return cb(null, {
bucket: bucketAttr.serialize(),
});
@@ -219,7 +219,7 @@ class BucketFileInterface {
}
db.withRequestLogger(log).get(objName, params, (err, data) => {
if (err) {
- if (err.ObjNotFound) {
+ if (err.is.ObjNotFound) {
return cb(errors.NoSuchKey);
}
const logObj = {

@@ -719,7 +719,7 @@ function batchDelete(request, response, log, callback) {
log.trace('batch delete locations', { locations });
return async.eachLimit(locations, 5, (loc, next) => {
data.delete(loc, log, err => {
- if (err && err.ObjNotFound) {
+ if (err && err.is.ObjNotFound) {
log.info('batch delete: data location do not exist', {
method: 'batchDelete',
location: loc,

@@ -42,7 +42,7 @@ const services = {
// buckets to list. By returning an empty array, the
// getService API will just respond with the user info
// without listing any buckets.
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
log.trace('no buckets found');
// If we checked the old user bucket, that means we
// already checked the new user bucket. If neither the
@@ -555,7 +555,7 @@ const services = {
// If the MPU was initiated, the mpu bucket should exist.
const mpuBucketName = `${constants.mpuBucketPrefix}${bucketName}`;
metadata.getBucket(mpuBucketName, log, (err, mpuBucket) => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
log.debug('bucket not found in metadata', { error: err,
method: 'services.metadataValidateMultipart' });
return cb(errors.NoSuchUpload);
@@ -577,7 +577,7 @@ const services = {
metadata.getObjectMD(mpuBucket.getName(), mpuOverviewKey,
{}, log, (err, storedMetadata) => {
if (err) {
- if (err.NoSuchKey) {
+ if (err.is.NoSuchKey) {
return cb(errors.NoSuchUpload);
}
log.error('error from metadata', { error: err });
@@ -753,7 +753,7 @@ const services = {
assert.strictEqual(typeof bucketName, 'string');
const MPUBucketName = `${constants.mpuBucketPrefix}${bucketName}`;
metadata.getBucket(MPUBucketName, log, (err, bucket) => {
- if (err && err.NoSuchBucket) {
+ if (err && err.is.NoSuchBucket) {
log.trace('no buckets found');
const creationDate = new Date().toJSON();
const mpuBucket = new BucketInfo(MPUBucketName,

@@ -20,7 +20,7 @@
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@hapi/joi": "^17.1.0",
- "arsenal": "git+https://github.com/scality/arsenal#7.10.15",
+ "arsenal": "git+https://github.com/scality/arsenal#ccbc1ed1",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"azure-storage": "^2.1.0",

@@ -1,10 +1,10 @@
const assert = require('assert');
const { S3 } = require('aws-sdk');
const { series } = require('async');
+ const { errors } = require('arsenal');

const getConfig = require('../support/config');
const BucketUtility = require('../../lib/utility/bucket-util');
- const { errors } = require('arsenal');

const bucket = 'source-bucket';
const replicationConfig = {
@@ -81,7 +81,7 @@ describe('aws-node-sdk test deleteBucketReplication', () => {
}),
next => deleteReplicationAndCheckResponse(bucket, next),
next => s3.getBucketReplication({ Bucket: bucket }, err => {
- assert(errors.ReplicationConfigurationNotFoundError[err.code]);
+ assert(errors.ReplicationConfigurationNotFoundError.is[err.code]);
return next();
}),
], done));

@@ -1,10 +1,10 @@
const assert = require('assert');
const { S3 } = require('aws-sdk');
const { series } = require('async');
+ const { errors } = require('arsenal');

const getConfig = require('../support/config');
const BucketUtility = require('../../lib/utility/bucket-util');
- const { errors } = require('arsenal');

const bucket = 'source-bucket';

@@ -45,7 +45,7 @@ describe('aws-node-sdk test getBucketReplication', () => {
it("should return 'ReplicationConfigurationNotFoundError' if bucket does " +
'not have a replication configuration', done =>
s3.getBucketReplication({ Bucket: bucket }, err => {
- assert(errors.ReplicationConfigurationNotFoundError[err.code]);
+ assert(errors.ReplicationConfigurationNotFoundError.is[err.code]);
return done();
}));

@@ -1,6 +1,5 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
- const { errors } = require('arsenal');
const DummyRequestLogger = require('../unit/helpers').DummyRequestLogger;
const clientCheck
= require('../../lib/utilities/healthcheckHandler').clientCheck;
@@ -71,7 +70,7 @@ describe('Healthcheck response', () => {
const azureLocationNonExistContainerError =
results[azureLocationNonExistContainer].error;
if (err) {
- assert.strictEqual(err, errors.InternalError,
+ assert(err.is.InternalError,
`got unexpected err in clientCheck: ${err}`);
assert(azureLocationNonExistContainerError.startsWith(
'The specified container is being deleted.'));

@@ -2,7 +2,7 @@ const assert = require('assert');
const async = require('async');
const AWS = require('aws-sdk');
const { parseString } = require('xml2js');
- const { errors, models } = require('arsenal');
+ const { models } = require('arsenal');

const BucketInfo = models.BucketInfo;
const { getRealAwsConfig } =
@@ -460,11 +460,13 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
abortMPU(uploadId, getAwsParams(objectKey), () => {
const listParams = getListParams(objectKey, uploadId);
listParts(authInfo, listParams, log, err => {
- assert.deepStrictEqual(err, errors.ServiceUnavailable
- .customizeDescription('Error returned from AWS: ' +
- 'The specified upload does not exist. The upload ID ' +
- 'may be invalid, or the upload may have been aborted ' +
- 'or completed.'));
+ // TODO
+ assert.strictEqual(err.is.ServiceUnavailable, true);
+ assert.strictEqual(err.description,
+ 'Error returned from AWS: ' +
+ 'The specified upload does not exist. The upload ID ' +
+ 'may be invalid, or the upload may have been aborted ' +
+ 'or completed.');
done();
});
});
@@ -513,7 +515,7 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
const fakeKey = `key-${Date.now()}`;
const delParams = getDeleteParams(fakeKey, fakeUploadId);
multipartDelete(authInfo, delParams, log, err => {
- assert.equal(err, errors.NoSuchUpload,
+ assert(err.is.NoSuchUpload,
`Error aborting MPU: ${err}`);
done();
});
@@ -639,7 +641,7 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
const compParams = getCompleteParams(objectKey, uploadId);
compParams.post = errorBody;
completeMultipartUpload(authInfo, compParams, log, err => {
- assert.deepStrictEqual(err, errors.InvalidPart);
+ assert(err.is.InvalidPart);
done();
});
});
@@ -661,7 +663,7 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
const compParams = getCompleteParams(objectKey, uploadId);
compParams.post = errorBody;
completeMultipartUpload(authInfo, compParams, log, err => {
- assert.deepStrictEqual(err, errors.InvalidPartOrder);
+ assert(err.is.InvalidPartOrder);
done();
});
});
@@ -687,7 +689,7 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
const compParams = getCompleteParams(objectKey, uploadId);
compParams.post = errorBody;
completeMultipartUpload(authInfo, compParams, log, err => {
- assert.deepStrictEqual(err, errors.EntityTooSmall);
+ assert(err.is.EntityTooSmall);
done();
});
});
@@ -825,7 +827,7 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
(uploadId, next) => {
const listParams = getListParams(objectKey, uploadId);
listParts(authInfo, listParams, log, err => {
- assert(err.NoSuchUpload);
+ assert(err.is.NoSuchUpload);
next();
});
},

@@ -58,7 +58,7 @@ function put(bucketLoc, objLoc, requestHost, cb, errorDescription) {
resHeaders) => {
if (errorDescription) {
assert.strictEqual(err.code, 400);
- assert(err.InvalidArgument);
+ assert(err.is.InvalidArgument);
assert(err.description.indexOf(errorDescription) > -1);
} else {
assert.strictEqual(err, null, `Error putting object: ${err}`);

@@ -2,6 +2,7 @@ const assert = require('assert');
const async = require('async');
const { parseString } = require('xml2js');
const AWS = require('aws-sdk');
+ const { errors } = require('arsenal');

const { cleanup, DummyRequestLogger, makeAuthInfo }
= require('../unit/helpers');
@@ -147,8 +148,7 @@ errorPutCopyPart) {
return objectPutCopyPart(authInfo, copyPartReq,
bucketName, sourceObjName, undefined, log, (err, copyResult) => {
if (errorPutCopyPart) {
- assert.strictEqual(err.code, errorPutCopyPart.statusCode);
- assert(err[errorPutCopyPart.code]);
+ assert.strictEqual(err.is[errorPutCopyPart.type], true);
return cb();
}
assert.strictEqual(err, null);
@@ -293,7 +293,7 @@ function testSuite() {
it('should return error 403 AccessDenied copying part to a ' +
'different AWS location without object READ access',
done => {
- const errorPutCopyPart = { code: 'AccessDenied', statusCode: 403 };
+ const errorPutCopyPart = errors.AccessDenied;
copyPutPart(null, awsLocation, awsLocation2, 'localhost', done,
errorPutCopyPart);
});

@@ -99,7 +99,7 @@ errorDescription) {
(err, json) => {
if (errorDescription) {
assert.strictEqual(err.code, 400);
- assert(err.InvalidArgument);
+ assert(err.is.InvalidArgument);
assert(err.description.indexOf(errorDescription) > -1);
return cb();
}

@@ -37,7 +37,8 @@ describe('objectLockHelpers: validateHeaders', () => {
= validateHeaders(objLockDisabledBucketInfo, headers, log);
const expectedError = errors.InvalidRequest.customizeDescription(
'Bucket is missing ObjectLockConfiguration');
- assert.strictEqual(objectLockValidationError.InvalidRequest, true);
+ assert.strictEqual(
+ objectLockValidationError.is.InvalidRequest, true);
assert.strictEqual(objectLockValidationError.description,
expectedError.description);
});
@@ -90,7 +91,8 @@ describe('objectLockHelpers: validateHeaders', () => {
const expectedError = errors.InvalidArgument.customizeDescription(
'x-amz-object-lock-retain-until-date and x-amz-object-lock-mode ' +
'must both be supplied');
- assert.strictEqual(objectLockValidationError.InvalidArgument, true);
+ assert.strictEqual(
+ objectLockValidationError.is.InvalidArgument, true);
assert.strictEqual(objectLockValidationError.description,
expectedError.description);
});
@@ -104,7 +106,8 @@ describe('objectLockHelpers: validateHeaders', () => {
const expectedError = errors.InvalidArgument.customizeDescription(
'x-amz-object-lock-retain-until-date and x-amz-object-lock-mode ' +
'must both be supplied');
- assert.strictEqual(objectLockValidationError.InvalidArgument, true);
+ assert.strictEqual(
+ objectLockValidationError.is.InvalidArgument, true);
assert.strictEqual(objectLockValidationError.description,
expectedError.description);
});
@@ -118,7 +121,8 @@ describe('objectLockHelpers: validateHeaders', () => {
'The retain until date must be in the future!');
const objectLockValidationError
= validateHeaders(bucketInfo, headers, log);
- assert.strictEqual(objectLockValidationError.InvalidArgument, true);
+ assert.strictEqual(
+ objectLockValidationError.is.InvalidArgument, true);
assert.strictEqual(objectLockValidationError.description,
expectedError.description);
});
@@ -131,7 +135,8 @@ describe('objectLockHelpers: validateHeaders', () => {
= validateHeaders(bucketInfo, headers, log);
const expectedError = errors.InvalidArgument.customizeDescription(
'Legal hold status must be one of "ON", "OFF"');
- assert.strictEqual(objectLockValidationError.InvalidArgument, true);
+ assert.strictEqual(
+ objectLockValidationError.is.InvalidArgument, true);
assert.strictEqual(objectLockValidationError.description,
expectedError.description);
});
@@ -145,7 +150,8 @@ describe('objectLockHelpers: validateHeaders', () => {
= validateHeaders(bucketInfo, headers, log);
const expectedError = errors.InvalidArgument.customizeDescription(
'Unknown wormMode directive');
- assert.strictEqual(objectLockValidationError.InvalidArgument, true);
+ assert.strictEqual(
+ objectLockValidationError.is.InvalidArgument, true);
assert.strictEqual(objectLockValidationError.description,
expectedError.description);
});
@@ -207,7 +213,7 @@ describe('objectLockHelpers: validateObjectLockUpdate', () => {
};

const error = validateObjectLockUpdate(objMD, retentionInfo, false);
- assert.deepStrictEqual(error, errors.AccessDenied);
+ assert.strictEqual(error.is.AccessDenied, true);
});

it('should disallow COMPLIANCE => GOVERNANCE if retention is not expired', () => {
@@ -222,7 +228,7 @@ describe('objectLockHelpers: validateObjectLockUpdate', () => {
};

const error = validateObjectLockUpdate(objMD, retentionInfo);
- assert.deepStrictEqual(error, errors.AccessDenied);
+ assert.strictEqual(error.is.AccessDenied, true);
});

it('should allow COMPLIANCE => GOVERNANCE if retention is expired', () => {
@@ -267,7 +273,7 @@ describe('objectLockHelpers: validateObjectLockUpdate', () => {
};

const error = validateObjectLockUpdate(objMD, retentionInfo);
- assert.deepStrictEqual(error, errors.AccessDenied);
+ assert.strictEqual(error.is.AccessDenied, true);
});

it('should allow shortening retention period if in GOVERNANCE', () => {

@@ -425,7 +425,7 @@ describe('versioning helpers', () => {
'foobucket', mockBucketMD, testCase.objMD,
testCase.reqVersionId, null, (err, options) => {
if (testCase.expectedError) {
- assert.strictEqual(err, testCase.expectedError);
+ assert.strictEqual(err.is[testCase.expectedError.type], true);
} else {
assert.ifError(err);
assert.deepStrictEqual(options, testCase.expectedRes);

@ -2,7 +2,6 @@ const crypto = require('crypto');
|
|||
const assert = require('assert');
|
||||
const async = require('async');
|
||||
const { parseString } = require('xml2js');
|
||||
const { errors } = require('arsenal');
|
||||
|
||||
const bucketDelete = require('../../../lib/api/bucketDelete');
|
||||
const { bucketPut } = require('../../../lib/api/bucketPut');
|
||||
|
@ -112,7 +111,7 @@ describe('bucketDelete API', () => {
|
|||
objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
bucketDelete(authInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.BucketNotEmpty);
|
||||
assert(err.is.BucketNotEmpty);
|
||||
metadata.getBucket(bucketName, log, (err, md) => {
|
||||
assert.strictEqual(md.getName(), bucketName);
|
||||
metadata.listObject(usersBucket,
|
||||
|
@ -146,7 +145,7 @@ describe('bucketDelete API', () => {
|
|||
bucketPut(authInfo, testRequest, log, () => {
|
||||
bucketDelete(authInfo, testRequest, log, () => {
|
||||
metadata.getBucket(bucketName, log, (err, md) => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert(err.is.NoSuchBucket);
|
||||
assert.strictEqual(md, undefined);
|
||||
metadata.listObject(usersBucket, { prefix: canonicalID },
|
||||
log, (err, listResponse) => {
|
||||
|
@ -169,7 +168,7 @@ describe('bucketDelete API', () => {
|
|||
it('should prevent anonymous user delete bucket API access', done => {
|
||||
const publicAuthInfo = makeAuthInfo(constants.publicId);
|
||||
bucketDelete(publicAuthInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.AccessDenied);
|
||||
assert(err.is.AccessDenied);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -10,8 +10,6 @@ const objectPut = require('../../../lib/api/objectPut');
|
|||
const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../helpers');
|
||||
const DummyRequest = require('../DummyRequest');
|
||||
|
||||
const { errors } = require('arsenal');
|
||||
|
||||
const authInfo = makeAuthInfo('accessKey1');
|
||||
const bucketName = 'bucketname';
|
||||
const delimiter = '/';
|
||||
|
@ -199,7 +197,7 @@ describe('bucketGet API', () => {
|
|||
const testGetRequest = Object.assign({ query: { 'max-keys': '-1' } },
|
||||
baseGetRequest);
|
||||
bucketGet(authInfo, testGetRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert(err.is.InvalidArgument);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -28,7 +28,7 @@ describe('getBucketLifecycle API', () => {
|
|||
'bucket has no lifecycle', done => {
|
||||
const lifecycleRequest = getLifecycleRequest(bucketName);
|
||||
bucketGetLifecycle(authInfo, lifecycleRequest, log, err => {
|
||||
assert.strictEqual(err.NoSuchLifecycleConfiguration, true);
|
||||
assert.strictEqual(err.is.NoSuchLifecycleConfiguration, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -74,7 +74,7 @@ describe('bucketGetObjectLock API', () => {
|
|||
'object lock is not enabled on the bucket', done => {
|
||||
const objectLockRequest = getObjectLockConfigRequest(bucketName);
|
||||
bucketGetObjectLock(authInfo, objectLockRequest, log, err => {
|
||||
assert.strictEqual(err.ObjectLockConfigurationNotFoundError, true);
|
||||
assert.strictEqual(err.is.ObjectLockConfigurationNotFoundError, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -44,7 +44,7 @@ describe('getBucketPolicy API', () => {
|
|||
it('should return NoSuchBucketPolicy error if ' +
|
||||
'bucket has no policy', done => {
|
||||
bucketGetPolicy(authInfo, testBasicRequest, log, err => {
|
||||
assert.strictEqual(err.NoSuchBucketPolicy, true);
|
||||
assert.strictEqual(err.is.NoSuchBucketPolicy, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
const assert = require('assert');
|
||||
const { errors } = require('arsenal');
|
||||
|
||||
const bucketHead = require('../../../lib/api/bucketHead');
|
||||
const { bucketPut } = require('../../../lib/api/bucketPut');
|
||||
|
@ -22,7 +21,7 @@ describe('bucketHead API', () => {
|
|||
|
||||
it('should return an error if the bucket does not exist', done => {
|
||||
bucketHead(authInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -31,7 +30,7 @@ describe('bucketHead API', () => {
|
|||
const otherAuthInfo = makeAuthInfo('accessKey2');
|
||||
bucketPut(otherAuthInfo, testRequest, log, () => {
|
||||
bucketHead(authInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.AccessDenied);
|
||||
assert.strictEqual(err.is.AccessDenied, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
const assert = require('assert');
|
||||
const { errors } = require('arsenal');
|
||||
|
||||
const { checkLocationConstraint } = require('../../../lib/api/bucketPut');
|
||||
const { bucketPut } = require('../../../lib/api/bucketPut');
|
||||
|
@ -83,8 +82,8 @@ describe('checkLocationConstraint function', () => {
|
|||
if (testCheck.isError) {
|
||||
assert.notEqual(checkLocation.error, null,
|
||||
'Expected failure but got success');
|
||||
assert.strictEqual(checkLocation.error.
|
||||
InvalidLocationConstraint, true);
|
||||
assert.strictEqual(
|
||||
checkLocation.error.is.InvalidLocationConstraint, true);
|
||||
} else {
|
||||
assert.ifError(checkLocation.error);
|
||||
assert.strictEqual(checkLocation.locationConstraint,
|
||||
|
@ -105,7 +104,7 @@ describe('bucketPut API', () => {
|
|||
bucketPut(authInfo, testRequest, log, () => {
|
||||
bucketPut(otherAuthInfo, testRequest,
|
||||
log, err => {
|
||||
assert.deepStrictEqual(err, errors.BucketAlreadyExists);
|
||||
assert.strictEqual(err.is.BucketAlreadyExists, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -188,9 +187,9 @@ describe('bucketPut API', () => {
|
|||
post: '',
|
||||
};
|
||||
bucketPut(authInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
metadata.getBucket(bucketName, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -209,9 +208,9 @@ describe('bucketPut API', () => {
|
|||
post: '',
|
||||
};
|
||||
bucketPut(authInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
metadata.getBucket(bucketName, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -231,9 +230,9 @@ describe('bucketPut API', () => {
|
|||
post: '',
|
||||
};
|
||||
bucketPut(authInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.UnresolvableGrantByEmailAddress);
|
||||
assert.strictEqual(err.is.UnresolvableGrantByEmailAddress, true);
|
||||
metadata.getBucket(bucketName, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -309,7 +308,7 @@ describe('bucketPut API', () => {
|
|||
it('should prevent anonymous user from accessing putBucket API', done => {
|
||||
const publicAuthInfo = makeAuthInfo(constants.publicId);
|
||||
bucketPut(publicAuthInfo, testRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.AccessDenied);
|
||||
assert.strictEqual(err.is.AccessDenied, true);
|
||||
});
|
||||
done();
|
||||
});
|
||||
|
@ -367,11 +366,10 @@ describe('bucketPut API', () => {
|
|||
|
||||
it('should return error if location constraint config is not updated',
|
||||
done => bucketPut(authInfo, req, log, err => {
|
||||
const expectedError = errors.InvalidLocationConstraint;
|
||||
expectedError.description = 'value of the location you are ' +
|
||||
assert.strictEqual(err.is.InvalidLocationConstraint, true);
|
||||
assert.strictEqual(err.description, 'value of the location you are ' +
|
||||
`attempting to set - ${newLCKey} - is not listed in the ` +
|
||||
'locationConstraint config';
|
||||
assert.deepStrictEqual(err, expectedError);
|
||||
'locationConstraint config');
|
||||
done();
|
||||
}));
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
const assert = require('assert');
|
||||
const { errors } = require('arsenal');
|
||||
|
||||
const aclUtils = require('../../../lib/utilities/aclUtils');
|
||||
const { bucketPut } = require('../../../lib/api/bucketPut');
|
||||
|
@ -75,7 +74,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -273,7 +272,7 @@ describe('putBucketACL API', () => {
|
|||
query: { acl: '' },
|
||||
};
|
||||
return bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -295,7 +294,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.UnresolvableGrantByEmailAddress);
|
||||
assert.strictEqual(err.is.UnresolvableGrantByEmailAddress, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -421,7 +420,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedACLError);
|
||||
assert.strictEqual(err.is.MalformedACLError, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -464,7 +463,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedACLError);
|
||||
assert.strictEqual(err.is.MalformedACLError, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -496,7 +495,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
return bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -527,7 +526,7 @@ describe('putBucketACL API', () => {
|
|||
query: { acl: '' },
|
||||
};
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.UnresolvableGrantByEmailAddress);
|
||||
assert.strictEqual(err.is.UnresolvableGrantByEmailAddress, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -563,7 +562,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedACLError);
|
||||
assert.strictEqual(err.is.MalformedACLError, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -608,7 +607,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedACLError);
|
||||
assert.strictEqual(err.is.MalformedACLError, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -643,7 +642,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedXML);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -677,7 +676,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -698,7 +697,7 @@ describe('putBucketACL API', () => {
|
|||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -129,7 +129,7 @@ describe('PUT bucket cors :: helper validation functions ', () => {
|
|||
`<ID>${testValue}</ID>`);
|
||||
parseCorsXml(xml, log, err => {
|
||||
assert(err, 'Expected error but found none');
|
||||
assert.deepStrictEqual(err, errors.MalformedXML);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
@ -175,7 +175,7 @@ describe('PUT bucket cors :: helper validation functions ', () => {
|
|||
`<MaxAgeSeconds>${testValue}</MaxAgeSeconds>`);
|
||||
parseCorsXml(xml, log, err => {
|
||||
assert(err, 'Expected error but found none');
|
||||
assert.deepStrictEqual(err, errors.MalformedXML);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -25,7 +25,7 @@ describe('bucketPutEncryption API', () => {
|
|||
describe('test invalid sse configs', () => {
|
||||
it('should reject an empty config', done => {
|
||||
bucketPutEncryption(authInfo, templateRequest(bucketName, { post: '' }), log, err => {
|
||||
assert.strictEqual(err.MalformedXML, true);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -36,7 +36,7 @@ describe('bucketPutEncryption API', () => {
|
|||
<ServerSideEncryptionConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
|
||||
</ServerSideEncryptionConfiguration>`,
|
||||
}), log, err => {
|
||||
assert.strictEqual(err.MalformedXML, true);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -48,7 +48,7 @@ describe('bucketPutEncryption API', () => {
|
|||
<Rule></Rule>
|
||||
</ServerSideEncryptionConfiguration>`,
|
||||
}), log, err => {
|
||||
assert.strictEqual(err.MalformedXML, true);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -56,7 +56,7 @@ describe('bucketPutEncryption API', () => {
|
|||
it('should reject a config with no SSEAlgorithm', done => {
|
||||
const post = templateSSEConfig({});
|
||||
bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => {
|
||||
assert.strictEqual(err.MalformedXML, true);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -64,7 +64,7 @@ describe('bucketPutEncryption API', () => {
|
|||
it('should reject a config with an invalid SSEAlgorithm', done => {
|
||||
const post = templateSSEConfig({ algorithm: 'InvalidAlgo' });
|
||||
bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => {
|
||||
assert.strictEqual(err.MalformedXML, true);
|
||||
assert.strictEqual(err.is.MalformedXML, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -72,7 +72,7 @@ describe('bucketPutEncryption API', () => {
|
|||
it('should reject a config with SSEAlgorithm == AES256 and a provided KMSMasterKeyID', done => {
|
||||
const post = templateSSEConfig({ algorithm: 'AES256', keyId: '12345' });
|
||||
bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => {
|
||||
assert.strictEqual(err.InvalidArgument, true);
|
||||
assert.strictEqual(err.is.InvalidArgument, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -48,7 +48,7 @@ describe('putBucketObjectLock API', () => {
|
|||
|
||||
it('should return InvalidBucketState error', done => {
|
||||
bucketPutObjectLock(authInfo, putObjLockRequest, log, err => {
|
||||
assert.strictEqual(err.InvalidBucketState, true);
|
||||
assert.strictEqual(err.is.InvalidBucketState, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -70,7 +70,7 @@ describe('putBucketPolicy API', () => {
|
|||
expectedBucketPolicy.Statement[0].Resource = 'arn:aws::s3:::badname';
|
||||
bucketPutPolicy(authInfo, getPolicyRequest(expectedBucketPolicy),
|
||||
log, err => {
|
||||
assert.strictEqual(err.MalformedPolicy, true);
|
||||
assert.strictEqual(err.is.MalformedPolicy, true);
|
||||
assert.strictEqual(err.description, 'Policy has invalid resource');
|
||||
return done();
|
||||
});
|
||||
|
@ -81,7 +81,7 @@ describe('putBucketPolicy API', () => {
|
|||
{ StringEquals: { 's3:x-amz-acl': ['public-read'] } };
|
||||
bucketPutPolicy(authInfo, getPolicyRequest(expectedBucketPolicy), log,
|
||||
err => {
|
||||
assert.strictEqual(err.NotImplemented, true);
|
||||
assert.strictEqual(err.is.NotImplemented, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -90,7 +90,7 @@ describe('putBucketPolicy API', () => {
|
|||
expectedBucketPolicy.Statement[0].Principal = { Service: ['test.com'] };
|
||||
bucketPutPolicy(authInfo, getPolicyRequest(expectedBucketPolicy), log,
|
||||
err => {
|
||||
assert.strictEqual(err.NotImplemented, true);
|
||||
assert.strictEqual(err.is.NotImplemented, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -100,7 +100,7 @@ describe('putBucketPolicy API', () => {
|
|||
{ Federated: 'www.test.com' };
|
||||
bucketPutPolicy(authInfo, getPolicyRequest(expectedBucketPolicy), log,
|
||||
err => {
|
||||
assert.strictEqual(err.NotImplemented, true);
|
||||
assert.strictEqual(err.is.NotImplemented, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -13,7 +13,7 @@ function checkError(xml, expectedErr, cb) {
|
|||
if (expectedErr === null) {
|
||||
assert.strictEqual(err, null, `expected no error but got '${err}'`);
|
||||
} else {
|
||||
assert(err[expectedErr], 'incorrect error response: should be ' +
|
||||
assert(err.is[expectedErr], 'incorrect error response: should be ' +
|
||||
`'Error: ${expectedErr}' but got '${err}'`);
|
||||
}
|
||||
return cb();
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
const crypto = require('crypto');
|
||||
const assert = require('assert');
|
||||
const { errors } = require('arsenal');
|
||||
|
||||
const BucketInfo = require('arsenal').models.BucketInfo;
|
||||
const bucketGet = require('../../../lib/api/bucketGet');
|
||||
|
@ -96,7 +95,7 @@ function confirmDeleted(done) {
|
|||
process.nextTick(() => {
|
||||
process.nextTick(() => {
|
||||
metadata.getBucket(bucketName, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
return checkBucketListing(authInfo, bucketName, 0, done);
|
||||
});
|
||||
});
|
||||
|
@ -138,7 +137,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'different account sends put bucket request for bucket with ' +
|
||||
'deleted flag', done => {
|
||||
bucketPut(otherAccountAuthInfo, baseTestRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.BucketAlreadyExists);
|
||||
assert.strictEqual(err.is.BucketAlreadyExists, true);
|
||||
metadata.getBucket(bucketName, log, (err, data) => {
|
||||
assert.strictEqual(data._transient, false);
|
||||
assert.strictEqual(data._deleted, true);
|
||||
|
@ -193,7 +192,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'x-amz-acl': 'public-read' }, 'headers',
|
||||
baseTestRequest, baseTestRequest.headers);
|
||||
bucketPutACL(otherAccountAuthInfo, putACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
metadata.getBucket(bucketName, log, (err, data) => {
|
||||
assert.strictEqual(data._deleted, true);
|
||||
assert.strictEqual(data._transient, false);
|
||||
|
@ -212,7 +211,7 @@ describe('deleted flag bucket handling', () => {
|
|||
baseTestRequest, baseTestRequest.headers);
|
||||
const unauthorizedAccount = makeAuthInfo('keepMeOut');
|
||||
bucketPutACL(unauthorizedAccount, putACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.AccessDenied);
|
||||
assert.strictEqual(err.is.AccessDenied, true);
|
||||
metadata.getBucket(bucketName, log, (err, data) => {
|
||||
assert.strictEqual(data._deleted, true);
|
||||
assert.strictEqual(data._transient, false);
|
||||
|
@ -266,7 +265,7 @@ describe('deleted flag bucket handling', () => {
|
|||
const postBody = Buffer.from('I am a body', 'utf8');
|
||||
const putObjRequest = new DummyRequest(setUpRequest, postBody);
|
||||
objectPut(otherAccountAuthInfo, putObjRequest, undefined, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -314,7 +313,7 @@ describe('deleted flag bucket handling', () => {
|
|||
initiateRequest.objectKey = 'objectName';
|
||||
initiateMultipartUpload(otherAccountAuthInfo, initiateRequest, log,
|
||||
err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -331,7 +330,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'authorized', done => {
|
||||
bucketDelete(otherAccountAuthInfo, baseTestRequest,
|
||||
log, err => {
|
||||
assert.deepStrictEqual(err, errors.AccessDenied);
|
||||
assert.strictEqual(err.is.AccessDenied, true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -340,7 +339,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'NoSuchBucket error and complete deletion', done => {
|
||||
bucketDeleteWebsite(authInfo, baseTestRequest,
|
||||
log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
confirmDeleted(done);
|
||||
});
|
||||
});
|
||||
|
@ -349,7 +348,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'NoSuchBucket error and complete deletion', done => {
|
||||
bucketGet(authInfo, baseTestRequest,
|
||||
log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
confirmDeleted(done);
|
||||
});
|
||||
});
|
||||
|
@ -358,7 +357,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'NoSuchBucket error and complete deletion', done => {
|
||||
bucketGetACL(authInfo, baseTestRequest,
|
||||
log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
confirmDeleted(done);
|
||||
});
|
||||
});
|
||||
|
@ -367,7 +366,7 @@ describe('deleted flag bucket handling', () => {
|
|||
'NoSuchBucket error and complete deletion', done => {
|
||||
bucketGetCors(authInfo, baseTestRequest,
|
||||
log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
confirmDeleted(done);
|
||||
});
|
||||
});
|
||||
|
@ -383,7 +382,7 @@ describe('deleted flag bucket handling', () => {
|
|||
bucketPutCorsRequest.headers['content-md5'] = crypto.createHash('md5')
|
||||
.update(bucketPutCorsRequest.post, 'utf8').digest('base64');
|
||||
bucketPutCors(authInfo, bucketPutCorsRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
|
||||
assert.strictEqual(err.is.NoSuchBucket, true);
|
||||
confirmDeleted(done);
|
||||
});
|
||||
});
|
||||
|
@ -391,7 +390,7 @@ describe('deleted flag bucket handling', () => {
|
|||
it('bucketDeleteCors request on bucket with delete flag should return ' +
|
||||
'NoSuchBucket error and complete deletion', done => {
|
||||
bucketDeleteCors(authInfo, baseTestRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -400,7 +399,7 @@ describe('deleted flag bucket handling', () => {
'NoSuchBucket error and complete deletion', done => {
bucketGetWebsite(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -414,7 +413,7 @@ describe('deleted flag bucket handling', () => {
'</WebsiteConfiguration>';
bucketPutWebsite(authInfo, bucketPutWebsiteRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -423,7 +422,7 @@ describe('deleted flag bucket handling', () => {
'NoSuchBucket error and complete deletion', done => {
bucketHead(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -438,13 +437,13 @@ describe('deleted flag bucket handling', () => {
if (extraArgNeeded) {
return apiAction(authInfo, mpuRequest, undefined,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
return done();
});
}
return apiAction(authInfo, mpuRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
return done();
});
}

@ -495,7 +494,7 @@ describe('deleted flag bucket handling', () => {
listRequest.query = {};
listMultipartUploads(authInfo, listRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -505,7 +504,7 @@ describe('deleted flag bucket handling', () => {
done => {
objectGet(authInfo, baseTestRequest, false,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -514,7 +513,7 @@ describe('deleted flag bucket handling', () => {
'NoSuchBucket error and complete deletion', done => {
objectGetACL(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -523,7 +522,7 @@ describe('deleted flag bucket handling', () => {
'NoSuchBucket error and complete deletion', done => {
objectHead(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -532,7 +531,7 @@ describe('deleted flag bucket handling', () => {
'NoSuchBucket error and complete deletion', done => {
objectPutACL(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

@ -541,7 +540,7 @@ describe('deleted flag bucket handling', () => {
'NoSuchBucket error', done => {
objectDelete(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
confirmDeleted(done);
});
});

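The test hunks above (and throughout the rest of this branch) move from deep-equality comparisons against the shared `errors.<Name>` objects to checking the boolean `err.is.<Name>` flag. A minimal sketch of the idea, using a hypothetical stand-in for an arsenal-style error (the real error objects come from the `arsenal` package pinned later in this diff), not the project's actual implementation:

```js
const assert = require('assert');

// Hypothetical stand-in for an arsenal-style error: the assumption here is that
// such errors expose an `is` map with one boolean per error name, true only for
// the error's own name.
function makeArsenalLikeError(name, code, description) {
    const err = new Error(description);
    err.code = code;
    err.description = description;
    err.is = new Proxy({}, { get: (_target, prop) => prop === name });
    return err;
}

const err = makeArsenalLikeError('NoSuchBucket', 404,
    'The specified bucket does not exist.');

// Old assertion style: deep-compares the whole error object, so any extra or
// customized field on the instance makes the test brittle.
// assert.deepStrictEqual(err, errors.NoSuchBucket);

// New assertion style used throughout this branch: only the error type matters.
assert.strictEqual(err.is.NoSuchBucket, true);
assert.strictEqual(err.is.NoSuchKey, false);
```

The design choice: a type check through `is` stays valid even when the error instance carries a customized description or extra metadata, which is exactly what breaks `deepStrictEqual` against a shared constant.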
@ -3,8 +3,6 @@ const async = require('async');
const { parseString } = require('xml2js');
const sinon = require('sinon');

const { errors } = require('arsenal');

const { cleanup, DummyRequestLogger } = require('../helpers');
const { config } = require('../../../lib/Config');
const services = require('../../../lib/services');

@ -126,7 +124,7 @@ describe('Multipart Delete API', () => {
'exist and legacyAwsBehavior set to true',
done => {
_createAndAbortMpu(true, true, eastLocation, err => {
assert.strictEqual(err, errors.NoSuchUpload,
assert.strictEqual(err.is.NoSuchUpload, true,
`Expected NoSuchUpload, got ${err}`);
done();
});

@ -168,7 +168,7 @@ describe('Multipart Upload API', () => {
'no destination bucket', done => {
initiateMultipartUpload(authInfo, initiateRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -315,7 +315,7 @@ describe('Multipart Upload API', () => {
}, postBody);
objectPutPart(authInfo, partRequest, undefined, log,
(err, result) => {
assert.deepStrictEqual(err, errors.TooManyParts);
assert.strictEqual(err.is.TooManyParts, true);
assert.strictEqual(result, undefined);
done();
});

@ -351,7 +351,7 @@ describe('Multipart Upload API', () => {
}, postBody);
objectPutPart(authInfo, partRequest, undefined, log,
(err, result) => {
assert.deepStrictEqual(err, errors.InvalidArgument);
assert.strictEqual(err.is.InvalidArgument, true);
assert.strictEqual(result, undefined);
done();
});

@ -394,7 +394,7 @@ describe('Multipart Upload API', () => {
}, postBody);
objectPutPart(authInfo, partRequest, undefined,
log, (err, result) => {
assert.deepStrictEqual(err, errors.EntityTooLarge);
assert.strictEqual(err.is.EntityTooLarge, true);
assert.strictEqual(result, undefined);
done();
});

@ -691,7 +691,7 @@ describe('Multipart Upload API', () => {
};
completeMultipartUpload(authInfo,
completeRequest, log, err => {
assert.deepStrictEqual(err, errors.MalformedXML);
assert.strictEqual(err.is.MalformedXML, true);
assert.strictEqual(metadata.keyMaps.get(mpuBucket).size,
2);
done();

@ -745,7 +745,7 @@ describe('Multipart Upload API', () => {
calculatedHash,
};
completeMultipartUpload(authInfo, completeRequest, log, err => {
assert.deepStrictEqual(err, errors.MalformedXML);
assert.strictEqual(err.is.MalformedXML, true);
done();
});
});

@ -818,8 +818,7 @@ describe('Multipart Upload API', () => {
};
completeMultipartUpload(authInfo,
completeRequest, log, err => {
assert.deepStrictEqual(err,
errors.InvalidPartOrder);
assert.strictEqual(err.is.InvalidPartOrder, true);
assert.strictEqual(metadata.keyMaps
.get(mpuBucket).size, 3);
done();

@ -876,8 +875,7 @@ describe('Multipart Upload API', () => {
calculatedHash,
};
completeMultipartUpload(authInfo, completeRequest, log, err => {
assert.deepStrictEqual(err,
errors.InvalidPart);
assert.strictEqual(err.is.InvalidPart, true);
assert.strictEqual(metadata.keyMaps.get(mpuBucket).size, 2);
done();
});

@ -950,7 +948,7 @@ describe('Multipart Upload API', () => {
assert.strictEqual(metadata.keyMaps.get(mpuBucket).size, 3);
completeMultipartUpload(authInfo,
completeRequest, log, err => {
assert.deepStrictEqual(err, errors.InvalidPart);
assert.strictEqual(err.is.InvalidPart, true);
done();
});
});

@ -1032,8 +1030,7 @@ describe('Multipart Upload API', () => {
assert.strictEqual(metadata.keyMaps.get(mpuBucket).size, 3);
completeMultipartUpload(authInfo,
completeRequest, log, err => {
assert.deepStrictEqual(err,
errors.EntityTooSmall);
assert.strictEqual(err.is.EntityTooSmall, true);
done();
});
});

@ -1716,7 +1713,7 @@ describe('Multipart Upload API', () => {

bucketPut(authInfo, bucketPutRequest, log, () =>
objectPutPart(authInfo, partRequest, undefined, log, err => {
assert.strictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
done();
})
);

@ -1831,7 +1828,7 @@ describe('Multipart Upload API', () => {
completeMultipartUpload(authInfo, completeRequest, log, err => {
// expect a failure here because we could not
// remove the overview key
assert.strictEqual(err, errors.InternalError);
assert.strictEqual(err.is.InternalError, true);
next(null, eTag, testUploadId);
});
},

@ -2054,7 +2051,7 @@ describe('complete mpu with versioning', () => {
completeMultipartUpload(authInfo, completeRequest, log, err => {
// expect a failure here because we could not
// remove the overview key
assert.strictEqual(err, errors.InternalError);
assert.strictEqual(err.is.InternalError, true);
next(null, eTag, testUploadId);
});
},

@ -109,7 +109,7 @@ describe('objectCopyPart', () => {
_createObjectCopyPartRequest(destBucketName, uploadId, headers);
objectPutCopyPart(
authInfo, req, sourceBucketName, objectKey, undefined, log, err => {
assert(err.InvalidArgument);
assert.strictEqual(err.is.InvalidArgument, true);
assert.strictEqual(err.description,
'The x-amz-copy-source-range value must be of the form ' +
'bytes=first-last where first and last are the ' +

@ -1,7 +1,6 @@
const assert = require('assert');
const async = require('async');
const crypto = require('crypto');
const { errors } = require('arsenal');
const xml2js = require('xml2js');

const { bucketPut } = require('../../../lib/api/bucketPut');

@ -93,8 +92,7 @@ describe('objectDelete API', () => {
assert.strictEqual(err, null);
objectGet(authInfo, testGetObjectRequest, false,
log, err => {
assert.deepStrictEqual(err,
errors.NoSuchKey);
assert.strictEqual(err.is.NoSuchKey, true);
done();
});
});

@ -117,10 +115,7 @@ describe('objectDelete API', () => {
assert.strictEqual(err, null);
objectGet(authInfo, testGetObjectRequest, false,
log, err => {
const expected =
Object.assign({}, errors.NoSuchKey);
const received = Object.assign({}, err);
assert.deepStrictEqual(received, expected);
assert.strictEqual(err.is.NoSuchKey, true);
done();
});
});

@ -183,7 +178,7 @@ describe('objectDelete API', () => {
const publicAuthInfo = makeAuthInfo(constants.publicId);
bucketPut(authInfo, testBucketPutRequest, log, () => {
objectDelete(publicAuthInfo, testDeleteRequest, log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -1,7 +1,6 @@
const assert = require('assert');
const async = require('async');
const { parseString } = require('xml2js');
const { errors } = require('arsenal');

const { bucketPut } = require('../../../lib/api/bucketPut');
const constants = require('../../../constants');

@ -79,7 +78,7 @@ describe('objectGetACL API', () => {
'for a nonexistent object', done => {
bucketPut(authInfo, testBucketPutRequest, log, () => {
objectGetACL(authInfo, testGetACLRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
assert.strictEqual(err.is.NoSuchKey, true);
done();
});
});

@ -61,7 +61,7 @@ describe('getObjectLegalHold API', () => {
it('should return InvalidRequest error', done => {
objectGetLegalHold(authInfo, getObjectLegalHoldRequest, log,
err => {
assert.strictEqual(err.InvalidRequest, true);
assert.strictEqual(err.is.InvalidRequest, true);
done();
});
});

@ -84,8 +84,7 @@ describe('getObjectLegalHold API', () => {
done => {
objectGetLegalHold(authInfo, getObjectLegalHoldRequest, log,
err => {
const error = err.NoSuchObjectLockConfiguration;
assert.strictEqual(error, true);
assert.strictEqual(err.is.NoSuchObjectLockConfiguration, true);
done();
});
});

@ -64,7 +64,7 @@ describe('getObjectRetention API', () => {

it('should return InvalidRequest error', done => {
objectGetRetention(authInfo, getObjRetRequest, log, err => {
assert.strictEqual(err.InvalidRequest, true);
assert.strictEqual(err.is.InvalidRequest, true);
done();
});
});

@ -85,7 +85,7 @@ describe('getObjectRetention API', () => {
it('should return NoSuchObjectLockConfiguration if no retention set',
done => {
objectGetRetention(authInfo, getObjRetRequest, log, err => {
assert.strictEqual(err.NoSuchObjectLockConfiguration, true);
assert.strictEqual(err.is.NoSuchObjectLockConfiguration, true);
done();
});
});

@ -1,5 +1,4 @@
const assert = require('assert');
const { errors } = require('arsenal');

const { bucketPut } = require('../../../lib/api/bucketPut');
const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../helpers');

@ -17,9 +16,9 @@ const postBody = Buffer.from('I am a body', 'utf8');
const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
const incorrectMD5 = 'fkjwelfjlslfksdfsdfsdfsdfsdfsdj';
const objectName = 'objectName';
const date = new Date();
const laterDate = date.setMinutes(date.getMinutes() + 30);
const earlierDate = date.setMinutes(date.getMinutes() - 30);
const masterDate = new Date();
const laterDate = new Date().setMinutes(masterDate.getMinutes() + 30);
const earlierDate = new Date().setMinutes(masterDate.getMinutes() - 30);
const testPutBucketRequest = {
bucketName,
namespace,

@ -60,7 +59,7 @@ describe('objectHead API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err, errors.NotModified);
assert.strictEqual(err.is.NotModified, true);
done();
});
});

@ -83,8 +82,7 @@ describe('objectHead API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err,
errors.PreconditionFailed);
assert.strictEqual(err.is.PreconditionFailed, true);
done();
});
});

@ -107,8 +105,7 @@ describe('objectHead API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err,
errors.PreconditionFailed);
assert.strictEqual(err.is.PreconditionFailed, true);
done();
});
});

@ -131,7 +128,7 @@ describe('objectHead API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err, errors.NotModified);
assert.strictEqual(err.is.NotModified, true);
done();
});
});

@ -171,16 +168,15 @@ describe('objectHead API', () => {
partNumber: '1',
},
};
const customizedInvalidRequestError = errors.InvalidRequest
.customizeDescription('Cannot specify both Range header and ' +
'partNumber query parameter.');

bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
assert.strictEqual(err, null, `Error objectPut: ${err}`);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err, customizedInvalidRequestError);
assert.deepStrictEqual(err.InvalidRequest, true);
assert.strictEqual(err.is.InvalidRequest, true);
assert.strictEqual(err.description,
'Cannot specify both Range header and ' +
'partNumber query parameter.');
done();
});
});

@ -198,15 +194,13 @@ describe('objectHead API', () => {
partNumber: 'nan',
},
};
const customizedInvalidArgumentError = errors.InvalidArgument
.customizeDescription('Part number must be a number.');

bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
assert.strictEqual(err, null, `Error objectPut: ${err}`);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err, customizedInvalidArgumentError);
assert.deepStrictEqual(err.InvalidArgument, true);
assert.strictEqual(err.is.InvalidArgument, true);
assert.strictEqual(err.description, 'Part number must be a number.');
done();
});
});

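Several objectHead and objectPut hunks above replace a deep comparison against a pre-built `customizeDescription(...)` error with two narrower assertions: one on the type flag and one on the description string. A minimal sketch under the same assumed error shape as the earlier example (the `Proxy`-based `is` map is illustrative, not arsenal's actual implementation):

```js
const assert = require('assert');

// Hypothetical error carrying a customized description (in the real tests this
// comes from something like errors.InvalidRequest.customizeDescription(...)).
const err = {
    code: 400,
    description: 'Cannot specify both Range header and partNumber query parameter.',
    is: new Proxy({}, { get: (_target, prop) => prop === 'InvalidRequest' }),
};

// Instead of deep-comparing against a pre-built customized error object,
// the updated tests assert the type and the description independently:
assert.strictEqual(err.is.InvalidRequest, true);
assert.strictEqual(err.description,
    'Cannot specify both Range header and partNumber query parameter.');
```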
@ -1,7 +1,7 @@
const assert = require('assert');
const async = require('async');
const moment = require('moment');
const { errors, s3middleware } = require('arsenal');
const { s3middleware } = require('arsenal');

const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock');

@ -84,7 +84,7 @@ describe('parseTagFromQuery', () => {
it(`should ${behavior} if tag set: "${test.tagging}"`, done => {
const result = parseTagFromQuery(test.tagging);
if (test.error) {
assert(result[test.error.status]);
assert.strictEqual(result.is[test.error.status], true);
assert.strictEqual(result.code, test.error.statusCode);
} else {
assert.deepStrictEqual(result, test.result);

@ -112,7 +112,7 @@ describe('objectPut API', () => {

it('should return an error if the bucket does not exist', done => {
objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -123,7 +123,7 @@ describe('objectPut API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest,
undefined, log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -451,9 +451,8 @@ describe('objectPut API', () => {

bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
assert.deepStrictEqual(err, errors.InvalidRequest
.customizeDescription(
'Bucket is missing ObjectLockConfiguration'));
assert.strictEqual(err.is.InvalidRequest, true);
assert.strictEqual(err.description, 'Bucket is missing ObjectLockConfiguration');
done();
});
});

@ -592,7 +591,7 @@ describe('objectPut API with versioning', () => {
bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
err => {
assert.deepStrictEqual(err, errors.BadDigest);
assert.strictEqual(err.is.BadDigest, true);
// orphan objects don't get deleted
// until the next tick
// in memory

@ -1,5 +1,4 @@
const assert = require('assert');
const { errors } = require('arsenal');

const { bucketPut } = require('../../../lib/api/bucketPut');
const constants = require('../../../constants');

@ -63,8 +62,7 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert
.deepStrictEqual(err, errors.InvalidArgument);
assert.strictEqual(err.is.InvalidArgument, true);
done();
});
});

@ -208,8 +206,8 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err,
errors.UnresolvableGrantByEmailAddress);
assert.strictEqual(
err.is.UnresolvableGrantByEmailAddress, true);
done();
});
});

@ -274,8 +272,7 @@ describe('putObjectACL API', () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
() => {
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err,
errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -342,8 +339,8 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err,
errors.UnresolvableGrantByEmailAddress);
assert.strictEqual(
err.is.UnresolvableGrantByEmailAddress, true);
done();
});
});

@ -371,8 +368,7 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err,
errors.MalformedACLError);
assert.strictEqual(err.is.MalformedACLError, true);
done();
});
});

@ -400,7 +396,7 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err, errors.MalformedXML);
assert.strictEqual(err.is.MalformedXML, true);
done();
});
});

@ -427,7 +423,7 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err, errors.InvalidArgument);
assert.strictEqual(err.is.InvalidArgument, true);
done();
});
});

@ -455,7 +451,7 @@ describe('putObjectACL API', () => {
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err, errors.InvalidArgument);
assert.strictEqual(err.is.InvalidArgument, true);
done();
});
});

@ -55,7 +55,7 @@ describe('putObjectLegalHold API', () => {

it('should return InvalidRequest error', done => {
objectPutLegalHold(authInfo, putLegalHoldReq('ON'), log, err => {
assert.strictEqual(err.InvalidRequest, true);
assert.strictEqual(err.is.InvalidRequest, true);
done();
});
});

@ -1,7 +1,6 @@
const assert = require('assert');
const moment = require('moment');

const { errors } = require('arsenal');
const { bucketPut } = require('../../../lib/api/bucketPut');
const objectPut = require('../../../lib/api/objectPut');
const objectPutRetention = require('../../../lib/api/objectPutRetention');

@ -99,7 +98,7 @@ describe('putObjectRetention API', () => {

it('should return InvalidRequest error', done => {
objectPutRetention(authInfo, putObjRetRequestGovernance, log, err => {
assert.strictEqual(err.InvalidRequest, true);
assert.strictEqual(err.is.InvalidRequest, true);
done();
});
});

@ -134,7 +133,7 @@ describe('putObjectRetention API', () => {
objectPutRetention(authInfo, putObjRetRequestCompliance, log, err => {
assert.ifError(err);
return objectPutRetention(authInfo, putObjRetRequestGovernance, log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -144,7 +143,7 @@ describe('putObjectRetention API', () => {
objectPutRetention(authInfo, putObjRetRequestCompliance, log, err => {
assert.ifError(err);
return objectPutRetention(authInfo, putObjRetRequestComplianceShorter, log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -155,7 +154,7 @@ describe('putObjectRetention API', () => {
objectPutRetention(authInfo, putObjRetRequestGovernance, log, err => {
assert.ifError(err);
return objectPutRetention(authInfo, putObjRetRequestGovernance, log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -37,8 +37,8 @@ const testPutObjectRequest = new DummyRequest({

function _checkError(err, code, errorName) {
assert(err, 'Expected error but found none');
assert.strictEqual(err.is[errorName], true);
assert.strictEqual(err.code, code);
assert(err[errorName]);
}

function _generateSampleXml(key, value) {

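The `_checkError` helper in the hunk above now combines the `is`-based type check with the HTTP status code. A hypothetical standalone version, kept here only to show the shape of the check (the error object and names in the usage line are made up for illustration):

```js
const assert = require('assert');

// Hypothetical standalone version of the helper updated above: it asserts the
// error type through the `is` map plus the HTTP status code, instead of relying
// on own-property flags such as `err[errorName]`.
function checkError(err, expectedCode, expectedName) {
    assert(err, 'Expected error but found none');
    assert.strictEqual(err.is[expectedName], true);
    assert.strictEqual(err.code, expectedCode);
}

// Example with a hand-built error object (error name and code are illustrative):
checkError({ code: 400, is: { InvalidTag: true } }, 400, 'InvalidTag');
```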
@ -1,7 +1,6 @@
const assert = require('assert');
const async = require('async');
const { parseString } = require('xml2js');
const { errors } = require('arsenal');

const { bucketPut } = require('../../../lib/api/bucketPut');
const constants = require('../../../constants');

@ -79,7 +78,7 @@ describe('serviceGet API', () => {
it('should prevent anonymous user from accessing getService API', done => {
const publicAuthInfo = makeAuthInfo(constants.publicId);
serviceGet(publicAuthInfo, serviceGetRequest, log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -1,6 +1,5 @@
const assert = require('assert');
const crypto = require('crypto');
const { errors } = require('arsenal');

const BucketInfo = require('arsenal').models.BucketInfo;
const bucketGet = require('../../../lib/api/bucketGet');

@ -101,7 +100,7 @@ describe('transient bucket handling', () => {
it('putBucket request should return error if ' +
'transient bucket created by different account', done => {
bucketPut(otherAccountAuthInfo, baseTestRequest, log, err => {
assert.deepStrictEqual(err, errors.BucketAlreadyExists);
assert.strictEqual(err.is.BucketAlreadyExists, true);
serviceGet(otherAccountAuthInfo, serviceGetRequest,
log, (err, data) => {
parseString(data, (err, result) => {

@ -237,7 +236,7 @@ describe('transient bucket handling', () => {
bucketDelete(authInfo, baseTestRequest, log, err => {
assert.ifError(err);
metadata.getBucket(bucketName, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -247,7 +246,7 @@ describe('transient bucket handling', () => {
'request is not from owner', done => {
bucketDelete(otherAccountAuthInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.AccessDenied);
assert.strictEqual(err.is.AccessDenied, true);
done();
});
});

@ -260,7 +259,7 @@ describe('transient bucket handling', () => {
bucketGetRequest.query = {};
bucketGet(authInfo, bucketGetRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -273,7 +272,7 @@ describe('transient bucket handling', () => {
bucketGetACLRequest.query = { acl: '' };
bucketGetACL(authInfo, bucketGetACLRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -281,7 +280,7 @@ describe('transient bucket handling', () => {
it('bucketGetCors request on transient bucket should return ' +
'NoSuchBucket error', done => {
bucketGetCors(authInfo, baseTestRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -297,7 +296,7 @@ describe('transient bucket handling', () => {
bucketPutCorsRequest.headers['content-md5'] = crypto.createHash('md5')
.update(bucketPutCorsRequest.post, 'utf8').digest('base64');
bucketPutCors(authInfo, bucketPutCorsRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -305,7 +304,7 @@ describe('transient bucket handling', () => {
it('bucketDeleteCors request on transient bucket should return ' +
'NoSuchBucket error', done => {
bucketDeleteCors(authInfo, baseTestRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -313,7 +312,7 @@ describe('transient bucket handling', () => {
it('bucketGetWebsite request on transient bucket should return ' +
'NoSuchBucket error', done => {
bucketGetWebsite(authInfo, baseTestRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -326,7 +325,7 @@ describe('transient bucket handling', () => {
'<IndexDocument><Suffix>index.html</Suffix></IndexDocument>' +
'</WebsiteConfiguration>';
bucketPutWebsite(authInfo, bucketPutWebsiteRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -334,7 +333,7 @@ describe('transient bucket handling', () => {
it('bucketDeleteWebsite request on transient bucket should return ' +
'NoSuchBucket error', done => {
bucketDeleteWebsite(authInfo, baseTestRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -343,7 +342,7 @@ describe('transient bucket handling', () => {
'error', done => {
bucketHead(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -357,7 +356,7 @@ describe('transient bucket handling', () => {
completeMpuRequest.query = { uploadId };
completeMultipartUpload(authInfo, completeMpuRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
done();
});
});

@ -371,7 +370,7 @@ describe('transient bucket handling', () => {
listRequest.query = { uploadId };
listParts(authInfo, listRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
done();
});
});

@ -396,7 +395,7 @@ describe('transient bucket handling', () => {
config.locationConstraints[locationConstraint].
legacyAwsBehavior = true;
multipartDelete(authInfo, deleteRequest, log, err => {
assert.deepStrictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
done();
});
});

@ -423,7 +422,7 @@ describe('transient bucket handling', () => {
partNumber: '1' };
objectPutPart(authInfo, putPartRequest, undefined,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchUpload);
assert.strictEqual(err.is.NoSuchUpload, true);
done();
});
});

@ -435,7 +434,7 @@ describe('transient bucket handling', () => {
listRequest.query = {};
listMultipartUploads(authInfo, listRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -445,7 +444,7 @@ describe('transient bucket handling', () => {
done => {
objectGet(authInfo, baseTestRequest, false,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -454,7 +453,7 @@ describe('transient bucket handling', () => {
'NoSuchBucket error', done => {
objectGetACL(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -463,7 +462,7 @@ describe('transient bucket handling', () => {
'NoSuchBucket error', done => {
objectHead(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -472,7 +471,7 @@ describe('transient bucket handling', () => {
'NoSuchBucket error', done => {
objectPutACL(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -481,7 +480,7 @@ describe('transient bucket handling', () => {
'NoSuchBucket error', done => {
objectDelete(authInfo, baseTestRequest,
log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
assert.strictEqual(err.is.NoSuchBucket, true);
done();
});
});

@ -1,5 +1,3 @@
const { errors } = require('arsenal');

const assert = require('assert');

const { cleanup, DummyRequestLogger } = require('../helpers');

@ -45,7 +43,7 @@ describe('bucket creation', () => {
it('should return 409 if try to recreate in non-us-east-1', done => {
createBucket(authInfo, bucketName, headers,
normalBehaviorLocationConstraint, log, err => {
assert.strictEqual(err, errors.BucketAlreadyOwnedByYou);
assert.strictEqual(err.is.BucketAlreadyOwnedByYou, true);
done();
});
});

@ -64,7 +62,7 @@ describe('bucket creation', () => {
it('should return 409 if try to recreate in us-east-1', done => {
createBucket(authInfo, bucketName, headers,
specialBehaviorLocationConstraint, log, err => {
assert.strictEqual(err, errors.BucketAlreadyOwnedByYou);
assert.strictEqual(err.is.BucketAlreadyOwnedByYou, true);
done();
});
});

@ -1,8 +1,6 @@
const assert = require('assert');
const async = require('async');

const { errors } = require('arsenal');

const BucketInfo = require('arsenal').models.BucketInfo;
const { cleanup, DummyRequestLogger } = require('../helpers');
const { isKeyInContents }

@ -39,7 +37,7 @@ describe('bucket API for getting, putting and deleting ' +
it('should return an error in response ' +
'to getObjectMD when no such key', done => {
metadata.getObjectMD(bucketName, 'notThere', {}, log, (err, value) => {
assert.deepStrictEqual(err, errors.NoSuchKey);
assert.strictEqual(err.is.NoSuchKey, true);
assert.strictEqual(value, undefined);
done();
});

@ -52,7 +50,7 @@ describe('bucket API for getting, putting and deleting ' +
() => {
metadata.getObjectMD(bucketName, 'objectToDelete', {}, log,
(err, value) => {
assert.deepStrictEqual(err, errors.NoSuchKey);
assert.strictEqual(err.is.NoSuchKey, true);
assert.strictEqual(value, undefined);
done();
});

@ -40,7 +40,7 @@ describe('Location Constraint Check', () => {
createTestRequest('fail-region'), null, testBucket, log);
assert.strictEqual(backendInfoObj.err.code, 400,
'Expected "Invalid Argument" code error');
assert(backendInfoObj.err.InvalidArgument, 'Expected "Invalid ' +
assert(backendInfoObj.err.is.InvalidArgument, 'Expected "Invalid ' +
'Argument" error');
done();
});

yarn.lock
@ -111,6 +111,23 @@
dependencies:
dayjs "^1.10.5"

"@sideway/address@^4.1.3":
version "4.1.4"
resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"
integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==
dependencies:
"@hapi/hoek" "^9.0.0"

"@sideway/formula@^3.0.0":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.0.tgz#fe158aee32e6bd5de85044be615bc08478a0a13c"
integrity sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==

"@sideway/pinpoint@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df"
integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==

"@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3":
version "1.8.3"
resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d"

@ -144,6 +161,16 @@
resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==

"@types/async@^3.2.12":
version "3.2.12"
resolved "https://registry.yarnpkg.com/@types/async/-/async-3.2.12.tgz#0ebbfaf3f249ffa0fdc50179b07705f69c90d70c"
integrity sha512-4i4w4tfNDo73BOjk0qHcB2YJ8A2SjITCrU4BTsgdJFTsVr6atPDXa0T9r0QZTrX3axtWwkqpZqF4B3gR0TqBGw==

"@types/utf8@^3.0.1":
version "3.0.1"
resolved "https://registry.yarnpkg.com/@types/utf8/-/utf8-3.0.1.tgz#bf081663d4fff05ee63b41f377a35f8b189f7e5b"
integrity sha512-1EkWuw7rT3BMz2HpmcEOr/HL61mWNA6Ulr/KdbXR9AI0A55wD4Qfv8hizd8Q1DnknSIzzDvQmvvY/guvX7jjZA==

JSONStream@^1.0.0:
version "1.3.5"
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"

@ -391,11 +418,12 @@ arraybuffer.slice@~0.0.7:
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==

"arsenal@git+https://github.com/scality/arsenal#7.10.15":
version "7.10.15"
resolved "git+https://github.com/scality/arsenal#310834c237da624800ea6b4e291045808aa9bf80"
"arsenal@git+https://github.com/scality/arsenal#ccbc1ed1":
version "7.10.19"
resolved "git+https://github.com/scality/arsenal#ccbc1ed10c9115c31c08e9ba132887e449cb56d7"
dependencies:
"@hapi/joi" "^15.1.0"
"@types/async" "^3.2.12"
"@types/utf8" "^3.0.1"
JSONStream "^1.0.0"
agentkeepalive "^4.1.3"
ajv "6.12.2"

@ -412,6 +440,7 @@ arraybuffer.slice@~0.0.7:
hdclient scality/hdclient#1.1.0
ioredis "^4.28.5"
ipaddr.js "1.9.1"
joi "^17.6.0"
level "~5.0.1"
level-sublevel "~6.6.5"
mongodb "^3.0.1"

@ -1304,7 +1333,7 @@ debug@2.6.9, debug@^2.1.1, debug@^2.2.0, debug@^2.6.8, debug@~2.6.9:
dependencies:
ms "2.0.0"

debug@4, debug@^4.3.3:
debug@4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==

@ -2037,7 +2066,7 @@ gar@^1.0.4:
resolved "https://registry.yarnpkg.com/gar/-/gar-1.0.4.tgz#f777bc7db425c0572fdeb52676172ca1ae9888b8"
integrity sha512-w4n9cPWyP7aHxKxYHFQMegj7WIAsL/YX/C4Bs5Rr8s1H9M1rNtRWRsw+ovYMkXDQ5S4ZbYHsHAPmevPjPgw44w==

gauge@^4.0.3:
gauge@^4.0.0:
version "4.0.4"
resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce"
integrity sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==

@ -2891,6 +2920,17 @@ jmespath@0.16.0:
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.16.0.tgz#b15b0a85dfd4d930d43e69ed605943c802785076"
integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==

joi@^17.6.0:
version "17.6.0"
resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2"
integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==
dependencies:
"@hapi/hoek" "^9.0.0"
"@hapi/topo" "^5.0.0"
"@sideway/address" "^4.1.3"
"@sideway/formula" "^3.0.0"
"@sideway/pinpoint" "^2.0.0"

"js-tokens@^3.0.0 || ^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"

@ -3881,13 +3921,13 @@ npm-run-all@~4.1.5:
string.prototype.padend "^3.0.0"

npmlog@^6.0.0:
version "6.0.2"
resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830"
integrity sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==
version "6.0.1"
resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.1.tgz#06f1344a174c06e8de9c6c70834cfba2964bba17"
integrity sha512-BTHDvY6nrRHuRfyjt1MAufLxYdVXZfd099H4+i1f0lPywNQyI4foeNXJRObB/uy+TYqUW0vAD9gbdSOXPst7Eg==
dependencies:
are-we-there-yet "^3.0.0"
console-control-strings "^1.1.0"
gauge "^4.0.3"
gauge "^4.0.0"
set-blocking "^2.0.0"

number-is-nan@^1.0.0:

@ -4783,15 +4823,15 @@ socket.io@~2.3.0:
socket.io-parser "~3.4.0"

socks-proxy-agent@^6.0.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.2.0.tgz#f6b5229cc0cbd6f2f202d9695f09d871e951c85e"
integrity sha512-wWqJhjb32Q6GsrUqzuFkukxb/zzide5quXYcMVpIjxalDBBYy2nqKCFQ/9+Ie4dvOYSQdOk3hUlZSdzZOd3zMQ==
version "6.1.1"
resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.1.1.tgz#e664e8f1aaf4e1fb3df945f09e3d94f911137f87"
integrity sha512-t8J0kG3csjA4g6FTbsMOWws+7R7vuRC8aQ/wy3/1OWmsgwA68zs/+cExQ0koSitUDXqhufF/YJr9wtNMZHw5Ew==
dependencies:
agent-base "^6.0.2"
debug "^4.3.3"
socks "^2.6.2"
debug "^4.3.1"
socks "^2.6.1"

socks@^2.6.2:
socks@^2.6.1:
version "2.6.2"
resolved "https://registry.yarnpkg.com/socks/-/socks-2.6.2.tgz#ec042d7960073d40d94268ff3bb727dc685f111a"
integrity sha512-zDZhHhZRY9PxRruRMR7kMhnf3I8hDs4S3f9RecfnGxvcBHQcKcIH/oUcEWffsfl1XxdYlA7nnlGbbTvPz9D8gA==