Compare commits
18 Commits
developmen ... bugfix/CLD
Author | SHA1
---|---
Mickael Bourgois | 40573a9b30
Mickael Bourgois | 14e164ad90
Mickael Bourgois | 99d6fe41a9
Mickael Bourgois | a381a2a15b
Mickael Bourgois | 6e5284793b
Maha Benzekri | c1e10cd662
Maha Benzekri | c48a037116
Taylor McKinnon | 71e373a13e
Taylor McKinnon | 50ce8e959f
Taylor McKinnon | 5dacc8998d
williamlardier | 52e08d30f3
williamlardier | 4261a01c16
williamlardier | df29f82812
williamlardier | 47ca8c055d
gaspardmoindrot | 1af74fb6cb
Nicolas Humbert | 6642e3c93c
Nicolas Humbert | ebaf54f267
Dimitrios Vasilas | 6345b1b915
@@ -0,0 +1,25 @@
+---
+name: codeQL
+
+on:
+  push:
+    branches: [development/*, stabilization/*, hotfix/*]
+  pull_request:
+    branches: [development/*, stabilization/*, hotfix/*]
+  workflow_dispatch:
+
+jobs:
+  analyze:
+    name: Static analysis with CodeQL
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v2
+        with:
+          languages: javascript, python, ruby
+
+      - name: Build and analyze
+        uses: github/codeql-action/analyze@v2
@@ -0,0 +1,16 @@
+---
+name: dependency review
+
+on:
+  pull_request:
+    branches: [development/*, stabilization/*, hotfix/*]
+
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Checkout Repository'
+        uses: actions/checkout@v3
+
+      - name: 'Dependency Review'
+        uses: actions/dependency-review-action@v3
@@ -217,18 +217,13 @@ jobs:
        uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
-         python-version: |
-           2.7
-           3.9
+         python-version: 3.9
      - name: Setup CI environment
        uses: ./.github/actions/setup-ci
-     - name: Setup python2 test environment
+     - name: Setup python test environment
        run: |
          sudo apt-get install -y libdigest-hmac-perl
-         pip install virtualenv
-         virtualenv -p $(which python2) ~/.virtualenv/py2
-         source ~/.virtualenv/py2/bin/activate
-         pip install 's3cmd==1.6.1'
+         pip install 's3cmd==2.3.0'
      - name: Setup CI services
        run: docker-compose up -d
        working-directory: .github/docker
@@ -236,7 +231,6 @@ jobs:
        run: |-
          set -o pipefail;
          bash wait_for_local_port.bash 8000 40
-         source ~/.virtualenv/py2/bin/activate
          yarn run ft_test | tee /tmp/artifacts/${{ github.job }}/tests.log
      - name: Upload logs to artifacts
        uses: scality/action-artifacts@v3
@@ -101,8 +101,30 @@ function validateWebsiteHeader(header) {
        header.startsWith('http://') || header.startsWith('https://'));
}

+/**
+ * appendWebsiteIndexDocument - append index to objectKey if necessary
+ * @param {object} request - normalized request object
+ * @param {string} indexDocumentSuffix - index document from website config
+ * @return {undefined}
+ */
+function appendWebsiteIndexDocument(request, indexDocumentSuffix) {
+    const reqObjectKey = request.objectKey ? request.objectKey : '';
+
+    // find index document if "directory" sent in request
+    if (reqObjectKey.endsWith('/')) {
+        // eslint-disable-next-line no-param-reassign
+        request.objectKey += indexDocumentSuffix;
+    }
+    // find index document if no key provided
+    if (reqObjectKey === '') {
+        // eslint-disable-next-line no-param-reassign
+        request.objectKey = indexDocumentSuffix;
+    }
+}
+
module.exports = {
    findRoutingRule,
    extractRedirectInfo,
    validateWebsiteHeader,
+   appendWebsiteIndexDocument,
};
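For orientation, a minimal usage sketch of the new helper; the `request` objects below are hand-built stand-ins for the normalized request (only `objectKey` matters here):

```js
// Minimal sketch: appendWebsiteIndexDocument rewrites request.objectKey in place.
const { appendWebsiteIndexDocument } = require('./apiUtils/object/websiteServing');

const dirRequest = { objectKey: 'docs/' };
appendWebsiteIndexDocument(dirRequest, 'index.html');
// dirRequest.objectKey === 'docs/index.html'  ("directory" request gets the index appended)

const rootRequest = { objectKey: '' };
appendWebsiteIndexDocument(rootRequest, 'index.html');
// rootRequest.objectKey === 'index.html'      (empty key falls back to the index document)
```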
@@ -5,7 +5,7 @@ const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants');
const metadata = require('../metadata/wrapper');
const bucketShield = require('./apiUtils/bucket/bucketShield');
-const { findRoutingRule, extractRedirectInfo } =
+const { appendWebsiteIndexDocument, findRoutingRule, extractRedirectInfo } =
    require('./apiUtils/object/websiteServing');
const { isObjAuthorized, isBucketAuthorized } =
    require('./apiUtils/authorization/permissionChecks');
@@ -84,7 +84,6 @@ function websiteGet(request, log, callback) {
    log.debug('processing request', { method: 'websiteGet' });
    const bucketName = request.bucketName;
    const reqObjectKey = request.objectKey ? request.objectKey : '';
-   let objectKey = reqObjectKey;

    return metadata.getBucket(bucketName, log, (err, bucket) => {
        if (err) {
@@ -109,32 +108,25 @@ function websiteGet(request, log, callback) {
        // handle redirect all
        if (websiteConfig.getRedirectAllRequestsTo()) {
            return callback(null, false, null, corsHeaders,
-               websiteConfig.getRedirectAllRequestsTo(), objectKey);
+               websiteConfig.getRedirectAllRequestsTo(), reqObjectKey);
        }

        // check whether need to redirect based on key
        const routingRules = websiteConfig.getRoutingRules();
-       const keyRoutingRule = findRoutingRule(routingRules, objectKey);
+       const keyRoutingRule = findRoutingRule(routingRules, reqObjectKey);

        if (keyRoutingRule) {
            // TODO: optimize by not rerouting if only routing
            // rule is to change out key
            return callback(null, false, null, corsHeaders,
-               keyRoutingRule, objectKey);
+               keyRoutingRule, reqObjectKey);
        }

-       // find index document if "directory" sent in request
-       if (reqObjectKey.endsWith('/')) {
-           objectKey += websiteConfig.getIndexDocument();
-       }
-       // find index document if no key provided
-       if (reqObjectKey === '') {
-           objectKey = websiteConfig.getIndexDocument();
-       }
+       appendWebsiteIndexDocument(request, websiteConfig.getIndexDocument());

        // get object metadata and check authorization and header
        // validation
-       return metadata.getObjectMD(bucketName, objectKey, {}, log,
+       return metadata.getObjectMD(bucketName, request.objectKey, {}, log,
            (err, objMD) => {
                // Note: In case of error, we intentionally send the original
                // object key to _errorActions as in case of a redirect, we do
@@ -5,7 +5,7 @@ const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants');
const metadata = require('../metadata/wrapper');
const bucketShield = require('./apiUtils/bucket/bucketShield');
-const { findRoutingRule, extractRedirectInfo } =
+const { appendWebsiteIndexDocument, findRoutingRule, extractRedirectInfo } =
    require('./apiUtils/object/websiteServing');
const collectResponseHeaders = require('../utilities/collectResponseHeaders');
const { pushMetric } = require('../utapi/utilities');
@@ -46,7 +46,6 @@ function websiteHead(request, log, callback) {
    log.debug('processing request', { method: 'websiteHead' });
    const bucketName = request.bucketName;
    const reqObjectKey = request.objectKey ? request.objectKey : '';
-   let objectKey = reqObjectKey;

    return metadata.getBucket(bucketName, log, (err, bucket) => {
        if (err) {
@@ -72,21 +71,15 @@ function websiteHead(request, log, callback) {
        // handle redirect all
        if (websiteConfig.getRedirectAllRequestsTo()) {
            return callback(null, corsHeaders,
-               websiteConfig.getRedirectAllRequestsTo(), objectKey);
+               websiteConfig.getRedirectAllRequestsTo(), reqObjectKey);
        }

-       // find index document if "directory" sent in request
-       if (reqObjectKey.endsWith('/')) {
-           objectKey += websiteConfig.getIndexDocument();
-       }
-       // find index document if no key provided
-       if (reqObjectKey === '') {
-           objectKey = websiteConfig.getIndexDocument();
-       }
+       appendWebsiteIndexDocument(request, websiteConfig.getIndexDocument());
+
        // check whether need to redirect based on key
        const routingRules = websiteConfig.getRoutingRules();

-       const keyRoutingRule = findRoutingRule(routingRules, objectKey);
+       const keyRoutingRule = findRoutingRule(routingRules, request.objectKey);

        if (keyRoutingRule) {
            return callback(null, corsHeaders, keyRoutingRule, reqObjectKey);
@@ -94,7 +87,7 @@ function websiteHead(request, log, callback) {

        // get object metadata and check authorization and header
        // validation
-       return metadata.getObjectMD(bucketName, objectKey, {}, log,
+       return metadata.getObjectMD(bucketName, request.objectKey, {}, log,
            (err, objMD) => {
                // Note: In case of error, we intentionally send the original
                // object key to _errorActions as in case of a redirect, we do
@@ -472,22 +472,68 @@ function putMetadata(request, response, bucketInfo, objMd, log, callback) {
            omVal[headerName] = objMd[headerName];
        });
    }
-   const versionId = decodeVersionId(request.query);
-   // specify both 'versioning' and 'versionId' to create a "new"
-   // version (updating master as well) but with specified
-   // versionId
-   const options = {
-       versioning: bucketInfo.isVersioningEnabled(),
-       versionId,
-   };
+   let versionId = decodeVersionId(request.query);
+   let versioning = bucketInfo.isVersioningEnabled();
+   let isNull = false;
+
+   if (versionId === 'null') {
+       isNull = true;
+       // Retrieve the null version id from the object metadata.
+       versionId = objMd && objMd.versionId;
+       if (!versionId) {
+           // Set isNull in the object metadata to be written.
+           // Since metadata will generate a versionId for the null version,
+           // the flag is needed to allow cloudserver to know that the version
+           // is a null version and allow access to it using the "null" versionId.
+           omVal.isNull = true;
+           if (versioning) {
+               // If the null version does not have a version id, it is a current null version.
+               // To update the metadata of a current version, versioning is set to false.
+
+               // This condition is to handle the case where a single null version looks like a master
+               // key and will not have a duplicate versioned key and no version ID.
+               // They are created when you have a non-versioned bucket with objects,
+               // and then convert bucket to versioned.
+               // If no new versioned objects are added for given object(s), they look like
+               // standalone master keys.
+               versioning = false;
+           } else {
+               const versioningConf = bucketInfo.getVersioningConfiguration();
+               // The purpose of this condition is to address situations in which
+               // - versioning is "suspended" and
+               // - no existing object or no null version.
+               // In such scenarios, we generate a new null version and designate it as the master version.
+               if (versioningConf && versioningConf.Status === 'Suspended') {
+                   versionId = '';
+               }
+           }
+       }
+   }
+
    // If the object is from a source bucket without versioning (i.e. NFS),
    // then we want to create a version for the replica object even though
    // none was provided in the object metadata value.
    if (omVal.replicationInfo.isNFS) {
        const isReplica = omVal.replicationInfo.status === 'REPLICA';
-       options.versioning = isReplica;
+       versioning = isReplica;
        omVal.replicationInfo.isNFS = !isReplica;
    }

+   const options = {
+       versionId,
+       isNull,
+   };
+
+   // NOTE: When 'versioning' is set to true and no 'versionId' is specified,
+   // it results in the creation of a "new" version, which also updates the master.
+   // NOTE: Since option fields are converted to strings when they're sent to Metadata via the query string,
+   // Metadata interprets the value "false" as if it were true.
+   // Therefore, to avoid this confusion, we don't pass the versioning parameter at all if its value is false.
+   if (versioning) {
+       options.versioning = true;
+   }
+
    log.trace('putting object version', {
        objectKey: request.objectKey, omVal, options });
    return metadata.putObjectMD(bucketName, objectKey, omVal, options, log,
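To make the new branching easier to follow, here is a condensed sketch of the versionId/versioning decision introduced above. It is a paraphrase of the diff, not the actual module code; `decodeVersionId`, `bucketInfo`, `objMd`, and `omVal` come from the surrounding code, and the NFS replica special case is omitted:

```js
// Condensed sketch of the option building above (paraphrase, not the real putMetadata code).
function buildPutMetadataOptions(query, bucketInfo, objMd, omVal) {
    let versionId = decodeVersionId(query);            // 'null', undefined, or an explicit id
    let versioning = bucketInfo.isVersioningEnabled();
    let isNull = false;

    if (versionId === 'null') {
        isNull = true;
        versionId = objMd && objMd.versionId;          // reuse the stored null version id if any
        if (!versionId) {
            omVal.isNull = true;                       // mark the metadata as the null version
            if (versioning) {
                versioning = false;                    // update the standalone master key in place
            } else {
                const conf = bucketInfo.getVersioningConfiguration();
                if (conf && conf.Status === 'Suspended') {
                    versionId = '';                    // generate a new null version as master
                }
            }
        }
    }

    const options = { versionId, isNull };
    if (versioning) {
        options.versioning = true;                     // only sent when true; "false" would be read as truthy
    }
    return options;
}
```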
@@ -1,6 +1,6 @@
 {
   "name": "s3",
-  "version": "7.10.27",
+  "version": "7.10.27-3",
   "description": "S3 connector",
   "main": "index.js",
   "engines": {
@@ -20,7 +20,7 @@
   "homepage": "https://github.com/scality/S3#readme",
   "dependencies": {
     "@hapi/joi": "^17.1.0",
-    "arsenal": "git+https://github.com/scality/arsenal#7.10.43",
+    "arsenal": "git+https://github.com/scality/arsenal#7.10.43-1",
     "async": "~2.5.0",
     "aws-sdk": "2.905.0",
     "azure-storage": "^2.1.0",
@@ -14,6 +14,18 @@ class BucketUtility {
            });
    }

+   bucketExists(bucketName) {
+       return this.s3
+           .headBucket({ Bucket: bucketName }).promise()
+           .then(() => true)
+           .catch(err => {
+               if (err.code === 'NotFound') {
+                   return false;
+               }
+               throw err;
+           });
+   }
+
    createOne(bucketName) {
        return this.s3
            .createBucket({ Bucket: bucketName }).promise()
@@ -121,6 +133,24 @@ class BucketUtility {
        return Promise.all(promises);
    }

+   emptyIfExists(bucketName) {
+       return this.bucketExists(bucketName)
+           .then(exists => {
+               if (exists) {
+                   return this.empty(bucketName);
+               }
+               return undefined;
+           });
+   }
+
+   emptyManyIfExists(bucketNames) {
+       const promises = bucketNames.map(
+           bucketName => this.emptyIfExists(bucketName)
+       );
+
+       return Promise.all(promises);
+   }
+
    getOwner() {
        return this.s3
            .listBuckets().promise()
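A brief usage sketch of the new test helpers; the require path and constructor arguments are assumed from the existing aws-node-sdk test suite and may differ:

```js
// Usage sketch of the new BucketUtility helpers (require path and constructor args assumed).
const BucketUtility = require('../../lib/utility/bucket-util');

const bucketUtil = new BucketUtility('default', {});

// Check existence without throwing on a missing bucket.
bucketUtil.bucketExists('my-test-bucket')
    .then(exists => process.stdout.write(`exists: ${exists}\n`));

// Empty only the buckets that actually exist, so cleanup never fails on NotFound.
bucketUtil.emptyManyIfExists(['bucket-a', 'bucket-b'])
    .then(() => process.stdout.write('cleanup done\n'));
```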
@@ -30,6 +30,33 @@ function getPolicyParams(paramToChange) {
    };
}

+function getPolicyParamsWithId(paramToChange, policyId) {
+    const newParam = {};
+    const bucketPolicy = {
+        Version: '2012-10-17',
+        Id: policyId,
+        Statement: [basicStatement],
+    };
+    if (paramToChange) {
+        newParam[paramToChange.key] = paramToChange.value;
+        bucketPolicy.Statement[0] = Object.assign({}, basicStatement, newParam);
+    }
+    return {
+        Bucket: bucket,
+        Policy: JSON.stringify(bucketPolicy),
+    };
+}
+
+function generateRandomString(length) {
+    // All allowed characters matching the regex in arsenal
+    const allowedCharacters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+=,.@ -/';
+    const allowedCharactersLength = allowedCharacters.length;
+
+    return [...Array(length)]
+        .map(() => allowedCharacters[~~(Math.random() * allowedCharactersLength)])
+        .join('');
+}
+
// Check for the expected error response code and status code.
function assertError(err, expectedErr, cb) {
    if (expectedErr === null) {
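Worked length check for the principal ARN tests added below: the fixed prefix `arn:aws:iam::767707094035:user/` is 31 characters, so `generateRandomString(150)` yields an ARN of 181 characters (well under the 2048-character limit referenced in the test names), while `generateRandomString(2020)` yields 2051 characters, which exceeds it.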
@@ -44,6 +71,7 @@ function assertError(err, expectedErr, cb) {
    cb();
}

+
describe('aws-sdk test put bucket policy', () => {
    let s3;
    let otherAccountS3;
@@ -102,5 +130,31 @@ describe('aws-sdk test put bucket policy', () => {
            s3.putBucketPolicy(params, err =>
                assertError(err, 'MalformedPolicy', done));
        });
+
+        it('should return MalformedPolicy because Id is not a string',
+        done => {
+            const params = getPolicyParamsWithId(null, 59);
+            s3.putBucketPolicy(params, err =>
+                assertError(err, 'MalformedPolicy', done));
+        });
+
+        it('should put a bucket policy on bucket since Id is a string',
+        done => {
+            const params = getPolicyParamsWithId(null, 'cd3ad3d9-2776-4ef1-a904-4c229d1642e');
+            s3.putBucketPolicy(params, err =>
+                assertError(err, null, done));
+        });
+
+        it('should allow bucket policy with pincipal arn less than 2048 characters', done => {
+            const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(150)}` } }); // eslint-disable-line max-len
+            s3.putBucketPolicy(params, err =>
+                assertError(err, null, done));
+        });
+
+        it('should not allow bucket policy with pincipal arn more than 2048 characters', done => {
+            const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(2020)}` } }); // eslint-disable-line max-len
+            s3.putBucketPolicy(params, err =>
+                assertError(err, 'MalformedPolicy', done));
+        });
    });
});
@@ -565,5 +565,52 @@ describe('User visits bucket website endpoint', () => {
                }, done);
            });
        });
+
+        describe('with bucket policy', () => {
+            beforeEach(done => {
+                const webConfig = new WebsiteConfigTester('index.html');
+                s3.putBucketWebsite({ Bucket: bucket,
+                    WebsiteConfiguration: webConfig }, err => {
+                    assert.strictEqual(err,
+                        null, `Found unexpected err ${err}`);
+                    s3.putBucketPolicy({ Bucket: bucket, Policy: JSON.stringify(
+                        {
+                            Version: '2012-10-17',
+                            Statement: [{
+                                Sid: 'PublicReadGetObject',
+                                Effect: 'Allow',
+                                Principal: '*',
+                                Action: ['s3:GetObject'],
+                                Resource: [`arn:aws:s3:::${bucket}/index.html`],
+                            }],
+                        }
+                    ) }, err => {
+                        assert.strictEqual(err,
+                            null, `Found unexpected err ${err}`);
+                        s3.putObject({ Bucket: bucket, Key: 'index.html',
+                            Body: fs.readFileSync(path.join(__dirname,
+                                '/websiteFiles/index.html')),
+                            ContentType: 'text/html' },
+                            err => {
+                                assert.strictEqual(err, null);
+                                done();
+                            });
+                    });
+                });
+            });
+
+            afterEach(done => {
+                s3.deleteObject({ Bucket: bucket, Key: 'index.html' },
+                    err => done(err));
+            });
+
+            it('should serve indexDocument if no key requested', done => {
+                WebsiteConfigTester.checkHTML({
+                    method: 'GET',
+                    url: endpoint,
+                    responseType: 'index-user',
+                }, done);
+            });
+        });
    });
});
@@ -566,5 +566,53 @@ describe('Head request on bucket website endpoint', () => {
                `${endpoint}/about/`, 301, expectedHeaders, done);
            });
        });
+
+        describe('with bucket policy', () => {
+            beforeEach(done => {
+                const webConfig = new WebsiteConfigTester('index.html');
+                s3.putBucketWebsite({ Bucket: bucket,
+                    WebsiteConfiguration: webConfig }, err => {
+                    assert.strictEqual(err,
+                        null, `Found unexpected err ${err}`);
+                    s3.putBucketPolicy({ Bucket: bucket, Policy: JSON.stringify(
+                        {
+                            Version: '2012-10-17',
+                            Statement: [{
+                                Sid: 'PublicReadGetObject',
+                                Effect: 'Allow',
+                                Principal: '*',
+                                Action: ['s3:GetObject'],
+                                Resource: [`arn:aws:s3:::${bucket}/index.html`],
+                            }],
+                        }
+                    ) }, err => {
+                        assert.strictEqual(err,
+                            null, `Found unexpected err ${err}`);
+                        s3.putObject({ Bucket: bucket, Key: 'index.html',
+                            Body: fs.readFileSync(path.join(__dirname,
+                                '/websiteFiles/index.html')),
+                            ContentType: 'text/html',
+                            Metadata: {
+                                test: 'value',
+                            } },
+                            err => {
+                                assert.strictEqual(err, null);
+                                done();
+                            });
+                    });
+                });
+            });
+
+            afterEach(done => {
+                s3.deleteObject({ Bucket: bucket, Key: 'index.html' },
+                    err => done(err));
+            });
+
+            it('should return indexDocument headers if no key ' +
+            'requested', done => {
+                WebsiteConfigTester.makeHeadRequest(undefined, endpoint,
+                    200, indexExpectedHeaders, done);
+            });
+        });
    });
});
File diff suppressed because it is too large
@@ -165,7 +165,9 @@ function readJsonFromChild(child, lineFinder, cb) {
        const findBrace = data.indexOf('{', findLine);
        const findEnd = findEndJson(data, findBrace);
        const endJson = data.substring(findBrace, findEnd + 1)
-           .replace(/"/g, '\\"').replace(/'/g, '"');
+           .replace(/"/g, '\\"').replace(/'/g, '"')
+           .replace(/b'/g, '\'')
+           .replace(/b"/g, '"');
        return cb(JSON.parse(endJson));
    });
}
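For context on the extra `replace` calls: a small illustrative sketch of the cleanup chain applied to the kind of Python 3 dict output s3cmd 2.x prints in debug mode (the sample string here is made up for illustration):

```js
// Illustrative only: Python 3 s3cmd debug output uses b'...' byte-string reprs,
// which the extended replace chain strips before JSON.parse.
const sample = "{'headers': {'content-type': b'application/xml'}}";
const cleaned = sample
    .replace(/"/g, '\\"').replace(/'/g, '"')
    .replace(/b'/g, '\'')
    .replace(/b"/g, '"');
console.log(JSON.parse(cleaned)); // { headers: { 'content-type': 'application/xml' } }
```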
@@ -344,18 +346,18 @@ describe('s3cmd getService', () => {

    it("should have response headers matching AWS's response headers",
    done => {
-       provideLineOfInterest(['ls', '--debug'], 'DEBUG: Response: {',
+       provideLineOfInterest(['ls', '--debug'], '\'headers\': {',
            parsedObject => {
-               assert(parsedObject.headers['x-amz-id-2']);
-               assert(parsedObject.headers['transfer-encoding']);
-               assert(parsedObject.headers['x-amz-request-id']);
-               const gmtDate = new Date(parsedObject.headers.date)
+               assert(parsedObject['x-amz-id-2']);
+               assert(parsedObject['transfer-encoding']);
+               assert(parsedObject['x-amz-request-id']);
+               const gmtDate = new Date(parsedObject.date)
                    .toUTCString();
-               assert.strictEqual(parsedObject.headers.date, gmtDate);
+               assert.strictEqual(parsedObject.date, gmtDate);
                assert.strictEqual(parsedObject
-                   .headers['content-type'], 'application/xml');
+                   ['content-type'], 'application/xml');
                assert.strictEqual(parsedObject
-                   .headers['set-cookie'], undefined);
+                   ['set-cookie'], undefined);
                done();
            });
    });
@@ -395,11 +397,11 @@ describe('s3cmd getObject', function toto() {
    });

    it('get non existing file in existing bucket, should fail', done => {
-       exec(['get', `s3://${bucket}/${nonexist}`, 'fail'], done, 12);
+       exec(['get', `s3://${bucket}/${nonexist}`, 'fail'], done, 64);
    });

    it('get file in non existing bucket, should fail', done => {
-       exec(['get', `s3://${nonexist}/${nonexist}`, 'fail2'], done, 12);
+       exec(['get', `s3://${nonexist}/${nonexist}`, 'fail2'], done, 64);
    });
});
@@ -511,7 +513,7 @@ describe('s3cmd delObject', () => {

    it('delete an already deleted object, should return a 204', done => {
        provideLineOfInterest(['rm', `s3://${bucket}/${upload}`, '--debug'],
-           'DEBUG: Response: {', parsedObject => {
+           'DEBUG: Response:\n{', parsedObject => {
                assert.strictEqual(parsedObject.status, 204);
                done();
            });
@@ -519,14 +521,14 @@ describe('s3cmd delObject', () => {

    it('delete non-existing object, should return a 204', done => {
        provideLineOfInterest(['rm', `s3://${bucket}/${nonexist}`, '--debug'],
-           'DEBUG: Response: {', parsedObject => {
+           'DEBUG: Response:\n{', parsedObject => {
                assert.strictEqual(parsedObject.status, 204);
                done();
            });
    });

    it('try to get the deleted object, should fail', done => {
-       exec(['get', `s3://${bucket}/${upload}`, download], done, 12);
+       exec(['get', `s3://${bucket}/${upload}`, download], done, 64);
    });
});
@@ -621,7 +623,7 @@ describe('s3cmd multipart upload', function titi() {
    });

    it('should not be able to get deleted object', done => {
-       exec(['get', `s3://${bucket}/${MPUpload}`, download], done, 12);
+       exec(['get', `s3://${bucket}/${MPUpload}`, download], done, 64);
    });
});
@@ -660,7 +662,7 @@ MPUploadSplitter.forEach(file => {
    });

    it('should not be able to get deleted object', done => {
-       exec(['get', `s3://${bucket}/${file}`, download], done, 12);
+       exec(['get', `s3://${bucket}/${file}`, download], done, 64);
        });
    });
});
|
||||||
|
|
||||||
// test that POLICY and CORS are returned as 'none'
|
// test that POLICY and CORS are returned as 'none'
|
||||||
it('should find that policy has a value of none', done => {
|
it('should find that policy has a value of none', done => {
|
||||||
checkRawOutput(['info', `s3://${bucket}`], 'policy', 'none',
|
checkRawOutput(['info', `s3://${bucket}`], 'Policy', 'none',
|
||||||
'stdout', foundIt => {
|
'stdout', foundIt => {
|
||||||
assert(foundIt);
|
assert(foundIt);
|
||||||
done();
|
done();
|
||||||
|
@@ -736,7 +738,7 @@ describe('s3cmd info', () => {
    });

    it('should find that cors has a value of none', done => {
-       checkRawOutput(['info', `s3://${bucket}`], 'cors', 'none',
+       checkRawOutput(['info', `s3://${bucket}`], 'CORS', 'none',
            'stdout', foundIt => {
                assert(foundIt);
                done();
@@ -762,7 +764,7 @@ describe('s3cmd info', () => {
    });

    it('should find that cors has a value', done => {
-       checkRawOutput(['info', `s3://${bucket}`], 'cors', corsConfig,
+       checkRawOutput(['info', `s3://${bucket}`], 'CORS', corsConfig,
            'stdout', foundIt => {
                assert(foundIt, 'Did not find value for cors');
                done();
yarn.lock (16 lines changed)
@@ -466,9 +466,9 @@ arraybuffer.slice@~0.0.7:
  optionalDependencies:
    ioctl "^2.0.2"

-"arsenal@git+https://github.com/scality/arsenal#7.10.43":
-  version "7.10.43"
-  resolved "git+https://github.com/scality/arsenal#054f61d6c1b3c9bdef0ad7a98bb4703b5acacad4"
+"arsenal@git+https://github.com/scality/arsenal#7.10.43-1":
+  version "7.10.43-1"
+  resolved "git+https://github.com/scality/arsenal#b30d1a23a13c54351eea9efb72f83a69d51020ac"
  dependencies:
    "@types/async" "^3.2.12"
    "@types/utf8" "^3.0.1"
@@ -484,7 +484,7 @@ arraybuffer.slice@~0.0.7:
    bson "4.0.0"
    debug "~2.6.9"
    diskusage "^1.1.1"
-   fcntl "github:scality/node-fcntl#0.2.0"
+   fcntl "github:scality/node-fcntl#0.2.2"
    hdclient scality/hdclient#1.1.0
    https-proxy-agent "^2.2.0"
    ioredis "^4.28.5"
@@ -1851,6 +1851,14 @@ fast-levenshtein@~2.0.6:
    nan "^2.3.2"
    node-gyp "^8.0.0"

+"fcntl@github:scality/node-fcntl#0.2.2":
+  version "0.2.1"
+  resolved "https://codeload.github.com/scality/node-fcntl/tar.gz/b1335ca204c6265cedc50c26020c4d63aabe920e"
+  dependencies:
+    bindings "^1.1.1"
+    nan "^2.3.2"
+    node-gyp "^8.0.0"
+
fecha@^4.2.0:
  version "4.2.3"
  resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.3.tgz#4d9ccdbc61e8629b259fdca67e65891448d569fd"