Compare commits

...

2 Commits

Author SHA1 Message Date
Bennett Buchanan 59ea45e2cd [squash] Cleanup 2017-02-06 11:00:45 -08:00
Bennett Buchanan 09dc0a22b6 DEV: Add sizing test 2017-02-03 18:42:51 -08:00
4 changed files with 107 additions and 121 deletions


@@ -52,5 +52,12 @@
     "healthChecks": {
         "allowFrom": ["127.0.0.1/8", "::1"]
     },
-    "usEastBehavior": false
+    "usEastBehavior": false,
+    "utapi": {
+        "workers": 1,
+        "redis": {
+            "host": "127.0.0.1",
+            "port": 6379
+        }
+    }
 }
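
The new utapi block points the metrics service at a local Redis instance and gives it a single worker. As a minimal sketch of how these settings might be consumed (the './config.json' path and the plain require() call are assumptions for illustration, not part of this change):

// Illustrative only: read the new utapi settings from the edited config file.
// The './config.json' path is an assumption for this sketch.
const config = require('./config.json');
const { workers, redis } = config.utapi;
console.log(`Utapi: ${workers} worker(s), Redis at ${redis.host}:${redis.port}`);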

request.js (new file, 33 lines)

@@ -0,0 +1,33 @@
const http = require('http');
const aws4 = require('aws4');

// Input AWS access key, secret key, and session token.
const accessKeyId = 'GN20IUQ621RW2YEBBUX1';
const secretAccessKey = 'l09PpDBkZWRp72enn+AxQ8PNF2FquyGRv=/DTA+Z';
const token = '';
const bucketName = 'bucket-1';

// Get the start and end times for a range of one month.
// JavaScript `Date` months are zero-indexed, so this range covers February 2017.
const startTime = new Date(2017, 1, 1, 0, 0, 0, 0).getTime();
// The end of the range is the last millisecond before the next month begins.
const endTime = new Date(2017, 2, 1, 0, 0, 0, 0).getTime() - 1;

const requestBody = JSON.stringify({
    buckets: [bucketName],
    timeRange: [startTime, endTime],
});

const header = {
    host: 'localhost',
    port: 8100,
    method: 'POST',
    service: 's3',
    path: '/buckets?Action=ListMetrics',
    signQuery: false,
    body: requestBody,
};
const credentials = { accessKeyId, secretAccessKey, token };
const options = aws4.sign(header, credentials);

const request = http.request(options, response => {
    const body = [];
    response.on('data', chunk => body.push(chunk));
    response.on('end', () => process.stdout.write(`${body.join('')}\n`));
});
request.on('error', e => process.stdout.write(`error: ${e.message}\n`));
request.write(requestBody);
request.end();
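
request.js signs a POST to Utapi's ListMetrics route with aws4 and prints the raw response body; it is run with node request.js. The time range is the part most likely to need adjusting. A small self-contained helper, illustrative only and not part of this commit, that builds a [start, end] pair covering any calendar month could look like this:

// Illustrative helper (not in the commit): build a [start, end] timestamp pair,
// in milliseconds, covering one calendar month.
// `month` is zero-indexed, as in JavaScript Date objects.
function monthRange(year, month) {
    const start = new Date(year, month, 1, 0, 0, 0, 0).getTime();
    // Last millisecond of the month: the start of the next month minus one.
    const end = new Date(year, month + 1, 1, 0, 0, 0, 0).getTime() - 1;
    return [start, end];
}
// Example: February 2017, matching the hard-coded values above.
const [exampleStart, exampleEnd] = monthRange(2017, 1);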

sizingTest.js (new file, 66 lines)

@@ -0,0 +1,66 @@
'use strict';

const AWS = require('aws-sdk');
const async = require('async');

const s3 = new AWS.S3({
    accessKeyId: 'GN20IUQ621RW2YEBBUX1',
    secretAccessKey: 'l09PpDBkZWRp72enn+AxQ8PNF2FquyGRv=/DTA+Z',
    region: 'us-west-1',
    sslEnabled: false,
    endpoint: 'http://localhost:8000',
    s3ForcePathStyle: true,
    apiVersions: { s3: '2006-03-01' },
    signatureVersion: 'v4',
    signatureCache: false,
});

// Time units, expressed in seconds.
const second = 1;
const minute = 60;
const fifteenMinutes = minute * 15;
const hour = minute * 60;
const day = hour * 24;
// Interval must be in milliseconds.
const interval = second * 1000;
// Run the test for `timeOut` seconds.
const timeOut = minute;
// How often (in seconds) to print the running count; the value is arbitrary.
const logReqInterval = 10;
// The current second that counts up to the `timeOut` value.
let currentSecond = 0;

// Perform a series of AWS operations.
function awsOperations(callback) {
    const Bucket = `bucket-${currentSecond}`;
    return async.waterfall([
        next => s3.createBucket({ Bucket }, err => next(err)),
        next => async.times(80, (n, cb) =>
            s3.putObject({
                Bucket,
                Key: `object-${currentSecond}-${n}`,
                Body: Buffer.alloc(1),
            }, err => cb(err)),
        err => next(err)),
        next => async.times(20, (n, cb) =>
            s3.getObject({
                Bucket,
                Key: `object-${currentSecond}-${n}`,
            }, err => cb(err)),
        err => next(err)),
    ], err => callback(err));
}

// Write output to stdout.
function log(err, isLast) {
    if (err) {
        return process.stdout.write(`Error: ${err}`);
    }
    // Log the number of operations that have occurred.
    const count = currentSecond % logReqInterval === 0 ? `${currentSecond}` : '';
    const end = isLast ? '\nComplete\n' : '';
    return process.stdout.write(`.${count}${end}`);
}

// Perform all the actions once per `interval`, until `timeOut` has occurred.
const countDown = setInterval(() => {
    if (++currentSecond === timeOut) {
        clearInterval(countDown);
        return awsOperations(err => log(err, true));
    }
    return awsOperations(log);
}, interval);
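
sizingTest.js drives roughly one hundred requests per second (one createBucket, eighty putObject calls, and twenty getObject calls) for sixty seconds, printing a progress dot each second. It does not remove what it creates, so a rough cleanup sketch follows; it is not part of the commit and assumes the test ran to completion with the default timeOut of 60 seconds.

// Illustrative cleanup (not in the commit): delete the objects and buckets that
// sizingTest.js leaves behind, reusing the same client settings.
const AWS = require('aws-sdk');
const async = require('async');
const s3 = new AWS.S3({
    accessKeyId: 'GN20IUQ621RW2YEBBUX1',
    secretAccessKey: 'l09PpDBkZWRp72enn+AxQ8PNF2FquyGRv=/DTA+Z',
    region: 'us-west-1',
    sslEnabled: false,
    endpoint: 'http://localhost:8000',
    s3ForcePathStyle: true,
    signatureVersion: 'v4',
});
const totalSeconds = 60; // must match the `timeOut` value used by the test

async.timesSeries(totalSeconds, (sec, next) => {
    // Buckets are numbered 1 through `timeOut` because the test pre-increments.
    const Bucket = `bucket-${sec + 1}`;
    // Delete the 80 objects written to this bucket, then the bucket itself.
    async.times(80, (n, cb) =>
        s3.deleteObject({ Bucket, Key: `object-${sec + 1}-${n}` }, err => cb(err)),
    err => (err ? next(err) : s3.deleteBucket({ Bucket }, e => next(e))));
}, err => process.stdout.write(err ? `Error: ${err}\n` : 'Cleanup complete\n'));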


@@ -1,120 +0,0 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';

import getConfig from '../support/config';
import withV4 from '../support/withV4';
import BucketUtility from '../../lib/utility/bucket-util';

const bucket = `bucketacl-bucket-${Date.now()}`;
const bucketName = 'putbucketaclfttest';

const grants = [];
// results in body of 589824 bytes
for (let i = 0; i < 100000; i++) {
    grants.push({
        Grantee: {
            Type: 'CanonicalUser',
            DisplayName: 'STRING_VALUE',
            EmailAddress: 'STRING_VALUE',
            ID: 'STRING_VALUE',
        },
        Permission: 'READ',
    });
}

describe('aws-node-sdk test bucket put acl', () => {
    let s3;

    // setup test
    before(done => {
        const config = getConfig('default', { signatureVersion: 'v4' });
        s3 = new S3(config);
        s3.createBucket({ Bucket: bucket }, done);
    });

    // delete bucket after testing
    after(done => s3.deleteBucket({ Bucket: bucket }, done));

    const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
    itSkipIfAWS('should not accept xml body larger than 512 KB', done => {
        const params = {
            Bucket: bucket,
            AccessControlPolicy: {
                Grants: grants,
                Owner: {
                    DisplayName: 'STRING_VALUE',
                    ID: 'STRING_VALUE',
                },
            },
        };
        s3.putBucketAcl(params, error => {
            if (error) {
                assert.strictEqual(error.statusCode, 400);
                assert.strictEqual(
                    error.code, 'InvalidRequest');
                done();
            } else {
                done('accepted xml body larger than 512 KB');
            }
        });
    });
});

describe('PUT Bucket ACL', () => {
    withV4(sigCfg => {
        const bucketUtil = new BucketUtility('default', sigCfg);
        const s3 = bucketUtil.s3;

        beforeEach(() => {
            process.stdout.write('About to create bucket');
            return bucketUtil.createOne(bucketName).catch(err => {
                process.stdout.write(`Error in beforeEach ${err}\n`);
                throw err;
            });
        });

        afterEach(() => {
            process.stdout.write('About to delete bucket');
            return bucketUtil.deleteOne(bucketName).catch(err => {
                process.stdout.write(`Error in afterEach ${err}\n`);
                throw err;
            });
        });

        it('should return InvalidArgument if invalid grantee ' +
            'user ID provided in ACL header request', done => {
            s3.putBucketAcl({
                Bucket: bucketName,
                GrantRead: 'id=invalidUserID' }, err => {
                assert.strictEqual(err.statusCode, 400);
                assert.strictEqual(err.code, 'InvalidArgument');
                done();
            });
        });

        it('should return InvalidArgument if invalid grantee ' +
            'user ID provided in ACL request body', done => {
            s3.putBucketAcl({
                Bucket: bucketName,
                AccessControlPolicy: {
                    Grants: [
                        {
                            Grantee: {
                                Type: 'CanonicalUser',
                                ID: 'invalidUserID',
                            },
                            Permission: 'WRITE_ACP',
                        }],
                    Owner: {
                        DisplayName: 'Bart',
                        ID: '79a59df900b949e55d96a1e698fbace' +
                            'dfd6e09d98eacf8f8d5218e7cd47ef2be',
                    },
                },
            }, err => {
                assert.strictEqual(err.statusCode, 400);
                assert.strictEqual(err.code, 'InvalidArgument');
                done();
            });
        });
    });
});