Compare commits


4 Commits

Author SHA1 Message Date
bbuchanan9 293e9c975a bugfix: S3C-2317 Append UUID to sorted set members 2019-07-19 11:46:25 -07:00
bbuchanan9 62496c3c79 feature: S3C-2273 Add unit and functional testing 2019-07-19 13:06:43 +00:00
bbuchanan9 f342c12223 feature: S3C-2260 Add unit and functional testing 2019-07-19 13:06:38 +00:00
bbuchanan9 34e82fd987 bugfix: S3C-2322 UtapiClient configuration update 2019-07-19 12:49:02 +00:00
25 changed files with 4039 additions and 30 deletions


@@ -41,5 +41,14 @@ stages:
           name: run unit tests
           command: npm test
       - ShellCommand:
-          name: run feature tests
-          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash ft_test
+          name: run client tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:client
+      - ShellCommand:
+          name: run server tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:server
+      - ShellCommand:
+          name: run cron tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:cron
+      - ShellCommand:
+          name: run interval tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:interval


@@ -21,6 +21,9 @@ RUN curl -sL https://deb.nodesource.com/setup_6.x | bash - \
 ARG BUILDBOT_VERSION
 RUN pip install buildbot-worker==$BUILDBOT_VERSION
+RUN pip3 install requests
+RUN pip3 install redis
 
 ADD supervisor/buildbot_worker.conf /etc/supervisor/conf.d/
+ADD redis/sentinel.conf /etc/sentinel.conf
 
 CMD ["supervisord", "-n"]

View File

@@ -0,0 +1,35 @@
+# Example sentinel.conf
+
+# The port that this sentinel instance will run on
+port 16379
+
+# Specify the log file name. Also the empty string can be used to force
+# Sentinel to log on the standard output. Note that if you use standard
+# output for logging but daemonize, logs will be sent to /dev/null
+logfile ""
+
+# dir <working-directory>
+# Every long running process should have a well-defined working directory.
+# For Redis Sentinel to chdir to /tmp at startup is the simplest thing
+# for the process to don't interfere with administrative tasks such as
+# unmounting filesystems.
+dir /tmp
+
+# sentinel monitor <master-name> <ip> <redis-port> <quorum>
+#
+# Tells Sentinel to monitor this master, and to consider it in O_DOWN
+# (Objectively Down) state only if at least <quorum> sentinels agree.
+#
+# Note that whatever is the ODOWN quorum, a Sentinel will require to
+# be elected by the majority of the known Sentinels in order to
+# start a failover, so no failover can be performed in minority.
+#
+# Replicas are auto-discovered, so you don't need to specify replicas in
+# any way. Sentinel itself will rewrite this configuration file adding
+# the replicas using additional configuration options.
+# Also note that the configuration file is rewritten when a
+# replica is promoted to master.
+#
+# Note: master name should not include special characters or spaces.
+# The valid charset is A-z 0-9 and the three characters ".-_".
+sentinel monitor scality-s3 127.0.0.1 6379 1
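
The `sentinel monitor` line above is what clients key on. A minimal connection sketch, assuming the ioredis client (the project's `utils/redisClient` wrapper appears to sit on top of it; treat that detail as an assumption). The sentinel port (16379) and master name (`scality-s3`) come from this sentinel.conf; everything else is illustrative:

```js
// Sketch only: connect through the Sentinel defined above rather than
// directly to the Redis master.
const Redis = require('ioredis');

const client = new Redis({
    sentinels: [{ host: '127.0.0.1', port: 16379 }],
    name: 'scality-s3', // the <master-name> from `sentinel monitor`
});

client.ping().then(res => console.log(res)); // 'PONG' once connected
```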

eve/workers/unit_and_feature_tests/run_ft_tests.bash Normal file → Executable file

@@ -13,5 +13,5 @@ killandsleep () {
   sleep 10
 }
 
-npm start & bash tests/utils/wait_for_local_port.bash $PORT 40
-npm run $1
+UTAPI_INTERVAL_TEST_MODE=$1 npm start & bash tests/utils/wait_for_local_port.bash $PORT 40
+UTAPI_INTERVAL_TEST_MODE=$1 npm run $2


@@ -3,7 +3,12 @@ command=/bin/sh -c 'buildbot-worker create-worker . "%(ENV_BUILDMASTER)s:%(ENV_B
 autostart=true
 autorestart=false
 
-[program:redis]
+[program:redis_server]
 command=/usr/bin/redis-server
 autostart=true
 autorestart=false
+
+[program:redis_sentinel]
+command=/usr/bin/redis-server /etc/sentinel.conf --sentinel
+autostart=true
+autorestart=false


@@ -1,3 +1,5 @@
 build-essential
 redis-server
 nodejs
+python3
+python3-pip


@@ -144,6 +144,9 @@ class ListMetrics {
             res.push(last);
             const d = new Date(last);
             last = d.setMinutes(d.getMinutes() + 15);
+            if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
+                last = d.setSeconds(d.getSeconds() + 15);
+            }
         }
         res.push(end);
         return res;


@@ -5,6 +5,7 @@ const Datastore = require('./Datastore');
 const { generateKey, generateCounter, generateStateKey } = require('./schema');
 const { errors } = require('arsenal');
 const redisClient = require('../utils/redisClient');
+const member = require('../utils/member');
 
 const methods = {
     createBucket: { method: '_genericPushMetric', changesData: true },
@@ -114,7 +115,7 @@ class UtapiClient {
         }
         this.disableClient = false;
         this.expireMetrics = config.expireMetrics;
-        this.expireTTL = config.expireTTL || 0;
+        this.expireMetricsTTL = config.expireMetricsTTL || 0;
     }
 }
@@ -126,6 +127,10 @@ class UtapiClient {
     static getNormalizedTimestamp() {
         const d = new Date();
         const minutes = d.getMinutes();
+        if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
+            const seconds = d.getSeconds();
+            return d.setSeconds((seconds - seconds % 15), 0, 0);
+        }
         return d.setMinutes((minutes - minutes % 15), 0, 0);
     }
@@ -434,7 +439,7 @@ class UtapiClient {
     _expireMetrics(keys, log, callback) {
         // expire metrics here
-        const expireCmds = keys.map(k => ['expire', k, this.expireTTL]);
+        const expireCmds = keys.map(k => ['expire', k, this.expireMetricsTTL]);
         return this.ds.multi(expireCmds, (err, result) => {
             if (err) {
                 const logParam = Array.isArray(err) ? { errorList: err } :
@@ -513,7 +518,7 @@ class UtapiClient {
             const key = generateStateKey(p, 'numberOfObjects');
             cmds2.push(
                 ['zremrangebyscore', key, timestamp, timestamp],
-                ['zadd', key, timestamp, actionCounter]);
+                ['zadd', key, timestamp, member.serialize(actionCounter)]);
             return true;
         });
         if (noErr) {
@@ -593,7 +598,7 @@ class UtapiClient {
                 ['zremrangebyscore', generateStateKey(p, 'storageUtilized'),
                     timestamp, timestamp],
                 ['zadd', generateStateKey(p, 'storageUtilized'),
-                    timestamp, actionCounter]
+                    timestamp, member.serialize(actionCounter)]
             );
             return true;
         });
@@ -667,7 +672,7 @@ class UtapiClient {
             }
             key = generateStateKey(p, 'numberOfObjects');
             cmds2.push(['zremrangebyscore', key, timestamp, timestamp],
-                ['zadd', key, timestamp, actionCounter]);
+                ['zadd', key, timestamp, member.serialize(actionCounter)]);
             return true;
         });
         if (noErr) {
@@ -779,7 +784,7 @@ class UtapiClient {
                     timestamp, timestamp],
                 ['zadd',
                     generateStateKey(p, 'storageUtilized'), timestamp,
-                    actionCounter]);
+                    member.serialize(actionCounter)]);
             // The 'abortMultipartUpload' action does not affect number of
             // objects, so we return here.
             if (action === 'abortMultipartUpload') {
@@ -809,7 +814,7 @@ class UtapiClient {
                     generateStateKey(p, 'numberOfObjects'), timestamp,
                     timestamp],
                 ['zadd', generateStateKey(p, 'numberOfObjects'), timestamp,
-                    actionCounter]);
+                    member.serialize(actionCounter)]);
             return true;
         });
         if (noErr) {
@@ -941,7 +946,7 @@ class UtapiClient {
                     generateStateKey(p, 'storageUtilized'),
                     timestamp, timestamp],
                 ['zadd', generateStateKey(p, 'storageUtilized'),
-                    timestamp, actionCounter]);
+                    timestamp, member.serialize(actionCounter)]);
 
             // number of objects counter
             objectsIndex = (i * (cmdsLen / paramsArrLen)) + 1;
@@ -967,7 +972,7 @@ class UtapiClient {
                     generateStateKey(p, 'numberOfObjects'),
                     timestamp, timestamp],
                 ['zadd', generateStateKey(p, 'numberOfObjects'),
-                    timestamp, actionCounter]);
+                    timestamp, member.serialize(actionCounter)]);
             return true;
         });
         if (noErr) {
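
For readers skimming the hunks above: `getNormalizedTimestamp` snaps timestamps down to 15-minute boundaries, and the new `UTAPI_INTERVAL_TEST_MODE` branch snaps to 15-second boundaries instead, so the functional tests can cross intervals without waiting 15 minutes. A standalone sketch of that arithmetic (illustrative names, not the library API):

```js
// Mirrors the normalization logic in UtapiClient.getNormalizedTimestamp.
function normalize(date, intervalTestMode) {
    const d = new Date(date);
    if (intervalTestMode) {
        const seconds = d.getSeconds();
        return d.setSeconds(seconds - (seconds % 15), 0);
    }
    const minutes = d.getMinutes();
    return d.setMinutes(minutes - (minutes % 15), 0, 0);
}

const t = new Date(2019, 6, 19, 11, 46, 25); // 11:46:25 local time
console.log(new Date(normalize(t, false)).toTimeString().slice(0, 8)); // '11:45:00'
console.log(new Date(normalize(t, true)).toTimeString().slice(0, 8));  // '11:46:15'
```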

package-lock.json generated Normal file

File diff suppressed because it is too large.


@@ -27,13 +27,18 @@
   "devDependencies": {
     "aws4": "^1.8.0",
     "eslint": "^2.4.0",
-    "eslint-plugin-react": "^4.3.0",
     "eslint-config-airbnb": "^6.0.0",
     "eslint-config-scality": "scality/Guidelines#71a059ad",
+    "eslint-plugin-react": "^4.3.0",
+    "express": "^4.17.1",
     "mocha": "^3.0.2"
   },
   "scripts": {
     "ft_test": "mocha --recursive tests/functional",
+    "ft_test:client": "mocha --recursive tests/functional/client",
+    "ft_test:cron": "mocha --recursive tests/functional/cron",
+    "ft_test:interval": "mocha --recursive tests/functional/interval",
+    "ft_test:server": "mocha --recursive tests/functional/server",
     "lint": "eslint $(git ls-files '*.js')",
     "lint_md": "mdlint $(git ls-files '*.md')",
     "start": "node server.js",


@@ -1,11 +1,11 @@
 const assert = require('assert');
-const { map, series } = require('async');
-const UtapiClient = require('../../lib/UtapiClient');
-const Datastore = require('../../lib/Datastore');
-const redisClient = require('../../utils/redisClient');
+const { map, series, waterfall, each } = require('async');
+const UtapiClient = require('../../../lib/UtapiClient');
+const Datastore = require('../../../lib/Datastore');
+const redisClient = require('../../../utils/redisClient');
 const { Logger } = require('werelogs');
 const { getCounters, getMetricFromKey,
-    getStateKeys, getKeys } = require('../../lib/schema');
+    getStateKeys, getKeys } = require('../../../lib/schema');
 const log = new Logger('TestUtapiClient');
 const redis = redisClient({
     host: '127.0.0.1',
@@ -284,4 +284,50 @@ describe('UtapiClient: expire bucket metrics', () => {
             });
         });
     });
+
+    describe('with a non-zero TTL', () => {
+        const TTL = 10;
+
+        beforeEach(done => {
+            const config = Object.assign({
+                expireMetrics: true,
+                expireMetricsTTL: TTL,
+            }, utapiConfig);
+            const client = new UtapiClient(config);
+            const params = _getMetricObj('bucket');
+            series([
+                next => {
+                    client.ds.getClient()
+                        .on('ready', next)
+                        .on('error', next);
+                },
+                next =>
+                    client.pushMetric('createBucket', reqUid, params, next),
+                next =>
+                    client.pushMetric('deleteBucket', reqUid, params, next),
+            ], done);
+        });
+
+        it(`should have a TTL > than 0 and <= ${TTL}`, done => {
+            function assertTTL(keys, cb) {
+                each(keys, (key, next) =>
+                    redis.ttl(key, (err, data) => {
+                        if (err) {
+                            return next(err);
+                        }
+                        assert(data > 0 && data <= TTL);
+                        return next();
+                    }),
+                cb);
+            }
+
+            waterfall([
+                next => redis.keys('s3:buckets:*', next),
+                (keys, next) => {
+                    assert.strictEqual(keys.length, 2);
+                    assertTTL(keys, next);
+                },
+            ], done);
+        });
+    });
 });


@@ -0,0 +1,272 @@
+const assert = require('assert');
+const async = require('async');
+const { constants } = require('arsenal');
+
+const UtapiReindex = require('../../../lib/UtapiReindex');
+const redisClient = require('../../../utils/redisClient');
+const mock = require('../../utils/mock');
+const utils = require('../../utils/utils');
+
+const REINDEX_LOCK_KEY = 's3:utapireindex:lock';
+
+describe('UtapiReindex', () => {
+    const vault = new mock.Vault();
+    const bucketD = new mock.BucketD();
+    let reindex;
+    let redis;
+
+    function shouldAcquireLock(done) {
+        reindex._lock()
+            .then(res => {
+                assert.strictEqual(res, 'OK');
+            })
+            .then(done)
+            .catch(done);
+    }
+
+    function shouldNotAcquireLock(done) {
+        reindex._lock()
+            .then(res => {
+                assert.strictEqual(res, null);
+            })
+            .then(done)
+            .catch(done);
+    }
+
+    function shouldReleaseLock(done) {
+        reindex._unLock()
+            .then(res => {
+                assert.strictEqual(res, 1);
+            })
+            .then(done)
+            .catch(done);
+    }
+
+    function shouldNotReleaseLock(done) {
+        reindex._unLock()
+            .then(res => {
+                assert.strictEqual(res, 0);
+            })
+            .then(done)
+            .catch(done);
+    }
+
+    before(() => {
+        bucketD.start();
+        vault.start();
+    });
+
+    after(() => {
+        bucketD.end();
+        vault.end();
+    });
+
+    beforeEach(done => {
+        reindex = new UtapiReindex();
+        redis = redisClient({}, mock.log)
+            .on('ready', done)
+            .on('error', done);
+    });
+
+    afterEach(done => {
+        redis
+            .on('close', done)
+            .on('error', done)
+            .flushdb()
+            .then(() => redis.quit())
+            .catch(done);
+    });
+
+    describe('::_getRedisClient', () => {
+        it('should get a new redis client', done => {
+            reindex._getRedisClient()
+                .on('ready', done)
+                .on('error', done);
+        });
+    });
+
+    describe('::_connect', () => {
+        it('should connect to the redis sentinel', done => {
+            reindex._connect(done);
+        });
+    });
+
+    describe('::_lock', () => {
+        beforeEach(done => {
+            reindex._connect(done);
+        });
+
+        describe('lock is not acquired', () => {
+            it('should acquire the lock key', done => {
+                shouldAcquireLock(done);
+            });
+        });
+
+        describe('lock is already acquired', () => {
+            beforeEach(done => {
+                shouldAcquireLock(done);
+            });
+
+            it('should not acquire the lock key', done => {
+                shouldNotAcquireLock(done);
+            });
+        });
+    });
+
+    describe('::_unlock', () => {
+        beforeEach(done => {
+            reindex._connect(done);
+        });
+
+        describe('lock is not acquired', () => {
+            it('should not release the lock key', done => {
+                shouldNotReleaseLock(done);
+            });
+        });
+
+        describe('lock is already acquired', () => {
+            beforeEach(done => {
+                shouldAcquireLock(done);
+            });
+
+            it('should release the lock key', done => {
+                shouldReleaseLock(done);
+            });
+        });
+    });
+
+    describe('::_attemptLock', () => {
+        beforeEach(done => {
+            reindex._connect(done);
+        });
+
+        describe('lock is not acquired', () => {
+            it('should call the job', done => {
+                const job = () => {
+                    done();
+                };
+                reindex._attemptLock(job);
+            });
+        });
+
+        describe('lock is already acquired', () => {
+            beforeEach(done => {
+                shouldAcquireLock(done);
+            });
+
+            it('should not call the job', done => {
+                const job = () => {
+                    done(new Error('job called when lock was not acquired'));
+                };
+                reindex._attemptLock(job);
+                setTimeout(done, 200);
+            });
+        });
+    });
+
+    describe('::_attemptUnlock', () => {
+        beforeEach(done => {
+            reindex._connect(done);
+        });
+
+        describe('lock is already acquired', () => {
+            beforeEach(done => {
+                shouldAcquireLock(done);
+            });
+
+            it('should release the lock key', done => {
+                reindex._attemptUnlock(); // Lock should be released here.
+                setTimeout(() => shouldNotReleaseLock(done), 200);
+            });
+        });
+    });
+
+    describe('::_scheduleJob', () => {
+        function waitUntilLockHasValue({ value }, cb) {
+            let shouldLeave;
+
+            async.doUntil(next =>
+                redis.get(REINDEX_LOCK_KEY, (err, res) => {
+                    if (err) {
+                        return next(err);
+                    }
+                    shouldLeave = res === value;
+                    return setTimeout(next, 200);
+                }),
+            () => shouldLeave, cb);
+        }
+
+        function checkMetrics({ resource, expected }, cb) {
+            utils.listMetrics(resource, (err, res) => {
+                if (err) {
+                    return cb(err);
+                }
+                if (res.code) {
+                    return cb(new Error(res.message));
+                }
+                const { storageUtilized, numberOfObjects } = expected;
+                assert.deepStrictEqual(res[0].storageUtilized, storageUtilized);
+                assert.deepStrictEqual(res[0].numberOfObjects, numberOfObjects);
+                return cb();
+            });
+        }
+
+        beforeEach(done => {
+            reindex._scheduleJob();
+            // Wait until the scripts have started and finished reindexing.
+            async.series([
+                next => waitUntilLockHasValue({ value: 'true' }, next),
+                next => waitUntilLockHasValue({ value: null }, next),
+            ], done);
+        });
+
+        it('should reindex metrics', done => {
+            async.parallel([
+                next => {
+                    const params = {
+                        resource: {
+                            type: 'buckets',
+                            buckets: [mock.values.BUCKET_NAME],
+                        },
+                        expected: {
+                            storageUtilized: [0, 1024],
+                            numberOfObjects: [0, 1],
+                        },
+                    };
+                    checkMetrics(params, next);
+                },
+                next => {
+                    const params = {
+                        resource: {
+                            type: 'buckets',
+                            buckets: [
+                                `${constants.mpuBucketPrefix}` +
+                                `${mock.values.BUCKET_NAME}`,
+                            ],
+                        },
+                        expected: {
+                            storageUtilized: [0, 1024],
+                            numberOfObjects: [0, 1],
+                        },
+                    };
+                    checkMetrics(params, next);
+                },
+                next => {
+                    const params = {
+                        resource: {
+                            type: 'accounts',
+                            accounts: [mock.values.ACCOUNT_ID],
+                        },
+                        expected: {
+                            storageUtilized: [0, 2048],
+                            numberOfObjects: [0, 2],
+                        },
+                    };
+                    checkMetrics(params, next);
+                },
+            ], done);
+        });
+    });
+});
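
The lock semantics these tests assert (`_lock` resolving to `'OK'` or `null`, `_unLock` to `1` or `0`) match Redis's `SET ... NX` and `DEL` return values. A hedged sketch of that pattern, assuming ioredis; `UtapiReindex`'s actual implementation is not part of this diff:

```js
// Assumed locking pattern, not UtapiReindex's real code.
const Redis = require('ioredis');

// SET key value NX resolves to 'OK' when the key was created and to
// null when it already existed -- the two cases the tests assert.
function attemptLock(redis, key, ttlSeconds) {
    return redis.set(key, 'true', 'EX', ttlSeconds, 'NX')
        .then(res => res === 'OK');
}

// DEL resolves to the number of keys removed: 1 if we held the lock,
// 0 if there was nothing to release.
function unlock(redis, key) {
    return redis.del(key);
}
```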


@@ -1,12 +1,12 @@
 const assert = require('assert');
 const async = require('async');
 const { Logger } = require('werelogs');
-const UtapiReplay = require('../../lib/UtapiReplay');
-const UtapiClient = require('../../lib/UtapiClient');
-const Datastore = require('../../lib/Datastore');
-const redisClient = require('../../utils/redisClient');
-const { getAllResourceTypeKeys } = require('../utils/utils');
-const safeJsonParse = require('../../utils/safeJsonParse');
+const UtapiReplay = require('../../../lib/UtapiReplay');
+const UtapiClient = require('../../../lib/UtapiClient');
+const Datastore = require('../../../lib/Datastore');
+const redisClient = require('../../../utils/redisClient');
+const { getAllResourceTypeKeys } = require('../../utils/utils');
+const safeJsonParse = require('../../../utils/safeJsonParse');
 
 const log = new Logger('UTAPIReplayTest');
 const localCache = redisClient({


@@ -0,0 +1,219 @@
+const assert = require('assert');
+const async = require('async');
+const uuid = require('uuid/v4');
+
+const UtapiClient = require('../../../lib/UtapiClient');
+const mock = require('../../utils/mock');
+const { makeUtapiClientRequest } = require('../../utils/utils');
+const redisClient = require('../../../utils/redisClient');
+
+describe('UtapiClient: Across time intervals', function test() {
+    this.timeout((1000 * 60) * 2);
+
+    const redis = redisClient({
+        host: '127.0.0.1',
+        port: 6379,
+    }, mock.log);
+
+    const utapi = new UtapiClient({
+        redis: {
+            host: '127.0.0.1',
+            port: 6379,
+        },
+        localCache: {
+            host: '127.0.0.1',
+            port: 6379,
+        },
+        component: 's3',
+    });
+
+    function checkMetricResponse(response, expected) {
+        const data = JSON.parse(response);
+        if (data.code) {
+            assert.ifError(data.message);
+        }
+        const { storageUtilized, numberOfObjects, incomingBytes } = data[0];
+        assert.deepStrictEqual(storageUtilized, expected.storageUtilized);
+        assert.deepStrictEqual(numberOfObjects, expected.numberOfObjects);
+        assert.strictEqual(incomingBytes, expected.incomingBytes);
+    }
+
+    function waitUntilNextInterval() {
+        const start = UtapiClient.getNormalizedTimestamp();
+        while (start === UtapiClient.getNormalizedTimestamp()) {
+            setTimeout(() => {}, 500);
+        }
+    }
+
+    const vault = new mock.Vault();
+
+    before(() => {
+        vault.start();
+    });
+
+    after(() => {
+        vault.end();
+    });
+
+    afterEach(() => redis.flushdb());
+
+    function putObject(cb) {
+        const params = {
+            level: 'buckets',
+            service: 's3',
+            bucket: 'my-bucket',
+            newByteLength: 10,
+            oldByteLength: null,
+        };
+        utapi.pushMetric('putObject', uuid(), params, cb);
+    }
+
+    function deleteObject(cb) {
+        const params = {
+            level: 'buckets',
+            service: 's3',
+            bucket: 'my-bucket',
+            byteLength: 10,
+            numberOfObjects: 1,
+        };
+        utapi.pushMetric('deleteObject', uuid(), params, cb);
+    }
+
+    let firstInterval;
+    let secondInterval;
+
+    describe('Metrics do not return to same values', () => {
+        beforeEach(done => {
+            async.series([
+                next => {
+                    waitUntilNextInterval();
+                    firstInterval = UtapiClient.getNormalizedTimestamp();
+                    async.series([
+                        next => putObject(next),
+                        next => putObject(next),
+                    ], next);
+                },
+                next => {
+                    waitUntilNextInterval();
+                    secondInterval = UtapiClient.getNormalizedTimestamp();
+                    async.series([
+                        next => putObject(next),
+                        next => putObject(next),
+                        next => deleteObject(next),
+                    ], next);
+                },
+            ], done);
+        });
+
+        it('should maintain data points', done => {
+            async.series([
+                next => {
+                    const params = {
+                        timeRange: [firstInterval, secondInterval - 1],
+                        resource: {
+                            type: 'buckets',
+                            buckets: ['my-bucket'],
+                        },
+                    };
+                    makeUtapiClientRequest(params, (err, response) => {
+                        assert.ifError(err);
+                        const expected = {
+                            storageUtilized: [20, 20],
+                            numberOfObjects: [2, 2],
+                            incomingBytes: 20,
+                        };
+                        checkMetricResponse(response, expected);
+                        return next();
+                    });
+                },
+                next => {
+                    const seconds = (15 * 1000) - 1;
+                    const params = {
+                        timeRange: [secondInterval, secondInterval + seconds],
+                        resource: {
+                            type: 'buckets',
+                            buckets: ['my-bucket'],
+                        },
+                    };
+                    makeUtapiClientRequest(params, (err, response) => {
+                        assert.ifError(err);
+                        const expected = {
+                            storageUtilized: [30, 30],
+                            numberOfObjects: [3, 3],
+                            incomingBytes: 20,
+                        };
+                        checkMetricResponse(response, expected);
+                        return next();
+                    });
+                },
+            ], done);
+        });
+    });
+
+    describe('Metrics return to same values', () => {
+        beforeEach(done => {
+            async.series([
+                next => {
+                    waitUntilNextInterval();
+                    firstInterval = UtapiClient.getNormalizedTimestamp();
+                    async.series([
+                        next => putObject(next),
+                        next => putObject(next),
+                    ], next);
+                },
+                next => {
+                    waitUntilNextInterval();
+                    secondInterval = UtapiClient.getNormalizedTimestamp();
+                    async.series([
+                        next => putObject(next),
+                        next => deleteObject(next),
+                    ], next);
+                },
+            ], done);
+        });
+
+        it('should maintain data points', done => {
+            async.series([
+                next => {
+                    const params = {
+                        timeRange: [firstInterval, secondInterval - 1],
+                        resource: {
+                            type: 'buckets',
+                            buckets: ['my-bucket'],
+                        },
+                    };
+                    makeUtapiClientRequest(params, (err, response) => {
+                        assert.ifError(err);
+                        const expected = {
+                            storageUtilized: [20, 20],
+                            numberOfObjects: [2, 2],
+                            incomingBytes: 20,
+                        };
+                        checkMetricResponse(response, expected);
+                        return next();
+                    });
+                },
+                next => {
+                    const seconds = (15 * 1000) - 1;
+                    const params = {
+                        timeRange: [secondInterval, secondInterval + seconds],
+                        resource: {
+                            type: 'buckets',
+                            buckets: ['my-bucket'],
+                        },
+                    };
+                    makeUtapiClientRequest(params, (err, response) => {
+                        assert.ifError(err);
+                        const expected = {
+                            storageUtilized: [20, 20],
+                            numberOfObjects: [2, 2],
+                            incomingBytes: 10,
+                        };
+                        checkMetricResponse(response, expected);
+                        return next();
+                    });
+                },
+            ], done);
+        });
+    });
+});


@@ -1,17 +1,21 @@
 const assert = require('assert');
 
-const { makeUtapiClientRequest } = require('../utils/utils');
-const Vault = require('../utils/mock/Vault');
+const { makeUtapiClientRequest } = require('../../utils/utils');
+const Vault = require('../../utils/mock/Vault');
 
 const MAX_RANGE_MS = (((1000 * 60) * 60) * 24) * 30; // One month.
 
 describe('Request ranges', function test() {
     this.timeout((1000 * 60) * 2);
 
+    const vault = new Vault();
+
     before(() => {
-        const vault = new Vault();
         vault.start();
     });
 
+    after(() => {
+        vault.end();
+    });
+
     const tests = [
         {
             start: 0,


@@ -0,0 +1,29 @@
+const assert = require('assert');
+const uuid = require('uuid/v4');
+const member = require('../../utils/member');
+
+describe('Sorted set member serialization', () => {
+    describe('serialize', () => {
+        it('should serialize the value', () => {
+            const value = '1';
+            const result = member.serialize(value);
+            assert(result.startsWith(`${value}:`));
+        });
+    });
+
+    describe('deserialize', () => {
+        it('should deserialize the member', () => {
+            const value = '1';
+            const result = member.deserialize(`${value}:${uuid()}`);
+            assert.strictEqual(result, value);
+        });
+    });
+
+    describe('serialize and deserialize', () => {
+        it('should serialize and deserialize the value', () => {
+            const value = '1';
+            const result = member.serialize(value);
+            assert.strictEqual(member.deserialize(result), value);
+        });
+    });
+});


@@ -4,6 +4,7 @@ const Datastore = require('../../lib/Datastore');
 const MemoryBackend = require('../../lib/backend/Memory');
 const UtapiClient = require('../../lib/UtapiClient');
 const { getNormalizedTimestamp } = require('../utils/utils');
+const member = require('../../utils/member');
 
 const memoryBackend = new MemoryBackend();
 const ds = new Datastore();
@@ -21,6 +22,18 @@ const config = {
     component: 's3',
 };
 
+function isSortedSetKey(key) {
+    return key.endsWith('storageUtilized') || key.endsWith('numberOfObjects');
+}
+
+function deserializeMemoryBackend(data) {
+    Object.keys(data).forEach(key => {
+        if (isSortedSetKey(key)) {
+            data[key][0][1] = member.deserialize(data[key][0][1]); // eslint-disable-line
+        }
+    });
+}
+
 // Build prefix values to construct the expected Redis schema keys
 function getPrefixValues(timestamp) {
     return [
@@ -95,6 +108,7 @@ function testMetric(metric, params, expected, cb) {
     const c = new UtapiClient(config);
     c.setDataStore(ds);
     c.pushMetric(metric, REQUID, params, () => {
+        deserializeMemoryBackend(memoryBackend.data);
         assert.deepStrictEqual(memoryBackend.data, expected);
         return cb();
     });


@@ -0,0 +1,56 @@
+const http = require('http');
+const express = require('express');
+const { models, constants } = require('arsenal');
+const { CANONICAL_ID, BUCKET_NAME, OBJECT_KEY } = require('./values');
+
+const { ObjectMD } = models;
+const app = express();
+
+app.param('bucketName', (req, res, next, bucketName) => {
+    let metadata;
+    if (bucketName === constants.usersBucket) {
+        metadata = {
+            key: `${CANONICAL_ID}${constants.splitter}${BUCKET_NAME}`,
+            value: JSON.stringify({ creationDate: new Date() }),
+        };
+    } else {
+        const value = new ObjectMD().setContentLength(1024).getValue();
+        metadata = {
+            key: OBJECT_KEY,
+            value: JSON.stringify(value),
+        };
+    }
+    const body = {
+        CommonPrefixes: [],
+        Contents: [metadata],
+        IsTruncated: false,
+    };
+    req.body = JSON.stringify(body); // eslint-disable-line
+    next();
+});
+
+app.get('/default/bucket/:bucketName', (req, res) => {
+    res.writeHead(200);
+    res.write(req.body);
+    res.end();
+});
+
+class BucketD {
+    constructor() {
+        this._server = null;
+    }
+
+    start() {
+        const port = 9000;
+        this._server = http.createServer(app).listen(port);
+    }
+
+    end() {
+        this._server.close();
+    }
+}
+
+module.exports = BucketD;


@@ -1,16 +1,31 @@
 const http = require('http');
+const url = require('url');
+
 const config = require('../../../lib/Config');
+const { CANONICAL_ID } = require('./values');
 
 class Vault {
+    constructor() {
+        this._server = null;
+    }
+
     _onRequest(req, res) {
         res.writeHead(200);
+        const { query } = url.parse(req.url, true);
+        if (query.Action === 'AccountsCanonicalIds') {
+            const body = JSON.stringify([{ canonicalId: CANONICAL_ID }]);
+            res.write(body);
+        }
         return res.end();
     }
 
     start() {
         const { port } = config.vaultd;
-        return http.createServer(this._onRequest).listen(port);
+        this._server = http.createServer(this._onRequest).listen(port);
+    }
+
+    end() {
+        this._server.close();
     }
 }


@@ -0,0 +1,8 @@
+const index = {
+    BucketD: require('./BucketD'),
+    Vault: require('./Vault'),
+    log: require('./log'),
+    values: require('./values'),
+};
+
+module.exports = index;

tests/utils/mock/log.js Normal file

@ -0,0 +1,10 @@
const log = {
trace: () => {},
error: () => {},
info: () => {},
debug: () => {},
getSerializedUids: () => {},
end: () => {},
};
module.exports = log;


@@ -0,0 +1,9 @@
+const values = {
+    BUCKET_NAME: 'test-bucket-name',
+    OBJECT_KEY: 'test-object-key',
+    ACCOUNT_ID: '014810915030',
+    CANONICAL_ID:
+        '8f4bc0c540a42a9be67478d7245892a6668d337e989d2ef9d439f1d389f66817',
+};
+
+module.exports = values;


@@ -115,7 +115,59 @@ function makeUtapiClientRequest({ timeRange, resource }, cb) {
     req.end();
 }
 
+function _getNormalizedTimestamp() {
+    const d = new Date();
+    const minutes = d.getMinutes();
+    return d.setMinutes((minutes - minutes % 15), 0, 0);
+}
+
+function _getStartTime() {
+    const thirtyMinutes = (1000 * 60) * 30;
+    return _getNormalizedTimestamp() - thirtyMinutes;
+}
+
+function _getEndTime() {
+    const fifteenMinutes = (1000 * 60) * 15;
+    return (_getNormalizedTimestamp() - 1) + fifteenMinutes;
+}
+
+function _buildRequestBody(resource) {
+    const { type } = resource;
+    const body = { timeRange: [_getStartTime(), _getEndTime()] };
+    body[type] = resource[type];
+    return JSON.stringify(body);
+}
+
+function listMetrics(resource, cb) {
+    const requestBody = _buildRequestBody(resource);
+    const header = {
+        host: 'localhost',
+        port: 8100,
+        method: 'POST',
+        service: 's3',
+        path: `/${resource.type}?Action=ListMetrics`,
+        signQuery: false,
+        body: requestBody,
+    };
+    const options = aws4.sign(header, {
+        accessKeyId: 'accessKey1',
+        secretAccessKey: 'verySecretKey1',
+    });
+    const request = http.request(options, response => {
+        const body = [];
+        response.on('data', chunk => body.push(chunk));
+        response.on('end', () => {
+            const data = JSON.parse(body.join(''));
+            cb(null, data);
+        });
+    });
+    request.on('error', err => cb(err));
+    request.write(requestBody);
+    request.end();
+}
+
 module.exports = {
+    listMetrics,
     getAllResourceTypeKeys,
     getNormalizedTimestamp,
     buildMockResponse,
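
A hypothetical invocation of the new `listMetrics` helper, matching how the UtapiReindex test above calls it (the resource shape and response fields are taken from that test; the bucket name is illustrative):

```js
// Hypothetical usage; assumes a Utapi server on localhost:8100, as the
// helper's own header block does.
const { listMetrics } = require('./tests/utils/utils');

listMetrics({ type: 'buckets', buckets: ['my-bucket'] }, (err, res) => {
    if (err) {
        throw err;
    }
    // Each entry carries [start, end] values for the queried time range.
    console.log(res[0].storageUtilized, res[0].numberOfObjects);
});
```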

utils/member.js Normal file

@@ -0,0 +1,11 @@
+const uuid = require('uuid/v4');
+
+function serialize(value) {
+    return `${value}:${uuid()}`;
+}
+
+function deserialize(value) {
+    return value.split(':')[0];
+}
+
+module.exports = { serialize, deserialize };
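
Why the UUID suffix (per S3C-2317 above): Redis sorted-set members are unique, so a second ZADD of an identical counter value moves the existing member to the new score instead of recording a second data point. Serializing the member with a UUID keeps every data point distinct. A small demonstration, assuming a local Redis and ioredis:

```js
// Sketch: compare raw members with UUID-suffixed members.
const Redis = require('ioredis');
const member = require('./utils/member');

async function demo() {
    const redis = new Redis();

    // Same member at two scores: the second ZADD just moves it.
    await redis.zadd('plain', 100, '5');
    await redis.zadd('plain', 200, '5');
    console.log(await redis.zrange('plain', 0, -1)); // [ '5' ] -- one point

    // UUID-suffixed members stay distinct, preserving both points.
    await redis.zadd('suffixed', 100, member.serialize('5'));
    await redis.zadd('suffixed', 200, member.serialize('5'));
    console.log((await redis.zrange('suffixed', 0, -1)).length); // 2

    redis.quit();
}

demo().catch(console.error);
```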


@@ -4,6 +4,9 @@
  * @return {boolean} - validation result
  */
 function validateTimeRange(timeRange) {
+    if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
+        return true;
+    }
     if (Array.isArray(timeRange) && timeRange.length > 0 && timeRange.length < 3
         && timeRange.every(item => typeof item === 'number')) {
         // check for start time