Compare commits


2 Commits

Author SHA1 Message Date
bbuchanan9 f0c38e7b3c bugfix: S3C-2317 Parallelize tests 2019-07-18 17:11:42 -07:00
bbuchanan9 145ee8680d bugfix: S3C-2317 Append UUID to sorted set members 2019-07-18 12:54:34 -07:00
14 changed files with 359 additions and 56 deletions

View File

@@ -5,6 +5,23 @@ branches:
   default:
     stage: pre-merge
 
+models:
+  - Git: &clone
+      name: Pull repo
+      repourl: '%(prop:git_reference)s'
+      shallow: True
+      retryFetch: True
+      haltOnFailure: True
+  - Workspace: &workspace
+      type: kube_pod
+      path: eve/workers/pod.yml
+      images:
+        aggressor: eve/workers/unit_and_feature_tests
+  - Install: &install
+      name: install node modules
+      command: npm install
+      haltOnFailure: True
 stages:
   pre-merge:
     worker:
@@ -13,33 +30,60 @@ stages:
       - TriggerStages:
           name: trigger all the tests
           stage_names:
-            - run-tests
-  run-tests:
-    worker: &workspace
-      type: kube_pod
-      path: eve/workers/pod.yml
-      images:
-        aggressor: eve/workers/unit_and_feature_tests
+            - linting-coverage
+            - run-unit-tests
+            - run-client-tests
+            - run-server-tests
+            - run-cron-tests
+            - run-interval-tests
+  linting-coverage:
+    worker: *workspace
     steps:
-      - Git: &git
-          name: fetch source
-          repourl: '%(prop:git_reference)s'
-          shallow: True
-          retryFetch: True
-          haltOnFailure: True
-      - ShellCommand:
-          name: npm install
-          command: npm install
-          haltOnFailure: True
+      - Git: *clone
+      - ShellCommand: *install
       - ShellCommand:
           name: run static analysis tools on markdown
           command: npm run lint_md
       - ShellCommand:
           name: run static analysis tools on code
           command: npm run lint
+  run-unit-tests:
+    worker: *workspace
+    steps:
+      - Git: *clone
+      - ShellCommand: *install
       - ShellCommand:
           name: run unit tests
           command: npm test
+  run-client-tests:
+    worker: *workspace
+    steps:
+      - Git: *clone
+      - ShellCommand: *install
       - ShellCommand:
-          name: run feature tests
-          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash ft_test
+          name: run client tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:client
+  run-server-tests:
+    worker: *workspace
+    steps:
+      - Git: *clone
+      - ShellCommand: *install
+      - ShellCommand:
+          name: run server tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:server
+  run-cron-tests:
+    worker: *workspace
+    steps:
+      - Git: *clone
+      - ShellCommand: *install
+      - ShellCommand:
+          name: run cron tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:cron
+  run-interval-tests:
+    worker: *workspace
+    steps:
+      - Git: *clone
+      - ShellCommand: *install
+      - ShellCommand:
+          name: run interval tests
+          command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:interval
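
For readers not used to YAML anchors, the &clone/&workspace/&install definitions added under models: and the *clone/*workspace/*install references in each stage are standard YAML anchor/alias reuse, which is what lets the parallel stages share one worker definition and one set of setup steps. A minimal standalone sketch of the syntax (generic YAML, unrelated to this repository):

    # '&name' attaches an anchor to a node; '*name' reuses that node verbatim.
    retry-policy: &retry
      attempts: 3
      delay: 10

    job-a:
      retry: *retry   # same mapping as defined above, written once
    job-b:
      retry: *retry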

eve/workers/unit_and_feature_tests/run_ft_tests.bash (4 changes, Normal file → Executable file)
View File

@@ -13,5 +13,5 @@ killandsleep () {
   sleep 10
 }
 
-npm start & bash tests/utils/wait_for_local_port.bash $PORT 40
-npm run $1
+UTAPI_INTERVAL_TEST_MODE=$1 npm start & bash tests/utils/wait_for_local_port.bash $PORT 40
+UTAPI_INTERVAL_TEST_MODE=$1 npm run $2
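
The script now takes two positional arguments instead of one: $1 toggles UTAPI_INTERVAL_TEST_MODE for both the started server and the test run, and $2 names the npm script to execute. For reference, the invocations used by the eve/main.yml stages above look like this:

    # feature suites with interval test mode disabled
    bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:server
    # interval suite with the shortened (5-second) normalization enabled
    bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:interval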

View File

@@ -144,6 +144,9 @@ class ListMetrics {
             res.push(last);
             const d = new Date(last);
             last = d.setMinutes(d.getMinutes() + 15);
+            if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
+                last = d.setSeconds(d.getSeconds() + 5);
+            }
         }
         res.push(end);
         return res;

View File

@@ -5,6 +5,7 @@ const Datastore = require('./Datastore');
 const { generateKey, generateCounter, generateStateKey } = require('./schema');
 const { errors } = require('arsenal');
 const redisClient = require('../utils/redisClient');
+const member = require('../utils/member');
 
 const methods = {
     createBucket: { method: '_genericPushMetric', changesData: true },
@@ -126,6 +127,10 @@ class UtapiClient {
     static getNormalizedTimestamp() {
         const d = new Date();
         const minutes = d.getMinutes();
+        if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
+            const seconds = d.getSeconds();
+            return d.setSeconds((seconds - seconds % 5), 0, 0);
+        }
         return d.setMinutes((minutes - minutes % 15), 0, 0);
     }
 
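
To make the effect of the change above concrete, here is a small standalone sketch (not part of the commit) of how the normalization behaves; the 15-minute and 5-second constants mirror the code in the hunk:

    // Illustration only: quantize a timestamp the way getNormalizedTimestamp() does.
    function normalize(date, intervalTestMode) {
        const d = new Date(date);
        if (intervalTestMode) {
            const seconds = d.getSeconds();
            return d.setSeconds(seconds - (seconds % 5), 0);
        }
        const minutes = d.getMinutes();
        return d.setMinutes(minutes - (minutes % 15), 0, 0);
    }
    // e.g. 12:07:23 becomes 12:00:00.000 in normal mode and 12:07:20.000 in
    // interval test mode, so the functional tests can cross several intervals
    // in seconds instead of waiting 15 minutes per data point.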
@@ -513,7 +518,7 @@
                 const key = generateStateKey(p, 'numberOfObjects');
                 cmds2.push(
                     ['zremrangebyscore', key, timestamp, timestamp],
-                    ['zadd', key, timestamp, actionCounter]);
+                    ['zadd', key, timestamp, member.serialize(actionCounter)]);
                 return true;
             });
             if (noErr) {
@@ -593,7 +598,7 @@
                     ['zremrangebyscore', generateStateKey(p, 'storageUtilized'),
                         timestamp, timestamp],
                     ['zadd', generateStateKey(p, 'storageUtilized'),
-                        timestamp, actionCounter]
+                        timestamp, member.serialize(actionCounter)]
                 );
                 return true;
             });
@@ -667,7 +672,7 @@
                 }
                 key = generateStateKey(p, 'numberOfObjects');
                 cmds2.push(['zremrangebyscore', key, timestamp, timestamp],
-                    ['zadd', key, timestamp, actionCounter]);
+                    ['zadd', key, timestamp, member.serialize(actionCounter)]);
                 return true;
             });
             if (noErr) {
@@ -779,7 +784,7 @@
                         timestamp, timestamp],
                     ['zadd',
                         generateStateKey(p, 'storageUtilized'), timestamp,
-                        actionCounter]);
+                        member.serialize(actionCounter)]);
                 // The 'abortMultipartUpload' action does not affect number of
                 // objects, so we return here.
                 if (action === 'abortMultipartUpload') {
@@ -809,7 +814,7 @@
                         generateStateKey(p, 'numberOfObjects'), timestamp,
                         timestamp],
                     ['zadd', generateStateKey(p, 'numberOfObjects'), timestamp,
-                        actionCounter]);
+                        member.serialize(actionCounter)]);
                 return true;
             });
             if (noErr) {
@@ -941,7 +946,7 @@
                         generateStateKey(p, 'storageUtilized'),
                         timestamp, timestamp],
                     ['zadd', generateStateKey(p, 'storageUtilized'),
-                        timestamp, actionCounter]);
+                        timestamp, member.serialize(actionCounter)]);
 
                 // number of objects counter
                 objectsIndex = (i * (cmdsLen / paramsArrLen)) + 1;
@@ -967,7 +972,7 @@
                         generateStateKey(p, 'numberOfObjects'),
                         timestamp, timestamp],
                     ['zadd', generateStateKey(p, 'numberOfObjects'),
-                        timestamp, actionCounter]);
+                        timestamp, member.serialize(actionCounter)]);
                 return true;
             });
             if (noErr) {

package-lock.json (24 changes, generated)
View File

@@ -186,11 +186,11 @@
         }
       },
       "async": {
-        "version": "2.6.2",
-        "resolved": "https://registry.npmjs.org/async/-/async-2.6.2.tgz",
-        "integrity": "sha512-H1qVYh1MYhEEFLsP97cVKqCGo7KfCyTt6uEWqsTBr9SO84oK9Uwbyd/yCW+6rKJLHksBNUVWZDAjfS+Ccx0Bbg==",
+        "version": "2.6.3",
+        "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz",
+        "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==",
         "requires": {
-          "lodash": "^4.17.11"
+          "lodash": "^4.17.14"
         }
       },
       "aws4": {
@@ -1324,9 +1324,9 @@
       }
     },
     "ioredis": {
-      "version": "4.11.1",
-      "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.1.tgz",
-      "integrity": "sha512-Qnp7ecb3WeaL7ojeSlb0UBRXmsRMMFcjM+PaAcap8FLLf1NznRD6x96/PS2DEqoRfdM9WVffAjIIYuUp+q3zEw==",
+      "version": "4.11.2",
+      "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.2.tgz",
+      "integrity": "sha512-2lQM1jpTzoTPg9Lj+ayfcIn8rdvGwgNP/nzeJ7IW4va/ExqxttRnTlUaxa8jOsj/e4NYKtVkbuhEAvOVPwjztA==",
       "requires": {
         "cluster-key-slot": "^1.0.6",
         "debug": "^4.1.1",
@@ -1676,9 +1676,9 @@
       }
     },
     "lodash": {
-      "version": "4.17.11",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz",
-      "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg=="
+      "version": "4.17.14",
+      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.14.tgz",
+      "integrity": "sha512-mmKYbW3GLuJeX+iGP+Y7Gp1AiGHGbXHCOh/jZmrawMmsE7MS4znI3RL2FsjbqOyMayHInjOeykW7PEajUk1/xw=="
     },
     "lodash._baseassign": {
       "version": "3.2.0",
@@ -2809,8 +2809,8 @@
       "dev": true
     },
     "vaultclient": {
-      "version": "github:scality/vaultclient#90762d2293ffea1703433dc619fb3a1e7e389886",
-      "from": "github:scality/vaultclient#90762d2",
+      "version": "github:scality/vaultclient#3eaaff280e097e012a1c49ffcf52e816304f25ed",
+      "from": "github:scality/vaultclient#3eaaff2",
       "requires": {
         "arsenal": "github:scality/Arsenal#9f2e74e",
         "commander": "2.20.0",

View File

@@ -35,6 +35,10 @@
   },
   "scripts": {
     "ft_test": "mocha --recursive tests/functional",
+    "ft_test:client": "mocha --recursive tests/functional/client",
+    "ft_test:cron": "mocha --recursive tests/functional/cron",
+    "ft_test:interval": "mocha --recursive tests/functional/interval",
+    "ft_test:server": "mocha --recursive tests/functional/server",
     "lint": "eslint $(git ls-files '*.js')",
     "lint_md": "mdlint $(git ls-files '*.md')",
     "start": "node server.js",

View File

@@ -1,11 +1,11 @@
 const assert = require('assert');
 const { map, series, waterfall, each } = require('async');
-const UtapiClient = require('../../lib/UtapiClient');
-const Datastore = require('../../lib/Datastore');
-const redisClient = require('../../utils/redisClient');
+const UtapiClient = require('../../../lib/UtapiClient');
+const Datastore = require('../../../lib/Datastore');
+const redisClient = require('../../../utils/redisClient');
 const { Logger } = require('werelogs');
 const { getCounters, getMetricFromKey,
-    getStateKeys, getKeys } = require('../../lib/schema');
+    getStateKeys, getKeys } = require('../../../lib/schema');
 const log = new Logger('TestUtapiClient');
 const redis = redisClient({
     host: '127.0.0.1',

View File

@@ -4,10 +4,10 @@ const async = require('async');
 const { constants } = require('arsenal');
-const UtapiReindex = require('../../lib/UtapiReindex');
-const redisClient = require('../../utils/redisClient');
-const mock = require('../utils/mock');
-const utils = require('../utils/utils');
+const UtapiReindex = require('../../../lib/UtapiReindex');
+const redisClient = require('../../../utils/redisClient');
+const mock = require('../../utils/mock');
+const utils = require('../../utils/utils');
 
 const REINDEX_LOCK_KEY = 's3:utapireindex:lock';

View File

@@ -1,12 +1,12 @@
 const assert = require('assert');
 const async = require('async');
 const { Logger } = require('werelogs');
-const UtapiReplay = require('../../lib/UtapiReplay');
-const UtapiClient = require('../../lib/UtapiClient');
-const Datastore = require('../../lib/Datastore');
-const redisClient = require('../../utils/redisClient');
-const { getAllResourceTypeKeys } = require('../utils/utils');
-const safeJsonParse = require('../../utils/safeJsonParse');
+const UtapiReplay = require('../../../lib/UtapiReplay');
+const UtapiClient = require('../../../lib/UtapiClient');
+const Datastore = require('../../../lib/Datastore');
+const redisClient = require('../../../utils/redisClient');
+const { getAllResourceTypeKeys } = require('../../utils/utils');
+const safeJsonParse = require('../../../utils/safeJsonParse');
 const log = new Logger('UTAPIReplayTest');
 const localCache = redisClient({

View File

@@ -0,0 +1,219 @@
const assert = require('assert');
const async = require('async');
const uuid = require('uuid/v4');

const UtapiClient = require('../../../lib/UtapiClient');
const mock = require('../../utils/mock');
const { makeUtapiClientRequest } = require('../../utils/utils');
const redisClient = require('../../../utils/redisClient');

describe('UtapiClient: Across time intervals', function test() {
    this.timeout((1000 * 60) * 2);

    const redis = redisClient({
        host: '127.0.0.1',
        port: 6379,
    }, mock.log);

    const utapi = new UtapiClient({
        redis: {
            host: '127.0.0.1',
            port: 6379,
        },
        localCache: {
            host: '127.0.0.1',
            port: 6379,
        },
        component: 's3',
    });

    function checkMetricResponse(response, expected) {
        const data = JSON.parse(response);
        if (data.code) {
            assert.ifError(data.message);
        }
        const { storageUtilized, numberOfObjects, incomingBytes } = data[0];
        assert.deepStrictEqual(storageUtilized, expected.storageUtilized);
        assert.deepStrictEqual(numberOfObjects, expected.numberOfObjects);
        assert.strictEqual(incomingBytes, expected.incomingBytes);
    }

    function waitUntilNextInterval() {
        const start = UtapiClient.getNormalizedTimestamp();
        while (start === UtapiClient.getNormalizedTimestamp()) {
            setTimeout(() => {}, 500);
        }
    }

    const vault = new mock.Vault();

    before(() => {
        vault.start();
    });

    after(() => {
        vault.end();
    });

    afterEach(() => redis.flushdb());

    function putObject(cb) {
        const params = {
            level: 'buckets',
            service: 's3',
            bucket: 'my-bucket',
            newByteLength: 10,
            oldByteLength: null,
        };
        utapi.pushMetric('putObject', uuid(), params, cb);
    }

    function deleteObject(cb) {
        const params = {
            level: 'buckets',
            service: 's3',
            bucket: 'my-bucket',
            byteLength: 10,
            numberOfObjects: 1,
        };
        utapi.pushMetric('deleteObject', uuid(), params, cb);
    }

    let firstInterval;
    let secondInterval;

    describe('Metrics do not return to same values', () => {
        beforeEach(done => {
            async.series([
                next => {
                    waitUntilNextInterval();
                    firstInterval = UtapiClient.getNormalizedTimestamp();
                    async.series([
                        next => putObject(next),
                        next => putObject(next),
                    ], next);
                },
                next => {
                    waitUntilNextInterval();
                    secondInterval = UtapiClient.getNormalizedTimestamp();
                    async.series([
                        next => putObject(next),
                        next => putObject(next),
                        next => deleteObject(next),
                    ], next);
                },
            ], done);
        });

        it('should maintain data points', done => {
            async.series([
                next => {
                    const params = {
                        timeRange: [firstInterval, secondInterval - 1],
                        resource: {
                            type: 'buckets',
                            buckets: ['my-bucket'],
                        },
                    };
                    makeUtapiClientRequest(params, (err, response) => {
                        assert.ifError(err);
                        const expected = {
                            storageUtilized: [20, 20],
                            numberOfObjects: [2, 2],
                            incomingBytes: 20,
                        };
                        checkMetricResponse(response, expected);
                        return next();
                    });
                },
                next => {
                    const seconds = (5 * 1000) - 1;
                    const params = {
                        timeRange: [secondInterval, secondInterval + seconds],
                        resource: {
                            type: 'buckets',
                            buckets: ['my-bucket'],
                        },
                    };
                    makeUtapiClientRequest(params, (err, response) => {
                        assert.ifError(err);
                        const expected = {
                            storageUtilized: [30, 30],
                            numberOfObjects: [3, 3],
                            incomingBytes: 20,
                        };
                        checkMetricResponse(response, expected);
                        return next();
                    });
                },
            ], done);
        });
    });

    describe('Metrics return to same values', () => {
        beforeEach(done => {
            async.series([
                next => {
                    waitUntilNextInterval();
                    firstInterval = UtapiClient.getNormalizedTimestamp();
                    async.series([
                        next => putObject(next),
                        next => putObject(next),
                    ], next);
                },
                next => {
                    waitUntilNextInterval();
                    secondInterval = UtapiClient.getNormalizedTimestamp();
                    async.series([
                        next => putObject(next),
                        next => deleteObject(next),
                    ], next);
                },
            ], done);
        });

        it('should maintain data points', done => {
            async.series([
                next => {
                    const params = {
                        timeRange: [firstInterval, secondInterval - 1],
                        resource: {
                            type: 'buckets',
                            buckets: ['my-bucket'],
                        },
                    };
                    makeUtapiClientRequest(params, (err, response) => {
                        assert.ifError(err);
                        const expected = {
                            storageUtilized: [20, 20],
                            numberOfObjects: [2, 2],
                            incomingBytes: 20,
                        };
                        checkMetricResponse(response, expected);
                        return next();
                    });
                },
                next => {
                    const seconds = (5 * 1000) - 1;
                    const params = {
                        timeRange: [secondInterval, secondInterval + seconds],
                        resource: {
                            type: 'buckets',
                            buckets: ['my-bucket'],
                        },
                    };
                    makeUtapiClientRequest(params, (err, response) => {
                        assert.ifError(err);
                        const expected = {
                            storageUtilized: [20, 20],
                            numberOfObjects: [2, 2],
                            incomingBytes: 10,
                        };
                        checkMetricResponse(response, expected);
                        return next();
                    });
                },
            ], done);
        });
    });
});

View File

@@ -1,6 +1,6 @@
 const assert = require('assert');
-const { makeUtapiClientRequest } = require('../utils/utils');
-const Vault = require('../utils/mock/Vault');
+const { makeUtapiClientRequest } = require('../../utils/utils');
+const Vault = require('../../utils/mock/Vault');
 
 const MAX_RANGE_MS = (((1000 * 60) * 60) * 24) * 30; // One month.

View File

@@ -4,6 +4,7 @@ const Datastore = require('../../lib/Datastore');
 const MemoryBackend = require('../../lib/backend/Memory');
 const UtapiClient = require('../../lib/UtapiClient');
 const { getNormalizedTimestamp } = require('../utils/utils');
+const member = require('../../utils/member');
 
 const memoryBackend = new MemoryBackend();
 const ds = new Datastore();
@@ -21,6 +22,18 @@ const config = {
     component: 's3',
 };
 
+function isSortedSetKey(key) {
+    return key.endsWith('storageUtilized') || key.endsWith('numberOfObjects');
+}
+
+function deserializeMemoryBackend(data) {
+    Object.keys(data).forEach(key => {
+        if (isSortedSetKey(key)) {
+            data[key][0][1] = member.deserialize(data[key][0][1]); // eslint-disable-line
+        }
+    });
+}
+
 // Build prefix values to construct the expected Redis schema keys
 function getPrefixValues(timestamp) {
     return [
@@ -95,6 +108,7 @@ function testMetric(metric, params, expected, cb) {
     const c = new UtapiClient(config);
     c.setDataStore(ds);
     c.pushMetric(metric, REQUID, params, () => {
+        deserializeMemoryBackend(memoryBackend.data);
         assert.deepStrictEqual(memoryBackend.data, expected);
         return cb();
     });

utils/member.js (11 changes, Normal file)
View File

@@ -0,0 +1,11 @@
const uuid = require('uuid/v4');

function serialize(value) {
    return `${value}:${uuid()}`;
}

function deserialize(value) {
    return value.split(':')[0];
}

module.exports = { serialize, deserialize };
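
The reason for this helper, inferred from the commit message "Append UUID to sorted set members" and the zadd changes above: Redis sorted sets keep only one entry per distinct member, so storing the raw counter value as the member would collapse data points that happen to share the same value. serialize() appends a UUID so every data point stays distinct, and deserialize() recovers the original value (as a string). A small usage sketch, not part of the commit:

    // Illustration only; the require path is hypothetical.
    const member = require('./utils/member');

    const stored = member.serialize(42);      // e.g. '42:9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
    const value = member.deserialize(stored); // '42' (a string; callers parse it as needed)
    // Two pushes of the same counter value now yield two distinct sorted-set
    // members instead of overwriting each other in ZADD.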

View File

@@ -4,6 +4,9 @@
  * @return {boolean} - validation result
  */
 function validateTimeRange(timeRange) {
+    if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
+        return true;
+    }
     if (Array.isArray(timeRange) && timeRange.length > 0 && timeRange.length < 3
         && timeRange.every(item => typeof item === 'number')) {
         // check for start time