Compare commits

...

2 Commits

Author SHA1 Message Date
bbuchanan9 f0c38e7b3c bugfix: S3C-2317 Parallelize tests 2019-07-18 17:11:42 -07:00
bbuchanan9 145ee8680d bugfix: S3C-2317 Append UUID to sorted set members 2019-07-18 12:54:34 -07:00
14 changed files with 359 additions and 56 deletions

View File

@@ -5,6 +5,23 @@ branches:
default:
stage: pre-merge
models:
- Git: &clone
name: Pull repo
repourl: '%(prop:git_reference)s'
shallow: True
retryFetch: True
haltOnFailure: True
- Workspace: &workspace
type: kube_pod
path: eve/workers/pod.yml
images:
aggressor: eve/workers/unit_and_feature_tests
- Install: &install
name: install node modules
command: npm install
haltOnFailure: True
stages:
pre-merge:
worker:
@@ -13,33 +30,60 @@ stages:
- TriggerStages:
name: trigger all the tests
stage_names:
- run-tests
run-tests:
worker: &workspace
type: kube_pod
path: eve/workers/pod.yml
images:
aggressor: eve/workers/unit_and_feature_tests
- linting-coverage
- run-unit-tests
- run-client-tests
- run-server-tests
- run-cron-tests
- run-interval-tests
linting-coverage:
worker: *workspace
steps:
- Git: &git
name: fetch source
repourl: '%(prop:git_reference)s'
shallow: True
retryFetch: True
haltOnFailure: True
- ShellCommand:
name: npm install
command: npm install
haltOnFailure: True
- Git: *clone
- ShellCommand: *install
- ShellCommand:
name: run static analysis tools on markdown
command: npm run lint_md
- ShellCommand:
name: run static analysis tools on code
command: npm run lint
run-unit-tests:
worker: *workspace
steps:
- Git: *clone
- ShellCommand: *install
- ShellCommand:
name: run unit tests
command: npm test
run-client-tests:
worker: *workspace
steps:
- Git: *clone
- ShellCommand: *install
- ShellCommand:
name: run feature tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash ft_test
name: run client tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:client
run-server-tests:
worker: *workspace
steps:
- Git: *clone
- ShellCommand: *install
- ShellCommand:
name: run server tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:server
run-cron-tests:
worker: *workspace
steps:
- Git: *clone
- ShellCommand: *install
- ShellCommand:
name: run cron tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:cron
run-interval-tests:
worker: *workspace
steps:
- Git: *clone
- ShellCommand: *install
- ShellCommand:
name: run interval tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:interval

eve/workers/unit_and_feature_tests/run_ft_tests.bash (4 changed lines) Normal file → Executable file
View File

@@ -13,5 +13,5 @@ killandsleep () {
sleep 10
}
npm start & bash tests/utils/wait_for_local_port.bash $PORT 40
npm run $1
UTAPI_INTERVAL_TEST_MODE=$1 npm start & bash tests/utils/wait_for_local_port.bash $PORT 40
UTAPI_INTERVAL_TEST_MODE=$1 npm run $2

View File

@@ -144,6 +144,9 @@ class ListMetrics {
res.push(last);
const d = new Date(last);
last = d.setMinutes(d.getMinutes() + 15);
if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
last = d.setSeconds(d.getSeconds() + 5);
}
}
res.push(end);
return res;

View File

@@ -5,6 +5,7 @@ const Datastore = require('./Datastore');
const { generateKey, generateCounter, generateStateKey } = require('./schema');
const { errors } = require('arsenal');
const redisClient = require('../utils/redisClient');
const member = require('../utils/member');
const methods = {
createBucket: { method: '_genericPushMetric', changesData: true },
@@ -126,6 +127,10 @@ class UtapiClient {
static getNormalizedTimestamp() {
const d = new Date();
const minutes = d.getMinutes();
if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
const seconds = d.getSeconds();
return d.setSeconds((seconds - seconds % 5), 0, 0);
}
return d.setMinutes((minutes - minutes % 15), 0, 0);
}
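For orientation, getNormalizedTimestamp (changed in the hunk above) floors the current time to the start of its reporting interval: 15 minutes in normal operation, or 5 seconds when UTAPI_INTERVAL_TEST_MODE is 'true', which is what lets the functional tests further down cross interval boundaries in seconds rather than minutes. A minimal standalone sketch of that arithmetic (not the UtapiClient code itself; the function name is illustrative):

    // Floors a Date to the start of its reporting interval and returns the
    // resulting epoch-millisecond timestamp: 15-minute intervals normally,
    // 5-second intervals in interval test mode.
    function normalizeTimestamp(d, intervalTestMode) {
        if (intervalTestMode) {
            const seconds = d.getSeconds();
            return d.setSeconds(seconds - (seconds % 5), 0);
        }
        const minutes = d.getMinutes();
        return d.setMinutes(minutes - (minutes % 15), 0, 0);
    }

    // For example, 12:47:23.500 becomes 12:45:00.000 normally,
    // or 12:47:20.000 in interval test mode.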
@@ -513,7 +518,7 @@ class UtapiClient {
const key = generateStateKey(p, 'numberOfObjects');
cmds2.push(
['zremrangebyscore', key, timestamp, timestamp],
['zadd', key, timestamp, actionCounter]);
['zadd', key, timestamp, member.serialize(actionCounter)]);
return true;
});
if (noErr) {
@@ -593,7 +598,7 @@ class UtapiClient {
['zremrangebyscore', generateStateKey(p, 'storageUtilized'),
timestamp, timestamp],
['zadd', generateStateKey(p, 'storageUtilized'),
timestamp, actionCounter]
timestamp, member.serialize(actionCounter)]
);
return true;
});
@@ -667,7 +672,7 @@ class UtapiClient {
}
key = generateStateKey(p, 'numberOfObjects');
cmds2.push(['zremrangebyscore', key, timestamp, timestamp],
['zadd', key, timestamp, actionCounter]);
['zadd', key, timestamp, member.serialize(actionCounter)]);
return true;
});
if (noErr) {
@@ -779,7 +784,7 @@ class UtapiClient {
timestamp, timestamp],
['zadd',
generateStateKey(p, 'storageUtilized'), timestamp,
actionCounter]);
member.serialize(actionCounter)]);
// The 'abortMultipartUpload' action does not affect number of
// objects, so we return here.
if (action === 'abortMultipartUpload') {
@@ -809,7 +814,7 @@ class UtapiClient {
generateStateKey(p, 'numberOfObjects'), timestamp,
timestamp],
['zadd', generateStateKey(p, 'numberOfObjects'), timestamp,
actionCounter]);
member.serialize(actionCounter)]);
return true;
});
if (noErr) {
@@ -941,7 +946,7 @@ class UtapiClient {
generateStateKey(p, 'storageUtilized'),
timestamp, timestamp],
['zadd', generateStateKey(p, 'storageUtilized'),
timestamp, actionCounter]);
timestamp, member.serialize(actionCounter)]);
// number of objects counter
objectsIndex = (i * (cmdsLen / paramsArrLen)) + 1;
@@ -967,7 +972,7 @@ class UtapiClient {
generateStateKey(p, 'numberOfObjects'),
timestamp, timestamp],
['zadd', generateStateKey(p, 'numberOfObjects'),
timestamp, actionCounter]);
timestamp, member.serialize(actionCounter)]);
return true;
});
if (noErr) {
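The member.serialize wrapping applied to every zadd above is the substance of the "Append UUID to sorted set members" commit: a Redis sorted set keeps at most one entry per member, so writing the same counter value again at a later timestamp would only move the existing entry to the new score instead of recording a second data point. A rough illustration, assuming an ioredis client and the utils/member helper added later in this diff (the key names are invented for the example):

    const Redis = require('ioredis');
    const member = require('./utils/member');

    async function illustrate() {
        const redis = new Redis();

        // Plain members: the second zadd only updates the score, so one of
        // the two data points is lost.
        await redis.zadd('demo:numberOfObjects', 1000, '2');
        await redis.zadd('demo:numberOfObjects', 2000, '2');
        // await redis.zcard('demo:numberOfObjects') === 1

        // UUID-suffixed members stay distinct, so both data points survive.
        await redis.zadd('demo:storageUtilized', 1000, member.serialize(2));
        await redis.zadd('demo:storageUtilized', 2000, member.serialize(2));
        // await redis.zcard('demo:storageUtilized') === 2

        redis.disconnect();
    }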

package-lock.json (24 changed lines, generated)
View File

@@ -186,11 +186,11 @@
}
},
"async": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/async/-/async-2.6.2.tgz",
"integrity": "sha512-H1qVYh1MYhEEFLsP97cVKqCGo7KfCyTt6uEWqsTBr9SO84oK9Uwbyd/yCW+6rKJLHksBNUVWZDAjfS+Ccx0Bbg==",
"version": "2.6.3",
"resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz",
"integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==",
"requires": {
"lodash": "^4.17.11"
"lodash": "^4.17.14"
}
},
"aws4": {
@@ -1324,9 +1324,9 @@
}
},
"ioredis": {
"version": "4.11.1",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.1.tgz",
"integrity": "sha512-Qnp7ecb3WeaL7ojeSlb0UBRXmsRMMFcjM+PaAcap8FLLf1NznRD6x96/PS2DEqoRfdM9WVffAjIIYuUp+q3zEw==",
"version": "4.11.2",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.2.tgz",
"integrity": "sha512-2lQM1jpTzoTPg9Lj+ayfcIn8rdvGwgNP/nzeJ7IW4va/ExqxttRnTlUaxa8jOsj/e4NYKtVkbuhEAvOVPwjztA==",
"requires": {
"cluster-key-slot": "^1.0.6",
"debug": "^4.1.1",
@@ -1676,9 +1676,9 @@
}
},
"lodash": {
"version": "4.17.11",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz",
"integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg=="
"version": "4.17.14",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.14.tgz",
"integrity": "sha512-mmKYbW3GLuJeX+iGP+Y7Gp1AiGHGbXHCOh/jZmrawMmsE7MS4znI3RL2FsjbqOyMayHInjOeykW7PEajUk1/xw=="
},
"lodash._baseassign": {
"version": "3.2.0",
@@ -2809,8 +2809,8 @@
"dev": true
},
"vaultclient": {
"version": "github:scality/vaultclient#90762d2293ffea1703433dc619fb3a1e7e389886",
"from": "github:scality/vaultclient#90762d2",
"version": "github:scality/vaultclient#3eaaff280e097e012a1c49ffcf52e816304f25ed",
"from": "github:scality/vaultclient#3eaaff2",
"requires": {
"arsenal": "github:scality/Arsenal#9f2e74e",
"commander": "2.20.0",

View File

@@ -35,6 +35,10 @@
},
"scripts": {
"ft_test": "mocha --recursive tests/functional",
"ft_test:client": "mocha --recursive tests/functional/client",
"ft_test:cron": "mocha --recursive tests/functional/cron",
"ft_test:interval": "mocha --recursive tests/functional/interval",
"ft_test:server": "mocha --recursive tests/functional/server",
"lint": "eslint $(git ls-files '*.js')",
"lint_md": "mdlint $(git ls-files '*.md')",
"start": "node server.js",

View File

@@ -1,11 +1,11 @@
const assert = require('assert');
const { map, series, waterfall, each } = require('async');
const UtapiClient = require('../../lib/UtapiClient');
const Datastore = require('../../lib/Datastore');
const redisClient = require('../../utils/redisClient');
const UtapiClient = require('../../../lib/UtapiClient');
const Datastore = require('../../../lib/Datastore');
const redisClient = require('../../../utils/redisClient');
const { Logger } = require('werelogs');
const { getCounters, getMetricFromKey,
getStateKeys, getKeys } = require('../../lib/schema');
getStateKeys, getKeys } = require('../../../lib/schema');
const log = new Logger('TestUtapiClient');
const redis = redisClient({
host: '127.0.0.1',

View File

@@ -4,10 +4,10 @@ const async = require('async');
const { constants } = require('arsenal');
const UtapiReindex = require('../../lib/UtapiReindex');
const redisClient = require('../../utils/redisClient');
const mock = require('../utils/mock');
const utils = require('../utils/utils');
const UtapiReindex = require('../../../lib/UtapiReindex');
const redisClient = require('../../../utils/redisClient');
const mock = require('../../utils/mock');
const utils = require('../../utils/utils');
const REINDEX_LOCK_KEY = 's3:utapireindex:lock';

View File

@@ -1,12 +1,12 @@
const assert = require('assert');
const async = require('async');
const { Logger } = require('werelogs');
const UtapiReplay = require('../../lib/UtapiReplay');
const UtapiClient = require('../../lib/UtapiClient');
const Datastore = require('../../lib/Datastore');
const redisClient = require('../../utils/redisClient');
const { getAllResourceTypeKeys } = require('../utils/utils');
const safeJsonParse = require('../../utils/safeJsonParse');
const UtapiReplay = require('../../../lib/UtapiReplay');
const UtapiClient = require('../../../lib/UtapiClient');
const Datastore = require('../../../lib/Datastore');
const redisClient = require('../../../utils/redisClient');
const { getAllResourceTypeKeys } = require('../../utils/utils');
const safeJsonParse = require('../../../utils/safeJsonParse');
const log = new Logger('UTAPIReplayTest');
const localCache = redisClient({

View File

@@ -0,0 +1,219 @@
const assert = require('assert');
const async = require('async');
const uuid = require('uuid/v4');
const UtapiClient = require('../../../lib/UtapiClient');
const mock = require('../../utils/mock');
const { makeUtapiClientRequest } = require('../../utils/utils');
const redisClient = require('../../../utils/redisClient');
describe('UtapiClient: Across time intervals', function test() {
this.timeout((1000 * 60) * 2);
const redis = redisClient({
host: '127.0.0.1',
port: 6379,
}, mock.log);
const utapi = new UtapiClient({
redis: {
host: '127.0.0.1',
port: 6379,
},
localCache: {
host: '127.0.0.1',
port: 6379,
},
component: 's3',
});
function checkMetricResponse(response, expected) {
const data = JSON.parse(response);
if (data.code) {
assert.ifError(data.message);
}
const { storageUtilized, numberOfObjects, incomingBytes } = data[0];
assert.deepStrictEqual(storageUtilized, expected.storageUtilized);
assert.deepStrictEqual(numberOfObjects, expected.numberOfObjects);
assert.strictEqual(incomingBytes, expected.incomingBytes);
}
function waitUntilNextInterval() {
const start = UtapiClient.getNormalizedTimestamp();
while (start === UtapiClient.getNormalizedTimestamp()) {
setTimeout(() => {}, 500);
}
}
const vault = new mock.Vault();
before(() => {
vault.start();
});
after(() => {
vault.end();
});
afterEach(() => redis.flushdb());
function putObject(cb) {
const params = {
level: 'buckets',
service: 's3',
bucket: 'my-bucket',
newByteLength: 10,
oldByteLength: null,
};
utapi.pushMetric('putObject', uuid(), params, cb);
}
function deleteObject(cb) {
const params = {
level: 'buckets',
service: 's3',
bucket: 'my-bucket',
byteLength: 10,
numberOfObjects: 1,
};
utapi.pushMetric('deleteObject', uuid(), params, cb);
}
let firstInterval;
let secondInterval;
describe('Metrics do not return to same values', () => {
beforeEach(done => {
async.series([
next => {
waitUntilNextInterval();
firstInterval = UtapiClient.getNormalizedTimestamp();
async.series([
next => putObject(next),
next => putObject(next),
], next);
},
next => {
waitUntilNextInterval();
secondInterval = UtapiClient.getNormalizedTimestamp();
async.series([
next => putObject(next),
next => putObject(next),
next => deleteObject(next),
], next);
},
], done);
});
it('should maintain data points', done => {
async.series([
next => {
const params = {
timeRange: [firstInterval, secondInterval - 1],
resource: {
type: 'buckets',
buckets: ['my-bucket'],
},
};
makeUtapiClientRequest(params, (err, response) => {
assert.ifError(err);
const expected = {
storageUtilized: [20, 20],
numberOfObjects: [2, 2],
incomingBytes: 20,
};
checkMetricResponse(response, expected);
return next();
});
},
next => {
const seconds = (5 * 1000) - 1;
const params = {
timeRange: [secondInterval, secondInterval + seconds],
resource: {
type: 'buckets',
buckets: ['my-bucket'],
},
};
makeUtapiClientRequest(params, (err, response) => {
assert.ifError(err);
const expected = {
storageUtilized: [30, 30],
numberOfObjects: [3, 3],
incomingBytes: 20,
};
checkMetricResponse(response, expected);
return next();
});
},
], done);
});
});
describe('Metrics return to same values', () => {
beforeEach(done => {
async.series([
next => {
waitUntilNextInterval();
firstInterval = UtapiClient.getNormalizedTimestamp();
async.series([
next => putObject(next),
next => putObject(next),
], next);
},
next => {
waitUntilNextInterval();
secondInterval = UtapiClient.getNormalizedTimestamp();
async.series([
next => putObject(next),
next => deleteObject(next),
], next);
},
], done);
});
it('should maintain data points', done => {
async.series([
next => {
const params = {
timeRange: [firstInterval, secondInterval - 1],
resource: {
type: 'buckets',
buckets: ['my-bucket'],
},
};
makeUtapiClientRequest(params, (err, response) => {
assert.ifError(err);
const expected = {
storageUtilized: [20, 20],
numberOfObjects: [2, 2],
incomingBytes: 20,
};
checkMetricResponse(response, expected);
return next();
});
},
next => {
const seconds = (5 * 1000) - 1;
const params = {
timeRange: [secondInterval, secondInterval + seconds],
resource: {
type: 'buckets',
buckets: ['my-bucket'],
},
};
makeUtapiClientRequest(params, (err, response) => {
assert.ifError(err);
const expected = {
storageUtilized: [20, 20],
numberOfObjects: [2, 2],
incomingBytes: 10,
};
checkMetricResponse(response, expected);
return next();
});
},
], done);
});
});
});
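The expected values in both scenarios above follow from simple accounting, since each putObject pushes a 10-byte object and each deleteObject removes one 10-byte object:

    // Scenario 1, first interval:  2 puts            -> storageUtilized 20, numberOfObjects 2, incomingBytes 20
    // Scenario 1, second interval: 2 puts + 1 delete -> storageUtilized 40 - 10 = 30,
    //                                                   numberOfObjects 4 - 1 = 3, incomingBytes 20
    // Scenario 2, second interval: 1 put + 1 delete  -> back to storageUtilized 20 and
    //                                                   numberOfObjects 2, incomingBytes 10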

View File

@@ -1,6 +1,6 @@
const assert = require('assert');
const { makeUtapiClientRequest } = require('../utils/utils');
const Vault = require('../utils/mock/Vault');
const { makeUtapiClientRequest } = require('../../utils/utils');
const Vault = require('../../utils/mock/Vault');
const MAX_RANGE_MS = (((1000 * 60) * 60) * 24) * 30; // One month.

View File

@@ -4,6 +4,7 @@ const Datastore = require('../../lib/Datastore');
const MemoryBackend = require('../../lib/backend/Memory');
const UtapiClient = require('../../lib/UtapiClient');
const { getNormalizedTimestamp } = require('../utils/utils');
const member = require('../../utils/member');
const memoryBackend = new MemoryBackend();
const ds = new Datastore();
@@ -21,6 +22,18 @@ const config = {
component: 's3',
};
function isSortedSetKey(key) {
return key.endsWith('storageUtilized') || key.endsWith('numberOfObjects');
}
function deserializeMemoryBackend(data) {
Object.keys(data).forEach(key => {
if (isSortedSetKey(key)) {
data[key][0][1] = member.deserialize(data[key][0][1]); // eslint-disable-line
}
});
}
// Build prefix values to construct the expected Redis schema keys
function getPrefixValues(timestamp) {
return [
@@ -95,6 +108,7 @@ function testMetric(metric, params, expected, cb) {
const c = new UtapiClient(config);
c.setDataStore(ds);
c.pushMetric(metric, REQUID, params, () => {
deserializeMemoryBackend(memoryBackend.data);
assert.deepStrictEqual(memoryBackend.data, expected);
return cb();
});

utils/member.js (11 changed lines) Normal file
View File

@@ -0,0 +1,11 @@
const uuid = require('uuid/v4');
function serialize(value) {
return `${value}:${uuid()}`;
}
function deserialize(value) {
return value.split(':')[0];
}
module.exports = { serialize, deserialize };
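A quick usage sketch of the helper above (illustrative values): serialize appends a random v4 UUID after a colon, and deserialize strips everything from the first colon on, which is safe because a v4 UUID itself contains no colons.

    const member = require('./utils/member');

    const m = member.serialize(42);      // e.g. '42:9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
    const value = member.deserialize(m); // '42' (returned as a string; callers parse it if needed)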

View File

@@ -4,6 +4,9 @@
* @return {boolean} - validation result
*/
function validateTimeRange(timeRange) {
if (process.env.UTAPI_INTERVAL_TEST_MODE === 'true') {
return true;
}
if (Array.isArray(timeRange) && timeRange.length > 0 && timeRange.length < 3
&& timeRange.every(item => typeof item === 'number')) {
// check for start time