hashicorp/memberlist & stupid gossip simulation tools

master
Vitaliy Filippov 2024-04-19 13:07:27 +03:00
parent 7e11ac2477
commit 88a7423453
2 changed files with 273 additions and 0 deletions

119
model_simple.js Normal file
View File

@ -0,0 +1,119 @@
#!/usr/bin/nodejs
// "Stupid" gossip algorithm simulation tool
// (unbounded fanout: every node re-gossips everything it knows each tick).
// Tunable simulation parameters:
const gossip = 4; // how many nodes to gossip with every tick
const msgcap = 9; // how many nodes to gossip about every tick
const total = 1000; // total nodes
const total_updated = 1000; // total nodes to update if testing update. if 0 then test joining
const initial_nodes = 5; // initial nodes in sync to test joining (when total_updated == 0)
// Run one simulation of the "stupid" (unbounded-retransmit) gossip protocol.
// Every tick, every node picks <gossip> random known peers and tells each of
// them about <msgcap> randomly chosen known nodes. Depending on the module
// constants this models either:
//  - metadata update propagation (total_updated > 0), or
//  - cluster join (total_updated == 0): <total - initial_nodes> nodes
//    joining a seed cluster of <initial_nodes> nodes.
// Prints per-tick progress and the total number of gossip messages sent.
function test()
{
    let messages_sent = 0;
    let tick = 1;
    const known = {};   // { node: { other_node: meta_version } }
    const lists = {};   // { node: [ peers known to node ] } - for random sampling
    const listsv2 = {}; // { node: [ peers known at meta version 2 ] }
    for (let i = 1; i <= total; i++)
    {
        known[i] = {};
        lists[i] = [];
        // Update mode: everyone already knows everyone (at version 1).
        // Join mode: everyone initially knows only the seed nodes.
        for (let j = 1; j <= (total_updated ? total : initial_nodes); j++)
        {
            known[i][j] = 1; // meta version 1
            lists[i].push(j);
        }
        listsv2[i] = [];
    }
    let cmp_lists; // per-node progress lists used to detect convergence
    let cmp_n;     // target length of cmp_lists[i] when node i is in sync
    if (total_updated)
    {
        // We want to update <total_updated> nodes metadata to version 2
        for (let i = 1; i <= total_updated; i++)
        {
            known[i][i] = 2;
            listsv2[i].push(i);
        }
        cmp_lists = listsv2;
        cmp_n = total_updated;
    }
    else
    {
        // We want <total-initial_nodes> to join <initial_nodes>: the seed
        // nodes start out knowing every joiner. Note: a former guard that
        // re-checked known[i][i] here was dead code (the init loop above
        // already sets it for every seed node) and has been removed.
        for (let i = 1; i <= initial_nodes; i++)
        {
            for (let alive = initial_nodes+1; alive <= total; alive++)
            {
                if (!known[i][alive])
                {
                    known[i][alive] = 1; // numeric version (was `true`) for consistency
                    lists[i].push(alive);
                }
            }
        }
        cmp_lists = lists;
        cmp_n = total;
    }
    let in_sync = 0;
    for (let i = 1; i <= total; i++)
    {
        if (cmp_lists[i].length == cmp_n)
        {
            in_sync++;
        }
    }
    let avg_known = 0;
    while (in_sync < total)
    {
        console.log('tick '+tick+': '+in_sync+' in sync, avg '+avg_known);
        for (let i = 1; i <= total; i++)
        {
            const known_i = lists[i];
            // Pick <gossip> random targets; duplicates and self-sends are
            // allowed - that inefficiency is part of the "stupid" protocol.
            const send_to = [];
            for (let j = 0; j < gossip; j++)
            {
                send_to.push(known_i[0|(Math.random()*known_i.length)]);
            }
            const send_what = [];
            for (let j = 0; j < msgcap; j++)
            {
                // FIXME: Exclude duplicates, exclude <send_to>
                send_what.push(known_i[0|(Math.random()*known_i.length)]);
            }
            for (const alive of send_what)
            {
                for (const to of send_to)
                {
                    // Apply the gossiped entry only if it is news to <to>
                    // (unknown node, or a strictly newer meta version).
                    if (!known[to][alive] || known[i][alive] > known[to][alive])
                    {
                        known[to][alive] = known[i][alive];
                        cmp_lists[to].push(alive);
                        if (cmp_lists[to].length == cmp_n)
                        {
                            console.log('node '+to+': tick '+tick);
                            in_sync++;
                        }
                    }
                }
            }
            messages_sent += send_what.length*send_to.length;
        }
        avg_known = 0;
        for (let i = 1; i <= total; i++)
        {
            avg_known += cmp_lists[i].length;
        }
        avg_known /= total;
        tick++;
    }
    console.log('tick '+tick+': '+in_sync+' in sync, avg '+avg_known);
    console.log(messages_sent+' messages sent');
}
test();

154
model_update.js Normal file
View File

@ -0,0 +1,154 @@
#!/usr/bin/nodejs
// https://github.com/hashicorp/memberlist simulation tool
// Tunable simulation parameters:
const gossip = 4; // how many nodes to gossip with every tick
const msgcap = 9; // how many "alive" messages fits in a single packet (meta size/UDP packet size in memberlist)
const max_ticks = 100000; // execution limit
const max_queue = 1024; // queue size limit (per-node retransmit queue capacity)
const total = 1000; // total nodes
const retransmit = 18; // by default log(total)*4 in memberlist
const total_updated = 1000; // total nodes to update if testing update. if 0 then test joining
const initial_nodes = 5; // initial nodes in sync to test joining (when total_updated == 0)
// Bounded retransmit queue modelling memberlist's TransmitLimitedQueue:
// each pushed item is handed out by shift() up to <retransmit> times before
// being dropped, and the queue refuses new items once <maxlen> are held.
class LimQ
{
    /**
     * @param {number} retransmit - how many times each item is handed out before being dropped
     * @param {number} maxlen - maximum number of items held in the queue
     */
    constructor(retransmit, maxlen)
    {
        // buckets[k] holds items with k retransmissions remaining after the
        // current one; new items enter the last bucket and migrate toward 0.
        this.buckets = [];
        for (let i = 0; i < retransmit; i++)
        {
            this.buckets.push([]);
        }
        this.len = 0;
        this.maxlen = maxlen;
    }
    // Enqueue <item> for <retransmit> transmissions; silently dropped when full.
    push(item)
    {
        if (this.len >= this.maxlen)
            return;
        // FIX: this.len was never incremented, so the maxlen limit could
        // never trigger and shift() drove len negative over time.
        this.len++;
        const b = this.buckets[this.buckets.length-1];
        b.push(item);
    }
    // Dequeue up to <n> items, preferring the least-transmitted (newest) ones.
    // Each returned item moves one bucket down; items leaving bucket 0 are
    // dropped for good (their retransmit budget is exhausted).
    shift(n)
    {
        let items = [];
        let move = [];
        for (let i = this.buckets.length-1; i >= 0 && items.length < n; i--)
        {
            const rm = this.buckets[i].splice(0, n-items.length);
            items.push.apply(items, rm);
            if (i > 0)
                for (const e of rm)
                    move.push([ e, i-1 ]);
            else
                this.len -= rm.length; // fully retransmitted - remove from the count
        }
        for (const e of move)
        {
            this.buckets[e[1]].push(e[0]);
        }
        return items;
    }
}
// Simulate gossip dissemination the way hashicorp/memberlist does it:
// each tick every node gossips with <gossip> random peers, sending up to
// <msgcap> queued "alive" entries per packet; every entry is retransmitted
// at most <retransmit> times via a per-node LimQ. Models either a metadata
// update (total_updated > 0) or a cluster join (total_updated == 0).
// Prints per-tick progress and the total number of entries sent.
function test()
{
    let tick = 0;
    let totalSent = 0;
    const queues = {};    // per-node LimQ of entries pending retransmission
    const versions = {};  // { node: { other_node: meta_version } }
    const peerLists = {}; // { node: [ peers known to node ] } - for random sampling
    const v2Lists = {};   // { node: [ peers known at meta version 2 ] }
    for (let node = 1; node <= total; node++) {
        versions[node] = {};
        peerLists[node] = [];
        const preKnown = total_updated ? total : initial_nodes;
        for (let peer = 1; peer <= preKnown; peer++) {
            versions[node][peer] = 1; // meta version 1
            peerLists[node].push(peer);
        }
        v2Lists[node] = [];
        queues[node] = new LimQ(retransmit, max_queue);
    }
    // progressLists[n].length reaching goalLen means node n is in sync.
    let progressLists;
    let goalLen;
    if (total_updated) {
        // Update scenario: bump <total_updated> nodes' metadata to version 2.
        for (let node = 1; node <= total_updated; node++) {
            versions[node][node] = 2;
            v2Lists[node].push(node);
            queues[node].push(node);
        }
        progressLists = v2Lists;
        goalLen = total_updated;
    } else {
        // Join scenario: the seed nodes learn about every joining node.
        for (let seed = 1; seed <= initial_nodes; seed++) {
            for (let joiner = initial_nodes+1; joiner <= total; joiner++) {
                versions[seed][joiner] = 1;
                peerLists[seed].push(joiner);
                queues[seed].push(joiner);
            }
        }
        progressLists = peerLists;
        goalLen = total;
    }
    let syncedCount = 0;
    for (let node = 1; node <= total; node++) {
        if (progressLists[node].length === goalLen)
            syncedCount++;
    }
    let avgKnown = 0;
    while (syncedCount < total && tick < max_ticks) {
        console.log('tick '+tick+': '+syncedCount+' in sync, avg '+avgKnown);
        for (let src = 1; src <= total; src++) {
            const myPeers = peerLists[src];
            for (let g = 0; g < gossip; g++) {
                // One random gossip partner, one packet of queued entries.
                const dst = myPeers[Math.floor(Math.random()*myPeers.length)];
                const packet = queues[src].shift(msgcap);
                totalSent += packet.length;
                for (const id of packet) {
                    // Skip entries the destination already knows at the
                    // same or a newer meta version.
                    if (versions[dst][id] && versions[src][id] <= versions[dst][id])
                        continue;
                    versions[dst][id] = versions[src][id];
                    progressLists[dst].push(id);
                    queues[dst].push(id); // re-queue for further retransmission
                    if (progressLists[dst].length === goalLen) {
                        console.log('node '+dst+': synced at tick '+tick);
                        syncedCount++;
                    }
                }
            }
        }
        let sum = 0;
        for (let node = 1; node <= total; node++)
            sum += progressLists[node].length;
        avgKnown = sum / total;
        tick++;
    }
    console.log('tick '+tick+': '+syncedCount+' in sync, avg '+avgKnown);
    console.log(totalSent+' messages sent');
}
test();