Compare commits

...

6 Commits

Author SHA1 Message Date
Jonathan Gramain 24887caeb0 Merge branch 'bugfix/ARSN-398-doNotRefreshGapBuildingIfDisabled' into user/jonathan/S3C-3926 2024-02-14 17:38:16 -08:00
Jonathan Gramain 8cac166a69 bf: ARSN-398 DelimiterMaster: fix when gap building is disabled
- Fix the case where gap building has been disabled by
  `_saveBuildingGap()` but the building gap state was reset anyway.

- Introduce a new state 'Expired' that can be differentiated from
  'Disabled': it makes `getGapBuildingValidityPeriodMs()` return 0
  instead of 'null', to hint to the listing backend that it should
  trigger a new listing.
2024-02-14 17:35:59 -08:00
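
A minimal caller-side sketch (not part of this changeset; the `listing` variable holding a DelimiterMaster instance is assumed) of how a listing backend might interpret the three possible return values of getGapBuildingValidityPeriodMs():

    // null: gap building disabled entirely; 0: exposure window expired;
    // positive: milliseconds left during which gaps may still be built
    const validityMs: number | null = listing.getGapBuildingValidityPeriodMs();
    if (validityMs === null) {
        // Disabled: no gap cache attached, no restart needed on this account
    } else if (validityMs === 0) {
        // Expired: trigger a new listing, resuming from the last listed key,
        // so that gap building becomes possible again
    } else {
        // still within the exposure window: plan a restart in at most validityMs
    }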
Jonathan Gramain 86e50d3ead Merge branch 'feature/ARSN-397-gapCacheClear' into user/jonathan/S3C-3926 2024-02-14 11:40:53 -08:00
Jonathan Gramain 7908654b51 ft: ARSN-397 GapCache.clear()
Add a clear() method that clears both exposed and staging gaps. It
retains invalidating updates, so that gaps inserted after the call to
clear() are still invalidated correctly.
2024-02-14 11:36:28 -08:00
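
A short usage sketch of that behavior (the `gapCache` variable and key names are made up; setGap(), removeOverlappingGaps() and clear() are the GapCache methods shown in the diffs below):

    // a concurrent delete of 'doc-0005' invalidates any future overlapping gap
    gapCache.removeOverlappingGaps(['doc-0005']);
    gapCache.clear();                            // drops exposed and staging gaps...
    gapCache.setGap('doc-0001', 'doc-0009', 8);  // ...but this overlapping gap is
                                                 // still invalidated
    gapCache.setGap('doc-0100', 'doc-0200', 50); // unrelated gap: exposed after
                                                 // exposureDelayMs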
Jonathan Gramain c4c75e976c ARSN-389 DelimiterMaster: v0 format gap skipping
Implement logic in DelimiterMaster to improve the efficiency of listing
buckets in V0 format that contain a lot of current delete markers.

A GapCache instance can be attached to a DelimiterMaster instance,
which enables the following:

- Lookups in the cache, so that the listing can restart directly beyond
  the cached gaps. This is done by returning the FILTER_SKIP code when
  listing inside a gap, which hints to the caller (RepdServer) that it
  may restart the listing from a specific later key.

- Building gaps and caching them when listing within a series of
  current delete markers. This allows future listings to benefit from
  the gap information and skip over those gaps.

An important caveat is that there is a limited time window during which
gaps can be built from the current listing: this is a trade-off to
guarantee the validity of cached gaps while concurrent operations may
invalidate them. The window is configured on the GapCache instance as
`exposureDelayMs`, which is also the time during which concurrent
operations are kept in memory so they can invalidate future gap
creations. Because listings work on a snapshot of the database, the
entries they return are no newer than the moment the listing started.
For this reason, to keep building new gaps consistently, the running
time of listings must be limited, and new listings may have to be
re-issued periodically (based on elapsed time or on the number of
listed keys), resuming from where the previous listing stopped instead
of continuing the current listing.
2024-02-14 10:18:02 -08:00
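
A rough caller-side sketch of the time-bounded listing loop described above, assuming a `listing` (DelimiterMaster) instance with an attached GapCache and a hypothetical `rawEntries` iterator over the database snapshot:

    // bound the running time of this listing pass by the remaining
    // gap-building validity period (null means no bound is needed)
    const validityMs = listing.getGapBuildingValidityPeriodMs();
    const deadline = validityMs === null ? Infinity : Date.now() + validityMs;
    let lastKey;
    for (const entry of rawEntries) {
        listing.filter(entry);          // feed the DelimiterMaster algorithm
        lastKey = entry.key;
        if (Date.now() >= deadline) {
            // re-issue a new listing resuming from lastKey, so that gaps can
            // again be built against a fresher database snapshot
            break;
        }
    }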
Jonathan Gramain 1266a14253 impr: ARSN-389 change contract of skipping() API
Instead of returning a "prefix" for the listing task to skip over,
directly return the key to skip to, from which the listing continues.

This is both more natural and necessary to implement skipping over
cached "gaps" of deleted objects.

Note that it could be even more powerful to also return the type of
query parameter to apply to the next listing ('gt' or 'gte'), but this
would be more complex to implement for little practical benefit.
Instead, a null byte is appended to the end of the returned key
whenever a 'gt' behavior is wanted from the returned 'gte' key.

Also in this commit: clarify the API contract and always return
FILTER_ACCEPT when skipping over low-level listing contents is not
allowed. A good chunk of the history of listing bugs and workarounds
stems from this confusion.
2024-02-14 10:18:02 -08:00
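
A small caller-side sketch of the new skipping() contract (the SKIP_NONE sentinel and the shape of the next-listing params are assumptions for illustration):

    const skipTo = listing.skipping();
    if (skipTo !== SKIP_NONE) {
        // resume the listing with a 'gte' query on the returned key; a key
        // ending in a null byte (e.g. 'foo/\x00') effectively behaves as 'gt foo/'
        nextListingParams = { ...nextListingParams, gte: skipTo };
    }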
4 changed files with 88 additions and 8 deletions


@@ -346,4 +346,18 @@ export default class GapCache implements GapCacheInterface {
     toArray(): GapSetEntry[] {
         return this._exposedGaps.toArray();
     }
+
+    /**
+     * Clear all exposed and staging gaps from the cache.
+     *
+     * Note: retains invalidating updates from removeOverlappingGaps()
+     * for correctness of gaps inserted afterwards.
+     *
+     * @return {undefined}
+     */
+    clear(): void {
+        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
+        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
+        this._exposedGaps = new GapSet(this.maxGapWeight);
+    }
 }


@@ -73,13 +73,14 @@ type GapCachingInfo = GapCachingInfo_NoGapCache
 export const enum GapBuildingState {
-    Disabled = 0, // no gap cache or not allowed to build due to exposure delay timeout
+    Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
     NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
     Building = 2, // currently building a gap (i.e. listing within a gap)
+    Expired = 3, // not allowed to build due to exposure delay timeout
 };
 
-type GapBuildingInfo_Disabled = {
-    state: GapBuildingState.Disabled;
+type GapBuildingInfo_NothingToBuild = {
+    state: GapBuildingState.Disabled | GapBuildingState.Expired;
 };
 
 type GapBuildingParams = {
@@ -118,7 +119,7 @@ type GapBuildingInfo_Building = {
     gapWeight: number;
 };
 
-type GapBuildingInfo = GapBuildingInfo_Disabled
+type GapBuildingInfo = GapBuildingInfo_NothingToBuild
     | GapBuildingInfo_NotBuilding
     | GapBuildingInfo_Building;
@@ -214,6 +215,8 @@ export class DelimiterMaster extends Delimiter {
         switch (this._gapBuilding.state) {
         case GapBuildingState.Disabled:
             return null;
+        case GapBuildingState.Expired:
+            return 0;
         case GapBuildingState.NotBuilding:
             gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
             break;
@@ -314,6 +317,7 @@ export class DelimiterMaster extends Delimiter {
     _checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
         switch (this._gapBuilding.state) {
         case GapBuildingState.Disabled:
+        case GapBuildingState.Expired:
             break;
         case GapBuildingState.NotBuilding:
             this._createBuildingGap(key, 1);
@@ -494,16 +498,23 @@ export class DelimiterMaster extends Delimiter {
         return params;
     }
 
-    _saveBuildingGap(): void {
+    /**
+     * Save the gap being built if allowed (i.e. still within the
+     * allocated exposure time window).
+     *
+     * @return {boolean} - true if the gap was saved, false if we are
+     * outside the allocated exposure time window.
+     */
+    _saveBuildingGap(): boolean {
         const { gapCache, params, gap, gapWeight } =
             <GapBuildingInfo_Building> this._gapBuilding;
         const totalElapsed = Date.now() - params.initTimestamp;
         if (totalElapsed >= gapCache.exposureDelayMs) {
             this._gapBuilding = {
-                state: GapBuildingState.Disabled,
+                state: GapBuildingState.Expired,
             };
             this._refreshedBuildingParams = null;
-            return;
+            return false;
         }
         const { firstKey, lastKey, weight } = gap;
         gapCache.setGap(firstKey, lastKey, weight);
@@ -518,6 +529,7 @@ export class DelimiterMaster extends Delimiter {
             },
             gapWeight,
         };
+        return true;
     }
 
     /**
@@ -569,7 +581,10 @@ export class DelimiterMaster extends Delimiter {
             // only set gaps that are significant enough in weight and
             // with a non-empty extension
             if (gapWeight >= params.minGapWeight && gap.weight > 0) {
-                this._saveBuildingGap();
+                // we're done if we were not allowed to save the gap
+                if (!this._saveBuildingGap()) {
+                    return;
+                }
                 // params may have been refreshed, reload them
                 gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
                 params = gapBuilding.params;


@@ -50,6 +50,52 @@ describe('GapCache', () => {
         });
     });
 
+    describe('clear()', () => {
+        it('should clear all exposed gaps', async () => {
+            gapCache.setGap('bar', 'baz', 10);
+            gapCache.setGap('qux', 'quz', 20);
+            await new Promise(resolve => setTimeout(resolve, 300));
+            expect(await gapCache.lookupGap('ape', 'zoo')).toEqual(
+                { firstKey: 'bar', lastKey: 'baz', weight: 10 }
+            );
+
+            gapCache.clear();
+            expect(await gapCache.lookupGap('ape', 'zoo')).toBeNull();
+        });
+
+        it('should clear all staging gaps', async () => {
+            gapCache.setGap('bar', 'baz', 10);
+            gapCache.setGap('qux', 'quz', 20);
+
+            gapCache.clear();
+            await new Promise(resolve => setTimeout(resolve, 300));
+            expect(await gapCache.lookupGap('ape', 'zoo')).toBeNull();
+        });
+
+        it('should keep existing invalidating updates against the next new gaps', async () => {
+            // invalidate future gaps containing 'dog'
+            expect(gapCache.removeOverlappingGaps(['dog'])).toEqual(0);
+
+            // then, clear the cache
+            gapCache.clear();
+
+            // wait for 50ms (half of the 100ms exposure delay) before
+            // setting a new gap overlapping with 'dog'
+            await new Promise(resolve => setTimeout(resolve, 50));
+            gapCache.setGap('cat', 'fox', 10);
+
+            // also set a non-overlapping gap to make sure it is not invalidated
+            gapCache.setGap('goat', 'hog', 20);
+
+            // wait an extra 250ms to ensure all valid gaps have been exposed
+            await new Promise(resolve => setTimeout(resolve, 250));
+
+            // the lookup should return the 'goat'...'hog' gap, because
+            // 'cat'...'fox' should have been invalidated
+            expect(await gapCache.lookupGap('bat', 'zoo')).toEqual(
+                { firstKey: 'goat', lastKey: 'hog', weight: 20 });
+        });
+    });
+
     it('should expose gaps after at least exposureDelayMs milliseconds', async () => {
         gapCache.setGap('bar', 'baz', 10);
         expect(await gapCache.lookupGap('ape', 'cat')).toBeNull();


@@ -1186,6 +1186,11 @@ describe('DelimiterMaster listing algorithm: gap caching and lookup', () => {
         resumeFromState = filterEntries(listing, 'Ddv Ddv Ddv Vvv', 'ass ass ass ass',
                                         resumeFromState);
         expect(gapCache.toArray()).toEqual(gapsArray);
+        // gap building should be in expired state
+        expect(listing._gapBuilding.state).toEqual(GapBuildingState.Expired);
+        // remaining validity period should still be 0 because gap building has expired
+        validityPeriod = listing.getGapBuildingValidityPeriodMs();
+        expect(validityPeriod).toEqual(0);
         // we should still be able to skip over the existing cached gaps
         expect(listing._gapCaching.state).toEqual(GapCachingState.GapLookupInProgress);