Spaces:
Sleeping
Sleeping
; | |
/**
 * @module LRUCache
 */
// CommonJS interop boilerplate: mark this as a transpiled ES module and
// pre-declare the named export (the class is assigned to it later).
Object.defineProperty(exports, "__esModule", { value: true });
exports.LRUCache = void 0;
// Clock used for all TTL math: the high-resolution timer when available,
// otherwise Date (which also exposes a compatible `now()` method).
const hasPerfNow =
    typeof performance === 'object' &&
    !!performance &&
    typeof performance.now === 'function';
const perf = hasPerfNow ? performance : Date;
// Registry of warning codes already emitted (warn-once bookkeeping).
const warned = new Set();
/* c8 ignore start */
const PROCESS = (typeof process === 'object' && !!process ? process : {});
/* c8 ignore start */
// Emit a process warning when the host supports it; otherwise fall back
// to logging the same information on the console.
const emitWarning = (msg, type, code, fn) => {
    if (typeof PROCESS.emitWarning === 'function') {
        PROCESS.emitWarning(msg, type, code, fn);
    }
    else {
        console.error(`[${code}] ${type}: ${msg}`);
    }
};
// AbortController / AbortSignal: use the platform implementations when
// present; otherwise install a minimal polyfill sufficient only for
// LRUCache.fetch() (a warning is emitted on first use, see below).
let AC = globalThis.AbortController;
let AS = globalThis.AbortSignal;
/* c8 ignore start */
if (typeof AC === 'undefined') {
    //@ts-ignore
    AS = class AbortSignal {
        onabort;
        _onabort = [];
        reason;
        aborted = false;
        // Bare minimum: collect listeners to fire once on abort. The event
        // type and any options (eg `{ signal }`) are ignored by this shim.
        addEventListener(_, fn) {
            this._onabort.push(fn);
        }
    };
    //@ts-ignore
    AC = class AbortController {
        constructor() {
            warnACPolyfill();
        }
        signal = new AS();
        abort(reason) {
            // abort() is idempotent: only the first call has any effect
            if (this.signal.aborted)
                return;
            //@ts-ignore
            this.signal.reason = reason;
            //@ts-ignore
            this.signal.aborted = true;
            //@ts-ignore
            for (const fn of this.signal._onabort) {
                fn(reason);
            }
            this.signal.onabort?.(reason);
        }
    };
    // Warn only once per process, and not at all when silenced via env.
    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
    const warnACPolyfill = () => {
        if (!printACPolyfillWarning)
            return;
        printACPolyfillWarning = false;
        emitWarning('AbortController is not defined. If using lru-cache in ' +
            'node 14, load an AbortController polyfill from the ' +
            '`node-abort-controller` package. A minimal polyfill is ' +
            'provided for use by LRUCache.fetch(), but it should not be ' +
            'relied upon in other contexts (eg, passing it to other APIs that ' +
            'use AbortController/AbortSignal might have undesirable effects). ' +
            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
    };
}
/* c8 ignore stop */
// true if a warning with this code has not yet been recorded in `warned`
const shouldWarn = (code) => !warned.has(code);
// unique tag symbol — presumably used to brand internal sentinel objects
// elsewhere in this module; not referenced in this chunk (TODO confirm)
const TYPE = Symbol('type');
// True only for positive finite integers (the only values accepted for
// max, sizes, and ttls). Number.isInteger already rejects NaN, Infinity,
// non-numbers, and fractional values, and (unlike the original
// `Math.floor` comparison) never throws on BigInt input. Always returns
// a strict boolean.
const isPosInt = (n) => typeof n === 'number' && Number.isInteger(n) && n > 0;
/* c8 ignore start */
// This is a little bit ridiculous, tbh.
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
// And well before that point, you're caching the entire world, I mean,
// that's ~32GB of just integers for the next/prev links, plus whatever
// else to hold that many keys and values. Just filling the memory with
// zeroes at init time is brutal when you get that big.
// But why not be complete?
// Maybe in the future, these limits will have expanded.
//
// Pick the smallest array class able to hold indexes 0..max-1: a typed
// array when max fits, a zero-filled plain array for very large caches,
// or null when max is not a positive (safe) integer.
const getUintArray = (max) => !isPosInt(max)
    ? null
    : max <= 2 ** 8
        ? Uint8Array
        : max <= 2 ** 16
            ? Uint16Array
            : max <= 2 ** 32
                ? Uint32Array
                : max <= Number.MAX_SAFE_INTEGER
                    ? ZeroArray
                    : null;
/* c8 ignore stop */
// A plain Array pre-filled with zeroes. Used where a typed array is not
// viable: index links for caches larger than the Uint32Array range, and
// the ttl/start/size tracking arrays.
class ZeroArray extends Array {
    constructor(size) {
        super(size);
        this.fill(0);
    }
}
// A fixed-capacity stack of free slot indexes, backed by the smallest
// numeric array type that can hold them. Construct via Stack.create().
class Stack {
    heap;
    length;
    // Flag that gates the constructor so instances can only be created
    // through the create() factory below.
    static #constructing = false;
    static create(max) {
        const HeapCls = getUintArray(max);
        if (!HeapCls)
            return [];
        Stack.#constructing = true;
        try {
            return new Stack(max, HeapCls);
        }
        finally {
            Stack.#constructing = false;
        }
    }
    constructor(max, HeapCls) {
        /* c8 ignore start */
        if (!Stack.#constructing) {
            throw new TypeError('instantiate Stack using Stack.create(n)');
        }
        /* c8 ignore stop */
        this.heap = new HeapCls(max);
        this.length = 0;
    }
    push(n) {
        const at = this.length;
        this.heap[at] = n;
        this.length = at + 1;
    }
    pop() {
        this.length -= 1;
        return this.heap[this.length];
    }
}
/** | |
* Default export, the thing you're using this module to get. | |
* | |
* The `K` and `V` types define the key and value types, respectively. The | |
* optional `FC` type defines the type of the `context` object passed to | |
* `cache.fetch()` and `cache.memo()`. | |
* | |
* Keys and values **must not** be `null` or `undefined`. | |
* | |
* All properties from the options object (with the exception of `max`, | |
* `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are | |
* added as normal public members. (The listed options are read-only getters.) | |
* | |
* Changing any of these will alter the defaults for subsequent method calls. | |
*/ | |
class LRUCache { | |
// options that cannot be changed without disaster | |
#max; | |
#maxSize; | |
#dispose; | |
#disposeAfter; | |
#fetchMethod; | |
#memoMethod; | |
/** | |
* {@link LRUCache.OptionsBase.ttl} | |
*/ | |
ttl; | |
/** | |
* {@link LRUCache.OptionsBase.ttlResolution} | |
*/ | |
ttlResolution; | |
/** | |
* {@link LRUCache.OptionsBase.ttlAutopurge} | |
*/ | |
ttlAutopurge; | |
/** | |
* {@link LRUCache.OptionsBase.updateAgeOnGet} | |
*/ | |
updateAgeOnGet; | |
/** | |
* {@link LRUCache.OptionsBase.updateAgeOnHas} | |
*/ | |
updateAgeOnHas; | |
/** | |
* {@link LRUCache.OptionsBase.allowStale} | |
*/ | |
allowStale; | |
/** | |
* {@link LRUCache.OptionsBase.noDisposeOnSet} | |
*/ | |
noDisposeOnSet; | |
/** | |
* {@link LRUCache.OptionsBase.noUpdateTTL} | |
*/ | |
noUpdateTTL; | |
/** | |
* {@link LRUCache.OptionsBase.maxEntrySize} | |
*/ | |
maxEntrySize; | |
/** | |
* {@link LRUCache.OptionsBase.sizeCalculation} | |
*/ | |
sizeCalculation; | |
/** | |
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} | |
*/ | |
noDeleteOnFetchRejection; | |
/** | |
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet} | |
*/ | |
noDeleteOnStaleGet; | |
/** | |
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} | |
*/ | |
allowStaleOnFetchAbort; | |
/** | |
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} | |
*/ | |
allowStaleOnFetchRejection; | |
/** | |
* {@link LRUCache.OptionsBase.ignoreFetchAbort} | |
*/ | |
ignoreFetchAbort; | |
// computed properties | |
#size; | |
#calculatedSize; | |
#keyMap; | |
#keyList; | |
#valList; | |
#next; | |
#prev; | |
#head; | |
#tail; | |
#free; | |
#disposed; | |
#sizes; | |
#starts; | |
#ttls; | |
#hasDispose; | |
#hasFetchMethod; | |
#hasDisposeAfter; | |
/**
 * Do not call this method unless you need to inspect the
 * inner workings of the cache. If anything returned by this
 * object is modified in any way, strange breakage may occur.
 *
 * These fields are private for a reason!
 *
 * @internal
 */
static unsafeExposeInternals(c) {
    return {
        // properties (direct references to the live internal structures)
        starts: c.#starts,
        ttls: c.#ttls,
        sizes: c.#sizes,
        keyMap: c.#keyMap,
        keyList: c.#keyList,
        valList: c.#valList,
        next: c.#next,
        prev: c.#prev,
        // head/tail are primitives, so expose them as live getters
        get head() {
            return c.#head;
        },
        get tail() {
            return c.#tail;
        },
        free: c.#free,
        // methods (bound pass-throughs to the private implementations)
        isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
        backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
        moveToTail: (index) => c.#moveToTail(index),
        indexes: (options) => c.#indexes(options),
        rindexes: (options) => c.#rindexes(options),
        isStale: (index) => c.#isStale(index),
    };
}
// Protected read-only members
/**
 * {@link LRUCache.OptionsBase.max} (read-only)
 */
get max() {
    return this.#max;
}
/**
 * {@link LRUCache.OptionsBase.maxSize} (read-only)
 */
get maxSize() {
    return this.#maxSize;
}
/**
 * The total computed size of items in the cache (read-only)
 */
get calculatedSize() {
    return this.#calculatedSize;
}
/**
 * The number of items stored in the cache (read-only)
 */
get size() {
    return this.#size;
}
/**
 * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
 */
get fetchMethod() {
    return this.#fetchMethod;
}
/**
 * {@link LRUCache.OptionsBase.memoMethod} (read-only)
 */
get memoMethod() {
    return this.#memoMethod;
}
/**
 * {@link LRUCache.OptionsBase.dispose} (read-only)
 */
get dispose() {
    return this.#dispose;
}
/**
 * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
 */
get disposeAfter() {
    return this.#disposeAfter;
}
constructor(options) {
    const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
    if (max !== 0 && !isPosInt(max)) {
        throw new TypeError('max option must be a nonnegative integer');
    }
    // With no max, the next/prev link stores grow as plain arrays;
    // otherwise use the smallest typed array that can index `max` slots.
    const UintArray = max ? getUintArray(max) : Array;
    if (!UintArray) {
        throw new Error('invalid max value: ' + max);
    }
    this.#max = max;
    this.#maxSize = maxSize;
    // maxEntrySize defaults to maxSize when not given
    this.maxEntrySize = maxEntrySize || this.#maxSize;
    this.sizeCalculation = sizeCalculation;
    if (this.sizeCalculation) {
        if (!this.#maxSize && !this.maxEntrySize) {
            throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
        }
        if (typeof this.sizeCalculation !== 'function') {
            throw new TypeError('sizeCalculation set to non-function');
        }
    }
    if (memoMethod !== undefined &&
        typeof memoMethod !== 'function') {
        throw new TypeError('memoMethod must be a function if defined');
    }
    this.#memoMethod = memoMethod;
    if (fetchMethod !== undefined &&
        typeof fetchMethod !== 'function') {
        throw new TypeError('fetchMethod must be a function if specified');
    }
    this.#fetchMethod = fetchMethod;
    this.#hasFetchMethod = !!fetchMethod;
    // Core storage: keyMap resolves key -> slot index; keyList/valList
    // hold entries by slot; next/prev form the doubly-linked LRU order.
    this.#keyMap = new Map();
    this.#keyList = new Array(max).fill(undefined);
    this.#valList = new Array(max).fill(undefined);
    this.#next = new UintArray(max);
    this.#prev = new UintArray(max);
    this.#head = 0;
    this.#tail = 0;
    this.#free = Stack.create(max);
    this.#size = 0;
    this.#calculatedSize = 0;
    if (typeof dispose === 'function') {
        this.#dispose = dispose;
    }
    if (typeof disposeAfter === 'function') {
        this.#disposeAfter = disposeAfter;
        // queue of pending disposeAfter calls, drained after mutations
        this.#disposed = [];
    }
    else {
        this.#disposeAfter = undefined;
        this.#disposed = undefined;
    }
    this.#hasDispose = !!this.#dispose;
    this.#hasDisposeAfter = !!this.#disposeAfter;
    this.noDisposeOnSet = !!noDisposeOnSet;
    this.noUpdateTTL = !!noUpdateTTL;
    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
    this.ignoreFetchAbort = !!ignoreFetchAbort;
    // NB: maxEntrySize is set to maxSize if it's set
    if (this.maxEntrySize !== 0) {
        if (this.#maxSize !== 0) {
            if (!isPosInt(this.#maxSize)) {
                throw new TypeError('maxSize must be a positive integer if specified');
            }
        }
        if (!isPosInt(this.maxEntrySize)) {
            throw new TypeError('maxEntrySize must be a positive integer if specified');
        }
        this.#initializeSizeTracking();
    }
    this.allowStale = !!allowStale;
    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
    this.updateAgeOnGet = !!updateAgeOnGet;
    this.updateAgeOnHas = !!updateAgeOnHas;
    this.ttlResolution =
        isPosInt(ttlResolution) || ttlResolution === 0
            ? ttlResolution
            : 1;
    this.ttlAutopurge = !!ttlAutopurge;
    this.ttl = ttl || 0;
    if (this.ttl) {
        if (!isPosInt(this.ttl)) {
            throw new TypeError('ttl must be a positive integer if specified');
        }
        this.#initializeTTLTracking();
    }
    // do not allow completely unbounded caches
    if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
        throw new TypeError('At least one of max, maxSize, or ttl is required');
    }
    // ttl-only caches never evict on their own unless ttlAutopurge is on;
    // warn once per process about the unbounded-growth hazard.
    if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
        const code = 'LRU_CACHE_UNBOUNDED';
        if (shouldWarn(code)) {
            warned.add(code);
            const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
                'result in unbounded memory consumption.';
            emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
        }
    }
}
/**
 * Return the number of ms left in the item's TTL. If item is not in cache,
 * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
 */
// NOTE: this is the default (no-TTL-tracking) implementation; it is
// replaced on the instance by a ttl-aware version inside
// #initializeTTLTracking when any ttl is used.
getRemainingTTL(key) {
    return this.#keyMap.has(key) ? Infinity : 0;
}
// Lazily installs TTL tracking: allocates the per-slot ttl/start arrays
// and replaces the no-op TTL methods (and getRemainingTTL) with real
// implementations that close over those arrays.
#initializeTTLTracking() {
    const ttls = new ZeroArray(this.#max);
    const starts = new ZeroArray(this.#max);
    this.#ttls = ttls;
    this.#starts = starts;
    this.#setItemTTL = (index, ttl, start = perf.now()) => {
        // ttl === 0 means "no expiration"; store 0 start as the sentinel
        starts[index] = ttl !== 0 ? start : 0;
        ttls[index] = ttl;
        if (ttl !== 0 && this.ttlAutopurge) {
            const t = setTimeout(() => {
                // re-check staleness at fire time; the entry may have been
                // refreshed or replaced since the timer was scheduled
                if (this.#isStale(index)) {
                    this.#delete(this.#keyList[index], 'expire');
                }
            }, ttl + 1);
            // unref() not supported on all platforms
            /* c8 ignore start */
            if (t.unref) {
                t.unref();
            }
            /* c8 ignore stop */
        }
    };
    this.#updateItemAge = index => {
        starts[index] = ttls[index] !== 0 ? perf.now() : 0;
    };
    this.#statusTTL = (status, index) => {
        if (ttls[index]) {
            const ttl = ttls[index];
            const start = starts[index];
            /* c8 ignore next */
            if (!ttl || !start)
                return;
            status.ttl = ttl;
            status.start = start;
            status.now = cachedNow || getNow();
            const age = status.now - start;
            status.remainingTTL = ttl - age;
        }
    };
    // debounce calls to perf.now() to 1s so we're not hitting
    // that costly call repeatedly.
    let cachedNow = 0;
    const getNow = () => {
        const n = perf.now();
        if (this.ttlResolution > 0) {
            cachedNow = n;
            // reuse this timestamp until the resolution window elapses
            const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
            // not available on all platforms
            /* c8 ignore start */
            if (t.unref) {
                t.unref();
            }
            /* c8 ignore stop */
        }
        return n;
    };
    // Replace the class-level default with the ttl-aware version.
    this.getRemainingTTL = key => {
        const index = this.#keyMap.get(key);
        if (index === undefined) {
            return 0;
        }
        const ttl = ttls[index];
        const start = starts[index];
        if (!ttl || !start) {
            // present, but has no expiration
            return Infinity;
        }
        const age = (cachedNow || getNow()) - start;
        return ttl - age;
    };
    this.#isStale = index => {
        const s = starts[index];
        const t = ttls[index];
        return !!t && !!s && (cachedNow || getNow()) - s > t;
    };
}
// conditionally set private methods related to TTL
// These no-op defaults are overwritten per-instance by
// #initializeTTLTracking() the first time a ttl is used.
#updateItemAge = () => { };
#statusTTL = () => { };
#setItemTTL = () => { };
/* c8 ignore stop */
#isStale = () => false;
// Lazily installs size tracking: allocates the per-slot size array and
// replaces the no-op size methods with real implementations that close
// over it. Called from the constructor when maxSize/maxEntrySize is set.
#initializeSizeTracking() {
    const sizes = new ZeroArray(this.#max);
    this.#calculatedSize = 0;
    this.#sizes = sizes;
    this.#removeItemSize = index => {
        this.#calculatedSize -= sizes[index];
        sizes[index] = 0;
    };
    // Resolve the effective size of an entry: use the explicit `size`
    // when it's a positive integer, otherwise call sizeCalculation.
    // Throws if neither yields a valid size.
    this.#requireSize = (k, v, size, sizeCalculation) => {
        // provisionally accept background fetches.
        // actual value size will be checked when they return.
        if (this.#isBackgroundFetch(v)) {
            return 0;
        }
        if (!isPosInt(size)) {
            if (sizeCalculation) {
                if (typeof sizeCalculation !== 'function') {
                    throw new TypeError('sizeCalculation must be a function');
                }
                size = sizeCalculation(v, k);
                if (!isPosInt(size)) {
                    throw new TypeError('sizeCalculation return invalid (expect positive integer)');
                }
            }
            else {
                throw new TypeError('invalid size value (must be positive integer). ' +
                    'When maxSize or maxEntrySize is used, sizeCalculation ' +
                    'or size must be set.');
            }
        }
        return size;
    };
    this.#addItemSize = (index, size, status) => {
        sizes[index] = size;
        if (this.#maxSize) {
            // evict until the new entry fits within maxSize
            const maxSize = this.#maxSize - sizes[index];
            while (this.#calculatedSize > maxSize) {
                this.#evict(true);
            }
        }
        this.#calculatedSize += sizes[index];
        if (status) {
            status.entrySize = size;
            status.totalCalculatedSize = this.#calculatedSize;
        }
    };
}
// No-op size-tracking defaults; overwritten per-instance by
// #initializeSizeTracking() when maxSize/maxEntrySize is in use.
#removeItemSize = _i => { };
#addItemSize = (_i, _s, _st) => { };
#requireSize = (_k, _v, size, sizeCalculation) => {
    // without size tracking, passing size/sizeCalculation is an error
    if (size || sizeCalculation) {
        throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
    }
    return 0;
};
*#indexes({ allowStale = this.allowStale } = {}) { | |
if (this.#size) { | |
for (let i = this.#tail; true;) { | |
if (!this.#isValidIndex(i)) { | |
break; | |
} | |
if (allowStale || !this.#isStale(i)) { | |
yield i; | |
} | |
if (i === this.#head) { | |
break; | |
} | |
else { | |
i = this.#prev[i]; | |
} | |
} | |
} | |
} | |
*#rindexes({ allowStale = this.allowStale } = {}) { | |
if (this.#size) { | |
for (let i = this.#head; true;) { | |
if (!this.#isValidIndex(i)) { | |
break; | |
} | |
if (allowStale || !this.#isStale(i)) { | |
yield i; | |
} | |
if (i === this.#tail) { | |
break; | |
} | |
else { | |
i = this.#next[i]; | |
} | |
} | |
} | |
} | |
#isValidIndex(index) { | |
return (index !== undefined && | |
this.#keyMap.get(this.#keyList[index]) === index); | |
} | |
/** | |
* Return a generator yielding `[key, value]` pairs, | |
* in order from most recently used to least recently used. | |
*/ | |
*entries() { | |
for (const i of this.#indexes()) { | |
if (this.#valList[i] !== undefined && | |
this.#keyList[i] !== undefined && | |
!this.#isBackgroundFetch(this.#valList[i])) { | |
yield [this.#keyList[i], this.#valList[i]]; | |
} | |
} | |
} | |
/** | |
* Inverse order version of {@link LRUCache.entries} | |
* | |
* Return a generator yielding `[key, value]` pairs, | |
* in order from least recently used to most recently used. | |
*/ | |
*rentries() { | |
for (const i of this.#rindexes()) { | |
if (this.#valList[i] !== undefined && | |
this.#keyList[i] !== undefined && | |
!this.#isBackgroundFetch(this.#valList[i])) { | |
yield [this.#keyList[i], this.#valList[i]]; | |
} | |
} | |
} | |
/** | |
* Return a generator yielding the keys in the cache, | |
* in order from most recently used to least recently used. | |
*/ | |
*keys() { | |
for (const i of this.#indexes()) { | |
const k = this.#keyList[i]; | |
if (k !== undefined && | |
!this.#isBackgroundFetch(this.#valList[i])) { | |
yield k; | |
} | |
} | |
} | |
/** | |
* Inverse order version of {@link LRUCache.keys} | |
* | |
* Return a generator yielding the keys in the cache, | |
* in order from least recently used to most recently used. | |
*/ | |
*rkeys() { | |
for (const i of this.#rindexes()) { | |
const k = this.#keyList[i]; | |
if (k !== undefined && | |
!this.#isBackgroundFetch(this.#valList[i])) { | |
yield k; | |
} | |
} | |
} | |
/** | |
* Return a generator yielding the values in the cache, | |
* in order from most recently used to least recently used. | |
*/ | |
*values() { | |
for (const i of this.#indexes()) { | |
const v = this.#valList[i]; | |
if (v !== undefined && | |
!this.#isBackgroundFetch(this.#valList[i])) { | |
yield this.#valList[i]; | |
} | |
} | |
} | |
/** | |
* Inverse order version of {@link LRUCache.values} | |
* | |
* Return a generator yielding the values in the cache, | |
* in order from least recently used to most recently used. | |
*/ | |
*rvalues() { | |
for (const i of this.#rindexes()) { | |
const v = this.#valList[i]; | |
if (v !== undefined && | |
!this.#isBackgroundFetch(this.#valList[i])) { | |
yield this.#valList[i]; | |
} | |
} | |
} | |
/**
 * Iterating over the cache itself yields the same results as
 * {@link LRUCache.entries}
 */
[Symbol.iterator]() {
    return this.entries();
}
/**
 * A String value that is used in the creation of the default string
 * description of an object. Called by the built-in method
 * `Object.prototype.toString`.
 */
[Symbol.toStringTag] = 'LRUCache';
/** | |
* Find a value for which the supplied fn method returns a truthy value, | |
* similar to `Array.find()`. fn is called as `fn(value, key, cache)`. | |
*/ | |
find(fn, getOptions = {}) { | |
for (const i of this.#indexes()) { | |
const v = this.#valList[i]; | |
const value = this.#isBackgroundFetch(v) | |
? v.__staleWhileFetching | |
: v; | |
if (value === undefined) | |
continue; | |
if (fn(value, this.#keyList[i], this)) { | |
return this.get(this.#keyList[i], getOptions); | |
} | |
} | |
} | |
/** | |
* Call the supplied function on each item in the cache, in order from most | |
* recently used to least recently used. | |
* | |
* `fn` is called as `fn(value, key, cache)`. | |
* | |
* If `thisp` is provided, function will be called in the `this`-context of | |
* the provided object, or the cache if no `thisp` object is provided. | |
* | |
* Does not update age or recenty of use, or iterate over stale values. | |
*/ | |
forEach(fn, thisp = this) { | |
for (const i of this.#indexes()) { | |
const v = this.#valList[i]; | |
const value = this.#isBackgroundFetch(v) | |
? v.__staleWhileFetching | |
: v; | |
if (value === undefined) | |
continue; | |
fn.call(thisp, value, this.#keyList[i], this); | |
} | |
} | |
/** | |
* The same as {@link LRUCache.forEach} but items are iterated over in | |
* reverse order. (ie, less recently used items are iterated over first.) | |
*/ | |
rforEach(fn, thisp = this) { | |
for (const i of this.#rindexes()) { | |
const v = this.#valList[i]; | |
const value = this.#isBackgroundFetch(v) | |
? v.__staleWhileFetching | |
: v; | |
if (value === undefined) | |
continue; | |
fn.call(thisp, value, this.#keyList[i], this); | |
} | |
} | |
/** | |
* Delete any stale entries. Returns true if anything was removed, | |
* false otherwise. | |
*/ | |
purgeStale() { | |
let deleted = false; | |
for (const i of this.#rindexes({ allowStale: true })) { | |
if (this.#isStale(i)) { | |
this.#delete(this.#keyList[i], 'expire'); | |
deleted = true; | |
} | |
} | |
return deleted; | |
} | |
/**
 * Get the extended info about a given entry, to get its value, size, and
 * TTL info simultaneously. Returns `undefined` if the key is not present.
 *
 * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
 * serialization, the `start` value is always the current timestamp, and the
 * `ttl` is a calculated remaining time to live (negative if expired).
 *
 * Always returns stale values, if their info is found in the cache, so be
 * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
 * if relevant.
 */
info(key) {
    const i = this.#keyMap.get(key);
    if (i === undefined)
        return undefined;
    const v = this.#valList[i];
    // for in-flight background fetches, report the stale value (if any)
    const value = this.#isBackgroundFetch(v)
        ? v.__staleWhileFetching
        : v;
    if (value === undefined)
        return undefined;
    const entry = { value };
    if (this.#ttls && this.#starts) {
        const ttl = this.#ttls[i];
        const start = this.#starts[i];
        if (ttl && start) {
            // remaining ttl as of right now (may be negative if expired)
            const remain = ttl - (perf.now() - start);
            entry.ttl = remain;
            entry.start = Date.now();
        }
    }
    if (this.#sizes) {
        entry.size = this.#sizes[i];
    }
    return entry;
}
/** | |
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be | |
* passed to {@link LRLUCache#load}. | |
* | |
* The `start` fields are calculated relative to a portable `Date.now()` | |
* timestamp, even if `performance.now()` is available. | |
* | |
* Stale entries are always included in the `dump`, even if | |
* {@link LRUCache.OptionsBase.allowStale} is false. | |
* | |
* Note: this returns an actual array, not a generator, so it can be more | |
* easily passed around. | |
*/ | |
dump() { | |
const arr = []; | |
for (const i of this.#indexes({ allowStale: true })) { | |
const key = this.#keyList[i]; | |
const v = this.#valList[i]; | |
const value = this.#isBackgroundFetch(v) | |
? v.__staleWhileFetching | |
: v; | |
if (value === undefined || key === undefined) | |
continue; | |
const entry = { value }; | |
if (this.#ttls && this.#starts) { | |
entry.ttl = this.#ttls[i]; | |
// always dump the start relative to a portable timestamp | |
// it's ok for this to be a bit slow, it's a rare operation. | |
const age = perf.now() - this.#starts[i]; | |
entry.start = Math.floor(Date.now() - age); | |
} | |
if (this.#sizes) { | |
entry.size = this.#sizes[i]; | |
} | |
arr.unshift([key, entry]); | |
} | |
return arr; | |
} | |
/**
 * Reset the cache and load in the items in entries in the order listed.
 *
 * The shape of the resulting cache may be different if the same options are
 * not used in both caches.
 *
 * The `start` fields are assumed to be calculated relative to a portable
 * `Date.now()` timestamp, even if `performance.now()` is available.
 *
 * NOTE(review): mutates the `start` field of the supplied entry objects
 * in place before passing them to `set()` — callers should not reuse the
 * dumped array afterwards.
 */
load(arr) {
    this.clear();
    for (const [key, entry] of arr) {
        if (entry.start) {
            // entry.start is a portable timestamp, but we may be using
            // node's performance.now(), so calculate the offset, so that
            // we get the intended remaining TTL, no matter how long it's
            // been on ice.
            //
            // it's ok for this to be a bit slow, it's a rare operation.
            const age = Date.now() - entry.start;
            entry.start = perf.now() - age;
        }
        this.set(key, entry.value, entry);
    }
}
/**
 * Add a value to the cache.
 *
 * Note: if `undefined` is specified as a value, this is an alias for
 * {@link LRUCache#delete}
 *
 * Fields on the {@link LRUCache.SetOptions} options param will override
 * their corresponding values in the constructor options for the scope
 * of this single `set()` operation.
 *
 * If `start` is provided, then that will set the effective start
 * time for the TTL calculation. Note that this must be a previous
 * value of `performance.now()` if supported, or a previous value of
 * `Date.now()` if not.
 *
 * Options object may also include `size`, which will prevent
 * calling the `sizeCalculation` function and just use the specified
 * number if it is a positive integer, and `noDisposeOnSet` which
 * will prevent calling a `dispose` function in the case of
 * overwrites.
 *
 * If the `size` (or return value of `sizeCalculation`) for a given
 * entry is greater than `maxEntrySize`, then the item will not be
 * added to the cache.
 *
 * Will update the recency of the entry.
 *
 * If the value is `undefined`, then this is an alias for
 * `cache.delete(key)`. `undefined` is never stored in the cache.
 */
set(k, v, setOptions = {}) {
    if (v === undefined) {
        this.delete(k);
        return this;
    }
    const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
    let { noUpdateTTL = this.noUpdateTTL } = setOptions;
    const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
    // if the item doesn't fit, don't do anything
    // NB: maxEntrySize set to maxSize by default
    if (this.maxEntrySize && size > this.maxEntrySize) {
        if (status) {
            status.set = 'miss';
            status.maxEntrySizeExceeded = true;
        }
        // have to delete, in case something is there already.
        this.#delete(k, 'set');
        return this;
    }
    let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
    if (index === undefined) {
        // addition: pick a slot — the tail when empty, a recycled free
        // slot if any, evict the LRU slot when full, else grow.
        index = (this.#size === 0
            ? this.#tail
            : this.#free.length !== 0
                ? this.#free.pop()
                : this.#size === this.#max
                    ? this.#evict(false)
                    : this.#size);
        this.#keyList[index] = k;
        this.#valList[index] = v;
        this.#keyMap.set(k, index);
        // link the new slot in as the most-recently-used (tail)
        this.#next[this.#tail] = index;
        this.#prev[index] = this.#tail;
        this.#tail = index;
        this.#size++;
        this.#addItemSize(index, size, status);
        if (status)
            status.set = 'add';
        // new entries always get a fresh TTL
        noUpdateTTL = false;
    }
    else {
        // update
        this.#moveToTail(index);
        const oldVal = this.#valList[index];
        if (v !== oldVal) {
            // replacing a pending background fetch aborts it; dispose of
            // its stale value (if any) rather than the promise itself
            if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
                oldVal.__abortController.abort(new Error('replaced'));
                const { __staleWhileFetching: s } = oldVal;
                if (s !== undefined && !noDisposeOnSet) {
                    if (this.#hasDispose) {
                        this.#dispose?.(s, k, 'set');
                    }
                    if (this.#hasDisposeAfter) {
                        this.#disposed?.push([s, k, 'set']);
                    }
                }
            }
            else if (!noDisposeOnSet) {
                if (this.#hasDispose) {
                    this.#dispose?.(oldVal, k, 'set');
                }
                if (this.#hasDisposeAfter) {
                    this.#disposed?.push([oldVal, k, 'set']);
                }
            }
            this.#removeItemSize(index);
            this.#addItemSize(index, size, status);
            this.#valList[index] = v;
            if (status) {
                status.set = 'replace';
                const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
                    ? oldVal.__staleWhileFetching
                    : oldVal;
                if (oldValue !== undefined)
                    status.oldValue = oldValue;
            }
        }
        else if (status) {
            status.set = 'update';
        }
    }
    // lazily initialize TTL tracking the first time a ttl is seen
    if (ttl !== 0 && !this.#ttls) {
        this.#initializeTTLTracking();
    }
    if (this.#ttls) {
        if (!noUpdateTTL) {
            this.#setItemTTL(index, ttl, start);
        }
        if (status)
            this.#statusTTL(status, index);
    }
    // drain any disposeAfter callbacks queued by evictions/replacement
    if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
        const dt = this.#disposed;
        let task;
        while ((task = dt?.shift())) {
            this.#disposeAfter?.(...task);
        }
    }
    return this;
}
/**
 * Evict the least recently used item, returning its value or
 * `undefined` if cache is empty.
 */
pop() {
    try {
        // loop: slots holding unresolved background fetches with no stale
        // value are evicted but yield nothing, so keep going until a real
        // value is found or the cache is empty.
        while (this.#size) {
            const val = this.#valList[this.#head];
            this.#evict(true);
            if (this.#isBackgroundFetch(val)) {
                if (val.__staleWhileFetching) {
                    return val.__staleWhileFetching;
                }
            }
            else if (val !== undefined) {
                return val;
            }
        }
    }
    finally {
        // drain disposeAfter callbacks queued by the evictions above,
        // even when returning early from inside the loop
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
}
// Evict the least-recently-used entry (the head). When `free` is true the
// slot is cleared and recycled via the free stack; when false the caller
// is about to reuse the slot index (returned) immediately.
#evict(free) {
    const head = this.#head;
    const k = this.#keyList[head];
    const v = this.#valList[head];
    if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
        // abort in-flight fetches rather than disposing the promise
        v.__abortController.abort(new Error('evicted'));
    }
    else if (this.#hasDispose || this.#hasDisposeAfter) {
        if (this.#hasDispose) {
            this.#dispose?.(v, k, 'evict');
        }
        if (this.#hasDisposeAfter) {
            this.#disposed?.push([v, k, 'evict']);
        }
    }
    this.#removeItemSize(head);
    // if we aren't about to use the index, then null these out
    if (free) {
        this.#keyList[head] = undefined;
        this.#valList[head] = undefined;
        this.#free.push(head);
    }
    if (this.#size === 1) {
        // evicting the last entry resets the list to its initial state
        this.#head = this.#tail = 0;
        this.#free.length = 0;
    }
    else {
        this.#head = this.#next[head];
    }
    this.#keyMap.delete(k);
    this.#size--;
    return head;
}
/**
 * Check if a key is in the cache, without updating the recency of use.
 * Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
 * to `true` in either the options or the constructor.
 *
 * Will return `false` if the item is stale, even though it is technically in
 * the cache. The difference can be determined (if it matters) by using a
 * `status` argument, and inspecting the `has` field.
 */
has(k, hasOptions = {}) {
    const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
    const index = this.#keyMap.get(k);
    if (index !== undefined) {
        const v = this.#valList[index];
        // an unresolved background fetch with no stale value does not
        // count as "in the cache"
        if (this.#isBackgroundFetch(v) &&
            v.__staleWhileFetching === undefined) {
            return false;
        }
        if (!this.#isStale(index)) {
            if (updateAgeOnHas) {
                this.#updateItemAge(index);
            }
            if (status) {
                status.has = 'hit';
                this.#statusTTL(status, index);
            }
            return true;
        }
        else if (status) {
            status.has = 'stale';
            this.#statusTTL(status, index);
        }
    }
    else if (status) {
        status.has = 'miss';
    }
    return false;
}
/** | |
* Like {@link LRUCache#get} but doesn't update recency or delete stale | |
* items. | |
* | |
* Returns `undefined` if the item is stale, unless | |
* {@link LRUCache.OptionsBase.allowStale} is set. | |
*/ | |
peek(k, peekOptions = {}) { | |
const { allowStale = this.allowStale } = peekOptions; | |
const index = this.#keyMap.get(k); | |
if (index === undefined || | |
(!allowStale && this.#isStale(index))) { | |
return; | |
} | |
const v = this.#valList[index]; | |
// either stale and allowed, or forcing a refresh of non-stale value | |
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; | |
} | |
    /**
     * Start (or join) a background fetch for `k`. Wraps the user-supplied
     * fetchMethod in a Promise decorated with `__abortController`,
     * `__staleWhileFetching` (the previous value, if any), and
     * `__returned`, and stores that promise in the cache slot so that
     * concurrent fetches for the same key coalesce.
     *
     * NOTE(review): `cb`, `eb`, and `fetchFail` close over `p`, which is
     * assigned further down — safe because they only run after the
     * Promise settles, by which point `p` is initialized.
     */
    #backgroundFetch(k, index, options, context) {
        // if this slot already holds a background fetch, join it
        const v = index === undefined ? undefined : this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            return v;
        }
        const ac = new AC();
        const { signal } = options;
        // when/if our AC signals, then stop listening to theirs.
        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
            signal: ac.signal,
        });
        const fetchOpts = {
            signal: ac.signal,
            options,
            context,
        };
        // resolution handler: decide whether to commit `v` into the cache
        const cb = (v, updateCache = false) => {
            const { aborted } = ac.signal;
            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
            if (options.status) {
                if (aborted && !updateCache) {
                    options.status.fetchAborted = true;
                    options.status.fetchError = ac.signal.reason;
                    if (ignoreAbort)
                        options.status.fetchAbortIgnored = true;
                }
                else {
                    options.status.fetchResolved = true;
                }
            }
            if (aborted && !ignoreAbort && !updateCache) {
                return fetchFail(ac.signal.reason);
            }
            // either we didn't abort, and are still here, or we did, and ignored
            const bf = p;
            // only touch the slot if our own promise still occupies it
            if (this.#valList[index] === p) {
                if (v === undefined) {
                    if (bf.__staleWhileFetching) {
                        this.#valList[index] = bf.__staleWhileFetching;
                    }
                    else {
                        this.#delete(k, 'fetch');
                    }
                }
                else {
                    if (options.status)
                        options.status.fetchUpdated = true;
                    this.set(k, v, fetchOpts.options);
                }
            }
            return v;
        };
        // rejection handler: record status, then defer to fetchFail
        const eb = (er) => {
            if (options.status) {
                options.status.fetchRejected = true;
                options.status.fetchError = er;
            }
            return fetchFail(er);
        };
        // shared failure path for both rejection and abort
        const fetchFail = (er) => {
            const { aborted } = ac.signal;
            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
            const noDelete = allowStale || options.noDeleteOnFetchRejection;
            const bf = p;
            if (this.#valList[index] === p) {
                // if we allow stale on fetch rejections, then we need to ensure that
                // the stale value is not removed from the cache when the fetch fails.
                const del = !noDelete || bf.__staleWhileFetching === undefined;
                if (del) {
                    this.#delete(k, 'fetch');
                }
                else if (!allowStaleAborted) {
                    // still replace the *promise* with the stale value,
                    // since we are done with the promise at this point.
                    // leave it untouched if we're still waiting for an
                    // aborted background fetch that hasn't yet returned.
                    this.#valList[index] = bf.__staleWhileFetching;
                }
            }
            if (allowStale) {
                if (options.status && bf.__staleWhileFetching !== undefined) {
                    options.status.returnedStale = true;
                }
                return bf.__staleWhileFetching;
            }
            else if (bf.__returned === bf) {
                // the promise was handed to a caller, so it must reject
                throw er;
            }
        };
        // executor: invoke fetchMethod, and arrange for an abort to
        // resolve the promise (rather than leaving it pending forever)
        const pcall = (res, rej) => {
            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
            if (fmp && fmp instanceof Promise) {
                fmp.then(v => res(v === undefined ? undefined : v), rej);
            }
            // ignored, we go until we finish, regardless.
            // defer check until we are actually aborting,
            // so fetchMethod can override.
            ac.signal.addEventListener('abort', () => {
                if (!options.ignoreFetchAbort ||
                    options.allowStaleOnFetchAbort) {
                    res(undefined);
                    // when it eventually resolves, update the cache.
                    if (options.allowStaleOnFetchAbort) {
                        res = v => cb(v, true);
                    }
                }
            });
        };
        if (options.status)
            options.status.fetchDispatched = true;
        const p = new Promise(pcall).then(cb, eb);
        // decorate the promise so #isBackgroundFetch can recognize it
        const bf = Object.assign(p, {
            __abortController: ac,
            __staleWhileFetching: v,
            __returned: undefined,
        });
        if (index === undefined) {
            // internal, don't expose status.
            this.set(k, bf, { ...fetchOpts.options, status: undefined });
            index = this.#keyMap.get(k);
        }
        else {
            this.#valList[index] = bf;
        }
        return bf;
    }
#isBackgroundFetch(p) { | |
if (!this.#hasFetchMethod) | |
return false; | |
const b = p; | |
return (!!b && | |
b instanceof Promise && | |
b.hasOwnProperty('__staleWhileFetching') && | |
b.__abortController instanceof AC); | |
} | |
    /**
     * Get a value via the configured fetchMethod, coalescing concurrent
     * requests for the same key into a single in-flight promise.
     *
     * Falls back to plain {@link LRUCache#get} semantics when no
     * fetchMethod was provided. Depending on staleness, forceRefresh, and
     * the allowStale* options, this resolves to the cached value, a stale
     * value served while refreshing, or the result of a new fetch.
     */
    async fetch(k, fetchOptions = {}) {
        const {
        // get options
        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
        // set options
        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
        // fetch exclusive options
        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
        // no fetchMethod configured: behave exactly like get()
        if (!this.#hasFetchMethod) {
            if (status)
                status.fetch = 'get';
            return this.get(k, {
                allowStale,
                updateAgeOnGet,
                noDeleteOnStaleGet,
                status,
            });
        }
        const options = {
            allowStale,
            updateAgeOnGet,
            noDeleteOnStaleGet,
            ttl,
            noDisposeOnSet,
            size,
            sizeCalculation,
            noUpdateTTL,
            noDeleteOnFetchRejection,
            allowStaleOnFetchRejection,
            allowStaleOnFetchAbort,
            ignoreFetchAbort,
            status,
            signal,
        };
        let index = this.#keyMap.get(k);
        if (index === undefined) {
            // not cached at all: start a fresh background fetch
            if (status)
                status.fetch = 'miss';
            const p = this.#backgroundFetch(k, index, options, context);
            return (p.__returned = p);
        }
        else {
            // in cache, maybe already fetching
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                // join the in-flight fetch, or serve its stale value
                const stale = allowStale && v.__staleWhileFetching !== undefined;
                if (status) {
                    status.fetch = 'inflight';
                    if (stale)
                        status.returnedStale = true;
                }
                return stale ? v.__staleWhileFetching : (v.__returned = v);
            }
            // if we force a refresh, that means do NOT serve the cached value,
            // unless we are already in the process of refreshing the cache.
            const isStale = this.#isStale(index);
            if (!forceRefresh && !isStale) {
                if (status)
                    status.fetch = 'hit';
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                if (status)
                    this.#statusTTL(status, index);
                return v;
            }
            // ok, it is stale or a forced refresh, and not already fetching.
            // refresh the cache.
            const p = this.#backgroundFetch(k, index, options, context);
            const hasStale = p.__staleWhileFetching !== undefined;
            const staleVal = hasStale && allowStale;
            if (status) {
                status.fetch = isStale ? 'stale' : 'refresh';
                if (staleVal && isStale)
                    status.returnedStale = true;
            }
            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
        }
    }
async forceFetch(k, fetchOptions = {}) { | |
const v = await this.fetch(k, fetchOptions); | |
if (v === undefined) | |
throw new Error('fetch() returned undefined'); | |
return v; | |
} | |
memo(k, memoOptions = {}) { | |
const memoMethod = this.#memoMethod; | |
if (!memoMethod) { | |
throw new Error('no memoMethod provided to constructor'); | |
} | |
const { context, forceRefresh, ...options } = memoOptions; | |
const v = this.get(k, options); | |
if (!forceRefresh && v !== undefined) | |
return v; | |
const vv = memoMethod(k, v, { | |
options, | |
context, | |
}); | |
this.set(k, vv, options); | |
return vv; | |
} | |
/** | |
* Return a value from the cache. Will update the recency of the cache | |
* entry found. | |
* | |
* If the key is not found, get() will return `undefined`. | |
*/ | |
get(k, getOptions = {}) { | |
const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; | |
const index = this.#keyMap.get(k); | |
if (index !== undefined) { | |
const value = this.#valList[index]; | |
const fetching = this.#isBackgroundFetch(value); | |
if (status) | |
this.#statusTTL(status, index); | |
if (this.#isStale(index)) { | |
if (status) | |
status.get = 'stale'; | |
// delete only if not an in-flight background fetch | |
if (!fetching) { | |
if (!noDeleteOnStaleGet) { | |
this.#delete(k, 'expire'); | |
} | |
if (status && allowStale) | |
status.returnedStale = true; | |
return allowStale ? value : undefined; | |
} | |
else { | |
if (status && | |
allowStale && | |
value.__staleWhileFetching !== undefined) { | |
status.returnedStale = true; | |
} | |
return allowStale ? value.__staleWhileFetching : undefined; | |
} | |
} | |
else { | |
if (status) | |
status.get = 'hit'; | |
// if we're currently fetching it, we don't actually have it yet | |
// it's not stale, which means this isn't a staleWhileRefetching. | |
// If it's not stale, and fetching, AND has a __staleWhileFetching | |
// value, then that means the user fetched with {forceRefresh:true}, | |
// so it's safe to return that value. | |
if (fetching) { | |
return value.__staleWhileFetching; | |
} | |
this.#moveToTail(index); | |
if (updateAgeOnGet) { | |
this.#updateItemAge(index); | |
} | |
return value; | |
} | |
} | |
else if (status) { | |
status.get = 'miss'; | |
} | |
} | |
#connect(p, n) { | |
this.#prev[n] = p; | |
this.#next[p] = n; | |
} | |
#moveToTail(index) { | |
// if tail already, nothing to do | |
// if head, move head to next[index] | |
// else | |
// move next[prev[index]] to next[index] (head has no prev) | |
// move prev[next[index]] to prev[index] | |
// prev[index] = tail | |
// next[tail] = index | |
// tail = index | |
if (index !== this.#tail) { | |
if (index === this.#head) { | |
this.#head = this.#next[index]; | |
} | |
else { | |
this.#connect(this.#prev[index], this.#next[index]); | |
} | |
this.#connect(this.#tail, index); | |
this.#tail = index; | |
} | |
} | |
/** | |
* Deletes a key out of the cache. | |
* | |
* Returns true if the key was deleted, false otherwise. | |
*/ | |
delete(k) { | |
return this.#delete(k, 'delete'); | |
} | |
    /**
     * Shared removal implementation behind delete(), expiry, and fetch
     * failures. `reason` is forwarded to dispose/disposeAfter callbacks.
     * Returns true if an entry was removed.
     */
    #delete(k, reason) {
        let deleted = false;
        if (this.#size !== 0) {
            const index = this.#keyMap.get(k);
            if (index !== undefined) {
                deleted = true;
                if (this.#size === 1) {
                    // removing the only entry: a full reset is simpler
                    this.#clear(reason);
                }
                else {
                    this.#removeItemSize(index);
                    const v = this.#valList[index];
                    // an in-flight background fetch is aborted, not disposed
                    if (this.#isBackgroundFetch(v)) {
                        v.__abortController.abort(new Error('deleted'));
                    }
                    else if (this.#hasDispose || this.#hasDisposeAfter) {
                        if (this.#hasDispose) {
                            this.#dispose?.(v, k, reason);
                        }
                        if (this.#hasDisposeAfter) {
                            // queued; drained after bookkeeping below
                            this.#disposed?.push([v, k, reason]);
                        }
                    }
                    this.#keyMap.delete(k);
                    this.#keyList[index] = undefined;
                    this.#valList[index] = undefined;
                    // unlink `index` from the doubly-linked recency list
                    if (index === this.#tail) {
                        this.#tail = this.#prev[index];
                    }
                    else if (index === this.#head) {
                        this.#head = this.#next[index];
                    }
                    else {
                        const pi = this.#prev[index];
                        this.#next[pi] = this.#next[index];
                        const ni = this.#next[index];
                        this.#prev[ni] = this.#prev[index];
                    }
                    this.#size--;
                    // slot becomes reusable by a later set()
                    this.#free.push(index);
                }
            }
        }
        // drain disposeAfter callbacks with internal state consistent
        if (this.#hasDisposeAfter && this.#disposed?.length) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
        return deleted;
    }
/** | |
* Clear the cache entirely, throwing away all values. | |
*/ | |
clear() { | |
return this.#clear('delete'); | |
} | |
    /**
     * Internal clear: dispose of (or abort) every entry, then reset all
     * index maps, lists, and size bookkeeping to the initial empty state.
     */
    #clear(reason) {
        // walk every occupied slot, including stale ones
        for (const index of this.#rindexes({ allowStale: true })) {
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                // in-flight fetches are aborted rather than disposed
                v.__abortController.abort(new Error('deleted'));
            }
            else {
                const k = this.#keyList[index];
                if (this.#hasDispose) {
                    this.#dispose?.(v, k, reason);
                }
                if (this.#hasDisposeAfter) {
                    // queued; drained after the reset below completes
                    this.#disposed?.push([v, k, reason]);
                }
            }
        }
        this.#keyMap.clear();
        this.#valList.fill(undefined);
        this.#keyList.fill(undefined);
        if (this.#ttls && this.#starts) {
            this.#ttls.fill(0);
            this.#starts.fill(0);
        }
        if (this.#sizes) {
            this.#sizes.fill(0);
        }
        this.#head = 0;
        this.#tail = 0;
        this.#free.length = 0;
        this.#calculatedSize = 0;
        this.#size = 0;
        // drain disposeAfter callbacks with the cache fully reset
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
} | |
exports.LRUCache = LRUCache; | |
//# sourceMappingURL=index.js.map |