1/**
2 * @module LRUCache
3 */
4const perf = typeof performance === 'object' &&
5 performance &&
6 typeof performance.now === 'function'
7 ? performance
8 : Date;
9const warned = new Set();
10/* c8 ignore start */
11const PROCESS = (typeof process === 'object' && !!process ? process : {});
12/* c8 ignore start */
13const emitWarning = (msg, type, code, fn) => {
14 typeof PROCESS.emitWarning === 'function'
15 ? PROCESS.emitWarning(msg, type, code, fn)
16 : console.error(`[${code}] ${type}: ${msg}`);
17};
18let AC = globalThis.AbortController;
19let AS = globalThis.AbortSignal;
20/* c8 ignore start */
21if (typeof AC === 'undefined') {
22 //@ts-ignore
23 AS = class AbortSignal {
24 onabort;
25 _onabort = [];
26 reason;
27 aborted = false;
28 addEventListener(_, fn) {
29 this._onabort.push(fn);
30 }
31 };
32 //@ts-ignore
33 AC = class AbortController {
34 constructor() {
35 warnACPolyfill();
36 }
37 signal = new AS();
38 abort(reason) {
39 if (this.signal.aborted)
40 return;
41 //@ts-ignore
42 this.signal.reason = reason;
43 //@ts-ignore
44 this.signal.aborted = true;
45 //@ts-ignore
46 for (const fn of this.signal._onabort) {
47 fn(reason);
48 }
49 this.signal.onabort?.(reason);
50 }
51 };
52 let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
53 const warnACPolyfill = () => {
54 if (!printACPolyfillWarning)
55 return;
56 printACPolyfillWarning = false;
57 emitWarning('AbortController is not defined. If using lru-cache in ' +
58 'node 14, load an AbortController polyfill from the ' +
59 '`node-abort-controller` package. A minimal polyfill is ' +
60 'provided for use by LRUCache.fetch(), but it should not be ' +
61 'relied upon in other contexts (eg, passing it to other APIs that ' +
62 'use AbortController/AbortSignal might have undesirable effects). ' +
63 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
64 };
65}
66/* c8 ignore stop */
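/*
 * Usage note (added): the polyfill warning above only applies on platforms
 * without a global AbortController (e.g. Node 14). It can be silenced by
 * setting the environment variable checked above before this module is
 * evaluated, for example (assuming a Node.js shell; app.js is arbitrary):
 *
 *   LRU_CACHE_IGNORE_AC_WARNING=1 node app.js
 */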
67const shouldWarn = (code) => !warned.has(code);
68const TYPE = Symbol('type');
69const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
70/* c8 ignore start */
71// This is a little bit ridiculous, tbh.
72// The maximum array length is 2^32-1 or thereabouts on most JS impls.
73// And well before that point, you're caching the entire world, I mean,
74// that's ~32GB of just integers for the next/prev links, plus whatever
75// else to hold that many keys and values. Just filling the memory with
76// zeroes at init time is brutal when you get that big.
77// But why not be complete?
78// Maybe in the future, these limits will have expanded.
79const getUintArray = (max) => !isPosInt(max)
80 ? null
81 : max <= Math.pow(2, 8)
82 ? Uint8Array
83 : max <= Math.pow(2, 16)
84 ? Uint16Array
85 : max <= Math.pow(2, 32)
86 ? Uint32Array
87 : max <= Number.MAX_SAFE_INTEGER
88 ? ZeroArray
89 : null;
90/* c8 ignore stop */
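/*
 * Illustration (added): getUintArray picks the smallest index type able to
 * address `max` slots, so the next/prev link arrays stay compact.
 *
 *   getUintArray(200)        // => Uint8Array   (max <= 2^8)
 *   getUintArray(10_000)     // => Uint16Array  (max <= 2^16)
 *   getUintArray(1_000_000)  // => Uint32Array  (max <= 2^32)
 *   getUintArray(2 ** 40)    // => ZeroArray    (max <= MAX_SAFE_INTEGER)
 *   getUintArray(0)          // => null         (not a positive integer)
 */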
91class ZeroArray extends Array {
92 constructor(size) {
93 super(size);
94 this.fill(0);
95 }
96}
97class Stack {
98 heap;
99 length;
100 // private constructor
101 static #constructing = false;
102 static create(max) {
103 const HeapCls = getUintArray(max);
104 if (!HeapCls)
105 return [];
106 Stack.#constructing = true;
107 const s = new Stack(max, HeapCls);
108 Stack.#constructing = false;
109 return s;
110 }
111 constructor(max, HeapCls) {
112 /* c8 ignore start */
113 if (!Stack.#constructing) {
114 throw new TypeError('instantiate Stack using Stack.create(n)');
115 }
116 /* c8 ignore stop */
117 this.heap = new HeapCls(max);
118 this.length = 0;
119 }
120 push(n) {
121 this.heap[this.length++] = n;
122 }
123 pop() {
124 return this.heap[--this.length];
125 }
126}
127/**
128 * The `LRUCache` class, the thing you're using this module to get.
129 *
130 * All properties from the options object (with the exception of
131 * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
132 * normal public members. (`max` and `maxSize` are read-only getters.)
133 * Changing any of these will alter the defaults for subsequent method calls,
134 * but is otherwise safe.
135 */
136export class LRUCache {
137 // Of the properties coming in from the options, only max and maxSize
138 // really *need* to be protected. The rest can be modified, as they just
139 // set defaults for various methods.
140 #max;
141 #maxSize;
142 #dispose;
143 #disposeAfter;
144 #fetchMethod;
145 /**
146 * {@link LRUCache.OptionsBase.ttl}
147 */
148 ttl;
149 /**
150 * {@link LRUCache.OptionsBase.ttlResolution}
151 */
152 ttlResolution;
153 /**
154 * {@link LRUCache.OptionsBase.ttlAutopurge}
155 */
156 ttlAutopurge;
157 /**
158 * {@link LRUCache.OptionsBase.updateAgeOnGet}
159 */
160 updateAgeOnGet;
161 /**
162 * {@link LRUCache.OptionsBase.updateAgeOnHas}
163 */
164 updateAgeOnHas;
165 /**
166 * {@link LRUCache.OptionsBase.allowStale}
167 */
168 allowStale;
169 /**
170 * {@link LRUCache.OptionsBase.noDisposeOnSet}
171 */
172 noDisposeOnSet;
173 /**
174 * {@link LRUCache.OptionsBase.noUpdateTTL}
175 */
176 noUpdateTTL;
177 /**
178 * {@link LRUCache.OptionsBase.maxEntrySize}
179 */
180 maxEntrySize;
181 /**
182 * {@link LRUCache.OptionsBase.sizeCalculation}
183 */
184 sizeCalculation;
185 /**
186 * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
187 */
188 noDeleteOnFetchRejection;
189 /**
190 * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
191 */
192 noDeleteOnStaleGet;
193 /**
194 * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
195 */
196 allowStaleOnFetchAbort;
197 /**
198 * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
199 */
200 allowStaleOnFetchRejection;
201 /**
202 * {@link LRUCache.OptionsBase.ignoreFetchAbort}
203 */
204 ignoreFetchAbort;
205 // computed properties
206 #size;
207 #calculatedSize;
208 #keyMap;
209 #keyList;
210 #valList;
211 #next;
212 #prev;
213 #head;
214 #tail;
215 #free;
216 #disposed;
217 #sizes;
218 #starts;
219 #ttls;
220 #hasDispose;
221 #hasFetchMethod;
222 #hasDisposeAfter;
223 /**
224 * Do not call this method unless you need to inspect the
225 * inner workings of the cache. If anything returned by this
226 * object is modified in any way, strange breakage may occur.
227 *
228 * These fields are private for a reason!
229 *
230 * @internal
231 */
232 static unsafeExposeInternals(c) {
233 return {
234 // properties
235 starts: c.#starts,
236 ttls: c.#ttls,
237 sizes: c.#sizes,
238 keyMap: c.#keyMap,
239 keyList: c.#keyList,
240 valList: c.#valList,
241 next: c.#next,
242 prev: c.#prev,
243 get head() {
244 return c.#head;
245 },
246 get tail() {
247 return c.#tail;
248 },
249 free: c.#free,
250 // methods
251 isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
252 backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
253 moveToTail: (index) => c.#moveToTail(index),
254 indexes: (options) => c.#indexes(options),
255 rindexes: (options) => c.#rindexes(options),
256 isStale: (index) => c.#isStale(index),
257 };
258 }
259 // Protected read-only members
260 /**
261 * {@link LRUCache.OptionsBase.max} (read-only)
262 */
263 get max() {
264 return this.#max;
265 }
266 /**
267 * {@link LRUCache.OptionsBase.maxSize} (read-only)
268 */
269 get maxSize() {
270 return this.#maxSize;
271 }
272 /**
273 * The total computed size of items in the cache (read-only)
274 */
275 get calculatedSize() {
276 return this.#calculatedSize;
277 }
278 /**
279 * The number of items stored in the cache (read-only)
280 */
281 get size() {
282 return this.#size;
283 }
284 /**
285 * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
286 */
287 get fetchMethod() {
288 return this.#fetchMethod;
289 }
290 /**
291 * {@link LRUCache.OptionsBase.dispose} (read-only)
292 */
293 get dispose() {
294 return this.#dispose;
295 }
296 /**
297 * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
298 */
299 get disposeAfter() {
300 return this.#disposeAfter;
301 }
302 constructor(options) {
303 const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
304 if (max !== 0 && !isPosInt(max)) {
305 throw new TypeError('max option must be a nonnegative integer');
306 }
307 const UintArray = max ? getUintArray(max) : Array;
308 if (!UintArray) {
309 throw new Error('invalid max value: ' + max);
310 }
311 this.#max = max;
312 this.#maxSize = maxSize;
313 this.maxEntrySize = maxEntrySize || this.#maxSize;
314 this.sizeCalculation = sizeCalculation;
315 if (this.sizeCalculation) {
316 if (!this.#maxSize && !this.maxEntrySize) {
317 throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
318 }
319 if (typeof this.sizeCalculation !== 'function') {
320 throw new TypeError('sizeCalculation set to non-function');
321 }
322 }
323 if (fetchMethod !== undefined &&
324 typeof fetchMethod !== 'function') {
325 throw new TypeError('fetchMethod must be a function if specified');
326 }
327 this.#fetchMethod = fetchMethod;
328 this.#hasFetchMethod = !!fetchMethod;
329 this.#keyMap = new Map();
330 this.#keyList = new Array(max).fill(undefined);
331 this.#valList = new Array(max).fill(undefined);
332 this.#next = new UintArray(max);
333 this.#prev = new UintArray(max);
334 this.#head = 0;
335 this.#tail = 0;
336 this.#free = Stack.create(max);
337 this.#size = 0;
338 this.#calculatedSize = 0;
339 if (typeof dispose === 'function') {
340 this.#dispose = dispose;
341 }
342 if (typeof disposeAfter === 'function') {
343 this.#disposeAfter = disposeAfter;
344 this.#disposed = [];
345 }
346 else {
347 this.#disposeAfter = undefined;
348 this.#disposed = undefined;
349 }
350 this.#hasDispose = !!this.#dispose;
351 this.#hasDisposeAfter = !!this.#disposeAfter;
352 this.noDisposeOnSet = !!noDisposeOnSet;
353 this.noUpdateTTL = !!noUpdateTTL;
354 this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
355 this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
356 this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
357 this.ignoreFetchAbort = !!ignoreFetchAbort;
358 // NB: maxEntrySize is set to maxSize if it's set
359 if (this.maxEntrySize !== 0) {
360 if (this.#maxSize !== 0) {
361 if (!isPosInt(this.#maxSize)) {
362 throw new TypeError('maxSize must be a positive integer if specified');
363 }
364 }
365 if (!isPosInt(this.maxEntrySize)) {
366 throw new TypeError('maxEntrySize must be a positive integer if specified');
367 }
368 this.#initializeSizeTracking();
369 }
370 this.allowStale = !!allowStale;
371 this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
372 this.updateAgeOnGet = !!updateAgeOnGet;
373 this.updateAgeOnHas = !!updateAgeOnHas;
374 this.ttlResolution =
375 isPosInt(ttlResolution) || ttlResolution === 0
376 ? ttlResolution
377 : 1;
378 this.ttlAutopurge = !!ttlAutopurge;
379 this.ttl = ttl || 0;
380 if (this.ttl) {
381 if (!isPosInt(this.ttl)) {
382 throw new TypeError('ttl must be a positive integer if specified');
383 }
384 this.#initializeTTLTracking();
385 }
386 // do not allow completely unbounded caches
387 if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
388 throw new TypeError('At least one of max, maxSize, or ttl is required');
389 }
390 if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
391 const code = 'LRU_CACHE_UNBOUNDED';
392 if (shouldWarn(code)) {
393 warned.add(code);
394 const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
395 'result in unbounded memory consumption.';
396 emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
397 }
398 }
399 }
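/*
 * Example (added; option values are arbitrary): at least one of `max`,
 * `maxSize`, or `ttl` must be set, or the constructor throws.
 *
 *   const cache = new LRUCache({
 *     max: 500,                           // hard cap on entry count
 *     maxSize: 5000,                      // cap on total calculated size
 *     sizeCalculation: (value, key) => 1, // size of each entry
 *     ttl: 1000 * 60 * 5,                 // entries become stale after 5 minutes
 *     allowStale: false,
 *     updateAgeOnGet: false,
 *   })
 */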
400 /**
401 * Return the remaining TTL time for a given entry key
402 */
403 getRemainingTTL(key) {
404 return this.#keyMap.has(key) ? Infinity : 0;
405 }
406 #initializeTTLTracking() {
407 const ttls = new ZeroArray(this.#max);
408 const starts = new ZeroArray(this.#max);
409 this.#ttls = ttls;
410 this.#starts = starts;
411 this.#setItemTTL = (index, ttl, start = perf.now()) => {
412 starts[index] = ttl !== 0 ? start : 0;
413 ttls[index] = ttl;
414 if (ttl !== 0 && this.ttlAutopurge) {
415 const t = setTimeout(() => {
416 if (this.#isStale(index)) {
417 this.delete(this.#keyList[index]);
418 }
419 }, ttl + 1);
420 // unref() not supported on all platforms
421 /* c8 ignore start */
422 if (t.unref) {
423 t.unref();
424 }
425 /* c8 ignore stop */
426 }
427 };
428 this.#updateItemAge = index => {
429 starts[index] = ttls[index] !== 0 ? perf.now() : 0;
430 };
431 this.#statusTTL = (status, index) => {
432 if (ttls[index]) {
433 const ttl = ttls[index];
434 const start = starts[index];
435 status.ttl = ttl;
436 status.start = start;
437 status.now = cachedNow || getNow();
438 const age = status.now - start;
439 status.remainingTTL = ttl - age;
440 }
441 };
442 // debounce calls to perf.now() by ttlResolution ms so we're not hitting
443 // that costly call repeatedly.
444 let cachedNow = 0;
445 const getNow = () => {
446 const n = perf.now();
447 if (this.ttlResolution > 0) {
448 cachedNow = n;
449 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
450 // not available on all platforms
451 /* c8 ignore start */
452 if (t.unref) {
453 t.unref();
454 }
455 /* c8 ignore stop */
456 }
457 return n;
458 };
459 this.getRemainingTTL = key => {
460 const index = this.#keyMap.get(key);
461 if (index === undefined) {
462 return 0;
463 }
464 const ttl = ttls[index];
465 const start = starts[index];
466 if (ttl === 0 || start === 0) {
467 return Infinity;
468 }
469 const age = (cachedNow || getNow()) - start;
470 return ttl - age;
471 };
472 this.#isStale = index => {
473 return (ttls[index] !== 0 &&
474 starts[index] !== 0 &&
475 (cachedNow || getNow()) - starts[index] > ttls[index]);
476 };
477 }
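/*
 * Sketch (added): how the TTL tracking above typically behaves; timings are
 * illustrative only.
 *
 *   const c = new LRUCache({ max: 10, ttl: 1000 })
 *   c.set('a', 1)                  // uses the default 1000ms ttl
 *   c.set('b', 2, { ttl: 50 })     // per-entry override
 *   c.getRemainingTTL('a')         // roughly 1000, counting down
 *   // ~50ms later, 'b' is stale: get('b') returns undefined and deletes it,
 *   // unless allowStale / noDeleteOnStaleGet are set.
 */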
478 // conditionally set private methods related to TTL
479 #updateItemAge = () => { };
480 #statusTTL = () => { };
481 #setItemTTL = () => { };
482 /* c8 ignore stop */
483 #isStale = () => false;
484 #initializeSizeTracking() {
485 const sizes = new ZeroArray(this.#max);
486 this.#calculatedSize = 0;
487 this.#sizes = sizes;
488 this.#removeItemSize = index => {
489 this.#calculatedSize -= sizes[index];
490 sizes[index] = 0;
491 };
492 this.#requireSize = (k, v, size, sizeCalculation) => {
493 // provisionally accept background fetches.
494 // actual value size will be checked when they return.
495 if (this.#isBackgroundFetch(v)) {
496 return 0;
497 }
498 if (!isPosInt(size)) {
499 if (sizeCalculation) {
500 if (typeof sizeCalculation !== 'function') {
501 throw new TypeError('sizeCalculation must be a function');
502 }
503 size = sizeCalculation(v, k);
504 if (!isPosInt(size)) {
505 throw new TypeError('sizeCalculation return invalid (expect positive integer)');
506 }
507 }
508 else {
509 throw new TypeError('invalid size value (must be positive integer). ' +
510 'When maxSize or maxEntrySize is used, sizeCalculation ' +
511 'or size must be set.');
512 }
513 }
514 return size;
515 };
516 this.#addItemSize = (index, size, status) => {
517 sizes[index] = size;
518 if (this.#maxSize) {
519 const maxSize = this.#maxSize - sizes[index];
520 while (this.#calculatedSize > maxSize) {
521 this.#evict(true);
522 }
523 }
524 this.#calculatedSize += sizes[index];
525 if (status) {
526 status.entrySize = size;
527 status.totalCalculatedSize = this.#calculatedSize;
528 }
529 };
530 }
531 #removeItemSize = _i => { };
532 #addItemSize = (_i, _s, _st) => { };
533 #requireSize = (_k, _v, size, sizeCalculation) => {
534 if (size || sizeCalculation) {
535 throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
536 }
537 return 0;
538 };
539 *#indexes({ allowStale = this.allowStale } = {}) {
540 if (this.#size) {
541 for (let i = this.#tail; true;) {
542 if (!this.#isValidIndex(i)) {
543 break;
544 }
545 if (allowStale || !this.#isStale(i)) {
546 yield i;
547 }
548 if (i === this.#head) {
549 break;
550 }
551 else {
552 i = this.#prev[i];
553 }
554 }
555 }
556 }
557 *#rindexes({ allowStale = this.allowStale } = {}) {
558 if (this.#size) {
559 for (let i = this.#head; true;) {
560 if (!this.#isValidIndex(i)) {
561 break;
562 }
563 if (allowStale || !this.#isStale(i)) {
564 yield i;
565 }
566 if (i === this.#tail) {
567 break;
568 }
569 else {
570 i = this.#next[i];
571 }
572 }
573 }
574 }
575 #isValidIndex(index) {
576 return (index !== undefined &&
577 this.#keyMap.get(this.#keyList[index]) === index);
578 }
579 /**
580 * Return a generator yielding `[key, value]` pairs,
581 * in order from most recently used to least recently used.
582 */
583 *entries() {
584 for (const i of this.#indexes()) {
585 if (this.#valList[i] !== undefined &&
586 this.#keyList[i] !== undefined &&
587 !this.#isBackgroundFetch(this.#valList[i])) {
588 yield [this.#keyList[i], this.#valList[i]];
589 }
590 }
591 }
592 /**
593 * Inverse order version of {@link LRUCache.entries}
594 *
595 * Return a generator yielding `[key, value]` pairs,
596 * in order from least recently used to most recently used.
597 */
598 *rentries() {
599 for (const i of this.#rindexes()) {
600 if (this.#valList[i] !== undefined &&
601 this.#keyList[i] !== undefined &&
602 !this.#isBackgroundFetch(this.#valList[i])) {
603 yield [this.#keyList[i], this.#valList[i]];
604 }
605 }
606 }
607 /**
608 * Return a generator yielding the keys in the cache,
609 * in order from most recently used to least recently used.
610 */
611 *keys() {
612 for (const i of this.#indexes()) {
613 const k = this.#keyList[i];
614 if (k !== undefined &&
615 !this.#isBackgroundFetch(this.#valList[i])) {
616 yield k;
617 }
618 }
619 }
620 /**
621 * Inverse order version of {@link LRUCache.keys}
622 *
623 * Return a generator yielding the keys in the cache,
624 * in order from least recently used to most recently used.
625 */
626 *rkeys() {
627 for (const i of this.#rindexes()) {
628 const k = this.#keyList[i];
629 if (k !== undefined &&
630 !this.#isBackgroundFetch(this.#valList[i])) {
631 yield k;
632 }
633 }
634 }
635 /**
636 * Return a generator yielding the values in the cache,
637 * in order from most recently used to least recently used.
638 */
639 *values() {
640 for (const i of this.#indexes()) {
641 const v = this.#valList[i];
642 if (v !== undefined &&
643 !this.#isBackgroundFetch(this.#valList[i])) {
644 yield this.#valList[i];
645 }
646 }
647 }
648 /**
649 * Inverse order version of {@link LRUCache.values}
650 *
651 * Return a generator yielding the values in the cache,
652 * in order from least recently used to most recently used.
653 */
654 *rvalues() {
655 for (const i of this.#rindexes()) {
656 const v = this.#valList[i];
657 if (v !== undefined &&
658 !this.#isBackgroundFetch(this.#valList[i])) {
659 yield this.#valList[i];
660 }
661 }
662 }
663 /**
664 * Iterating over the cache itself yields the same results as
665 * {@link LRUCache.entries}
666 */
667 [Symbol.iterator]() {
668 return this.entries();
669 }
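/*
 * Example (added): iteration yields live entries from most to least recently
 * used; the r-prefixed generators reverse that order.
 *
 *   const c = new LRUCache({ max: 3 })
 *   c.set('a', 1); c.set('b', 2); c.set('c', 3)
 *   c.get('a')                        // 'a' becomes most recently used
 *   const mru = [...c.keys()]         // ['a', 'c', 'b']
 *   const lru = [...c.rkeys()]        // ['b', 'c', 'a']
 *   for (const [key, value] of c) {}  // same order as c.entries()
 */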
670 /**
671 * Find a value for which the supplied fn method returns a truthy value,
672 * similar to Array.find(). fn is called as fn(value, key, cache).
673 */
674 find(fn, getOptions = {}) {
675 for (const i of this.#indexes()) {
676 const v = this.#valList[i];
677 const value = this.#isBackgroundFetch(v)
678 ? v.__staleWhileFetching
679 : v;
680 if (value === undefined)
681 continue;
682 if (fn(value, this.#keyList[i], this)) {
683 return this.get(this.#keyList[i], getOptions);
684 }
685 }
686 }
687 /**
688 * Call the supplied function on each item in the cache, in order from
689 * most recently used to least recently used. fn is called as
690 * fn(value, key, cache). Does not update age or recency of use.
691 * Does not iterate over stale values.
692 */
693 forEach(fn, thisp = this) {
694 for (const i of this.#indexes()) {
695 const v = this.#valList[i];
696 const value = this.#isBackgroundFetch(v)
697 ? v.__staleWhileFetching
698 : v;
699 if (value === undefined)
700 continue;
701 fn.call(thisp, value, this.#keyList[i], this);
702 }
703 }
704 /**
705 * The same as {@link LRUCache.forEach} but items are iterated over in
706 * reverse order. (ie, less recently used items are iterated over first.)
707 */
708 rforEach(fn, thisp = this) {
709 for (const i of this.#rindexes()) {
710 const v = this.#valList[i];
711 const value = this.#isBackgroundFetch(v)
712 ? v.__staleWhileFetching
713 : v;
714 if (value === undefined)
715 continue;
716 fn.call(thisp, value, this.#keyList[i], this);
717 }
718 }
719 /**
720 * Delete any stale entries. Returns true if anything was removed,
721 * false otherwise.
722 */
723 purgeStale() {
724 let deleted = false;
725 for (const i of this.#rindexes({ allowStale: true })) {
726 if (this.#isStale(i)) {
727 this.delete(this.#keyList[i]);
728 deleted = true;
729 }
730 }
731 return deleted;
732 }
733 /**
734 * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
735 * passed to cache.load()
736 */
737 dump() {
738 const arr = [];
739 for (const i of this.#indexes({ allowStale: true })) {
740 const key = this.#keyList[i];
741 const v = this.#valList[i];
742 const value = this.#isBackgroundFetch(v)
743 ? v.__staleWhileFetching
744 : v;
745 if (value === undefined || key === undefined)
746 continue;
747 const entry = { value };
748 if (this.#ttls && this.#starts) {
749 entry.ttl = this.#ttls[i];
750 // always dump the start relative to a portable timestamp
751 // it's ok for this to be a bit slow, it's a rare operation.
752 const age = perf.now() - this.#starts[i];
753 entry.start = Math.floor(Date.now() - age);
754 }
755 if (this.#sizes) {
756 entry.size = this.#sizes[i];
757 }
758 arr.unshift([key, entry]);
759 }
760 return arr;
761 }
762 /**
763 * Reset the cache and load the given entries, in the order listed.
764 * Note that the shape of the resulting cache may be different if the
765 * same options are not used in both caches.
766 */
767 load(arr) {
768 this.clear();
769 for (const [key, entry] of arr) {
770 if (entry.start) {
771 // entry.start is a portable timestamp, but we may be using
772 // node's performance.now(), so calculate the offset, so that
773 // we get the intended remaining TTL, no matter how long it's
774 // been on ice.
775 //
776 // it's ok for this to be a bit slow, it's a rare operation.
777 const age = Date.now() - entry.start;
778 entry.start = perf.now() - age;
779 }
780 this.set(key, entry.value, entry);
781 }
782 }
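/*
 * Sketch (added, assuming Node's fs/promises and JSON-serializable values):
 * dump() and load() can round-trip a cache across process restarts. Dumped
 * start times are portable Date.now()-based, so TTLs keep counting down
 * while the dump is "on ice".
 *
 *   import { writeFile, readFile } from 'node:fs/promises'
 *   const dumped = cache.dump()   // [[key, { value, ttl?, start?, size? }], ...]
 *   await writeFile('cache.json', JSON.stringify(dumped))
 *   // ...later, in a fresh process:
 *   const fresh = new LRUCache({ max: 500 })
 *   fresh.load(JSON.parse(await readFile('cache.json', 'utf8')))
 */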
783 /**
784 * Add a value to the cache.
785 *
786 * Note: if `undefined` is specified as a value, this is an alias for
787 * {@link LRUCache#delete}
788 */
789 set(k, v, setOptions = {}) {
790 if (v === undefined) {
791 this.delete(k);
792 return this;
793 }
794 const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
795 let { noUpdateTTL = this.noUpdateTTL } = setOptions;
796 const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
797 // if the item doesn't fit, don't do anything
798 // NB: maxEntrySize set to maxSize by default
799 if (this.maxEntrySize && size > this.maxEntrySize) {
800 if (status) {
801 status.set = 'miss';
802 status.maxEntrySizeExceeded = true;
803 }
804 // have to delete, in case something is there already.
805 this.delete(k);
806 return this;
807 }
808 let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
809 if (index === undefined) {
810 // addition
811 index = (this.#size === 0
812 ? this.#tail
813 : this.#free.length !== 0
814 ? this.#free.pop()
815 : this.#size === this.#max
816 ? this.#evict(false)
817 : this.#size);
818 this.#keyList[index] = k;
819 this.#valList[index] = v;
820 this.#keyMap.set(k, index);
821 this.#next[this.#tail] = index;
822 this.#prev[index] = this.#tail;
823 this.#tail = index;
824 this.#size++;
825 this.#addItemSize(index, size, status);
826 if (status)
827 status.set = 'add';
828 noUpdateTTL = false;
829 }
830 else {
831 // update
832 this.#moveToTail(index);
833 const oldVal = this.#valList[index];
834 if (v !== oldVal) {
835 if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
836 oldVal.__abortController.abort(new Error('replaced'));
837 }
838 else if (!noDisposeOnSet) {
839 if (this.#hasDispose) {
840 this.#dispose?.(oldVal, k, 'set');
841 }
842 if (this.#hasDisposeAfter) {
843 this.#disposed?.push([oldVal, k, 'set']);
844 }
845 }
846 this.#removeItemSize(index);
847 this.#addItemSize(index, size, status);
848 this.#valList[index] = v;
849 if (status) {
850 status.set = 'replace';
851 const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
852 ? oldVal.__staleWhileFetching
853 : oldVal;
854 if (oldValue !== undefined)
855 status.oldValue = oldValue;
856 }
857 }
858 else if (status) {
859 status.set = 'update';
860 }
861 }
862 if (ttl !== 0 && !this.#ttls) {
863 this.#initializeTTLTracking();
864 }
865 if (this.#ttls) {
866 if (!noUpdateTTL) {
867 this.#setItemTTL(index, ttl, start);
868 }
869 if (status)
870 this.#statusTTL(status, index);
871 }
872 if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
873 const dt = this.#disposed;
874 let task;
875 while ((task = dt?.shift())) {
876 this.#disposeAfter?.(...task);
877 }
878 }
879 return this;
880 }
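/*
 * Example (added; key and value are hypothetical): per-call set() options
 * override the constructor defaults for that entry only.
 *
 *   cache.set('session:123', payload, {
 *     ttl: 60_000,          // this entry expires after one minute
 *     size: 250,            // explicit size (requires maxSize or maxEntrySize)
 *     noDisposeOnSet: true, // skip dispose() for the value being replaced
 *   })
 *   cache.set('session:123', undefined)  // same as cache.delete('session:123')
 */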
881 /**
882 * Evict the least recently used item, returning its value or
883 * `undefined` if cache is empty.
884 */
885 pop() {
886 try {
887 while (this.#size) {
888 const val = this.#valList[this.#head];
889 this.#evict(true);
890 if (this.#isBackgroundFetch(val)) {
891 if (val.__staleWhileFetching) {
892 return val.__staleWhileFetching;
893 }
894 }
895 else if (val !== undefined) {
896 return val;
897 }
898 }
899 }
900 finally {
901 if (this.#hasDisposeAfter && this.#disposed) {
902 const dt = this.#disposed;
903 let task;
904 while ((task = dt?.shift())) {
905 this.#disposeAfter?.(...task);
906 }
907 }
908 }
909 }
910 #evict(free) {
911 const head = this.#head;
912 const k = this.#keyList[head];
913 const v = this.#valList[head];
914 if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
915 v.__abortController.abort(new Error('evicted'));
916 }
917 else if (this.#hasDispose || this.#hasDisposeAfter) {
918 if (this.#hasDispose) {
919 this.#dispose?.(v, k, 'evict');
920 }
921 if (this.#hasDisposeAfter) {
922 this.#disposed?.push([v, k, 'evict']);
923 }
924 }
925 this.#removeItemSize(head);
926 // if we aren't about to use the index, then null these out
927 if (free) {
928 this.#keyList[head] = undefined;
929 this.#valList[head] = undefined;
930 this.#free.push(head);
931 }
932 if (this.#size === 1) {
933 this.#head = this.#tail = 0;
934 this.#free.length = 0;
935 }
936 else {
937 this.#head = this.#next[head];
938 }
939 this.#keyMap.delete(k);
940 this.#size--;
941 return head;
942 }
943 /**
944 * Check if a key is in the cache, without updating the recency of use.
945 * Will return false if the item is stale, even though it is technically
946 * in the cache.
947 *
948 * Will not update item age unless
949 * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
950 */
951 has(k, hasOptions = {}) {
952 const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
953 const index = this.#keyMap.get(k);
954 if (index !== undefined) {
955 const v = this.#valList[index];
956 if (this.#isBackgroundFetch(v) &&
957 v.__staleWhileFetching === undefined) {
958 return false;
959 }
960 if (!this.#isStale(index)) {
961 if (updateAgeOnHas) {
962 this.#updateItemAge(index);
963 }
964 if (status) {
965 status.has = 'hit';
966 this.#statusTTL(status, index);
967 }
968 return true;
969 }
970 else if (status) {
971 status.has = 'stale';
972 this.#statusTTL(status, index);
973 }
974 }
975 else if (status) {
976 status.has = 'miss';
977 }
978 return false;
979 }
980 /**
981 * Like {@link LRUCache#get} but doesn't update recency or delete stale
982 * items.
983 *
984 * Returns `undefined` if the item is stale, unless
985 * {@link LRUCache.OptionsBase.allowStale} is set.
986 */
987 peek(k, peekOptions = {}) {
988 const { allowStale = this.allowStale } = peekOptions;
989 const index = this.#keyMap.get(k);
990 if (index !== undefined &&
991 (allowStale || !this.#isStale(index))) {
992 const v = this.#valList[index];
993 // either stale and allowed, or forcing a refresh of non-stale value
994 return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
995 }
996 }
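/*
 * Example (added): has() and peek() check the cache without promoting the
 * entry; get() updates recency and (by default) deletes stale entries.
 *
 *   cache.set('k', 'v')
 *   cache.has('k')   // true; only refreshes TTL age if updateAgeOnHas is set
 *   cache.peek('k')  // 'v'; recency unchanged, stale entries left in place
 *   cache.get('k')   // 'v', and 'k' becomes the most recently used entry
 */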
997 #backgroundFetch(k, index, options, context) {
998 const v = index === undefined ? undefined : this.#valList[index];
999 if (this.#isBackgroundFetch(v)) {
1000 return v;
1001 }
1002 const ac = new AC();
1003 const { signal } = options;
1004 // when/if our AC signals, then stop listening to theirs.
1005 signal?.addEventListener('abort', () => ac.abort(signal.reason), {
1006 signal: ac.signal,
1007 });
1008 const fetchOpts = {
1009 signal: ac.signal,
1010 options,
1011 context,
1012 };
1013 const cb = (v, updateCache = false) => {
1014 const { aborted } = ac.signal;
1015 const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
1016 if (options.status) {
1017 if (aborted && !updateCache) {
1018 options.status.fetchAborted = true;
1019 options.status.fetchError = ac.signal.reason;
1020 if (ignoreAbort)
1021 options.status.fetchAbortIgnored = true;
1022 }
1023 else {
1024 options.status.fetchResolved = true;
1025 }
1026 }
1027 if (aborted && !ignoreAbort && !updateCache) {
1028 return fetchFail(ac.signal.reason);
1029 }
1030 // either we didn't abort, and are still here, or we did, and ignored
1031 const bf = p;
1032 if (this.#valList[index] === p) {
1033 if (v === undefined) {
1034 if (bf.__staleWhileFetching) {
1035 this.#valList[index] = bf.__staleWhileFetching;
1036 }
1037 else {
1038 this.delete(k);
1039 }
1040 }
1041 else {
1042 if (options.status)
1043 options.status.fetchUpdated = true;
1044 this.set(k, v, fetchOpts.options);
1045 }
1046 }
1047 return v;
1048 };
1049 const eb = (er) => {
1050 if (options.status) {
1051 options.status.fetchRejected = true;
1052 options.status.fetchError = er;
1053 }
1054 return fetchFail(er);
1055 };
1056 const fetchFail = (er) => {
1057 const { aborted } = ac.signal;
1058 const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
1059 const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
1060 const noDelete = allowStale || options.noDeleteOnFetchRejection;
1061 const bf = p;
1062 if (this.#valList[index] === p) {
1063 // if we allow stale on fetch rejections, then we need to ensure that
1064 // the stale value is not removed from the cache when the fetch fails.
1065 const del = !noDelete || bf.__staleWhileFetching === undefined;
1066 if (del) {
1067 this.delete(k);
1068 }
1069 else if (!allowStaleAborted) {
1070 // still replace the *promise* with the stale value,
1071 // since we are done with the promise at this point.
1072 // leave it untouched if we're still waiting for an
1073 // aborted background fetch that hasn't yet returned.
1074 this.#valList[index] = bf.__staleWhileFetching;
1075 }
1076 }
1077 if (allowStale) {
1078 if (options.status && bf.__staleWhileFetching !== undefined) {
1079 options.status.returnedStale = true;
1080 }
1081 return bf.__staleWhileFetching;
1082 }
1083 else if (bf.__returned === bf) {
1084 throw er;
1085 }
1086 };
1087 const pcall = (res, rej) => {
1088 const fmp = this.#fetchMethod?.(k, v, fetchOpts);
1089 if (fmp && fmp instanceof Promise) {
1090 fmp.then(v => res(v), rej);
1091 }
1092 // ignored, we go until we finish, regardless.
1093 // defer check until we are actually aborting,
1094 // so fetchMethod can override.
1095 ac.signal.addEventListener('abort', () => {
1096 if (!options.ignoreFetchAbort ||
1097 options.allowStaleOnFetchAbort) {
1098 res();
1099 // when it eventually resolves, update the cache.
1100 if (options.allowStaleOnFetchAbort) {
1101 res = v => cb(v, true);
1102 }
1103 }
1104 });
1105 };
1106 if (options.status)
1107 options.status.fetchDispatched = true;
1108 const p = new Promise(pcall).then(cb, eb);
1109 const bf = Object.assign(p, {
1110 __abortController: ac,
1111 __staleWhileFetching: v,
1112 __returned: undefined,
1113 });
1114 if (index === undefined) {
1115 // internal, don't expose status.
1116 this.set(k, bf, { ...fetchOpts.options, status: undefined });
1117 index = this.#keyMap.get(k);
1118 }
1119 else {
1120 this.#valList[index] = bf;
1121 }
1122 return bf;
1123 }
1124 #isBackgroundFetch(p) {
1125 if (!this.#hasFetchMethod)
1126 return false;
1127 const b = p;
1128 return (!!b &&
1129 b instanceof Promise &&
1130 b.hasOwnProperty('__staleWhileFetching') &&
1131 b.__abortController instanceof AC);
1132 }
1133 async fetch(k, fetchOptions = {}) {
1134 const {
1135 // get options
1136 allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
1137 // set options
1138 ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
1139 // fetch exclusive options
1140 noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
1141 if (!this.#hasFetchMethod) {
1142 if (status)
1143 status.fetch = 'get';
1144 return this.get(k, {
1145 allowStale,
1146 updateAgeOnGet,
1147 noDeleteOnStaleGet,
1148 status,
1149 });
1150 }
1151 const options = {
1152 allowStale,
1153 updateAgeOnGet,
1154 noDeleteOnStaleGet,
1155 ttl,
1156 noDisposeOnSet,
1157 size,
1158 sizeCalculation,
1159 noUpdateTTL,
1160 noDeleteOnFetchRejection,
1161 allowStaleOnFetchRejection,
1162 allowStaleOnFetchAbort,
1163 ignoreFetchAbort,
1164 status,
1165 signal,
1166 };
1167 let index = this.#keyMap.get(k);
1168 if (index === undefined) {
1169 if (status)
1170 status.fetch = 'miss';
1171 const p = this.#backgroundFetch(k, index, options, context);
1172 return (p.__returned = p);
1173 }
1174 else {
1175 // in cache, maybe already fetching
1176 const v = this.#valList[index];
1177 if (this.#isBackgroundFetch(v)) {
1178 const stale = allowStale && v.__staleWhileFetching !== undefined;
1179 if (status) {
1180 status.fetch = 'inflight';
1181 if (stale)
1182 status.returnedStale = true;
1183 }
1184 return stale ? v.__staleWhileFetching : (v.__returned = v);
1185 }
1186 // if we force a refresh, that means do NOT serve the cached value,
1187 // unless we are already in the process of refreshing the cache.
1188 const isStale = this.#isStale(index);
1189 if (!forceRefresh && !isStale) {
1190 if (status)
1191 status.fetch = 'hit';
1192 this.#moveToTail(index);
1193 if (updateAgeOnGet) {
1194 this.#updateItemAge(index);
1195 }
1196 if (status)
1197 this.#statusTTL(status, index);
1198 return v;
1199 }
1200 // ok, it is stale or a forced refresh, and not already fetching.
1201 // refresh the cache.
1202 const p = this.#backgroundFetch(k, index, options, context);
1203 const hasStale = p.__staleWhileFetching !== undefined;
1204 const staleVal = hasStale && allowStale;
1205 if (status) {
1206 status.fetch = isStale ? 'stale' : 'refresh';
1207 if (staleVal && isStale)
1208 status.returnedStale = true;
1209 }
1210 return staleVal ? p.__staleWhileFetching : (p.__returned = p);
1211 }
1212 }
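/*
 * Sketch (added, assuming a global fetch() and an illustrative URL): when a
 * fetchMethod is provided, cache.fetch() coalesces concurrent requests for
 * the same key and can serve a stale value while revalidating.
 *
 *   const cache = new LRUCache({
 *     max: 100,
 *     ttl: 30_000,
 *     allowStale: true,
 *     fetchMethod: async (key, staleValue, { signal, options, context }) => {
 *       const res = await fetch(`https://api.example.com/items/${key}`, { signal })
 *       if (!res.ok) throw new Error(`upstream ${res.status}`)
 *       return res.json()
 *     },
 *   })
 *   const item = await cache.fetch('42')   // miss: runs fetchMethod
 *   const again = await cache.fetch('42')  // hit, or joins the in-flight fetch
 */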
1213 /**
1214 * Return a value from the cache. Will update the recency of the cache
1215 * entry found.
1216 *
1217 * If the key is not found, get() will return `undefined`.
1218 */
1219 get(k, getOptions = {}) {
1220 const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
1221 const index = this.#keyMap.get(k);
1222 if (index !== undefined) {
1223 const value = this.#valList[index];
1224 const fetching = this.#isBackgroundFetch(value);
1225 if (status)
1226 this.#statusTTL(status, index);
1227 if (this.#isStale(index)) {
1228 if (status)
1229 status.get = 'stale';
1230 // delete only if not an in-flight background fetch
1231 if (!fetching) {
1232 if (!noDeleteOnStaleGet) {
1233 this.delete(k);
1234 }
1235 if (status && allowStale)
1236 status.returnedStale = true;
1237 return allowStale ? value : undefined;
1238 }
1239 else {
1240 if (status &&
1241 allowStale &&
1242 value.__staleWhileFetching !== undefined) {
1243 status.returnedStale = true;
1244 }
1245 return allowStale ? value.__staleWhileFetching : undefined;
1246 }
1247 }
1248 else {
1249 if (status)
1250 status.get = 'hit';
1251 // if we're currently fetching it, we don't actually have it yet
1252 // it's not stale, which means this isn't a staleWhileRefetching.
1253 // If it's not stale, and fetching, AND has a __staleWhileFetching
1254 // value, then that means the user fetched with {forceRefresh:true},
1255 // so it's safe to return that value.
1256 if (fetching) {
1257 return value.__staleWhileFetching;
1258 }
1259 this.#moveToTail(index);
1260 if (updateAgeOnGet) {
1261 this.#updateItemAge(index);
1262 }
1263 return value;
1264 }
1265 }
1266 else if (status) {
1267 status.get = 'miss';
1268 }
1269 }
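/*
 * Example (added): a `status` object passed to get() (and to has/set/fetch)
 * is populated with what happened, without changing behavior.
 *
 *   const status = {}
 *   const value = cache.get('k', { status })
 *   status.get           // 'hit', 'stale', or 'miss'
 *   status.remainingTTL  // present when TTL tracking is active
 */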
1270 #connect(p, n) {
1271 this.#prev[n] = p;
1272 this.#next[p] = n;
1273 }
1274 #moveToTail(index) {
1275 // if tail already, nothing to do
1276 // if head, move head to next[index]
1277 // else
1278 // move next[prev[index]] to next[index] (head has no prev)
1279 // move prev[next[index]] to prev[index]
1280 // prev[index] = tail
1281 // next[tail] = index
1282 // tail = index
1283 if (index !== this.#tail) {
1284 if (index === this.#head) {
1285 this.#head = this.#next[index];
1286 }
1287 else {
1288 this.#connect(this.#prev[index], this.#next[index]);
1289 }
1290 this.#connect(this.#tail, index);
1291 this.#tail = index;
1292 }
1293 }
1294 /**
1295 * Deletes a key out of the cache.
1296 * Returns true if the key was deleted, false otherwise.
1297 */
1298 delete(k) {
1299 let deleted = false;
1300 if (this.#size !== 0) {
1301 const index = this.#keyMap.get(k);
1302 if (index !== undefined) {
1303 deleted = true;
1304 if (this.#size === 1) {
1305 this.clear();
1306 }
1307 else {
1308 this.#removeItemSize(index);
1309 const v = this.#valList[index];
1310 if (this.#isBackgroundFetch(v)) {
1311 v.__abortController.abort(new Error('deleted'));
1312 }
1313 else if (this.#hasDispose || this.#hasDisposeAfter) {
1314 if (this.#hasDispose) {
1315 this.#dispose?.(v, k, 'delete');
1316 }
1317 if (this.#hasDisposeAfter) {
1318 this.#disposed?.push([v, k, 'delete']);
1319 }
1320 }
1321 this.#keyMap.delete(k);
1322 this.#keyList[index] = undefined;
1323 this.#valList[index] = undefined;
1324 if (index === this.#tail) {
1325 this.#tail = this.#prev[index];
1326 }
1327 else if (index === this.#head) {
1328 this.#head = this.#next[index];
1329 }
1330 else {
1331 this.#next[this.#prev[index]] = this.#next[index];
1332 this.#prev[this.#next[index]] = this.#prev[index];
1333 }
1334 this.#size--;
1335 this.#free.push(index);
1336 }
1337 }
1338 }
1339 if (this.#hasDisposeAfter && this.#disposed?.length) {
1340 const dt = this.#disposed;
1341 let task;
1342 while ((task = dt?.shift())) {
1343 this.#disposeAfter?.(...task);
1344 }
1345 }
1346 return deleted;
1347 }
1348 /**
1349 * Clear the cache entirely, throwing away all values.
1350 */
1351 clear() {
1352 for (const index of this.#rindexes({ allowStale: true })) {
1353 const v = this.#valList[index];
1354 if (this.#isBackgroundFetch(v)) {
1355 v.__abortController.abort(new Error('deleted'));
1356 }
1357 else {
1358 const k = this.#keyList[index];
1359 if (this.#hasDispose) {
1360 this.#dispose?.(v, k, 'delete');
1361 }
1362 if (this.#hasDisposeAfter) {
1363 this.#disposed?.push([v, k, 'delete']);
1364 }
1365 }
1366 }
1367 this.#keyMap.clear();
1368 this.#valList.fill(undefined);
1369 this.#keyList.fill(undefined);
1370 if (this.#ttls && this.#starts) {
1371 this.#ttls.fill(0);
1372 this.#starts.fill(0);
1373 }
1374 if (this.#sizes) {
1375 this.#sizes.fill(0);
1376 }
1377 this.#head = 0;
1378 this.#tail = 0;
1379 this.#free.length = 0;
1380 this.#calculatedSize = 0;
1381 this.#size = 0;
1382 if (this.#hasDisposeAfter && this.#disposed) {
1383 const dt = this.#disposed;
1384 let task;
1385 while ((task = dt?.shift())) {
1386 this.#disposeAfter?.(...task);
1387 }
1388 }
1389 }
1390}
1391//# sourceMappingURL=index.js.map