/**
 * @module LRUCache
 */
declare const TYPE: unique symbol;
export type PosInt = number & {
    [TYPE]: 'Positive Integer';
};
export type Index = number & {
    [TYPE]: 'LRUCache Index';
};
export type UintArray = Uint8Array | Uint16Array | Uint32Array;
export type NumberArray = UintArray | number[];
declare class ZeroArray extends Array<number> {
    constructor(size: number);
}
export type { ZeroArray };
export type { Stack };
export type StackLike = Stack | Index[];
declare class Stack {
    #private;
    heap: NumberArray;
    length: number;
    static create(max: number): StackLike;
    constructor(max: number, HeapCls: {
        new (n: number): NumberArray;
    });
    push(n: Index): void;
    pop(): Index;
}
/**
 * Promise representing an in-progress {@link LRUCache#fetch} call
 */
export type BackgroundFetch<V> = Promise<V | undefined> & {
    __returned: BackgroundFetch<V> | undefined;
    __abortController: AbortController;
    __staleWhileFetching: V | undefined;
};
export type DisposeTask<K, V> = [
    value: V,
    key: K,
    reason: LRUCache.DisposeReason
];
export declare namespace LRUCache {
    /**
     * An integer greater than 0, reflecting the calculated size of items
     */
    type Size = number;
    /**
     * An integer greater than 0, representing some number of milliseconds, or
     * the time from which a TTL started counting.
     */
    type Milliseconds = number;
    /**
     * An integer greater than 0, reflecting a number of items
     */
    type Count = number;
    /**
     * The reason why an item was removed from the cache, passed
     * to the {@link Disposer} methods.
     */
    type DisposeReason = 'evict' | 'set' | 'delete';
    /**
     * A method called upon item removal, passed as the
     * {@link OptionsBase.dispose} and/or
     * {@link OptionsBase.disposeAfter} options.
     */
    type Disposer<K, V> = (value: V, key: K, reason: DisposeReason) => void;
    /**
     * A function that returns the effective calculated size
     * of an entry in the cache.
     */
    type SizeCalculator<K, V> = (value: V, key: K) => Size;
    /**
     * Options provided to the
     * {@link OptionsBase.fetchMethod} function.
     */
    interface FetcherOptions<K, V, FC = unknown> {
        signal: AbortSignal;
        options: FetcherFetchOptions<K, V, FC>;
        /**
         * Object provided in the {@link FetchOptions.context} option to
         * {@link LRUCache#fetch}
         */
        context: FC;
    }
    /**
     * Status object that may be passed to {@link LRUCache#fetch},
     * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.
     */
    interface Status<V> {
        /**
         * The status of a set() operation.
         *
         * - add: the item was not found in the cache, and was added
         * - update: the item was in the cache, with the same value provided
         * - replace: the item was in the cache, and replaced
         * - miss: the item was not added to the cache for some reason
         */
        set?: 'add' | 'update' | 'replace' | 'miss';
        /**
         * the ttl stored for the item, or undefined if ttls are not used.
         */
        ttl?: Milliseconds;
        /**
         * the start time for the item, or undefined if ttls are not used.
         */
        start?: Milliseconds;
        /**
         * The timestamp used for TTL calculation
         */
        now?: Milliseconds;
        /**
         * the remaining ttl for the item, or undefined if ttls are not used.
         */
        remainingTTL?: Milliseconds;
        /**
         * The calculated size for the item, if sizes are used.
         */
        entrySize?: Size;
        /**
         * The total calculated size of the cache, if sizes are used.
         */
        totalCalculatedSize?: Size;
        /**
         * A flag indicating that the item was not stored, due to exceeding the
         * {@link OptionsBase.maxEntrySize}
         */
        maxEntrySizeExceeded?: true;
        /**
         * The old value, specified in the case of `set:'update'` or
         * `set:'replace'`
         */
        oldValue?: V;
        /**
         * The results of a {@link LRUCache#has} operation
         *
         * - hit: the item was found in the cache
         * - stale: the item was found in the cache, but is stale
         * - miss: the item was not found in the cache
         */
        has?: 'hit' | 'stale' | 'miss';
        /**
         * The status of a {@link LRUCache#fetch} operation.
         * Note that this can change as the underlying fetch() moves through
         * various states.
         *
         * - inflight: there is another fetch() for this key which is in process
         * - get: there is no fetchMethod, so {@link LRUCache#get} was called.
         * - miss: the item is not in cache, and will be fetched.
         * - hit: the item is in the cache, and was resolved immediately.
         * - stale: the item is in the cache, but stale.
         * - refresh: the item is in the cache, and not stale, but
         *   {@link FetchOptions.forceRefresh} was specified.
         */
        fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh';
        /**
         * The {@link OptionsBase.fetchMethod} was called
         */
        fetchDispatched?: true;
        /**
         * The cached value was updated after a successful call to
         * {@link OptionsBase.fetchMethod}
         */
        fetchUpdated?: true;
        /**
         * The reason for a fetch() rejection. Either the error raised by the
         * {@link OptionsBase.fetchMethod}, or the reason for an
         * AbortSignal.
         */
        fetchError?: Error;
        /**
         * The fetch received an abort signal
         */
        fetchAborted?: true;
        /**
         * The abort signal received was ignored, and the fetch was allowed to
         * continue.
         */
        fetchAbortIgnored?: true;
        /**
         * The fetchMethod promise resolved successfully
         */
        fetchResolved?: true;
        /**
         * The fetchMethod promise was rejected
         */
        fetchRejected?: true;
        /**
         * The status of a {@link LRUCache#get} operation.
         *
         * - fetching: The item is currently being fetched. If a previous value
         *   is present and allowed, that will be returned.
         * - stale: The item is in the cache, and is stale.
         * - hit: the item is in the cache
         * - miss: the item is not in the cache
         */
        get?: 'stale' | 'hit' | 'miss';
        /**
         * A fetch or get operation returned a stale value.
         */
        returnedStale?: true;
    }
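    /*
     * A minimal usage sketch for `Status`, assuming the standard `lru-cache`
     * import and v10+ semantics; the exact fields populated depend on the
     * cache state at call time:
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * const cache = new LRUCache<string, number>({ max: 10, ttl: 1000 })
     * const status: LRUCache.Status<number> = {}
     *
     * cache.set('a', 1, { status })
     * // status.set is 'add' for a new key, 'update'/'replace' otherwise
     * console.log(status.set, status.ttl)
     *
     * cache.get('a', { status })
     * // status.get is 'hit', 'stale', or 'miss'; remainingTTL if ttls are used
     * console.log(status.get, status.remainingTTL)
     * ```
     */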
    /**
     * options which override the options set in the LRUCache constructor
     * when calling {@link LRUCache#fetch}.
     *
     * This is the union of {@link GetOptions} and {@link SetOptions}, plus
     * {@link OptionsBase.noDeleteOnFetchRejection},
     * {@link OptionsBase.allowStaleOnFetchRejection},
     * {@link FetchOptions.forceRefresh}, and
     * {@link FetcherOptions.context}
     *
     * Any of these may be modified in the {@link OptionsBase.fetchMethod}
     * function, but the {@link GetOptions} fields will of course have no
     * effect, as the {@link LRUCache#get} call already happened by the time
     * the fetchMethod is called.
     */
    interface FetcherFetchOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> {
        status?: Status<V>;
        size?: Size;
    }
    /**
     * Options that may be passed to the {@link LRUCache#fetch} method.
     */
    interface FetchOptions<K, V, FC> extends FetcherFetchOptions<K, V, FC> {
        /**
         * Set to true to force a re-load of the existing data, even if it
         * is not yet stale.
         */
        forceRefresh?: boolean;
        /**
         * Context provided to the {@link OptionsBase.fetchMethod} as
         * the {@link FetcherOptions.context} param.
         *
         * If the FC type is specified as unknown (the default),
         * undefined or void, then this is optional. Otherwise, it will
         * be required.
         */
        context?: FC;
        signal?: AbortSignal;
        status?: Status<V>;
    }
    /**
     * Options provided to {@link LRUCache#fetch} when the FC type is something
     * other than `unknown`, `undefined`, or `void`
     */
    interface FetchOptionsWithContext<K, V, FC> extends FetchOptions<K, V, FC> {
        context: FC;
    }
    /**
     * Options provided to {@link LRUCache#fetch} when the FC type is
     * `undefined` or `void`
     */
    interface FetchOptionsNoContext<K, V> extends FetchOptions<K, V, undefined> {
        context?: undefined;
    }
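    /*
     * A sketch of how the `FC` type parameter selects between these two option
     * shapes (standard `lru-cache` import assumed; the fetchMethod bodies are
     * illustrative only):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * // FC specified: `context` becomes required on every fetch() call
     * const withCtx = new LRUCache<string, string, { user: string }>({
     *   max: 100,
     *   fetchMethod: async (key, _stale, { context }) => `${key}:${context.user}`,
     * })
     * await withCtx.fetch('k', { context: { user: 'alice' } })
     *
     * // FC left as the default `unknown` (or void/undefined): context is optional
     * const noCtx = new LRUCache<string, string>({
     *   max: 100,
     *   fetchMethod: async (key) => key.toUpperCase(),
     * })
     * await noCtx.fetch('k')
     * ```
     */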
    /**
     * Options that may be passed to the {@link LRUCache#has} method.
     */
    interface HasOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'updateAgeOnHas'> {
        status?: Status<V>;
    }
    /**
     * Options that may be passed to the {@link LRUCache#get} method.
     */
    interface GetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'> {
        status?: Status<V>;
    }
    /**
     * Options that may be passed to the {@link LRUCache#peek} method.
     */
    interface PeekOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale'> {
    }
    /**
     * Options that may be passed to the {@link LRUCache#set} method.
     */
    interface SetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> {
        /**
         * If size tracking is enabled, then setting an explicit size
         * in the {@link LRUCache#set} call will prevent calling the
         * {@link OptionsBase.sizeCalculation} function.
         */
        size?: Size;
        /**
         * If TTL tracking is enabled, then setting an explicit start
         * time in the {@link LRUCache#set} call will override the
         * default time from `performance.now()` or `Date.now()`.
         *
         * Note that it must be a valid value for whichever time-tracking
         * method is in use.
         */
        start?: Milliseconds;
        status?: Status<V>;
    }
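    /*
     * A brief sketch of per-call set() options (standard import and Node's
     * Buffer assumed; the numbers are arbitrary):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * const cache = new LRUCache<string, Buffer>({
     *   maxSize: 1024,
     *   sizeCalculation: v => v.byteLength,
     * })
     *
     * // Explicit size skips sizeCalculation; per-entry ttl overrides the default
     * cache.set('blob', Buffer.alloc(64), { size: 64, ttl: 5000 })
     * ```
     */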
    /**
     * The type signature for the {@link OptionsBase.fetchMethod} option.
     */
    type Fetcher<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: FetcherOptions<K, V, FC>) => Promise<V | undefined | void> | V | undefined | void;
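    /*
     * A possible `Fetcher` implementation, sketched for illustration: it
     * forwards the provided AbortSignal and falls back to the stale value on
     * failure. The `Todo` shape and URL-as-key scheme are assumptions for the
     * example; global `fetch` requires Node 18+ or a browser:
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * type Todo = { id: number; title: string }
     *
     * const fetchTodo: LRUCache.Fetcher<string, Todo> =
     *   async (url, staleValue, { signal }) => {
     *     try {
     *       const res = await fetch(url, { signal })
     *       return (await res.json()) as Todo
     *     } catch {
     *       // On failure, fall back to whatever stale value we had (may be undefined)
     *       return staleValue
     *     }
     *   }
     *
     * const cache = new LRUCache<string, Todo>({ max: 100, fetchMethod: fetchTodo })
     * ```
     */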
    /**
     * Options which may be passed to the {@link LRUCache} constructor.
     *
     * Most of these may be overridden in the various options that use
     * them.
     *
     * Despite all being technically optional, the constructor requires that
     * a cache is at minimum limited by one or more of {@link OptionsBase.max},
     * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.
     *
     * If {@link OptionsBase.ttl} is used alone, then it is strongly advised
     * (and in fact required by the type definitions here) that the cache
     * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially
     * unbounded storage.
     */
    interface OptionsBase<K, V, FC> {
        /**
         * The maximum number of items to store in the cache before evicting
         * old entries. This is read-only on the {@link LRUCache} instance,
         * and may not be overridden.
         *
         * If set, then storage space will be pre-allocated at construction
         * time, and the cache will perform significantly faster.
         *
         * Note that significantly fewer items may be stored, if
         * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also
         * set.
         */
        max?: Count;
        /**
         * Max time in milliseconds for items to live in cache before they are
         * considered stale. Note that stale items are NOT preemptively removed
         * by default, and MAY live in the cache long after they have expired.
         *
         * Also, as this cache is optimized for LRU/MRU operations, some of
         * the staleness/TTL checks will reduce performance, as they will incur
         * overhead by deleting items.
         *
         * Must be an integer number of ms. If set to 0, this indicates "no TTL"
         *
         * @default 0
         */
        ttl?: Milliseconds;
        /**
         * Minimum amount of time in ms in which to check for staleness.
         * Defaults to 1, which means that the current time is checked
         * at most once per millisecond.
         *
         * Set to 0 to check the current time every time staleness is tested.
         * (This reduces performance, and is theoretically unnecessary.)
         *
         * Setting this to a higher value will improve performance somewhat
         * while using ttl tracking, albeit at the expense of keeping stale
         * items around a bit longer than their TTLs would indicate.
         *
         * @default 1
         */
        ttlResolution?: Milliseconds;
        /**
         * Preemptively remove stale items from the cache.
         * Note that this may significantly degrade performance,
         * especially if the cache is storing a large number of items.
         * It is almost always best to just leave the stale items in
         * the cache, and let them fall out as new items are added.
         *
         * Note that this means that {@link OptionsBase.allowStale} is a bit
         * pointless, as stale items will be deleted almost as soon as they
         * expire.
         *
         * @default false
         */
        ttlAutopurge?: boolean;
        /**
         * Update the age of items on {@link LRUCache#get}, renewing their TTL
         *
         * Has no effect if {@link OptionsBase.ttl} is not set.
         *
         * @default false
         */
        updateAgeOnGet?: boolean;
        /**
         * Update the age of items on {@link LRUCache#has}, renewing their TTL
         *
         * Has no effect if {@link OptionsBase.ttl} is not set.
         *
         * @default false
         */
        updateAgeOnHas?: boolean;
        /**
         * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return
         * stale data, if available.
         */
        allowStale?: boolean;
        /**
         * Function that is called on items when they are dropped from the cache.
         * This can be handy if you want to close file descriptors or do other
         * cleanup tasks when items are no longer accessible. Called with
         * `value, key, reason` (see {@link Disposer}). It's called before
         * actually removing the item from the
         * internal cache, so it is *NOT* safe to re-add them.
         *
         * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after
         * they have been fully removed, when it is safe to add them back to the
         * cache.
         */
        dispose?: Disposer<K, V>;
        /**
         * The same as {@link OptionsBase.dispose}, but called *after* the entry
         * is completely removed and the cache is once again in a clean state.
         * It is safe to add an item right back into the cache at this point.
         * However, note that it is *very* easy to inadvertently create infinite
         * recursion this way.
         */
        disposeAfter?: Disposer<K, V>;
        /**
         * Set to true to suppress calling the
         * {@link OptionsBase.dispose} function if the entry key is
         * still accessible within the cache.
         * This may be overridden by passing an options object to
         * {@link LRUCache#set}.
         */
        noDisposeOnSet?: boolean;
        /**
         * Boolean flag to tell the cache to not update the TTL when
         * setting a new value for an existing key (ie, when updating a value
         * rather than inserting a new value). Note that the TTL value is
         * _always_ set (if provided) when adding a new entry into the cache.
         *
         * Has no effect if a {@link OptionsBase.ttl} is not set.
         */
        noUpdateTTL?: boolean;
        /**
         * If you wish to track item size, you must provide a maxSize option.
         * Note that we will still only keep up to max *actual items*,
         * if max is set, so size tracking may cause fewer than max items
         * to be stored. At the extreme, a single item of maxSize size
         * will cause everything else in the cache to be dropped when it
         * is added. Use with caution!
         *
         * Note also that size tracking can negatively impact performance,
         * though for most cases, only minimally.
         */
        maxSize?: Size;
        /**
         * The maximum allowed size for any single item in the cache.
         *
         * If a larger item is passed to {@link LRUCache#set} or returned by a
         * {@link OptionsBase.fetchMethod}, then it will not be stored in the
         * cache.
         */
        maxEntrySize?: Size;
        /**
         * A function that returns a number indicating the item's size.
         *
         * If not provided, and {@link OptionsBase.maxSize} or
         * {@link OptionsBase.maxEntrySize} are set, then all
         * {@link LRUCache#set} calls **must** provide an explicit
         * {@link SetOptions.size} or sizeCalculation param.
         */
        sizeCalculation?: SizeCalculator<K, V>;
        /**
         * Method that provides the implementation for {@link LRUCache#fetch}
         */
        fetchMethod?: Fetcher<K, V, FC>;
        /**
         * Set to true to suppress the deletion of stale data when a
         * {@link OptionsBase.fetchMethod} returns a rejected promise.
         */
        noDeleteOnFetchRejection?: boolean;
        /**
         * Do not delete stale items when they are retrieved with
         * {@link LRUCache#get}.
         *
         * Note that the `get` return value will still be `undefined`
         * unless {@link OptionsBase.allowStale} is true.
         */
        noDeleteOnStaleGet?: boolean;
        /**
         * Set to true to allow returning stale data when a
         * {@link OptionsBase.fetchMethod} throws an error or returns a rejected
         * promise.
         *
         * This differs from using {@link OptionsBase.allowStale} in that stale
         * data will ONLY be returned in the case that the
         * {@link LRUCache#fetch} fails, not any other times.
         */
        allowStaleOnFetchRejection?: boolean;
        /**
         * Set to true to return a stale value from the cache when the
         * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`
         * event, whether user-triggered, or due to internal cache behavior.
         *
         * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying
         * {@link OptionsBase.fetchMethod} will still be considered canceled, and
         * any value it returns will be ignored and not cached.
         *
         * Caveat: since fetches are aborted when a new value is explicitly
         * set in the cache, this can lead to fetch returning a stale value,
         * since that was the fallback value _at the moment the `fetch()` was
         * initiated_, even though the new updated value is now present in
         * the cache.
         *
         * For example:
         *
         * ```ts
         * const cache = new LRUCache<string, any>({
         *   ttl: 100,
         *   fetchMethod: async (url, oldValue, { signal }) => {
         *     const res = await fetch(url, { signal })
         *     return await res.json()
         *   }
         * })
         * cache.set('https://example.com/', { some: 'data' })
         * // 100ms go by...
         * const result = cache.fetch('https://example.com/')
         * cache.set('https://example.com/', { other: 'thing' })
         * console.log(await result) // { some: 'data' }
         * console.log(cache.get('https://example.com/')) // { other: 'thing' }
         * ```
         */
        allowStaleOnFetchAbort?: boolean;
        /**
         * Set to true to ignore the `abort` event emitted by the `AbortSignal`
         * object passed to {@link OptionsBase.fetchMethod}, and still cache the
         * resulting resolution value, as long as it is not `undefined`.
         *
         * When used on its own, this means aborted {@link LRUCache#fetch} calls are not
         * immediately resolved or rejected when they are aborted, and instead
         * take the full time to await.
         *
         * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted
         * {@link LRUCache#fetch} calls will resolve immediately to their stale
         * cached value or `undefined`, and will continue to process and eventually
         * update the cache when they resolve, as long as the resulting value is
         * not `undefined`, thus supporting a "return stale on timeout while
         * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
         *
         * **Note**: regardless of this setting, an `abort` event _is still
         * emitted on the `AbortSignal` object_, so may result in invalid results
         * when passed to other underlying APIs that use AbortSignals.
         *
         * This may be overridden in the {@link OptionsBase.fetchMethod} or the
         * call to {@link LRUCache#fetch}.
         */
        ignoreFetchAbort?: boolean;
    }
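    /*
     * A small sketch combining several of the options above (standard import
     * assumed; the `Handle` shape with close()/bytes is hypothetical):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * type Handle = { close(): void; bytes: number }
     *
     * const handles = new LRUCache<string, Handle>({
     *   max: 500,                        // hard cap on entry count
     *   maxSize: 1_000_000,              // cap on total calculated size
     *   sizeCalculation: h => h.bytes,   // used when set() gives no explicit size
     *   ttl: 60_000,                     // entries stale after one minute
     *   dispose: (h, key, reason) => {
     *     // reason is 'evict', 'set', or 'delete'; clean up, but do not re-add here
     *     h.close()
     *   },
     * })
     * ```
     */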
    interface OptionsMaxLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        max: Count;
    }
    interface OptionsTTLLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        ttl: Milliseconds;
        ttlAutopurge: boolean;
    }
    interface OptionsSizeLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        maxSize: Size;
    }
    /**
     * The valid safe options for the {@link LRUCache} constructor
     */
    type Options<K, V, FC> = OptionsMaxLimit<K, V, FC> | OptionsSizeLimit<K, V, FC> | OptionsTTLLimit<K, V, FC>;
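    /*
     * The union above is what makes the constructor reject unbounded caches.
     * Each line below satisfies one of the three shapes (the sizes and ttls
     * are arbitrary example values):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * new LRUCache<string, string>({ max: 1000 })                                      // OptionsMaxLimit
     * new LRUCache<string, string>({ maxSize: 5000, sizeCalculation: v => v.length })  // OptionsSizeLimit
     * new LRUCache<string, string>({ ttl: 60_000, ttlAutopurge: true })                // OptionsTTLLimit
     * // new LRUCache<string, string>({})  // type error: no limit provided
     * ```
     */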
    /**
     * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump},
     * and returned by {@link LRUCache#info}.
     */
    interface Entry<V> {
        value: V;
        ttl?: Milliseconds;
        size?: Size;
        start?: Milliseconds;
    }
}
/**
 * Default export, the thing you're using this module to get.
 *
 * All properties from the options object (with the exception of
 * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
 * normal public members. (`max` and `maxSize` are read-only getters.)
 * Changing any of these will alter the defaults for subsequent method calls,
 * but is otherwise safe.
 */
export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implements Map<K, V> {
    #private;
    /**
     * {@link LRUCache.OptionsBase.ttl}
     */
    ttl: LRUCache.Milliseconds;
    /**
     * {@link LRUCache.OptionsBase.ttlResolution}
     */
    ttlResolution: LRUCache.Milliseconds;
    /**
     * {@link LRUCache.OptionsBase.ttlAutopurge}
     */
    ttlAutopurge: boolean;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnGet}
     */
    updateAgeOnGet: boolean;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnHas}
     */
    updateAgeOnHas: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStale}
     */
    allowStale: boolean;
    /**
     * {@link LRUCache.OptionsBase.noDisposeOnSet}
     */
    noDisposeOnSet: boolean;
    /**
     * {@link LRUCache.OptionsBase.noUpdateTTL}
     */
    noUpdateTTL: boolean;
    /**
     * {@link LRUCache.OptionsBase.maxEntrySize}
     */
    maxEntrySize: LRUCache.Size;
    /**
     * {@link LRUCache.OptionsBase.sizeCalculation}
     */
    sizeCalculation?: LRUCache.SizeCalculator<K, V>;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
     */
    noDeleteOnFetchRejection: boolean;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
     */
    noDeleteOnStaleGet: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
     */
    allowStaleOnFetchAbort: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
     */
    allowStaleOnFetchRejection: boolean;
    /**
     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
     */
    ignoreFetchAbort: boolean;
    /**
     * Do not call this method unless you need to inspect the
     * inner workings of the cache. If anything returned by this
     * object is modified in any way, strange breakage may occur.
     *
     * These fields are private for a reason!
     *
     * @internal
     */
    static unsafeExposeInternals<K extends {}, V extends {}, FC extends unknown = unknown>(c: LRUCache<K, V, FC>): {
        starts: ZeroArray | undefined;
        ttls: ZeroArray | undefined;
        sizes: ZeroArray | undefined;
        keyMap: Map<K, number>;
        keyList: (K | undefined)[];
        valList: (V | BackgroundFetch<V> | undefined)[];
        next: NumberArray;
        prev: NumberArray;
        readonly head: Index;
        readonly tail: Index;
        free: StackLike;
        isBackgroundFetch: (p: any) => boolean;
        backgroundFetch: (k: K, index: number | undefined, options: LRUCache.FetchOptions<K, V, FC>, context: any) => BackgroundFetch<V>;
        moveToTail: (index: number) => void;
        indexes: (options?: {
            allowStale: boolean;
        }) => Generator<Index, void, unknown>;
        rindexes: (options?: {
            allowStale: boolean;
        }) => Generator<Index, void, unknown>;
        isStale: (index: number | undefined) => boolean;
    };
    /**
     * {@link LRUCache.OptionsBase.max} (read-only)
     */
    get max(): LRUCache.Count;
    /**
     * {@link LRUCache.OptionsBase.maxSize} (read-only)
     */
    get maxSize(): LRUCache.Count;
    /**
     * The total computed size of items in the cache (read-only)
     */
    get calculatedSize(): LRUCache.Size;
    /**
     * The number of items stored in the cache (read-only)
     */
    get size(): LRUCache.Count;
    /**
     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
     */
    get fetchMethod(): LRUCache.Fetcher<K, V, FC> | undefined;
    /**
     * {@link LRUCache.OptionsBase.dispose} (read-only)
     */
    get dispose(): LRUCache.Disposer<K, V> | undefined;
    /**
     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
     */
    get disposeAfter(): LRUCache.Disposer<K, V> | undefined;
    constructor(options: LRUCache.Options<K, V, FC> | LRUCache<K, V, FC>);
    /**
     * Return the remaining TTL time for a given entry key
     */
    getRemainingTTL(key: K): number;
    /**
     * Return a generator yielding `[key, value]` pairs,
     * in order from most recently used to least recently used.
     */
    entries(): Generator<[K, V], void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.entries}
     *
     * Return a generator yielding `[key, value]` pairs,
     * in order from least recently used to most recently used.
     */
    rentries(): Generator<(K | V | BackgroundFetch<V> | undefined)[], void, unknown>;
    /**
     * Return a generator yielding the keys in the cache,
     * in order from most recently used to least recently used.
     */
    keys(): Generator<K, void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.keys}
     *
     * Return a generator yielding the keys in the cache,
     * in order from least recently used to most recently used.
     */
    rkeys(): Generator<K, void, unknown>;
    /**
     * Return a generator yielding the values in the cache,
     * in order from most recently used to least recently used.
     */
    values(): Generator<V, void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.values}
     *
     * Return a generator yielding the values in the cache,
     * in order from least recently used to most recently used.
     */
    rvalues(): Generator<V | BackgroundFetch<V> | undefined, void, unknown>;
    /**
     * Iterating over the cache itself yields the same results as
     * {@link LRUCache.entries}
     */
    [Symbol.iterator](): Generator<[K, V], void, unknown>;
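    /*
     * Iteration order sketch: entries()/keys()/values() and the iterator
     * itself yield most recently used first; the r-prefixed variants yield
     * least recently used first. A minimal illustration (no stale entries
     * assumed):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * const cache = new LRUCache<string, number>({ max: 3 })
     * cache.set('a', 1)
     * cache.set('b', 2)
     * cache.get('a') // touching 'a' makes it most recently used
     *
     * console.log([...cache.keys()])  // ['a', 'b']
     * console.log([...cache.rkeys()]) // ['b', 'a']
     * for (const [k, v] of cache) {
     *   console.log(k, v) // same order as cache.entries(): 'a' first, then 'b'
     * }
     * ```
     */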
    /**
     * A String value that is used in the creation of the default string description of an object.
     * Called by the built-in method Object.prototype.toString.
     */
    [Symbol.toStringTag]: string;
    /**
     * Find a value for which the supplied fn method returns a truthy value,
     * similar to Array.find(). fn is called as fn(value, key, cache).
     */
    find(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => boolean, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
    /**
     * Call the supplied function on each item in the cache, in order from
     * most recently used to least recently used. fn is called as
     * fn(value, key, cache). Does not update age or recency of use.
     * Does not iterate over stale values.
     */
    forEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
    /**
     * The same as {@link LRUCache.forEach} but items are iterated over in
     * reverse order. (ie, less recently used items are iterated over first.)
     */
    rforEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
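    /*
     * A short sketch of find() and forEach() (standard import assumed; the
     * session shape is hypothetical):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * type Session = { user: string; admin: boolean }
     * const sessions = new LRUCache<string, Session>({ max: 1000 })
     *
     * // First session for which the predicate returns a truthy value
     * const admin = sessions.find(s => s.admin)
     *
     * // Visit every live entry, most recently used first
     * sessions.forEach((session, id) => console.log(id, session.user))
     * ```
     */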
    /**
     * Delete any stale entries. Returns true if anything was removed,
     * false otherwise.
     */
    purgeStale(): boolean;
    /**
     * Get the extended info about a given entry, to get its value, size, and
     * TTL info simultaneously. Like {@link LRUCache#dump}, but just for a
     * single key. Always returns stale values, if their info is found in the
     * cache, so be sure to check for expired TTLs if relevant.
     */
    info(key: K): LRUCache.Entry<V> | undefined;
    /**
     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
     * passed to cache.load()
     */
    dump(): [K, LRUCache.Entry<V>][];
    /**
     * Reset the cache and load the provided entries, in the order listed.
     * Note that the shape of the resulting cache may be different if the
     * same options are not used in both caches.
     */
    load(arr: [K, LRUCache.Entry<V>][]): void;
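    /*
     * A dump()/load() round-trip sketch, e.g. for persisting a cache across
     * restarts (JSON serialization is assumed to be acceptable for the value
     * type):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * const a = new LRUCache<string, string>({ max: 100, ttl: 60_000 })
     * a.set('greeting', 'hello')
     *
     * const snapshot: [string, LRUCache.Entry<string>][] = a.dump()
     * const serialized = JSON.stringify(snapshot)
     *
     * // Later / elsewhere: same options, then load the snapshot
     * const b = new LRUCache<string, string>({ max: 100, ttl: 60_000 })
     * b.load(JSON.parse(serialized))
     * console.log(b.get('greeting')) // 'hello' (if still within its TTL)
     * ```
     */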
    /**
     * Add a value to the cache.
     *
     * Note: if `undefined` is specified as a value, this is an alias for
     * {@link LRUCache#delete}
     */
    set(k: K, v: V | BackgroundFetch<V> | undefined, setOptions?: LRUCache.SetOptions<K, V, FC>): this;
    /**
     * Evict the least recently used item, returning its value or
     * `undefined` if cache is empty.
     */
    pop(): V | undefined;
    /**
     * Check if a key is in the cache, without updating the recency of use.
     * Will return false if the item is stale, even though it is technically
     * in the cache.
     *
     * Will not update item age unless
     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
     */
    has(k: K, hasOptions?: LRUCache.HasOptions<K, V, FC>): boolean;
    /**
     * Like {@link LRUCache#get} but doesn't update recency or delete stale
     * items.
     *
     * Returns `undefined` if the item is stale, unless
     * {@link LRUCache.OptionsBase.allowStale} is set.
     */
    peek(k: K, peekOptions?: LRUCache.PeekOptions<K, V, FC>): V | undefined;
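    /*
     * has() vs peek() vs get(), sketched (standard import assumed):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * const cache = new LRUCache<string, number>({ max: 10, ttl: 1000 })
     * cache.set('x', 42)
     *
     * cache.has('x')   // true; does not touch recency (false once stale)
     * cache.peek('x')  // 42; does not touch recency or delete stale items
     * cache.get('x')   // 42; marks 'x' as most recently used
     * ```
     */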
    /**
     * Make an asynchronous cached fetch using the
     * {@link LRUCache.OptionsBase.fetchMethod} function.
     *
     * If multiple fetches for the same key are issued, then they will all be
     * coalesced into a single call to fetchMethod.
     *
     * Note that this means that handling options such as
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},
     * {@link LRUCache.FetchOptions.signal},
     * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be
     * determined by the FIRST fetch() call for a given key.
     *
     * This is a known (fixable) shortcoming which will be addressed when
     * someone complains about it, as the fix would involve added complexity and
     * may not be worth the costs for this edge case.
     */
    fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<undefined | V>;
    fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : never): Promise<undefined | V>;
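    /*
     * A fetch() usage sketch: return a stale value on timeout while the
     * refresh continues in the background, per the allowStaleOnFetchAbort and
     * ignoreFetchAbort notes above (Node 18+ assumed for AbortSignal.timeout
     * and global fetch; the URL-as-key scheme is illustrative):
     *
     * ```ts
     * import { LRUCache } from 'lru-cache'
     *
     * const cache = new LRUCache<string, string>({
     *   max: 100,
     *   ttl: 60_000,
     *   allowStaleOnFetchAbort: true,
     *   ignoreFetchAbort: true,
     *   fetchMethod: async (url, _stale, { signal }) =>
     *     (await fetch(url, { signal })).text(),
     * })
     *
     * // Resolve with the stale value if the refresh takes longer than 50ms;
     * // the refresh keeps running and updates the cache when it finishes.
     * const body = await cache.fetch('https://example.com/', {
     *   signal: AbortSignal.timeout(50),
     * })
     * ```
     */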
    /**
     * Return a value from the cache. Will update the recency of the cache
     * entry found.
     *
     * If the key is not found, get() will return `undefined`.
     */
    get(k: K, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
    /**
     * Deletes a key out of the cache.
     * Returns true if the key was deleted, false otherwise.
     */
    delete(k: K): boolean;
    /**
     * Clear the cache entirely, throwing away all values.
     */
    clear(): void;
}
//# sourceMappingURL=index.d.ts.map