1 | /**
|
2 | * @module LRUCache
|
3 | */
|
/**
 * Brand symbol used to construct the nominal ("branded") number types
 * below ({@link PosInt}, {@link Index}). Type-level only: `declare const`
 * means no runtime value ever exists for this symbol.
 */
declare const TYPE: unique symbol;
|
/**
 * A `number` branded as a positive integer. The {@link TYPE} brand exists
 * only at compile time; at runtime these are plain numbers.
 */
export type PosInt = number & {
    [TYPE]: 'Positive Integer';
};
|
/**
 * A `number` branded as an internal LRUCache index. The {@link TYPE} brand
 * exists only at compile time; at runtime these are plain numbers.
 */
export type Index = number & {
    [TYPE]: 'LRUCache Index';
};
|
/**
 * Any of the unsigned-integer typed array types usable as backing storage.
 */
export type UintArray = Uint8Array | Uint16Array | Uint32Array;
|
/**
 * Either a {@link UintArray} or a plain `number[]` — the storage types used
 * for numeric internal state (see {@link Stack.heap}).
 */
export type NumberArray = UintArray | number[];
|
/**
 * An `Array<number>` subclass constructed with a fixed size.
 * NOTE(review): presumably zero-filled on construction, as the name
 * suggests — the implementation is not visible in this declaration file.
 */
declare class ZeroArray extends Array<number> {
    constructor(size: number);
}
export type { ZeroArray };
|
export type { Stack };
/**
 * Either a {@link Stack} instance or a plain array of {@link Index} values.
 * This is what {@link Stack.create} returns.
 */
export type StackLike = Stack | Index[];
|
/**
 * Internal LIFO stack of {@link Index} values, backed by a
 * {@link NumberArray} heap.
 */
declare class Stack {
    #private;
    /** Backing storage for the stacked indexes. */
    heap: NumberArray;
    /** Number of entries currently on the stack. */
    length: number;
    /**
     * Create a stack sized for `max` entries.
     * NOTE(review): returns {@link StackLike}, so it may return a plain
     * `Index[]` rather than a `Stack` instance — presumably depending on
     * `max`; the implementation is not visible here.
     */
    static create(max: number): StackLike;
    /**
     * @param max - capacity of the stack
     * @param HeapCls - constructor for the backing {@link NumberArray}
     */
    constructor(max: number, HeapCls: {
        new (n: number): NumberArray;
    });
    /** Push an index onto the stack. */
    push(n: Index): void;
    /** Pop the most recently pushed index. */
    pop(): Index;
}
|
/**
 * Promise representing an in-progress {@link LRUCache#fetch} call
 */
export type BackgroundFetch<V> = Promise<V | undefined> & {
    // NOTE(review): the __-prefixed fields below are internal bookkeeping
    // attached to the promise by the cache; semantics inferred from names
    // and usage elsewhere in the package — confirm against implementation.
    __returned: BackgroundFetch<V> | undefined;
    __abortController: AbortController;
    __staleWhileFetching: V | undefined;
};
|
/**
 * Tuple of `[value, key, reason]` describing a pending disposal,
 * as passed to a {@link LRUCache.Disposer}.
 */
export type DisposeTask<K, V> = [
    value: V,
    key: K,
    reason: LRUCache.DisposeReason
];
|
43 | export declare namespace LRUCache {
|
44 | /**
|
45 | * An integer greater than 0, reflecting the calculated size of items
|
46 | */
|
47 | type Size = number;
|
48 | /**
|
49 | * Integer greater than 0, representing some number of milliseconds, or the
|
50 | * time at which a TTL started counting from.
|
51 | */
|
52 | type Milliseconds = number;
|
53 | /**
|
54 | * An integer greater than 0, reflecting a number of items
|
55 | */
|
56 | type Count = number;
|
57 | /**
|
58 | * The reason why an item was removed from the cache, passed
|
59 | * to the {@link Disposer} methods.
|
60 | *
|
61 | * - `evict`: The item was evicted because it is the least recently used,
|
62 | * and the cache is full.
|
63 | * - `set`: A new value was set, overwriting the old value being disposed.
|
64 | * - `delete`: The item was explicitly deleted, either by calling
|
65 | * {@link LRUCache#delete}, {@link LRUCache#clear}, or
|
66 | * {@link LRUCache#set} with an undefined value.
|
67 | * - `expire`: The item was removed due to exceeding its TTL.
|
68 | * - `fetch`: A {@link OptionsBase#fetchMethod} operation returned
|
69 | * `undefined` or was aborted, causing the item to be deleted.
|
70 | */
|
71 | type DisposeReason = 'evict' | 'set' | 'delete' | 'expire' | 'fetch';
|
72 | /**
|
73 | * A method called upon item removal, passed as the
|
74 | * {@link OptionsBase.dispose} and/or
|
75 | * {@link OptionsBase.disposeAfter} options.
|
76 | */
|
77 | type Disposer<K, V> = (value: V, key: K, reason: DisposeReason) => void;
|
78 | /**
|
79 | * A function that returns the effective calculated size
|
80 | * of an entry in the cache.
|
81 | */
|
82 | type SizeCalculator<K, V> = (value: V, key: K) => Size;
|
83 | /**
|
84 | * Options provided to the
|
85 | * {@link OptionsBase.fetchMethod} function.
|
86 | */
|
interface FetcherOptions<K, V, FC = unknown> {
    /**
     * Signal indicating that the fetch should be abandoned — see the
     * {@link OptionsBase.fetchMethod} documentation for abort semantics.
     */
    signal: AbortSignal;
    /** The resolved options in effect for this fetch operation. */
    options: FetcherFetchOptions<K, V, FC>;
    /**
     * Object provided in the {@link FetchOptions.context} option to
     * {@link LRUCache#fetch}
     */
    context: FC;
}
|
96 | /**
|
97 | * Occasionally, it may be useful to track the internal behavior of the
|
98 | * cache, particularly for logging, debugging, or for behavior within the
|
99 | * `fetchMethod`. To do this, you can pass a `status` object to the
|
100 | * {@link LRUCache#fetch}, {@link LRUCache#get}, {@link LRUCache#set},
|
101 | * {@link LRUCache#memo}, and {@link LRUCache#has} methods.
|
102 | *
|
103 | * The `status` option should be a plain JavaScript object. The following
|
104 | * fields will be set on it appropriately, depending on the situation.
|
105 | */
|
106 | interface Status<V> {
|
107 | /**
|
108 | * The status of a set() operation.
|
109 | *
|
110 | * - add: the item was not found in the cache, and was added
|
111 | * - update: the item was in the cache, with the same value provided
|
112 | * - replace: the item was in the cache, and replaced
|
113 | * - miss: the item was not added to the cache for some reason
|
114 | */
|
115 | set?: 'add' | 'update' | 'replace' | 'miss';
|
116 | /**
|
117 | * the ttl stored for the item, or undefined if ttls are not used.
|
118 | */
|
119 | ttl?: Milliseconds;
|
120 | /**
|
121 | * the start time for the item, or undefined if ttls are not used.
|
122 | */
|
123 | start?: Milliseconds;
|
124 | /**
|
125 | * The timestamp used for TTL calculation
|
126 | */
|
127 | now?: Milliseconds;
|
128 | /**
|
129 | * the remaining ttl for the item, or undefined if ttls are not used.
|
130 | */
|
131 | remainingTTL?: Milliseconds;
|
132 | /**
|
133 | * The calculated size for the item, if sizes are used.
|
134 | */
|
135 | entrySize?: Size;
|
136 | /**
|
137 | * The total calculated size of the cache, if sizes are used.
|
138 | */
|
139 | totalCalculatedSize?: Size;
|
140 | /**
|
141 | * A flag indicating that the item was not stored, due to exceeding the
|
142 | * {@link OptionsBase.maxEntrySize}
|
143 | */
|
144 | maxEntrySizeExceeded?: true;
|
145 | /**
|
146 | * The old value, specified in the case of `set:'update'` or
|
147 | * `set:'replace'`
|
148 | */
|
149 | oldValue?: V;
|
150 | /**
|
151 | * The results of a {@link LRUCache#has} operation
|
152 | *
|
153 | * - hit: the item was found in the cache
|
154 | * - stale: the item was found in the cache, but is stale
|
155 | * - miss: the item was not found in the cache
|
156 | */
|
157 | has?: 'hit' | 'stale' | 'miss';
|
158 | /**
|
159 | * The status of a {@link LRUCache#fetch} operation.
|
160 | * Note that this can change as the underlying fetch() moves through
|
161 | * various states.
|
162 | *
|
163 | * - inflight: there is another fetch() for this key which is in process
|
164 | * - get: there is no {@link OptionsBase.fetchMethod}, so
|
165 | * {@link LRUCache#get} was called.
|
166 | * - miss: the item is not in cache, and will be fetched.
|
167 | * - hit: the item is in the cache, and was resolved immediately.
|
168 | * - stale: the item is in the cache, but stale.
|
169 | * - refresh: the item is in the cache, and not stale, but
|
170 | * {@link FetchOptions.forceRefresh} was specified.
|
171 | */
|
172 | fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh';
|
173 | /**
|
174 | * The {@link OptionsBase.fetchMethod} was called
|
175 | */
|
176 | fetchDispatched?: true;
|
177 | /**
|
178 | * The cached value was updated after a successful call to
|
179 | * {@link OptionsBase.fetchMethod}
|
180 | */
|
181 | fetchUpdated?: true;
|
182 | /**
|
183 | * The reason for a fetch() rejection. Either the error raised by the
|
184 | * {@link OptionsBase.fetchMethod}, or the reason for an
|
185 | * AbortSignal.
|
186 | */
|
187 | fetchError?: Error;
|
188 | /**
|
189 | * The fetch received an abort signal
|
190 | */
|
191 | fetchAborted?: true;
|
192 | /**
|
193 | * The abort signal received was ignored, and the fetch was allowed to
|
194 | * continue.
|
195 | */
|
196 | fetchAbortIgnored?: true;
|
197 | /**
|
198 | * The fetchMethod promise resolved successfully
|
199 | */
|
200 | fetchResolved?: true;
|
201 | /**
|
202 | * The fetchMethod promise was rejected
|
203 | */
|
204 | fetchRejected?: true;
|
205 | /**
|
206 | * The status of a {@link LRUCache#get} operation.
|
207 | *
|
208 | * - fetching: The item is currently being fetched. If a previous value
|
209 | * is present and allowed, that will be returned.
|
210 | * - stale: The item is in the cache, and is stale.
|
211 | * - hit: the item is in the cache
|
212 | * - miss: the item is not in the cache
|
213 | */
|
214 | get?: 'stale' | 'hit' | 'miss';
|
215 | /**
|
216 | * A fetch or get operation returned a stale value.
|
217 | */
|
218 | returnedStale?: true;
|
219 | }
|
220 | /**
|
221 | * options which override the options set in the LRUCache constructor
|
222 | * when calling {@link LRUCache#fetch}.
|
223 | *
|
224 | * This is the union of {@link GetOptions} and {@link SetOptions}, plus
|
225 | * {@link OptionsBase.noDeleteOnFetchRejection},
|
226 | * {@link OptionsBase.allowStaleOnFetchRejection},
|
227 | * {@link FetchOptions.forceRefresh}, and
|
228 | * {@link FetcherOptions.context}
|
229 | *
|
230 | * Any of these may be modified in the {@link OptionsBase.fetchMethod}
|
231 | * function, but the {@link GetOptions} fields will of course have no
|
232 | * effect, as the {@link LRUCache#get} call already happened by the time
|
233 | * the fetchMethod is called.
|
234 | */
|
interface FetcherFetchOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> {
    /**
     * Plain object that will be updated with the status of the operation —
     * see {@link Status}.
     */
    status?: Status<V>;
    /**
     * Explicit size for the entry, preventing a call to the
     * {@link OptionsBase.sizeCalculation} function — see
     * {@link SetOptions.size}.
     */
    size?: Size;
}
|
239 | /**
|
240 | * Options that may be passed to the {@link LRUCache#fetch} method.
|
241 | */
|
242 | interface FetchOptions<K, V, FC> extends FetcherFetchOptions<K, V, FC> {
|
243 | /**
|
244 | * Set to true to force a re-load of the existing data, even if it
|
245 | * is not yet stale.
|
246 | */
|
247 | forceRefresh?: boolean;
|
248 | /**
|
249 | * Context provided to the {@link OptionsBase.fetchMethod} as
|
250 | * the {@link FetcherOptions.context} param.
|
251 | *
|
252 | * If the FC type is specified as unknown (the default),
|
253 | * undefined or void, then this is optional. Otherwise, it will
|
254 | * be required.
|
255 | */
|
256 | context?: FC;
|
257 | signal?: AbortSignal;
|
258 | status?: Status<V>;
|
259 | }
|
260 | /**
|
261 | * Options provided to {@link LRUCache#fetch} when the FC type is something
|
262 | * other than `unknown`, `undefined`, or `void`
|
263 | */
|
264 | interface FetchOptionsWithContext<K, V, FC> extends FetchOptions<K, V, FC> {
|
265 | context: FC;
|
266 | }
|
267 | /**
|
268 | * Options provided to {@link LRUCache#fetch} when the FC type is
|
269 | * `undefined` or `void`
|
270 | */
|
271 | interface FetchOptionsNoContext<K, V> extends FetchOptions<K, V, undefined> {
|
272 | context?: undefined;
|
273 | }
|
/**
 * Options that may be passed to the {@link LRUCache#memo} method.
 */
interface MemoOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> {
    /**
     * Set to true to force a re-load of the existing data, even if it
     * is not yet stale.
     */
    forceRefresh?: boolean;
    /**
     * Context provided to the {@link OptionsBase.memoMethod} as
     * the {@link MemoizerOptions.context} param.
     *
     * If the FC type is specified as unknown (the default),
     * undefined or void, then this is optional. Otherwise, it will
     * be required.
     */
    context?: FC;
    /**
     * Plain object that will be updated with the status of the operation —
     * see {@link Status}.
     */
    status?: Status<V>;
}
|
291 | /**
|
292 | * Options provided to {@link LRUCache#memo} when the FC type is something
|
293 | * other than `unknown`, `undefined`, or `void`
|
294 | */
|
295 | interface MemoOptionsWithContext<K, V, FC> extends MemoOptions<K, V, FC> {
|
296 | context: FC;
|
297 | }
|
298 | /**
|
299 | * Options provided to {@link LRUCache#memo} when the FC type is
|
300 | * `undefined` or `void`
|
301 | */
|
302 | interface MemoOptionsNoContext<K, V> extends MemoOptions<K, V, undefined> {
|
303 | context?: undefined;
|
304 | }
|
305 | /**
|
306 | * Options provided to the
|
307 | * {@link OptionsBase.memoMethod} function.
|
308 | */
|
309 | interface MemoizerOptions<K, V, FC = unknown> {
|
310 | options: MemoizerMemoOptions<K, V, FC>;
|
311 | /**
|
312 | * Object provided in the {@link MemoOptions.context} option to
|
313 | * {@link LRUCache#memo}
|
314 | */
|
315 | context: FC;
|
316 | }
|
317 | /**
|
318 | * options which override the options set in the LRUCache constructor
|
319 | * when calling {@link LRUCache#memo}.
|
320 | *
|
321 | * This is the union of {@link GetOptions} and {@link SetOptions}, plus
|
322 | * {@link MemoOptions.forceRefresh}, and
|
323 | * {@link MemoizerOptions.context}
|
324 | *
|
325 | * Any of these may be modified in the {@link OptionsBase.memoMethod}
|
326 | * function, but the {@link GetOptions} fields will of course have no
|
327 | * effect, as the {@link LRUCache#get} call already happened by the time
|
328 | * the memoMethod is called.
|
329 | */
|
interface MemoizerMemoOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> {
    /**
     * Plain object that will be updated with the status of the operation —
     * see {@link Status}.
     */
    status?: Status<V>;
    /**
     * Explicit size for the entry, preventing a call to the
     * {@link OptionsBase.sizeCalculation} function — see
     * {@link SetOptions.size}.
     */
    size?: Size;
    /**
     * Explicit TTL start time for the entry — see {@link SetOptions.start}.
     */
    start?: Milliseconds;
}
|
335 | /**
|
336 | * Options that may be passed to the {@link LRUCache#has} method.
|
337 | */
|
interface HasOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'updateAgeOnHas'> {
    /**
     * Plain object that will be updated with the status of the operation —
     * see {@link Status}.
     */
    status?: Status<V>;
}
|
341 | /**
|
342 | * Options that may be passed to the {@link LRUCache#get} method.
|
343 | */
|
interface GetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'> {
    /**
     * Plain object that will be updated with the status of the operation —
     * see {@link Status}.
     */
    status?: Status<V>;
}
|
347 | /**
|
348 | * Options that may be passed to the {@link LRUCache#peek} method.
|
349 | */
|
350 | interface PeekOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale'> {
|
351 | }
|
352 | /**
|
353 | * Options that may be passed to the {@link LRUCache#set} method.
|
354 | */
|
355 | interface SetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> {
|
356 | /**
|
357 | * If size tracking is enabled, then setting an explicit size
|
358 | * in the {@link LRUCache#set} call will prevent calling the
|
359 | * {@link OptionsBase.sizeCalculation} function.
|
360 | */
|
361 | size?: Size;
|
362 | /**
|
363 | * If TTL tracking is enabled, then setting an explicit start
|
364 | * time in the {@link LRUCache#set} call will override the
|
365 | * default time from `performance.now()` or `Date.now()`.
|
366 | *
|
367 | * Note that it must be a valid value for whichever time-tracking
|
368 | * method is in use.
|
369 | */
|
370 | start?: Milliseconds;
|
371 | status?: Status<V>;
|
372 | }
|
373 | /**
|
374 | * The type signature for the {@link OptionsBase.fetchMethod} option.
|
375 | */
|
376 | type Fetcher<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: FetcherOptions<K, V, FC>) => Promise<V | undefined | void> | V | undefined | void;
|
377 | /**
|
378 | * the type signature for the {@link OptionsBase.memoMethod} option.
|
379 | */
|
380 | type Memoizer<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: MemoizerOptions<K, V, FC>) => V;
|
381 | /**
|
382 | * Options which may be passed to the {@link LRUCache} constructor.
|
383 | *
|
384 | * Most of these may be overridden in the various options that use
|
385 | * them.
|
386 | *
|
387 | * Despite all being technically optional, the constructor requires that
|
388 | * a cache is at minimum limited by one or more of {@link OptionsBase.max},
|
389 | * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.
|
390 | *
|
391 | * If {@link OptionsBase.ttl} is used alone, then it is strongly advised
|
392 | * (and in fact required by the type definitions here) that the cache
|
393 | * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially
|
394 | * unbounded storage.
|
395 | *
|
396 | * All options are also available on the {@link LRUCache} instance, making
|
397 | * it safe to pass an LRUCache instance as the options argument to
|
398 | * make another empty cache of the same type.
|
399 | *
|
400 | * Some options are marked as read-only, because changing them after
|
401 | * instantiation is not safe. Changing any of the other options will of
|
402 | * course only have an effect on subsequent method calls.
|
403 | */
|
404 | interface OptionsBase<K, V, FC> {
|
405 | /**
|
406 | * The maximum number of items to store in the cache before evicting
|
407 | * old entries. This is read-only on the {@link LRUCache} instance,
|
408 | * and may not be overridden.
|
409 | *
|
410 | * If set, then storage space will be pre-allocated at construction
|
411 | * time, and the cache will perform significantly faster.
|
412 | *
|
413 | * Note that significantly fewer items may be stored, if
|
414 | * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also
|
415 | * set.
|
416 | *
|
417 | * **It is strongly recommended to set a `max` to prevent unbounded growth
|
418 | * of the cache.**
|
419 | */
|
420 | max?: Count;
|
421 | /**
|
422 | * Max time in milliseconds for items to live in cache before they are
|
423 | * considered stale. Note that stale items are NOT preemptively removed by
|
424 | * default, and MAY live in the cache, contributing to its LRU max, long
|
425 | * after they have expired, unless {@link OptionsBase.ttlAutopurge} is
|
426 | * set.
|
427 | *
|
428 | * If set to `0` (the default value), then that means "do not track
|
429 | * TTL", not "expire immediately".
|
430 | *
|
431 | * Also, as this cache is optimized for LRU/MRU operations, some of
|
432 | * the staleness/TTL checks will reduce performance, as they will incur
|
433 | * overhead by deleting items.
|
434 | *
|
435 | * This is not primarily a TTL cache, and does not make strong TTL
|
436 | * guarantees. There is no pre-emptive pruning of expired items, but you
|
437 | * _may_ set a TTL on the cache, and it will treat expired items as missing
|
438 | * when they are fetched, and delete them.
|
439 | *
|
440 | * Optional, but must be a non-negative integer in ms if specified.
|
441 | *
|
442 | * This may be overridden by passing an options object to `cache.set()`.
|
443 | *
|
444 | * At least one of `max`, `maxSize`, or `TTL` is required. This must be a
|
445 | * positive integer if set.
|
446 | *
|
447 | * Even if ttl tracking is enabled, **it is strongly recommended to set a
|
448 | * `max` to prevent unbounded growth of the cache.**
|
449 | *
|
450 | * If ttl tracking is enabled, and `max` and `maxSize` are not set,
|
451 | * and `ttlAutopurge` is not set, then a warning will be emitted
|
452 | * cautioning about the potential for unbounded memory consumption.
|
453 | * (The TypeScript definitions will also discourage this.)
|
454 | */
|
455 | ttl?: Milliseconds;
|
456 | /**
|
457 | * Minimum amount of time in ms in which to check for staleness.
|
458 | * Defaults to 1, which means that the current time is checked
|
459 | * at most once per millisecond.
|
460 | *
|
461 | * Set to 0 to check the current time every time staleness is tested.
|
462 | * (This reduces performance, and is theoretically unnecessary.)
|
463 | *
|
464 | * Setting this to a higher value will improve performance somewhat
|
465 | * while using ttl tracking, albeit at the expense of keeping stale
|
466 | * items around a bit longer than their TTLs would indicate.
|
467 | *
|
468 | * @default 1
|
469 | */
|
470 | ttlResolution?: Milliseconds;
|
471 | /**
|
472 | * Preemptively remove stale items from the cache.
|
473 | *
|
474 | * Note that this may *significantly* degrade performance, especially if
|
475 | * the cache is storing a large number of items. It is almost always best
|
476 | * to just leave the stale items in the cache, and let them fall out as new
|
477 | * items are added.
|
478 | *
|
479 | * Note that this means that {@link OptionsBase.allowStale} is a bit
|
480 | * pointless, as stale items will be deleted almost as soon as they
|
481 | * expire.
|
482 | *
|
483 | * Use with caution!
|
484 | */
|
485 | ttlAutopurge?: boolean;
|
486 | /**
|
487 | * When using time-expiring entries with `ttl`, setting this to `true` will
|
488 | * make each item's age reset to 0 whenever it is retrieved from cache with
|
489 | * {@link LRUCache#get}, causing it to not expire. (It can still fall out
|
490 | * of cache based on recency of use, of course.)
|
491 | *
|
492 | * Has no effect if {@link OptionsBase.ttl} is not set.
|
493 | *
|
494 | * This may be overridden by passing an options object to `cache.get()`.
|
495 | */
|
496 | updateAgeOnGet?: boolean;
|
497 | /**
|
498 | * When using time-expiring entries with `ttl`, setting this to `true` will
|
499 | * make each item's age reset to 0 whenever its presence in the cache is
|
500 | * checked with {@link LRUCache#has}, causing it to not expire. (It can
|
501 | * still fall out of cache based on recency of use, of course.)
|
502 | *
|
503 | * Has no effect if {@link OptionsBase.ttl} is not set.
|
504 | */
|
505 | updateAgeOnHas?: boolean;
|
506 | /**
|
507 | * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return
|
508 | * stale data, if available.
|
509 | *
|
510 | * By default, if you set `ttl`, stale items will only be deleted from the
|
511 | * cache when you `get(key)`. That is, it's not preemptively pruning items,
|
512 | * unless {@link OptionsBase.ttlAutopurge} is set.
|
513 | *
|
514 | * If you set `allowStale:true`, it'll return the stale value *as well as*
|
515 | * deleting it. If you don't set this, then it'll return `undefined` when
|
516 | * you try to get a stale entry.
|
517 | *
|
518 | * Note that when a stale entry is fetched, _even if it is returned due to
|
519 | * `allowStale` being set_, it is removed from the cache immediately. You
|
520 | * can suppress this behavior by setting
|
521 | * {@link OptionsBase.noDeleteOnStaleGet}, either in the constructor, or in
|
522 | * the options provided to {@link LRUCache#get}.
|
523 | *
|
524 | * This may be overridden by passing an options object to `cache.get()`.
|
525 | * The `cache.has()` method will always return `false` for stale items.
|
526 | *
|
527 | * Only relevant if a ttl is set.
|
528 | */
|
529 | allowStale?: boolean;
|
530 | /**
|
531 | * Function that is called on items when they are dropped from the
|
532 | * cache, as `dispose(value, key, reason)`.
|
533 | *
|
534 | * This can be handy if you want to close file descriptors or do
|
535 | * other cleanup tasks when items are no longer stored in the cache.
|
536 | *
|
537 | * **NOTE**: It is called _before_ the item has been fully removed
|
538 | * from the cache, so if you want to put it right back in, you need
|
539 | * to wait until the next tick. If you try to add it back in during
|
540 | * the `dispose()` function call, it will break things in subtle and
|
541 | * weird ways.
|
542 | *
|
543 | * Unlike several other options, this may _not_ be overridden by
|
544 | * passing an option to `set()`, for performance reasons.
|
545 | *
|
546 | * The `reason` will be one of the following strings, corresponding
|
547 | * to the reason for the item's deletion:
|
548 | *
|
549 | * - `evict` Item was evicted to make space for a new addition
|
550 | * - `set` Item was overwritten by a new value
|
551 | * - `expire` Item expired its TTL
|
552 | * - `fetch` Item was deleted due to a failed or aborted fetch, or a
|
553 | * fetchMethod returning `undefined`.
|
554 | * - `delete` Item was removed by explicit `cache.delete(key)`,
|
555 | * `cache.clear()`, or `cache.set(key, undefined)`.
|
556 | */
|
557 | dispose?: Disposer<K, V>;
|
558 | /**
|
559 | * The same as {@link OptionsBase.dispose}, but called *after* the entry
|
560 | * is completely removed and the cache is once again in a clean state.
|
561 | *
|
562 | * It is safe to add an item right back into the cache at this point.
|
563 | * However, note that it is *very* easy to inadvertently create infinite
|
564 | * recursion this way.
|
565 | */
|
566 | disposeAfter?: Disposer<K, V>;
|
567 | /**
|
568 | * Set to true to suppress calling the
|
569 | * {@link OptionsBase.dispose} function if the entry key is
|
570 | * still accessible within the cache.
|
571 | *
|
572 | * This may be overridden by passing an options object to
|
573 | * {@link LRUCache#set}.
|
574 | *
|
575 | * Only relevant if `dispose` or `disposeAfter` are set.
|
576 | */
|
577 | noDisposeOnSet?: boolean;
|
578 | /**
|
579 | * Boolean flag to tell the cache to not update the TTL when setting a new
|
580 | * value for an existing key (ie, when updating a value rather than
|
581 | * inserting a new value). Note that the TTL value is _always_ set (if
|
582 | * provided) when adding a new entry into the cache.
|
583 | *
|
584 | * Has no effect if a {@link OptionsBase.ttl} is not set.
|
585 | *
|
586 | * May be passed as an option to {@link LRUCache#set}.
|
587 | */
|
588 | noUpdateTTL?: boolean;
|
589 | /**
|
590 | * Set to a positive integer to track the sizes of items added to the
|
591 | * cache, and automatically evict items in order to stay below this size.
|
592 | * Note that this may result in fewer than `max` items being stored.
|
593 | *
|
594 | * Attempting to add an item to the cache whose calculated size is greater
|
595 | * that this amount will be a no-op. The item will not be cached, and no
|
596 | * other items will be evicted.
|
597 | *
|
598 | * Optional, must be a positive integer if provided.
|
599 | *
|
600 | * Sets `maxEntrySize` to the same value, unless a different value is
|
601 | * provided for `maxEntrySize`.
|
602 | *
|
603 | * At least one of `max`, `maxSize`, or `TTL` is required. This must be a
|
604 | * positive integer if set.
|
605 | *
|
606 | * Even if size tracking is enabled, **it is strongly recommended to set a
|
607 | * `max` to prevent unbounded growth of the cache.**
|
608 | *
|
609 | * Note also that size tracking can negatively impact performance,
|
610 | * though for most cases, only minimally.
|
611 | */
|
612 | maxSize?: Size;
|
613 | /**
|
614 | * The maximum allowed size for any single item in the cache.
|
615 | *
|
616 | * If a larger item is passed to {@link LRUCache#set} or returned by a
|
617 | * {@link OptionsBase.fetchMethod} or {@link OptionsBase.memoMethod}, then
|
618 | * it will not be stored in the cache.
|
619 | *
|
620 | * Attempting to add an item whose calculated size is greater than
|
621 | * this amount will not cache the item or evict any old items, but
|
622 | * WILL delete an existing value if one is already present.
|
623 | *
|
624 | * Optional, must be a positive integer if provided. Defaults to
|
625 | * the value of `maxSize` if provided.
|
626 | */
|
627 | maxEntrySize?: Size;
|
628 | /**
|
629 | * A function that returns a number indicating the item's size.
|
630 | *
|
631 | * Requires {@link OptionsBase.maxSize} to be set.
|
632 | *
|
633 | * If not provided, and {@link OptionsBase.maxSize} or
|
634 | * {@link OptionsBase.maxEntrySize} are set, then all
|
635 | * {@link LRUCache#set} calls **must** provide an explicit
|
636 | * {@link SetOptions.size} or sizeCalculation param.
|
637 | */
|
638 | sizeCalculation?: SizeCalculator<K, V>;
|
639 | /**
|
640 | * Method that provides the implementation for {@link LRUCache#fetch}
|
641 | *
|
642 | * ```ts
|
643 | * fetchMethod(key, staleValue, { signal, options, context })
|
644 | * ```
|
645 | *
|
646 | * If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent
|
647 | * to `Promise.resolve(cache.get(key))`.
|
648 | *
|
649 | * If at any time, `signal.aborted` is set to `true`, or if the
|
650 | * `signal.onabort` method is called, or if it emits an `'abort'` event
|
651 | * which you can listen to with `addEventListener`, then that means that
|
652 | * the fetch should be abandoned. This may be passed along to async
|
653 | * functions aware of AbortController/AbortSignal behavior.
|
654 | *
|
655 | * The `fetchMethod` should **only** return `undefined` or a Promise
|
656 | * resolving to `undefined` if the AbortController signaled an `abort`
|
657 | * event. In all other cases, it should return or resolve to a value
|
658 | * suitable for adding to the cache.
|
659 | *
|
660 | * The `options` object is a union of the options that may be provided to
|
661 | * `set()` and `get()`. If they are modified, then that will result in
|
662 | * modifying the settings to `cache.set()` when the value is resolved, and
|
663 | * in the case of
|
664 | * {@link OptionsBase.noDeleteOnFetchRejection} and
|
665 | * {@link OptionsBase.allowStaleOnFetchRejection}, the handling of
|
666 | * `fetchMethod` failures.
|
667 | *
|
668 | * For example, a DNS cache may update the TTL based on the value returned
|
669 | * from a remote DNS server by changing `options.ttl` in the `fetchMethod`.
|
670 | */
|
671 | fetchMethod?: Fetcher<K, V, FC>;
|
672 | /**
|
673 | * Method that provides the implementation for {@link LRUCache#memo}
|
674 | */
|
675 | memoMethod?: Memoizer<K, V, FC>;
|
676 | /**
|
677 | * Set to true to suppress the deletion of stale data when a
|
678 | * {@link OptionsBase.fetchMethod} returns a rejected promise.
|
679 | */
|
680 | noDeleteOnFetchRejection?: boolean;
|
681 | /**
|
682 | * Do not delete stale items when they are retrieved with
|
683 | * {@link LRUCache#get}.
|
684 | *
|
685 | * Note that the `get` return value will still be `undefined`
|
686 | * unless {@link OptionsBase.allowStale} is true.
|
687 | *
|
688 | * When using time-expiring entries with `ttl`, by default stale
|
689 | * items will be removed from the cache when the key is accessed
|
690 | * with `cache.get()`.
|
691 | *
|
692 | * Setting this option will cause stale items to remain in the cache, until
|
693 | * they are explicitly deleted with `cache.delete(key)`, or retrieved with
|
694 | * `noDeleteOnStaleGet` set to `false`.
|
695 | *
|
696 | * This may be overridden by passing an options object to `cache.get()`.
|
697 | *
|
698 | * Only relevant if a ttl is used.
|
699 | */
|
700 | noDeleteOnStaleGet?: boolean;
|
701 | /**
|
702 | * Set to true to allow returning stale data when a
|
703 | * {@link OptionsBase.fetchMethod} throws an error or returns a rejected
|
704 | * promise.
|
705 | *
|
706 | * This differs from using {@link OptionsBase.allowStale} in that stale
|
707 | * data will ONLY be returned in the case that the {@link LRUCache#fetch}
|
708 | * fails, not any other times.
|
709 | *
|
710 | * If a `fetchMethod` fails, and there is no stale value available, the
|
711 | * `fetch()` will resolve to `undefined`. Ie, all `fetchMethod` errors are
|
712 | * suppressed.
|
713 | *
|
714 | * Implies `noDeleteOnFetchRejection`.
|
715 | *
|
716 | * This may be set in calls to `fetch()`, or defaulted on the constructor,
|
717 | * or overridden by modifying the options object in the `fetchMethod`.
|
718 | */
|
719 | allowStaleOnFetchRejection?: boolean;
|
    /**
     * Set to true to return a stale value from the cache when the
     * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches
     * an `'abort'` event, whether user-triggered, or due to internal cache
     * behavior.
     *
     * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying
     * {@link OptionsBase.fetchMethod} will still be considered canceled, and
     * any value it returns will be ignored and not cached.
     *
     * Caveat: since fetches are aborted when a new value is explicitly
     * set in the cache, this can lead to fetch returning a stale value,
     * since that was the fallback value _at the moment the `fetch()` was
     * initiated_, even though the new updated value is now present in
     * the cache.
     *
     * For example:
     *
     * ```ts
     * const cache = new LRUCache<string, any>({
     *   ttl: 100,
     *   fetchMethod: async (url, oldValue, { signal }) => {
     *     const res = await fetch(url, { signal })
     *     return await res.json()
     *   }
     * })
     * cache.set('https://example.com/', { some: 'data' })
     * // 100ms go by...
     * const result = cache.fetch('https://example.com/')
     * cache.set('https://example.com/', { other: 'thing' })
     * console.log(await result) // { some: 'data' }
     * console.log(cache.get('https://example.com/')) // { other: 'thing' }
     * ```
     */
    allowStaleOnFetchAbort?: boolean;
|
    /**
     * Set to true to ignore the `abort` event emitted by the `AbortSignal`
     * object passed to {@link OptionsBase.fetchMethod}, and still cache the
     * resulting resolution value, as long as it is not `undefined`.
     *
     * When used on its own, this means aborted {@link LRUCache#fetch} calls
     * are not immediately resolved or rejected when they are aborted, and
     * instead take the full time to await.
     *
     * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted
     * {@link LRUCache#fetch} calls will resolve immediately to their stale
     * cached value or `undefined`, and will continue to process and eventually
     * update the cache when they resolve, as long as the resulting value is
     * not `undefined`, thus supporting a "return stale on timeout while
     * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
     *
     * For example:
     *
     * ```ts
     * const c = new LRUCache({
     *   ttl: 100,
     *   ignoreFetchAbort: true,
     *   allowStaleOnFetchAbort: true,
     *   fetchMethod: async (key, oldValue, { signal }) => {
     *     // note: do NOT pass the signal to fetch()!
     *     // let's say this fetch can take a long time.
     *     const res = await fetch(`https://slow-backend-server/${key}`)
     *     return await res.json()
     *   },
     * })
     *
     * // this will return the stale value after 100ms, while still
     * // updating in the background for next time.
     * const val = await c.fetch('key', { signal: AbortSignal.timeout(100) })
     * ```
     *
     * **Note**: regardless of this setting, an `abort` event _is still
     * emitted on the `AbortSignal` object_, so may result in invalid results
     * when passed to other underlying APIs that use AbortSignals.
     *
     * This may be overridden in the {@link OptionsBase.fetchMethod} or the
     * call to {@link LRUCache#fetch}.
     */
    ignoreFetchAbort?: boolean;
|
799 | }
|
    /**
     * Options in which {@link OptionsBase.max} is required, making the
     * constructor options safe (the cache is bounded by entry count).
     */
    interface OptionsMaxLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        max: Count;
    }
|
    /**
     * Options in which {@link OptionsBase.ttl} and
     * {@link OptionsBase.ttlAutopurge} are required, making the constructor
     * options safe (expired entries are purged automatically).
     */
    interface OptionsTTLLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        ttl: Milliseconds;
        ttlAutopurge: boolean;
    }
|
    /**
     * Options in which {@link OptionsBase.maxSize} is required, making the
     * constructor options safe (the cache is bounded by calculated size).
     */
    interface OptionsSizeLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        maxSize: Size;
    }
|
    /**
     * The valid safe options for the {@link LRUCache} constructor: at least
     * one of `max`, `maxSize`, or `ttl` + `ttlAutopurge` must be provided.
     */
    type Options<K, V, FC> = OptionsMaxLimit<K, V, FC> | OptionsSizeLimit<K, V, FC> | OptionsTTLLimit<K, V, FC>;
|
    /**
     * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump},
     * and returned by {@link LRUCache#info}.
     */
    interface Entry<V> {
        /** The cached value */
        value: V;
        /** Time to live, in milliseconds (omitted when the entry has no TTL) */
        ttl?: Milliseconds;
        /** Calculated size of the entry, when size tracking is in use */
        size?: Size;
        /** Timestamp (ms) at which the TTL started counting */
        start?: Milliseconds;
    }
|
824 | }
|
/**
 * Default export, the thing you're using this module to get.
 *
 * The `K` and `V` types define the key and value types, respectively. The
 * optional `FC` type defines the type of the `context` object passed to
 * `cache.fetch()` and `cache.memo()`.
 *
 * Keys and values **must not** be `null` or `undefined`.
 *
 * All properties from the options object (with the exception of `max`,
 * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
 * added as normal public members. (The listed options are read-only getters.)
 *
 * Changing any of these will alter the defaults for subsequent method calls.
 */
export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implements Map<K, V> {
    #private;
    /**
     * {@link LRUCache.OptionsBase.ttl}
     */
    ttl: LRUCache.Milliseconds;
    /**
     * {@link LRUCache.OptionsBase.ttlResolution}
     */
    ttlResolution: LRUCache.Milliseconds;
    /**
     * {@link LRUCache.OptionsBase.ttlAutopurge}
     */
    ttlAutopurge: boolean;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnGet}
     */
    updateAgeOnGet: boolean;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnHas}
     */
    updateAgeOnHas: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStale}
     */
    allowStale: boolean;
    /**
     * {@link LRUCache.OptionsBase.noDisposeOnSet}
     */
    noDisposeOnSet: boolean;
    /**
     * {@link LRUCache.OptionsBase.noUpdateTTL}
     */
    noUpdateTTL: boolean;
    /**
     * {@link LRUCache.OptionsBase.maxEntrySize}
     */
    maxEntrySize: LRUCache.Size;
    /**
     * {@link LRUCache.OptionsBase.sizeCalculation}
     */
    sizeCalculation?: LRUCache.SizeCalculator<K, V>;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
     */
    noDeleteOnFetchRejection: boolean;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
     */
    noDeleteOnStaleGet: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
     */
    allowStaleOnFetchAbort: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
     */
    allowStaleOnFetchRejection: boolean;
    /**
     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
     */
    ignoreFetchAbort: boolean;
    /**
     * Do not call this method unless you need to inspect the
     * inner workings of the cache. If anything returned by this
     * object is modified in any way, strange breakage may occur.
     *
     * These fields are private for a reason!
     *
     * @internal
     */
    static unsafeExposeInternals<K extends {}, V extends {}, FC extends unknown = unknown>(c: LRUCache<K, V, FC>): {
        starts: ZeroArray | undefined;
        ttls: ZeroArray | undefined;
        sizes: ZeroArray | undefined;
        keyMap: Map<K, number>;
        keyList: (K | undefined)[];
        valList: (V | BackgroundFetch<V> | undefined)[];
        next: NumberArray;
        prev: NumberArray;
        readonly head: Index;
        readonly tail: Index;
        free: StackLike;
        isBackgroundFetch: (p: any) => boolean;
        backgroundFetch: (k: K, index: number | undefined, options: LRUCache.FetchOptions<K, V, FC>, context: any) => BackgroundFetch<V>;
        moveToTail: (index: number) => void;
        indexes: (options?: {
            allowStale: boolean;
        }) => Generator<Index, void, unknown>;
        rindexes: (options?: {
            allowStale: boolean;
        }) => Generator<Index, void, unknown>;
        isStale: (index: number | undefined) => boolean;
    };
    /**
     * {@link LRUCache.OptionsBase.max} (read-only)
     */
    get max(): LRUCache.Count;
    /**
     * {@link LRUCache.OptionsBase.maxSize} (read-only)
     */
    get maxSize(): LRUCache.Count;
    /**
     * The total computed size of items in the cache (read-only)
     */
    get calculatedSize(): LRUCache.Size;
    /**
     * The number of items stored in the cache (read-only)
     */
    get size(): LRUCache.Count;
    /**
     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
     */
    get fetchMethod(): LRUCache.Fetcher<K, V, FC> | undefined;
    /**
     * {@link LRUCache.OptionsBase.memoMethod} (read-only)
     */
    get memoMethod(): LRUCache.Memoizer<K, V, FC> | undefined;
    /**
     * {@link LRUCache.OptionsBase.dispose} (read-only)
     */
    get dispose(): LRUCache.Disposer<K, V> | undefined;
    /**
     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
     */
    get disposeAfter(): LRUCache.Disposer<K, V> | undefined;
    constructor(options: LRUCache.Options<K, V, FC> | LRUCache<K, V, FC>);
    /**
     * Return the number of ms left in the item's TTL. If item is not in cache,
     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
     */
    getRemainingTTL(key: K): number;
    /**
     * Return a generator yielding `[key, value]` pairs,
     * in order from most recently used to least recently used.
     */
    entries(): Generator<[K, V], void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.entries}
     *
     * Return a generator yielding `[key, value]` pairs,
     * in order from least recently used to most recently used.
     */
    rentries(): Generator<(K | V | BackgroundFetch<V> | undefined)[], void, unknown>;
    /**
     * Return a generator yielding the keys in the cache,
     * in order from most recently used to least recently used.
     */
    keys(): Generator<K, void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.keys}
     *
     * Return a generator yielding the keys in the cache,
     * in order from least recently used to most recently used.
     */
    rkeys(): Generator<K, void, unknown>;
    /**
     * Return a generator yielding the values in the cache,
     * in order from most recently used to least recently used.
     */
    values(): Generator<V, void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.values}
     *
     * Return a generator yielding the values in the cache,
     * in order from least recently used to most recently used.
     */
    rvalues(): Generator<V | BackgroundFetch<V> | undefined, void, unknown>;
    /**
     * Iterating over the cache itself yields the same results as
     * {@link LRUCache.entries}
     */
    [Symbol.iterator](): Generator<[K, V], void, unknown>;
    /**
     * A String value that is used in the creation of the default string
     * description of an object. Called by the built-in method
     * `Object.prototype.toString`.
     */
    [Symbol.toStringTag]: string;
    /**
     * Find a value for which the supplied fn method returns a truthy value,
     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
     */
    find(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => boolean, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
    /**
     * Call the supplied function on each item in the cache, in order from most
     * recently used to least recently used.
     *
     * `fn` is called as `fn(value, key, cache)`.
     *
     * If `thisp` is provided, function will be called in the `this`-context of
     * the provided object, or the cache if no `thisp` object is provided.
     *
     * Does not update age or recency of use, or iterate over stale values.
     */
    forEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
    /**
     * The same as {@link LRUCache.forEach} but items are iterated over in
     * reverse order. (ie, less recently used items are iterated over first.)
     */
    rforEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
    /**
     * Delete any stale entries. Returns true if anything was removed,
     * false otherwise.
     */
    purgeStale(): boolean;
    /**
     * Get the extended info about a given entry, to get its value, size, and
     * TTL info simultaneously. Returns `undefined` if the key is not present.
     *
     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
     * serialization, the `start` value is always the current timestamp, and the
     * `ttl` is a calculated remaining time to live (negative if expired).
     *
     * Always returns stale values, if their info is found in the cache, so be
     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
     * if relevant.
     */
    info(key: K): LRUCache.Entry<V> | undefined;
    /**
     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
     * passed to {@link LRUCache#load}.
     *
     * The `start` fields are calculated relative to a portable `Date.now()`
     * timestamp, even if `performance.now()` is available.
     *
     * Stale entries are always included in the `dump`, even if
     * {@link LRUCache.OptionsBase.allowStale} is false.
     *
     * Note: this returns an actual array, not a generator, so it can be more
     * easily passed around.
     */
    dump(): [K, LRUCache.Entry<V>][];
    /**
     * Reset the cache and load in the items in entries in the order listed.
     *
     * The shape of the resulting cache may be different if the same options are
     * not used in both caches.
     *
     * The `start` fields are assumed to be calculated relative to a portable
     * `Date.now()` timestamp, even if `performance.now()` is available.
     */
    load(arr: [K, LRUCache.Entry<V>][]): void;
    /**
     * Add a value to the cache.
     *
     * Note: if `undefined` is specified as a value, this is an alias for
     * {@link LRUCache#delete}
     *
     * Fields on the {@link LRUCache.SetOptions} options param will override
     * their corresponding values in the constructor options for the scope
     * of this single `set()` operation.
     *
     * If `start` is provided, then that will set the effective start
     * time for the TTL calculation. Note that this must be a previous
     * value of `performance.now()` if supported, or a previous value of
     * `Date.now()` if not.
     *
     * Options object may also include `size`, which will prevent
     * calling the `sizeCalculation` function and just use the specified
     * number if it is a positive integer, and `noDisposeOnSet` which
     * will prevent calling a `dispose` function in the case of
     * overwrites.
     *
     * If the `size` (or return value of `sizeCalculation`) for a given
     * entry is greater than `maxEntrySize`, then the item will not be
     * added to the cache.
     *
     * Will update the recency of the entry.
     *
     * If the value is `undefined`, then this is an alias for
     * `cache.delete(key)`. `undefined` is never stored in the cache.
     */
    set(k: K, v: V | BackgroundFetch<V> | undefined, setOptions?: LRUCache.SetOptions<K, V, FC>): this;
    /**
     * Evict the least recently used item, returning its value or
     * `undefined` if cache is empty.
     */
    pop(): V | undefined;
    /**
     * Check if a key is in the cache, without updating the recency of
     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
     * to `true` in either the options or the constructor.
     *
     * Will return `false` if the item is stale, even though it is technically in
     * the cache. The difference can be determined (if it matters) by using a
     * `status` argument, and inspecting the `has` field.
     *
     * Will not update item age unless
     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
     */
    has(k: K, hasOptions?: LRUCache.HasOptions<K, V, FC>): boolean;
    /**
     * Like {@link LRUCache#get} but doesn't update recency or delete stale
     * items.
     *
     * Returns `undefined` if the item is stale, unless
     * {@link LRUCache.OptionsBase.allowStale} is set.
     */
    peek(k: K, peekOptions?: LRUCache.PeekOptions<K, V, FC>): V | undefined;
    /**
     * Make an asynchronous cached fetch using the
     * {@link LRUCache.OptionsBase.fetchMethod} function.
     *
     * If the value is in the cache and not stale, then the returned
     * Promise resolves to the value.
     *
     * If not in the cache, or beyond its TTL staleness, then
     * `fetchMethod(key, staleValue, { options, signal, context })` is
     * called, and the value returned will be added to the cache once
     * resolved.
     *
     * If called with `allowStale`, and an asynchronous fetch is
     * currently in progress to reload a stale value, then the former
     * stale value will be returned.
     *
     * If called with `forceRefresh`, then the cached item will be
     * re-fetched, even if it is not stale. However, if `allowStale` is also
     * set, then the old value will still be returned. This is useful
     * in cases where you want to force a reload of a cached value. If
     * a background fetch is already in progress, then `forceRefresh`
     * has no effect.
     *
     * If multiple fetches for the same key are issued, then they will all be
     * coalesced into a single call to fetchMethod.
     *
     * Note that this means that handling options such as
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},
     * {@link LRUCache.FetchOptions.signal},
     * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be
     * determined by the FIRST fetch() call for a given key.
     *
     * This is a known (fixable) shortcoming which will be addressed when
     * someone complains about it, as the fix would involve added complexity and
     * may not be worth the costs for this edge case.
     *
     * If {@link LRUCache.OptionsBase.fetchMethod} is not specified, then this is
     * effectively an alias for `Promise.resolve(cache.get(key))`.
     *
     * When the fetch method resolves to a value, if the fetch has not
     * been aborted due to deletion, eviction, or being overwritten,
     * then it is added to the cache using the options provided.
     *
     * If the key is evicted or deleted before the `fetchMethod`
     * resolves, then the AbortSignal passed to the `fetchMethod` will
     * receive an `abort` event, and the promise returned by `fetch()`
     * will reject with the reason for the abort.
     *
     * If a `signal` is passed to the `fetch()` call, then aborting the
     * signal will abort the fetch and cause the `fetch()` promise to
     * reject with the reason provided.
     *
     * **Setting `context`**
     *
     * If an `FC` type is set to a type other than `unknown`, `void`, or
     * `undefined` in the {@link LRUCache} constructor, then all
     * calls to `cache.fetch()` _must_ provide a `context` option. If
     * set to `undefined` or `void`, then calls to fetch _must not_
     * provide a `context` option.
     *
     * The `context` param allows you to provide arbitrary data that
     * might be relevant in the course of fetching the data. It is only
     * relevant for the course of a single `fetch()` operation, and
     * discarded afterwards.
     *
     * **Note: `fetch()` calls are inflight-unique**
     *
     * If you call `fetch()` multiple times with the same key value,
     * then every call after the first will resolve on the same
     * promise<sup>1</sup>,
     * _even if they have different settings that would otherwise change
     * the behavior of the fetch_, such as `noDeleteOnFetchRejection`
     * or `ignoreFetchAbort`.
     *
     * In most cases, this is not a problem (in fact, only fetching
     * something once is what you probably want, if you're caching in
     * the first place). If you are changing the fetch() options
     * dramatically between runs, there's a good chance that you might
     * be trying to fit divergent semantics into a single object, and
     * would be better off with multiple cache instances.
     *
     * **1**: Ie, they're not the "same Promise", but they resolve at
     * the same time, because they're both waiting on the same
     * underlying fetchMethod response.
     */
    fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<undefined | V>;
    fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : never): Promise<undefined | V>;
    /**
     * In some cases, `cache.fetch()` may resolve to `undefined`, either because
     * a {@link LRUCache.OptionsBase#fetchMethod} was not provided (turning
     * `cache.fetch(k)` into just an async wrapper around `cache.get(k)`) or
     * because `ignoreFetchAbort` was specified (either to the constructor or
     * in the {@link LRUCache.FetchOptions}). Also, the
     * {@link OptionsBase.fetchMethod} may return `undefined` or `void`, making
     * the test even more complicated.
     *
     * Because inferring the cases where `undefined` might be returned are so
     * cumbersome, but testing for `undefined` can also be annoying, this method
     * can be used, which will reject if `this.fetch()` resolves to undefined.
     */
    forceFetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<V>;
    forceFetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : never): Promise<V>;
    /**
     * If the key is found in the cache, then this is equivalent to
     * {@link LRUCache#get}. If not in the cache, then calculate the value using
     * the {@link LRUCache.OptionsBase.memoMethod}, and add it to the cache.
     *
     * If an `FC` type is set to a type other than `unknown`, `void`, or
     * `undefined` in the LRUCache constructor, then all calls to `cache.memo()`
     * _must_ provide a `context` option. If set to `undefined` or `void`, then
     * calls to memo _must not_ provide a `context` option.
     *
     * The `context` param allows you to provide arbitrary data that might be
     * relevant in the course of fetching the data. It is only relevant for the
     * course of a single `memo()` operation, and discarded afterwards.
     */
    memo(k: K, memoOptions: unknown extends FC ? LRUCache.MemoOptions<K, V, FC> : FC extends undefined | void ? LRUCache.MemoOptionsNoContext<K, V> : LRUCache.MemoOptionsWithContext<K, V, FC>): V;
    memo(k: unknown extends FC ? K : FC extends undefined | void ? K : never, memoOptions?: unknown extends FC ? LRUCache.MemoOptions<K, V, FC> : FC extends undefined | void ? LRUCache.MemoOptionsNoContext<K, V> : never): V;
    /**
     * Return a value from the cache. Will update the recency of the cache
     * entry found.
     *
     * If the key is not found, get() will return `undefined`.
     */
    get(k: K, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
    /**
     * Deletes a key out of the cache.
     *
     * Returns true if the key was deleted, false otherwise.
     */
    delete(k: K): boolean;
    /**
     * Clear the cache entirely, throwing away all values.
     */
    clear(): void;
}
|
1277 | //# sourceMappingURL=index.d.ts.map |
\ | No newline at end of file |