// NOTE(review): unpkg viewer chrome ("UNPKG", "15 kB TypeScript View Raw")
// removed from the top of this file; content below is the raw index.d.ts.
// Type definitions for lru-cache 7.6
// Project: https://github.com/isaacs/node-lru-cache
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
//                 BendingBender <https://github.com/BendingBender>
//                 isaacs <https://github.com/isaacs>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

/// <reference lib="DOM" />
// tslint:disable:member-access
10declare class LRUCache<K, V> implements Iterable<[K, V]> {
11 constructor(options: LRUCache.Options<K, V>);
12
13 /**
14 * Return total length of objects in cache taking into account `length` options function.
15 *
16 * @deprecated since 7.0 use `cache.size` instead
17 */
18 public readonly length: number;
19
20 // values populated from the constructor options
21 public readonly max: number;
22 public readonly maxSize: number;
23 public readonly sizeCalculation: LRUCache.SizeCalculator<K, V> | undefined;
24 public readonly dispose: LRUCache.Disposer<K, V>;
25 /**
26 * @since 7.4.0
27 */
28 public readonly disposeAfter: LRUCache.Disposer<K, V> | null;
29 public readonly noDisposeOnSet: boolean;
30 public readonly ttl: number;
31 public readonly ttlResolution: number;
32 public readonly ttlAutopurge: boolean;
33 public readonly allowStale: boolean;
34 public readonly updateAgeOnGet: boolean;
35 public readonly fetchMethod: LRUCache.Fetcher<K, V> | null;
36
37 /**
38 * The total number of items held in the cache at the current moment.
39 */
40 public readonly size: number;
41
42 /**
43 * The total size of items in cache when using size tracking.
44 */
45 public readonly calculatedSize: number;
46
47 /**
48 * Add a value to the cache.
49 */
50 public set(key: K, value: V, options?: LRUCache.SetOptions<K, V>): this;
51
52 /**
53 * Return a value from the cache.
54 * Will update the recency of the cache entry found.
55 * If the key is not found, `get()` will return `undefined`.
56 * This can be confusing when setting values specifically to `undefined`,
57 * as in `cache.set(key, undefined)`. Use `cache.has()` to determine
58 * whether a key is present in the cache at all.
59 */
60 // tslint:disable-next-line:no-unnecessary-generics
61 public get<T = V>(key: K, options?: LRUCache.GetOptions): T | undefined;
62
63 /**
64 * Like `get()` but doesn't update recency or delete stale items.
65 * Returns `undefined` if the item is stale, unless `allowStale` is set either on the cache or in the options object.
66 */
67 // tslint:disable-next-line:no-unnecessary-generics
68 public peek<T = V>(key: K, options?: LRUCache.PeekOptions): T | undefined;
69
70 /**
71 * Check if a key is in the cache, without updating the recency or age.
72 * Will return false if the item is stale, even though it is technically in the cache.
73 */
74 public has(key: K): boolean;
75
76 /**
77 * Deletes a key out of the cache.
78 * Returns true if the key was deleted, false otherwise.
79 */
80 public delete(key: K): boolean;
81
82 /**
83 * Clear the cache entirely, throwing away all values.
84 */
85 public clear(): void;
86
87 /**
88 * Delete any stale entries. Returns true if anything was removed, false otherwise.
89 */
90 public purgeStale(): boolean;
91
92 /**
93 * Find a value for which the supplied fn method returns a truthy value, similar to Array.find().
94 * fn is called as fn(value, key, cache).
95 */
96 // tslint:disable-next-line:no-unnecessary-generics
97 public find<T = V>(callbackFn: (value: V, key: K, cache: this) => boolean | undefined | void, options?: LRUCache.GetOptions): T;
98
99 /**
100 * Same as cache.forEach(fn, thisp), but in order from least recently used to most recently used.
101 */
102 public forEach<T = this>(callbackFn: (this: T, value: V, key: K, cache: this) => void, thisArg?: T): void;
103
104 /**
105 * The same as `cache.forEach(...)` but items are iterated over in reverse order.
106 * (ie, less recently used items are iterated over first.)
107 */
108 public rforEach<T = this>(callbackFn: (this: T, value: V, key: K, cache: this) => void, thisArg?: T): void;
109
110 /**
111 * Return a generator yielding the keys in the cache,
112 * in order from most recently used to least recently used.
113 */
114 public keys(): Generator<K>;
115
116 /**
117 * Return a generator yielding the keys in the cache,
118 * in order from least recently used to most recently used.
119 */
120 public rkeys(): Generator<K>;
121
122 /**
123 * Return a generator yielding the values in the cache,
124 * in order from most recently used to least recently used.
125 */
126 public values(): Generator<V>;
127
128 /**
129 * Return a generator yielding the values in the cache,
130 * in order from least recently used to most recently used.
131 */
132 public rvalues(): Generator<V>;
133
134 /**
135 * Return a generator yielding `[key, value]` pairs,
136 * in order from most recently used to least recently used.
137 */
138 public entries(): Generator<[K, V]>;
139
140 /**
141 * Return a generator yielding `[key, value]` pairs,
142 * in order from least recently used to most recently used.
143 */
144 public rentries(): Generator<[K, V]>;
145
146 public [Symbol.iterator](): Iterator<[K, V]>;
147
148 /**
149 * Return an array of [key, entry] objects which can be passed to cache.load()
150 */
151 public dump(): Array<[K, LRUCache.Entry<V>]>;
152
153 /**
154 * Reset the cache and load in the items in entries in the order listed.
155 * Note that the shape of the resulting cache may be different if the
156 * same options are not used in both caches.
157 */
158 public load(cacheEntries: ReadonlyArray<[K, LRUCache.Entry<V>]>): void;
159
160 /**
161 * Evict the least recently used item, returning its value or `undefined` if cache is empty.
162 */
163 public pop(): V | undefined;
164
165 // ========================= Deprecated
166
167 /**
168 * Deletes a key out of the cache.
169 *
170 * @deprecated since 7.0 use delete() instead
171 */
172 public del(key: K): boolean;
173
174 /**
175 * Clear the cache entirely, throwing away all values.
176 *
177 * @deprecated since 7.0 use clear() instead
178 */
179 public reset(): void;
180
181 /**
182 * Manually iterates over the entire cache proactively pruning old entries.
183 *
184 * @deprecated since 7.0 use purgeStale() instead
185 */
186 public prune(): boolean;
187
188 /**
189 * since: 7.6.0
190 */
191 // tslint:disable-next-line:no-unnecessary-generics
192 public fetch<ExpectedValue = V>(key: K, options?: LRUCache.FetchOptions): Promise<ExpectedValue | undefined>;
193 /**
194 * since: 7.6.0
195 */
196 public getRemainingTTL(key: K): number;
197}
198
199declare namespace LRUCache {
200 type DisposeReason = 'evict' | 'set' | 'delete';
201
202 type SizeCalculator<K, V> = (value: V, key: K) => number;
203
204 type Disposer<K, V> = (value: V, key: K, reason: DisposeReason) => void;
205 type Fetcher<K, V> = (key: K, staleKey?: K, options?: FetcherOptions<K, V>) => Promise<V>;
206
207 interface DeprecatedOptions<K, V> {
208 /**
209 * Maximum age in ms. Items are not pro-actively pruned out as they age,
210 * but if you try to get an item that is too old, it'll drop it and return
211 * undefined instead of giving it to you.
212 *
213 * @deprecated since 7.0 use options.ttl instead
214 */
215 maxAge?: number;
216
217 /**
218 * Function that is used to calculate the length of stored items.
219 * If you're storing strings or buffers, then you probably want to do
220 * something like `function(n, key){return n.length}`. The default
221 * is `function(){return 1}`, which is fine if you want to store
222 * `max` like-sized things. The item is passed as the first argument,
223 * and the key is passed as the second argument.
224 *
225 * @deprecated since 7.0 use options.sizeCalculation instead
226 */
227 length?(value: V, key?: K): number;
228
229 /**
230 * By default, if you set a `maxAge`, it'll only actually pull stale items
231 * out of the cache when you `get(key)`. (That is, it's not pre-emptively
232 * doing a `setTimeout` or anything.) If you set `stale:true`, it'll return
233 * the stale value before deleting it. If you don't set this, then it'll
234 * return `undefined` when you try to get a stale entry,
235 * as if it had already been deleted.
236 *
237 * @deprecated since 7.0 use options.allowStale instead
238 */
239 stale?: boolean;
240 }
241
242 interface LimitedByCount {
243 /**
244 * The number of most recently used items to keep.
245 * Note that we may store fewer items than this if maxSize is hit.
246 */
247 max: number;
248 }
249
250 interface LimitedBySize<K, V> {
251 /**
252 * If you wish to track item size, you must provide a maxSize
253 * note that we still will only keep up to max *actual items*,
254 * so size tracking may cause fewer than max items to be stored.
255 * At the extreme, a single item of maxSize size will cause everything
256 * else in the cache to be dropped when it is added. Use with caution!
257 * Note also that size tracking can negatively impact performance,
258 * though for most cases, only minimally.
259 */
260 maxSize: number;
261
262 /**
263 * Function to calculate size of items. Useful if storing strings or
264 * buffers or other items where memory size depends on the object itself.
265 * Also note that oversized items do NOT immediately get dropped from
266 * the cache, though they will cause faster turnover in the storage.
267 */
268 sizeCalculation?: SizeCalculator<K, V>;
269 }
270
271 interface LimitedByTTL {
272 /**
273 * Max time to live for items before they are considered stale.
274 * Note that stale items are NOT preemptively removed by default,
275 * and MAY live in the cache, contributing to its LRU max, long after
276 * they have expired.
277 *
278 * Also, as this cache is optimized for LRU/MRU operations, some of
279 * the staleness/TTL checks will reduce performance, as they will incur
280 * overhead by deleting items.
281 *
282 * Must be a positive integer in ms, defaults to 0, which means "no TTL"
283 */
284 ttl: number;
285
286 /**
287 * Boolean flag to tell the cache to not update the TTL when
288 * setting a new value for an existing key (ie, when updating a value rather
289 * than inserting a new value). Note that the TTL value is _always_ set
290 * (if provided) when adding a new entry into the cache.
291 *
292 * @default false
293 * @since 7.4.0
294 */
295 noUpdateTTL?: boolean;
296
297 /**
298 * Minimum amount of time in ms in which to check for staleness.
299 * Defaults to 1, which means that the current time is checked
300 * at most once per millisecond.
301 *
302 * Set to 0 to check the current time every time staleness is tested.
303 *
304 * Note that setting this to a higher value will improve performance
305 * somewhat while using ttl tracking, albeit at the expense of keeping
306 * stale items around a bit longer than intended.
307 *
308 * @default 1
309 * @since 7.1.0
310 */
311 ttlResolution?: number;
312
313 /**
314 * Preemptively remove stale items from the cache.
315 * Note that this may significantly degrade performance,
316 * especially if the cache is storing a large number of items.
317 * It is almost always best to just leave the stale items in
318 * the cache, and let them fall out as new items are added.
319 *
320 * Note that this means that allowStale is a bit pointless,
321 * as stale items will be deleted almost as soon as they expire.
322 *
323 * Use with caution!
324 *
325 * @default false
326 * @since 7.1.0
327 */
328 ttlAutopurge?: boolean;
329
330 /**
331 * Return stale items from cache.get() before disposing of them
332 *
333 * @default false
334 */
335 allowStale?: boolean;
336
337 /**
338 * Update the age of items on cache.get(), renewing their TTL
339 *
340 * @default false
341 */
342 updateAgeOnGet?: boolean;
343 }
344 type SafetyBounds<K, V> = LimitedByCount | LimitedBySize<K, V> | LimitedByTTL;
345
346 interface SharedOptions<K, V> {
347 /**
348 * Function that is called on items when they are dropped from the cache.
349 * This can be handy if you want to close file descriptors or do other
350 * cleanup tasks when items are no longer accessible. Called with `key, value`.
351 * It's called before actually removing the item from the internal cache,
352 * so if you want to immediately put it back in, you'll have to do that in
353 * a `nextTick` or `setTimeout` callback or it won't do anything.
354 */
355 dispose?: Disposer<K, V>;
356
357 /**
358 * The same as dispose, but called *after* the entry is completely removed
359 * and the cache is once again in a clean state
360 * It is safe to add an item right back into the cache at this point.
361 * However, note that it is *very* easy to inadvertently create infinite
362 * recursion this way.
363 * @since 7.3.0
364 */
365 disposeAfter?: Disposer<K, V>;
366
367 /**
368 * Set to true to suppress calling the dispose() function if the entry
369 * key is still accessible within the cache.
370 * This may be overridden by passing an options object to cache.set().
371 *
372 * @default false
373 */
374 noDisposeOnSet?: boolean;
375
376 /**
377 * Since 7.6.0
378 * `fetchMethod` Function that is used to make background asynchronous
379 * fetches. Called with `fetchMethod(key, staleValue)`. May return a
380 * Promise.
381 *
382 * If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent
383 * to `Promise.resolve(cache.get(key))`.
384 */
385 fetchMethod?: Fetcher<K, V> | null;
386 }
387
388 type Options<K, V> = SharedOptions<K, V> & DeprecatedOptions<K, V> & SafetyBounds<K, V>;
389
390 interface SetOptions<K, V> {
391 /**
392 * A value for the size of the entry, prevents calls to `sizeCalculation` function
393 */
394 size?: number;
395 sizeCalculation?: SizeCalculator<K, V>;
396 ttl?: number;
397 noDisposeOnSet?: boolean;
398 noUpdateTTL?: boolean;
399 }
400
401 interface GetOptions {
402 allowStale?: boolean;
403 updateAgeOnGet?: boolean;
404 }
405
406 interface PeekOptions {
407 allowStale?: boolean;
408 }
409 interface FetchOptions {
410 allowStale?: boolean;
411 updateAgeOnGet?: boolean;
412 }
413
414 interface FetcherOptions<K, V> {
415 signal?: AbortSignal;
416 options?: SetOptions<K, V> & GetOptions;
417 }
418
419 interface Entry<V> {
420 value: V;
421 ttl?: number;
422 size?: number;
423 }
424}
425
426export = LRUCache;