/**
 * If this token is returned in the results from a batchingFunction, the corresponding requests will be placed back
 * into the head of the queue.
 */
export declare const BATCHER_RETRY_TOKEN: unique symbol;
|
/**
 * The per-request result produced by a batchingFunction: the output value `T`, an `Error`
 * (or subclass) to reject that individual request, or `BATCHER_RETRY_TOKEN` to place the
 * corresponding request back at the head of the queue for a later batch.
 */
export declare type BatchingResult<T> = T | Error | typeof BATCHER_RETRY_TOKEN;
|
/**
 * Configuration for a {@link Batcher}. Only `batchingFunction` is required; all other
 * options tune how and when queued requests are grouped into batches.
 */
export interface BatcherOptions<I, O> {
    /**
     * The maximum number of requests that can be combined in a single batch.
     */
    readonly maxBatchSize?: number;
    /**
     * The number of milliseconds to wait before running a batch of requests.
     *
     * This is used to allow time for the requests to queue up. Defaults to 1ms.
     * This delay does not apply if the limit set by options.maxBatchSize is reached.
     */
    readonly queuingDelay?: number;
    /**
     * An array containing the number of requests that must be queued in order to trigger a batch request at each level
     * of concurrency.
     *
     * For example [1, 5], would require at least 1 queued request when no batch requests are active,
     * and 5 queued requests when 1 (or more) batch requests are active. Defaults to [1]. Note that the delay imposed
     * by options.queuingDelay still applies when a batch request is triggered.
     */
    readonly queuingThresholds?: readonly number[];
    /**
     * A function which is passed an array of request values, returning a promise which resolves to an array of
     * response values.
     *
     * The request and response arrays must be of equal length. To reject an individual request, return an Error object
     * (or class which extends Error) at the corresponding element in the response array.
     */
    readonly batchingFunction: (this: Batcher<I, O>, inputs: readonly I[]) => ReadonlyArray<BatchingResult<O>> | PromiseLike<ReadonlyArray<BatchingResult<O>>>;
    /**
     * A function which can delay a batch by returning a promise which resolves when the batch should be run.
     * If the function does not return a promise, no delay will be applied.
     */
    readonly delayFunction?: () => PromiseLike<void> | undefined | null | void;
}
|
/**
 * Collects individual requests submitted via {@link Batcher.getResult} and dispatches them
 * in batches to the configured batchingFunction, resolving each caller with its individual result.
 */
export declare class Batcher<I, O> {
    // Configured limits/timing, mirroring BatcherOptions.maxBatchSize, queuingDelay,
    // and queuingThresholds respectively.
    private readonly _maxBatchSize;
    private readonly _queuingDelay;
    private readonly _queuingThresholds;
    // Pending request inputs and their corresponding output settlers.
    // NOTE(review): exact element types are erased in this declaration — see implementation.
    private readonly _inputQueue;
    private readonly _outputQueue;
    // Optional batch-delay hook (BatcherOptions.delayFunction).
    private readonly _delayFunction?;
    private readonly _batchingFunction;
    // Internal scheduling state: pending timeout handle, whether a batch is queued to run,
    // how many batch requests are in flight, and how many send() calls are bypassing the delay.
    // NOTE(review): semantics inferred from names — confirm against implementation.
    private _waitTimeout?;
    private _waiting;
    private _activePromiseCount;
    private _immediateCount;
    constructor(options: BatcherOptions<I, O>);
    /**
     * Returns a promise which resolves or rejects with the individual result returned from the batching function.
     */
    getResult(input: I): Promise<O>;
    /**
     * Triggers a batch to run, bypassing the queuingDelay while respecting other imposed delays.
     */
    send(): void;
    /**
     * Triggers a batch to run, adhering to the maxBatchSize, queuingThresholds, and queuingDelay
     */
    private _trigger;
    /**
     * Runs the batch, while respecting delays imposed by the supplied delayFunction
     */
    private _run;
    /**
     * Runs the batch immediately without further delay
     */
    private _runImmediately;
}
|