// Type declarations for the throttle/concurrency-control module.
1import { PersistentCache } from "./cache";
2export declare class Deferred<T = void> {
3 promise: Promise<T>;
4 resolve: (arg: T | PromiseLike<T>) => void;
5 reject: (err?: any) => void;
6 constructor();
7}
8export declare class DeferredWorker<T = void> extends Deferred<T> {
9 private worker;
10 private cancel?;
11 constructor(worker: () => Promise<T>, cancel?: (() => string | undefined) | undefined);
12 execute(): Promise<void>;
13}
14export declare type RetryType = number | ((err: any, retries: number) => boolean);
15export declare function retryOp<T>(retryN: RetryType, fn: (retries: number) => Promise<T>): Promise<T>;
16export declare class Funnel<T = void> {
17 concurrency: number;
18 protected shouldRetry?: RetryType | undefined;
19 protected pendingQueue: Set<DeferredWorker<T>>;
20 protected executingQueue: Set<DeferredWorker<T>>;
21 processed: number;
22 errors: number;
23 constructor(concurrency?: number, shouldRetry?: RetryType | undefined);
24 push(worker: () => Promise<T>, shouldRetry?: RetryType, cancel?: () => string | undefined): Promise<T>;
25 clear(): void;
26 promises(): Promise<T>[];
27 all(): Promise<(void | Awaited<T>)[]>;
28 size(): number;
29 setMaxConcurrency(maxConcurrency: number): void;
30 getConcurrency(): number;
31 protected doWork(): void;
32}
33/**
34 * @internal
35 */
36export interface PumpOptions {
37 concurrency: number;
38 verbose?: boolean;
39}
40/**
41 * @internal
42 */
43export declare class Pump<T = void> extends Funnel<T | void> {
44 protected options: PumpOptions;
45 protected worker: () => Promise<T>;
46 stopped: boolean;
47 constructor(options: PumpOptions, worker: () => Promise<T>);
48 start(): void;
49 stop(): void;
50 drain(): Promise<(void | Awaited<T>)[]>;
51 setMaxConcurrency(concurrency: number): void;
52}
53export declare class RateLimiter<T = void> {
54 protected targetRequestsPerSecond: number;
55 protected burst: number;
56 protected lastTick: number;
57 protected bucket: number;
58 protected queue: Set<DeferredWorker<T>>;
59 constructor(targetRequestsPerSecond: number, burst?: number);
60 push(worker: () => Promise<T>, cancel?: () => string | undefined): Promise<T>;
61 protected updateBucket(): void;
62 protected drainQueue(): Promise<void>;
63 clear(): void;
64}
65/**
66 * Specify {@link throttle} limits. These limits shape the way throttle invokes
67 * the underlying function.
68 * @public
69 */
70export interface Limits {
71 /**
72 * The maximum number of concurrent executions of the underlying function to
73 * allow. Must be supplied, there is no default. Specifying `0` or
74 * `Infinity` is allowed and means there is no concurrency limit.
75 */
76 concurrency: number;
77 /**
78 * The maximum number of calls per second to allow to the underlying
79 * function. Default: no rate limit.
80 */
81 rate?: number;
82 /**
83 * The maximum number of calls to the underlying function to "burst" -- e.g.
84 * the number that can be issued immediately as long as the rate limit is
85 * not exceeded. For example, if rate is 5 and burst is 5, and 10 calls are
86 * made to the throttled function, 5 calls are made immediately and then
87 * after 1 second, another 5 calls are made immediately. Setting burst to 1
88 * means calls are issued uniformly every `1/rate` seconds. If `rate` is not
89 * specified, then `burst` does not apply. Default: 1.
90 */
91 burst?: number;
92 /**
93 * Retry if the throttled function returns a rejected promise. `retry` can
94 * be a number or a function. If it is a number `N`, then up to `N`
95 * additional attempts are made in addition to the initial call. If retry is
96 * a function, it should return `true` if another retry attempt should be
97 * made, otherwise `false`. The first argument will be the value of the
98 * rejected promise from the previous call attempt, and the second argument
99 * will be the number of previous retry attempts (e.g. the first call will
100 * have value 0). Default: 0 (no retry attempts).
101 */
102 retry?: number | ((err: any, retries: number) => boolean);
103 /**
104 * If `memoize` is `true`, then every call to the throttled function will be
105 * saved as an entry in a map from arguments to return value. If same
106 * arguments are seen again in a future call, the return value is retrieved
107 * from the Map rather than calling the function again. This can be useful
108 * for avoiding redundant calls that are expected to return the same results
109 * given the same arguments.
110 *
111 * The arguments will be captured with `JSON.stringify`, therefore types
112 * that do not stringify uniquely won't be distinguished from each other.
113 * Care must be taken when specifying `memoize` to ensure avoid incorrect
114 * results.
115 */
116 memoize?: boolean;
117 /**
118 * Similar to `memoize` except the map from function arguments to results is
119 * stored in a persistent cache on disk. This is useful to prevent redundant
120 * calls to APIs which are expected to return the same results for the same
121 * arguments, and which are likely to be called across many faast.js module
122 * instantiations. This is used internally by faast.js for caching cloud
123 * prices for AWS and Google, and for saving the last garbage collection
124 * date for AWS. Persistent cache entries expire after a period of time. See
125 * {@link PersistentCache}.
126 */
127 cache?: PersistentCache;
128 /**
129 * A promise that, if resolved, causes cancellation of pending throttled
130 * invocations. This is typically created using `Deferred`. The idea is to
131 * use the resolving of the promise as an asynchronous signal that any
132 * pending invocations in this throttled function should be cleared.
133 * @internal
134 */
135 cancel?: Promise<void>;
136}
137export declare function memoizeFn<A extends any[], R>(fn: (...args: A) => R, cache?: Map<string, R>): (...args: A) => R;
138export declare function cacheFn<A extends any[], R>(cache: PersistentCache, fn: (...args: A) => Promise<R>): (...args: A) => Promise<any>;
139/**
140 * A decorator for rate limiting, concurrency limiting, retry, memoization, and
141 * on-disk caching. See {@link Limits}.
142 * @remarks
143 * When programming against cloud services, databases, and other resources, it
144 * is often necessary to control the rate of request issuance to avoid
145 * overwhelming the service provider. In many cases the provider has built-in
146 * safeguards against abuse, which automatically fail requests if they are
147 * coming in too fast. Some systems don't have safeguards and precipitously
148 * degrade their service level or fail outright when faced with excessive load.
149 *
150 * With faast.js it becomes very easy to (accidentally) generate requests from
151 * thousands of cloud functions. The `throttle` function can help manage request
152 * flow without resorting to setting up a separate service. This is in keeping
153 * with faast.js' zero-ops philosophy.
154 *
155 * Usage is simple:
156 *
157 * ```typescript
158 * async function operation() { ... }
159 * const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
160 * for(let i = 0; i < 100; i++) {
161 * // at most 10 concurrent executions at a rate of 5 invocations per second.
162 * throttledOperation();
163 * }
164 * ```
165 *
166 * Note that each invocation to `throttle` creates a separate function with a
167 * separate limits. Therefore it is likely that you want to use `throttle` in a
168 * global context, not within a dynamic context:
169 *
170 * ```typescript
171 * async function operation() { ... }
172 * for(let i = 0; i < 100; i++) {
173 * // WRONG - each iteration creates a separate throttled function that's only called once.
174 * const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
175 * throttledOperation();
176 * }
177 * ```
178 *
179 * A better way to use throttle avoids creating a named `operation` function
180 * altogether, ensuring it cannot be accidentally called without throttling:
181 *
182 * ```typescript
183 * const operation = throttle({ concurrency: 10, rate: 5 }, async () => {
184 * ...
185 * });
186 * ```
187 *
188 * Throttle supports functions with arguments automatically infers the correct
189 * type for the returned function:
190 *
191 * ```typescript
192 * // `operation` inferred to have type (str: string) => Promise<string>
193 * const operation = throttle({ concurrency: 10, rate: 5 }, async (str: string) => {
194 * return string;
195 * });
196 * ```
197 *
198 * In addition to limiting concurrency and invocation rate, `throttle` also
199 * supports retrying failed invocations, memoizing calls, and on-disk caching.
200 * See {@link Limits} for details.
201 *
202 * @param limits - see {@link Limits}.
203 * @param fn - The function to throttle. It can take any arguments, but must
204 * return a Promise (which includes `async` functions).
205 * @returns Returns a throttled function with the same signature as the argument
206 * `fn`.
207 * @public
208 */
209export declare function throttle<A extends any[], R>(limits: Limits, fn: (...args: A) => Promise<R>): (...args: A) => Promise<R>;
210export declare class AsyncQueue<T> {
211 protected deferred: Array<Deferred<T>>;
212 protected enqueued: Promise<T>[];
213 enqueue(value: T | Promise<T>): void;
214 next(): Promise<T>;
215 clear(): void;
216}
217export declare class AsyncIterableQueue<T> extends AsyncQueue<IteratorResult<T>> {
218 push(value: T | Promise<T>): void;
219 done(): void;
220 [Symbol.asyncIterator](): this;
221}
222export declare class AsyncOrderedQueue<T> {
223 protected queue: AsyncQueue<T>;
224 protected arrived: Map<number, Promise<T>>;
225 protected current: number;
226 push(value: T | Promise<T>, sequence: number): void;
227 pushImmediate(value: T | Promise<T>): void;
228 enqueue(value: Promise<T>, sequence: number): void;
229 next(): Promise<T>;
230 clear(): void;
231}