import { PersistentCache } from "./cache";
// NOTE(review): the generic type arguments in this declaration file were
// stripped by a formatting mangle (e.g. `Promise`, `Set>`, `Awaited`). They
// are reconstructed below from how each type parameter is used. The
// `T = void` defaults are inferred -- confirm against the implementation.
/**
 * A deferred promise: exposes its `resolve`/`reject` functions so the promise
 * can be settled from outside the executor.
 */
export declare class Deferred<T = void> {
    /** The promise settled by calling `resolve` or `reject`. */
    promise: Promise<T>;
    resolve: (arg: T | PromiseLike<T>) => void;
    reject: (err?: any) => void;
    constructor();
}
/**
 * A {@link Deferred} bound to a worker function; `execute` runs the worker
 * and settles the deferred with its outcome.
 */
export declare class DeferredWorker<T = void> extends Deferred<T> {
    private worker;
    private cancel?;
    constructor(worker: () => Promise<T>, cancel?: (() => string | undefined) | undefined);
    execute(): Promise<void>;
}
/** Retry policy: a max retry count, or a predicate deciding per-error. */
export declare type RetryType = number | ((err: any, retries: number) => boolean);
export declare function retryOp<T>(retryN: RetryType, fn: (retries: number) => Promise<T>): Promise<T>;
/**
 * A concurrency-limited work queue. Workers pushed beyond the concurrency
 * limit wait in `pendingQueue` until a slot in `executingQueue` frees up.
 */
export declare class Funnel<T = void> {
    concurrency: number;
    protected shouldRetry?: RetryType | undefined;
    protected pendingQueue: Set<DeferredWorker<T>>;
    protected executingQueue: Set<DeferredWorker<T>>;
    processed: number;
    errors: number;
    constructor(concurrency?: number, shouldRetry?: RetryType | undefined);
    push(worker: () => Promise<T>, shouldRetry?: RetryType, cancel?: () => string | undefined): Promise<T>;
    clear(): void;
    promises(): Promise<T>[];
    all(): Promise<(void | Awaited<T>)[]>;
    size(): number;
    setMaxConcurrency(maxConcurrency: number): void;
    getConcurrency(): number;
    protected doWork(): void;
}
/**
 * @internal
 */
export interface PumpOptions {
    concurrency: number;
    verbose?: boolean;
}
/**
 * @internal
 */
export declare class Pump<T = void> extends Funnel<T | void> {
    protected options: PumpOptions;
    protected worker: () => Promise<T | void>;
    stopped: boolean;
    constructor(options: PumpOptions, worker: () => Promise<T | void>);
    start(): void;
    stop(): void;
    drain(): Promise<(void | Awaited<T>)[]>;
    setMaxConcurrency(concurrency: number): void;
}
/**
 * Token-bucket rate limiter: issues queued workers at
 * `targetRequestsPerSecond`, allowing bursts up to `burst`.
 */
export declare class RateLimiter<T = void> {
    protected targetRequestsPerSecond: number;
    protected burst: number;
    protected lastTick: number;
    protected bucket: number;
    protected queue: Set<DeferredWorker<T>>;
    constructor(targetRequestsPerSecond: number, burst?: number);
    push(worker: () => Promise<T>, cancel?: () => string | undefined): Promise<T>;
    protected updateBucket(): void;
    protected drainQueue(): Promise<void>;
    clear(): void;
}
/**
 * Specify {@link throttle} limits.
These limits shape the way throttle invokes * the underlying function. * @public */ export interface Limits { /** * The maximum number of concurrent executions of the underlying function to * allow. Must be supplied, there is no default. Specifying `0` or * `Infinity` is allowed and means there is no concurrency limit. */ concurrency: number; /** * The maximum number of calls per second to allow to the underlying * function. Default: no rate limit. */ rate?: number; /** * The maximum number of calls to the underlying function to "burst" -- e.g. * the number that can be issued immediately as long as the rate limit is * not exceeded. For example, if rate is 5 and burst is 5, and 10 calls are * made to the throttled function, 5 calls are made immediately and then * after 1 second, another 5 calls are made immediately. Setting burst to 1 * means calls are issued uniformly every `1/rate` seconds. If `rate` is not * specified, then `burst` does not apply. Default: 1. */ burst?: number; /** * Retry if the throttled function returns a rejected promise. `retry` can * be a number or a function. If it is a number `N`, then up to `N` * additional attempts are made in addition to the initial call. If retry is * a function, it should return `true` if another retry attempt should be * made, otherwise `false`. The first argument will be the value of the * rejected promise from the previous call attempt, and the second argument * will be the number of previous retry attempts (e.g. the first call will * have value 0). Default: 0 (no retry attempts). */ retry?: number | ((err: any, retries: number) => boolean); /** * If `memoize` is `true`, then every call to the throttled function will be * saved as an entry in a map from arguments to return value. If same * arguments are seen again in a future call, the return value is retrieved * from the Map rather than calling the function again. 
This can be useful * for avoiding redundant calls that are expected to return the same results * given the same arguments. * * The arguments will be captured with `JSON.stringify`, therefore types * that do not stringify uniquely won't be distinguished from each other. * Care must be taken when specifying `memoize` to ensure avoid incorrect * results. */ memoize?: boolean; /** * Similar to `memoize` except the map from function arguments to results is * stored in a persistent cache on disk. This is useful to prevent redundant * calls to APIs which are expected to return the same results for the same * arguments, and which are likely to be called across many faast.js module * instantiations. This is used internally by faast.js for caching cloud * prices for AWS and Google, and for saving the last garbage collection * date for AWS. Persistent cache entries expire after a period of time. See * {@link PersistentCache}. */ cache?: PersistentCache; /** * A promise that, if resolved, causes cancellation of pending throttled * invocations. This is typically created using `Deferred`. The idea is to * use the resolving of the promise as an asynchronous signal that any * pending invocations in this throttled function should be cleared. * @internal */ cancel?: Promise; } export declare function memoizeFn(fn: (...args: A) => R, cache?: Map): (...args: A) => R; export declare function cacheFn(cache: PersistentCache, fn: (...args: A) => Promise): (...args: A) => Promise; /** * A decorator for rate limiting, concurrency limiting, retry, memoization, and * on-disk caching. See {@link Limits}. * @remarks * When programming against cloud services, databases, and other resources, it * is often necessary to control the rate of request issuance to avoid * overwhelming the service provider. In many cases the provider has built-in * safeguards against abuse, which automatically fail requests if they are * coming in too fast. 
 * Some systems don't have safeguards and precipitously
 * degrade their service level or fail outright when faced with excessive load.
 *
 * With faast.js it becomes very easy to (accidentally) generate requests from
 * thousands of cloud functions. The `throttle` function can help manage request
 * flow without resorting to setting up a separate service. This is in keeping
 * with faast.js' zero-ops philosophy.
 *
 * Usage is simple:
 *
 * ```typescript
 * async function operation() { ... }
 * const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
 * for(let i = 0; i < 100; i++) {
 *     // at most 10 concurrent executions at a rate of 5 invocations per second.
 *     throttledOperation();
 * }
 * ```
 *
 * Note that each invocation to `throttle` creates a separate function with
 * separate limits. Therefore it is likely that you want to use `throttle` in a
 * global context, not within a dynamic context:
 *
 * ```typescript
 * async function operation() { ... }
 * for(let i = 0; i < 100; i++) {
 *     // WRONG - each iteration creates a separate throttled function that's only called once.
 *     const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
 *     throttledOperation();
 * }
 * ```
 *
 * A better way to use throttle avoids creating a named `operation` function
 * altogether, ensuring it cannot be accidentally called without throttling:
 *
 * ```typescript
 * const operation = throttle({ concurrency: 10, rate: 5 }, async () => {
 *     ...
 * });
 * ```
 *
 * Throttle supports functions with arguments and automatically infers the
 * correct type for the returned function:
 *
 * ```typescript
 * // `operation` inferred to have type (str: string) => Promise<string>
 * const operation = throttle({ concurrency: 10, rate: 5 }, async (str: string) => {
 *     return str;
 * });
 * ```
 *
 * In addition to limiting concurrency and invocation rate, `throttle` also
 * supports retrying failed invocations, memoizing calls, and on-disk caching.
 * See {@link Limits} for details.
* * @param limits - see {@link Limits}. * @param fn - The function to throttle. It can take any arguments, but must * return a Promise (which includes `async` functions). * @returns Returns a throttled function with the same signature as the argument * `fn`. * @public */ export declare function throttle(limits: Limits, fn: (...args: A) => Promise): (...args: A) => Promise; export declare class AsyncQueue { protected deferred: Array>; protected enqueued: Promise[]; enqueue(value: T | Promise): void; next(): Promise; clear(): void; } export declare class AsyncIterableQueue extends AsyncQueue> { push(value: T | Promise): void; done(): void; [Symbol.asyncIterator](): this; } export declare class AsyncOrderedQueue { protected queue: AsyncQueue; protected arrived: Map>; protected current: number; push(value: T | Promise, sequence: number): void; pushImmediate(value: T | Promise): void; enqueue(value: Promise, sequence: number): void; next(): Promise; clear(): void; }