/// <reference types="node" />
import * as webpack from "webpack";
import { CostSnapshot } from "./cost";
import { Statistics } from "./shared";
import { CpuMeasurement, FunctionCall } from "./wrapper";
/**
 * The type of all supported cloud providers.
 * @public
 */
export declare type Provider = "aws" | "google" | "local";
/**
 * Options for the {@link CommonOptions.include} option.
 * @public
 */
export interface IncludeOption {
    /**
     * The path to the directory or glob to add to the cloud function.
     */
    path: string;
    /**
     * The working directory if `path` is relative. Defaults to `process.cwd()`.
     * For example, if `cwd` is `"foo"` and `path` is `"bar"`, then the
     * contents of the directory `foo/bar/` will be added to the remote
     * function under the path `bar/`.
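     *
     * A minimal sketch of this example as an options object (assuming the
     * `CommonOptions` type is imported from the `faastjs` package):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * // The contents of ./foo/bar/ become available under bar/ remotely.
     * const options: CommonOptions = {
     *     include: [{ path: "bar", cwd: "foo" }]
     * };
     * ```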
     */
    cwd?: string;
}
/**
 * Options common across all faast.js providers. Used as argument to {@link faast}.
 * @remarks
 * There are also more specific options for each provider. See
 * {@link AwsOptions}, {@link GoogleOptions}, and {@link LocalOptions}.
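 *
 * A minimal usage sketch, assuming the package is imported as `faastjs` and
 * `./functions` is your own module of exported functions (both names are
 * illustrative):
 *
 * ```typescript
 * import { faast } from "faastjs";
 * import * as funcs from "./functions";
 *
 * async function example() {
 *     const m = await faast("aws", funcs, { memorySize: 1024, timeout: 120 });
 *     try {
 *         // m.functions mirrors the exports of ./functions.
 *     } finally {
 *         await m.cleanup();
 *     }
 * }
 * ```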
 * @public
 */
export interface CommonOptions {
    /**
     * If true, create a child process to isolate user code from faast
     * scaffolding. Default: true.
     * @remarks
     * If a child process is not created, faast runs in the same node instance
     * as the user code and may not execute in a timely fashion because user
     * code may
     * {@link https://nodejs.org/en/docs/guides/dont-block-the-event-loop/ | block the event loop}.
     * Creating a child process for user code allows faast.js to continue
     * executing even if user code never yields. This provides better
     * reliability and functionality:
     *
     * - Detect timeout errors more reliably, even if the function doesn't
     * relinquish the CPU. Not applicable to AWS, which sends separate failure
     * messages in case of timeout. See {@link CommonOptions.timeout}.
     *
     * - Collect CPU metrics for detecting invocations with high latency, which
     * can be used for automatically retrying calls to reduce tail latency.
     *
     * The cost of creating a child process is mainly in the memory overhead of
     * creating another node process.
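     *
     * A hypothetical sketch combining this option with
     * {@link CommonOptions.childProcessMemoryMb} (values are illustrative,
     * and `CommonOptions` is assumed to be imported from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * // Cap the child node process so a spawned subprocess (e.g. a browser)
     * // can use the rest of the function's memory.
     * const options: CommonOptions = {
     *     childProcess: true,
     *     childProcessMemoryMb: 512,
     *     memorySize: 2048
     * };
     * ```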
     */
    childProcess?: boolean;
    /**
     * When childProcess is true, the child process will be spawned with the
     * value of this property as the setting for --max-old-space-size.
     * @remarks
     * This is useful if a function requires the node process to limit its
     * memory so that another spawned process (e.g. a browser instance) can use
     * the rest.
     * @public
     */
    childProcessMemoryMb?: number;
    /**
     * The maximum number of concurrent invocations to allow. Default: 100,
     * except for the `local` provider, where the default is 10.
     * @remarks
     * The concurrency limit applies to all invocations of all of the faast
     * functions summed together. It is not a per-function limit. To apply a
     * per-function limit, use {@link throttle}. A value of 0 is equivalent to
     * Infinity. A value of 1 ensures mutually exclusive invocations.
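     *
     * A minimal sketch combining `concurrency` with {@link CommonOptions.rate}
     * (values are illustrative, and `CommonOptions` is assumed to be imported
     * from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * // At most 250 invocations in flight across all faast functions, and no
     * // more than 50 new invocations issued per second.
     * const options: CommonOptions = { concurrency: 250, rate: 50 };
     * ```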
     */
    concurrency?: number;
    /**
     * A user-supplied description for this function, which may make it easier
     * to track different functions when multiple functions are created.
     */
    description?: string;
    /**
     * Exclude a subset of files included by {@link CommonOptions.include}.
     * @remarks
     * The exclusion can be a directory or glob. Exclusions apply to all included
     * entries.
     */
    exclude?: string[];
    /**
     * Rate limit invocations (invocations/sec). Default: no rate limit.
     * @remarks
     * Some services cannot handle more than a certain number of requests per
     * second, and it is easy to overwhelm them with a large number of cloud
     * functions. Specify a rate limit in invocation/second to restrict how
     * faast.js issues requests.
     */
    rate?: number;
    /**
     * Environment variables available during serverless function execution.
     * Default: \{\}.
     */
    env?: {
        [key: string]: string;
    };
    /**
     * Garbage collector mode. Default: `"auto"`.
     * @remarks
     * Garbage collection deletes resources that were created by previous
     * instantiations of faast that were not cleaned up by
     * {@link FaastModule.cleanup}, either because it was not called or because
     * the process terminated and did not execute this cleanup step. In `"auto"`
     * mode, garbage collection may be throttled to run up to once per hour no
     * matter how many faast.js instances are created. In `"force"` mode,
     * garbage collection is run without regard to whether another gc has
     * already been performed recently. In `"off"` mode, garbage collection is
     * skipped entirely. This can be useful for performance-sensitive tests, or
     * for more control over when gc is performed.
     *
     * Garbage collection is cloud-specific, but in general garbage collection
     * should not interfere with the behavior or performance of faast cloud
     * functions. When {@link FaastModule.cleanup} runs, it waits for garbage
     * collection to complete. Therefore the cleanup step can in some
     * circumstances take a significant amount of time even after all
     * invocations have returned.
     *
     * It is generally recommended to leave garbage collection in `"auto"` mode,
     * otherwise garbage resources may accumulate over time and you will
     * eventually hit resource limits on your account.
     *
     * Also see {@link CommonOptions.retentionInDays}.
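     *
     * A minimal sketch for a performance-sensitive test run (`CommonOptions`
     * is assumed to be imported from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * // Skip garbage collection for this instantiation; a later run with
     * // gc "auto" can reclaim any leftover resources.
     * const options: CommonOptions = { gc: "off" };
     * ```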
     */
    gc?: "auto" | "force" | "off";
    /**
     * Include files to make available in the remote function. See
     * {@link IncludeOption}.
     * @remarks
     * Each include entry is a directory or glob pattern. Paths can be specified
     * as relative or absolute paths. Relative paths are resolved relative to
     * the current working directory, or relative to the `cwd` option.
     *
     * If the include entry is a directory `"foo/bar"`, the directory
     * `"./foo/bar"` will be available in the cloud function. Directories are
     * recursively added.
     *
     * Glob patterns use the syntax of
     * {@link https://github.com/isaacs/node-glob | node glob}.
     *
     * Also see {@link CommonOptions.exclude} for file exclusions.
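     *
     * A minimal sketch mixing a glob pattern with an {@link IncludeOption}
     * entry (the paths are illustrative, and `CommonOptions` is assumed to be
     * imported from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * const options: CommonOptions = {
     *     include: ["data/*.json", { path: "assets", cwd: "static" }],
     *     exclude: ["*.test.json"]
     * };
     * ```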
     */
    include?: (string | IncludeOption)[];
    /**
     * Maximum number of times that faast will retry each invocation. Default: 2
     * (invocations can therefore be attempted 3 times in total).
     * @remarks
     * Retries are automatically attempted for transient infrastructure-level
     * failures such as rate limits or network failures. User-level exceptions
     * are not retried automatically. In addition to retries performed by faast,
     * some providers automatically attempt retries that are not controllable by
     * faast. As a result, your function may be retried many more times than
     * this setting suggests.
     */
    maxRetries?: number;
    /**
     * Memory limit for each function in MB. This setting has an effect on
     * pricing. Default varies by provider.
     * @remarks
     * Each provider has different settings for memory size, and performance
     * varies depending on the setting. By default faast picks a likely optimal
     * value for each provider.
     *
     * - aws: 1728MB
     *
     * - google: 1024MB
     *
     * - local: 512MB (however, memory size limits aren't reliable in local mode).
     */
    memorySize?: number;
    /**
     * Specify invocation mode. Default: `"auto"`.
     * @remarks
     * Modes specify how invocations are triggered. In https mode, the functions
     * are invoked through an https request or the provider's API. In queue
     * mode, a provider-specific queue is used to invoke functions. Queue mode
     * adds additional latency and (usually negligible) cost, but may scale
     * better for some providers. In auto mode the best default is chosen for
     * each provider depending on its particular performance characteristics.
     *
     * The defaults are:
     *
     * - aws: `"auto"` is `"https"`. In https mode, the AWS SDK API
     * is used to invoke functions. In queue mode, an AWS SNS topic is created
     * and triggers invocations. The AWS API Gateway service is never used by
     * faast, as it incurs a higher cost and is not needed to trigger
     * invocations.
     *
     * - google: `"auto"` is `"https"`. In https mode, a PUT request is made to
     * invoke the cloud function. In queue mode, a PubSub topic is created to
     * invoke functions.
     *
     * - local: The local provider ignores the mode setting and always uses an
     * internal asynchronous queue to schedule calls.
     *
     * Size limits are affected by the choice of mode. On AWS the limit is 256kb
     * for arguments and return values in `"queue"` mode, and 6MB for `"https"`
     * mode. For Google the limit is 10MB regardless of mode. In local mode
     * messages are sent via node's IPC and are subject to OS IPC limits.
     *
     * Note that no matter which mode is selected, faast.js always creates a
     * queue for sending back intermediate results for bookkeeping and
     * performance monitoring.
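     *
     * A minimal sketch selecting queue mode explicitly (`CommonOptions` is
     * assumed to be imported from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * // Use the provider's queue (e.g. SNS on AWS) instead of https calls.
     * const options: CommonOptions = { mode: "queue" };
     * ```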
     */
    mode?: "https" | "queue" | "auto";
    /**
     * Specify a package.json file to include with the code package.
     * @remarks
     * By default, faast.js will use webpack to bundle dependencies your remote
     * module imports. In normal usage there is no need to specify a separate
     * package.json, as webpack will statically analyze your imports and
     * determine which files to bundle.
     *
     * However, there are some use cases where this is not enough. For example,
     * some dependencies contain native code compiled during installation,
     * which webpack cannot bundle, or are specifically not designed to work
     * with webpack. In these cases, you can create a separate `package.json`
     * for these dependencies and pass the filename as the `packageJson`
     * option. If `packageJson` is an `object`, it is assumed to be a parsed
     * JSON object with the same structure as a package.json file (useful for
     * specifying a synthetic `package.json` directly in code).
     *
     * The way the `packageJson` is handled varies by provider:
     *
     * - local: Runs `npm install` in a temporary directory it prepares for the
     * function.
     *
     * - google: uses Google Cloud Function's
     * {@link https://cloud.google.com/functions/docs/writing/specifying-dependencies-nodejs | native support for package.json}.
     *
     * - aws: Recursively calls faast.js to run `npm install` inside a separate
     * lambda function specifically created for this purpose. Faast.js uses
     * lambda to install dependencies to ensure that native dependencies are
     * compiled in an environment that can produce binaries linked against
     * lambda's
     * {@link https://aws.amazon.com/blogs/compute/running-executables-in-aws-lambda/ | execution environment}.
     * Packages are saved in a Lambda Layer.
     *
     * For AWS, if {@link CommonOptions.useDependencyCaching} is `true` (which
     * is the default), then the Lambda Layer created will be reused in future
     * function creation requests if the contents of `packageJson` are the same.
     *
     * The `FAAST_PACKAGE_DIR` environment variable can be useful for debugging
     * `packageJson` issues.
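     *
     * A hypothetical sketch passing a synthetic package.json as an object (the
     * dependency shown is only an example of a native module; `CommonOptions`
     * is assumed to be imported from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * const options: CommonOptions = {
     *     packageJson: {
     *         dependencies: {
     *             sharp: "^0.32.0"
     *         }
     *     },
     *     useDependencyCaching: true
     * };
     * ```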
     */
    packageJson?: string | object;
    /**
     * Cache installed dependencies from {@link CommonOptions.packageJson}. Only
     * applies to AWS. Default: true.
     * @remarks
     * If `useDependencyCaching` is `true`, the resulting `node_modules` folder
     * is cached in a Lambda Layer with the name `faast-${key}`, where `key` is
     * the SHA1 hash of the `packageJson` contents. These cache entries are
     * removed by garbage collection, by default after 24h. Using caching
     * reduces the need to install and upload dependencies every time a function
     * is created. This is important for AWS because it creates an entirely
     * separate lambda function to install dependencies remotely, which can
     * substantially increase function deployment time.
     *
     * If `useDependencyCaching` is false, the lambda layer is created with the
     * same name as the lambda function, and then is deleted when cleanup is
     * run.
     */
    useDependencyCaching?: boolean;
    /**
     * Specify how many days to wait before reclaiming cloud garbage. Default:
     * 1.
     * @remarks
     * Garbage collection only deletes resources after they age beyond a certain
     * number of days. This option specifies how many days old a resource needs
     * to be before being considered garbage by the collector. Note that this
     * setting is not recorded when the resources are created. For example,
     * suppose this is the sequence of events:
     *
     * - Day 0: `faast()` is called with `retentionInDays` set to 5. Then, the
     * function crashes (or omits the call to {@link FaastModule.cleanup}).
     *
     * - Day 1: `faast()` is called with `retentionInDays` set to 1.
     *
     * In this sequence of events, on Day 0 the garbage collector runs and
     * removes resources with age older than 5 days. Then the function leaves
     * new garbage behind because it crashed or did not complete cleanup. On Day
     * 1, the garbage collector runs and deletes resources at least 1 day old,
     * which includes garbage left behind from Day 0 (based on the creation
     * timestamp of the resources). This deletion occurs even though retention
     * was set to 5 days when resources were created on Day 0.
     *
     * On Google, logs are retained according to Google's default expiration
     * policy (30 days) instead of being deleted by garbage collection.
     *
     * Note that if `retentionInDays` is set to 0, garbage collection will
     * remove all resources, even ones that may be in use by other running faast
     * instances. Not recommended.
     *
     * See {@link CommonOptions.gc}.
     */
    retentionInDays?: number;
    /**
     * Reduce tail latency by retrying invocations that take substantially
     * longer than other invocations of the same function. Default: 3.
     * @remarks
     * faast.js automatically measures the mean and standard deviation (σ) of
     * the time taken by invocations of each function. Retries are attempted
     * when the time for an invocation exceeds the mean time by a certain
     * threshold. `speculativeRetryThreshold` specifies how many multiples of σ
     * an invocation needs to exceed the mean for a given function before retry
     * is attempted.
     *
     * The default threshold is 3, which means a call to a function is retried
     * when its execution time exceeds the mean of all prior executions of the
     * same function by more than three standard deviations.
     *
     * This feature is experimental.
     * @beta
     */
    speculativeRetryThreshold?: number;
    /**
     * Execution time limit for each invocation, in seconds. Default: 60.
     * @remarks
     * Each provider has a maximum time limit for how long invocations can run
     * before being automatically terminated (or frozen). The following are the
     * maximum time limits as of February 2019:
     *
     * - aws:
     * {@link https://docs.aws.amazon.com/lambda/latest/dg/limits.html | 15 minutes}
     *
     * - google: {@link https://cloud.google.com/functions/quotas | 9 minutes}
     *
     * - local: unlimited
     *
     * Faast.js has a proactive timeout detection feature. It automatically
     * attempts to detect when the time limit is about to be reached and
     * proactively sends a timeout exception. Faast does this because not all
     * providers reliably send timely feedback when timeouts occur, leaving
     * developers to look through cloud logs. In general faast.js' timeout will
     * be up to 5s earlier than the timeout specified, in order to give
     * faast.js time to send a timeout message. Proactive timeout detection
     * only works with {@link CommonOptions.childProcess} set to `true` (the
     * default).
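     *
     * A minimal sketch (the value is illustrative, and `CommonOptions` is
     * assumed to be imported from `faastjs`):
     *
     * ```typescript
     * import { CommonOptions } from "faastjs";
     *
     * // Allow up to 5 minutes per invocation; faast.js may raise its own
     * // timeout error shortly before this limit is reached.
     * const options: CommonOptions = { timeout: 300 };
     * ```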
     */
    timeout?: number;
    /**
     * Extra webpack options to use to bundle the code package.
     * @remarks
     * By default, faast.js uses webpack to bundle the code package. Webpack
     * automatically handles finding and bundling dependencies, adding source
     * mappings, etc. If you need specialized bundling, use this option to add
     * or override the default webpack configuration. The library
     * {@link https://github.com/survivejs/webpack-merge | webpack-merge} is
     * used to combine configurations.
     *
     * ```typescript
     * const config: webpack.Configuration = merge({
     *     entry,
     *     mode: "development",
     *     output: {
     *         path: "/",
     *         filename: outputFilename,
     *         libraryTarget: "commonjs2"
     *     },
     *     target: "node",
     *     resolveLoader: { modules: [__dirname, `${__dirname}/dist`] },
     *     node: { global: true, __dirname: false, __filename: false }
     * },
     * webpackOptions);
     * ```
     *
     * Take care when setting the values of `entry`, `output`, or
     * `resolveLoader`. If these options are overwritten, faast.js may fail to
     * bundle your code. In particular, setting `entry` to an array value will
     * cause `webpack-merge` to concatenate its value with the entry that
     * faast.js inserts for you, instead of replacing it.
     *
     * Default:
     *
     * - aws: `{ externals: [new RegExp("^aws-sdk/?")] }`. In the lambda
     * environment `"aws-sdk"` is available in the ambient environment and
     * does not need to be bundled.
     *
     * - other providers: `{}`
     *
     * The `FAAST_PACKAGE_DIR` environment variable can be useful for debugging
     * webpack issues.
     */
    webpackOptions?: webpack.Configuration;
    /**
     * Check that arguments and return values from cloud functions are
     * serializable without losing information. Default: true.
     * @remarks
     * Arguments to cloud functions are automatically serialized with
     * `JSON.stringify` with a custom replacer that handles built-in JavaScript
     * types such as `Date` and `Buffer`. Return values go through the same
     * process. Some JavaScript objects cannot be serialized. By default
     * `validateSerialization` will verify that every argument and return value
     * can be serialized and deserialized without losing information. A
     * `FaastError` will be thrown if faast.js detects a problem according to
     * the following procedure:
     *
     * 1. Serialize arguments and return values with `JSON.stringify` using a
     * special `replacer` function.
     *
     * 2. Deserialize the values with `JSON.parse` with a special `reviver`
     * function.
     *
     * 3. Use
     * {@link https://nodejs.org/api/assert.html#assert_assert_deepstrictequal_actual_expected_message | assert.deepStrictEqual}
     * to compare the original object with the deserialized object from step
     * 2.
     *
     * There is some overhead to this process because each argument is
     * serialized and deserialized, which can be costly if arguments or return
     * values are large.
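     *
     * A hypothetical sketch of a return value that cannot round-trip through
     * `JSON.stringify`/`JSON.parse` (functions are dropped during
     * serialization), and so can trigger a `FaastError` when validation is
     * enabled:
     *
     * ```typescript
     * // In the user's remote module (illustrative only):
     * export async function bad() {
     *     return { compute: () => 42 };
     * }
     * ```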
     */
    validateSerialization?: boolean;
    /**
     * Debugging output options.
     * @internal
     */
    debugOptions?: {
        [key: string]: boolean;
    };
}
export declare const commonDefaults: Required<CommonOptions>;
/**
 * Options that apply to the {@link FaastModule.cleanup} method.
 * @public
 */
export interface CleanupOptions {
    /**
     * If true, delete provider cloud resources. Default: true.
     * @remarks
     * The cleanup operation has two functions: stopping the faast.js runtime
     * and deleting cloud resources that were instantiated. If `deleteResources`
     * is false, then only the runtime is stopped and no cloud resources are
     * deleted. This can be useful for debugging and examining the state of
     * resources created by faast.js.
     *
     * It is supported to call {@link FaastModule.cleanup} twice: once with
     * `deleteResources` set to `false`, which only stops the runtime, and then
     * again set to `true` to delete resources. This can be useful for testing.
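     *
     * A minimal sketch of that two-phase cleanup (the `FaastModule` import and
     * the `shutdown` helper are illustrative):
     *
     * ```typescript
     * import { FaastModule } from "faastjs";
     *
     * async function shutdown(m: FaastModule<object>) {
     *     // Stop the faast.js runtime but keep cloud resources for inspection...
     *     await m.cleanup({ deleteResources: false });
     *     // ...then delete the resources later.
     *     await m.cleanup();
     * }
     * ```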
     */
    deleteResources?: boolean;
    /**
     * If true, delete cached resources. Default: false.
     * @remarks
     * Some resources are cached persistently between calls for performance
     * reasons. If this option is set to true, these cached resources are
     * deleted when cleanup occurs, instead of being left behind for future use.
     * For example, on AWS this includes the Lambda Layers that are created for
     * {@link CommonOptions.packageJson} dependencies. Note that only the cached
     * resources created by this instance of FaastModule are deleted, not cached
     * resources from other FaastModules. This is similar to setting
     * `useDependencyCaching` to `false` during function construction, except
     * `deleteCaches` can be set at function cleanup time, and any other
     * FaastModules created before cleanup may use the cached Layers.
     */
    deleteCaches?: boolean;
    /**
     * Number of seconds to wait for garbage collection. Default: 10.
     * @remarks
     * Garbage collection can still be operating when cleanup is called; this
     * option limits the amount of time faast waits for the garbage collector.
     * If set to 0, the wait is unlimited.
     */
    gcTimeout?: number;
}
export declare const CleanupOptionDefaults: Required<CleanupOptions>;
/**
 * Summary statistics for function invocations.
 * @remarks
 * ```
 * localStartLatency remoteStartLatency executionTime
 * ◀──────────────────▶◁ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ▷◀──────────▶
 *
 * ┌───────────────────────────────────┬──────────────────────────────────────┐
 * │ │ │
 * │ Local │ Cloud Provider │
 * │ │ │
 * │ ┌─────────┐ │ ┌──────────┐ ┌──────────┐ │
 * │ │ │ │ │ │ │ │ │
 * │ │ local │ │ │ request │ │ │ │
 * │ invoke ────────▶│ queue │────┼──▶│ queue ├────────▶│ │ │
 * │ │ │ │ │ │ │ │ │
 * │ └─────────┘ │ └──────────┘ │ cloud │ │
 * │ │ │ function │ │
 * │ ┌─────────┐ │ ┌──────────┐ │ │ │
 * │ │ │ │ │ │ │ │ │
 * │ result ◀────────│ local │◀───┼───│ response │◀────────│ │ │
 * │ │ polling │ │ │ queue │ │ │ │
 * │ │ │ │ │ │ │ │ │
 * │ └─────────┘ │ └──────────┘ └──────────┘ │
 * │ │ │
 * └───────────────────────────────────┴──────────────────────────────────────┘
 *
 * ◁ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ▷
 * returnLatency ◀───────▶
 * sendResponseLatency
 * ```
 *
 * `localStartLatency` and `executionTime` are measured on one machine and are
 * free of clock skew. `remoteStartLatency` and `returnLatency` are measured as
 * time differences between machines and are subject to much more uncertainty
 * and to effects like clock skew.
 *
 * All times are in milliseconds.
 *
 * @public
 */
export declare class FunctionStats {
    /**
     * Statistics for how long invocations stay in the local queue before being
     * sent to the cloud provider.
     */
    localStartLatency: Statistics;
    /**
     * Statistics for how long requests take to start execution after being sent
     * to the cloud provider. This typically includes remote queueing and cold
     * start times. Because this measurement requires comparing timestamps from
     * different machines, it is subject to clock skew and other effects, and
     * should not be considered highly accurate. It can be useful for detecting
     * excessively high latency problems. Faast.js attempts to correct for clock
     * skew heuristically.
     */
    remoteStartLatency: Statistics;
    /**
     * Statistics for function execution time in milliseconds. This is measured
     * as wall clock time inside the cloud function, and does not include the
     * time taken to send the response to the response queue. Note that most
     * cloud providers round up to the next 100ms for pricing.
     */
    executionTime: Statistics;
    /**
     * Statistics for how long it takes to send the response to the response
     * queue.
     */
    sendResponseLatency: Statistics;
    /**
     * Statistics for how long it takes to return a response from the end of
     * execution time to the receipt of the response locally. This measurement
     * requires comparing timestamps from different machines, and is subject to
     * clock skew and other effects. It should not be considered highly
     * accurate. It can be useful for detecting excessively high latency
     * problems. Faast.js attempts to correct for clock skew heuristically.
     */
    returnLatency: Statistics;
    /**
     * Statistics for amount of time billed. This is similar to
     * {@link FunctionStats.executionTime} except each sampled time is rounded
     * up to the next 100ms.
     */
    estimatedBilledTime: Statistics;
    /**
     * The number of invocations attempted. If an invocation is retried, this
     * only counts the invocation once.
     */
    invocations: number;
    /**
     * The number of invocations that were successfully completed.
     */
    completed: number;
    /**
     * The number of invocation retries attempted. This counts retries
     * attempted by faast.js to recover from transient errors, but does not
     * count retries by the cloud provider.
     */
    retries: number;
    /**
     * The number of invocations that resulted in an error. If an invocation is
     * retried, an error is only counted once, no matter how many retries were
     * attempted.
     */
    errors: number;
    /**
     * Summarize the function stats as a string.
     * @returns a string showing the value of completed, retries, errors, and
     * mean execution time. This string excludes invocations by default because
     * it is often fixed.
     */
    toString(): string;
    /** @internal */
    clone(): FunctionStats;
}
export declare class FunctionExecutionMetrics {
    secondMetrics: Statistics[];
}
export declare type CallId = string;
export interface ResponseContext {
    type: "fulfill" | "reject";
    value: string;
    callId: CallId;
    isErrorObject?: boolean;
    remoteExecutionStartTime?: number;
    remoteExecutionEndTime?: number;
    logUrl?: string;
    instanceId?: string;
    executionId?: string;
    memoryUsage?: NodeJS.MemoryUsage;
    timestamp?: number;
}
export interface PromiseResponseMessage extends ResponseContext {
    kind: "promise";
}
export interface IteratorResponseMessage extends ResponseContext {
    kind: "iterator";
    sequence: number;
}
export interface FunctionStartedMessage {
    kind: "functionstarted";
    callId: CallId;
}
export interface CpuMetricsMessage {
    kind: "cpumetrics";
    callId: CallId;
    metrics: CpuMeasurement;
}
export interface PollResult {
    Messages: Message[];
    isFullMessageBatch?: boolean;
}
export declare type Message = PromiseResponseMessage | IteratorResponseMessage | FunctionStartedMessage | CpuMetricsMessage;
export declare type Kind = Message["kind"];
export declare type UUID = string;
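/**
 * Filter a batch of messages down to a single kind.
 * @remarks
 * A minimal sketch of how the result type is narrowed (the `messages` value
 * is hypothetical):
 *
 * ```typescript
 * declare const messages: Message[];
 *
 * // Only CpuMetricsMessage entries remain after filtering on "cpumetrics".
 * const cpu = filterMessages(messages, "cpumetrics");
 * for (const m of cpu) {
 *     console.log(m.callId, m.metrics);
 * }
 * ```
 */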
export declare function filterMessages<K extends Kind>(messages: Message[], kind: K): (Extract<PromiseResponseMessage, {
    kind: K;
}> | Extract<IteratorResponseMessage, {
    kind: K;
}> | Extract<FunctionStartedMessage, {
    kind: K;
}> | Extract<CpuMetricsMessage, {
    kind: K;
}>)[];
export interface ProviderImpl<O extends CommonOptions, S> {
    name: Provider;
    defaults: Required<O>;
    initialize(serverModule: string, nonce: UUID, options: Required<O>): Promise<S>;
    costSnapshot(state: S, stats: FunctionStats): Promise<CostSnapshot>;
    cleanup(state: S, options: Required<CleanupOptions>): Promise<void>;
    logUrl(state: S): string;
    invoke(state: S, request: FunctionCall, cancel: Promise<void>): Promise<void>;
    poll(state: S, cancel: Promise<void>): Promise<PollResult>;
    responseQueueId(state: S): string;
}