/**
* Faast.js transforms ordinary JavaScript modules into serverless cloud
* functions that can run on AWS Lambda.
*
* The main entry point to faast.js is the {@link faast} function, which returns
* an object that implements the {@link FaastModule} interface. The most common
* options are {@link CommonOptions}. Using faast.js requires writing two
* modules, one containing the functions to upload to the cloud, and the other
* that invokes faast.js and calls the resulting cloud functions.
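*
* A minimal sketch of this two-module pattern; the file names and the
* `hello` function are hypothetical placeholders:
* ```typescript
* // functions.ts - the module whose functions run in the cloud.
* export function hello(name: string) {
*     return `hello ${name}!`;
* }
*
* // index.ts - the module that invokes faast.js and calls the cloud functions.
* import { faast } from "faastjs";
* import * as funcs from "./functions";
*
* (async () => {
*     const faastModule = await faast("aws", funcs);
*     try {
*         console.log(await faastModule.functions.hello("world"));
*     } finally {
*         await faastModule.cleanup();
*     }
* })();
* ```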
* @packageDocumentation
*/
/// <reference types="node" />
import childProcess from 'child_process';
import { CloudWatchLogs } from '@aws-sdk/client-cloudwatch-logs';
import { CreateFunctionRequest } from '@aws-sdk/client-lambda';
import { default as debug_2 } from 'debug';
import { IAM } from '@aws-sdk/client-iam';
import { Lambda } from '@aws-sdk/client-lambda';
import { Pricing } from '@aws-sdk/client-pricing';
import { S3 } from '@aws-sdk/client-s3';
import { SNS } from '@aws-sdk/client-sns';
import { SQS } from '@aws-sdk/client-sqs';
import { STS } from '@aws-sdk/client-sts';
import { VError } from 'verror';
import webpack from 'webpack';
import { Writable } from 'stream';
declare type AnyFunction = (...args: any[]) => any;
/**
* `Async<T>` maps regular values to Promises and Iterators to AsyncIterators.
* If `T` is already a Promise or an AsyncIterator, it remains the same. This
* type is used to infer the return value of cloud functions from the types of
* the functions in the user's input module.
* @public
*/
export declare type Async<T> = T extends AsyncGenerator<infer R> ? AsyncGenerator<R> : T extends Generator<infer R> ? AsyncGenerator<R> : T extends Promise<infer R> ? Promise<R> : Promise<T>;
/**
* `AsyncDetail<T>` is similar to {@link Async} except it maps return values R to
* `Detail<R>`, which is the return value with additional information about each
* cloud function invocation.
* @public
*/
export declare type AsyncDetail<T> = T extends AsyncGenerator<infer R> ? AsyncGenerator<Detail<R>> : T extends Generator<infer R> ? AsyncGenerator<Detail<R>> : T extends Promise<infer R> ? Promise<Detail<R>> : Promise<Detail<T>>;
declare class AsyncIterableQueue<T> extends AsyncQueue<IteratorResult<T>> {
push(value: T | Promise<T>): void;
done(): void;
[Symbol.asyncIterator](): this;
}
declare class AsyncQueue<T> {
protected deferred: Array<Deferred<T>>;
protected enqueued: Promise<T>[];
enqueue(value: T | Promise<T>): void;
next(): Promise<T>;
clear(): void;
}
/**
* Factory for AWS service clients, which allows for custom construction and configuration.
* {@link https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html#configuration | AWS Configuration}.
* @public
* @remarks
* This is an advanced option. It provides a way for a faast.js client to
* instantiate AWS service objects with custom options.
* Note that if you create a service object yourself, it won't have the
* default options that faast.js would use, which are:
*
* - maxAttempts (faast.js default: 6)
* - region (faast.js default: "us-west-2")
* - logger (faast.js default: log.awssdk)
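*
* A minimal sketch of a custom factory that overrides only the Lambda
* client (the region shown is illustrative):
* ```typescript
* import { Lambda } from "@aws-sdk/client-lambda";
*
* const awsClientFactory: AwsClientFactory = {
*     // Clients you construct yourself do not get the faast.js defaults
*     // listed above, so set maxAttempts and region explicitly if needed.
*     createLambda: () => new Lambda({ region: "us-west-2", maxAttempts: 6 })
* };
* // Pass it to faastAws via AwsOptions.awsClientFactory:
* // await faastAws(funcs, { awsClientFactory });
* ```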
*/
export declare interface AwsClientFactory {
createCloudWatchLogs?: () => CloudWatchLogs;
createIAM?: () => IAM;
createLambda?: () => Lambda;
/**
* A special factory for creating the AWS Lambda clients that are
* used for faast.js invocations. These special clients have the following
* options set by default in faast.js:
*
* // Retries are handled by faast.js, not the sdk.
* maxAttempts: 0,
*/
createLambdaForInvocations?: () => Lambda;
createPricing?: () => Pricing;
createS3?: () => S3;
createSNS?: () => SNS;
createSQS?: () => SQS;
createSts?: () => STS;
}
/**
* The return type of {@link faastAws}. See {@link FaastModuleProxy}.
* @public
*/
export declare type AwsFaastModule<M extends object = object> = FaastModuleProxy<M, AwsOptions, AwsState>;
declare type AwsGcWork = {
type: "SetLogRetention";
logGroupName: string;
retentionInDays: number;
} | {
type: "DeleteResources";
resources: AwsResources;
} | {
type: "DeleteLayerVersion";
LayerName: string;
VersionNumber: number;
};
declare interface AwsLayerInfo {
Version: number;
LayerVersionArn: string;
LayerName: string;
}
declare class AwsMetrics {
outboundBytes: number;
sns64kRequests: number;
sqs64kRequests: number;
}
/**
* AWS-specific options for {@link faastAws}.
* @public
*/
export declare interface AwsOptions extends CommonOptions {
/**
* The region to create resources in. Garbage collection is also limited to
* this region. Default: `"us-west-2"`.
*/
region?: AwsRegion;
/**
* The role that the lambda function will assume when executing user code.
* Default: `"faast-cached-lambda-role"`. Rarely used.
* @remarks
* When a lambda executes, it first assumes an
* {@link https://docs.aws.amazon.com/lambda/latest/dg/lambda-intro-execution-role.html | execution role}
* to grant access to resources.
*
* By default, faast.js creates this execution role for you and leaves it
* permanently in your account (the role is shared across all lambda
* functions created by faast.js). By default, faast.js grants administrator
* privileges to this role so your code can perform any AWS operation it
* requires.
*
* You can
* {@link https://console.aws.amazon.com/iam/home#/roles | create a custom role}
* that specifies more limited permissions if you prefer not to grant
* administrator privileges. Any role you assign for faast.js modules needs
* at least the following permissions:
*
* - Execution Role:
* ```json
* {
* "Version": "2012-10-17",
* "Statement": [
* {
* "Effect": "Allow",
* "Action": ["logs:*"],
* "Resource": "arn:aws:logs:*:*:log-group:faast-*"
* },
* {
* "Effect": "Allow",
* "Action": ["sqs:*"],
* "Resource": "arn:aws:sqs:*:*:faast-*"
* }
* ]
* }
* ```
*
* - Trust relationship (also known as `AssumeRolePolicyDocument` in the AWS
* SDK):
* ```json
* {
* "Version": "2012-10-17",
* "Statement": [
* {
* "Effect": "Allow",
* "Principal": {
* "Service": "lambda.amazonaws.com"
* },
* "Action": "sts:AssumeRole"
* }
* ]
* }
* ```
*
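* For example, a sketch of supplying a custom role (the role name is
* hypothetical and must already exist in your account):
* ```typescript
* const faastModule = await faastAws(funcs, {
*     RoleName: "my-custom-faast-role"
* });
* ```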
*/
RoleName?: string;
/**
* Additional options to pass to AWS Lambda creation. See
* {@link https://docs.aws.amazon.com/lambda/latest/dg/API_CreateFunction.html | CreateFunction}.
* @remarks
* If you need specialized options, you can pass them to the AWS Lambda SDK
* directly. Note that if you override any settings set by faast.js, you may
* cause faast.js to not work:
*
* ```typescript
* const request: aws.Lambda.CreateFunctionRequest = {
* FunctionName,
* Role,
* Runtime: "nodejs18.x",
* Handler: "index.trampoline",
* Code,
* Description: "faast trampoline function",
* Timeout,
* MemorySize,
* ...awsLambdaOptions
* };
* ```
*/
awsLambdaOptions?: Partial<CreateFunctionRequest>;
/**
* AWS service factories. See {@link AwsClientFactory}.
*/
awsClientFactory?: AwsClientFactory;
/** @internal */
_gcWorker?: (work: AwsGcWork, services: AwsServices) => Promise<void>;
}
/**
* Valid AWS
* {@link https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html | regions}.
* Not all of these regions have Lambda support.
* @public
*/
export declare type AwsRegion = "us-east-1" | "us-east-2" | "us-west-1" | "us-west-2" | "ca-central-1" | "eu-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "ap-northeast-1" | "ap-northeast-2" | "ap-northeast-3" | "ap-southeast-1" | "ap-southeast-2" | "ap-south-1" | "sa-east-1";
declare interface AwsResources {
FunctionName: string;
RoleName: string;
region: AwsRegion;
ResponseQueueUrl?: string;
ResponseQueueArn?: string;
RequestTopicArn?: string;
SNSLambdaSubscriptionArn?: string;
logGroupName: string;
layer?: AwsLayerInfo;
Bucket?: string;
}
declare interface AwsServices {
readonly lambda: Lambda;
readonly lambda2: Lambda;
readonly cloudwatch: CloudWatchLogs;
readonly iam: IAM;
readonly sqs: SQS;
readonly sns: SNS;
readonly pricing: Pricing;
readonly sts: STS;
readonly s3: S3;
}
/**
* @public
*/
declare interface AwsState {
/** @internal */
resources: AwsResources;
/** @internal */
services: AwsServices;
/** @internal */
options: Required<AwsOptions>;
/** @internal */
metrics: AwsMetrics;
/** @internal */
gcPromise?: Promise<"done" | "skipped">;
}
declare interface CallId {
callId: string;
}
declare type CallId_2 = string;
declare interface CallingContext {
call: FunctionCall;
startTime: number;
logUrl?: string;
executionId?: string;
instanceId?: string;
}
/**
* Options that apply to the {@link FaastModule.cleanup} method.
* @public
*/
export declare interface CleanupOptions {
/**
* If true, delete provider cloud resources. Default: true.
* @remarks
* The cleanup operation has two functions: stopping the faast.js runtime
* and deleting cloud resources that were instantiated. If `deleteResources`
* is false, then only the runtime is stopped and no cloud resources are
* deleted. This can be useful for debugging and examining the state of
* resources created by faast.js.
*
* It is supported to call {@link FaastModule.cleanup} twice: once with
* `deleteResources` set to `false`, which only stops the runtime, and then
* again set to `true` to delete resources. This can be useful for testing.
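*
* A sketch of this two-phase cleanup:
* ```typescript
* // Stop the faast.js runtime but keep cloud resources for inspection.
* await faastModule.cleanup({ deleteResources: false });
* // ...examine logs, queues, and other resources...
* // Later, delete the resources.
* await faastModule.cleanup();
* ```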
*/
deleteResources?: boolean;
/**
* If true, delete cached resources. Default: false.
* @remarks
* Some resources are cached persistently between calls for performance
* reasons. If this option is set to true, these cached resources are
* deleted when cleanup occurs, instead of being left behind for future use.
* For example, on AWS this includes the Lambda Layers that are created for
* {@link CommonOptions.packageJson} dependencies. Note that only the cached
* resources created by this instance of FaastModule are deleted, not cached
* resources from other FaastModules. This is similar to setting
* `useCachedDependencies` to `false` during function construction, except
* `deleteCaches` can be set at function cleanup time, and any other
* FaastModules created before cleanup may use the cached Layers.
*/
deleteCaches?: boolean;
/**
* Number of seconds to wait for garbage collection. Default: 10.
* @remarks
* Garbage collection can still be operating when cleanup is called; this
* option limits the amount of time faast waits for the garbage collector.
* If set to 0, the wait is unlimited.
*/
gcTimeout?: number;
}
/**
* Options common across all faast.js providers. Used as argument to {@link faast}.
* @remarks
* There are also more specific options for each provider. See
* {@link AwsOptions} and {@link LocalOptions}.
* @public
*/
export declare interface CommonOptions {
/**
* If true, create a child process to isolate user code from faast
* scaffolding. Default: true.
* @remarks
* If a child process is not created, faast runs in the same node instance
* as the user code and may not execute in a timely fashion because user
* code may
* {@link https://nodejs.org/en/docs/guides/dont-block-the-event-loop/ | block the event loop}.
* Creating a child process for user code allows faast.js to continue
* executing even if user code never yields. This provides better
* reliability and functionality:
*
* - Detect timeout errors more reliably, even if the function doesn't
* relinquish the CPU. Not applicable to AWS, which sends separate failure
* messages in case of timeout. See {@link CommonOptions.timeout}.
*
* - CPU metrics used for detecting invocations with high latency, which can
* be used for automatically retrying calls to reduce tail latency.
*
* The cost of creating a child process is mainly in the memory overhead of
* creating another node process.
*/
childProcess?: boolean;
/**
* When childProcess is true, the child process will be spawned with the
* value of this property as the setting for --max-old-space-size.
* @remarks
* This is useful if a function requires the node process to limit its
* memory so that another spawned process (e.g. a browser instance) can use
* the rest.
* @public
*/
childProcessMemoryMb?: number;
/**
* The maximum number of concurrent invocations to allow. Default: 100,
* except for the `local` provider, where the default is 10.
* @remarks
* The concurrency limit applies to all invocations of all of the faast
* functions summed together. It is not a per-function limit. To apply a
* per-function limit, use {@link throttle}. A value of 0 is equivalent to
* Infinity. A value of 1 ensures mutually exclusive invocations.
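*
* For example (a sketch; `funcs` is your cloud module and the limit shown
* is arbitrary):
* ```typescript
* // Allow up to 250 invocations to be in flight at once.
* const faastModule = await faast("aws", funcs, { concurrency: 250 });
* ```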
*/
concurrency?: number;
/**
* A user-supplied description for this function, which may make it easier
* to track different functions when multiple functions are created.
*/
description?: string;
/**
* Exclude a subset of files included by {@link CommonOptions.include}.
* @remarks
* The exclusion can be a directory or glob. Exclusions apply to all included
* entries.
*/
exclude?: string[];
/**
* Rate limit invocations (invocations/sec). Default: no rate limit.
* @remarks
* Some services cannot handle more than a certain number of requests per
* second, and it is easy to overwhelm them with a large number of cloud
* functions. Specify a rate limit in invocations/second to restrict how
* faast.js issues requests.
*/
rate?: number;
/**
* Environment variables available during serverless function execution.
* Default: \{\}.
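* @remarks
* For example (a sketch; the variable name is illustrative):
* ```typescript
* const faastModule = await faast("aws", funcs, {
*     env: { DEBUG: "myapp:*" }
* });
* ```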
*/
env?: {
[key: string]: string;
};
/**
* Garbage collector mode. Default: `"auto"`.
* @remarks
* Garbage collection deletes resources that were created by previous
* instantiations of faast that were not cleaned up by
* {@link FaastModule.cleanup}, either because it was not called or because
* the process terminated and did not execute this cleanup step. In `"auto"`
* mode, garbage collection may be throttled to run up to once per hour no
* matter how many faast.js instances are created. In `"force"` mode,
* garbage collection is run without regard to whether another gc has
* already been performed recently. In `"off"` mode, garbage collection is
* skipped entirely. This can be useful for performance-sensitive tests, or
* for more control over when gc is performed.
*
* Garbage collection is cloud-specific, but in general garbage collection
* should not interfere with the behavior or performance of faast cloud
* functions. When {@link FaastModule.cleanup} runs, it waits for garbage
* collection to complete. Therefore the cleanup step can in some
* circumstances take a significant amount of time even after all
* invocations have returned.
*
* It is generally recommended to leave garbage collection in `"auto"` mode,
* otherwise garbage resources may accumulate over time and you will
* eventually hit resource limits on your account.
*
* Also see {@link CommonOptions.retentionInDays}.
*/
gc?: "auto" | "force" | "off";
/**
* Include files to make available in the remote function. See
* {@link IncludeOption}.
* @remarks
* Each include entry is a directory or glob pattern. Paths can be specified
* as relative or absolute paths. Relative paths are resolved relative to
* the current working directory, or relative to the `cwd` option.
*
* If the include entry is a directory `"foo/bar"`, the directory
* `"./foo/bar"` will be available in the cloud function. Directories are
* recursively added.
*
* Glob patterns use the syntax of
* {@link https://github.com/isaacs/node-glob | node glob}.
*
* Also see {@link CommonOptions.exclude} for file exclusions.
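*
* For example (a sketch; the paths are hypothetical):
* ```typescript
* const faastModule = await faast("aws", funcs, {
*     include: ["data/*.json"],
*     exclude: ["*.md"]
* });
* ```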
*/
include?: (string | IncludeOption)[];
/**
* Maximum number of times that faast will retry each invocation. Default: 2
* (invocations can therefore be attempted 3 times in total).
* @remarks
* Retries are automatically attempted for transient infrastructure-level
* failures such as rate limits or network failures. User-level exceptions
* are not retried automatically. In addition to retries performed by faast,
* some providers automatically attempt retries. These are not controllable
* by faast.js. As a result, your function may be retried many more times
* than this setting suggests.
*/
maxRetries?: number;
/**
* Memory limit for each function in MB. This setting has an effect on
* pricing. Default varies by provider.
* @remarks
* Each provider has different settings for memory size, and performance
* varies depending on the setting. By default faast picks a likely optimal
* value for each provider.
*
* - aws: 1728MB
*
* - local: 512MB (however, memory size limits aren't reliable in local mode.)
*/
memorySize?: number;
/**
* Specify invocation mode. Default: `"auto"`.
* @remarks
* Modes specify how invocations are triggered. In https mode, the functions
* are invoked through an https request or the provider's API. In queue
* mode, a provider-specific queue is used to invoke functions. Queue mode
* adds additional latency and (usually negligible) cost, but may scale
* better for some providers. In auto mode the best default is chosen for
* each provider depending on its particular performance characteristics.
*
* The defaults are:
*
* - aws: `"auto"` is `"https"`. In https mode, the AWS SDK api
* is used to invoke functions. In queue mode, an AWS SNS topic is created
* and triggers invocations. The AWS API Gateway service is never used by
* faast, as it incurs a higher cost and is not needed to trigger
* invocations.
*
* - local: The local provider ignores the mode setting and always uses an
* internal asynchronous queue to schedule calls.
*
* Size limits are affected by the choice of mode. On AWS the limit is 256kb
* for arguments and return values in `"queue"` mode, and 6MB for `"https"`
* mode.
*
* Note that no matter which mode is selected, faast.js always creates a
* queue for sending back intermediate results for bookkeeping and
* performance monitoring.
*/
mode?: "https" | "queue" | "auto";
/**
* Specify a package.json file to include with the code package.
* @remarks
* By default, faast.js will use webpack to bundle dependencies your remote
* module imports. In normal usage there is no need to specify a separate
* package.json, as webpack will statically analyze your imports and
* determine which files to bundle.
*
* However, there are some use cases where this is not enough. For example,
* some dependencies contain native code compiled during installation, which
* webpack cannot bundle, and other dependencies are specifically not designed
* to work with webpack. In
* these cases, you can create a separate `package.json` for these
* dependencies and pass the filename as the `packageJson` option. If
* `packageJson` is an `object`, it is assumed to be a parsed JSON object
* with the same structure as a package.json file (useful for specifying a
* synthetic `package.json` directly in code).
*
* The way the `packageJson` is handled varies by provider:
*
* - local: Runs `npm install` in a temporary directory it prepares for the
* function.
*
* - aws: Recursively calls faast.js to run `npm install` inside a separate
* lambda function specifically created for this purpose. Faast.js uses
* lambda to install dependencies to ensure that native dependencies are
* compiled in an environment that can produce binaries linked against
* lambda's
* {@link https://aws.amazon.com/blogs/compute/running-executables-in-aws-lambda/ | execution environment}.
* Packages are saved in a Lambda Layer.
*
* For AWS, if {@link CommonOptions.useDependencyCaching} is `true` (which
* is the default), then the Lambda Layer created will be reused in future
* function creation requests if the contents of `packageJson` are the same.
*
* The `FAAST_PACKAGE_DIR` environment variable can be useful for debugging
* `packageJson` issues.
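*
* A sketch of specifying dependencies to be installed remotely instead of
* bundled (the `sharp` package is just an example of a dependency with
* native code; the version is illustrative):
* ```typescript
* const faastModule = await faastAws(funcs, {
*     packageJson: {
*         dependencies: {
*             sharp: "^0.33.0"
*         }
*     }
* });
* ```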
*/
packageJson?: string | object;
/**
* Cache installed dependencies from {@link CommonOptions.packageJson}. Only
* applies to AWS. Default: true.
* @remarks
* If `useDependencyCaching` is `true`, the resulting `node_modules` folder
* is cached in a Lambda Layer with the name `faast-${key}`, where `key` is
* the SHA1 hash of the `packageJson` contents. These cache entries are
* removed by garbage collection, by default after 24h. Using caching
* reduces the need to install and upload dependencies every time a function
* is created. This is important for AWS because it creates an entirely
* separate lambda function to install dependencies remotely, which can
* substantially increase function deployment time.
*
* If `useDependencyCaching` is false, the lambda layer is created with the
* same name as the lambda function, and then is deleted when cleanup is
* run.
*/
useDependencyCaching?: boolean;
/**
* Specify how many days to wait before reclaiming cloud garbage. Default:
* 1.
* @remarks
* Garbage collection only deletes resources after they age beyond a certain
* number of days. This option specifies how many days old a resource needs
* to be before being considered garbage by the collector. Note that this
* setting is not recorded when the resources are created. For example,
* suppose this is the sequence of events:
*
* - Day 0: `faast()` is called with `retentionInDays` set to 5. Then, the
* function crashes (or omits the call to {@link FaastModule.cleanup}).
*
* - Day 1: `faast()` is called with `retentionInDays` set to 1.
*
* In this sequence of events, on Day 0 the garbage collector runs and
* removes resources with age older than 5 days. Then the function leaves
* new garbage behind because it crashed or did not complete cleanup. On Day
* 1, the garbage collector runs and deletes resources at least 1 day old,
* which includes garbage left behind from Day 0 (based on the creation
* timestamp of the resources). This deletion occurs even though retention
* was set to 5 days when resources were created on Day 0.
*
* Note that if `retentionInDays` is set to 0, garbage collection will
* remove all resources, even ones that may be in use by other running faast
* instances. Not recommended.
*
* See {@link CommonOptions.gc}.
*/
retentionInDays?: number;
/**
* Reduce tail latency by retrying invocations that take substantially
* longer than other invocations of the same function. Default: 3.
* @remarks
* faast.js automatically measures the mean and standard deviation (σ) of
* the time taken by invocations of each function. Retries are attempted
* when the time for an invocation exceeds the mean time by a certain
* threshold. `speculativeRetryThreshold` specifies how many multiples of σ
* an invocation needs to exceed the mean for a given function before retry
* is attempted.
*
* The default threshold is 3σ. This means a call to a function is retried
* when the time to execute exceeds three standard deviations from the mean
* of all prior executions of the same function.
*
* This feature is experimental.
* @beta
*/
speculativeRetryThreshold?: number;
/**
* Execution time limit for each invocation, in seconds. Default: 60.
* @remarks
* Each provider has a maximum time limit for how long invocations can run
* before being automatically terminated (or frozen). The following are the
* maximum time limits as of February 2019:
*
* - aws:
* {@link https://docs.aws.amazon.com/lambda/latest/dg/limits.html | 15 minutes}
*
* - local: unlimited
*
* Faast.js has a proactive timeout detection feature. It automatically
* attempts to detect when the time limit is about to be reached and
* proactively sends a timeout exception. Faast does this because not all
* providers reliably send timely feedback when timeouts occur, leaving
* developers to look through cloud logs. In general faast.js' timeout will
* be up to 5s earlier than the timeout specified, in order to give faast.js
* time to send a timeout message. Proactive timeout detection
* only works with {@link CommonOptions.childProcess} set to `true` (the
* default).
*/
timeout?: number;
/**
* Extra webpack options to use to bundle the code package.
* @remarks
* By default, faast.js uses webpack to bundle the code package. Webpack
* automatically handles finding and bundling dependencies, adding source
* mappings, etc. If you need specialized bundling, use this option to add
* or override the default webpack configuration. The library
* {@link https://github.com/survivejs/webpack-merge | webpack-merge} is
* used to combine configurations.
*
* ```typescript
* const config: webpack.Configuration = merge({
* entry,
* mode: "development",
* output: {
* path: "/",
* filename: outputFilename,
* libraryTarget: "commonjs2"
* },
* target: "node",
* resolveLoader: { modules: [__dirname, `${__dirname}/dist`] },
* node: { global: true, __dirname: false, __filename: false }
* },
* webpackOptions);
* ```
*
* Take care when setting the values of `entry`, `output`, or
* `resolveLoader`. If these options are overwritten, faast.js may fail to
* bundle your code. In particular, set `entry` to an array value so that
* `webpack-merge` concatenates it with the entry faast.js inserts for you,
* instead of replacing it.
*
* Default:
*
* - aws: `{ externals: [new RegExp("^aws-sdk/?")] }`. In the lambda
* environment `"aws-sdk"` is available in the ambient environment and
* does not need to be bundled.
*
* - other providers: `{}`
*
* The `FAAST_PACKAGE_DIR` environment variable can be useful for debugging
* webpack issues.
*/
webpackOptions?: webpack.Configuration;
/**
* Check that arguments and return values from cloud functions are serializable
* without losing information. Default: true.
* @remarks
* Arguments to cloud functions are automatically serialized with
* `JSON.stringify` with a custom replacer that handles built-in JavaScript
* types such as `Date` and `Buffer`. Return values go through the same
* process. Some JavaScript objects cannot be serialized. By default
* `validateSerialization` will verify that every argument and return value
* can be serialized and deserialized without losing information. A
* `FaastError` will be thrown if faast.js detects a problem according to
* the following procedure:
*
* 1. Serialize arguments and return values with `JSON.stringify` using a
* special `replacer` function.
*
* 2. Deserialize the values with `JSON.parse` with a special `reviver`
* function.
*
* 3. Use
* {@link https://nodejs.org/api/assert.html#assert_assert_deepstrictequal_actual_expected_message | assert.deepStrictEqual}
* to compare the original object with the deserialized object from step
* 2.
*
* There is some overhead to this process because each argument is
* serialized and deserialized, which can be costly if arguments or return
* values are large.
*/
validateSerialization?: boolean;
/**
* Debugging output options.
* @internal
*/
debugOptions?: {
[key: string]: boolean;
};
}
/**
* Analyze the cost of a workload across many provider configurations.
* @public
*/
export declare namespace CostAnalyzer {
/**
* An input to {@link CostAnalyzer.analyze}, specifying one
* configuration of faast.js to run against a workload. See
* {@link AwsOptions}.
* @public
*/
export type Configuration = {
provider: "aws";
options: AwsOptions;
};
/**
* Default AWS cost analyzer configurations include all memory sizes for AWS
* Lambda.
* @remarks
* The default AWS cost analyzer configurations include every memory size
* from 128MB to 3008MB in 64MB increments. Each configuration has the
* following settings:
*
* ```typescript
* {
* provider: "aws",
* options: {
* mode: "https",
* memorySize,
* timeout: 300,
* gc: "off",
* childProcess: true
* }
* }
* ```
*
* Use `Array.map` to change or `Array.filter` to remove some of these
* configurations. For example:
*
* ```typescript
* const configsWithAtLeast1GB = awsConfigurations.filter(c => c.options.memorySize! >= 1024);
* const shorterTimeout = awsConfigurations.map(c => ({ ...c, options: { ...c.options, timeout: 60 } }));
* ```
* @public
*/
const awsConfigurations: Configuration[];
/**
* User-defined custom metrics for a workload. These are automatically
* summarized in the output; see {@link CostAnalyzer.Workload}.
* @public
*/
export type WorkloadAttribute<A extends string> = {
[attr in A]: number;
};
/**
* A user-defined cost analyzer workload for {@link CostAnalyzer.analyze}.
* @public
* Example:
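* A sketch of a workload with one custom attribute; `funcs` and its
* `processItem` function are hypothetical placeholders for your own module:
* ```typescript
* const workload: CostAnalyzer.Workload<typeof funcs, "latency"> = {
*     funcs,
*     work: async faastModule => {
*         const start = Date.now();
*         await faastModule.functions.processItem("item");
*         return { latency: Date.now() - start };
*     }
* };
* const result = await CostAnalyzer.analyze(workload);
* ```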
*/
export interface Workload<T extends object, A extends string> {
/**
* The imported module that contains the cloud functions to test.
*/
funcs: T;
/**
* A function that executes cloud functions on
* `faastModule.functions.*`. The work function should return `void` if
* there are no custom workload attributes. Otherwise, it should return
* a {@link CostAnalyzer.WorkloadAttribute} object which maps
* user-defined attribute names to numerical values for the workload.
* For example, this might measure bandwidth or some other metric not
* tracked by faast.js, but relevant for evaluating the
* cost-performance tradeoff of the configurations analyzed by the cost
* analyzer.
*/
work: (faastModule: FaastModule<T>) => Promise<WorkloadAttribute<A> | void>;
/**
* An array of configurations to run the work function against (see
* {@link CostAnalyzer.Configuration}). For example, each entry in the
* array may specify a provider, memory size, and other options.
* Default: {@link CostAnalyzer.awsConfigurations}.
*/
configurations?: Configuration[];
/**
* Combine {@link CostAnalyzer.WorkloadAttribute} instances returned
* from multiple workload executions (caused by value of
* {@link CostAnalyzer.Workload.repetitions}). The default is a function
* that takes the average of each attribute.
*/
summarize?: (summaries: WorkloadAttribute<A>[]) => WorkloadAttribute<A>;
/**
* Format an attribute value for console output. This is displayed by
* the cost analyzer when all of the repetitions for a configuration
* have completed. The default returns
* `${attribute}:${value.toFixed(1)}`.
*/
format?: (attr: A, value: number) => string;
/**
* Format an attribute value for CSV. The default returns
* `value.toFixed(1)`.
*/
formatCSV?: (attr: A, value: number) => string;
/**
* If true, do not output live results to the console. Can be useful for
* running the cost analyzer as part of automated tests. Default: false.
*/
silent?: boolean;
/**
* The number of repetitions to run the workload for each cost analyzer
* configuration. Higher repetitions help reduce the jitter in the
* results. Repetitions execute in the same FaastModule instance.
* Default: 10.
*/
repetitions?: number;
/**
* The amount of concurrency to allow. Concurrency can arise from
* multiple repetitions of the same configuration, or concurrent
* executions of different configurations. This concurrency limit
* throttles the total number of concurrent workload executions across
* both of these sources of concurrency. Default: 64.
*/
concurrency?: number;
}
/**
* A cost estimate result for a specific cost analyzer configuration.
* @public
*/
export interface Estimate<A extends string> {
/**
* The cost snapshot for the cost analysis of the specific (workload,
* configuration) combination. See {@link CostSnapshot}.
*/
costSnapshot: CostSnapshot;
/**
* The workload configuration that was analyzed. See
* {@link CostAnalyzer.Configuration}.
*/
config: Configuration;
/**
* Additional workload metrics returned from the work function. See
* {@link CostAnalyzer.WorkloadAttribute}.
*/
extraMetrics: WorkloadAttribute<A>;
}
/**
* Estimate the cost of a workload using multiple configurations and
* providers.
* @param userWorkload - a {@link CostAnalyzer.Workload} object specifying
* the workload to run and additional parameters.
* @returns A promise for a {@link CostAnalyzer.Result}
* @public
* @remarks
* It can be deceptively difficult to set optimal parameters for AWS Lambda
* and similar services. On the surface there appears to be only one
* parameter: memory size. Choosing more memory also gives more CPU
* performance, but it's unclear how much. It's also unclear where single
* core performance stops getting better. The workload cost analyzer solves
* these problems by making it easy to run cost experiments.
* ```text
* (AWS)
* ┌───────┐
* ┌────▶│ 128MB │
* │ └───────┘
* │ ┌───────┐
* ┌─────────────────┐ ├────▶│ 256MB │
* ┌──────────────┐ │ │ │ └───────┘
* │ workload │───▶│ │ │ ...
* └──────────────┘ │ │ │ ┌───────┐
* │ cost analyzer │─────┼────▶│3008MB │
* ┌──────────────┐ │ │ └───────┘
* │configurations│───▶│ │
* └──────────────┘ │ │
* └─────────────────┘
*
* ```
* `CostAnalyzer.analyze` is the entry point. It automatically runs this workload
* against multiple configurations in parallel. Then it uses faast.js' cost
* snapshot mechanism to automatically determine the price of running the
* workload with each configuration.
*
* Example:
*
* ```typescript
* // functions.ts
* export function randomNumbers(n: number) {
* let sum = 0;
* for (let i = 0; i < n; i++) {
* sum += Math.random();
* }
* return sum;
* }
*
* // cost-analyzer-example.ts
* import { writeFileSync } from "fs";
* import { CostAnalyzer, FaastModule } from "faastjs";
* import * as funcs from "./functions";
*
* async function work(faastModule: FaastModule) {
* await faastModule.functions.randomNumbers(100000000);
* }
*
* async function main() {
* const results = await CostAnalyzer.analyze({ funcs, work });
* writeFileSync("cost.csv", results.csv());
* }
*
* main();
* ```
*
* Example output (this is printed to `console.log` unless the
* {@link CostAnalyzer.Workload.silent} option is `true`):
* ```text
* ✔ aws 128MB queue 15.385s 0.274σ $0.00003921
* ✔ aws 192MB queue 10.024s 0.230σ $0.00003576
* ✔ aws 256MB queue 8.077s 0.204σ $0.00003779
* ▲ ▲ ▲ ▲ ▲ ▲
* │ │ │ │ │ │
* provider │ mode │ stdev average
* │ │ execution estimated
* memory │ time cost
* size │
* average cloud
* execution time
* ```
*
* The output lists the provider, memory size, mode ({@link CommonOptions.mode}),
* average time of a single execution of the workload, the standard
* deviation (in seconds) of the execution time, and average estimated cost
* for a single run of the workload.
*
* The "execution time" referenced here is not wall clock time, but rather
* execution time in the cloud function. The execution time does not include
* any time the workload spends waiting locally. If the workload invokes
* multiple cloud functions, their execution times will be summed even if
* they happen concurrently. This ensures the execution time and cost are
* aligned.
*/
export function analyze<T extends object, A extends string>(userWorkload: Workload<T, A>): Promise<Result<T, A>>;
/**
* Cost analyzer results for each workload and configuration.
* @remarks
* The `estimates` property has the cost estimates for each configuration.
* See {@link CostAnalyzer.Estimate}.
* @public
*/
export class Result<T extends object, A extends string> {
/** The workload analyzed. */
readonly workload: Required<Workload<T, A>>;
/**
* Cost estimates for each configuration of the workload. See
* {@link CostAnalyzer.Estimate}.
*/
readonly estimates: Estimate<A>[];
/** @internal */
constructor(
/** The workload analyzed. */
workload: Required<Workload<T, A>>,
/**
* Cost estimates for each configuration of the workload. See
* {@link CostAnalyzer.Estimate}.
*/
estimates: Estimate<A>[]);
/**
* Comma-separated output of cost analyzer. One line per cost analyzer
* configuration.
* @remarks
* The columns are:
*
* - `memory`: The memory size allocated.
*
* - `cloud`: The cloud provider.
*
* - `mode`: See {@link CommonOptions.mode}.
*
* - `options`: A string summarizing other faast.js options applied to the
* `workload`. See {@link CommonOptions}.
*
* - `completed`: Number of repetitions that successfully completed.
*
* - `errors`: Number of invocations that failed.
*
* - `retries`: Number of retries that were attempted.
*
* - `cost`: The average cost of executing the workload once.
*
* - `executionTime`: the aggregate time spent executing on the provider for
* all cloud function invocations in the workload. This is averaged across
* repetitions.
*
* - `executionTimeStdev`: The standard deviation of `executionTime`.
*
* - `billedTime`: the same as `executionTime`, except rounded up to the next
* 100ms for each invocation. Usually very close to `executionTime`.
*/
csv(): string;
}
}
/**
* A line item in the cost estimate, including the resource usage metric
* measured and its pricing.
* @public
*/
export declare class CostMetric {
/** The name of the cost metric, e.g. `functionCallDuration` */
readonly name: string;
/** The price in USD per unit measured. */
readonly pricing: number;
/** The name of the units that pricing is measured in for this metric. */
readonly unit: string;
/** The measured value of the cost metric, in units. */
readonly measured: number;
/**
* The plural form of the unit name. By default the plural form will be the
* name of the unit with "s" appended at the end, unless the last letter is
* capitalized, in which case there is no plural form (e.g. "GB").
*/
readonly unitPlural?: string;
/**
* An optional comment, usually providing a link to the provider's pricing
* page and other data.
*/
readonly comment?: string;
/**
* True if this cost metric is only for informational purposes (e.g. AWS's
* `logIngestion`) and does not contribute cost.
*/
readonly informationalOnly?: boolean;
/** @internal */
constructor(arg: PropertiesExcept<CostMetric, AnyFunction>);
/**
* The cost contribution of this cost metric. Equal to
* {@link CostMetric.pricing} * {@link CostMetric.measured}.
*/
cost(): number;
/**
* Return a string with the cost estimate for this metric, omitting
* comments.
*/
describeCostOnly(): string;
/** Describe this cost metric, including comments. */
toString(): string;
}
/**
* A summary of the costs incurred by a faast.js module at a point in time.
* Output of {@link FaastModule.costSnapshot}.
* @remarks
* Cost information provided by faast.js is an estimate. It is derived from
* internal faast.js measurements and not by consulting data provided by your
* cloud provider.
*
* **Faast.js does not guarantee the accuracy of cost estimates.**
*
* **Use at your own risk.**
*
* Example using AWS:
* ```typescript
* const faastModule = await faast("aws", m);
* try {
* // Invoke faastModule.functions.*
* } finally {
* await faastModule.cleanup();
* console.log(`Cost estimate:`);
* console.log(`${await faastModule.costSnapshot()}`);
* }
* ```
*
* AWS example output:
* ```text
* Cost estimate:
* functionCallDuration $0.00002813/second 0.6 second $0.00001688 68.4% [1]
* sqs $0.00000040/request 9 requests $0.00000360 14.6% [2]
* sns $0.00000050/request 5 requests $0.00000250 10.1% [3]
* functionCallRequests $0.00000020/request 5 requests $0.00000100 4.1% [4]
* outboundDataTransfer $0.09000000/GB 0.00000769 GB $0.00000069 2.8% [5]
* logIngestion $0.50000000/GB 0 GB $0 0.0% [6]
* ---------------------------------------------------------------------------------------
* $0.00002467 (USD)
*
* * Estimated using highest pricing tier for each service. Limitations apply.
* ** Does not account for free tier.
* [1]: https://aws.amazon.com/lambda/pricing (rate = 0.00001667/(GB*second) * 1.6875 GB = 0.00002813/second)
* [2]: https://aws.amazon.com/sqs/pricing
* [3]: https://aws.amazon.com/sns/pricing
* [4]: https://aws.amazon.com/lambda/pricing
* [5]: https://aws.amazon.com/ec2/pricing/on-demand/#Data_Transfer
* [6]: https://aws.amazon.com/cloudwatch/pricing/ - Log ingestion costs not currently included.
* ```
*
* A cost snapshot contains several {@link CostMetric} values. Each `CostMetric`
* summarizes one component of the overall cost of executing the functions so
* far. Some cost metrics are common to all faast providers, and other metrics
* are provider-specific. The common metrics are:
*
* - `functionCallDuration`: the estimated billed CPU time (rounded to the next
* 100ms) consumed by completed cloud function calls. This is the metric that
* usually dominates cost.
*
* - `functionCallRequests`: the number of invocation requests made. Most
* providers charge for each invocation.
*
* Provider-specific metrics vary. For example, AWS has the following additional
* metrics:
*
* - `sqs`: AWS Simple Queueing Service. This metric captures the number of
* queue requests made to insert and retrieve queued results (each 64kb chunk
* is counted as an additional request). SQS is used even if
* {@link CommonOptions.mode} is not set to `"queue"`, because it is necessary
* for monitoring cloud function invocations.
*
* - `sns`: AWS Simple Notification Service. SNS is used to invoke Lambda
* functions when {@link CommonOptions.mode} is `"queue"`.
*
* - `outboundDataTransfer`: an estimate of the network data transferred out
* from the cloud provider for this faast.js module. This estimate only counts
* data returned from cloud function invocations and infrastructure that
* faast.js sets up. It does not count any outbound data sent by your cloud
* functions that are not known to faast.js. Note that if you run faast.js on
* EC2 in the same region (see {@link AwsOptions.region}), then the data
* transfer costs will be zero (however, the cost snapshot will not include
* EC2 costs). Also note that if your cloud function transfers data from/to S3
* buckets in the same region, there is no cost as long as that data is not
* returned from the function.
*
* - `logIngestion`: this cost metric is always zero for AWS. It is present to
* remind the user that AWS charges for log data ingested by CloudWatch Logs
* that are not measured by faast.js. Log entries may arrive significantly
* after function execution completes, and there is no way for faast.js to
* know exactly how long to wait, therefore it does not attempt to measure
* this cost. In practice, if your cloud functions do not perform extensive
* logging on all invocations, log ingestion costs from faast.js are likely to
* be low or fall within the free tier.
*
* The Local provider has no extra metrics.
*
* Prices are retrieved dynamically from AWS and cached locally.
* Cached prices expire after 24h. For each cost metric, faast.js uses the
* highest price tier to compute estimated pricing.
*
* Cost estimates do not take free tiers into account.
* @public
*/
export declare class CostSnapshot {
/** The {@link Provider}, e.g. "aws" */
readonly provider: string;
/**
* The options used to initialize the faast.js module where this cost
* snapshot was generated.
*/
readonly options: CommonOptions | AwsOptions;
/** The function statistics that were used to compute prices. */
readonly stats: FunctionStats;
/**
* The cost metric components for this cost snapshot. See
* {@link CostMetric}.
*/
readonly costMetrics: CostMetric[];
/** @internal */
constructor(
/** The {@link Provider}, e.g. "aws" */
provider: string,
/**
* The options used to initialize the faast.js module where this cost
* snapshot was generated.
*/
options: CommonOptions | AwsOptions, stats: FunctionStats, costMetrics?: CostMetric[]);
/** Sum of cost metrics. */
total(): number;
/** A summary of all cost metrics and prices in this cost snapshot. */
toString(): string;
/**
* Comma separated value output for a cost snapshot.
* @remarks
* The format is "metric,unit,pricing,measured,cost,percentage,comment".
*
* Example output:
* ```text
* metric,unit,pricing,measured,cost,percentage,comment
* functionCallDuration,second,0.00002813,0.60000000,0.00001688,64.1% ,"https://aws.amazon.com/lambda/pricing (rate = 0.00001667/(GB*second) * 1.6875 GB = 0.00002813/second)"
* functionCallRequests,request,0.00000020,5,0.00000100,3.8% ,"https://aws.amazon.com/lambda/pricing"
* outboundDataTransfer,GB,0.09000000,0.00000844,0.00000076,2.9% ,"https://aws.amazon.com/ec2/pricing/on-demand/#Data_Transfer"
* sqs,request,0.00000040,13,0.00000520,19.7% ,"https://aws.amazon.com/sqs/pricing"
* sns,request,0.00000050,5,0.00000250,9.5% ,"https://aws.amazon.com/sns/pricing"
* logIngestion,GB,0.50000000,0,0,0.0% ,"https://aws.amazon.com/cloudwatch/pricing/ - Log ingestion costs not currently included."
* ```
*/
csv(): string;
/** @internal */
push(metric: CostMetric): void;
/**
* Find a specific cost metric by name.
* @returns a {@link CostMetric} if found, otherwise `undefined`.
*/
find(name: string): CostMetric | undefined;
}
declare interface CpuMeasurement {
stime: number;
utime: number;
elapsed: number;
}
declare interface CpuMetricsMessage {
kind: "cpumetrics";
callId: CallId_2;
metrics: CpuMeasurement;
}
declare class Deferred<T> {
promise: Promise<T>;
resolve: (arg: T | PromiseLike<T>) => void;
reject: (err?: any) => void;
constructor();
}
/**
* A function return value with additional detailed information.
* @public
*/
export declare interface Detail<R> {
/**
* The function's return value.
*/
value: R;
/**
* The URL of the logs for the specific execution of this function call.
* @remarks
* This is different from the general logUrl from
* {@link FaastModule.logUrl}, which provides a link to the logs for all
* invocations of all functions within that module, whereas this logUrl is
* only for this specific invocation.
*/
logUrl?: string;
/**
* If available, the provider-specific execution identifier for this
* invocation.
* @remarks
* This ID may be added to the log entries for this invocation by the cloud
* provider.
*/
executionId?: string;
/**
* If available, the provider-specific instance identifier for this
* invocation.
* @remarks
* This ID refers to the specific container or VM used to execute this
* function invocation. The instance may be reused across multiple
* invocations.
*/
instanceId?: string;
}
declare type ErrorCallback = (err: Error) => Error;
declare interface Executor {
wrapper: Wrapper;
logUrl: string;
logStream?: Writable;
}
declare type ExtractPropertyNamesExceptType<T, U> = {
[K in keyof T]: T[K] extends U ? never : K;
}[keyof T];
/**
* The main entry point for faast with any provider and only common options.
* @param provider - One of `"aws"` or `"local"`. See
* {@link Provider}.
* @param fmodule - A module imported with `import * as X from "Y";`. Using
* `require` also works but loses type information.
* @param options - See {@link CommonOptions}.
* @returns See {@link FaastModule}.
* @remarks
* Example of usage:
* ```typescript
* import { faast } from "faastjs";
* import * as mod from "./path/to/module";
* (async () => {
* const faastModule = await faast("aws", mod);
* try {
* const result = await faastModule.functions.func("arg");
* } finally {
* await faastModule.cleanup();
* }
* })();
* ```
* @public
*/
export declare function faast<M extends object>(provider: Provider, fmodule: M, options?: CommonOptions): Promise<FaastModule<M>>;
/**
* The main entry point for faast with AWS provider.
* @param fmodule - A module imported with `import * as X from "Y";`. Using
* `require` also works but loses type information.
* @param options - Most common options are in {@link CommonOptions}.
* Additional AWS-specific options are in {@link AwsOptions}.
* @public
*/
export declare function faastAws<M extends object>(fmodule: M, options?: AwsOptions): Promise<AwsFaastModule<M>>;
/**
* FaastError is a subclass of VError (https://github.com/joyent/node-verror)
* that is thrown by faast.js APIs and cloud function invocations.
* @remarks
* `FaastError` is a subclass of
* {@link https://github.com/joyent/node-verror | VError}, which provides an API
* for nested error handling. The main API is the same as the standard Error
* class, namely the err.message, err.name, and err.stack properties.
*
* Several static methods on {@link FaastError} are inherited from VError:
*
* FaastError.fullStack(err) - returns a more detailed stack trace
* that includes stack traces of causes in the causal chain.
*
* FaastError.info(err) - returns an object with fields `functionName`, `args`,
* and `logUrl`. The `logUrl` property is a URL pointing to the logs for a
* specific invocation that caused the error.`logUrl` will be surrounded by
* whitespace on both sides to ease parsing as a URL by IDEs.
*
* FaastError.hasCauseWithName(err, cause) - returns true if the FaastError or
* any of its causes includes an error with the given name, otherwise false. All
* of the available names are in the enum {@link FaastErrorNames}. For example,
* to detect if a FaastError was caused by a cloud function timeout:
*
* ```typescript
* FaastError.hasCauseWithName(err, FaastErrorNames.ETIMEOUT)
* ```
*
* FaastError.findCauseByName(err, cause) - like FaastError.hasCauseWithName()
* except it returns the Error in the causal chain with the given name, or
* null if no such cause exists.
*
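* A sketch of handling a timeout from a cloud function call (the function
* name is hypothetical):
* ```typescript
* try {
*     await faastModule.functions.longRunningTask();
* } catch (err) {
*     if (FaastError.hasCauseWithName(err, FaastErrorNames.ETIMEOUT)) {
*         console.log("cloud function timed out");
*     } else {
*         throw err;
*     }
* }
* ```
*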
* @public
*/
export declare class FaastError extends VError {
}
/**
* Possible FaastError names. See {@link FaastError}. To test for errors
* matching these names, use the static method
* {@link FaastError}.hasCauseWithName().
* @public
*/
export declare enum FaastErrorNames {
/** Generic error. See {@link FaastError}. */
EGENERIC = "VError",
/** The arguments passed to the cloud function could not be serialized without losing information. */
ESERIALIZE = "FaastSerializationError",
/** The remote cloud function timed out. */
ETIMEOUT = "FaastTimeoutError",
/** The remote cloud function exceeded memory limits. */
EMEMORY = "FaastOutOfMemoryError",
/** The function invocation was cancelled by user request. */
ECANCEL = "FaastCancelError",
/** The exception was thrown by user's remote code, not by faast.js or the cloud provider. */
EEXCEPTION = "UserException",
/** Could not create the remote cloud function or supporting infrastructure. */
ECREATE = "FaastCreateFunctionError",
/** The remote cloud function failed to execute because of limited concurrency. */
ECONCURRENCY = "FaastConcurrencyError"
}
/**
* The main entry point for faast with Local provider.
* @param fmodule - A module imported with `import * as X from "Y";`. Using
* `require` also works but loses type information.
* @param options - Most common options are in {@link CommonOptions}.
* Additional Local-specific options are in {@link LocalOptions}.
* @returns a Promise for {@link LocalFaastModule}.
* @public
*/
export declare function faastLocal<M extends object>(fmodule: M, options?: LocalOptions): Promise<LocalFaastModule<M>>;
/**
* The main interface for invoking, cleaning up, and managing faast.js cloud
* functions. Returned by {@link faast}.
* @public
*/
export declare interface FaastModule<M extends object> {
/** See {@link Provider}. */
provider: Provider;
/**
* Each call of a cloud function creates a separate remote invocation.
* @remarks
* The module passed into {@link faast} or its provider-specific variants
* ({@link faastAws} and {@link faastLocal}) is mapped
* to a {@link ProxyModule} version of the module, which performs the
* following mapping:
*
* - All function exports that are generators are mapped to async
* generators.
*
* - All function exports that return async generators are preserved as-is.
*
* - All function exports that return promises have their type signatures
* preserved as-is.
*
* - All function exports that return type T, where T is not a Promise,
* Generator, or AsyncGenerator, are mapped to functions that return
* Promise<T>. Argument types are preserved as-is.
*
* - All non-function exports are omitted in the remote module.
*
* Arguments and return values are serialized with `JSON.stringify` when
* cloud functions are called, therefore what is received on the remote side
* might not match what was sent. Faast.js attempts to detect nonsupported
* arguments on a best effort basis.
*
* If the cloud function throws an exception or rejects its promise with an
* instance of `Error`, then the function will reject with
* {@link FaastError} on the local side. If the exception or rejection
* resolves to any value that is not an instance of `Error`, the remote
* function proxy will reject with the value of
* `JSON.parse(JSON.stringify(err))`.
*
* Arguments and return values have size limitations that vary by provider
* and mode:
*
* - AWS: 256KB in queue mode, 6MB arguments and 256KB return values in https mode. See
* {@link https://docs.aws.amazon.com/lambda/latest/dg/limits.html | AWS Lambda Limits}.
*
* - Local: limited only by available memory and the limits of
* {@link https://nodejs.org/api/child_process.html#child_process_subprocess_send_message_sendhandle_options_callback | childprocess.send}.
*
* Note that payloads may be base64 encoded for some providers and therefore
* different in size than the original payload. Also, some bookkeeping data
* are passed along with arguments and contribute to the size limit.
*/
functions: ProxyModule<M>;
/**
* Similar to {@link FaastModule.functions} except each function returns a
* {@link Detail} object.
* @remarks
* Advanced users of faast.js may want more information about each function
* invocation than simply the result of the function call. For example, the
* specific logUrl for each invocation, to help with detailed debugging.
* This interface provides a way to get this detailed information.
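*
* For example (a sketch; `anyFunction` is a placeholder for one of your
* module's functions):
* ```typescript
* const detail = await faastModule.functionsDetail.anyFunction("arg");
* console.log(detail.value, detail.logUrl, detail.executionId);
* ```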
*/
functionsDetail: ProxyModuleDetail<M>;
/**
* Stop the faast.js runtime for this cloud function and clean up ephemeral
* cloud resources.
* @returns a Promise that resolves when the `FaastModule` runtime stops and
* ephemeral resources have been deleted.
* @remarks
* It is best practice to always call `cleanup` when done with a cloud
* function. A typical way to ensure this in normal execution is to use the
* `finally` construct:
*
* ```typescript
* const faastModule = await faast("aws", m);
* try {
* // Call faastModule.functions.*
* } finally {
* // Note the `await`
* await faastModule.cleanup();
* }
* ```
*
* After the cleanup promise resolves, the cloud function instance can no
* longer invoke new calls on {@link FaastModule.functions}. However, other
* methods on {@link FaastModule} are safe to call, such as
* {@link FaastModule.costSnapshot}.
*
* Cleanup also stops statistics events (See {@link FaastModule.off}).
*
* By default, cleanup will delete all ephemeral cloud resources but leave
* behind cached resources for use by future cloud functions. Deleted
* resources typically include cloud functions, queues, and queue
* subscriptions. Logs are not deleted by cleanup.
*
* Note that `cleanup` leaves behind some provider-specific resources:
*
* - AWS: Cloudwatch logs are preserved until the garbage collector in a
* future cloud function instance deletes them. The default log expiration
* time is 24h (or the value of {@link CommonOptions.retentionInDays}). In
* addition, the AWS Lambda IAM role is not deleted by cleanup. This role
* is shared across cloud function instances. Lambda layers are also not
* cleaned up immediately on AWS when {@link CommonOptions.packageJson} is
* used and {@link CommonOptions.useDependencyCaching} is true. Cached
* layers are cleaned up by garbage collection. Also see
* {@link CleanupOptions.deleteCaches}.
*
* - Local: Logs are preserved in a temporary directory on local disk.
* Garbage collection in a future cloud function instance will delete logs
* older than 24h.
*/
cleanup(options?: CleanupOptions): Promise<void>;
/**
* The URL of logs generated by this cloud function.
* @remarks
* Logs are not automatically downloaded because they cause outbound data
* transfer, which can be expensive. Also, logs may arrive at the logging
* service well after the cloud functions have completed. This log URL
* specifically filters the logs for this cloud function instance.
* Authentication is required to view cloud provider logs.
*
* The local provider returns a `file://` url pointing to a file for logs.
*/
logUrl(): string;
/**
* Register a callback for statistics events.
* @remarks
* The callback is invoked once for each cloud function that was invoked
* within the last 1s interval, with a {@link FunctionStatsEvent}
* summarizing the statistics for each function. Typical usage:
*
* ```typescript
* faastModule.on("stats", console.log);
* ```
*/
on(name: "stats", listener: (statsEvent: FunctionStatsEvent) => void): void;
/**
* Deregister a callback for statistics events.
* @remarks
* Stops the callback listener from receiving future function statistics
* events. Calling {@link FaastModule.cleanup} also turns off statistics
* events.
*/
off(name: "stats", listener: (statsEvent: FunctionStatsEvent) => void): void;
/**
* Get a near real-time cost estimate of cloud function invocations.
* @returns a Promise for a {@link CostSnapshot}.
* @remarks
* A cost snapshot provides a near real-time estimate of the costs of the
* cloud functions invoked. The cost estimate only includes the cost of
* successfully completed calls. Unsuccessful calls may lack the data
* required to provide cost information. Calls that are still in flight are
* not included in the cost snapshot. For this reason, it is typically a
* good idea to get a cost snapshot after awaiting the result of
* {@link FaastModule.cleanup}.
*
* Code example:
*
* ```typescript
* const faastModule = await faast("aws", m);
* try {
* // invoke cloud functions on faastModule.functions.*
* } finally {
* await faastModule.cleanup();
* const costSnapshot = await faastModule.costSnapshot();
* console.log(costSnapshot);
* }
* ```
*/
costSnapshot(): Promise<CostSnapshot>;
/**
* Statistics for a specific function or the entire faast.js module.
*
* @param functionName - The name of the function to retrieve statistics
* for. If the function does not exist or has not been invoked, a new
* instance of {@link FunctionStats} is returned with zero values. If
* `functionName` is omitted (undefined), then aggregate statistics are
* returned that summarize all cloud functions within this faast.js module.
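*
* For example (a sketch; the function name is hypothetical):
* ```typescript
* // Aggregate statistics for the entire module:
* console.log(`${faastModule.stats()}`);
* // Statistics for a single function:
* console.log(`${faastModule.stats("processItem")}`);
* ```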
* @returns a snapshot of {@link FunctionStats} at a point in time.
*/
stats(functionName?: string): FunctionStats;
}
/**
* Implementation of {@link FaastModule}.
* @remarks
* `FaastModuleProxy` provides a unified developer experience for faast.js
* modules on top of provider-specific runtime APIs. Most users will not create
* `FaastModuleProxy` instances themselves; instead use {@link faast}, or
* {@link faastAws} or {@link faastLocal}.
* `FaastModuleProxy` implements the {@link FaastModule} interface, which is the
* preferred public interface for faast modules. `FaastModuleProxy` can be used
* to access provider-specific details and state, and is useful for deeper
* testing.
* @public
*/
export declare class FaastModuleProxy implements FaastModule {
private impl;
/** @internal */
readonly state: S;
private fmodule;
private modulePath;
/** The options set for this instance, which includes default values. */
readonly options: Required;
/** The {@link Provider}, e.g. "aws". */
provider: Provider;
/** {@inheritdoc FaastModule.functions} */
functions: ProxyModule;
/** {@inheritdoc FaastModule.functionsDetail} */
functionsDetail: ProxyModuleDetail;
/** @internal */
private _stats;
private _cpuUsage;
private _memoryLeakDetector;
private _funnel;
private _rateLimiter?;
private _skew;
private _statsTimer?;
private _cleanupHooks;
private _initialInvocationTime;
private _callResultsPending;
private _collectorPump;
private _emitter;
/**
* Constructor
* @internal
*/
constructor(impl: ProviderImpl,
/** @internal */
state: S, fmodule: M, modulePath: string,
/** The options set for this instance, which includes default values. */
options: Required);
/** {@inheritdoc FaastModule.cleanup} */
cleanup(userCleanupOptions?: CleanupOptions): Promise<void>;
/** {@inheritdoc FaastModule.logUrl} */
logUrl(): string;
private startStats;
private stopStats;
/** {@inheritdoc FaastModule.on} */
on(name: "stats", listener: (statsEvent: FunctionStatsEvent) => void): void;
/** {@inheritdoc FaastModule.off} */
off(name: "stats", listener: (statsEvent: FunctionStatsEvent) => void): void;
private withCancellation;
private processResponse;
private invoke;
private lookupFname;
private createCallId;
private wrapGenerator;
private clearPending;
private wrapFunction;
/** {@inheritdoc FaastModule.costSnapshot} */
costSnapshot(): Promise<CostSnapshot>;
/** {@inheritdoc FaastModule.stats} */
stats(functionName?: string): FunctionStats;
private resultCollector;
private adjustCollectorConcurrencyLevel;
}
declare interface FunctionCall extends CallId {
args: string;
modulePath: string;
name: string;
ResponseQueueId: string;
}
declare interface FunctionStartedMessage {
kind: "functionstarted";
callId: CallId_2;
}
/**
* Summary statistics for function invocations.
* @remarks
* ```
* localStartLatency remoteStartLatency executionTime
* ◀──────────────────▶◁ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ▷◀──────────▶
*
* ┌───────────────────────────────────┬──────────────────────────────────────┐
* │ │ │
* │ Local │ Cloud Provider │
* │ │ │
* │ ┌─────────┐ │ ┌──────────┐ ┌──────────┐ │
* │ │ │ │ │ │ │ │ │
* │ │ local │ │ │ request │ │ │ │
* │ invoke ────────▶│ queue │────┼──▶│ queue ├────────▶│ │ │
* │ │ │ │ │ │ │ │ │
* │ └─────────┘ │ └──────────┘ │ cloud │ │
* │ │ │ function │ │
* │ ┌─────────┐ │ ┌──────────┐ │ │ │
* │ │ │ │ │ │ │ │ │
* │ result ◀────────│ local │◀───┼───│ response │◀────────│ │ │
* │ │ polling │ │ │ queue │ │ │ │
* │ │ │ │ │ │ │ │ │
* │ └─────────┘ │ └──────────┘ └──────────┘ │
* │ │ │
* └───────────────────────────────────┴──────────────────────────────────────┘
*
* ◁ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ▷
* returnLatency ◀───────▶
* sendResponseLatency
* ```
*
* `localStartLatency` and `executionTime` are measured on one machine and are
* free of clock skew. `remoteStartLatency` and `returnLatency` are measured as
* time differences between machines and are subject to much more uncertainty
* and to effects like clock skew.
*
* All times are in milliseconds.
*
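* For example, a sketch of inspecting the latency breakdown, assuming
* `faastModule` is an initialized faast.js module:
*
* ```typescript
* const stats = faastModule.stats();
* console.log(`mean execution time: ${stats.executionTime.mean}ms`);
* console.log(`mean remote start latency: ${stats.remoteStartLatency.mean}ms`);
* ```
*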
* @public
*/
export declare class FunctionStats {
/**
* Statistics for how long invocations stay in the local queue before being
* sent to the cloud provider.
*/
localStartLatency: Statistics;
/**
* Statistics for how long requests take to start execution after being sent
* to the cloud provider. This typically includes remote queueing and cold
* start times. Because this measurement requires comparing timestamps from
* different machines, it is subject to clock skew and other effects, and
* should not be considered highly accurate. It can be useful for detecting
* excessively high latency problems. Faast.js attempts to correct for clock
* skew heuristically.
*/
remoteStartLatency: Statistics;
/**
* Statistics for function execution time in milliseconds. This is measured
* as wall clock time inside the cloud function, and does not include the
* time taken to send the response to the response queue. Note that most
* cloud providers round up to the next 100ms for pricing.
*/
executionTime: Statistics;
/**
* Statistics for how long it takes to send the response to the response
* queue.
*/
sendResponseLatency: Statistics;
/**
* Statistics for how long it takes to return a response from the end of
* execution time to the receipt of the response locally. This measurement
* requires comparing timestamps from different machines, and is subject to
* clock skew and other effects. It should not be considered highly
* accurate. It can be useful for detecting excessively high latency
* problems. Faast.js attempts to correct for clock skew heuristically.
*/
returnLatency: Statistics;
/**
* Statistics for amount of time billed. This is similar to
* {@link FunctionStats.executionTime} except each sampled time is rounded
* up to the next 100ms.
*/
estimatedBilledTime: Statistics;
/**
* The number of invocations attempted. If an invocation is retried, this
* only counts the invocation once.
*/
invocations: number;
/**
* The number of invocations that were successfully completed.
*/
completed: number;
/**
* The number of invocation retries attempted. This counts retries
* attempted by faast.js to recover from transient errors, but does not
* count retries by the cloud provider.
*/
retries: number;
/**
* The number of invocations that resulted in an error. If an invocation is
* retried, an error is only counted once, no matter how many retries were
* attempted.
*/
errors: number;
/**
* Summarize the function stats as a string.
* @returns a string showing the value of completed, retries, errors, and
* mean execution time. This string excludes the invocation count because it
* is often a fixed, known quantity.
*/
toString(): string;
/** @internal */
clone(): FunctionStats;
}
/**
* Summarize statistics about cloud function invocations.
* @public
*/
export declare class FunctionStatsEvent {
/** The name of the cloud function the statistics are about. */
readonly fn: string;
/** See {@link FunctionStats}. */
readonly stats: FunctionStats;
/**
* @internal
*/
constructor(
/** The name of the cloud function the statistics are about. */
fn: string,
/** See {@link FunctionStats}. */
stats: FunctionStats);
/**
* Returns a string summarizing the statistics event.
* @remarks
* The string includes the number of completed calls, errors, and retries, and
* the mean execution time for the calls that completed within the last time
* interval (1s).
*/
toString(): string;
}
/**
* Options for the {@link CommonOptions.include} option.
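* @remarks
* A sketch of how an `IncludeOption` might be passed to faast.js; the module
* `m` and the paths shown are illustrative:
*
* ```typescript
* const faastModule = await faast("aws", m, {
*     include: [{ path: "bar", cwd: "foo" }]
* });
* ```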
* @public
*/
export declare interface IncludeOption {
/**
* The path to the directory or glob to add to the cloud function.
*/
path: string;
/**
* The working directory if `path` is relative. Defaults to `process.cwd()`.
* For example, if `cwd` is `"foo"` and `path` is `"bar"`, then the
* contents of the directory `foo/bar/` will be added to the remote
* function under the path `bar/`.
*/
cwd?: string;
}
declare interface IteratorResponseMessage extends ResponseContext {
kind: "iterator";
sequence: number;
}
/**
* Specify {@link throttle} limits. These limits shape the way throttle invokes
* the underlying function.
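* @remarks
* For example, a sketch of a `Limits` value (the numbers are illustrative):
*
* ```typescript
* const limits: Limits = { concurrency: 5, rate: 10, burst: 5 };
* ```
*
* Such a value is passed as the first argument to {@link throttle}.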
* @public
*/
export declare interface Limits {
/**
* The maximum number of concurrent executions of the underlying function to
* allow. Must be supplied; there is no default. Specifying `0` or
* `Infinity` is allowed and means there is no concurrency limit.
*/
concurrency: number;
/**
* The maximum number of calls per second to allow to the underlying
* function. Default: no rate limit.
*/
rate?: number;
/**
* The maximum number of calls to the underlying function to "burst" -- e.g.
* the number that can be issued immediately as long as the rate limit is
* not exceeded. For example, if rate is 5 and burst is 5, and 10 calls are
* made to the throttled function, 5 calls are made immediately and then
* after 1 second, another 5 calls are made immediately. Setting burst to 1
* means calls are issued uniformly every `1/rate` seconds. If `rate` is not
* specified, then `burst` does not apply. Default: 1.
*/
burst?: number;
/**
* Retry if the throttled function returns a rejected promise. `retry` can
* be a number or a function. If it is a number `N`, then up to `N`
* additional attempts are made in addition to the initial call. If retry is
* a function, it should return `true` if another retry attempt should be
* made, otherwise `false`. The first argument will be the value of the
* rejected promise from the previous call attempt, and the second argument
* will be the number of previous retry attempts (e.g. the first call will
* have value 0). Default: 0 (no retry attempts).
*/
retry?: number | ((err: any, retries: number) => boolean);
/**
* If `memoize` is `true`, then every call to the throttled function will be
* saved as an entry in a map from arguments to return value. If the same
* arguments are seen again in a future call, the return value is retrieved
* from the map rather than calling the function again. This can be useful
* for avoiding redundant calls that are expected to return the same results
* given the same arguments.
*
* The arguments will be captured with `JSON.stringify`, therefore types
* that do not stringify uniquely won't be distinguished from each other.
* Care must be taken when specifying `memoize` to avoid incorrect
* results.
*/
memoize?: boolean;
/**
* Similar to `memoize` except the map from function arguments to results is
* stored in a persistent cache on disk. This is useful to prevent redundant
* calls to APIs which are expected to return the same results for the same
* arguments, and which are likely to be called across many faast.js module
* instantiations. This is used internally by faast.js for caching cloud
* prices for AWS, and for saving the last garbage collection
* date for AWS. Persistent cache entries expire after a period of time. See
* {@link PersistentCache}.
*/
cache?: PersistentCache;
/**
* A promise that, if resolved, causes cancellation of pending throttled
* invocations. This is typically created using `Deferred`. The idea is to
* use the resolving of the promise as an asynchronous signal that any
* pending invocations in this throttled function should be cleared.
* @internal
*/
cancel?: Promise;
}
/**
* The return type of {@link faastLocal}. See {@link FaastModuleProxy}.
* @public
*/
export declare type LocalFaastModule = FaastModuleProxy;
/**
* Local provider options for {@link faastLocal}.
*
* @public
*/
export declare interface LocalOptions extends CommonOptions {
/** @internal */
_gcWorker?: (tempdir: string) => Promise;
}
/**
* @public
*/
declare interface LocalState {
/** @internal */
executors: Executor[];
/** @internal */
getExecutor: () => Promise;
/** The temporary directory where the local function is deployed. */
tempDir: string;
/** The file:// URL for the local function log file directory. */
logUrl: string;
/** @internal */
gcPromise?: Promise;
/** @internal */
queue: AsyncQueue;
/** Options used to initialize the local function. */
options: Required;
}
/**
* Faast.js loggers.
* @remarks
* Unless otherwise specified, each log is disabled by default; it can be
* enabled by setting the DEBUG environment variable to the corresponding
* value. For example:
*
* ```
* $ DEBUG=faast:info,faast:provider
* $ DEBUG=faast:*
* ```
*
* Logs can also be enabled or disabled programmatically:
* ```typescript
* import { log } from "faastjs"
* log.info.enabled = true;
* log.provider.enabled = true;
* ```
*
* Each log outputs specific information:
*
* `info` - General informational logging.
*
* `minimal` - Outputs only basic information like the function name created in
* the cloud.
*
* `warn` - Warnings. Enabled by default.
*
* `gc` - Garbage collection verbose logging.
*
* `leaks` - Memory leak detector warnings for the cloud function. Enabled by
* default.
*
* `calls` - Verbose logging of each faast.js enabled function invocation.
*
* `webpack` - Verbose logging from webpack and packaging details.
*
* `provider` - Verbose logging of each interaction between the faast.js
* runtime and the provider-specific implementation.
*
* `awssdk` - Verbose logging of the AWS SDK. This can be useful for identifying
* which API calls are failing, retrying, or encountering rate limits.
*
* @public
*/
export declare const log: {
info: debug_2.Debugger;
minimal: debug_2.Debugger;
warn: debug_2.Debugger;
gc: debug_2.Debugger;
leaks: debug_2.Debugger;
calls: debug_2.Debugger;
webpack: debug_2.Debugger;
provider: debug_2.Debugger;
awssdk: debug_2.Debugger;
};
declare type Message = PromiseResponseMessage | IteratorResponseMessage | FunctionStartedMessage | CpuMetricsMessage;
declare type MessageCallback = (msg: Message) => Promise;
declare interface ModuleType {
[name: string]: any;
}
/**
* A simple persistent key-value store. Used to implement {@link Limits.cache}
* for {@link throttle}.
* @remarks
* Entries can be expired, but are not actually deleted individually. The entire
* cache can be deleted at once. Hence this cache is useful for storing results
* that are expensive to compute but do not change often (e.g. the
* node_modules folder from an 'npm install' where 'package.json' is not
* expected to change often).
*
* By default faast.js will use the directory `~/.faastjs` as a local cache to
* store data such as pricing retrieved from cloud APIs, and garbage collection
* information. This directory can be safely deleted if no faast.js instances
* are running.
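*
* A minimal usage sketch (the directory name is illustrative):
*
* ```typescript
* const cache = new PersistentCache(".faastjs/example-cache");
* await cache.set("key", "value");
* const entry = await cache.get("key"); // the stored value, or undefined
* ```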
* @public
*/
export declare class PersistentCache {
/**
* The directory under the user's home directory that will be used to
* store cached values. The directory will be created if it doesn't
* exist.
*/
readonly dirRelativeToHomeDir: string;
/**
* The age (in ms) after which a cached entry is invalid. Default:
* `24*3600*1000` (1 day).
*/
readonly expiration: number;
private initialized;
private initialize;
/**
* The directory on disk where cached values are stored.
*/
readonly dir: string;
/**
* Construct a new persistent cache, typically used with {@link Limits} as
* part of the arguments to {@link throttle}.
* @param dirRelativeToHomeDir - The directory under the user's home
* directory that will be used to store cached values. The directory will be
* created if it doesn't exist.
* @param expiration - The age (in ms) after which a cached entry is
* invalid. Default: `24*3600*1000` (1 day).
*/
constructor(
/**
* The directory under the user's home directory that will be used to
* store cached values. The directory will be created if it doesn't
* exist.
*/
dirRelativeToHomeDir: string,
/**
* The age (in ms) after which a cached entry is invalid. Default:
* `24*3600*1000` (1 day).
*/
expiration?: number);
private hash;
/**
* Retrieves the value previously set for the given key, or undefined if the
* key is not found.
*/
get(key: string): Promise;
/**
* Set the cache key to the given value.
* @returns a Promise that resolves when the cache entry has been persisted.
*/
set(key: string, value: Buffer | string | Uint8Array): Promise;
/**
* Retrieve all keys stored in the cache, including expired entries.
*/
entries(): Promise;
/**
* Deletes all cached entries from disk.
* @param leaveEmptyDir - If true, leave the cache directory in place after
* deleting its contents. If false, the cache directory will be removed.
* Default: `true`.
*/
clear({ leaveEmptyDir }?: {
leaveEmptyDir?: boolean | undefined;
}): Promise;
}
declare interface PollResult {
Messages: Message[];
isFullMessageBatch?: boolean;
}
declare interface PromiseResponseMessage extends ResponseContext {
kind: "promise";
}
declare type PropertiesExcept = Pick>;
/**
* The type of all supported cloud providers.
* @public
*/
export declare type Provider = "aws" | "local";
declare interface ProviderImpl {
name: Provider;
defaults: Required;
initialize(serverModule: string, nonce: UUID, options: Required): Promise;
costSnapshot(state: S, stats: FunctionStats): Promise;
cleanup(state: S, options: Required): Promise;
logUrl(state: S): string;
invoke(state: S, request: FunctionCall, cancel: Promise): Promise;
poll(state: S, cancel: Promise): Promise;
responseQueueId(state: S): string;
}
/**
* An array of all available providers.
* @public
*/
export declare const providers: Provider[];
/**
* `ProxyModule` is the type of {@link FaastModule.functions}.
* @remarks
* `ProxyModule` maps an imported module's functions to promise-returning or
* async-iterable versions of those functions. Non-function exports of the
* module are omitted. When invoked, the functions in a `ProxyModule` invoke a
* remote cloud function.
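*
* For example (a sketch), given a functions module:
*
* ```typescript
* // functions.ts
* export function hello(name: string) {
*     return `hello ${name}`;
* }
* ```
*
* the corresponding `ProxyModule` exposes `hello` as
* `(name: string) => Promise<string>` on {@link FaastModule.functions}.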
* @public
*/
export declare type ProxyModule<M> = {
[K in keyof M]: M[K] extends (...args: infer A) => infer R ? (...args: A) => Async<R> : never;
};
/**
* Similar to {@link ProxyModule} except each function returns a {@link Detail}
* object.
* @remarks
* See {@link FaastModule.functionsDetail}.
* @public
*/
export declare type ProxyModuleDetail<M> = {
[K in keyof M]: M[K] extends (...args: infer A) => infer R ? (...args: A) => AsyncDetail<R> : never;
};
declare interface ResponseContext {
type: "fulfill" | "reject";
value: string;
callId: CallId_2;
isErrorObject?: boolean;
remoteExecutionStartTime?: number;
remoteExecutionEndTime?: number;
logUrl?: string;
instanceId?: string;
executionId?: string;
memoryUsage?: NodeJS.MemoryUsage;
timestamp?: number;
}
/**
* Incrementally updated statistics on a set of values.
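* @remarks
* A minimal sketch of direct use (faast.js itself creates `Statistics`
* instances, for example within {@link FunctionStats}):
*
* ```typescript
* const latency = new Statistics();
* [10, 20, 30].forEach(sample => latency.update(sample));
* console.log(latency.mean, latency.stdev, latency.toString());
* ```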
* @public
*/
export declare class Statistics {
/** The number of decimal places to print in {@link Statistics.toString} */
protected printFixedPrecision: number;
/** Number of values observed. */
samples: number;
/** The maximum value observed. Initialized to `Number.NEGATIVE_INFINITY`. */
max: number;
/** The minimum value observed. Initialized to `Number.POSITIVE_INFINITY`. */
min: number;
/** The variance of the values observed. */
variance: number;
/** The standard deviation of the values observed. */
stdev: number;
/** The mean (average) of the values observed. */
mean: number;
/**
* Incrementally track the mean, stdev, min, and max of a sequence of values.
* @param printFixedPrecision - The number of decimal places to print in
* {@link Statistics.toString}.
*/
constructor(
/** The number of decimal places to print in {@link Statistics.toString} */
printFixedPrecision?: number);
/** @internal */
clone(): Statistics & this;
/**
* Update statistics with a new value in the sequence.
*/
update(value: number | undefined): void;
/**
* Print the mean of the observations seen, with the precision specified in
* the constructor.
*/
toString(): string;
}
/**
* A decorator for rate limiting, concurrency limiting, retry, memoization, and
* on-disk caching. See {@link Limits}.
* @remarks
* When programming against cloud services, databases, and other resources, it
* is often necessary to control the rate of request issuance to avoid
* overwhelming the service provider. In many cases the provider has built-in
* safeguards against abuse, which automatically fail requests if they are
* coming in too fast. Some systems don't have safeguards and precipitously
* degrade their service level or fail outright when faced with excessive load.
*
* With faast.js it becomes very easy to (accidentally) generate requests from
* thousands of cloud functions. The `throttle` function can help manage request
* flow without resorting to setting up a separate service. This is in keeping
* with faast.js' zero-ops philosophy.
*
* Usage is simple:
*
* ```typescript
* async function operation() { ... }
* const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
* for (let i = 0; i < 100; i++) {
*     // at most 10 concurrent executions at a rate of 5 invocations per second.
*     throttledOperation();
* }
* ```
*
* Note that each invocation of `throttle` creates a separate function with its
* own limits. Therefore you will usually want to use `throttle` in a
* global context, not within a dynamic context:
*
* ```typescript
* async function operation() { ... }
* for (let i = 0; i < 100; i++) {
*     // WRONG - each iteration creates a separate throttled function that's only called once.
*     const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
*     throttledOperation();
* }
* ```
*
* A better way to use throttle avoids creating a named `operation` function
* altogether, ensuring it cannot be accidentally called without throttling:
*
* ```typescript
* const operation = throttle({ concurrency: 10, rate: 5 }, async () => {
* ...
* });
* ```
*
* Throttle supports functions with arguments and automatically infers the
* correct type for the returned function:
*
* ```typescript
* // `operation` is inferred to have type (str: string) => Promise<string>
* const operation = throttle({ concurrency: 10, rate: 5 }, async (str: string) => {
*     return str;
* });
* ```
*
* In addition to limiting concurrency and invocation rate, `throttle` also
* supports retrying failed invocations, memoizing calls, and on-disk caching.
* See {@link Limits} for details.
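*
* For example, a sketch that retries failed calls up to 3 times and memoizes
* results for repeated arguments (the wrapped function is illustrative):
*
* ```typescript
* const lookup = throttle(
*     { concurrency: 5, rate: 10, retry: 3, memoize: true },
*     async (key: string) => `result for ${key}`
* );
* ```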
*
* @param limits - see {@link Limits}.
* @param fn - The function to throttle. It can take any arguments, but must
* return a Promise (which includes `async` functions).
* @returns Returns a throttled function with the same signature as the argument
* `fn`.
* @public
*/
export declare function throttle<A extends any[], R>(limits: Limits, fn: (...args: A) => Promise<R>): (...args: A) => Promise<R>;
declare type UUID = string;
declare class Wrapper {
executing: boolean;
selected: boolean;
protected verbose: boolean;
protected funcs: ModuleType;
protected child?: childProcess.ChildProcess;
protected childPid?: number;
protected log: (msg: string) => void;
protected queue: AsyncIterableQueue;
readonly options: Required;
protected monitoringTimer?: NodeJS.Timer;
constructor(fModule: ModuleType, options?: WrapperOptions);
protected lookupFunction(request: object): AnyFunction;
protected stopCpuMonitoring(): void;
protected startCpuMonitoring(pid: number, callId: string): void;
stop(): void;
execute(callingContext: CallingContext, { errorCallback, onMessage, measureCpuUsage }: WrapperExecuteOptions): Promise;
protected logLines: (msg: string) => void;
protected setupChildProcess(): childProcess.ChildProcess;
}
declare interface WrapperExecuteOptions {
errorCallback?: ErrorCallback;
onMessage: MessageCallback;
measureCpuUsage?: boolean;
}
declare interface WrapperOptions {
/**
* Logging function for console.log/warn/error output. Only available in
* child process mode. This is mainly useful for debugging the "local"
* mode which runs code locally. In real clouds the logs will end up in the
* cloud logging service (e.g. Cloudwatch Logs).
* Defaults to console.log.
*/
wrapperLog?: (msg: string) => void;
childProcess?: boolean;
childProcessMemoryLimitMb?: number;
childProcessTimeoutMs?: number;
childProcessEnvironment?: {
[key: string]: string;
};
childDir?: string;
wrapperVerbose?: boolean;
validateSerialization?: boolean;
}
export { }