import * as cdk from '@aws-cdk/core';
import { Construct } from 'constructs';
import { IEventSourceDlq } from './dlq';
import { IFunction } from './function-base';
/**
 * The type of authentication protocol or the VPC components for your event source's SourceAccessConfiguration
 * @see https://docs.aws.amazon.com/lambda/latest/dg/API_SourceAccessConfiguration.html#SSS-Type-SourceAccessConfiguration-Type
 */
export declare class SourceAccessConfigurationType {
    /**
     * (MQ) The Secrets Manager secret that stores your broker credentials.
     */
    static readonly BASIC_AUTH: SourceAccessConfigurationType;
    /**
     * The subnets associated with your VPC. Lambda connects to these subnets to fetch data from your Self-Managed Apache Kafka cluster.
     */
    static readonly VPC_SUBNET: SourceAccessConfigurationType;
    /**
     * The VPC security group used to manage access to your Self-Managed Apache Kafka brokers.
     */
    static readonly VPC_SECURITY_GROUP: SourceAccessConfigurationType;
    /**
     * The Secrets Manager ARN of your secret key used for SASL SCRAM-256 authentication of your Self-Managed Apache Kafka brokers.
     */
    static readonly SASL_SCRAM_256_AUTH: SourceAccessConfigurationType;
    /**
     * The Secrets Manager ARN of your secret key used for SASL SCRAM-512 authentication of your Self-Managed Apache Kafka brokers.
     */
    static readonly SASL_SCRAM_512_AUTH: SourceAccessConfigurationType;
    /**
     * The Secrets Manager ARN of your secret key containing the certificate chain (X.509 PEM), private key (PKCS#8 PEM),
     * and private key password (optional) used for mutual TLS authentication of your MSK/Apache Kafka brokers.
     */
    static readonly CLIENT_CERTIFICATE_TLS_AUTH: SourceAccessConfigurationType;
    /** A custom source access configuration property */
    static of(name: string): SourceAccessConfigurationType;
    /**
     * The key to use in `SourceAccessConfigurationProperty.Type` property in CloudFormation
     * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lambda-eventsourcemapping-sourceaccessconfiguration.html#cfn-lambda-eventsourcemapping-sourceaccessconfiguration-type
     */
    readonly type: string;
    private constructor();
}
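// Illustrative usage sketch (not part of this declaration file): the static members above cover
// the common protocols, and `of()` can name any other value accepted by the underlying
// CloudFormation `Type` field. The name 'VIRTUAL_HOST' below is only an assumed example.
//
//   const saslAuth = SourceAccessConfigurationType.SASL_SCRAM_512_AUTH;
//   const customType = SourceAccessConfigurationType.of('VIRTUAL_HOST');
//   console.log(saslAuth.type); // 'SASL_SCRAM_512_AUTH'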
/**
 * Specific settings like the authentication protocol or the VPC components to secure access to your event source.
 */
export interface SourceAccessConfiguration {
    /**
     * The type of authentication protocol or the VPC components for your event source. For example: "SASL_SCRAM_512_AUTH".
     */
    readonly type: SourceAccessConfigurationType;
    /**
     * The value for your chosen configuration in type.
     * For example: "URI": "arn:aws:secretsmanager:us-east-1:01234567890:secret:MyBrokerSecretName".
     * The exact string depends on the type.
     * @see SourceAccessConfigurationType
     */
    readonly uri: string;
}
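// Illustrative usage sketch (not part of this declaration file): a configuration pairs a `type`
// with the `uri` string that type expects. The secret ARN below is the same placeholder already
// used in the docstring above.
//
//   const brokerAuth: SourceAccessConfiguration = {
//     type: SourceAccessConfigurationType.SASL_SCRAM_512_AUTH,
//     uri: 'arn:aws:secretsmanager:us-east-1:01234567890:secret:MyBrokerSecretName',
//   };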
export interface EventSourceMappingOptions {
    /**
     * The Amazon Resource Name (ARN) of the event source. Any record added to
     * this stream can invoke the Lambda function.
     *
     * @default - not set if using a self-managed Kafka cluster, throws an error otherwise
     */
    readonly eventSourceArn?: string;
    /**
     * The largest number of records that AWS Lambda will retrieve from your event
     * source at the time of invoking your function. Your function receives an
     * event with all the retrieved records.
     *
     * Valid Range: Minimum value of 1. Maximum value of 10000.
     *
     * @default - The default for Amazon Kinesis, Amazon DynamoDB, and Amazon MSK is 100 records.
     * The default for Amazon SQS is 10 messages. For standard SQS queues, the maximum is 10,000. For FIFO SQS queues, the maximum is 10.
     */
    readonly batchSize?: number;
    /**
     * If the function returns an error, split the batch in two and retry.
     *
     * @default false
     */
    readonly bisectBatchOnError?: boolean;
    /**
     * An Amazon SQS queue or Amazon SNS topic destination for discarded records.
     *
     * @default - discarded records are ignored
     */
    readonly onFailure?: IEventSourceDlq;
    /**
     * Set to false to disable the event source upon creation.
     *
     * @default true
     */
    readonly enabled?: boolean;
    /**
     * The position in the DynamoDB, Kinesis or MSK stream where AWS Lambda should
     * start reading.
     *
     * @see https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html#Kinesis-GetShardIterator-request-ShardIteratorType
     *
     * @default - Required for Amazon Kinesis, Amazon DynamoDB, and Amazon MSK Streams sources.
     */
    readonly startingPosition?: StartingPosition;
    /**
     * Allow functions to return partially successful responses for a batch of records.
     *
     * @see https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting
     *
     * @default false
     */
    readonly reportBatchItemFailures?: boolean;
    /**
     * The maximum amount of time to gather records before invoking the function.
     * Maximum of Duration.minutes(5).
     *
     * @default Duration.seconds(0)
     */
    readonly maxBatchingWindow?: cdk.Duration;
    /**
     * The maximum age of a record that Lambda sends to a function for processing.
     * Valid Range:
     * * Minimum value of 60 seconds
     * * Maximum value of 7 days
     *
     * @default - infinite or until the record expires.
     */
    readonly maxRecordAge?: cdk.Duration;
    /**
     * The maximum number of times to retry when the function returns an error.
     * Set to `undefined` if you want Lambda to keep retrying infinitely or until
     * the record expires.
     *
     * Valid Range:
     * * Minimum value of 0
     * * Maximum value of 10000
     *
     * @default - infinite or until the record expires.
     */
    readonly retryAttempts?: number;
    /**
     * The number of batches to process from each shard concurrently.
     * Valid Range:
     * * Minimum value of 1
     * * Maximum value of 10
     *
     * @default 1
     */
    readonly parallelizationFactor?: number;
    /**
     * The name of the Kafka topic.
     *
     * @default - no topic
     */
    readonly kafkaTopic?: string;
    /**
     * The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
     *
     * @see https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-windows
     *
     * Valid Range: 0 - 15 minutes
     *
     * @default - None
     */
    readonly tumblingWindow?: cdk.Duration;
    /**
     * A list of host and port pairs that are the addresses of the Kafka brokers in a self-managed "bootstrap" Kafka cluster
     * that a Kafka client connects to initially to bootstrap itself.
     * They are in the format `abc.example.com:9096`.
     *
     * @default - none
     */
    readonly kafkaBootstrapServers?: string[];
    /**
     * Specific settings like the authentication protocol or the VPC components to secure access to your event source.
     * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lambda-eventsourcemapping-sourceaccessconfiguration.html
     *
     * @default - none
     */
    readonly sourceAccessConfigurations?: SourceAccessConfiguration[];
}
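// Illustrative usage sketch (assumptions: `fn` is a Function defined elsewhere in the app, and the
// broker addresses, topic name and secret ARN are placeholders): these options are typically passed
// to `IFunction.addEventSourceMapping()`, here for a self-managed Kafka cluster secured with SASL/SCRAM.
//
//   fn.addEventSourceMapping('KafkaMapping', {
//     kafkaBootstrapServers: ['abc.example.com:9096'],
//     kafkaTopic: 'my-topic',
//     startingPosition: StartingPosition.TRIM_HORIZON,
//     batchSize: 100,
//     sourceAccessConfigurations: [{
//       type: SourceAccessConfigurationType.SASL_SCRAM_512_AUTH,
//       uri: 'arn:aws:secretsmanager:us-east-1:01234567890:secret:MyBrokerSecretName',
//     }],
//   });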
/**
 * Properties for declaring a new event source mapping.
 */
export interface EventSourceMappingProps extends EventSourceMappingOptions {
    /**
     * The target AWS Lambda function.
     */
    readonly target: IFunction;
}
/**
 * Represents an event source mapping for a lambda function.
 * @see https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html
 */
export interface IEventSourceMapping extends cdk.IResource {
    /**
     * The identifier for this EventSourceMapping
     * @attribute
     */
    readonly eventSourceMappingId: string;
}
/**
 * Defines a Lambda EventSourceMapping resource.
 *
 * Usually, you won't need to define the mapping yourself. This will usually be done by
 * event sources. For example, to add an SQS event source to a function:
 *
 *    import { SqsEventSource } from '@aws-cdk/aws-lambda-event-sources';
 *    lambda.addEventSource(new SqsEventSource(sqs));
 *
 * The `SqsEventSource` class will automatically create the mapping, and will also
 * modify the Lambda's execution role so it can consume messages from the queue.
 */
export declare class EventSourceMapping extends cdk.Resource implements IEventSourceMapping {
    /**
     * Import an event source into this stack from its event source id.
     */
    static fromEventSourceMappingId(scope: Construct, id: string, eventSourceMappingId: string): IEventSourceMapping;
    readonly eventSourceMappingId: string;
    constructor(scope: Construct, id: string, props: EventSourceMappingProps);
}
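// Illustrative usage sketch (assumptions: `this` is a Stack or other Construct scope, `fn` is a
// Function, `myKinesisStream` is a Kinesis stream defined elsewhere, and the mapping id is a
// placeholder): defining a mapping directly, and referencing an existing mapping by its id.
//
//   new EventSourceMapping(this, 'KinesisMapping', {
//     target: fn,
//     eventSourceArn: myKinesisStream.streamArn,
//     startingPosition: StartingPosition.LATEST,
//     batchSize: 100,
//   });
//
//   const imported = EventSourceMapping.fromEventSourceMappingId(this, 'ImportedMapping', 'a1b2c3d4-5678-90ab-cdef-example11111');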
/**
 * The position in the DynamoDB, Kinesis or MSK stream where AWS Lambda should start
 * reading.
 */
export declare enum StartingPosition {
    /**
     * Start reading at the last untrimmed record in the shard in the system,
     * which is the oldest data record in the shard.
     */
    TRIM_HORIZON = "TRIM_HORIZON",
    /**
     * Start reading just after the most recent record in the shard, so that you
     * always read the most recent data in the shard.
     */
    LATEST = "LATEST"
}