Documentation ¶
Index ¶
- func NewApiEventSource_Override(a ApiEventSource, method *string, path *string, ...)
- func NewDynamoEventSource_Override(d DynamoEventSource, table awsdynamodb.ITable, props *DynamoEventSourceProps)
- func NewKinesisEventSource_Override(k KinesisEventSource, stream awskinesis.IStream, ...)
- func NewManagedKafkaEventSource_Override(m ManagedKafkaEventSource, props *ManagedKafkaEventSourceProps)
- func NewS3EventSource_Override(s S3EventSource, bucket awss3.Bucket, props *S3EventSourceProps)
- func NewSelfManagedKafkaEventSource_Override(s SelfManagedKafkaEventSource, props *SelfManagedKafkaEventSourceProps)
- func NewSnsDlq_Override(s SnsDlq, topic awssns.ITopic)
- func NewSnsEventSource_Override(s SnsEventSource, topic awssns.ITopic, props *SnsEventSourceProps)
- func NewSqsDlq_Override(s SqsDlq, queue awssqs.IQueue)
- func NewSqsEventSource_Override(s SqsEventSource, queue awssqs.IQueue, props *SqsEventSourceProps)
- func NewStreamEventSource_Override(s StreamEventSource, props *StreamEventSourceProps)
- type ApiEventSource
- type AuthenticationMethod
- type BaseStreamEventSourceProps
- type DynamoEventSource
- type DynamoEventSourceProps
- type KafkaEventSourceProps
- type KinesisEventSource
- type KinesisEventSourceProps
- type ManagedKafkaEventSource
- type ManagedKafkaEventSourceProps
- type S3EventSource
- type S3EventSourceProps
- type SelfManagedKafkaEventSource
- type SelfManagedKafkaEventSourceProps
- type SnsDlq
- type SnsEventSource
- type SnsEventSourceProps
- type SqsDlq
- type SqsEventSource
- type SqsEventSourceProps
- type StreamEventSource
- type StreamEventSourceProps
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func NewApiEventSource_Override ¶
func NewApiEventSource_Override(a ApiEventSource, method *string, path *string, options *awsapigateway.MethodOptions)
func NewDynamoEventSource_Override ¶
func NewDynamoEventSource_Override(d DynamoEventSource, table awsdynamodb.ITable, props *DynamoEventSourceProps)
func NewKinesisEventSource_Override ¶
func NewKinesisEventSource_Override(k KinesisEventSource, stream awskinesis.IStream, props *KinesisEventSourceProps)
func NewManagedKafkaEventSource_Override ¶
func NewManagedKafkaEventSource_Override(m ManagedKafkaEventSource, props *ManagedKafkaEventSourceProps)
func NewS3EventSource_Override ¶
func NewS3EventSource_Override(s S3EventSource, bucket awss3.Bucket, props *S3EventSourceProps)
func NewSelfManagedKafkaEventSource_Override ¶
func NewSelfManagedKafkaEventSource_Override(s SelfManagedKafkaEventSource, props *SelfManagedKafkaEventSourceProps)
func NewSnsDlq_Override ¶
func NewSnsDlq_Override(s SnsDlq, topic awssns.ITopic)
func NewSnsEventSource_Override ¶
func NewSnsEventSource_Override(s SnsEventSource, topic awssns.ITopic, props *SnsEventSourceProps)
func NewSqsDlq_Override ¶
func NewSqsDlq_Override(s SqsDlq, queue awssqs.IQueue)
func NewSqsEventSource_Override ¶
func NewSqsEventSource_Override(s SqsEventSource, queue awssqs.IQueue, props *SqsEventSourceProps)
func NewStreamEventSource_Override ¶
func NewStreamEventSource_Override(s StreamEventSource, props *StreamEventSourceProps)
Types ¶
type ApiEventSource ¶
type ApiEventSource interface {
    awslambda.IEventSource
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
}
Example:
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

var authorizer authorizer
var model model
var requestValidator requestValidator

apiEventSource := awscdk.Aws_lambda_event_sources.NewApiEventSource(jsii.String("method"), jsii.String("path"), &MethodOptions{
    ApiKeyRequired: jsii.Boolean(false),
    AuthorizationScopes: []*string{
        jsii.String("authorizationScopes"),
    },
    AuthorizationType: awscdk.Aws_apigateway.AuthorizationType_NONE,
    Authorizer: authorizer,
    MethodResponses: []*methodResponse{
        &methodResponse{
            StatusCode: jsii.String("statusCode"),

            // the properties below are optional
            ResponseModels: map[string]iModel{
                "responseModelsKey": model,
            },
            ResponseParameters: map[string]*bool{
                "responseParametersKey": jsii.Boolean(false),
            },
        },
    },
    OperationName: jsii.String("operationName"),
    RequestModels: map[string]iModel{
        "requestModelsKey": model,
    },
    RequestParameters: map[string]*bool{
        "requestParametersKey": jsii.Boolean(false),
    },
    RequestValidator: requestValidator,
    RequestValidatorOptions: &RequestValidatorOptions{
        RequestValidatorName: jsii.String("requestValidatorName"),
        ValidateRequestBody: jsii.Boolean(false),
        ValidateRequestParameters: jsii.Boolean(false),
    },
})
func NewApiEventSource ¶
func NewApiEventSource(method *string, path *string, options *awsapigateway.MethodOptions) ApiEventSource
type AuthenticationMethod ¶
type AuthenticationMethod string
The authentication method to use with SelfManagedKafkaEventSource.
const (
    // SASL_SCRAM_512_AUTH authentication method for your Kafka cluster.
    AuthenticationMethod_SASL_SCRAM_512_AUTH AuthenticationMethod = "SASL_SCRAM_512_AUTH"
    // SASL_SCRAM_256_AUTH authentication method for your Kafka cluster.
    AuthenticationMethod_SASL_SCRAM_256_AUTH AuthenticationMethod = "SASL_SCRAM_256_AUTH"
    // BASIC_AUTH (SASL/PLAIN) authentication method for your Kafka cluster.
    AuthenticationMethod_BASIC_AUTH AuthenticationMethod = "BASIC_AUTH"
    // CLIENT_CERTIFICATE_TLS_AUTH (mTLS) authentication method for your Kafka cluster.
    AuthenticationMethod_CLIENT_CERTIFICATE_TLS_AUTH AuthenticationMethod = "CLIENT_CERTIFICATE_TLS_AUTH"
)
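The value is consumed through the AuthenticationMethod property of SelfManagedKafkaEventSourceProps, documented below. A minimal sketch, assuming `fn` is an existing Function and `secret` holds SASL/SCRAM credentials (broker address and topic are placeholders):

var secret secret
var fn function

fn.AddEventSource(awscdk.NewSelfManagedKafkaEventSource(&SelfManagedKafkaEventSourceProps{
    BootstrapServers: &[]*string{jsii.String("kafka-broker:9092")}, // placeholder address
    Topic: jsii.String("some-cool-topic"),
    Secret: secret,
    AuthenticationMethod: awscdk.AuthenticationMethod_SASL_SCRAM_512_AUTH,
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
}))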
type BaseStreamEventSourceProps ¶ added in v2.7.0
type BaseStreamEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
}
The set of properties for streaming event sources shared by Dynamo, Kinesis and Kafka.
Example:
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

baseStreamEventSourceProps := &BaseStreamEventSourceProps{
    StartingPosition: awscdk.Aws_lambda.StartingPosition_TRIM_HORIZON,

    // the properties below are optional
    BatchSize: jsii.Number(123),
    Enabled: jsii.Boolean(false),
    MaxBatchingWindow: cdk.Duration_Minutes(jsii.Number(30)),
}
type DynamoEventSource ¶
type DynamoEventSource interface {
    StreamEventSource
    // The identifier for this EventSourceMapping.
    EventSourceMappingId() *string
    Props() *StreamEventSourceProps
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
    EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use an Amazon DynamoDB stream as an event source for AWS Lambda.
Example:
import eventsources "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var fn function

table := dynamodb.NewTable(this, jsii.String("Table"), &TableProps{
    PartitionKey: &Attribute{
        Name: jsii.String("id"),
        Type: dynamodb.AttributeType_STRING,
    },
    Stream: dynamodb.StreamViewType_NEW_IMAGE,
})

fn.AddEventSource(eventsources.NewDynamoEventSource(table, &DynamoEventSourceProps{
    StartingPosition: lambda.StartingPosition_LATEST,
    Filters: []map[string]interface{}{
        lambda.FilterCriteria_Filter(map[string]interface{}{
            "eventName": lambda.FilterRule_isEqual(jsii.String("INSERT")),
        }),
    },
}))
func NewDynamoEventSource ¶
func NewDynamoEventSource(table awsdynamodb.ITable, props *DynamoEventSourceProps) DynamoEventSource
type DynamoEventSourceProps ¶
type DynamoEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // If the function returns an error, split the batch in two and retry.
    BisectBatchOnError *bool `field:"optional" json:"bisectBatchOnError" yaml:"bisectBatchOnError"`
    // Add filter criteria option.
    Filters *[]*map[string]interface{} `field:"optional" json:"filters" yaml:"filters"`
    // The maximum age of a record that Lambda sends to a function for processing.
    //
    // Valid Range:
    //   * Minimum value of 60 seconds
    //   * Maximum value of 7 days.
    MaxRecordAge awscdk.Duration `field:"optional" json:"maxRecordAge" yaml:"maxRecordAge"`
    // An Amazon SQS queue or Amazon SNS topic destination for discarded records.
    OnFailure awslambda.IEventSourceDlq `field:"optional" json:"onFailure" yaml:"onFailure"`
    // The number of batches to process from each shard concurrently.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of 10.
    ParallelizationFactor *float64 `field:"optional" json:"parallelizationFactor" yaml:"parallelizationFactor"`
    // Allow functions to return partially successful responses for a batch of records.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting
    ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"`
    // Maximum number of retry attempts.
    //
    // Valid Range:
    //   * Minimum value of 0
    //   * Maximum value of 10000.
    RetryAttempts *float64 `field:"optional" json:"retryAttempts" yaml:"retryAttempts"`
    // The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
    //
    // Valid Range: 0 - 15 minutes.
    TumblingWindow awscdk.Duration `field:"optional" json:"tumblingWindow" yaml:"tumblingWindow"`
}
Example:
import eventsources "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var fn function

table := dynamodb.NewTable(this, jsii.String("Table"), &TableProps{
    PartitionKey: &Attribute{
        Name: jsii.String("id"),
        Type: dynamodb.AttributeType_STRING,
    },
    Stream: dynamodb.StreamViewType_NEW_IMAGE,
})

fn.AddEventSource(eventsources.NewDynamoEventSource(table, &DynamoEventSourceProps{
    StartingPosition: lambda.StartingPosition_LATEST,
    Filters: []map[string]interface{}{
        lambda.FilterCriteria_Filter(map[string]interface{}{
            "eventName": lambda.FilterRule_isEqual(jsii.String("INSERT")),
        }),
    },
}))
type KafkaEventSourceProps ¶
type KafkaEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // The Kafka topic to subscribe to.
    Topic *string `field:"required" json:"topic" yaml:"topic"`
    // The identifier for the Kafka consumer group to join.
    //
    // The consumer group ID must be unique among all your Kafka event sources. After creating a Kafka event source mapping with the consumer group ID specified, you cannot update this value. The value must have a length between 1 and 200 and match the pattern '[a-zA-Z0-9-\/*:_+=.@-]*'.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html#services-msk-consumer-group-id
    ConsumerGroupId *string `field:"optional" json:"consumerGroupId" yaml:"consumerGroupId"`
    // The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
    //
    // This field is required if your Kafka brokers are accessed over the Internet.
    Secret awssecretsmanager.ISecret `field:"optional" json:"secret" yaml:"secret"`
}
Properties for a Kafka event source.
Example:
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var secret secret

kafkaEventSourceProps := &KafkaEventSourceProps{
    StartingPosition: awscdk.Aws_lambda.StartingPosition_TRIM_HORIZON,
    Topic: jsii.String("topic"),

    // the properties below are optional
    BatchSize: jsii.Number(123),
    ConsumerGroupId: jsii.String("consumerGroupId"),
    Enabled: jsii.Boolean(false),
    MaxBatchingWindow: cdk.Duration_Minutes(jsii.Number(30)),
    Secret: secret,
}
type KinesisEventSource ¶
type KinesisEventSource interface {
    StreamEventSource
    // The identifier for this EventSourceMapping.
    EventSourceMappingId() *string
    Props() *StreamEventSourceProps
    Stream() awskinesis.IStream
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
    EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use an Amazon Kinesis stream as an event source for AWS Lambda.
Example:
import kinesis "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var myFunction function

stream := kinesis.NewStream(this, jsii.String("MyStream"))
myFunction.AddEventSource(awscdk.NewKinesisEventSource(stream, &KinesisEventSourceProps{
    BatchSize: jsii.Number(100), // default
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
}))
func NewKinesisEventSource ¶
func NewKinesisEventSource(stream awskinesis.IStream, props *KinesisEventSourceProps) KinesisEventSource
type KinesisEventSourceProps ¶
type KinesisEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // If the function returns an error, split the batch in two and retry.
    BisectBatchOnError *bool `field:"optional" json:"bisectBatchOnError" yaml:"bisectBatchOnError"`
    // Add filter criteria option.
    Filters *[]*map[string]interface{} `field:"optional" json:"filters" yaml:"filters"`
    // The maximum age of a record that Lambda sends to a function for processing.
    //
    // Valid Range:
    //   * Minimum value of 60 seconds
    //   * Maximum value of 7 days.
    MaxRecordAge awscdk.Duration `field:"optional" json:"maxRecordAge" yaml:"maxRecordAge"`
    // An Amazon SQS queue or Amazon SNS topic destination for discarded records.
    OnFailure awslambda.IEventSourceDlq `field:"optional" json:"onFailure" yaml:"onFailure"`
    // The number of batches to process from each shard concurrently.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of 10.
    ParallelizationFactor *float64 `field:"optional" json:"parallelizationFactor" yaml:"parallelizationFactor"`
    // Allow functions to return partially successful responses for a batch of records.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting
    ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"`
    // Maximum number of retry attempts.
    //
    // Valid Range:
    //   * Minimum value of 0
    //   * Maximum value of 10000.
    RetryAttempts *float64 `field:"optional" json:"retryAttempts" yaml:"retryAttempts"`
    // The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
    //
    // Valid Range: 0 - 15 minutes.
    TumblingWindow awscdk.Duration `field:"optional" json:"tumblingWindow" yaml:"tumblingWindow"`
    // The time from which to start reading, in Unix time seconds.
    StartingPositionTimestamp *float64 `field:"optional" json:"startingPositionTimestamp" yaml:"startingPositionTimestamp"`
}
Example:
import kinesis "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var myFunction function

stream := kinesis.NewStream(this, jsii.String("MyStream"))
myFunction.AddEventSource(awscdk.NewKinesisEventSource(stream, &KinesisEventSourceProps{
    BatchSize: jsii.Number(100), // default
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
}))
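StartingPositionTimestamp is meant to be used together with lambda.StartingPosition_AT_TIMESTAMP. A minimal sketch of reading from a fixed point in time, reusing `stream` and `myFunction` from the example above (the epoch value is a placeholder):

myFunction.AddEventSource(awscdk.NewKinesisEventSource(stream, &KinesisEventSourceProps{
    StartingPosition: lambda.StartingPosition_AT_TIMESTAMP,
    StartingPositionTimestamp: jsii.Number(1672531200), // placeholder: 2023-01-01T00:00:00Z
}))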
type ManagedKafkaEventSource ¶
type ManagedKafkaEventSource interface {
    StreamEventSource
    // The identifier for this EventSourceMapping.
    EventSourceMappingId() *string
    Props() *StreamEventSourceProps
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
    EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use an MSK cluster as a streaming source for AWS Lambda.
Example:
import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var myFunction function // Your MSK cluster arn clusterArn := "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" // The Kafka topic you want to subscribe to topic := "some-cool-topic" // The secret that allows access to your MSK cluster // You still have to make sure that it is associated with your cluster as described in the documentation secret := awscdk.NewSecret(this, jsii.String("Secret"), &SecretProps{ SecretName: jsii.String("AmazonMSK_KafkaSecret"), }) myFunction.AddEventSource(awscdk.NewManagedKafkaEventSource(&ManagedKafkaEventSourceProps{ ClusterArn: jsii.String(ClusterArn), Topic: topic, Secret: secret, BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
func NewManagedKafkaEventSource ¶
func NewManagedKafkaEventSource(props *ManagedKafkaEventSourceProps) ManagedKafkaEventSource
type ManagedKafkaEventSourceProps ¶
type ManagedKafkaEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // The Kafka topic to subscribe to.
    Topic *string `field:"required" json:"topic" yaml:"topic"`
    // The identifier for the Kafka consumer group to join.
    //
    // The consumer group ID must be unique among all your Kafka event sources. After creating a Kafka event source mapping with the consumer group ID specified, you cannot update this value. The value must have a length between 1 and 200 and match the pattern '[a-zA-Z0-9-\/*:_+=.@-]*'.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html#services-msk-consumer-group-id
    ConsumerGroupId *string `field:"optional" json:"consumerGroupId" yaml:"consumerGroupId"`
    // The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
    //
    // This field is required if your Kafka brokers are accessed over the Internet.
    Secret awssecretsmanager.ISecret `field:"optional" json:"secret" yaml:"secret"`
    // The ARN of your MSK cluster.
    ClusterArn *string `field:"required" json:"clusterArn" yaml:"clusterArn"`
}
Properties for an MSK event source.
Example:
import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var myFunction function // Your MSK cluster arn clusterArn := "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" // The Kafka topic you want to subscribe to topic := "some-cool-topic" // The secret that allows access to your MSK cluster // You still have to make sure that it is associated with your cluster as described in the documentation secret := awscdk.NewSecret(this, jsii.String("Secret"), &SecretProps{ SecretName: jsii.String("AmazonMSK_KafkaSecret"), }) myFunction.AddEventSource(awscdk.NewManagedKafkaEventSource(&ManagedKafkaEventSourceProps{ ClusterArn: jsii.String(ClusterArn), Topic: topic, Secret: secret, BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
type S3EventSource ¶
type S3EventSource interface {
    awslambda.IEventSource
    Bucket() awss3.Bucket
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
}
Use S3 bucket notifications as an event source for AWS Lambda.
Example:
import eventsources "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var fn function

bucket := s3.NewBucket(this, jsii.String("Bucket"))
fn.AddEventSource(eventsources.NewS3EventSource(bucket, &S3EventSourceProps{
    Events: []s3.EventType{
        s3.EventType_OBJECT_CREATED,
        s3.EventType_OBJECT_REMOVED,
    },
    Filters: []*s3.NotificationKeyFilter{
        &s3.NotificationKeyFilter{
            Prefix: jsii.String("subdir/"),
        },
    },
}))
func NewS3EventSource ¶
func NewS3EventSource(bucket awss3.Bucket, props *S3EventSourceProps) S3EventSource
type S3EventSourceProps ¶
type S3EventSourceProps struct {
    // The s3 event types that will trigger the notification.
    Events *[]awss3.EventType `field:"required" json:"events" yaml:"events"`
    // S3 object key filter rules to determine which objects trigger this event.
    //
    // Each filter must include a `prefix` and/or `suffix` that will be matched
    // against the s3 object key. Refer to the S3 Developer Guide for details
    // about allowed filter rules.
    Filters *[]*awss3.NotificationKeyFilter `field:"optional" json:"filters" yaml:"filters"`
}
Example:
import eventsources "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var fn function

bucket := s3.NewBucket(this, jsii.String("Bucket"))
fn.AddEventSource(eventsources.NewS3EventSource(bucket, &S3EventSourceProps{
    Events: []s3.EventType{
        s3.EventType_OBJECT_CREATED,
        s3.EventType_OBJECT_REMOVED,
    },
    Filters: []*s3.NotificationKeyFilter{
        &s3.NotificationKeyFilter{
            Prefix: jsii.String("subdir/"),
        },
    },
}))
type SelfManagedKafkaEventSource ¶
type SelfManagedKafkaEventSource interface {
    StreamEventSource
    Props() *StreamEventSourceProps
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
    EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use a self-hosted Kafka installation as a streaming source for AWS Lambda.
Example:
// Example automatically generated from non-compiling source. May contain errors.
import "github.com/aws/aws-cdk-go/awscdk"

// The secret that allows access to your self-hosted Kafka cluster
var secret secret

// (Optional) The secret containing the root CA certificate that your Kafka brokers use for TLS encryption
var encryption secret

var myFunction function

// The list of Kafka brokers
bootstrapServers := []*string{
    jsii.String("kafka-broker:9092"),
}

// The Kafka topic you want to subscribe to
topic := "some-cool-topic"

// (Optional) The consumer group id to use when connecting to the Kafka broker. If omitted the UUID of the event source mapping will be used.
var consumerGroupId string

myFunction.AddEventSource(awscdk.NewSelfManagedKafkaEventSource(&SelfManagedKafkaEventSourceProps{
    BootstrapServers: &bootstrapServers,
    Topic: jsii.String(topic),
    ConsumerGroupId: jsii.String(consumerGroupId),
    Secret: secret,
    BatchSize: jsii.Number(100), // default
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
    RootCACertificate: encryption,
}))
func NewSelfManagedKafkaEventSource ¶
func NewSelfManagedKafkaEventSource(props *SelfManagedKafkaEventSourceProps) SelfManagedKafkaEventSource
type SelfManagedKafkaEventSourceProps ¶
type SelfManagedKafkaEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // The Kafka topic to subscribe to.
    Topic *string `field:"required" json:"topic" yaml:"topic"`
    // The identifier for the Kafka consumer group to join.
    //
    // The consumer group ID must be unique among all your Kafka event sources. After creating a Kafka event source mapping with the consumer group ID specified, you cannot update this value. The value must have a length between 1 and 200 and match the pattern '[a-zA-Z0-9-\/*:_+=.@-]*'.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html#services-msk-consumer-group-id
    ConsumerGroupId *string `field:"optional" json:"consumerGroupId" yaml:"consumerGroupId"`
    // The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
    //
    // This field is required if your Kafka brokers are accessed over the Internet.
    Secret awssecretsmanager.ISecret `field:"optional" json:"secret" yaml:"secret"`
    // The list of host and port pairs that are the addresses of the Kafka brokers in a "bootstrap" Kafka cluster that a Kafka client connects to initially to bootstrap itself.
    //
    // They are in the format `abc.xyz.com:xxxx`.
    BootstrapServers *[]*string `field:"required" json:"bootstrapServers" yaml:"bootstrapServers"`
    // The authentication method for your Kafka cluster.
    AuthenticationMethod AuthenticationMethod `field:"optional" json:"authenticationMethod" yaml:"authenticationMethod"`
    // The secret with the root CA certificate used by your Kafka brokers for TLS encryption.
    //
    // This field is required if your Kafka brokers use certificates signed by a private CA.
    RootCACertificate awssecretsmanager.ISecret `field:"optional" json:"rootCACertificate" yaml:"rootCACertificate"`
    // If your Kafka brokers are only reachable via VPC, provide the security group here.
    SecurityGroup awsec2.ISecurityGroup `field:"optional" json:"securityGroup" yaml:"securityGroup"`
    // If your Kafka brokers are only reachable via VPC, provide the VPC here.
    Vpc awsec2.IVpc `field:"optional" json:"vpc" yaml:"vpc"`
    // If your Kafka brokers are only reachable via VPC, provide the subnets selection here.
    VpcSubnets *awsec2.SubnetSelection `field:"optional" json:"vpcSubnets" yaml:"vpcSubnets"`
}
Properties for a self-managed Kafka cluster event source.
If your Kafka cluster is only reachable via VPC, make sure to configure the VPC-related properties (vpc, vpcSubnets and securityGroup).
Example:
// Example automatically generated from non-compiling source. May contain errors.
import "github.com/aws/aws-cdk-go/awscdk"

// The secret that allows access to your self-hosted Kafka cluster
var secret secret

// (Optional) The secret containing the root CA certificate that your Kafka brokers use for TLS encryption
var encryption secret

var myFunction function

// The list of Kafka brokers
bootstrapServers := []*string{
    jsii.String("kafka-broker:9092"),
}

// The Kafka topic you want to subscribe to
topic := "some-cool-topic"

// (Optional) The consumer group id to use when connecting to the Kafka broker. If omitted the UUID of the event source mapping will be used.
var consumerGroupId string

myFunction.AddEventSource(awscdk.NewSelfManagedKafkaEventSource(&SelfManagedKafkaEventSourceProps{
    BootstrapServers: &bootstrapServers,
    Topic: jsii.String(topic),
    ConsumerGroupId: jsii.String(consumerGroupId),
    Secret: secret,
    BatchSize: jsii.Number(100), // default
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
    RootCACertificate: encryption,
}))
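For brokers that are only reachable from inside a VPC, the vpc, vpcSubnets and securityGroup properties mentioned above come into play. A minimal sketch under that assumption, reusing the variables from the example; `vpc` and `sg` are hypothetical, pre-existing constructs, and the subnet type is one possible selection:

var vpc vpc
var sg securityGroup

myFunction.AddEventSource(awscdk.NewSelfManagedKafkaEventSource(&SelfManagedKafkaEventSourceProps{
    BootstrapServers: &bootstrapServers,
    Topic: jsii.String(topic),
    Secret: secret,
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
    Vpc: vpc,
    VpcSubnets: &SubnetSelection{
        SubnetType: awscdk.Aws_ec2.SubnetType_PRIVATE_WITH_EGRESS,
    },
    SecurityGroup: sg,
}))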
type SnsDlq ¶
type SnsDlq interface {
    awslambda.IEventSourceDlq
    // Returns a destination configuration for the DLQ.
    Bind(_target awslambda.IEventSourceMapping, targetHandler awslambda.IFunction) *awslambda.DlqDestinationConfig
}
An SNS dead letter queue destination configuration for a Lambda event source.
Example:
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

var topic topic

snsDlq := awscdk.Aws_lambda_event_sources.NewSnsDlq(topic)

func NewSnsDlq ¶
func NewSnsDlq(topic awssns.ITopic) SnsDlq
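As with SqsDlq below, the resulting object is passed as the OnFailure destination of a stream event source. A brief sketch, assuming `table`, `fn` and `topic` already exist:

fn.AddEventSource(awscdk.NewDynamoEventSource(table, &DynamoEventSourceProps{
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
    OnFailure: awscdk.NewSnsDlq(topic),
}))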
type SnsEventSource ¶
type SnsEventSource interface {
    awslambda.IEventSource
    Topic() awssns.ITopic
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
}
Use an Amazon SNS topic as an event source for AWS Lambda.
Example:
import sns "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var topic topic
var fn function

deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue"))
fn.AddEventSource(awscdk.NewSnsEventSource(topic, &SnsEventSourceProps{
    FilterPolicy: map[string]interface{}{
    },
    DeadLetterQueue: deadLetterQueue,
}))
func NewSnsEventSource ¶
func NewSnsEventSource(topic awssns.ITopic, props *SnsEventSourceProps) SnsEventSource
type SnsEventSourceProps ¶
type SnsEventSourceProps struct {
    // Queue to be used as dead letter queue.
    //
    // If not passed, no dead letter queue is enabled.
    DeadLetterQueue awssqs.IQueue `field:"optional" json:"deadLetterQueue" yaml:"deadLetterQueue"`
    // The filter policy.
    FilterPolicy *map[string]awssns.SubscriptionFilter `field:"optional" json:"filterPolicy" yaml:"filterPolicy"`
    // The filter policy that is applied on the message body.
    //
    // To apply a filter policy to the message attributes, use `filterPolicy`. A maximum of one of `filterPolicyWithMessageBody` and `filterPolicy` may be used.
    FilterPolicyWithMessageBody *map[string]awssns.FilterOrPolicy `field:"optional" json:"filterPolicyWithMessageBody" yaml:"filterPolicyWithMessageBody"`
}
Properties forwarded to the Lambda Subscription.
Example:
import sns "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var topic topic
var fn function

deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue"))
fn.AddEventSource(awscdk.NewSnsEventSource(topic, &SnsEventSourceProps{
    FilterPolicy: map[string]interface{}{
    },
    DeadLetterQueue: deadLetterQueue,
}))
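The empty FilterPolicy above is only a placeholder. A sketch of a non-empty policy on a message attribute, assuming the awssns helpers SubscriptionFilter_StringFilter and StringConditions from aws-cdk-go v2 (the attribute name and values are made up):

fn.AddEventSource(awscdk.NewSnsEventSource(topic, &SnsEventSourceProps{
    FilterPolicy: &map[string]sns.SubscriptionFilter{
        "color": sns.SubscriptionFilter_StringFilter(&sns.StringConditions{
            Allowlist: &[]*string{jsii.String("red"), jsii.String("orange")},
        }),
    },
    DeadLetterQueue: deadLetterQueue,
}))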
type SqsDlq ¶
type SqsDlq interface {
    awslambda.IEventSourceDlq
    // Returns a destination configuration for the DLQ.
    Bind(_target awslambda.IEventSourceMapping, targetHandler awslambda.IFunction) *awslambda.DlqDestinationConfig
}
An SQS dead letter queue destination configuration for a Lambda event source.
Example:
import dynamodb "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var table table
var fn function

deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue"))
fn.AddEventSource(awscdk.NewDynamoEventSource(table, &DynamoEventSourceProps{
    StartingPosition: lambda.StartingPosition_TRIM_HORIZON,
    BatchSize: jsii.Number(5),
    BisectBatchOnError: jsii.Boolean(true),
    OnFailure: awscdk.NewSqsDlq(deadLetterQueue),
    RetryAttempts: jsii.Number(10),
}))

func NewSqsDlq ¶
func NewSqsDlq(queue awssqs.IQueue) SqsDlq
type SqsEventSource ¶
type SqsEventSource interface {
    awslambda.IEventSource
    // The identifier for this EventSourceMapping.
    EventSourceMappingId() *string
    Queue() awssqs.IQueue
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(target awslambda.IFunction)
}
Use an Amazon SQS queue as an event source for AWS Lambda.
Example:
import "github.com/aws/aws-cdk-go/awscdk" var fn function queue := sqs.NewQueue(this, jsii.String("MyQueue")) eventSource := awscdk.NewSqsEventSource(queue) fn.AddEventSource(eventSource) eventSourceId := eventSource.eventSourceMappingId
func NewSqsEventSource ¶
func NewSqsEventSource(queue awssqs.IQueue, props *SqsEventSourceProps) SqsEventSource
type SqsEventSourceProps ¶
type SqsEventSourceProps struct {
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range: Minimum value of 1. Maximum value of 10.
    // If `maxBatchingWindow` is configured, this value can go up to 10,000.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the SQS event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // Add filter criteria option.
    Filters *[]*map[string]interface{} `field:"optional" json:"filters" yaml:"filters"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Valid Range: Minimum value of 0 minutes. Maximum value of 5 minutes.
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // The maximum concurrency setting limits the number of concurrent instances of the function that an Amazon SQS event source can invoke.
    //
    // Valid Range: Minimum value of 2. Maximum value of 1000.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#events-sqs-max-concurrency
    MaxConcurrency *float64 `field:"optional" json:"maxConcurrency" yaml:"maxConcurrency"`
    // Allow functions to return partially successful responses for a batch of records.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting
    ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"`
}
Example:
import "github.com/aws/aws-cdk-go/awscdk" var fn function queue := sqs.NewQueue(this, jsii.String("MyQueue"), &QueueProps{ VisibilityTimeout: awscdk.Duration_Seconds(jsii.Number(30)), // default, ReceiveMessageWaitTime: awscdk.Duration_*Seconds(jsii.Number(20)), }) fn.AddEventSource(awscdk.NewSqsEventSource(queue, &SqsEventSourceProps{ BatchSize: jsii.Number(10), // default MaxBatchingWindow: awscdk.Duration_Minutes(jsii.Number(5)), ReportBatchItemFailures: jsii.Boolean(true), }))
type StreamEventSource ¶
type StreamEventSource interface {
    awslambda.IEventSource
    Props() *StreamEventSourceProps
    // Called by `lambda.addEventSource` to allow the event source to bind to this function.
    Bind(_target awslambda.IFunction)
    EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use a stream as an event source for AWS Lambda.
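StreamEventSource is the shared base of the Dynamo, Kinesis and Kafka sources. One way to see that relationship, sketched under the assumption that `stream` already exists:

import eventsources "github.com/aws/aws-cdk-go/awscdk"

// KinesisEventSource embeds StreamEventSource, so it can be held as one.
var source eventsources.StreamEventSource = eventsources.NewKinesisEventSource(stream, &KinesisEventSourceProps{
    StartingPosition: lambda.StartingPosition_LATEST,
})
props := source.Props() // the shared *StreamEventSourceProps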
type StreamEventSourceProps ¶
type StreamEventSourceProps struct {
    // Where to begin consuming the stream.
    StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"`
    // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
    //
    // Your function receives an event with all the retrieved records.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of:
    //     * 1000 for `DynamoEventSource`
    //     * 10000 for `KinesisEventSource`, `ManagedKafkaEventSource` and `SelfManagedKafkaEventSource`.
    BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"`
    // If the stream event source mapping should be enabled.
    Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"`
    // The maximum amount of time to gather records before invoking the function.
    //
    // Maximum of Duration.minutes(5).
    // See: https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventsourcemapping.html#invocation-eventsourcemapping-batching
    MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"`
    // If the function returns an error, split the batch in two and retry.
    BisectBatchOnError *bool `field:"optional" json:"bisectBatchOnError" yaml:"bisectBatchOnError"`
    // Add filter criteria option.
    Filters *[]*map[string]interface{} `field:"optional" json:"filters" yaml:"filters"`
    // The maximum age of a record that Lambda sends to a function for processing.
    //
    // Valid Range:
    //   * Minimum value of 60 seconds
    //   * Maximum value of 7 days.
    MaxRecordAge awscdk.Duration `field:"optional" json:"maxRecordAge" yaml:"maxRecordAge"`
    // An Amazon SQS queue or Amazon SNS topic destination for discarded records.
    OnFailure awslambda.IEventSourceDlq `field:"optional" json:"onFailure" yaml:"onFailure"`
    // The number of batches to process from each shard concurrently.
    //
    // Valid Range:
    //   * Minimum value of 1
    //   * Maximum value of 10.
    ParallelizationFactor *float64 `field:"optional" json:"parallelizationFactor" yaml:"parallelizationFactor"`
    // Allow functions to return partially successful responses for a batch of records.
    // See: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting
    ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"`
    // Maximum number of retry attempts.
    //
    // Valid Range:
    //   * Minimum value of 0
    //   * Maximum value of 10000.
    RetryAttempts *float64 `field:"optional" json:"retryAttempts" yaml:"retryAttempts"`
    // The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
    //
    // Valid Range: 0 - 15 minutes.
    TumblingWindow awscdk.Duration `field:"optional" json:"tumblingWindow" yaml:"tumblingWindow"`
}
The set of properties for streaming event sources shared by Dynamo and Kinesis.
Example:
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var eventSourceDlq iEventSourceDlq
var filters interface{}

streamEventSourceProps := &StreamEventSourceProps{
    StartingPosition: awscdk.Aws_lambda.StartingPosition_TRIM_HORIZON,

    // the properties below are optional
    BatchSize: jsii.Number(123),
    BisectBatchOnError: jsii.Boolean(false),
    Enabled: jsii.Boolean(false),
    Filters: []map[string]interface{}{
        map[string]interface{}{
            "filtersKey": filters,
        },
    },
    MaxBatchingWindow: cdk.Duration_Minutes(jsii.Number(30)),
    MaxRecordAge: cdk.Duration_Minutes(jsii.Number(30)),
    OnFailure: eventSourceDlq,
    ParallelizationFactor: jsii.Number(123),
    ReportBatchItemFailures: jsii.Boolean(false),
    RetryAttempts: jsii.Number(123),
    TumblingWindow: cdk.Duration_Minutes(jsii.Number(30)),
}
Source Files ¶
- ApiEventSource.go
- ApiEventSource__checks.go
- AuthenticationMethod.go
- BaseStreamEventSourceProps.go
- DynamoEventSource.go
- DynamoEventSourceProps.go
- DynamoEventSource__checks.go
- KafkaEventSourceProps.go
- KinesisEventSource.go
- KinesisEventSourceProps.go
- KinesisEventSource__checks.go
- ManagedKafkaEventSource.go
- ManagedKafkaEventSourceProps.go
- ManagedKafkaEventSource__checks.go
- S3EventSource.go
- S3EventSourceProps.go
- S3EventSource__checks.go
- SelfManagedKafkaEventSource.go
- SelfManagedKafkaEventSourceProps.go
- SelfManagedKafkaEventSource__checks.go
- SnsDlq.go
- SnsDlq__checks.go
- SnsEventSource.go
- SnsEventSourceProps.go
- SnsEventSource__checks.go
- SqsDlq.go
- SqsDlq__checks.go
- SqsEventSource.go
- SqsEventSourceProps.go
- SqsEventSource__checks.go
- StreamEventSource.go
- StreamEventSourceProps.go
- StreamEventSource__checks.go
- main.go