Documentation ¶
Index ¶
- func NewApiEventSource_Override(a ApiEventSource, method *string, path *string, ...)
- func NewDynamoEventSource_Override(d DynamoEventSource, table awsdynamodb.ITable, props *DynamoEventSourceProps)
- func NewKinesisEventSource_Override(k KinesisEventSource, stream awskinesis.IStream, ...)
- func NewManagedKafkaEventSource_Override(m ManagedKafkaEventSource, props *ManagedKafkaEventSourceProps)
- func NewS3EventSource_Override(s S3EventSource, bucket awss3.Bucket, props *S3EventSourceProps)
- func NewSelfManagedKafkaEventSource_Override(s SelfManagedKafkaEventSource, props *SelfManagedKafkaEventSourceProps)
- func NewSnsDlq_Override(s SnsDlq, topic awssns.ITopic)
- func NewSnsEventSource_Override(s SnsEventSource, topic awssns.ITopic, props *SnsEventSourceProps)
- func NewSqsDlq_Override(s SqsDlq, queue awssqs.IQueue)
- func NewSqsEventSource_Override(s SqsEventSource, queue awssqs.IQueue, props *SqsEventSourceProps)
- func NewStreamEventSource_Override(s StreamEventSource, props *StreamEventSourceProps)
- type ApiEventSource
- type AuthenticationMethod
- type BaseStreamEventSourceProps
- type DynamoEventSource
- type DynamoEventSourceProps
- type KafkaEventSourceProps
- type KinesisEventSource
- type KinesisEventSourceProps
- type ManagedKafkaEventSource
- type ManagedKafkaEventSourceProps
- type S3EventSource
- type S3EventSourceProps
- type SelfManagedKafkaEventSource
- type SelfManagedKafkaEventSourceProps
- type SnsDlq
- type SnsEventSource
- type SnsEventSourceProps
- type SqsDlq
- type SqsEventSource
- type SqsEventSourceProps
- type StreamEventSource
- type StreamEventSourceProps
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func NewApiEventSource_Override ¶
func NewApiEventSource_Override(a ApiEventSource, method *string, path *string, options *awsapigateway.MethodOptions)
Experimental.
func NewDynamoEventSource_Override ¶
func NewDynamoEventSource_Override(d DynamoEventSource, table awsdynamodb.ITable, props *DynamoEventSourceProps)
Experimental.
func NewKinesisEventSource_Override ¶
func NewKinesisEventSource_Override(k KinesisEventSource, stream awskinesis.IStream, props *KinesisEventSourceProps)
Experimental.
func NewManagedKafkaEventSource_Override ¶
func NewManagedKafkaEventSource_Override(m ManagedKafkaEventSource, props *ManagedKafkaEventSourceProps)
Experimental.
func NewS3EventSource_Override ¶
func NewS3EventSource_Override(s S3EventSource, bucket awss3.Bucket, props *S3EventSourceProps)
Experimental.
func NewSelfManagedKafkaEventSource_Override ¶
func NewSelfManagedKafkaEventSource_Override(s SelfManagedKafkaEventSource, props *SelfManagedKafkaEventSourceProps)
Experimental.
func NewSnsEventSource_Override ¶
func NewSnsEventSource_Override(s SnsEventSource, topic awssns.ITopic, props *SnsEventSourceProps)
Experimental.
func NewSqsEventSource_Override ¶
func NewSqsEventSource_Override(s SqsEventSource, queue awssqs.IQueue, props *SqsEventSourceProps)
Experimental.
func NewStreamEventSource_Override ¶
func NewStreamEventSource_Override(s StreamEventSource, props *StreamEventSourceProps)
Experimental.
Types ¶
type ApiEventSource ¶
type ApiEventSource interface { awslambda.IEventSource // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) }
Example:
// The code below shows an example of how to instantiate this type. // The values are placeholders you should change. import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var authorizer authorizer var model model var requestValidator requestValidator apiEventSource := awscdk.Aws_lambda_event_sources.NewApiEventSource(jsii.String("method"), jsii.String("path"), &MethodOptions{ ApiKeyRequired: jsii.Boolean(false), AuthorizationScopes: []*string{ jsii.String("authorizationScopes"), }, AuthorizationType: awscdk.Aws_apigateway.AuthorizationType_NONE, Authorizer: authorizer, MethodResponses: []methodResponse{ &methodResponse{ StatusCode: jsii.String("statusCode"), // the properties below are optional ResponseModels: map[string]iModel{ "responseModelsKey": model, }, ResponseParameters: map[string]*bool{ "responseParametersKey": jsii.Boolean(false), }, }, }, OperationName: jsii.String("operationName"), RequestModels: map[string]iModel{ "requestModelsKey": model, }, RequestParameters: map[string]*bool{ "requestParametersKey": jsii.Boolean(false), }, RequestValidator: requestValidator, RequestValidatorOptions: &RequestValidatorOptions{ RequestValidatorName: jsii.String("requestValidatorName"), ValidateRequestBody: jsii.Boolean(false), ValidateRequestParameters: jsii.Boolean(false), }, })
Experimental.
func NewApiEventSource ¶
func NewApiEventSource(method *string, path *string, options *awsapigateway.MethodOptions) ApiEventSource
Experimental.
type AuthenticationMethod ¶
type AuthenticationMethod string
The authentication method to use with SelfManagedKafkaEventSource. Experimental.
const ( // SASL_SCRAM_512_AUTH authentication method for your Kafka cluster. // Experimental. AuthenticationMethod_SASL_SCRAM_512_AUTH AuthenticationMethod = "SASL_SCRAM_512_AUTH" // SASL_SCRAM_256_AUTH authentication method for your Kafka cluster. // Experimental. AuthenticationMethod_SASL_SCRAM_256_AUTH AuthenticationMethod = "SASL_SCRAM_256_AUTH" // BASIC_AUTH (SASL/PLAIN) authentication method for your Kafka cluster. // Experimental. AuthenticationMethod_BASIC_AUTH AuthenticationMethod = "BASIC_AUTH" // CLIENT_CERTIFICATE_TLS_AUTH (mTLS) authentication method for your Kafka cluster. // Experimental. AuthenticationMethod_CLIENT_CERTIFICATE_TLS_AUTH AuthenticationMethod = "CLIENT_CERTIFICATE_TLS_AUTH" )
type BaseStreamEventSourceProps ¶
type BaseStreamEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` }
The set of properties for streaming event sources shared by Dynamo, Kinesis and Kafka.
Example:
// The code below shows an example of how to instantiate this type. // The values are placeholders you should change. import monocdk "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var duration duration baseStreamEventSourceProps := &BaseStreamEventSourceProps{ StartingPosition: awscdk.Aws_lambda.StartingPosition_TRIM_HORIZON, // the properties below are optional BatchSize: jsii.Number(123), Enabled: jsii.Boolean(false), MaxBatchingWindow: duration, }
Experimental.
type DynamoEventSource ¶
type DynamoEventSource interface { StreamEventSource // The identifier for this EventSourceMapping. // Experimental. EventSourceMappingId() *string // Experimental. Props() *StreamEventSourceProps // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) // Experimental. EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions }
Use an Amazon DynamoDB stream as an event source for AWS Lambda.
Example:
import dynamodb "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var table table var fn function deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue")) fn.AddEventSource(awscdk.NewDynamoEventSource(table, &DynamoEventSourceProps{ StartingPosition: lambda.StartingPosition_TRIM_HORIZON, BatchSize: jsii.Number(5), BisectBatchOnError: jsii.Boolean(true), OnFailure: awscdk.NewSqsDlq(deadLetterQueue), RetryAttempts: jsii.Number(10), }))
Experimental.
func NewDynamoEventSource ¶
func NewDynamoEventSource(table awsdynamodb.ITable, props *DynamoEventSourceProps) DynamoEventSource
Experimental.
type DynamoEventSourceProps ¶
type DynamoEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // If the function returns an error, split the batch in two and retry. // Experimental. BisectBatchOnError *bool `field:"optional" json:"bisectBatchOnError" yaml:"bisectBatchOnError"` // The maximum age of a record that Lambda sends to a function for processing. // // Valid Range: // * Minimum value of 60 seconds // * Maximum value of 7 days. // Experimental. MaxRecordAge awscdk.Duration `field:"optional" json:"maxRecordAge" yaml:"maxRecordAge"` // An Amazon SQS queue or Amazon SNS topic destination for discarded records. // Experimental. OnFailure awslambda.IEventSourceDlq `field:"optional" json:"onFailure" yaml:"onFailure"` // The number of batches to process from each shard concurrently. // // Valid Range: // * Minimum value of 1 // * Maximum value of 10. // Experimental. 
ParallelizationFactor *float64 `field:"optional" json:"parallelizationFactor" yaml:"parallelizationFactor"` // Allow functions to return partially successful responses for a batch of records. // See: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting // // Experimental. ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"` // Maximum number of retry attempts Valid Range: * Minimum value of 0 * Maximum value of 10000. // Experimental. RetryAttempts *float64 `field:"optional" json:"retryAttempts" yaml:"retryAttempts"` // The size of the tumbling windows to group records sent to DynamoDB or Kinesis Valid Range: 0 - 15 minutes. // Experimental. TumblingWindow awscdk.Duration `field:"optional" json:"tumblingWindow" yaml:"tumblingWindow"` }
Example:
import dynamodb "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var table table var fn function deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue")) fn.AddEventSource(awscdk.NewDynamoEventSource(table, &DynamoEventSourceProps{ StartingPosition: lambda.StartingPosition_TRIM_HORIZON, BatchSize: jsii.Number(5), BisectBatchOnError: jsii.Boolean(true), OnFailure: awscdk.NewSqsDlq(deadLetterQueue), RetryAttempts: jsii.Number(10), }))
Experimental.
type KafkaEventSourceProps ¶
type KafkaEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // The Kafka topic to subscribe to. // Experimental. Topic *string `field:"required" json:"topic" yaml:"topic"` // The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. // Experimental. Secret awssecretsmanager.ISecret `field:"optional" json:"secret" yaml:"secret"` }
Properties for a Kafka event source.
Example:
// The code below shows an example of how to instantiate this type. // The values are placeholders you should change. import monocdk "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var duration duration var secret secret kafkaEventSourceProps := &KafkaEventSourceProps{ StartingPosition: awscdk.Aws_lambda.StartingPosition_TRIM_HORIZON, Topic: jsii.String("topic"), // the properties below are optional BatchSize: jsii.Number(123), Enabled: jsii.Boolean(false), MaxBatchingWindow: duration, Secret: secret, }
Experimental.
type KinesisEventSource ¶
type KinesisEventSource interface { StreamEventSource // The identifier for this EventSourceMapping. // Experimental. EventSourceMappingId() *string // Experimental. Props() *StreamEventSourceProps // Experimental. Stream() awskinesis.IStream // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) // Experimental. EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions }
Use an Amazon Kinesis stream as an event source for AWS Lambda.
Example:
import kinesis "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var myFunction function stream := kinesis.NewStream(this, jsii.String("MyStream")) myFunction.AddEventSource(awscdk.NewKinesisEventSource(stream, &KinesisEventSourceProps{ BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
Experimental.
func NewKinesisEventSource ¶
func NewKinesisEventSource(stream awskinesis.IStream, props *KinesisEventSourceProps) KinesisEventSource
Experimental.
type KinesisEventSourceProps ¶
type KinesisEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // If the function returns an error, split the batch in two and retry. // Experimental. BisectBatchOnError *bool `field:"optional" json:"bisectBatchOnError" yaml:"bisectBatchOnError"` // The maximum age of a record that Lambda sends to a function for processing. // // Valid Range: // * Minimum value of 60 seconds // * Maximum value of 7 days. // Experimental. MaxRecordAge awscdk.Duration `field:"optional" json:"maxRecordAge" yaml:"maxRecordAge"` // An Amazon SQS queue or Amazon SNS topic destination for discarded records. // Experimental. OnFailure awslambda.IEventSourceDlq `field:"optional" json:"onFailure" yaml:"onFailure"` // The number of batches to process from each shard concurrently. // // Valid Range: // * Minimum value of 1 // * Maximum value of 10. // Experimental. 
ParallelizationFactor *float64 `field:"optional" json:"parallelizationFactor" yaml:"parallelizationFactor"` // Allow functions to return partially successful responses for a batch of records. // See: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting // // Experimental. ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"` // Maximum number of retry attempts Valid Range: * Minimum value of 0 * Maximum value of 10000. // Experimental. RetryAttempts *float64 `field:"optional" json:"retryAttempts" yaml:"retryAttempts"` // The size of the tumbling windows to group records sent to DynamoDB or Kinesis Valid Range: 0 - 15 minutes. // Experimental. TumblingWindow awscdk.Duration `field:"optional" json:"tumblingWindow" yaml:"tumblingWindow"` }
Example:
import kinesis "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var myFunction function stream := kinesis.NewStream(this, jsii.String("MyStream")) myFunction.AddEventSource(awscdk.NewKinesisEventSource(stream, &KinesisEventSourceProps{ BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
Experimental.
type ManagedKafkaEventSource ¶
type ManagedKafkaEventSource interface { StreamEventSource // The identifier for this EventSourceMapping. // Experimental. EventSourceMappingId() *string // Experimental. Props() *StreamEventSourceProps // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) // Experimental. EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions }
Use an MSK cluster as a streaming source for AWS Lambda.
Example:
import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var myFunction function // Your MSK cluster arn clusterArn := "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" // The Kafka topic you want to subscribe to topic := "some-cool-topic" // The secret that allows access to your MSK cluster // You still have to make sure that it is associated with your cluster as described in the documentation secret := awscdk.NewSecret(this, jsii.String("Secret"), &SecretProps{ SecretName: jsii.String("AmazonMSK_KafkaSecret"), }) myFunction.AddEventSource(awscdk.NewManagedKafkaEventSource(&ManagedKafkaEventSourceProps{ ClusterArn: jsii.String(clusterArn), Topic: topic, Secret: secret, BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
Experimental.
func NewManagedKafkaEventSource ¶
func NewManagedKafkaEventSource(props *ManagedKafkaEventSourceProps) ManagedKafkaEventSource
Experimental.
type ManagedKafkaEventSourceProps ¶
type ManagedKafkaEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // The Kafka topic to subscribe to. // Experimental. Topic *string `field:"required" json:"topic" yaml:"topic"` // The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. // Experimental. Secret awssecretsmanager.ISecret `field:"optional" json:"secret" yaml:"secret"` // An MSK cluster construct. // Experimental. ClusterArn *string `field:"required" json:"clusterArn" yaml:"clusterArn"` }
Properties for an MSK event source.
Example:
import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var myFunction function // Your MSK cluster arn clusterArn := "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" // The Kafka topic you want to subscribe to topic := "some-cool-topic" // The secret that allows access to your MSK cluster // You still have to make sure that it is associated with your cluster as described in the documentation secret := awscdk.NewSecret(this, jsii.String("Secret"), &SecretProps{ SecretName: jsii.String("AmazonMSK_KafkaSecret"), }) myFunction.AddEventSource(awscdk.NewManagedKafkaEventSource(&ManagedKafkaEventSourceProps{ ClusterArn: jsii.String(clusterArn), Topic: topic, Secret: secret, BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
Experimental.
type S3EventSource ¶
type S3EventSource interface { awslambda.IEventSource // Experimental. Bucket() awss3.Bucket // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) }
Use S3 bucket notifications as an event source for AWS Lambda.
Example:
import eventsources "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var fn function bucket := s3.NewBucket(this, jsii.String("Bucket")) fn.AddEventSource(eventsources.NewS3EventSource(bucket, &S3EventSourceProps{ Events: []eventType{ s3.*eventType_OBJECT_CREATED, s3.*eventType_OBJECT_REMOVED, }, Filters: []notificationKeyFilter{ &notificationKeyFilter{ Prefix: jsii.String("subdir/"), }, }, }))
Experimental.
func NewS3EventSource ¶
func NewS3EventSource(bucket awss3.Bucket, props *S3EventSourceProps) S3EventSource
Experimental.
type S3EventSourceProps ¶
type S3EventSourceProps struct { // The s3 event types that will trigger the notification. // Experimental. Events *[]awss3.EventType `field:"required" json:"events" yaml:"events"` // S3 object key filter rules to determine which objects trigger this event. // // Each filter must include a `prefix` and/or `suffix` that will be matched // against the s3 object key. Refer to the S3 Developer Guide for details // about allowed filter rules. // Experimental. Filters *[]*awss3.NotificationKeyFilter `field:"optional" json:"filters" yaml:"filters"` }
Example:
import eventsources "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var fn function bucket := s3.NewBucket(this, jsii.String("Bucket")) fn.AddEventSource(eventsources.NewS3EventSource(bucket, &S3EventSourceProps{ Events: []eventType{ s3.*eventType_OBJECT_CREATED, s3.*eventType_OBJECT_REMOVED, }, Filters: []notificationKeyFilter{ &notificationKeyFilter{ Prefix: jsii.String("subdir/"), }, }, }))
Experimental.
type SelfManagedKafkaEventSource ¶
type SelfManagedKafkaEventSource interface { StreamEventSource // Experimental. Props() *StreamEventSourceProps // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) // Experimental. EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions }
Use a self-hosted Kafka installation as a streaming source for AWS Lambda.
Example:
import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" // The secret that allows access to your self hosted Kafka cluster var secret secret var myFunction function // The list of Kafka brokers bootstrapServers := []*string{ "kafka-broker:9092", } // The Kafka topic you want to subscribe to topic := "some-cool-topic" myFunction.AddEventSource(awscdk.NewSelfManagedKafkaEventSource(&SelfManagedKafkaEventSourceProps{ BootstrapServers: bootstrapServers, Topic: topic, Secret: secret, BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
Experimental.
func NewSelfManagedKafkaEventSource ¶
func NewSelfManagedKafkaEventSource(props *SelfManagedKafkaEventSourceProps) SelfManagedKafkaEventSource
Experimental.
type SelfManagedKafkaEventSourceProps ¶
type SelfManagedKafkaEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // The Kafka topic to subscribe to. // Experimental. Topic *string `field:"required" json:"topic" yaml:"topic"` // The secret with the Kafka credentials, see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details This field is required if your Kafka brokers are accessed over the Internet. // Experimental. Secret awssecretsmanager.ISecret `field:"optional" json:"secret" yaml:"secret"` // The list of host and port pairs that are the addresses of the Kafka brokers in a "bootstrap" Kafka cluster that a Kafka client connects to initially to bootstrap itself. // // They are in the format `abc.xyz.com:xxxx`. // Experimental. BootstrapServers *[]*string `field:"required" json:"bootstrapServers" yaml:"bootstrapServers"` // The authentication method for your Kafka cluster. // Experimental. 
AuthenticationMethod AuthenticationMethod `field:"optional" json:"authenticationMethod" yaml:"authenticationMethod"` // If your Kafka brokers are only reachable via VPC, provide the security group here. // Experimental. SecurityGroup awsec2.ISecurityGroup `field:"optional" json:"securityGroup" yaml:"securityGroup"` // If your Kafka brokers are only reachable via VPC provide the VPC here. // Experimental. Vpc awsec2.IVpc `field:"optional" json:"vpc" yaml:"vpc"` // If your Kafka brokers are only reachable via VPC, provide the subnets selection here. // Experimental. VpcSubnets *awsec2.SubnetSelection `field:"optional" json:"vpcSubnets" yaml:"vpcSubnets"` }
Properties for a self managed Kafka cluster event source.
If your Kafka cluster is only reachable via VPC, make sure to configure it.
Example:
import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" // The secret that allows access to your self hosted Kafka cluster var secret secret var myFunction function // The list of Kafka brokers bootstrapServers := []*string{ "kafka-broker:9092", } // The Kafka topic you want to subscribe to topic := "some-cool-topic" myFunction.AddEventSource(awscdk.NewSelfManagedKafkaEventSource(&SelfManagedKafkaEventSourceProps{ BootstrapServers: bootstrapServers, Topic: topic, Secret: secret, BatchSize: jsii.Number(100), // default StartingPosition: lambda.StartingPosition_TRIM_HORIZON, }))
Experimental.
type SnsDlq ¶
type SnsDlq interface { awslambda.IEventSourceDlq // Returns a destination configuration for the DLQ. // Experimental. Bind(_target awslambda.IEventSourceMapping, targetHandler awslambda.IFunction) *awslambda.DlqDestinationConfig }
An SNS dead letter queue destination configuration for a Lambda event source.
Example:
// The code below shows an example of how to instantiate this type. // The values are placeholders you should change. import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var topic topic snsDlq := awscdk.Aws_lambda_event_sources.NewSnsDlq(topic)
Experimental.
type SnsEventSource ¶
type SnsEventSource interface { awslambda.IEventSource // Experimental. Topic() awssns.ITopic // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) }
Use an Amazon SNS topic as an event source for AWS Lambda.
Example:
import sns "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var topic topic var fn function deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue")) fn.AddEventSource(awscdk.NewSnsEventSource(topic, &SnsEventSourceProps{ FilterPolicy: map[string]interface{}{ }, DeadLetterQueue: deadLetterQueue, }))
Experimental.
func NewSnsEventSource ¶
func NewSnsEventSource(topic awssns.ITopic, props *SnsEventSourceProps) SnsEventSource
Experimental.
type SnsEventSourceProps ¶
type SnsEventSourceProps struct { // Queue to be used as dead letter queue. // // If not passed no dead letter queue is enabled. // Experimental. DeadLetterQueue awssqs.IQueue `field:"optional" json:"deadLetterQueue" yaml:"deadLetterQueue"` // The filter policy. // Experimental. FilterPolicy *map[string]awssns.SubscriptionFilter `field:"optional" json:"filterPolicy" yaml:"filterPolicy"` }
Properties forwarded to the Lambda Subscription.
Example:
import sns "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var topic topic var fn function deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue")) fn.AddEventSource(awscdk.NewSnsEventSource(topic, &SnsEventSourceProps{ FilterPolicy: map[string]interface{}{ }, DeadLetterQueue: deadLetterQueue, }))
Experimental.
type SqsDlq ¶
type SqsDlq interface { awslambda.IEventSourceDlq // Returns a destination configuration for the DLQ. // Experimental. Bind(_target awslambda.IEventSourceMapping, targetHandler awslambda.IFunction) *awslambda.DlqDestinationConfig }
An SQS dead letter queue destination configuration for a Lambda event source.
Example:
import dynamodb "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var table table var fn function deadLetterQueue := sqs.NewQueue(this, jsii.String("deadLetterQueue")) fn.AddEventSource(awscdk.NewDynamoEventSource(table, &DynamoEventSourceProps{ StartingPosition: lambda.StartingPosition_TRIM_HORIZON, BatchSize: jsii.Number(5), BisectBatchOnError: jsii.Boolean(true), OnFailure: awscdk.NewSqsDlq(deadLetterQueue), RetryAttempts: jsii.Number(10), }))
Experimental.
type SqsEventSource ¶
type SqsEventSource interface { awslambda.IEventSource // The identifier for this EventSourceMapping. // Experimental. EventSourceMappingId() *string // Experimental. Queue() awssqs.IQueue // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(target awslambda.IFunction) }
Use an Amazon SQS queue as an event source for AWS Lambda.
Example:
import "github.com/aws/aws-cdk-go/awscdk" var fn function queue := sqs.NewQueue(this, jsii.String("MyQueue")) eventSource := awscdk.NewSqsEventSource(queue) fn.AddEventSource(eventSource) eventSourceId := eventSource.EventSourceMappingId()
Experimental.
func NewSqsEventSource ¶
func NewSqsEventSource(queue awssqs.IQueue, props *SqsEventSourceProps) SqsEventSource
Experimental.
type SqsEventSourceProps ¶
type SqsEventSourceProps struct { // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: Minimum value of 1. Maximum value of 10. // If `maxBatchingWindow` is configured, this value can go up to 10,000. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the SQS event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Valid Range: Minimum value of 0 minutes. Maximum value of 5 minutes. // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // Allow functions to return partially successful responses for a batch of records. // See: https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting // // Experimental. ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"` }
Example:
import "github.com/aws/aws-cdk-go/awscdk" var fn function queue := sqs.NewQueue(this, jsii.String("MyQueue"), &QueueProps{ VisibilityTimeout: awscdk.Duration_Seconds(jsii.Number(30)), // default, ReceiveMessageWaitTime: awscdk.Duration_*Seconds(jsii.Number(20)), }) fn.AddEventSource(awscdk.NewSqsEventSource(queue, &SqsEventSourceProps{ BatchSize: jsii.Number(10), // default MaxBatchingWindow: awscdk.Duration_Minutes(jsii.Number(5)), ReportBatchItemFailures: jsii.Boolean(true), }))
Experimental.
type StreamEventSource ¶
type StreamEventSource interface { awslambda.IEventSource // Experimental. Props() *StreamEventSourceProps // Called by `lambda.addEventSource` to allow the event source to bind to this function. // Experimental. Bind(_target awslambda.IFunction) // Experimental. EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions }
Use a stream as an event source for AWS Lambda. Experimental.
type StreamEventSourceProps ¶
type StreamEventSourceProps struct { // Where to begin consuming the stream. // Experimental. StartingPosition awslambda.StartingPosition `field:"required" json:"startingPosition" yaml:"startingPosition"` // The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function. // // Your function receives an // event with all the retrieved records. // // Valid Range: // * Minimum value of 1 // * Maximum value of: // * 1000 for {@link DynamoEventSource} // * 10000 for {@link KinesisEventSource}, {@link ManagedKafkaEventSource} and {@link SelfManagedKafkaEventSource}. // Experimental. BatchSize *float64 `field:"optional" json:"batchSize" yaml:"batchSize"` // If the stream event source mapping should be enabled. // Experimental. Enabled *bool `field:"optional" json:"enabled" yaml:"enabled"` // The maximum amount of time to gather records before invoking the function. // // Maximum of Duration.minutes(5) // Experimental. MaxBatchingWindow awscdk.Duration `field:"optional" json:"maxBatchingWindow" yaml:"maxBatchingWindow"` // If the function returns an error, split the batch in two and retry. // Experimental. BisectBatchOnError *bool `field:"optional" json:"bisectBatchOnError" yaml:"bisectBatchOnError"` // The maximum age of a record that Lambda sends to a function for processing. // // Valid Range: // * Minimum value of 60 seconds // * Maximum value of 7 days. // Experimental. MaxRecordAge awscdk.Duration `field:"optional" json:"maxRecordAge" yaml:"maxRecordAge"` // An Amazon SQS queue or Amazon SNS topic destination for discarded records. // Experimental. OnFailure awslambda.IEventSourceDlq `field:"optional" json:"onFailure" yaml:"onFailure"` // The number of batches to process from each shard concurrently. // // Valid Range: // * Minimum value of 1 // * Maximum value of 10. // Experimental. 
ParallelizationFactor *float64 `field:"optional" json:"parallelizationFactor" yaml:"parallelizationFactor"` // Allow functions to return partially successful responses for a batch of records. // See: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting // // Experimental. ReportBatchItemFailures *bool `field:"optional" json:"reportBatchItemFailures" yaml:"reportBatchItemFailures"` // Maximum number of retry attempts Valid Range: * Minimum value of 0 * Maximum value of 10000. // Experimental. RetryAttempts *float64 `field:"optional" json:"retryAttempts" yaml:"retryAttempts"` // The size of the tumbling windows to group records sent to DynamoDB or Kinesis Valid Range: 0 - 15 minutes. // Experimental. TumblingWindow awscdk.Duration `field:"optional" json:"tumblingWindow" yaml:"tumblingWindow"` }
The set of properties for streaming event sources shared by Dynamo and Kinesis.
Example:
// The code below shows an example of how to instantiate this type. // The values are placeholders you should change. import monocdk "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" import "github.com/aws/aws-cdk-go/awscdk" var duration duration var eventSourceDlq iEventSourceDlq streamEventSourceProps := &StreamEventSourceProps{ StartingPosition: awscdk.Aws_lambda.StartingPosition_TRIM_HORIZON, // the properties below are optional BatchSize: jsii.Number(123), BisectBatchOnError: jsii.Boolean(false), Enabled: jsii.Boolean(false), MaxBatchingWindow: duration, MaxRecordAge: duration, OnFailure: eventSourceDlq, ParallelizationFactor: jsii.Number(123), ReportBatchItemFailures: jsii.Boolean(false), RetryAttempts: jsii.Number(123), TumblingWindow: duration, }
Experimental.
Source Files ¶
- ApiEventSource.go
- ApiEventSource__checks.go
- AuthenticationMethod.go
- BaseStreamEventSourceProps.go
- DynamoEventSource.go
- DynamoEventSourceProps.go
- DynamoEventSource__checks.go
- KafkaEventSourceProps.go
- KinesisEventSource.go
- KinesisEventSourceProps.go
- KinesisEventSource__checks.go
- ManagedKafkaEventSource.go
- ManagedKafkaEventSourceProps.go
- ManagedKafkaEventSource__checks.go
- S3EventSource.go
- S3EventSourceProps.go
- S3EventSource__checks.go
- SelfManagedKafkaEventSource.go
- SelfManagedKafkaEventSourceProps.go
- SelfManagedKafkaEventSource__checks.go
- SnsDlq.go
- SnsDlq__checks.go
- SnsEventSource.go
- SnsEventSourceProps.go
- SnsEventSource__checks.go
- SqsDlq.go
- SqsDlq__checks.go
- SqsEventSource.go
- SqsEventSourceProps.go
- SqsEventSource__checks.go
- StreamEventSource.go
- StreamEventSourceProps.go
- StreamEventSource__checks.go
- main.go