pipelines

package
v2.6.0
Published: Jan 12, 2022 License: Apache-2.0 Imports: 17 Imported by: 4

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func CodeBuildStep_Sequence added in v2.4.0

func CodeBuildStep_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

func CodePipelineSource_Sequence added in v2.4.0

func CodePipelineSource_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

func CodePipeline_IsConstruct

func CodePipeline_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Returns: true if `x` is an object created from a class which extends `Construct`.

Deprecated: use `x instanceof Construct` instead.

func ConfirmPermissionsBroadening_Sequence added in v2.4.0

func ConfirmPermissionsBroadening_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

func ManualApprovalStep_Sequence added in v2.4.0

func ManualApprovalStep_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

func NewArtifactMap_Override

func NewArtifactMap_Override(a ArtifactMap)

func NewCodeBuildStep_Override

func NewCodeBuildStep_Override(c CodeBuildStep, id *string, props *CodeBuildStepProps)

func NewCodePipelineSource_Override

func NewCodePipelineSource_Override(c CodePipelineSource, id *string)

func NewCodePipeline_Override

func NewCodePipeline_Override(c CodePipeline, scope constructs.Construct, id *string, props *CodePipelineProps)

func NewConfirmPermissionsBroadening_Override

func NewConfirmPermissionsBroadening_Override(c ConfirmPermissionsBroadening, id *string, props *PermissionsBroadeningCheckProps)

func NewDockerCredential_Override

func NewDockerCredential_Override(d DockerCredential, usages *[]DockerCredentialUsage)

func NewFileSet_Override

func NewFileSet_Override(f FileSet, id *string, producer Step)

func NewManualApprovalStep_Override

func NewManualApprovalStep_Override(m ManualApprovalStep, id *string, props *ManualApprovalStepProps)

func NewPipelineBase_Override

func NewPipelineBase_Override(p PipelineBase, scope constructs.Construct, id *string, props *PipelineBaseProps)

func NewShellStep_Override

func NewShellStep_Override(s ShellStep, id *string, props *ShellStepProps)

func NewStep_Override

func NewStep_Override(s Step, id *string)

func NewWave_Override

func NewWave_Override(w Wave, id *string, props *WaveProps)

func PipelineBase_IsConstruct

func PipelineBase_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Returns: true if `x` is an object created from a class which extends `Construct`.

Deprecated: use `x instanceof Construct` instead.

func ShellStep_Sequence added in v2.4.0

func ShellStep_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

func Step_Sequence added in v2.4.0

func Step_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.
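A minimal Go sketch of sequencing two steps so they run one after the other instead of in parallel (step ids and commands are placeholders; `pipeline` and `stage` are assumed to exist):

```go
// Without Step_Sequence, post-deployment steps run in parallel; wrapping them
// in a sequence forces strict ordering. Ids and commands are placeholders.
ordered := pipelines.Step_Sequence(&[]pipelines.Step{
	pipelines.NewShellStep(jsii.String("IntegTest"), &pipelines.ShellStepProps{
		Commands: jsii.Strings("npm run integ-test"),
	}),
	pipelines.NewShellStep(jsii.String("LoadTest"), &pipelines.ShellStepProps{
		Commands: jsii.Strings("npm run load-test"),
	}),
})

pipeline.AddStage(stage, &pipelines.AddStageOpts{Post: ordered})
```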

Types

type AddStageOpts

type AddStageOpts struct {
	// Additional steps to run after all of the stacks in the stage.
	Post *[]Step `json:"post"`
	// Additional steps to run before any of the stacks in the stage.
	Pre *[]Step `json:"pre"`
	// Instructions for stack level steps.
	StackSteps *[]*StackSteps `json:"stackSteps"`
}

Options to pass to `addStage`.

TODO: EXAMPLE
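A hedged sketch of passing `AddStageOpts` to `addStage` (step ids and commands are placeholders; `pipeline` and `prodStage` are assumed to exist):

```go
pipeline.AddStage(prodStage, &pipelines.AddStageOpts{
	// Runs before any stack in the stage is deployed.
	Pre: &[]pipelines.Step{
		pipelines.NewShellStep(jsii.String("PreCheck"), &pipelines.ShellStepProps{
			Commands: jsii.Strings("./scripts/pre-deploy-check.sh"),
		}),
	},
	// Runs after all stacks in the stage have been deployed.
	Post: &[]pipelines.Step{
		pipelines.NewShellStep(jsii.String("SmokeTest"), &pipelines.ShellStepProps{
			Commands: jsii.Strings("./scripts/smoke-test.sh"),
		}),
	},
})
```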

type ArtifactMap

type ArtifactMap interface {
	ToCodePipeline(x FileSet) awscodepipeline.Artifact
}

Translate FileSets to CodePipeline Artifacts.

TODO: EXAMPLE

func NewArtifactMap

func NewArtifactMap() ArtifactMap

type AssetType

type AssetType string

Type of the asset that is being published.

const (
	AssetType_FILE         AssetType = "FILE"
	AssetType_DOCKER_IMAGE AssetType = "DOCKER_IMAGE"
)

type CodeBuildOptions

type CodeBuildOptions struct {
	// Partial build environment, will be combined with other build environments that apply.
	BuildEnvironment *awscodebuild.BuildEnvironment `json:"buildEnvironment"`
	// Partial buildspec, will be combined with other buildspecs that apply.
	//
	// The BuildSpec must be available inline--it cannot reference a file
	// on disk.
	PartialBuildSpec awscodebuild.BuildSpec `json:"partialBuildSpec"`
	// Policy statements to add to role.
	RolePolicy *[]awsiam.PolicyStatement `json:"rolePolicy"`
	// Which security group(s) to associate with the project network interfaces.
	//
	// Only used if 'vpc' is supplied.
	SecurityGroups *[]awsec2.ISecurityGroup `json:"securityGroups"`
	// Which subnets to use.
	//
	// Only used if 'vpc' is supplied.
	SubnetSelection *awsec2.SubnetSelection `json:"subnetSelection"`
	// The number of minutes after which AWS CodeBuild stops the build if it's not complete.
	//
	// For valid values, see the timeoutInMinutes field in the AWS
	// CodeBuild User Guide.
	Timeout awscdk.Duration `json:"timeout"`
	// The VPC where to create the CodeBuild network interfaces in.
	Vpc awsec2.IVpc `json:"vpc"`
}

Options for customizing a single CodeBuild project.

TODO: EXAMPLE

type CodeBuildStep

type CodeBuildStep interface {
	ShellStep
	BuildEnvironment() *awscodebuild.BuildEnvironment
	Commands() *[]*string
	Dependencies() *[]Step
	DependencyFileSets() *[]FileSet
	Env() *map[string]*string
	EnvFromCfnOutputs() *map[string]StackOutputReference
	GrantPrincipal() awsiam.IPrincipal
	Id() *string
	Inputs() *[]*FileSetLocation
	InstallCommands() *[]*string
	IsSource() *bool
	Outputs() *[]*FileSetLocation
	PartialBuildSpec() awscodebuild.BuildSpec
	PrimaryOutput() FileSet
	Project() awscodebuild.IProject
	ProjectName() *string
	Role() awsiam.IRole
	RolePolicyStatements() *[]awsiam.PolicyStatement
	SecurityGroups() *[]awsec2.ISecurityGroup
	SubnetSelection() *awsec2.SubnetSelection
	Timeout() awscdk.Duration
	Vpc() awsec2.IVpc
	AddDependencyFileSet(fs FileSet)
	AddOutputDirectory(directory *string) FileSet
	AddStepDependency(step Step)
	ConfigurePrimaryOutput(fs FileSet)
	PrimaryOutputDirectory(directory *string) FileSet
	ToString() *string
}

Run a script as a CodeBuild Project.

TODO: EXAMPLE
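A hedged sketch of a `CodeBuildStep` used as the synth step with extra IAM permissions and a larger build environment (repository, policy and compute values are illustrative; assumes the `awsiam` and `awscodebuild` modules of aws-cdk-go):

```go
synth := pipelines.NewCodeBuildStep(jsii.String("Synth"), &pipelines.CodeBuildStepProps{
	Input:    pipelines.CodePipelineSource_GitHub(jsii.String("owner/repo"), jsii.String("main"), nil),
	Commands: jsii.Strings("npm ci", "npm run build", "npx cdk synth"),
	// Grant the build extra permissions, e.g. to pull from CodeArtifact.
	RolePolicyStatements: &[]awsiam.PolicyStatement{
		awsiam.NewPolicyStatement(&awsiam.PolicyStatementProps{
			Actions:   jsii.Strings("codeartifact:GetAuthorizationToken"),
			Resources: jsii.Strings("*"),
		}),
	},
	// Use a larger compute type for the synth project.
	BuildEnvironment: &awscodebuild.BuildEnvironment{
		ComputeType: awscodebuild.ComputeType_LARGE,
	},
})
```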

func NewCodeBuildStep

func NewCodeBuildStep(id *string, props *CodeBuildStepProps) CodeBuildStep

type CodeBuildStepProps

type CodeBuildStepProps struct {
	// Commands to run.
	Commands *[]*string `json:"commands"`
	// Additional FileSets to put in other directories.
	//
	// Specifies a mapping from directory name to FileSets. During the
	// script execution, the FileSets will be available in the directories
	// indicated.
	//
	// The directory names may be relative. For example, you can put
	// the main input and an additional input side-by-side with the
	// following configuration:
	//
	// ```ts
	// const script = new pipelines.ShellStep('MainScript', {
	//    commands: ['npm ci','npm run build','npx cdk synth'],
	//    input: pipelines.CodePipelineSource.gitHub('org/source1', 'main'),
	//    additionalInputs: {
	//      '../siblingdir': pipelines.CodePipelineSource.gitHub('org/source2', 'main'),
	//    }
	// });
	// ```
	AdditionalInputs *map[string]IFileSetProducer `json:"additionalInputs"`
	// Environment variables to set.
	Env *map[string]*string `json:"env"`
	// Set environment variables based on Stack Outputs.
	//
	// `ShellStep`s following stack or stage deployments may
	// access the `CfnOutput`s of those stacks to get access to
	// --for example--automatically generated resource names or
	// endpoint URLs.
	EnvFromCfnOutputs *map[string]awscdk.CfnOutput `json:"envFromCfnOutputs"`
	// FileSet to run these scripts on.
	//
	// The files in the FileSet will be placed in the working directory when
	// the script is executed. Use `additionalInputs` to download file sets
	// to other directories as well.
	Input IFileSetProducer `json:"input"`
	// Installation commands to run before the regular commands.
	//
	// For deployment engines that support it, install commands will be classified
	// differently in the job history from the regular `commands`.
	InstallCommands *[]*string `json:"installCommands"`
	// The directory that will contain the primary output fileset.
	//
	// After running the script, the contents of the given directory
	// will be treated as the primary output of this Step.
	PrimaryOutputDirectory *string `json:"primaryOutputDirectory"`
	// Changes to environment.
	//
	// This environment will be combined with the pipeline's default
	// environment.
	BuildEnvironment *awscodebuild.BuildEnvironment `json:"buildEnvironment"`
	// Additional configuration that can only be configured via BuildSpec.
	//
	// You should not use this to specify output artifacts; those
	// should be supplied via the other properties of this class, otherwise
	// CDK Pipelines won't be able to inspect the artifacts.
	//
	// Set the `commands` to an empty array if you want to fully specify
	// the BuildSpec using this field.
	//
	// The BuildSpec must be available inline--it cannot reference a file
	// on disk.
	PartialBuildSpec awscodebuild.BuildSpec `json:"partialBuildSpec"`
	// Name for the generated CodeBuild project.
	ProjectName *string `json:"projectName"`
	// Custom execution role to be used for the CodeBuild project.
	Role awsiam.IRole `json:"role"`
	// Policy statements to add to role used during the synth.
	//
	// Can be used to add access to a CodeArtifact repository etc.
	RolePolicyStatements *[]awsiam.PolicyStatement `json:"rolePolicyStatements"`
	// Which security group to associate with the script's project network interfaces.
	//
	// If no security group is identified, one will be created automatically.
	//
	// Only used if 'vpc' is supplied.
	SecurityGroups *[]awsec2.ISecurityGroup `json:"securityGroups"`
	// Which subnets to use.
	//
	// Only used if 'vpc' is supplied.
	SubnetSelection *awsec2.SubnetSelection `json:"subnetSelection"`
	// The number of minutes after which AWS CodeBuild stops the build if it's not complete.
	//
	// For valid values, see the timeoutInMinutes field in the AWS
	// CodeBuild User Guide.
	Timeout awscdk.Duration `json:"timeout"`
	// The VPC where to execute the SimpleSynth.
	Vpc awsec2.IVpc `json:"vpc"`
}

Construction props for a CodeBuildStep.

TODO: EXAMPLE

type CodeCommitSourceOptions

type CodeCommitSourceOptions struct {
	// Whether the output should be the contents of the repository (which is the default), or a link that allows CodeBuild to clone the repository before building.
	//
	// **Note**: if this option is true,
	// then only CodeBuild actions can use the resulting {@link output}.
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference-CodeCommit.html
	//
	CodeBuildCloneOutput *bool `json:"codeBuildCloneOutput"`
	// Role to be used by on commit event rule.
	//
	// Used only when trigger value is CodeCommitTrigger.EVENTS.
	EventRole awsiam.IRole `json:"eventRole"`
	// How should CodePipeline detect source changes for this Action.
	Trigger awscodepipelineactions.CodeCommitTrigger `json:"trigger"`
}

Configuration options for a CodeCommit source.

TODO: EXAMPLE

type CodePipeline

type CodePipeline interface {
	PipelineBase
	CloudAssemblyFileSet() FileSet
	Node() constructs.Node
	Pipeline() awscodepipeline.Pipeline
	Synth() IFileSetProducer
	SynthProject() awscodebuild.IProject
	Waves() *[]Wave
	AddStage(stage awscdk.Stage, options *AddStageOpts) StageDeployment
	AddWave(id *string, options *WaveOptions) Wave
	BuildPipeline()
	DoBuildPipeline()
	ToString() *string
}

A CDK Pipeline that uses CodePipeline to deploy CDK apps.

This is a `Pipeline` with its `engine` property set to `CodePipelineEngine`, and exists for nicer ergonomics for users that don't need to switch out engines.

TODO: EXAMPLE
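A minimal, self-contained Go sketch of a self-mutating pipeline with one application stage (the aws-cdk-go v2 and jsii-runtime-go module paths are assumed; the repository, branch and build commands are placeholders):

```go
package main

import (
	"github.com/aws/aws-cdk-go/awscdk/v2"
	"github.com/aws/aws-cdk-go/awscdk/v2/pipelines"
	"github.com/aws/jsii-runtime-go"
)

func main() {
	app := awscdk.NewApp(nil)

	// Stack that hosts the pipeline itself.
	pipelineStack := awscdk.NewStack(app, jsii.String("PipelineStack"), nil)

	// The synth step must produce the cdk.out cloud assembly as its primary
	// output; a ShellStep running `cdk synth` does that by default.
	pipeline := pipelines.NewCodePipeline(pipelineStack, jsii.String("Pipeline"), &pipelines.CodePipelineProps{
		Synth: pipelines.NewShellStep(jsii.String("Synth"), &pipelines.ShellStepProps{
			Input:    pipelines.CodePipelineSource_GitHub(jsii.String("owner/repo"), jsii.String("main"), nil),
			Commands: jsii.Strings("npm ci", "npm run build", "npx cdk synth"),
		}),
	})

	// An application stage; a real app would define its stacks inside the stage.
	appStage := awscdk.NewStage(app, jsii.String("Prod"), nil)
	awscdk.NewStack(appStage, jsii.String("AppStack"), nil)

	pipeline.AddStage(appStage, nil)

	app.Synth(nil)
}
```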

func NewCodePipeline

func NewCodePipeline(scope constructs.Construct, id *string, props *CodePipelineProps) CodePipeline

type CodePipelineActionFactoryResult

type CodePipelineActionFactoryResult struct {
	// How many RunOrders were consumed.
	RunOrdersConsumed *float64 `json:"runOrdersConsumed"`
	// If a CodeBuild project got created, the project.
	Project awscodebuild.IProject `json:"project"`
}

The result of adding actions to the pipeline.

TODO: EXAMPLE

type CodePipelineFileSet

type CodePipelineFileSet interface {
	FileSet
	Id() *string
	PrimaryOutput() FileSet
	Producer() Step
	ProducedBy(producer Step)
	ToString() *string
}

A FileSet created from a CodePipeline artifact.

You only need to use this if you want to add CDK Pipeline stages at the end of an existing CodePipeline, which should be very rare.

TODO: EXAMPLE

func CodePipelineFileSet_FromArtifact

func CodePipelineFileSet_FromArtifact(artifact awscodepipeline.Artifact) CodePipelineFileSet

Turn a CodePipeline Artifact into a FileSet.

type CodePipelineProps

type CodePipelineProps struct {
	// The build step that produces the CDK Cloud Assembly.
	//
	// The primary output of this step needs to be the `cdk.out` directory
	// generated by the `cdk synth` command.
	//
	// If you use a `ShellStep` here and you don't configure an output directory,
	// the output directory will automatically be assumed to be `cdk.out`.
	Synth IFileSetProducer `json:"synth"`
	// Additional customizations to apply to the asset publishing CodeBuild projects.
	AssetPublishingCodeBuildDefaults *CodeBuildOptions `json:"assetPublishingCodeBuildDefaults"`
	// CDK CLI version to use in self-mutation and asset publishing steps.
	//
	// If you want to lock the CDK CLI version used in the pipeline, by steps
	// that are automatically generated for you, specify the version here.
	//
	// We recommend you do not specify this value, as not specifying it always
	// uses the latest CLI version which is backwards compatible with old versions.
	//
	// If you do specify it, be aware that this version should always be equal to or higher than the
	// version of the CDK framework used by the CDK app, when the CDK commands are
	// run during your pipeline execution. When you change this version, the *next
	// time* the `SelfMutate` step runs it will still be using the CLI of the
	// *previous* version that was in this property: it will only start using the
	// new version after `SelfMutate` completes successfully. That means that if
	// you want to update both framework and CLI version, you should update the
	// CLI version first, commit, push and deploy, and only then update the
	// framework version.
	CliVersion *string `json:"cliVersion"`
	// Customize the CodeBuild projects created for this pipeline.
	CodeBuildDefaults *CodeBuildOptions `json:"codeBuildDefaults"`
	// An existing Pipeline to be reused and built upon.
	//
	// [disable-awslint:ref-via-interface]
	CodePipeline awscodepipeline.Pipeline `json:"codePipeline"`
	// Create KMS keys for the artifact buckets, allowing cross-account deployments.
	//
	// The artifact buckets have to be encrypted to support deploying CDK apps to
	// another account, so if you want to do that or want to have your artifact
	// buckets encrypted, be sure to set this value to `true`.
	//
	// Be aware there is a cost associated with maintaining the KMS keys.
	CrossAccountKeys *bool `json:"crossAccountKeys"`
	// A list of credentials used to authenticate to Docker registries.
	//
	// Specify any credentials necessary within the pipeline to build, synth, update, or publish assets.
	DockerCredentials *[]DockerCredential `json:"dockerCredentials"`
	// Enable Docker for the self-mutate step.
	//
	// Set this to true if the pipeline itself uses Docker container assets
	// (for example, if you use `LinuxBuildImage.fromAsset()` as the build
	// image of a CodeBuild step in the pipeline).
	//
	// You do not need to set it if you build Docker image assets in the
	// application Stages and Stacks that are *deployed* by this pipeline.
	//
	// Configures privileged mode for the self-mutation CodeBuild action.
	//
	// If you are about to turn this on in an already-deployed Pipeline,
	// set the value to `true` first, commit and allow the pipeline to
	// self-update, and only then use the Docker asset in the pipeline.
	DockerEnabledForSelfMutation *bool `json:"dockerEnabledForSelfMutation"`
	// Enable Docker for the 'synth' step.
	//
	// Set this to true if you are using file assets that require
	// "bundling" anywhere in your application (meaning an asset
	// compilation step will be run with the tools provided by
	// a Docker image), both for the Pipeline stack as well as the
	// application stacks.
	//
	// A common way to use bundling assets in your application is by
	// using the `@aws-cdk/aws-lambda-nodejs` library.
	//
	// Configures privileged mode for the synth CodeBuild action.
	//
	// If you are about to turn this on in an already-deployed Pipeline,
	// set the value to `true` first, commit and allow the pipeline to
	// self-update, and only then use the bundled asset.
	DockerEnabledForSynth *bool `json:"dockerEnabledForSynth"`
	// The name of the CodePipeline pipeline.
	PipelineName *string `json:"pipelineName"`
	// Publish assets in multiple CodeBuild projects.
	//
	// If set to false, use one Project per type to publish all assets.
	//
	// Publishing in parallel improves concurrency and may reduce publishing
	// latency, but may also increase overall provisioning time of the CodeBuild
	// projects.
	//
	// Experiment and see what value works best for you.
	PublishAssetsInParallel *bool `json:"publishAssetsInParallel"`
	// Reuse the same cross region support stack for all pipelines in the App.
	ReuseCrossRegionSupportStacks *bool `json:"reuseCrossRegionSupportStacks"`
	// Whether the pipeline will update itself.
	//
	// This needs to be set to `true` to allow the pipeline to reconfigure
	// itself when assets or stages are being added to it, and `true` is the
	// recommended setting.
	//
	// You can temporarily set this to `false` while you are iterating
	// on the pipeline itself and prefer to deploy changes using `cdk deploy`.
	SelfMutation *bool `json:"selfMutation"`
	// Additional customizations to apply to the self mutation CodeBuild projects.
	SelfMutationCodeBuildDefaults *CodeBuildOptions `json:"selfMutationCodeBuildDefaults"`
	// Additional customizations to apply to the synthesize CodeBuild projects.
	SynthCodeBuildDefaults *CodeBuildOptions `json:"synthCodeBuildDefaults"`
}

Properties for a `CodePipeline`.

TODO: EXAMPLE
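A hedged sketch of a few of these properties (values are illustrative only; `pipelineStack` and `synthStep` are assumed to exist):

```go
pipelines.NewCodePipeline(pipelineStack, jsii.String("Pipeline"), &pipelines.CodePipelineProps{
	Synth: synthStep,
	// Needed for cross-account deployments; creates KMS keys (which incur cost).
	CrossAccountKeys: jsii.Bool(true),
	// Required if the synth step bundles assets with Docker.
	DockerEnabledForSynth: jsii.Bool(true),
	// Publish each asset from its own CodeBuild project.
	PublishAssetsInParallel: jsii.Bool(true),
	// Apply a default timeout to every CodeBuild project the pipeline creates.
	CodeBuildDefaults: &pipelines.CodeBuildOptions{
		Timeout: awscdk.Duration_Minutes(jsii.Number(30)),
	},
})
```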

type CodePipelineSource

type CodePipelineSource interface {
	Step
	ICodePipelineActionFactory
	Dependencies() *[]Step
	DependencyFileSets() *[]FileSet
	Id() *string
	IsSource() *bool
	PrimaryOutput() FileSet
	AddDependencyFileSet(fs FileSet)
	AddStepDependency(step Step)
	ConfigurePrimaryOutput(fs FileSet)
	GetAction(output awscodepipeline.Artifact, actionName *string, runOrder *float64) awscodepipelineactions.Action
	ProduceAction(stage awscodepipeline.IStage, options *ProduceActionOptions) *CodePipelineActionFactoryResult
	ToString() *string
}

Factory for CodePipeline source steps.

This class contains a number of factory methods for the different types of sources that CodePipeline supports.

TODO: EXAMPLE

func CodePipelineSource_CodeCommit

func CodePipelineSource_CodeCommit(repository awscodecommit.IRepository, branch *string, props *CodeCommitSourceOptions) CodePipelineSource

Returns a CodeCommit source.

TODO: EXAMPLE

func CodePipelineSource_Connection

func CodePipelineSource_Connection(repoString *string, branch *string, props *ConnectionSourceOptions) CodePipelineSource

Returns a CodeStar connection source.

A CodeStar connection allows AWS CodePipeline to access external resources, such as repositories in GitHub, GitHub Enterprise or Bitbucket.

To use this method, you first need to create a CodeStar connection using the AWS console. In the process, you may have to sign in to the external provider -- GitHub, for example -- to authorize AWS to read and modify your repository. Once you have done this, copy the connection ARN and use it to create the source.

Example:

```ts

pipelines.CodePipelineSource.connection('owner/repo', 'main', {
   connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console
});

```

See: https://docs.aws.amazon.com/dtconsole/latest/userguide/welcome-connections.html

func CodePipelineSource_GitHub

func CodePipelineSource_GitHub(repoString *string, branch *string, props *GitHubSourceOptions) CodePipelineSource

Returns a GitHub source, using OAuth tokens to authenticate with GitHub and a separate webhook to detect changes.

This is no longer the recommended method. Please consider using `connection()` instead.

Pass in the owner and repository in a single string, like this:

```ts
pipelines.CodePipelineSource.gitHub('owner/repo', 'main');
```

Authentication will be done by a secret called `github-token` in AWS Secrets Manager (unless specified otherwise).

The token should have these permissions:

* **repo** - to read the repository
* **admin:repo_hook** - if you plan to use webhooks (true by default)

func CodePipelineSource_S3

func CodePipelineSource_S3(bucket awss3.IBucket, objectKey *string, props *S3SourceOptions) CodePipelineSource

Returns an S3 source.

TODO: EXAMPLE
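A hedged sketch of an S3 source (bucket name and object key are placeholders; assumes the `awss3` and `awscodepipelineactions` modules and an existing `scope`):

```go
bucket := awss3.Bucket_FromBucketName(scope, jsii.String("SourceBucket"), jsii.String("my-source-bucket"))

// The returned source can then be used as the Input of a ShellStep or CodeBuildStep.
source := pipelines.CodePipelineSource_S3(bucket, jsii.String("releases/source.zip"), &pipelines.S3SourceOptions{
	// Use CloudWatch Events (requires the bucket to be covered by a CloudTrail trail).
	Trigger: awscodepipelineactions.S3Trigger_EVENTS,
})
```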

type ConfirmPermissionsBroadening

type ConfirmPermissionsBroadening interface {
	Step
	ICodePipelineActionFactory
	Dependencies() *[]Step
	DependencyFileSets() *[]FileSet
	Id() *string
	IsSource() *bool
	PrimaryOutput() FileSet
	AddDependencyFileSet(fs FileSet)
	AddStepDependency(step Step)
	ConfigurePrimaryOutput(fs FileSet)
	ProduceAction(stage awscodepipeline.IStage, options *ProduceActionOptions) *CodePipelineActionFactoryResult
	ToString() *string
}

Pause the pipeline if a deployment would add IAM permissions or Security Group rules.

This step is only supported in CodePipeline pipelines.

TODO: EXAMPLE

type ConnectionSourceOptions

type ConnectionSourceOptions struct {
	// The ARN of the CodeStar Connection created in the AWS console that has permissions to access this GitHub or Bitbucket repository.
	//
	// TODO: EXAMPLE
	//
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/connections-create.html
	//
	ConnectionArn *string `json:"connectionArn"`
	// Whether the output should be the contents of the repository (which is the default), or a link that allows CodeBuild to clone the repository before building.
	//
	// **Note**: if this option is true,
	// then only CodeBuild actions can use the resulting {@link output}.
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference-CodestarConnectionSource.html#action-reference-CodestarConnectionSource-config
	//
	CodeBuildCloneOutput *bool `json:"codeBuildCloneOutput"`
	// Controls automatically starting your pipeline when a new commit is made on the configured repository and branch.
	//
	// If unspecified,
	// the default value is true, and the field does not display by default.
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference-CodestarConnectionSource.html
	//
	TriggerOnPush *bool `json:"triggerOnPush"`
}

Configuration options for CodeStar source.

TODO: EXAMPLE

type DockerCredential

type DockerCredential interface {
	Usages() *[]DockerCredentialUsage
	GrantRead(grantee awsiam.IGrantable, usage DockerCredentialUsage)
}

Represents credentials used to access a Docker registry.

TODO: EXAMPLE
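A hedged sketch of registering registry credentials with a pipeline (secret names and the registry domain are placeholders; assumes the `awssecretsmanager` module, and `pipelineStack` and `synthStep` are assumed to exist):

```go
dockerHubSecret := awssecretsmanager.Secret_FromSecretNameV2(pipelineStack, jsii.String("DockerHubSecret"), jsii.String("dockerhub-credentials"))
customRegSecret := awssecretsmanager.Secret_FromSecretNameV2(pipelineStack, jsii.String("CustomRegSecret"), jsii.String("custom-registry-credentials"))

pipelines.NewCodePipeline(pipelineStack, jsii.String("Pipeline"), &pipelines.CodePipelineProps{
	Synth: synthStep,
	DockerCredentials: &[]pipelines.DockerCredential{
		pipelines.DockerCredential_DockerHub(dockerHubSecret, nil),
		// Restrict the custom registry credentials to the synth step only.
		pipelines.DockerCredential_CustomRegistry(jsii.String("registry.example.com"), customRegSecret, &pipelines.ExternalDockerCredentialOptions{
			Usages: &[]pipelines.DockerCredentialUsage{pipelines.DockerCredentialUsage_SYNTH},
		}),
	},
})
```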

func DockerCredential_CustomRegistry

func DockerCredential_CustomRegistry(registryDomain *string, secret awssecretsmanager.ISecret, opts *ExternalDockerCredentialOptions) DockerCredential

Creates a DockerCredential for a registry, based on its domain name (e.g., 'www.example.com').

func DockerCredential_DockerHub

func DockerCredential_DockerHub(secret awssecretsmanager.ISecret, opts *ExternalDockerCredentialOptions) DockerCredential

Creates a DockerCredential for DockerHub.

Convenience method for `fromCustomRegistry('index.docker.io', opts)`.

func DockerCredential_Ecr

func DockerCredential_Ecr(repositories *[]awsecr.IRepository, opts *EcrDockerCredentialOptions) DockerCredential

Creates a DockerCredential for one or more ECR repositories.

NOTE - All ECR repositories in the same account and region share a domain name (e.g., 0123456789012.dkr.ecr.eu-west-1.amazonaws.com), and can only have one associated set of credentials (and DockerCredential). Attempting to associate one set of credentials with one ECR repo and another with another ECR repo in the same account and region will result in failures when using these credentials in the pipeline.

type DockerCredentialUsage

type DockerCredentialUsage string

Defines which stages of a pipeline require the specified credentials.

TODO: EXAMPLE

const (
	DockerCredentialUsage_SYNTH            DockerCredentialUsage = "SYNTH"
	DockerCredentialUsage_SELF_UPDATE      DockerCredentialUsage = "SELF_UPDATE"
	DockerCredentialUsage_ASSET_PUBLISHING DockerCredentialUsage = "ASSET_PUBLISHING"
)

type EcrDockerCredentialOptions

type EcrDockerCredentialOptions struct {
	// An IAM role to assume prior to accessing the secret.
	AssumeRole awsiam.IRole `json:"assumeRole"`
	// Defines which stages of the pipeline should be granted access to these credentials.
	Usages *[]DockerCredentialUsage `json:"usages"`
}

Options for defining access for a Docker Credential composed of ECR repos.

TODO: EXAMPLE

type ExternalDockerCredentialOptions

type ExternalDockerCredentialOptions struct {
	// An IAM role to assume prior to accessing the secret.
	AssumeRole awsiam.IRole `json:"assumeRole"`
	// The name of the JSON field of the secret which contains the secret/password.
	SecretPasswordField *string `json:"secretPasswordField"`
	// The name of the JSON field of the secret which contains the user/login name.
	SecretUsernameField *string `json:"secretUsernameField"`
	// Defines which stages of the pipeline should be granted access to these credentials.
	Usages *[]DockerCredentialUsage `json:"usages"`
}

Options for defining credentials for a Docker Credential.

TODO: EXAMPLE

type FileSet

type FileSet interface {
	IFileSetProducer
	Id() *string
	PrimaryOutput() FileSet
	Producer() Step
	ProducedBy(producer Step)
	ToString() *string
}

A set of files traveling through the deployment pipeline.

Individual steps in the pipeline produce or consume `FileSet`s.

TODO: EXAMPLE

func NewFileSet

func NewFileSet(id *string, producer Step) FileSet

type FileSetLocation

type FileSetLocation struct {
	// The (relative) directory where the FileSet is found.
	Directory *string `json:"directory"`
	// The FileSet object.
	FileSet FileSet `json:"fileSet"`
}

Location of a FileSet consumed or produced by a ShellStep.

TODO: EXAMPLE

type GitHubSourceOptions

type GitHubSourceOptions struct {
	// A GitHub OAuth token to use for authentication.
	//
	// It is recommended to use a Secrets Manager `Secret` to obtain the token:
	//
	// ```ts
	// const oauth = cdk.SecretValue.secretsManager('my-github-token');
	// ```
	//
	// The GitHub Personal Access Token should have these scopes:
	//
	// * **repo** - to read the repository
	// * **admin:repo_hook** - if you plan to use webhooks (true by default)
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/GitHub-create-personal-token-CLI.html
	//
	Authentication awscdk.SecretValue `json:"authentication"`
	// How AWS CodePipeline should be triggered.
	//
	// With the default value "WEBHOOK", a webhook is created in GitHub that triggers the action.
	// With "POLL", CodePipeline periodically checks the source for changes.
	// With "None", the action is not triggered through changes in the source.
	//
	// To use `WEBHOOK`, your GitHub Personal Access Token should have
	// **admin:repo_hook** scope (in addition to the regular **repo** scope).
	Trigger awscodepipelineactions.GitHubTrigger `json:"trigger"`
}

Options for GitHub sources.

TODO: EXAMPLE

type ICodePipelineActionFactory

type ICodePipelineActionFactory interface {
	// Create the desired Action and add it to the pipeline.
	ProduceAction(stage awscodepipeline.IStage, options *ProduceActionOptions) *CodePipelineActionFactoryResult
}

Factory for explicit CodePipeline Actions.

If you have specific types of Actions you want to add to a CodePipeline, write a subclass of `Step` that implements this interface, and add the action or actions you want in the `produce` method.

There needs to be a level of indirection here, because some aspects of the Action creation need to be controlled by the workflow engine (name and runOrder). All the rest of the properties are controlled by the factory.

type IFileSetProducer

type IFileSetProducer interface {
	// The `FileSet` produced by this file set producer.
	PrimaryOutput() FileSet
}

Any class that produces, or is itself, a `FileSet`.

Steps implicitly produce a primary FileSet as an output.

type ManualApprovalStep

type ManualApprovalStep interface {
	Step
	Comment() *string
	Dependencies() *[]Step
	DependencyFileSets() *[]FileSet
	Id() *string
	IsSource() *bool
	PrimaryOutput() FileSet
	AddDependencyFileSet(fs FileSet)
	AddStepDependency(step Step)
	ConfigurePrimaryOutput(fs FileSet)
	ToString() *string
}

A manual approval step.

If this step is added to a Pipeline, the Pipeline will be paused waiting for a human to resume it

Only engines that support pausing the deployment will support this step type.

TODO: EXAMPLE
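A hedged sketch of requiring human approval before a stage deploys (`pipeline` and `prodStage` are assumed to exist; the comment text is a placeholder):

```go
pipeline.AddStage(prodStage, &pipelines.AddStageOpts{
	Pre: &[]pipelines.Step{
		pipelines.NewManualApprovalStep(jsii.String("PromoteToProd"), &pipelines.ManualApprovalStepProps{
			Comment: jsii.String("Review the staging environment before promoting"),
		}),
	},
})
```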

func NewManualApprovalStep

func NewManualApprovalStep(id *string, props *ManualApprovalStepProps) ManualApprovalStep

type ManualApprovalStepProps

type ManualApprovalStepProps struct {
	// The comment to display with this manual approval.
	Comment *string `json:"comment"`
}

Construction properties for a `ManualApprovalStep`.

TODO: EXAMPLE

type PermissionsBroadeningCheckProps

type PermissionsBroadeningCheckProps struct {
	// The CDK Stage object to check the stacks of.
	//
	// This should be the same Stage object you are passing to `addStage()`.
	Stage awscdk.Stage `json:"stage"`
	// Topic to send notifications when a human needs to give manual confirmation.
	NotificationTopic awssns.ITopic `json:"notificationTopic"`
}

Properties for a `PermissionsBroadeningCheck`.

TODO: EXAMPLE

type PipelineBase

type PipelineBase interface {
	constructs.Construct
	CloudAssemblyFileSet() FileSet
	Node() constructs.Node
	Synth() IFileSetProducer
	Waves() *[]Wave
	AddStage(stage awscdk.Stage, options *AddStageOpts) StageDeployment
	AddWave(id *string, options *WaveOptions) Wave
	BuildPipeline()
	DoBuildPipeline()
	ToString() *string
}

A generic CDK Pipelines pipeline.

Different deployment systems will provide subclasses of `Pipeline` that generate the deployment infrastructure necessary to deploy CDK apps, specific to that system.

This library comes with the `CodePipeline` class, which uses AWS CodePipeline to deploy CDK apps.

The actual pipeline infrastructure is constructed (by invoking the engine) when `buildPipeline()` is called, or when `app.synth()` is called (whichever happens first).

type PipelineBaseProps

type PipelineBaseProps struct {
	// The build step that produces the CDK Cloud Assembly.
	//
	// The primary output of this step needs to be the `cdk.out` directory
	// generated by the `cdk synth` command.
	//
	// If you use a `ShellStep` here and you don't configure an output directory,
	// the output directory will automatically be assumed to be `cdk.out`.
	Synth IFileSetProducer `json:"synth"`
}

Properties for a `Pipeline`.

TODO: EXAMPLE

type ProduceActionOptions

type ProduceActionOptions struct {
	// Name the action should get.
	ActionName *string `json:"actionName"`
	// Helper object to translate FileSets to CodePipeline Artifacts.
	Artifacts ArtifactMap `json:"artifacts"`
	// The pipeline the action is being generated for.
	Pipeline CodePipeline `json:"pipeline"`
	// RunOrder the action should get.
	RunOrder *float64 `json:"runOrder"`
	// Scope in which to create constructs.
	Scope constructs.Construct `json:"scope"`
	// Whether or not this action is inserted before self mutation.
	//
	// If it is, the action should take care to reflect some part of
	// its own definition in the pipeline action definition, to
	// trigger a restart after self-mutation (if necessary).
	BeforeSelfMutation *bool `json:"beforeSelfMutation"`
	// If this action factory creates a CodeBuild step, default options to inherit.
	CodeBuildDefaults *CodeBuildOptions `json:"codeBuildDefaults"`
	// An input artifact that CodeBuild projects that don't actually need an input artifact can use.
	//
	// CodeBuild Projects MUST have an input artifact in order to be added to the Pipeline. If
	// the Project doesn't actually care about its input (it can be anything), it can use the
	// Artifact passed here.
	FallbackArtifact awscodepipeline.Artifact `json:"fallbackArtifact"`
}

Options for the `CodePipelineActionFactory.produce()` method.

TODO: EXAMPLE

type S3SourceOptions

type S3SourceOptions struct {
	// The action name used for this source in the CodePipeline.
	ActionName *string `json:"actionName"`
	// How should CodePipeline detect source changes for this Action.
	//
	// Note that if this is S3Trigger.EVENTS, you need to make sure to include the source Bucket in a CloudTrail Trail,
	// as otherwise the CloudWatch Events will not be emitted.
	// See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/log-s3-data-events.html
	//
	Trigger awscodepipelineactions.S3Trigger `json:"trigger"`
}

Options for S3 sources.

TODO: EXAMPLE

type ShellStep

type ShellStep interface {
	Step
	Commands() *[]*string
	Dependencies() *[]Step
	DependencyFileSets() *[]FileSet
	Env() *map[string]*string
	EnvFromCfnOutputs() *map[string]StackOutputReference
	Id() *string
	Inputs() *[]*FileSetLocation
	InstallCommands() *[]*string
	IsSource() *bool
	Outputs() *[]*FileSetLocation
	PrimaryOutput() FileSet
	AddDependencyFileSet(fs FileSet)
	AddOutputDirectory(directory *string) FileSet
	AddStepDependency(step Step)
	ConfigurePrimaryOutput(fs FileSet)
	PrimaryOutputDirectory(directory *string) FileSet
	ToString() *string
}

Run shell script commands in the pipeline.

TODO: EXAMPLE
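A hedged sketch of a `ShellStep` used as the synth step of a repository whose CDK app lives in a subdirectory (directory names, commands and environment values are placeholders):

```go
synth := pipelines.NewShellStep(jsii.String("Synth"), &pipelines.ShellStepProps{
	Input: pipelines.CodePipelineSource_GitHub(jsii.String("owner/repo"), jsii.String("main"), nil),
	// Install commands are reported separately from the main commands.
	InstallCommands: jsii.Strings("npm ci"),
	Commands:        jsii.Strings("npm run build", "npx cdk synth"),
	Env: &map[string]*string{
		"NODE_ENV": jsii.String("production"),
	},
	// cdk.out is produced under the subdirectory that contains the CDK app.
	PrimaryOutputDirectory: jsii.String("infra/cdk.out"),
})
```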

func NewShellStep

func NewShellStep(id *string, props *ShellStepProps) ShellStep

type ShellStepProps

type ShellStepProps struct {
	// Commands to run.
	Commands *[]*string `json:"commands"`
	// Additional FileSets to put in other directories.
	//
	// Specifies a mapping from directory name to FileSets. During the
	// script execution, the FileSets will be available in the directories
	// indicated.
	//
	// The directory names may be relative. For example, you can put
	// the main input and an additional input side-by-side with the
	// following configuration:
	//
	// ```ts
	// const script = new pipelines.ShellStep('MainScript', {
	//    commands: ['npm ci','npm run build','npx cdk synth'],
	//    input: pipelines.CodePipelineSource.gitHub('org/source1', 'main'),
	//    additionalInputs: {
	//      '../siblingdir': pipelines.CodePipelineSource.gitHub('org/source2', 'main'),
	//    }
	// });
	// ```
	AdditionalInputs *map[string]IFileSetProducer `json:"additionalInputs"`
	// Environment variables to set.
	Env *map[string]*string `json:"env"`
	// Set environment variables based on Stack Outputs.
	//
	// `ShellStep`s following stack or stage deployments may
	// access the `CfnOutput`s of those stacks to get access to
	// --for example--automatically generated resource names or
	// endpoint URLs.
	EnvFromCfnOutputs *map[string]awscdk.CfnOutput `json:"envFromCfnOutputs"`
	// FileSet to run these scripts on.
	//
	// The files in the FileSet will be placed in the working directory when
	// the script is executed. Use `additionalInputs` to download file sets
	// to other directories as well.
	Input IFileSetProducer `json:"input"`
	// Installation commands to run before the regular commands.
	//
	// For deployment engines that support it, install commands will be classified
	// differently in the job history from the regular `commands`.
	InstallCommands *[]*string `json:"installCommands"`
	// The directory that will contain the primary output fileset.
	//
	// After running the script, the contents of the given directory
	// will be treated as the primary output of this Step.
	PrimaryOutputDirectory *string `json:"primaryOutputDirectory"`
}

Construction properties for a `ShellStep`.

TODO: EXAMPLE

type StackAsset

type StackAsset struct {
	// Asset identifier.
	AssetId *string `json:"assetId"`
	// Absolute asset manifest path.
	//
	// This needs to be made relative at a later point in time, but when this
	// information is parsed we don't know about the root cloud assembly yet.
	AssetManifestPath *string `json:"assetManifestPath"`
	// Asset selector to pass to `cdk-assets`.
	AssetSelector *string `json:"assetSelector"`
	// Type of asset to publish.
	AssetType AssetType `json:"assetType"`
	// Does this asset represent the CloudFormation template for the stack.
	IsTemplate *bool `json:"isTemplate"`
	// Role ARN to assume to publish.
	AssetPublishingRoleArn *string `json:"assetPublishingRoleArn"`
}

An asset used by a Stack.

TODO: EXAMPLE

type StackDeployment

type StackDeployment interface {
	AbsoluteTemplatePath() *string
	Account() *string
	Assets() *[]*StackAsset
	AssumeRoleArn() *string
	ChangeSet() *[]Step
	ConstructPath() *string
	ExecutionRoleArn() *string
	Post() *[]Step
	Pre() *[]Step
	Region() *string
	StackArtifactId() *string
	StackDependencies() *[]StackDeployment
	StackName() *string
	Tags() *map[string]*string
	TemplateAsset() *StackAsset
	TemplateUrl() *string
	AddStackDependency(stackDeployment StackDeployment)
	AddStackSteps(pre *[]Step, changeSet *[]Step, post *[]Step)
}

Deployment of a single Stack.

You don't need to instantiate this class -- it will be automatically instantiated as necessary when you add a `Stage` to a pipeline.

TODO: EXAMPLE

func StackDeployment_FromArtifact

func StackDeployment_FromArtifact(stackArtifact cxapi.CloudFormationStackArtifact) StackDeployment

Build a `StackDeployment` from a Stack Artifact in a Cloud Assembly.

type StackDeploymentProps

type StackDeploymentProps struct {
	// Template path on disk to cloud assembly (cdk.out).
	AbsoluteTemplatePath *string `json:"absoluteTemplatePath"`
	// Construct path for this stack.
	ConstructPath *string `json:"constructPath"`
	// Artifact ID for this stack.
	StackArtifactId *string `json:"stackArtifactId"`
	// Name for this stack.
	StackName *string `json:"stackName"`
	// Account where the stack should be deployed.
	Account *string `json:"account"`
	// Assets referenced by this stack.
	Assets *[]*StackAsset `json:"assets"`
	// Role to assume before deploying this stack.
	AssumeRoleArn *string `json:"assumeRoleArn"`
	// Execution role to pass to CloudFormation.
	ExecutionRoleArn *string `json:"executionRoleArn"`
	// Region where the stack should be deployed.
	Region *string `json:"region"`
	// Tags to apply to the stack.
	Tags *map[string]*string `json:"tags"`
	// The S3 URL which points to the template asset location in the publishing bucket.
	TemplateS3Uri *string `json:"templateS3Uri"`
}

Properties for a `StackDeployment`.

TODO: EXAMPLE

type StackOutputReference

type StackOutputReference interface {
	OutputName() *string
	StackDescription() *string
	IsProducedBy(stack StackDeployment) *bool
}

A Reference to a Stack Output.

TODO: EXAMPLE

func StackOutputReference_FromCfnOutput

func StackOutputReference_FromCfnOutput(output awscdk.CfnOutput) StackOutputReference

Create a StackOutputReference that references the given CfnOutput.

type StackSteps

type StackSteps struct {
	// The stack you want the steps to run in.
	Stack awscdk.Stack `json:"stack"`
	// Steps that execute after stack is prepared but before stack is deployed.
	ChangeSet *[]Step `json:"changeSet"`
	// Steps that execute after stack is deployed.
	Post *[]Step `json:"post"`
	// Steps that execute before stack is prepared.
	Pre *[]Step `json:"pre"`
}

Instructions for additional steps that are run at stack level.

TODO: EXAMPLE
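A hedged sketch of stack-level steps; `appStage` is assumed to be a custom stage type whose `DatabaseStack` field (a hypothetical field name) exposes one of the stacks it defines, and the step ids and commands are placeholders:

```go
pipeline.AddStage(appStage, &pipelines.AddStageOpts{
	StackSteps: &[]*pipelines.StackSteps{
		{
			// The stack these steps apply to (hypothetical exported field).
			Stack: appStage.DatabaseStack,
			// Runs after the change set is created but before it is executed.
			ChangeSet: &[]pipelines.Step{
				pipelines.NewManualApprovalStep(jsii.String("ReviewChangeSet"), &pipelines.ManualApprovalStepProps{
					Comment: jsii.String("Review the database change set"),
				}),
			},
			// Runs after the stack has been deployed.
			Post: &[]pipelines.Step{
				pipelines.NewShellStep(jsii.String("DbSmokeTest"), &pipelines.ShellStepProps{
					Commands: jsii.Strings("./scripts/db-smoke-test.sh"),
				}),
			},
		},
	},
})
```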

type StageDeployment

type StageDeployment interface {
	Post() *[]Step
	Pre() *[]Step
	Stacks() *[]StackDeployment
	StackSteps() *[]*StackSteps
	StageName() *string
	AddPost(steps ...Step)
	AddPre(steps ...Step)
}

Deployment of a single `Stage`.

A `Stage` consists of one or more `Stacks`, which will be deployed in dependency order.

TODO: EXAMPLE
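A hedged sketch of attaching extra steps through the `StageDeployment` returned by `addStage` (ids, commands and the URL are placeholders; `pipeline` and `stage` are assumed to exist):

```go
deployment := pipeline.AddStage(stage, nil)

// Steps can still be attached after the stage has been added.
deployment.AddPost(pipelines.NewShellStep(jsii.String("Validate"), &pipelines.ShellStepProps{
	Commands: jsii.Strings("curl -Ssf https://example.com/health"),
}))
```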

func StageDeployment_FromStage

func StageDeployment_FromStage(stage awscdk.Stage, props *StageDeploymentProps) StageDeployment

Create a new `StageDeployment` from a `Stage`.

Synthesizes the target stage, and deploys the stacks found inside in dependency order.

type StageDeploymentProps

type StageDeploymentProps struct {
	// Additional steps to run after all of the stacks in the stage.
	Post *[]Step `json:"post"`
	// Additional steps to run before any of the stacks in the stage.
	Pre *[]Step `json:"pre"`
	// Instructions for additional steps that are run at the stack level.
	StackSteps *[]*StackSteps `json:"stackSteps"`
	// Stage name to use in the pipeline.
	StageName *string `json:"stageName"`
}

Properties for a `StageDeployment`.

TODO: EXAMPLE

type Step

type Step interface {
	IFileSetProducer
	Dependencies() *[]Step
	DependencyFileSets() *[]FileSet
	Id() *string
	IsSource() *bool
	PrimaryOutput() FileSet
	AddDependencyFileSet(fs FileSet)
	AddStepDependency(step Step)
	ConfigurePrimaryOutput(fs FileSet)
	ToString() *string
}

A generic Step which can be added to a Pipeline.

Steps can be used to add Sources, Build Actions and Validations to your pipeline.

This class is abstract. See specific subclasses of Step for useful steps to add to your Pipeline

TODO: EXAMPLE

type Wave

type Wave interface {
	Id() *string
	Post() *[]Step
	Pre() *[]Step
	Stages() *[]StageDeployment
	AddPost(steps ...Step)
	AddPre(steps ...Step)
	AddStage(stage awscdk.Stage, options *AddStageOpts) StageDeployment
}

Multiple stages that are deployed in parallel.

TODO: EXAMPLE
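A hedged sketch of deploying two stages in parallel within one wave (`pipeline`, `euStage` and `usStage` are assumed to exist; ids and commands are placeholders):

```go
// Stages added to the same wave are deployed in parallel; the wave's Pre and
// Post steps run before and after all of them.
wave := pipeline.AddWave(jsii.String("Prod"), &pipelines.WaveOptions{
	Post: &[]pipelines.Step{
		pipelines.NewShellStep(jsii.String("GlobalSmokeTest"), &pipelines.ShellStepProps{
			Commands: jsii.Strings("./scripts/global-smoke-test.sh"),
		}),
	},
})
wave.AddStage(euStage, nil)
wave.AddStage(usStage, nil)
```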

func NewWave

func NewWave(id *string, props *WaveProps) Wave

type WaveOptions

type WaveOptions struct {
	// Additional steps to run after all of the stages in the wave.
	Post *[]Step `json:"post"`
	// Additional steps to run before any of the stages in the wave.
	Pre *[]Step `json:"pre"`
}

Options to pass to `addWave`.

TODO: EXAMPLE

type WaveProps

type WaveProps struct {
	// Additional steps to run after all of the stages in the wave.
	Post *[]Step `json:"post"`
	// Additional steps to run before any of the stages in the wave.
	Pre *[]Step `json:"pre"`
}

Construction properties for a `Wave`.

TODO: EXAMPLE
