Documentation
Overview
Package clicommand contains the definitions of buildkite-agent subcommands.
It is intended for internal use by buildkite-agent only.
Index
- Constants
- Variables
- func CreateLogger(cfg any) logger.Logger
- func DefaultConfigFilePaths() (paths []string)
- func DefaultShell() string
- func HandleGlobalFlags(l logger.Logger, cfg any) func()
- func HandleProfileFlag(l logger.Logger, cfg any) func()
- func Profile(l logger.Logger, mode string) func()
- func UnsetConfigFromEnvironment(c *cli.Context) error
- type AcknowledgementsConfig
- type AgentStartConfig
- type AnnotateConfig
- type AnnotationRemoveConfig
- type ArtifactDownloadConfig
- type ArtifactSearchConfig
- type ArtifactShasumConfig
- type ArtifactUploadConfig
- type BootstrapConfig
- type EnvDumpConfig
- type EnvGetConfig
- type EnvSetConfig
- type EnvUnsetConfig
- type LockAcquireConfig
- type LockDoConfig
- type LockDoneConfig
- type LockGetConfig
- type LockReleaseConfig
- type MetaDataExistsConfig
- type MetaDataGetConfig
- type MetaDataKeysConfig
- type MetaDataSetConfig
- type OIDCTokenConfig
- type PipelineUploadConfig
- type StepGetConfig
- type StepUpdateConfig
Constants
const (
	DefaultEndpoint = "https://agent.buildkite.com/v3"
)
Variables
var AcknowledgementsCommand = cli.Command{
	Name:        "acknowledgements",
	Usage:       "Prints the licenses and notices of open source software incorporated into this software.",
	Description: acknowledgementsHelpDescription,
	Action: func(c *cli.Context) error {
		f, err := files.Open("ACKNOWLEDGEMENTS.md.gz")
		if err != nil {
			f, err = files.Open("dummy.md.gz")
			if err != nil {
				return fmt.Errorf("Couldn't open any embedded acknowledgements files: %w", err)
			}
		}
		r, err := gzip.NewReader(f)
		if err != nil {
			return fmt.Errorf("Couldn't create a gzip reader: %w", err)
		}
		if _, err := io.Copy(c.App.Writer, r); err != nil {
			return fmt.Errorf("Couldn't copy acknowledgments to output: %w", err)
		}
		return nil
	},
}
var AgentAccessTokenFlag = cli.StringFlag{
Name: "agent-access-token",
Value: "",
Usage: "The access token used to identify the agent",
EnvVar: "BUILDKITE_AGENT_ACCESS_TOKEN",
}
var AgentRegisterTokenFlag = cli.StringFlag{
Name: "token",
Value: "",
Usage: "Your account agent token",
EnvVar: "BUILDKITE_AGENT_TOKEN",
}
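The command declarations that follow all share the same wiring inside their Action functions: a cliconfig.Loader populates a per-command config struct from CLI flags and environment variables, CreateLogger builds a logger from that struct, and HandleGlobalFlags applies the global debug, log-level, color, experiment and profile flags and returns a cleanup function to defer. The following is a minimal sketch of that pattern, assuming a hypothetical ExampleConfig struct and command; the cli struct tags and import paths follow the agent's conventions and are illustrative, not part of this package's documented API.

import (
	"github.com/buildkite/agent/v3/cliconfig"
	"github.com/urfave/cli"
)

// ExampleConfig is illustrative only; real commands define one config struct
// per subcommand (AnnotateConfig, MetaDataSetConfig, and so on).
type ExampleConfig struct {
	Endpoint    string   `cli:"endpoint"`
	Debug       bool     `cli:"debug"`
	LogLevel    string   `cli:"log-level"`
	NoColor     bool     `cli:"no-color"`
	Experiments []string `cli:"experiment"`
	Profile     string   `cli:"profile"`
}

var ExampleCommand = cli.Command{
	Name:  "example",
	Usage: "Illustrates the shared config/logger wiring",
	Flags: []cli.Flag{
		EndpointFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag,
	},
	Action: func(c *cli.Context) error {
		cfg := ExampleConfig{}
		// AgentStartCommand additionally sets DefaultConfigFilePaths: DefaultConfigFilePaths().
		loader := cliconfig.Loader{CLI: c, Config: &cfg}
		warnings, err := loader.Load()
		if err != nil {
			return err
		}

		l := CreateLogger(&cfg)
		for _, warning := range warnings {
			l.Warn("%s", warning)
		}

		// Applies the global flags and returns a cleanup function to defer,
		// as every command in this package does.
		done := HandleGlobalFlags(l, cfg)
		defer done()

		// ... command-specific work goes here ...
		return nil
	},
}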
var AgentStartCommand = cli.Command{ Name: "start", Usage: "Starts a Buildkite agent", Description: startDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "config", Value: "", Usage: "Path to a configuration file", EnvVar: "BUILDKITE_AGENT_CONFIG", }, cli.StringFlag{ Name: "name", Value: "", Usage: "The name of the agent", EnvVar: "BUILDKITE_AGENT_NAME", }, cli.StringFlag{ Name: "priority", Value: "", Usage: "The priority of the agent (higher priorities are assigned work first)", EnvVar: "BUILDKITE_AGENT_PRIORITY", }, cli.StringFlag{ Name: "acquire-job", Value: "", Usage: "Start this agent and only run the specified job, disconnecting after it's finished", EnvVar: "BUILDKITE_AGENT_ACQUIRE_JOB", }, cli.BoolFlag{ Name: "disconnect-after-job", Usage: "Disconnect the agent after running exactly one job. When used in conjunction with the ′--spawn′ flag, each worker booted will run exactly one job", EnvVar: "BUILDKITE_AGENT_DISCONNECT_AFTER_JOB", }, cli.IntFlag{ Name: "disconnect-after-idle-timeout", Value: 0, Usage: "The maximum idle time in seconds to wait for a job before disconnecting. The default of 0 means no timeout", EnvVar: "BUILDKITE_AGENT_DISCONNECT_AFTER_IDLE_TIMEOUT", }, cli.IntFlag{ Name: "cancel-grace-period", Value: 10, Usage: "The number of seconds a canceled or timed out job is given to gracefully terminate and upload its artifacts", EnvVar: "BUILDKITE_CANCEL_GRACE_PERIOD", }, cli.BoolFlag{ Name: "enable-job-log-tmpfile", Usage: "Store the job logs in a temporary file ′BUILDKITE_JOB_LOG_TMPFILE′ that is accessible during the job and removed at the end of the job", EnvVar: "BUILDKITE_ENABLE_JOB_LOG_TMPFILE", }, cli.StringFlag{ Name: "job-log-path", Usage: "Location to store job logs created by configuring ′enable-job-log-tmpfile`, by default job log will be stored in TempDir", EnvVar: "BUILDKITE_JOB_LOG_PATH", }, cli.BoolFlag{ Name: "write-job-logs-to-stdout", Usage: "Writes job logs to the agent process' stdout. 
This simplifies log collection if running agents in Docker.", EnvVar: "BUILDKITE_WRITE_JOB_LOGS_TO_STDOUT", }, cli.StringFlag{ Name: "shell", Value: DefaultShell(), Usage: "The shell command used to interpret build commands, e.g /bin/bash -e -c", EnvVar: "BUILDKITE_SHELL", }, cli.StringSliceFlag{ Name: "tags", Value: &cli.StringSlice{}, Usage: "A comma-separated list of tags for the agent (for example, \"linux\" or \"mac,xcode=8\")", EnvVar: "BUILDKITE_AGENT_TAGS", }, cli.BoolFlag{ Name: "tags-from-host", Usage: "Include tags from the host (hostname, machine-id, os)", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_HOST", }, cli.StringSliceFlag{ Name: "tags-from-ec2-meta-data", Value: &cli.StringSlice{}, Usage: "Include the default set of host EC2 meta-data as tags (instance-id, instance-type, ami-id, and instance-life-cycle)", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_EC2_META_DATA", }, cli.StringSliceFlag{ Name: "tags-from-ec2-meta-data-paths", Value: &cli.StringSlice{}, Usage: "Include additional tags fetched from EC2 meta-data using tag & path suffix pairs, e.g \"tag_name=path/to/value\"", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_EC2_META_DATA_PATHS", }, cli.BoolFlag{ Name: "tags-from-ec2-tags", Usage: "Include the host's EC2 tags as tags", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_EC2_TAGS", }, cli.BoolFlag{ Name: "tags-from-ecs-meta-data", Usage: "Include the host's ECS meta-data as tags (container-name, image, and task-arn)", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_ECS_META_DATA", }, cli.StringSliceFlag{ Name: "tags-from-gcp-meta-data", Value: &cli.StringSlice{}, Usage: "Include the default set of host Google Cloud instance meta-data as tags (instance-id, machine-type, preemptible, project-id, region, and zone)", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_GCP_META_DATA", }, cli.StringSliceFlag{ Name: "tags-from-gcp-meta-data-paths", Value: &cli.StringSlice{}, Usage: "Include additional tags fetched from Google Cloud instance meta-data using tag & path suffix pairs, e.g \"tag_name=path/to/value\"", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_GCP_META_DATA_PATHS", }, cli.BoolFlag{ Name: "tags-from-gcp-labels", Usage: "Include the host's Google Cloud instance labels as tags", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_GCP_LABELS", }, cli.DurationFlag{ Name: "wait-for-ec2-tags-timeout", Usage: "The amount of time to wait for tags from EC2 before proceeding", EnvVar: "BUILDKITE_AGENT_WAIT_FOR_EC2_TAGS_TIMEOUT", Value: time.Second * 10, }, cli.DurationFlag{ Name: "wait-for-ec2-meta-data-timeout", Usage: "The amount of time to wait for meta-data from EC2 before proceeding", EnvVar: "BUILDKITE_AGENT_WAIT_FOR_EC2_META_DATA_TIMEOUT", Value: time.Second * 10, }, cli.DurationFlag{ Name: "wait-for-ecs-meta-data-timeout", Usage: "The amount of time to wait for meta-data from ECS before proceeding", EnvVar: "BUILDKITE_AGENT_WAIT_FOR_ECS_META_DATA_TIMEOUT", Value: time.Second * 10, }, cli.DurationFlag{ Name: "wait-for-gcp-labels-timeout", Usage: "The amount of time to wait for labels from GCP before proceeding", EnvVar: "BUILDKITE_AGENT_WAIT_FOR_GCP_LABELS_TIMEOUT", Value: time.Second * 10, }, cli.StringFlag{ Name: "git-checkout-flags", Value: "-f", Usage: "Flags to pass to \"git checkout\" command", EnvVar: "BUILDKITE_GIT_CHECKOUT_FLAGS", }, cli.StringFlag{ Name: "git-clone-flags", Value: "-v", Usage: "Flags to pass to the \"git clone\" command", EnvVar: "BUILDKITE_GIT_CLONE_FLAGS", }, cli.StringFlag{ Name: "git-clean-flags", Value: "-ffxdq", Usage: "Flags to pass to \"git clean\" command", EnvVar: "BUILDKITE_GIT_CLEAN_FLAGS", }, cli.StringFlag{ Name: 
"git-fetch-flags", Value: "-v --prune", Usage: "Flags to pass to \"git fetch\" command", EnvVar: "BUILDKITE_GIT_FETCH_FLAGS", }, cli.StringFlag{ Name: "git-clone-mirror-flags", Value: "-v", Usage: "Flags to pass to the \"git clone\" command when used for mirroring", EnvVar: "BUILDKITE_GIT_CLONE_MIRROR_FLAGS", }, cli.StringFlag{ Name: "git-mirrors-path", Value: "", Usage: "Path to where mirrors of git repositories are stored", EnvVar: "BUILDKITE_GIT_MIRRORS_PATH", }, cli.IntFlag{ Name: "git-mirrors-lock-timeout", Value: 300, Usage: "Seconds to lock a git mirror during clone, should exceed your longest checkout", EnvVar: "BUILDKITE_GIT_MIRRORS_LOCK_TIMEOUT", }, cli.BoolFlag{ Name: "git-mirrors-skip-update", Usage: "Skip updating the Git mirror", EnvVar: "BUILDKITE_GIT_MIRRORS_SKIP_UPDATE", }, cli.StringFlag{ Name: "bootstrap-script", Value: "", Usage: "The command that is executed for bootstrapping a job, defaults to the bootstrap sub-command of this binary", EnvVar: "BUILDKITE_BOOTSTRAP_SCRIPT_PATH", }, cli.StringFlag{ Name: "build-path", Value: "", Usage: "Path to where the builds will run from", EnvVar: "BUILDKITE_BUILD_PATH", }, cli.StringFlag{ Name: "hooks-path", Value: "", Usage: "Directory where the hook scripts are found", EnvVar: "BUILDKITE_HOOKS_PATH", }, cli.StringFlag{ Name: "sockets-path", Value: defaultSocketsPath(), Usage: "Directory where the agent will place sockets", EnvVar: "BUILDKITE_SOCKETS_PATH", }, cli.StringFlag{ Name: "plugins-path", Value: "", Usage: "Directory where the plugins are saved to", EnvVar: "BUILDKITE_PLUGINS_PATH", }, cli.BoolFlag{ Name: "no-ansi-timestamps", Usage: "Do not insert ANSI timestamp codes at the start of each line of job output", EnvVar: "BUILDKITE_NO_ANSI_TIMESTAMPS", }, cli.BoolFlag{ Name: "timestamp-lines", Usage: "Prepend timestamps on each line of job output. Has no effect unless --no-ansi-timestamps is also used", EnvVar: "BUILDKITE_TIMESTAMP_LINES", }, cli.StringFlag{ Name: "health-check-addr", Usage: "Start an HTTP server on this addr:port that returns whether the agent is healthy, disabled by default", EnvVar: "BUILDKITE_AGENT_HEALTH_CHECK_ADDR", }, cli.BoolFlag{ Name: "no-pty", Usage: "Do not run jobs within a pseudo terminal", EnvVar: "BUILDKITE_NO_PTY", }, cli.BoolFlag{ Name: "no-ssh-keyscan", Usage: "Don't automatically run ssh-keyscan before checkout", EnvVar: "BUILDKITE_NO_SSH_KEYSCAN", }, cli.BoolFlag{ Name: "no-command-eval", Usage: "Don't allow this agent to run arbitrary console commands, including plugins", EnvVar: "BUILDKITE_NO_COMMAND_EVAL", }, cli.BoolFlag{ Name: "no-plugins", Usage: "Don't allow this agent to load plugins", EnvVar: "BUILDKITE_NO_PLUGINS", }, cli.BoolTFlag{ Name: "no-plugin-validation", Usage: "Don't validate plugin configuration and requirements", EnvVar: "BUILDKITE_NO_PLUGIN_VALIDATION", }, cli.BoolFlag{ Name: "no-local-hooks", Usage: "Don't allow local hooks to be run from checked out repositories", EnvVar: "BUILDKITE_NO_LOCAL_HOOKS", }, cli.BoolFlag{ Name: "no-git-submodules", Usage: "Don't automatically checkout git submodules", EnvVar: "BUILDKITE_NO_GIT_SUBMODULES,BUILDKITE_DISABLE_GIT_SUBMODULES", }, cli.BoolFlag{ Name: "metrics-datadog", Usage: "Send metrics to DogStatsD for Datadog", EnvVar: "BUILDKITE_METRICS_DATADOG", }, cli.BoolFlag{ Name: "no-feature-reporting", Usage: "Disables sending a list of enabled features back to the Buildkite mothership. 
We use this information to measure feature usage, but if you're not comfortable sharing that information then that's totally okay :)", EnvVar: "BUILDKITE_AGENT_NO_FEATURE_REPORTING", }, cli.StringFlag{ Name: "metrics-datadog-host", Usage: "The dogstatsd instance to send metrics to using udp", EnvVar: "BUILDKITE_METRICS_DATADOG_HOST", Value: "127.0.0.1:8125", }, cli.BoolFlag{ Name: "metrics-datadog-distributions", Usage: "Use Datadog Distributions for Timing metrics", EnvVar: "BUILDKITE_METRICS_DATADOG_DISTRIBUTIONS", }, cli.StringFlag{ Name: "log-format", Usage: "The format to use for the logger output", EnvVar: "BUILDKITE_LOG_FORMAT", Value: "text", }, cli.IntFlag{ Name: "spawn", Usage: "The number of agents to spawn in parallel", Value: 1, EnvVar: "BUILDKITE_AGENT_SPAWN", }, cli.BoolFlag{ Name: "spawn-with-priority", Usage: "Assign priorities to every spawned agent (when using --spawn) equal to the agent's index", EnvVar: "BUILDKITE_AGENT_SPAWN_WITH_PRIORITY", }, cli.StringFlag{ Name: "cancel-signal", Usage: "The signal to use for cancellation", EnvVar: "BUILDKITE_CANCEL_SIGNAL", Value: "SIGTERM", }, cli.StringFlag{ Name: "tracing-backend", Usage: `Enable tracing for build jobs by specifying a backend, "datadog" or "opentelemetry"`, EnvVar: "BUILDKITE_TRACING_BACKEND", Value: "", }, cli.StringFlag{ Name: "tracing-service-name", Usage: "Service name to use when reporting traces.", EnvVar: "BUILDKITE_TRACING_SERVICE_NAME", Value: "buildkite-agent", }, AgentRegisterTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, RedactedVars, cli.StringSliceFlag{ Name: "meta-data", Value: &cli.StringSlice{}, Hidden: true, EnvVar: "BUILDKITE_AGENT_META_DATA", }, cli.BoolFlag{ Name: "meta-data-ec2", Hidden: true, EnvVar: "BUILDKITE_AGENT_META_DATA_EC2", }, cli.BoolFlag{ Name: "meta-data-ec2-tags", Hidden: true, EnvVar: "BUILDKITE_AGENT_TAGS_FROM_EC2_TAGS", }, cli.BoolFlag{ Name: "meta-data-gcp", Hidden: true, EnvVar: "BUILDKITE_AGENT_META_DATA_GCP", }, cli.BoolFlag{ Name: "no-automatic-ssh-fingerprint-verification", Hidden: true, EnvVar: "BUILDKITE_NO_AUTOMATIC_SSH_FINGERPRINT_VERIFICATION", }, cli.BoolFlag{ Name: "tags-from-ec2", Usage: "Include the host's EC2 meta-data as tags (instance-id, instance-type, and ami-id)", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_EC2", }, cli.BoolFlag{ Name: "tags-from-gcp", Usage: "Include the host's Google Cloud instance meta-data as tags (instance-id, machine-type, preemptible, project-id, region, and zone)", EnvVar: "BUILDKITE_AGENT_TAGS_FROM_GCP", }, cli.IntFlag{ Name: "disconnect-after-job-timeout", Hidden: true, Usage: "When --disconnect-after-job is specified, the number of seconds to wait for a job before shutting down", EnvVar: "BUILDKITE_AGENT_DISCONNECT_AFTER_JOB_TIMEOUT", }, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := AgentStartConfig{} loader := cliconfig.Loader{ CLI: c, Config: &cfg, DefaultConfigFilePaths: DefaultConfigFilePaths(), } warnings, err := loader.Load() if err != nil { fmt.Fprintf(os.Stderr, "Error loading config: %s\n", err) os.Exit(1) } l := CreateLogger(cfg) if cfg.LogFormat == "json" { l = l.WithFields(logger.StringField("source", "agent")) } for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() err = UnsetConfigFromEnvironment(c) if err != nil { fmt.Printf("%s", err) os.Exit(1) } if runtime.GOOS == "windows" { cfg.NoPTY = true } if cfg.BootstrapScript == "" { exePath, err := os.Executable() if err != nil 
{ l.Fatal("Unable to find executable path for bootstrap") } cfg.BootstrapScript = fmt.Sprintf("%s bootstrap", shellwords.Quote(exePath)) } isSetNoPlugins := c.IsSet("no-plugins") if loader.File != nil { if _, exists := loader.File.Config["no-plugins"]; exists { isSetNoPlugins = true } } if isSetNoPlugins && !cfg.NoPlugins { msg := "Plugins have been specifically enabled, despite %s being enabled. " + "Plugins can execute arbitrary hooks and commands, make sure you are " + "whitelisting your plugins in " + "your environment hook." switch { case cfg.NoCommandEval: l.Warn(msg, "no-command-eval") case cfg.NoLocalHooks: l.Warn(msg, "no-local-hooks") } } if (cfg.NoCommandEval || cfg.NoLocalHooks) && !isSetNoPlugins { cfg.NoPlugins = true } if cfg.Shell == "" { cfg.Shell = DefaultShell() } if cfg.DisconnectAfterJobTimeout > 0 { cfg.DisconnectAfterIdleTimeout = cfg.DisconnectAfterJobTimeout } var ec2TagTimeout time.Duration if t := cfg.WaitForEC2TagsTimeout; t != "" { var err error ec2TagTimeout, err = time.ParseDuration(t) if err != nil { l.Fatal("Failed to parse ec2 tag timeout: %v", err) } } var ec2MetaDataTimeout time.Duration if t := cfg.WaitForEC2MetaDataTimeout; t != "" { var err error ec2MetaDataTimeout, err = time.ParseDuration(t) if err != nil { l.Fatal("Failed to parse ec2 meta-data timeout: %v", err) } } var ecsMetaDataTimeout time.Duration if t := cfg.WaitForECSMetaDataTimeout; t != "" { var err error ecsMetaDataTimeout, err = time.ParseDuration(t) if err != nil { l.Fatal("Failed to parse ecs meta-data timeout: %v", err) } } var gcpLabelsTimeout time.Duration if t := cfg.WaitForGCPLabelsTimeout; t != "" { var err error gcpLabelsTimeout, err = time.ParseDuration(t) if err != nil { l.Fatal("Failed to parse gcp labels timeout: %v", err) } } mc := metrics.NewCollector(l, metrics.CollectorConfig{ Datadog: cfg.MetricsDatadog, DatadogHost: cfg.MetricsDatadogHost, DatadogDistributions: cfg.MetricsDatadogDistributions, }) if _, has := tracetools.ValidTracingBackends[cfg.TracingBackend]; !has { l.Fatal("The given tracing backend %q is not supported. 
Valid backends are: %q", cfg.TracingBackend, maps.Keys(tracetools.ValidTracingBackends)) } if experiments.IsEnabled(experiments.AgentAPI) { shutdown := runAgentAPI(ctx, l, cfg.SocketsPath) defer shutdown() } agentConf := agent.AgentConfiguration{ BootstrapScript: cfg.BootstrapScript, BuildPath: cfg.BuildPath, SocketsPath: cfg.SocketsPath, GitMirrorsPath: cfg.GitMirrorsPath, GitMirrorsLockTimeout: cfg.GitMirrorsLockTimeout, GitMirrorsSkipUpdate: cfg.GitMirrorsSkipUpdate, HooksPath: cfg.HooksPath, PluginsPath: cfg.PluginsPath, GitCheckoutFlags: cfg.GitCheckoutFlags, GitCloneFlags: cfg.GitCloneFlags, GitCloneMirrorFlags: cfg.GitCloneMirrorFlags, GitCleanFlags: cfg.GitCleanFlags, GitFetchFlags: cfg.GitFetchFlags, GitSubmodules: !cfg.NoGitSubmodules, SSHKeyscan: !cfg.NoSSHKeyscan, CommandEval: !cfg.NoCommandEval, PluginsEnabled: !cfg.NoPlugins, PluginValidation: !cfg.NoPluginValidation, LocalHooksEnabled: !cfg.NoLocalHooks, RunInPty: !cfg.NoPTY, ANSITimestamps: !cfg.NoANSITimestamps, TimestampLines: cfg.TimestampLines, DisconnectAfterJob: cfg.DisconnectAfterJob, DisconnectAfterIdleTimeout: cfg.DisconnectAfterIdleTimeout, CancelGracePeriod: cfg.CancelGracePeriod, EnableJobLogTmpfile: cfg.EnableJobLogTmpfile, JobLogPath: cfg.JobLogPath, WriteJobLogsToStdout: cfg.WriteJobLogsToStdout, LogFormat: cfg.LogFormat, Shell: cfg.Shell, RedactedVars: cfg.RedactedVars, AcquireJob: cfg.AcquireJob, TracingBackend: cfg.TracingBackend, TracingServiceName: cfg.TracingServiceName, } if loader.File != nil { agentConf.ConfigPath = loader.File.Path } if cfg.LogFormat == "text" { welcomeMessage := "\n" + "%s _ _ _ _ _ _ _ _\n" + " | | (_) | | | | (_) | | |\n" + " | |__ _ _ _| | __| | | ___| |_ ___ __ _ __ _ ___ _ __ | |_\n" + " | '_ \\| | | | | |/ _` | |/ / | __/ _ \\ / _` |/ _` |/ _ \\ '_ \\| __|\n" + " | |_) | |_| | | | (_| | <| | || __/ | (_| | (_| | __/ | | | |_\n" + " |_.__/ \\__,_|_|_|\\__,_|_|\\_\\_|\\__\\___| \\__,_|\\__, |\\___|_| |_|\\__|\n" + " __/ |\n" + " https://buildkite.com/agent |___/\n%s\n" if !cfg.NoColor { fmt.Fprintf(os.Stderr, welcomeMessage, "\x1b[38;5;48m", "\x1b[0m") } else { fmt.Fprintf(os.Stderr, welcomeMessage, "", "") } } else if cfg.LogFormat != "json" { l.Fatal("Invalid log format %v. 
Only 'text' or 'json' are allowed.", cfg.LogFormat) } l.Notice("Starting buildkite-agent v%s with PID: %s", version.Version(), fmt.Sprintf("%d", os.Getpid())) l.Notice("The agent source code can be found here: https://github.com/buildkite/agent") l.Notice("For questions and support, email us at: hello@buildkite.com") if agentConf.ConfigPath != "" { l.WithFields(logger.StringField(`path`, agentConf.ConfigPath)).Info("Configuration loaded") } l.Debug("Bootstrap command: %s", agentConf.BootstrapScript) l.Debug("Build path: %s", agentConf.BuildPath) l.Debug("Hooks directory: %s", agentConf.HooksPath) l.Debug("Plugins directory: %s", agentConf.PluginsPath) if !agentConf.SSHKeyscan { l.Info("Automatic ssh-keyscan has been disabled") } if !agentConf.CommandEval { l.Info("Evaluating console commands has been disabled") } if !agentConf.PluginsEnabled { l.Info("Plugins have been disabled") } if !agentConf.RunInPty { l.Info("Running builds within a pseudoterminal (PTY) has been disabled") } if agentConf.DisconnectAfterJob { l.Info("Agents will disconnect after a job run has completed") } if agentConf.DisconnectAfterIdleTimeout > 0 { l.Info("Agents will disconnect after %d seconds of inactivity", agentConf.DisconnectAfterIdleTimeout) } cancelSig, err := process.ParseSignal(cfg.CancelSignal) if err != nil { l.Fatal("Failed to parse cancel-signal: %v", err) } if !utils.FileExists(agentConf.BuildPath) { l.Info("Build Path doesn't exist, creating it (%s)", agentConf.BuildPath) if err := os.MkdirAll(agentConf.BuildPath, 0777); err != nil { l.Fatal("Failed to create builds path: %v", err) } } client := api.NewClient(l, loadAPIClientConfig(cfg, "Token")) registerReq := api.AgentRegisterRequest{ Name: cfg.Name, Priority: cfg.Priority, ScriptEvalEnabled: !cfg.NoCommandEval, Tags: agent.FetchTags(ctx, l, agent.FetchTagsConfig{ Tags: cfg.Tags, TagsFromEC2MetaData: (cfg.TagsFromEC2MetaData || cfg.TagsFromEC2), TagsFromEC2MetaDataPaths: cfg.TagsFromEC2MetaDataPaths, TagsFromEC2Tags: cfg.TagsFromEC2Tags, TagsFromECSMetaData: cfg.TagsFromECSMetaData, TagsFromGCPMetaData: (cfg.TagsFromGCPMetaData || cfg.TagsFromGCP), TagsFromGCPMetaDataPaths: cfg.TagsFromGCPMetaDataPaths, TagsFromGCPLabels: cfg.TagsFromGCPLabels, TagsFromHost: cfg.TagsFromHost, WaitForEC2TagsTimeout: ec2TagTimeout, WaitForEC2MetaDataTimeout: ec2MetaDataTimeout, WaitForECSMetaDataTimeout: ecsMetaDataTimeout, WaitForGCPLabelsTimeout: gcpLabelsTimeout, }), IgnoreInDispatches: cfg.AcquireJob != "", Features: cfg.Features(), } if cfg.Spawn > 1 && cfg.AcquireJob != "" { l.Fatal("You can't spawn multiple agents and acquire a job at the same time") } var workers []*agent.AgentWorker for i := 1; i <= cfg.Spawn; i++ { if cfg.Spawn == 1 { l.Info("Registering agent with Buildkite...") } else { l.Info("Registering agent %d of %d with Buildkite...", i, cfg.Spawn) } registerReq.Name = strings.ReplaceAll(cfg.Name, "%spawn", strconv.Itoa(i)) if cfg.SpawnWithPriority { p := i if experiments.IsEnabled(experiments.DescendingSpawnPrioity) { p = -i } l.Info("Assigning priority %d for agent %d", p, i) registerReq.Priority = strconv.Itoa(p) } ag, err := agent.Register(ctx, l, client, registerReq) if err != nil { l.Fatal("%s", err) } workers = append(workers, agent.NewAgentWorker( l.WithFields(logger.StringField("agent", ag.Name)), ag, mc, client, agent.AgentWorkerConfig{ AgentConfiguration: agentConf, CancelSignal: cancelSig, Debug: cfg.Debug, DebugHTTP: cfg.DebugHTTP, SpawnIndex: i, AgentStdout: os.Stdout, })) } pool := agent.NewAgentPool(workers) defer 
agentShutdownHook(l, cfg) if err := agentStartupHook(l, cfg); err != nil { l.Fatal("%s", err) } signals := handlePoolSignals(ctx, l, pool) defer close(signals) l.Info("Starting %d Agent(s)", cfg.Spawn) l.Info("You can press Ctrl-C to stop the agents") if cfg.HealthCheckAddr != "" { http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { l.Info("%s %s", r.Method, r.URL.Path) if r.URL.Path != "/" { http.NotFound(w, r) } else { fmt.Fprintf(w, "OK: Buildkite agent is running") } }) http.HandleFunc("/status", status.Handle) go func() { _, setStatus, done := status.AddSimpleItem(ctx, "Health check server") defer done() setStatus("👂 Listening") l.Notice("Starting HTTP health check server on %v", cfg.HealthCheckAddr) err := http.ListenAndServe(cfg.HealthCheckAddr, nil) if err != nil { l.Error("Could not start health check server: %v", err) } }() } if err := pool.Start(ctx); err != nil { l.Fatal("%s", err) } }, }
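When --spawn is greater than one, the Action above derives each worker's registration name and priority from its spawn index: any "%spawn" placeholder in the configured agent name is replaced with the index, and --spawn-with-priority assigns the index (negated under the descending-spawn-priority experiment) as the priority. A standalone sketch of that derivation follows; the function name is illustrative.

import (
	"strconv"
	"strings"
)

// spawnIdentity mirrors the per-worker logic in AgentStartCommand's Action:
// "%spawn" in the configured name becomes the worker index, and when
// spawn-with-priority is set the priority is the index (or its negation
// under the descending-spawn-priority experiment).
func spawnIdentity(name string, index int, withPriority, descending bool) (workerName, priority string) {
	workerName = strings.ReplaceAll(name, "%spawn", strconv.Itoa(index))
	if withPriority {
		p := index
		if descending {
			p = -index
		}
		priority = strconv.Itoa(p)
	}
	return workerName, priority
}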
var AnnotateCommand = cli.Command{ Name: "annotate", Usage: "Annotate the build page within the Buildkite UI with text from within a Buildkite job", Description: annotateHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "context", Usage: "The context of the annotation used to differentiate this annotation from others", EnvVar: "BUILDKITE_ANNOTATION_CONTEXT", }, cli.StringFlag{ Name: "style", Usage: "The style of the annotation (′success′, ′info′, ′warning′ or ′error′)", EnvVar: "BUILDKITE_ANNOTATION_STYLE", }, cli.BoolFlag{ Name: "append", Usage: "Append to the body of an existing annotation", EnvVar: "BUILDKITE_ANNOTATION_APPEND", }, cli.StringFlag{ Name: "job", Value: "", Usage: "Which job should the annotation come from", EnvVar: "BUILDKITE_JOB_ID", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := AnnotateConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() if err := annotate(ctx, cfg, l); err != nil { l.Fatal(err.Error()) } }, }
var AnnotationRemoveCommand = cli.Command{ Name: "remove", Usage: "Remove an existing annotation from a Buildkite build", Description: annotationRemoveHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "context", Value: "default", Usage: "The context of the annotation used to differentiate this annotation from others", EnvVar: "BUILDKITE_ANNOTATION_CONTEXT", }, cli.StringFlag{ Name: "job", Value: "", Usage: "Which job is removing the annotation", EnvVar: "BUILDKITE_JOB_ID", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := AnnotationRemoveConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) err = roko.NewRetrier( roko.WithMaxAttempts(5), roko.WithStrategy(roko.Constant(1*time.Second)), roko.WithJitter(), ).DoWithContext(ctx, func(r *roko.Retrier) error { resp, err := client.AnnotationRemove(ctx, cfg.Job, cfg.Context) if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404 || resp.StatusCode == 400 || resp.StatusCode == 410) { r.Break() return err } if err != nil { l.Warn("%s (%s)", err, r) return err } return nil }) if err != nil { l.Fatal("Failed to remove annotation: %s", err) } l.Debug("Successfully removed annotation") }, }
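The annotation and meta-data commands share the retry shape shown above: the API call is wrapped in a roko retrier, and Break is called on status codes that can never succeed so only transient failures are retried. A minimal sketch of that shape, with the API call abstracted to a status code and error; the function names are illustrative.

import (
	"context"
	"time"

	"github.com/buildkite/roko"
)

// retryUnlessPermanent mirrors the retry loop in AnnotationRemoveCommand:
// constant 1s backoff with jitter, up to 5 attempts, stopping early on
// status codes that will never succeed.
func retryUnlessPermanent(ctx context.Context, call func() (statusCode int, err error)) error {
	return roko.NewRetrier(
		roko.WithMaxAttempts(5),
		roko.WithStrategy(roko.Constant(1*time.Second)),
		roko.WithJitter(),
	).DoWithContext(ctx, func(r *roko.Retrier) error {
		status, err := call()
		switch status {
		case 400, 401, 404, 410:
			r.Break() // permanent failure; report the error without retrying
		}
		return err
	})
}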
var ArtifactDownloadCommand = cli.Command{ Name: "download", Usage: "Downloads artifacts from Buildkite to the local machine", Description: downloadHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "step", Value: "", Usage: "Scope the search to a particular step by using either its name or job ID", }, cli.StringFlag{ Name: "build", Value: "", EnvVar: "BUILDKITE_BUILD_ID", Usage: "The build that the artifacts were uploaded to", }, cli.BoolFlag{ Name: "include-retried-jobs", EnvVar: "BUILDKITE_AGENT_INCLUDE_RETRIED_JOBS", Usage: "Include artifacts from retried jobs in the search", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := ArtifactDownloadConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) downloader := agent.NewArtifactDownloader(l, client, agent.ArtifactDownloaderConfig{ Query: cfg.Query, Destination: cfg.Destination, BuildID: cfg.Build, Step: cfg.Step, IncludeRetriedJobs: cfg.IncludeRetriedJobs, DebugHTTP: cfg.DebugHTTP, }) if err := downloader.Download(ctx); err != nil { l.Fatal("Failed to download artifacts: %s", err) } }, }
var ArtifactSearchCommand = cli.Command{ Name: "search", Usage: "Searches artifacts in Buildkite", Description: searchHelpDescription, CustomHelpTemplate: `{{.Description}} Options: {{range .VisibleFlags}}{{.}} {{end}} Format specifiers: %i UUID of the artifact %p Artifact path %c Artifact creation time (an ISO 8601 / RFC-3339 formatted UTC timestamp) %j UUID of the job that uploaded the artifact, helpful for subsequent artifact downloads %s File size of the artifact in bytes %S SHA1 checksum of the artifact %u Download URL for the artifact, though consider using 'buildkite-agent artifact download' instead `, Flags: []cli.Flag{ cli.StringFlag{ Name: "step", Value: "", Usage: "Scope the search to a particular step by using either its name or job ID", }, cli.StringFlag{ Name: "build", Value: "", EnvVar: "BUILDKITE_BUILD_ID", Usage: "The build that the artifacts were uploaded to", }, cli.BoolFlag{ Name: "include-retried-jobs", EnvVar: "BUILDKITE_AGENT_INCLUDE_RETRIED_JOBS", Usage: "Include artifacts from retried jobs in the search", }, cli.BoolFlag{ Name: "allow-empty-results", Usage: "By default, searches exit 1 if there are no results. If this flag is set, searches will exit 0 with an empty set", }, cli.StringFlag{ Name: "format", Value: "%j %p %c\n", Usage: "Output formatting of results. See below for listing of available format specifiers.", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) error { ctx := context.Background() cfg := ArtifactSearchConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) searcher := agent.NewArtifactSearcher(l, client, cfg.Build) artifacts, err := searcher.Search(ctx, cfg.Query, cfg.Step, cfg.IncludeRetriedJobs, true) if err != nil { return err } if len(artifacts) == 0 { if cfg.AllowEmptyResults { l.Info("No matches found for %q", cfg.Query) } else { l.Fatal("No matches found for %q", cfg.Query) } } for _, artifact := range artifacts { r := strings.NewReplacer( "%p", artifact.Path, "%c", artifact.CreatedAt.Format(time.RFC3339), "%j", artifact.JobID, "%s", strconv.FormatInt(artifact.FileSize, 10), "%S", artifact.Sha1Sum, "%u", artifact.URL, "%i", artifact.ID, ) fmt.Print(r.Replace(cfg.PrintFormat)) } return nil }, }
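The format specifiers listed in the help template above are expanded with a strings.NewReplacer over the matched artifact's fields. The following sketch performs the same expansion against a local struct; the struct is illustrative and not the api package's type.

import (
	"strconv"
	"strings"
	"time"
)

// searchResult holds only the fields the format specifiers map to.
type searchResult struct {
	ID, Path, JobID, Sha1Sum, URL string
	FileSize                      int64
	CreatedAt                     time.Time
}

// formatResult expands %i, %p, %c, %j, %s, %S and %u the same way the search
// command's Action does.
func formatResult(format string, a searchResult) string {
	return strings.NewReplacer(
		"%i", a.ID,
		"%p", a.Path,
		"%c", a.CreatedAt.Format(time.RFC3339),
		"%j", a.JobID,
		"%s", strconv.FormatInt(a.FileSize, 10),
		"%S", a.Sha1Sum,
		"%u", a.URL,
	).Replace(format)
}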
var ArtifactShasumCommand = cli.Command{ Name: "shasum", Usage: "Prints the SHA-1 hash for a single artifact specified by a search query", Description: shasumHelpDescription, Flags: []cli.Flag{ cli.BoolFlag{ Name: "sha256", Usage: "Request SHA-256 instead of SHA-1, errors if SHA-256 not available", }, cli.StringFlag{ Name: "step", Value: "", Usage: "Scope the search to a particular step by its name or job ID", }, cli.StringFlag{ Name: "build", Value: "", EnvVar: "BUILDKITE_BUILD_ID", Usage: "The build that the artifact was uploaded to", }, cli.BoolFlag{ Name: "include-retried-jobs", EnvVar: "BUILDKITE_AGENT_INCLUDE_RETRIED_JOBS", Usage: "Include artifacts from retried jobs in the search", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := ArtifactShasumConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() if err := searchAndPrintShaSum(ctx, cfg, l, os.Stdout); err != nil { l.Fatal(err.Error()) } }, }
var ArtifactUploadCommand = cli.Command{ Name: "upload", Usage: "Uploads files to a job as artifacts", Description: uploadHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "job", Value: "", Usage: "Which job should the artifacts be uploaded to", EnvVar: "BUILDKITE_JOB_ID", }, cli.StringFlag{ Name: "content-type", Value: "", Usage: "A specific Content-Type to set for the artifacts (otherwise detected)", EnvVar: "BUILDKITE_ARTIFACT_CONTENT_TYPE", }, cli.BoolFlag{ Name: "glob-resolve-follow-symlinks", Usage: "Follow symbolic links to directories while resolving globs. Note: this will not prevent symlinks to files from being uploaded. Use --upload-skip-symlinks to do that", EnvVar: "BUILDKITE_AGENT_ARTIFACT_GLOB_RESOLVE_FOLLOW_SYMLINKS", }, cli.BoolFlag{ Name: "upload-skip-symlinks", Usage: "After the glob has been resolved to a list of files to upload, skip uploading those that are symlinks to files", EnvVar: "BUILDKITE_ARTIFACT_UPLOAD_SKIP_SYMLINKS", }, cli.BoolFlag{ Name: "follow-symlinks", Usage: "Follow symbolic links while resolving globs. Note this argument is deprecated. Use `--glob-resolve-follow-symlinks` instead", EnvVar: "BUILDKITE_AGENT_ARTIFACT_SYMLINKS", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := ArtifactUploadConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) uploader := agent.NewArtifactUploader(l, client, agent.ArtifactUploaderConfig{ JobID: cfg.Job, Paths: cfg.UploadPaths, Destination: cfg.Destination, ContentType: cfg.ContentType, DebugHTTP: cfg.DebugHTTP, GlobResolveFollowSymlinks: (cfg.GlobResolveFollowSymlinks || cfg.FollowSymlinks), UploadSkipSymlinks: cfg.UploadSkipSymlinks, }) if err := uploader.Upload(ctx); err != nil { l.Fatal("Failed to upload artifacts: %s", err) } }, }
var BootstrapCommand = cli.Command{ Name: "bootstrap", Usage: "Run a Buildkite job locally", Description: bootstrapHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "command", Value: "", Usage: "The command to run", EnvVar: "BUILDKITE_COMMAND", }, cli.StringFlag{ Name: "job", Value: "", Usage: "The ID of the job being run", EnvVar: "BUILDKITE_JOB_ID", }, cli.StringFlag{ Name: "repository", Value: "", Usage: "The repository to clone and run the job from", EnvVar: "BUILDKITE_REPO", }, cli.StringFlag{ Name: "commit", Value: "", Usage: "The commit to checkout in the repository", EnvVar: "BUILDKITE_COMMIT", }, cli.StringFlag{ Name: "branch", Value: "", Usage: "The branch the commit is in", EnvVar: "BUILDKITE_BRANCH", }, cli.StringFlag{ Name: "tag", Value: "", Usage: "The tag the commit", EnvVar: "BUILDKITE_TAG", }, cli.StringFlag{ Name: "refspec", Value: "", Usage: "Optional refspec to override git fetch", EnvVar: "BUILDKITE_REFSPEC", }, cli.StringFlag{ Name: "plugins", Value: "", Usage: "The plugins for the job", EnvVar: "BUILDKITE_PLUGINS", }, cli.StringFlag{ Name: "pullrequest", Value: "", Usage: "The number/id of the pull request this commit belonged to", EnvVar: "BUILDKITE_PULL_REQUEST", }, cli.StringFlag{ Name: "agent", Value: "", Usage: "The name of the agent running the job", EnvVar: "BUILDKITE_AGENT_NAME", }, cli.StringFlag{ Name: "queue", Value: "", Usage: "The name of the queue the agent belongs to, if tagged", EnvVar: "BUILDKITE_AGENT_META_DATA_QUEUE", }, cli.StringFlag{ Name: "organization", Value: "", Usage: "The slug of the organization that the job is a part of", EnvVar: "BUILDKITE_ORGANIZATION_SLUG", }, cli.StringFlag{ Name: "pipeline", Value: "", Usage: "The slug of the pipeline that the job is a part of", EnvVar: "BUILDKITE_PIPELINE_SLUG", }, cli.StringFlag{ Name: "pipeline-provider", Value: "", Usage: "The id of the SCM provider that the repository is hosted on", EnvVar: "BUILDKITE_PIPELINE_PROVIDER", }, cli.StringFlag{ Name: "artifact-upload-paths", Value: "", Usage: "Paths to files to automatically upload at the end of a job", EnvVar: "BUILDKITE_ARTIFACT_PATHS", }, cli.StringFlag{ Name: "artifact-upload-destination", Value: "", Usage: "A custom location to upload artifact paths to (for example, s3://my-custom-bucket/and/prefix)", EnvVar: "BUILDKITE_ARTIFACT_UPLOAD_DESTINATION", }, cli.BoolFlag{ Name: "clean-checkout", Usage: "Whether or not the bootstrap should remove the existing repository before running the command", EnvVar: "BUILDKITE_CLEAN_CHECKOUT", }, cli.StringFlag{ Name: "git-checkout-flags", Value: "-f", Usage: "Flags to pass to \"git checkout\" command", EnvVar: "BUILDKITE_GIT_CHECKOUT_FLAGS", }, cli.StringFlag{ Name: "git-clone-flags", Value: "-v", Usage: "Flags to pass to \"git clone\" command", EnvVar: "BUILDKITE_GIT_CLONE_FLAGS", }, cli.StringFlag{ Name: "git-clone-mirror-flags", Value: "-v", Usage: "Flags to pass to \"git clone\" command when mirroring", EnvVar: "BUILDKITE_GIT_CLONE_MIRROR_FLAGS", }, cli.StringFlag{ Name: "git-clean-flags", Value: "-ffxdq", Usage: "Flags to pass to \"git clean\" command", EnvVar: "BUILDKITE_GIT_CLEAN_FLAGS", }, cli.StringFlag{ Name: "git-fetch-flags", Value: "", Usage: "Flags to pass to \"git fetch\" command", EnvVar: "BUILDKITE_GIT_FETCH_FLAGS", }, cli.StringSliceFlag{ Name: "git-submodule-clone-config", Value: &cli.StringSlice{}, Usage: "Comma separated key=value git config pairs applied before git submodule clone commands. For example, ′update --init′. 
If the config is needed to be applied to all git commands, supply it in a global git config file for the system that the agent runs in instead.", EnvVar: "BUILDKITE_GIT_SUBMODULE_CLONE_CONFIG", }, cli.StringFlag{ Name: "git-mirrors-path", Value: "", Usage: "Path to where mirrors of git repositories are stored", EnvVar: "BUILDKITE_GIT_MIRRORS_PATH", }, cli.IntFlag{ Name: "git-mirrors-lock-timeout", Value: 300, Usage: "Seconds to lock a git mirror during clone, should exceed your longest checkout", EnvVar: "BUILDKITE_GIT_MIRRORS_LOCK_TIMEOUT", }, cli.BoolFlag{ Name: "git-mirrors-skip-update", Usage: "Skip updating the Git mirror", EnvVar: "BUILDKITE_GIT_MIRRORS_SKIP_UPDATE", }, cli.StringFlag{ Name: "bin-path", Value: "", Usage: "Directory where the buildkite-agent binary lives", EnvVar: "BUILDKITE_BIN_PATH", }, cli.StringFlag{ Name: "build-path", Value: "", Usage: "Directory where builds will be created", EnvVar: "BUILDKITE_BUILD_PATH", }, cli.StringFlag{ Name: "hooks-path", Value: "", Usage: "Directory where the hook scripts are found", EnvVar: "BUILDKITE_HOOKS_PATH", }, cli.StringFlag{ Name: "sockets-path", Value: defaultSocketsPath(), Usage: "Directory where the agent will place sockets", EnvVar: "BUILDKITE_SOCKETS_PATH", }, cli.StringFlag{ Name: "plugins-path", Value: "", Usage: "Directory where the plugins are saved to", EnvVar: "BUILDKITE_PLUGINS_PATH", }, cli.BoolTFlag{ Name: "command-eval", Usage: "Allow running of arbitrary commands", EnvVar: "BUILDKITE_COMMAND_EVAL", }, cli.BoolTFlag{ Name: "plugins-enabled", Usage: "Allow plugins to be run", EnvVar: "BUILDKITE_PLUGINS_ENABLED", }, cli.BoolFlag{ Name: "plugin-validation", Usage: "Validate plugin configuration", EnvVar: "BUILDKITE_PLUGIN_VALIDATION", }, cli.BoolFlag{ Name: "plugins-always-clone-fresh", Usage: "Always make a new clone of plugin source, even if already present", EnvVar: "BUILDKITE_PLUGINS_ALWAYS_CLONE_FRESH", }, cli.BoolTFlag{ Name: "local-hooks-enabled", Usage: "Allow local hooks to be run", EnvVar: "BUILDKITE_LOCAL_HOOKS_ENABLED", }, cli.BoolTFlag{ Name: "ssh-keyscan", Usage: "Automatically run ssh-keyscan before checkout", EnvVar: "BUILDKITE_SSH_KEYSCAN", }, cli.BoolTFlag{ Name: "git-submodules", Usage: "Enable git submodules", EnvVar: "BUILDKITE_GIT_SUBMODULES", }, cli.BoolTFlag{ Name: "pty", Usage: "Run jobs within a pseudo terminal", EnvVar: "BUILDKITE_PTY", }, cli.StringFlag{ Name: "shell", Usage: "The shell to use to interpret build commands", EnvVar: "BUILDKITE_SHELL", Value: DefaultShell(), }, cli.StringSliceFlag{ Name: "phases", Usage: "The specific phases to execute. 
The order they're defined is irrelevant.", EnvVar: "BUILDKITE_BOOTSTRAP_PHASES", }, cli.StringFlag{ Name: "cancel-signal", Usage: "The signal to use for cancellation", EnvVar: "BUILDKITE_CANCEL_SIGNAL", Value: "SIGTERM", }, cli.StringSliceFlag{ Name: "redacted-vars", Usage: "Pattern of environment variable names containing sensitive values", EnvVar: "BUILDKITE_REDACTED_VARS", }, cli.StringFlag{ Name: "tracing-backend", Usage: "The name of the tracing backend to use.", EnvVar: "BUILDKITE_TRACING_BACKEND", Value: "", }, cli.StringFlag{ Name: "tracing-service-name", Usage: "Service name to use when reporting traces.", EnvVar: "BUILDKITE_TRACING_SERVICE_NAME", Value: "buildkite-agent", }, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { cfg := BootstrapConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } for _, name := range cfg.Experiments { experiments.EnableWithWarnings(l, name) } done := HandleProfileFlag(l, cfg) defer done() runInPty := cfg.PTY if runtime.GOOS == "windows" { runInPty = false } for _, phase := range cfg.Phases { switch phase { case "plugin", "checkout", "command": default: l.Fatal("Invalid phase %q", phase) } } cancelSig, err := process.ParseSignal(cfg.CancelSignal) if err != nil { l.Fatal("Failed to parse cancel-signal: %v", err) } bootstrap := job.New(job.ExecutorConfig{ AgentName: cfg.AgentName, ArtifactUploadDestination: cfg.ArtifactUploadDestination, AutomaticArtifactUploadPaths: cfg.AutomaticArtifactUploadPaths, BinPath: cfg.BinPath, Branch: cfg.Branch, BuildPath: cfg.BuildPath, SocketsPath: cfg.SocketsPath, CancelSignal: cancelSig, CleanCheckout: cfg.CleanCheckout, Command: cfg.Command, CommandEval: cfg.CommandEval, Commit: cfg.Commit, Debug: cfg.Debug, GitCheckoutFlags: cfg.GitCheckoutFlags, GitCleanFlags: cfg.GitCleanFlags, GitCloneFlags: cfg.GitCloneFlags, GitCloneMirrorFlags: cfg.GitCloneMirrorFlags, GitFetchFlags: cfg.GitFetchFlags, GitMirrorsLockTimeout: cfg.GitMirrorsLockTimeout, GitMirrorsPath: cfg.GitMirrorsPath, GitMirrorsSkipUpdate: cfg.GitMirrorsSkipUpdate, GitSubmodules: cfg.GitSubmodules, GitSubmoduleCloneConfig: cfg.GitSubmoduleCloneConfig, HooksPath: cfg.HooksPath, JobID: cfg.JobID, LocalHooksEnabled: cfg.LocalHooksEnabled, OrganizationSlug: cfg.OrganizationSlug, Phases: cfg.Phases, PipelineProvider: cfg.PipelineProvider, PipelineSlug: cfg.PipelineSlug, PluginValidation: cfg.PluginValidation, Plugins: cfg.Plugins, PluginsEnabled: cfg.PluginsEnabled, PluginsAlwaysCloneFresh: cfg.PluginsAlwaysCloneFresh, PluginsPath: cfg.PluginsPath, PullRequest: cfg.PullRequest, Queue: cfg.Queue, RedactedVars: cfg.RedactedVars, RefSpec: cfg.RefSpec, Repository: cfg.Repository, RunInPty: runInPty, SSHKeyscan: cfg.SSHKeyscan, Shell: cfg.Shell, Tag: cfg.Tag, TracingBackend: cfg.TracingBackend, TracingServiceName: cfg.TracingServiceName, }) ctx, cancel := context.WithCancel(context.Background()) defer cancel() signals := make(chan os.Signal, 1) signal.Notify(signals, os.Interrupt, syscall.SIGHUP, syscall.SIGTERM, syscall.SIGINT, syscall.SIGQUIT) defer signal.Stop(signals) var ( cancelled bool received os.Signal signalMu sync.Mutex ) go func() { sig := <-signals signalMu.Lock() defer signalMu.Unlock() bootstrap.Cancel() cancelled = true received = sig signal.Stop(signals) }() exitCode := bootstrap.Run(ctx) signalMu.Lock() defer signalMu.Unlock() if cancelled && 
runtime.GOOS != "windows" { p, err := os.FindProcess(os.Getpid()) if err != nil { l.Error("Failed to find current process: %v", err) } l.Debug("Terminating bootstrap after cancellation with %v", received) err = p.Signal(received) if err != nil { l.Error("Failed to signal self: %v", err) } } os.Exit(exitCode) }, }
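The end of the bootstrap Action above uses a cancel-then-re-raise pattern: the first signal cancels the running job, and once Run returns, the same signal is re-sent to the current process (except on Windows) so the caller observes a signal exit rather than a plain exit code. A standalone sketch of that pattern follows; runJob and cancelJob stand in for the executor's Run and Cancel.

import (
	"context"
	"os"
	"os/signal"
	"runtime"
	"sync"
	"syscall"
)

// runWithCancel mirrors the signal handling in BootstrapCommand's Action.
func runWithCancel(ctx context.Context, runJob func(context.Context) int, cancelJob func()) int {
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt, syscall.SIGHUP, syscall.SIGTERM, syscall.SIGINT, syscall.SIGQUIT)
	defer signal.Stop(signals)

	var (
		mu       sync.Mutex
		received os.Signal
	)
	go func() {
		sig := <-signals
		mu.Lock()
		defer mu.Unlock()
		cancelJob()
		received = sig
		signal.Stop(signals)
	}()

	exitCode := runJob(ctx)

	mu.Lock()
	defer mu.Unlock()
	if received != nil && runtime.GOOS != "windows" {
		// Re-raise the signal so the parent process sees a signal exit.
		if p, err := os.FindProcess(os.Getpid()); err == nil {
			_ = p.Signal(received)
		}
	}
	return exitCode
}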
var DebugFlag = cli.BoolFlag{
Name: "debug",
Usage: "Enable debug mode. Synonym for ′--log-level debug′. Takes precedence over ′--log-level′",
EnvVar: "BUILDKITE_AGENT_DEBUG",
}
var DebugHTTPFlag = cli.BoolFlag{
Name: "debug-http",
Usage: "Enable HTTP debug mode, which dumps all request and response bodies to the log",
EnvVar: "BUILDKITE_AGENT_DEBUG_HTTP",
}
var EndpointFlag = cli.StringFlag{
	Name:   "endpoint",
	Value:  DefaultEndpoint,
	Usage:  "The Agent API endpoint",
	EnvVar: "BUILDKITE_AGENT_ENDPOINT",
}
var EnvDumpCommand = cli.Command{
	Name:        "dump",
	Usage:       "Print the environment of the current process as a JSON object",
	Description: envDumpHelpDescription,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:   "format",
			Usage:  "Output format; json or json-pretty",
			EnvVar: "BUILDKITE_AGENT_ENV_DUMP_FORMAT",
			Value:  "json",
		},
	},
	Action: func(c *cli.Context) error {
		envn := os.Environ()
		envMap := make(map[string]string, len(envn))
		for _, e := range envn {
			if k, v, ok := env.Split(e); ok {
				envMap[k] = v
			}
		}
		enc := json.NewEncoder(c.App.Writer)
		if c.String("format") == "json-pretty" {
			enc.SetIndent("", " ")
		}
		if err := enc.Encode(envMap); err != nil {
			fmt.Fprintf(c.App.ErrWriter, "Error marshalling JSON: %v\n", err)
			os.Exit(1)
		}
		return nil
	},
}
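Because env dump prints a single JSON object of variable names to values, its output can be consumed directly from another Go process. A minimal sketch, assuming the buildkite-agent binary is on PATH:

import (
	"context"
	"encoding/json"
	"os/exec"
)

// dumpAgentEnv runs "buildkite-agent env dump" and decodes its JSON output.
func dumpAgentEnv(ctx context.Context) (map[string]string, error) {
	out, err := exec.CommandContext(ctx, "buildkite-agent", "env", "dump", "--format", "json").Output()
	if err != nil {
		return nil, err
	}
	envMap := make(map[string]string)
	if err := json.Unmarshal(out, &envMap); err != nil {
		return nil, err
	}
	return envMap, nil
}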
var EnvGetCommand = cli.Command{
	Name:        "get",
	Usage:       "Gets variables from the job execution environment",
	Description: envGetHelpDescription,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:   "format",
			Usage:  "Output format: plain, json, or json-pretty",
			EnvVar: "BUILDKITE_AGENT_ENV_GET_FORMAT",
			Value:  "plain",
		},
	},
	Action: envGetAction,
}
var EnvSetCommand = cli.Command{
	Name:        "set",
	Usage:       "Sets variables in the job execution environment",
	Description: envSetHelpDescription,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:   "input-format",
			Usage:  "Input format: plain or json",
			EnvVar: "BUILDKITE_AGENT_ENV_SET_INPUT_FORMAT",
			Value:  "plain",
		},
		cli.StringFlag{
			Name:   "output-format",
			Usage:  "Output format: quiet (no output), plain, json, or json-pretty",
			EnvVar: "BUILDKITE_AGENT_ENV_SET_OUTPUT_FORMAT",
			Value:  "plain",
		},
	},
	Action: envSetAction,
}
var EnvUnsetCommand = cli.Command{
	Name:        "unset",
	Usage:       "Unsets variables from the job execution environment",
	Description: envUnsetHelpDescription,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:   "input-format",
			Usage:  "Input format: plain or json",
			EnvVar: "BUILDKITE_AGENT_ENV_UNSET_INPUT_FORMAT",
			Value:  "plain",
		},
		cli.StringFlag{
			Name:   "output-format",
			Usage:  "Output format: quiet (no output), plain, json, or json-pretty",
			EnvVar: "BUILDKITE_AGENT_ENV_UNSET_OUTPUT_FORMAT",
			Value:  "plain",
		},
	},
	Action: envUnsetAction,
}
var ExperimentsFlag = cli.StringSliceFlag{
	Name:   "experiment",
	Value:  &cli.StringSlice{},
	Usage:  "Enable experimental features within the buildkite-agent",
	EnvVar: "BUILDKITE_AGENT_EXPERIMENT",
}
var LockAcquireCommand = cli.Command{
	Name:        "acquire",
	Usage:       "Acquires a lock from the agent leader",
	Description: lockAcquireHelpDescription,
	Flags: append(
		lockCommonFlags,
		cli.DurationFlag{
			Name:   "lock-wait-timeout",
			Usage:  "If specified, sets a maximum duration to wait for a lock before giving up",
			EnvVar: "BUILDKITE_LOCK_WAIT_TIMEOUT",
		},
	),
	Action: lockAcquireAction,
}
var LockDoCommand = cli.Command{
	Name:        "do",
	Usage:       "Begins a do-once lock",
	Description: lockDoHelpDescription,
	Flags: append(
		lockCommonFlags,
		cli.DurationFlag{
			Name:   "lock-wait-timeout",
			Usage:  "If specified, sets a maximum duration to wait for a lock before giving up",
			EnvVar: "BUILDKITE_LOCK_WAIT_TIMEOUT",
		},
	),
	Action: lockDoAction,
}
var LockDoneCommand = cli.Command{
Name: "done",
Usage: "Completes a do-once lock",
Description: lockDoneHelpDescription,
Flags: lockCommonFlags,
Action: lockDoneAction,
}
var LockGetCommand = cli.Command{
Name: "get",
Usage: "Gets a lock value from the agent leader",
Description: lockGetHelpDescription,
Flags: lockCommonFlags,
Action: lockGetAction,
}
var LockReleaseCommand = cli.Command{
Name: "release",
Usage: "Releases a previously-acquired lock",
Description: lockReleaseHelpDescription,
Flags: lockCommonFlags,
Action: lockReleaseAction,
}
var LogLevelFlag = cli.StringFlag{
Name: "log-level",
Value: "notice",
Usage: "Set the log level for the agent, making logging more or less verbose. Defaults to notice. Allowed values are: debug, info, error, warn, fatal",
EnvVar: "BUILDKITE_AGENT_LOG_LEVEL",
}
var MetaDataExistsCommand = cli.Command{ Name: "exists", Usage: "Check to see if the meta data key exists for a build", Description: metaDataExistsHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "job", Value: "", Usage: "Which job's build should the meta-data be checked for", EnvVar: "BUILDKITE_JOB_ID", }, cli.StringFlag{ Name: "build", Value: "", Usage: "Which build should the meta-data be retrieved from. --build will take precedence over --job", EnvVar: "BUILDKITE_METADATA_BUILD_ID", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := MetaDataExistsConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) // Find the meta data value var exists *api.MetaDataExists var resp *api.Response scope := "job" id := cfg.Job if cfg.Build != "" { scope = "build" id = cfg.Build } err = roko.NewRetrier( roko.WithMaxAttempts(10), roko.WithStrategy(roko.Constant(5*time.Second)), ).DoWithContext(ctx, func(r *roko.Retrier) error { exists, resp, err = client.ExistsMetaData(ctx, scope, id, cfg.Key) if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404) { r.Break() } if err != nil { l.Warn("%s (%s)", err, r) return err } return nil }) if err != nil { l.Fatal("Failed to see if meta-data exists: %s", err) } if !exists.Exists { os.Exit(100) } }, }
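The exists command signals its result through the exit status: exit 0 when the key exists and exit 100 (see os.Exit(100) above) when it does not. A sketch of checking that from another Go process, assuming the key is passed as the positional argument and the binary is on PATH:

import (
	"context"
	"errors"
	"os/exec"
)

// metaDataExists shells out to "buildkite-agent meta-data exists <key>" and
// treats exit status 100 as "key not set" rather than an error.
func metaDataExists(ctx context.Context, key string) (bool, error) {
	err := exec.CommandContext(ctx, "buildkite-agent", "meta-data", "exists", key).Run()
	if err == nil {
		return true, nil
	}
	var exitErr *exec.ExitError
	if errors.As(err, &exitErr) && exitErr.ExitCode() == 100 {
		return false, nil
	}
	return false, err
}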
var MetaDataGetCommand = cli.Command{ Name: "get", Usage: "Get data from a build", Description: metaDataGetHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "default", Value: "", Usage: "If the meta-data value doesn't exist return this instead", }, cli.StringFlag{ Name: "job", Value: "", Usage: "Which job's build should the meta-data be retrieved from", EnvVar: "BUILDKITE_JOB_ID", }, cli.StringFlag{ Name: "build", Value: "", Usage: "Which build should the meta-data be retrieved from. --build will take precedence over --job", EnvVar: "BUILDKITE_METADATA_BUILD_ID", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := MetaDataGetConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) // Find the meta data value var metaData *api.MetaData var resp *api.Response scope := "job" id := cfg.Job if cfg.Build != "" { scope = "build" id = cfg.Build } err = roko.NewRetrier( roko.WithMaxAttempts(10), roko.WithStrategy(roko.Constant(5*time.Second)), ).DoWithContext(ctx, func(r *roko.Retrier) error { metaData, resp, err = client.GetMetaData(ctx, scope, id, cfg.Key) if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404 || resp.StatusCode == 400) { r.Break() return err } if err != nil { l.Warn("%s (%s)", err, r) return err } return nil }) if err != nil { if resp.StatusCode == 404 && c.IsSet("default") { l.Warn("No meta-data value exists with key `%s`, returning the supplied default \"%s\"", cfg.Key, cfg.Default) fmt.Print(cfg.Default) return } else { l.Fatal("Failed to get meta-data: %s", err) } } fmt.Print(metaData.Value) }, }
var MetaDataKeysCommand = cli.Command{ Name: "keys", Usage: "Lists all meta-data keys that have been previously set", Description: metaDataKeysHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "job", Value: "", Usage: "Which job's build should the meta-data be checked for", EnvVar: "BUILDKITE_JOB_ID", }, cli.StringFlag{ Name: "build", Value: "", Usage: "Which build should the meta-data be retrieved from. --build will take precedence over --job", EnvVar: "BUILDKITE_METADATA_BUILD_ID", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := MetaDataKeysConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) // Find the meta data keys var keys []string var resp *api.Response scope := "job" id := cfg.Job if cfg.Build != "" { scope = "build" id = cfg.Build } err = roko.NewRetrier( roko.WithMaxAttempts(10), roko.WithStrategy(roko.Constant(5*time.Second)), ).DoWithContext(ctx, func(r *roko.Retrier) error { keys, resp, err = client.MetaDataKeys(ctx, scope, id) if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404) { r.Break() } if err != nil { l.Warn("%s (%s)", err, r) return err } return nil }) if err != nil { l.Fatal("Failed to find meta-data keys: %s", err) } for _, key := range keys { fmt.Printf("%s\n", key) } }, }
var MetaDataSetCommand = cli.Command{ Name: "set", Usage: "Set data on a build", Description: metaDataSetHelpDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "job", Value: "", Usage: "Which job's build should the meta-data be set on", EnvVar: "BUILDKITE_JOB_ID", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) { ctx := context.Background() cfg := MetaDataSetConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Printf("%s", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() if len(c.Args()) < 2 { l.Info("Reading meta-data value from STDIN") input, err := io.ReadAll(os.Stdin) if err != nil { l.Fatal("Failed to read from STDIN: %s", err) } cfg.Value = string(input) } if strings.TrimSpace(cfg.Key) == "" { l.Fatal("Key cannot be empty, or composed of only whitespace characters") } if strings.TrimSpace(cfg.Value) == "" { l.Fatal("Value cannot be empty, or composed of only whitespace characters") } client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) metaData := &api.MetaData{ Key: cfg.Key, Value: cfg.Value, } err = roko.NewRetrier( roko.WithMaxAttempts(10), roko.WithStrategy(roko.Constant(5*time.Second)), ).DoWithContext(ctx, func(r *roko.Retrier) error { resp, err := client.SetMetaData(ctx, cfg.Job, metaData) if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404) { r.Break() } if err != nil { l.Warn("%s (%s)", err, r) return err } return nil }) if err != nil { l.Fatal("Failed to set meta-data: %s", err) } }, }
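As the Action above shows, when the value argument is omitted the set command reads the value from STDIN, which avoids shell-quoting problems for multi-line values. A sketch of doing that from Go, assuming the key is the positional argument and the binary is on PATH:

import (
	"context"
	"os"
	"os/exec"
	"strings"
)

// setMetaData runs "buildkite-agent meta-data set <key>" and supplies the
// value on STDIN instead of as a command-line argument.
func setMetaData(ctx context.Context, key, value string) error {
	cmd := exec.CommandContext(ctx, "buildkite-agent", "meta-data", "set", key)
	cmd.Stdin = strings.NewReader(value)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	return cmd.Run()
}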
var NoColorFlag = cli.BoolFlag{
Name: "no-color",
Usage: "Don't show colors in logging",
EnvVar: "BUILDKITE_AGENT_NO_COLOR",
}
var NoHTTP2Flag = cli.BoolFlag{
Name: "no-http2",
Usage: "Disable HTTP2 when communicating with the Agent API.",
EnvVar: "BUILDKITE_NO_HTTP2",
}
var OIDCRequestTokenCommand = cli.Command{ Name: "request-token", Usage: "Requests and prints an OIDC token from Buildkite with the specified audience,", Description: oidcTokenDescription, Flags: []cli.Flag{ cli.StringFlag{ Name: "audience", Usage: "The audience that will consume the OIDC token. The API will choose a default audience if it is omitted.", }, cli.IntFlag{ Name: "lifetime", Usage: "The time (in seconds) the OIDC token will be valid for before expiry. Must be a non-negative integer. If the flag is omitted or set to 0, the API will choose a default finite lifetime.", }, cli.StringFlag{ Name: "job", Usage: "Buildkite Job Id to claim in the OIDC token", EnvVar: "BUILDKITE_JOB_ID", }, cli.StringSliceFlag{ Name: "claim", Value: &cli.StringSlice{}, Usage: "Claims to add to the OIDC token", EnvVar: "BUILDKITE_OIDC_TOKEN_CLAIMS", }, AgentAccessTokenFlag, EndpointFlag, NoHTTP2Flag, DebugHTTPFlag, NoColorFlag, DebugFlag, LogLevelFlag, ExperimentsFlag, ProfileFlag, }, Action: func(c *cli.Context) error { ctx := context.Background() cfg := OIDCTokenConfig{} loader := cliconfig.Loader{CLI: c, Config: &cfg} warnings, err := loader.Load() if err != nil { fmt.Fprintf(c.App.ErrWriter, "%s\n", err) os.Exit(1) } l := CreateLogger(&cfg) for _, warning := range warnings { l.Warn("%s", warning) } done := HandleGlobalFlags(l, cfg) defer done() if cfg.Lifetime < 0 { l.Fatal("Lifetime %d must be a non-negative integer.", cfg.Lifetime) } client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")) // Request the token var token *api.OIDCToken if err := roko.NewRetrier( roko.WithMaxAttempts(maxAttempts), roko.WithStrategy(roko.Exponential(backoffSeconds*time.Second, 0)), ).DoWithContext(ctx, func(r *roko.Retrier) error { req := &api.OIDCTokenRequest{ Job: cfg.Job, Audience: cfg.Audience, Lifetime: cfg.Lifetime, Claims: cfg.Claims, } var resp *api.Response token, resp, err = client.OIDCToken(ctx, req) if resp != nil { switch resp.StatusCode { case http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusUnprocessableEntity: r.Break() return err } } if err != nil { l.Warn("%s (%s)", err, r) return err } return nil }); err != nil { if len(cfg.Audience) > 0 { l.Error("Could not obtain OIDC token for audience %s", cfg.Audience) } else { l.Error("Could not obtain OIDC token for default audience") } return err } fmt.Println(token.Token) return nil }, }
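The request-token command prints the OIDC token to stdout (fmt.Println(token.Token) above), so callers typically capture and trim it before handing it to whatever consumes the token. A minimal sketch, assuming the binary is on PATH and an illustrative audience value:

import (
	"context"
	"os/exec"
	"strings"
)

// requestOIDCToken captures the token printed by
// "buildkite-agent oidc request-token --audience <audience>".
func requestOIDCToken(ctx context.Context, audience string) (string, error) {
	out, err := exec.CommandContext(ctx,
		"buildkite-agent", "oidc", "request-token", "--audience", audience,
	).Output()
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}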
View Source
var PipelineUploadCommand = cli.Command{
    Name:        "upload",
    Usage:       "Uploads a description of a build pipeline and adds it to the currently running build after the current job",
    Description: pipelineUploadHelpDescription,
    Flags: []cli.Flag{
        cli.BoolFlag{
            Name:   "replace",
            Usage:  "Replace the rest of the existing pipeline with the steps uploaded. Jobs that are already running are not removed.",
            EnvVar: "BUILDKITE_PIPELINE_REPLACE",
        },
        cli.StringFlag{
            Name:   "job",
            Value:  "",
            Usage:  "The job that is making the changes to its build",
            EnvVar: "BUILDKITE_JOB_ID",
        },
        cli.BoolFlag{
            Name:   "dry-run",
            Usage:  "Rather than uploading the pipeline, it will be echoed to stdout",
            EnvVar: "BUILDKITE_PIPELINE_UPLOAD_DRY_RUN",
        },
        cli.StringFlag{
            Name:   "format",
            Usage:  "In dry-run mode, specifies the form to output the pipeline in. Must be one of: json,yaml",
            Value:  "json",
            EnvVar: "BUILDKITE_PIPELINE_UPLOAD_DRY_RUN_FORMAT",
        },
        cli.BoolFlag{
            Name:   "no-interpolation",
            Usage:  "Skip variable interpolation of the pipeline when uploaded",
            EnvVar: "BUILDKITE_PIPELINE_NO_INTERPOLATION",
        },
        cli.BoolFlag{
            Name:   "reject-secrets",
            Usage:  "When true, fail the pipeline upload early if the pipeline contains secrets",
            EnvVar: "BUILDKITE_AGENT_PIPELINE_UPLOAD_REJECT_SECRETS",
        },
        cli.StringFlag{
            Name:   "signing-key-path",
            Usage:  "Path to a file containing a signing key. Passing this flag enables pipeline signing. For hmac-sha256, the raw file content is used as the shared key",
            EnvVar: "BUILDKITE_PIPELINE_UPLOAD_SIGNING_KEY_PATH",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoHTTP2Flag,
        DebugHTTPFlag,
        NoColorFlag,
        DebugFlag,
        LogLevelFlag,
        ExperimentsFlag,
        ProfileFlag,
        RedactedVars,
    },
    Action: func(c *cli.Context) {
        ctx := context.Background()

        cfg := PipelineUploadConfig{}
        loader := cliconfig.Loader{CLI: c, Config: &cfg}
        warnings, err := loader.Load()
        if err != nil {
            fmt.Printf("%s", err)
            os.Exit(1)
        }

        l := CreateLogger(&cfg)
        for _, warning := range warnings {
            l.Warn("%s", warning)
        }

        done := HandleGlobalFlags(l, cfg)
        defer done()

        // Find the pipeline either from STDIN or the first argument
        var input *os.File
        var filename string

        switch {
        case cfg.FilePath != "":
            l.Info("Reading pipeline config from %q", cfg.FilePath)

            filename = filepath.Base(cfg.FilePath)
            file, err := os.Open(cfg.FilePath)
            if err != nil {
                l.Fatal("Failed to read file: %v", err)
            }
            defer file.Close()
            input = file

        case stdin.IsReadable():
            l.Info("Reading pipeline config from STDIN")

            input = os.Stdin

        default:
            l.Info("Searching for pipeline config...")

            paths := []string{
                "buildkite.yml",
                "buildkite.yaml",
                "buildkite.json",
                filepath.FromSlash(".buildkite/pipeline.yml"),
                filepath.FromSlash(".buildkite/pipeline.yaml"),
                filepath.FromSlash(".buildkite/pipeline.json"),
                filepath.FromSlash("buildkite/pipeline.yml"),
                filepath.FromSlash("buildkite/pipeline.yaml"),
                filepath.FromSlash("buildkite/pipeline.json"),
            }

            exists := []string{}
            for _, path := range paths {
                if _, err := os.Stat(path); err == nil {
                    exists = append(exists, path)
                }
            }

            if len(exists) > 1 {
                l.Fatal("Found multiple configuration files: %s. Please only have 1 configuration file present.", strings.Join(exists, ", "))
            }

            if len(exists) == 0 {
                l.Fatal("Could not find a default pipeline configuration file. See `buildkite-agent pipeline upload --help` for more information.")
            }

            found := exists[0]

            l.Info("Found config file %q", found)

            filename = path.Base(found)
            file, err := os.Open(found)
            if err != nil {
                l.Fatal("Failed to read file %q: %v", found, err)
            }
            defer file.Close()
            input = file
        }

        if input != os.Stdin {
            fi, err := input.Stat()
            if err != nil {
                l.Fatal("Couldn't stat pipeline configuration file %q: %v", input.Name(), err)
            }
            if fi.Size() == 0 {
                l.Fatal("Pipeline file %q is empty", input.Name())
            }
        }

        var environ *env.Environment
        if !cfg.NoInterpolation {
            environ = env.FromSlice(os.Environ())

            if commitRef, ok := environ.Get("BUILDKITE_COMMIT"); ok {
                cmdOut, err := exec.Command("git", "rev-parse", commitRef).Output()
                if err != nil {
                    l.Warn("Error running git rev-parse %q: %v", commitRef, err)
                } else {
                    trimmedCmdOut := strings.TrimSpace(string(cmdOut))
                    l.Info("Updating BUILDKITE_COMMIT to %q", trimmedCmdOut)
                    environ.Set("BUILDKITE_COMMIT", trimmedCmdOut)
                }
            }
        }

        src := filename
        if src == "" {
            src = "(stdin)"
        }

        result, err := pipeline.Parse(input)
        if err != nil {
            l.Fatal("Pipeline parsing of %q failed: %v", src, err)
        }

        if !cfg.NoInterpolation {
            if err := result.Interpolate(environ); err != nil {
                l.Fatal("Pipeline interpolation of %q failed: %v", src, err)
            }
        }

        if len(cfg.RedactedVars) > 0 {
            needles := redactor.VarsToRedact(shell.StderrLogger, cfg.RedactedVars, env.FromSlice(os.Environ()).Dump())

            serialisedPipeline, err := result.MarshalJSON()
            if err != nil {
                l.Fatal("Couldn't scan the %q pipeline for redacted variables. This parsed pipeline could not be serialized, ensure the pipeline YAML is valid, or ignore interpolated secrets for this upload by passing --redacted-vars=''. (%s)", src, err)
            }

            stringifiedserialisedPipeline := string(serialisedPipeline)

            secretsFound := make([]string, 0, len(needles))
            for needleKey, needle := range needles {
                if strings.Contains(stringifiedserialisedPipeline, needle) {
                    secretsFound = append(secretsFound, needleKey)
                }
            }

            if len(secretsFound) > 0 {
                if cfg.RejectSecrets {
                    l.Fatal("Pipeline %q contains values interpolated from the following secret environment variables: %v, and cannot be uploaded to Buildkite", src, secretsFound)
                } else {
                    l.Warn("Pipeline %q contains values interpolated from the following secret environment variables: %v, which could leak sensitive information into the Buildkite UI.", src, secretsFound)
                    l.Warn("This pipeline will still be uploaded, but if you'd like to prevent this from happening, you can use the `--reject-secrets` CLI flag, or the `BUILDKITE_AGENT_PIPELINE_UPLOAD_REJECT_SECRETS` environment variable, which will make the `buildkite-agent pipeline upload` command fail if it finds secrets in the pipeline.")
                    l.Warn("The behaviour of the above flags will become the default in Buildkite Agent v4")
                }
            }
        }

        if cfg.SigningKeyPath != "" {
            l.Warn("Pipeline signing is experimental and the user interface might change! Also it might not work, it might sign the pipeline only partially, or it might eat your pet dog. You have been warned!")

            key, err := os.ReadFile(cfg.SigningKeyPath)
            if err != nil {
                l.Fatal("Couldn't read the signing key file: %v", err)
            }

            signer, err := pipeline.NewSigner("hmac-sha256", key)
            if err != nil {
                l.Fatal("Couldn't create a pipeline signer: %v", err)
            }

            if err := result.Sign(signer); err != nil {
                l.Fatal("Couldn't sign pipeline: %v", err)
            }
        }

        if cfg.DryRun {
            var encode func(any) error

            switch cfg.DryRunFormat {
            case "json":
                enc := json.NewEncoder(os.Stdout)
                enc.SetIndent("", "  ")
                encode = enc.Encode

            case "yaml":
                encode = yaml.NewEncoder(os.Stdout).Encode

            default:
                l.Fatal("Unknown output format %q", cfg.DryRunFormat)
            }

            if err := encode(result); err != nil {
                l.Fatal("%#v", err)
            }

            return
        }

        if cfg.Job == "" {
            l.Fatal("Missing job parameter. Usually this is set in the environment for a Buildkite job via BUILDKITE_JOB_ID.")
        }

        if cfg.AgentAccessToken == "" {
            l.Fatal("Missing agent-access-token parameter. Usually this is set in the environment for a Buildkite job via BUILDKITE_AGENT_ACCESS_TOKEN.")
        }

        uploader := &agent.PipelineUploader{
            Client: api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken")),
            JobID:  cfg.Job,
            Change: &api.PipelineChange{
                UUID:     api.NewUUID(),
                Replace:  cfg.Replace,
                Pipeline: result,
            },
            RetrySleepFunc: time.Sleep,
        }

        if err := uploader.Upload(ctx, l); err != nil {
            l.Fatal("%v", err)
        }

        l.Info("Successfully uploaded and parsed pipeline config")
    },
}
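In dry-run mode the command above encodes the parsed pipeline to stdout instead of uploading it, choosing an encoder based on the --format value. The following standalone sketch mirrors that json/yaml switch; the encodeTo helper is hypothetical, and gopkg.in/yaml.v3 is assumed as the YAML package, which may differ from the one the agent actually uses.

package main

import (
    "encoding/json"
    "fmt"
    "os"

    "gopkg.in/yaml.v3" // assumed YAML package for this sketch
)

// encodeTo writes v to stdout as either indented JSON or YAML,
// mirroring the --format switch in the dry-run branch above.
func encodeTo(format string, v any) error {
    switch format {
    case "json":
        enc := json.NewEncoder(os.Stdout)
        enc.SetIndent("", "  ")
        return enc.Encode(v)
    case "yaml":
        return yaml.NewEncoder(os.Stdout).Encode(v)
    default:
        return fmt.Errorf("unknown output format %q", format)
    }
}

func main() {
    // A stand-in for the parsed pipeline structure.
    pipeline := map[string]any{
        "steps": []any{map[string]any{"command": "echo hello"}},
    }
    if err := encodeTo("yaml", pipeline); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
}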
View Source
var ProfileFlag = cli.StringFlag{
Name: "profile",
Usage: "Enable a profiling mode, either cpu, memory, mutex or block",
EnvVar: "BUILDKITE_AGENT_PROFILE",
}
View Source
var RedactedVars = cli.StringSliceFlag{
    Name:   "redacted-vars",
    Usage:  "Pattern of environment variable names containing sensitive values",
    EnvVar: "BUILDKITE_REDACTED_VARS",
    Value: &cli.StringSlice{
        "*_PASSWORD",
        "*_SECRET",
        "*_TOKEN",
        "*_PRIVATE_KEY",
        "*_ACCESS_KEY",
        "*_SECRET_KEY",
    },
}
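These defaults are glob-style patterns matched against environment variable names; variables whose names match are treated as secrets (for example, the pipeline upload command above scans the serialised pipeline for their values). The sketch below only illustrates the name-matching idea using the standard library's path.Match; matchRedacted is a hypothetical helper, not the agent's redactor implementation.

package main

import (
    "fmt"
    "path"
    "strings"
)

// matchRedacted reports which environment variables (given as KEY=VALUE pairs)
// have names matching any of the redacted-vars patterns, e.g. "*_PASSWORD".
// Illustration only; the agent's actual matching lives in its redactor package.
func matchRedacted(patterns []string, environ []string) []string {
    var hits []string
    for _, kv := range environ {
        name, _, ok := strings.Cut(kv, "=")
        if !ok {
            continue
        }
        for _, p := range patterns {
            if matched, _ := path.Match(p, name); matched {
                hits = append(hits, name)
                break
            }
        }
    }
    return hits
}

func main() {
    patterns := []string{"*_PASSWORD", "*_SECRET", "*_TOKEN"}
    environ := []string{"DB_PASSWORD=hunter2", "HOME=/root", "API_TOKEN=abc123"}
    fmt.Println(matchRedacted(patterns, environ)) // [DB_PASSWORD API_TOKEN]
}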
View Source
var StepGetCommand = cli.Command{
    Name:        "get",
    Usage:       "Get the value of an attribute",
    Description: stepGetHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name:   "step",
            Value:  "",
            Usage:  "The step to get. Can be either its ID (BUILDKITE_STEP_ID) or key (BUILDKITE_STEP_KEY)",
            EnvVar: "BUILDKITE_STEP_ID",
        },
        cli.StringFlag{
            Name:   "build",
            Value:  "",
            Usage:  "The build to look for the step in. Only required when targeting a step using its key (BUILDKITE_STEP_KEY)",
            EnvVar: "BUILDKITE_BUILD_ID",
        },
        cli.StringFlag{
            Name:   "format",
            Value:  "",
            Usage:  "The format to output the attribute value in (currently only JSON is supported)",
            EnvVar: "BUILDKITE_STEP_GET_FORMAT",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoHTTP2Flag,
        DebugHTTPFlag,
        NoColorFlag,
        DebugFlag,
        LogLevelFlag,
        ExperimentsFlag,
        ProfileFlag,
    },
    Action: func(c *cli.Context) {
        ctx := context.Background()

        cfg := StepGetConfig{}
        loader := cliconfig.Loader{CLI: c, Config: &cfg}
        warnings, err := loader.Load()
        if err != nil {
            fmt.Printf("%s", err)
            os.Exit(1)
        }

        l := CreateLogger(&cfg)
        for _, warning := range warnings {
            l.Warn("%s", warning)
        }

        done := HandleGlobalFlags(l, cfg)
        defer done()

        client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken"))

        stepExportRequest := &api.StepExportRequest{
            Build:     cfg.Build,
            Attribute: cfg.Attribute,
            Format:    cfg.Format,
        }

        // Find the step attribute
        var resp *api.Response
        var stepExportResponse *api.StepExportResponse
        err = roko.NewRetrier(
            roko.WithMaxAttempts(10),
            roko.WithStrategy(roko.Constant(5*time.Second)),
        ).DoWithContext(ctx, func(r *roko.Retrier) error {
            stepExportResponse, resp, err = client.StepExport(ctx, cfg.StepOrKey, stepExportRequest)
            if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404 || resp.StatusCode == 400) {
                r.Break()
            }
            if err != nil {
                l.Warn("%s (%s)", err, r)
                return err
            }
            return nil
        })
        if err != nil {
            l.Fatal("Failed to get step: %s", err)
        }

        fmt.Print(stepExportResponse.Output)
    },
}
View Source
var StepUpdateCommand = cli.Command{
    Name:        "update",
    Usage:       "Change the value of an attribute",
    Description: stepUpdateHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name:   "step",
            Value:  "",
            Usage:  "The step to update. Can be either its ID (BUILDKITE_STEP_ID) or key (BUILDKITE_STEP_KEY)",
            EnvVar: "BUILDKITE_STEP_ID",
        },
        cli.StringFlag{
            Name:   "build",
            Value:  "",
            Usage:  "The build to look for the step in. Only required when targeting a step using its key (BUILDKITE_STEP_KEY)",
            EnvVar: "BUILDKITE_BUILD_ID",
        },
        cli.BoolFlag{
            Name:   "append",
            Usage:  "Append to current attribute instead of replacing it",
            EnvVar: "BUILDKITE_STEP_UPDATE_APPEND",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoHTTP2Flag,
        DebugHTTPFlag,
        NoColorFlag,
        DebugFlag,
        LogLevelFlag,
        ExperimentsFlag,
        ProfileFlag,
    },
    Action: func(c *cli.Context) {
        ctx := context.Background()

        cfg := StepUpdateConfig{}
        loader := cliconfig.Loader{CLI: c, Config: &cfg}
        warnings, err := loader.Load()
        if err != nil {
            fmt.Printf("%s", err)
            os.Exit(1)
        }

        l := CreateLogger(&cfg)
        for _, warning := range warnings {
            l.Warn("%s", warning)
        }

        done := HandleGlobalFlags(l, cfg)
        defer done()

        if len(c.Args()) < 2 {
            l.Info("Reading value from STDIN")
            input, err := io.ReadAll(os.Stdin)
            if err != nil {
                l.Fatal("Failed to read from STDIN: %s", err)
            }
            cfg.Value = string(input)
        }

        client := api.NewClient(l, loadAPIClientConfig(cfg, "AgentAccessToken"))

        idempotencyUUID := api.NewUUID()

        update := &api.StepUpdate{
            IdempotencyUUID: idempotencyUUID,
            Build:           cfg.Build,
            Attribute:       cfg.Attribute,
            Value:           cfg.Value,
            Append:          cfg.Append,
        }

        err = roko.NewRetrier(
            roko.WithMaxAttempts(10),
            roko.WithStrategy(roko.Constant(5*time.Second)),
        ).DoWithContext(ctx, func(r *roko.Retrier) error {
            resp, err := client.StepUpdate(ctx, cfg.StepOrKey, update)
            if resp != nil && (resp.StatusCode == 400 || resp.StatusCode == 401 || resp.StatusCode == 404) {
                r.Break()
            }
            if err != nil {
                l.Warn("%s (%s)", err, r)
                return err
            }
            return nil
        })
        if err != nil {
            l.Fatal("Failed to change step: %s", err)
        }
    },
}
Functions ¶
func CreateLogger ¶
func DefaultConfigFilePaths ¶
func DefaultConfigFilePaths() (paths []string)
func DefaultShell ¶
func DefaultShell() string
func HandleGlobalFlags ¶
func HandleProfileFlag ¶
Types ¶
type AcknowledgementsConfig ¶ added in v3.45.0
type AcknowledgementsConfig struct{}
type AgentStartConfig ¶
type AgentStartConfig struct {
    Config string `cli:"config"`
    Name string `cli:"name"`
    Priority string `cli:"priority"`
    AcquireJob string `cli:"acquire-job"`
    DisconnectAfterJob bool `cli:"disconnect-after-job"`
    DisconnectAfterIdleTimeout int `cli:"disconnect-after-idle-timeout"`
    BootstrapScript string `cli:"bootstrap-script" normalize:"commandpath"`
    CancelGracePeriod int `cli:"cancel-grace-period"`
    EnableJobLogTmpfile bool `cli:"enable-job-log-tmpfile"`
    JobLogPath string `cli:"job-log-path" normalize:"filepath"`
    WriteJobLogsToStdout bool `cli:"write-job-logs-to-stdout"`
    BuildPath string `cli:"build-path" normalize:"filepath" validate:"required"`
    HooksPath string `cli:"hooks-path" normalize:"filepath"`
    SocketsPath string `cli:"sockets-path" normalize:"filepath"`
    PluginsPath string `cli:"plugins-path" normalize:"filepath"`
    Shell string `cli:"shell"`
    Tags []string `cli:"tags" normalize:"list"`
    TagsFromEC2MetaData bool `cli:"tags-from-ec2-meta-data"`
    TagsFromEC2MetaDataPaths []string `cli:"tags-from-ec2-meta-data-paths" normalize:"list"`
    TagsFromEC2Tags bool `cli:"tags-from-ec2-tags"`
    TagsFromECSMetaData bool `cli:"tags-from-ecs-meta-data"`
    TagsFromGCPMetaData bool `cli:"tags-from-gcp-meta-data"`
    TagsFromGCPMetaDataPaths []string `cli:"tags-from-gcp-meta-data-paths" normalize:"list"`
    TagsFromGCPLabels bool `cli:"tags-from-gcp-labels"`
    TagsFromHost bool `cli:"tags-from-host"`
    WaitForEC2TagsTimeout string `cli:"wait-for-ec2-tags-timeout"`
    WaitForEC2MetaDataTimeout string `cli:"wait-for-ec2-meta-data-timeout"`
    WaitForECSMetaDataTimeout string `cli:"wait-for-ecs-meta-data-timeout"`
    WaitForGCPLabelsTimeout string `cli:"wait-for-gcp-labels-timeout"`
    GitCheckoutFlags string `cli:"git-checkout-flags"`
    GitCloneFlags string `cli:"git-clone-flags"`
    GitCloneMirrorFlags string `cli:"git-clone-mirror-flags"`
    GitCleanFlags string `cli:"git-clean-flags"`
    GitFetchFlags string `cli:"git-fetch-flags"`
    GitMirrorsPath string `cli:"git-mirrors-path" normalize:"filepath"`
    GitMirrorsLockTimeout int `cli:"git-mirrors-lock-timeout"`
    GitMirrorsSkipUpdate bool `cli:"git-mirrors-skip-update"`
    NoGitSubmodules bool `cli:"no-git-submodules"`
    NoSSHKeyscan bool `cli:"no-ssh-keyscan"`
    NoCommandEval bool `cli:"no-command-eval"`
    NoLocalHooks bool `cli:"no-local-hooks"`
    NoPlugins bool `cli:"no-plugins"`
    NoPluginValidation bool `cli:"no-plugin-validation"`
    NoPTY bool `cli:"no-pty"`
    NoFeatureReporting bool `cli:"no-feature-reporting"`
    NoANSITimestamps bool `cli:"no-ansi-timestamps"`
    TimestampLines bool `cli:"timestamp-lines"`
    HealthCheckAddr string `cli:"health-check-addr"`
    MetricsDatadog bool `cli:"metrics-datadog"`
    MetricsDatadogHost string `cli:"metrics-datadog-host"`
    MetricsDatadogDistributions bool `cli:"metrics-datadog-distributions"`
    TracingBackend string `cli:"tracing-backend"`
    TracingServiceName string `cli:"tracing-service-name"`
    Spawn int `cli:"spawn"`
    SpawnWithPriority bool `cli:"spawn-with-priority"`
    LogFormat string `cli:"log-format"`
    CancelSignal string `cli:"cancel-signal"`
    RedactedVars []string `cli:"redacted-vars" normalize:"list"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    Token string `cli:"token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`

    // Deprecated
    NoSSHFingerprintVerification bool `cli:"no-automatic-ssh-fingerprint-verification" deprecated-and-renamed-to:"NoSSHKeyscan"`
    MetaData []string `cli:"meta-data" deprecated-and-renamed-to:"Tags"`
    MetaDataEC2 bool `cli:"meta-data-ec2" deprecated-and-renamed-to:"TagsFromEC2"`
    MetaDataEC2Tags bool `cli:"meta-data-ec2-tags" deprecated-and-renamed-to:"TagsFromEC2Tags"`
    MetaDataGCP bool `cli:"meta-data-gcp" deprecated-and-renamed-to:"TagsFromGCP"`
    TagsFromEC2 bool `cli:"tags-from-ec2" deprecated-and-renamed-to:"TagsFromEC2MetaData"`
    TagsFromGCP bool `cli:"tags-from-gcp" deprecated-and-renamed-to:"TagsFromGCPMetaData"`
    DisconnectAfterJobTimeout int `cli:"disconnect-after-job-timeout" deprecated:"Use disconnect-after-idle-timeout instead"`
}
func (AgentStartConfig) Features ¶ added in v3.38.0
func (asc AgentStartConfig) Features() []string
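The struct tags on AgentStartConfig (and the other config types below) follow a common convention: cli names the flag or argument that populates the field, normalize and validate add post-processing and validation hints, and deprecated-and-renamed-to points at a field's replacement. As a rough illustration of how such tags can be read, here is a minimal reflection-based sketch; exampleConfig is hypothetical, and this is not necessarily how the package's cliconfig.Loader is implemented.

package main

import (
    "fmt"
    "reflect"
)

// A trimmed-down, hypothetical config type using the same tag convention.
type exampleConfig struct {
    Name     string   `cli:"name"`
    Spawn    int      `cli:"spawn"`
    Tags     []string `cli:"tags" normalize:"list"`
    Endpoint string   `cli:"endpoint" validate:"required"`
}

func main() {
    t := reflect.TypeOf(exampleConfig{})
    for i := 0; i < t.NumField(); i++ {
        f := t.Field(i)
        // Each field advertises which CLI flag populates it, plus optional
        // normalization and validation hints.
        fmt.Printf("%-8s flag=%q normalize=%q required=%v\n",
            f.Name,
            f.Tag.Get("cli"),
            f.Tag.Get("normalize"),
            f.Tag.Get("validate") == "required",
        )
    }
}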
type AnnotateConfig ¶
type AnnotateConfig struct {
    Body string `cli:"arg:0" label:"annotation body"`
    Style string `cli:"style"`
    Context string `cli:"context"`
    Append bool `cli:"append"`
    Job string `cli:"job" validate:"required"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type AnnotationRemoveConfig ¶ added in v3.28.1
type AnnotationRemoveConfig struct {
    Context string `cli:"context" validate:"required"`
    Job string `cli:"job" validate:"required"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type ArtifactDownloadConfig ¶
type ArtifactDownloadConfig struct {
    Query string `cli:"arg:0" label:"artifact search query" validate:"required"`
    Destination string `cli:"arg:1" label:"artifact download path" validate:"required"`
    Step string `cli:"step"`
    Build string `cli:"build" validate:"required"`
    IncludeRetriedJobs bool `cli:"include-retried-jobs"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type ArtifactSearchConfig ¶ added in v3.23.0
type ArtifactSearchConfig struct {
    Query string `cli:"arg:0" label:"artifact search query" validate:"required"`
    Step string `cli:"step"`
    Build string `cli:"build" validate:"required"`
    IncludeRetriedJobs bool `cli:"include-retried-jobs"`
    AllowEmptyResults bool `cli:"allow-empty-results"`
    PrintFormat string `cli:"format"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type ArtifactShasumConfig ¶
type ArtifactShasumConfig struct {
    Query string `cli:"arg:0" label:"artifact search query" validate:"required"`
    Sha256 bool `cli:"sha256"`
    Step string `cli:"step"`
    Build string `cli:"build" validate:"required"`
    IncludeRetriedJobs bool `cli:"include-retried-jobs"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type ArtifactUploadConfig ¶
type ArtifactUploadConfig struct {
    UploadPaths string `cli:"arg:0" label:"upload paths" validate:"required"`
    Destination string `cli:"arg:1" label:"destination" env:"BUILDKITE_ARTIFACT_UPLOAD_DESTINATION"`
    Job string `cli:"job" validate:"required"`
    ContentType string `cli:"content-type"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`

    // Uploader flags
    GlobResolveFollowSymlinks bool `cli:"glob-resolve-follow-symlinks"`
    UploadSkipSymlinks bool `cli:"upload-skip-symlinks"`

    // deprecated
    FollowSymlinks bool `cli:"follow-symlinks" deprecated-and-renamed-to:"GlobResolveFollowSymlinks"`
}
type BootstrapConfig ¶
type BootstrapConfig struct {
    Command string `cli:"command"`
    JobID string `cli:"job" validate:"required"`
    Repository string `cli:"repository" validate:"required"`
    Commit string `cli:"commit" validate:"required"`
    Branch string `cli:"branch" validate:"required"`
    Tag string `cli:"tag"`
    RefSpec string `cli:"refspec"`
    Plugins string `cli:"plugins"`
    PullRequest string `cli:"pullrequest"`
    GitSubmodules bool `cli:"git-submodules"`
    SSHKeyscan bool `cli:"ssh-keyscan"`
    AgentName string `cli:"agent" validate:"required"`
    Queue string `cli:"queue"`
    OrganizationSlug string `cli:"organization" validate:"required"`
    PipelineSlug string `cli:"pipeline" validate:"required"`
    PipelineProvider string `cli:"pipeline-provider" validate:"required"`
    AutomaticArtifactUploadPaths string `cli:"artifact-upload-paths"`
    ArtifactUploadDestination string `cli:"artifact-upload-destination"`
    CleanCheckout bool `cli:"clean-checkout"`
    GitCheckoutFlags string `cli:"git-checkout-flags"`
    GitCloneFlags string `cli:"git-clone-flags"`
    GitFetchFlags string `cli:"git-fetch-flags"`
    GitCloneMirrorFlags string `cli:"git-clone-mirror-flags"`
    GitCleanFlags string `cli:"git-clean-flags"`
    GitMirrorsPath string `cli:"git-mirrors-path" normalize:"filepath"`
    GitMirrorsLockTimeout int `cli:"git-mirrors-lock-timeout"`
    GitMirrorsSkipUpdate bool `cli:"git-mirrors-skip-update"`
    GitSubmoduleCloneConfig []string `cli:"git-submodule-clone-config"`
    BinPath string `cli:"bin-path" normalize:"filepath"`
    BuildPath string `cli:"build-path" normalize:"filepath"`
    HooksPath string `cli:"hooks-path" normalize:"filepath"`
    SocketsPath string `cli:"sockets-path" normalize:"filepath"`
    PluginsPath string `cli:"plugins-path" normalize:"filepath"`
    CommandEval bool `cli:"command-eval"`
    PluginsEnabled bool `cli:"plugins-enabled"`
    PluginValidation bool `cli:"plugin-validation"`
    PluginsAlwaysCloneFresh bool `cli:"plugins-always-clone-fresh"`
    LocalHooksEnabled bool `cli:"local-hooks-enabled"`
    PTY bool `cli:"pty"`
    LogLevel string `cli:"log-level"`
    Debug bool `cli:"debug"`
    Shell string `cli:"shell"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Phases []string `cli:"phases" normalize:"list"`
    Profile string `cli:"profile"`
    CancelSignal string `cli:"cancel-signal"`
    RedactedVars []string `cli:"redacted-vars" normalize:"list"`
    TracingBackend string `cli:"tracing-backend"`
    TracingServiceName string `cli:"tracing-service-name"`
}
type EnvDumpConfig ¶ added in v3.43.0
type EnvDumpConfig struct{}
type EnvGetConfig ¶ added in v3.45.0
type EnvGetConfig struct{}
type EnvSetConfig ¶ added in v3.45.0
type EnvSetConfig struct{}
type EnvUnsetConfig ¶ added in v3.45.0
type EnvUnsetConfig struct{}
type LockAcquireConfig ¶ added in v3.46.1
type LockDoConfig ¶ added in v3.46.1
type LockDoneConfig ¶ added in v3.46.1
type LockGetConfig ¶ added in v3.46.1
type LockReleaseConfig ¶ added in v3.46.1
type MetaDataExistsConfig ¶
type MetaDataExistsConfig struct {
    Key string `cli:"arg:0" label:"meta-data key" validate:"required"`
    Job string `cli:"job"`
    Build string `cli:"build"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type MetaDataGetConfig ¶
type MetaDataGetConfig struct {
    Key string `cli:"arg:0" label:"meta-data key" validate:"required"`
    Default string `cli:"default"`
    Job string `cli:"job"`
    Build string `cli:"build"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type MetaDataKeysConfig ¶
type MetaDataKeysConfig struct {
    Job string `cli:"job"`
    Build string `cli:"build"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type MetaDataSetConfig ¶
type MetaDataSetConfig struct {
    Key string `cli:"arg:0" label:"meta-data key" validate:"required"`
    Value string `cli:"arg:1" label:"meta-data value"`
    Job string `cli:"job" validate:"required"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type OIDCTokenConfig ¶ added in v3.41.0
type OIDCTokenConfig struct {
    Audience string `cli:"audience"`
    Lifetime int `cli:"lifetime"`
    Job string `cli:"job" validate:"required"`
    // TODO: enumerate possible values, perhaps by adding a link to the documentation
    Claims []string `cli:"claim" normalize:"list"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type PipelineUploadConfig ¶
type PipelineUploadConfig struct {
    FilePath string `cli:"arg:0" label:"upload paths"`
    Replace bool `cli:"replace"`
    Job string `cli:"job"` // required, but not in dry-run mode
    DryRun bool `cli:"dry-run"`
    DryRunFormat string `cli:"format"`
    NoInterpolation bool `cli:"no-interpolation"`
    RedactedVars []string `cli:"redacted-vars" normalize:"list"`
    RejectSecrets bool `cli:"reject-secrets"`
    SigningKeyPath string `cli:"signing-key-path"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token"` // required, but not in dry-run mode
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type StepGetConfig ¶
type StepGetConfig struct {
    Attribute string `cli:"arg:0" label:"step attribute"`
    StepOrKey string `cli:"step" validate:"required"`
    Build string `cli:"build"`
    Format string `cli:"format"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
type StepUpdateConfig ¶
type StepUpdateConfig struct {
    Attribute string `cli:"arg:0" label:"attribute" validate:"required"`
    Value string `cli:"arg:1" label:"value"`
    Append bool `cli:"append"`
    StepOrKey string `cli:"step" validate:"required"`
    Build string `cli:"build"`

    // Global flags
    Debug bool `cli:"debug"`
    LogLevel string `cli:"log-level"`
    NoColor bool `cli:"no-color"`
    Experiments []string `cli:"experiment" normalize:"list"`
    Profile string `cli:"profile"`

    // API config
    DebugHTTP bool `cli:"debug-http"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoHTTP2 bool `cli:"no-http2"`
}
Source Files ¶
- acknowledgements.go
- agent_start.go
- annotate.go
- annotation_remove.go
- artifact_download.go
- artifact_search.go
- artifact_shasum.go
- artifact_upload.go
- bootstrap.go
- doc.go
- env_dump.go
- env_get.go
- env_set.go
- env_unset.go
- global.go
- lock_acquire.go
- lock_common.go
- lock_do.go
- lock_done.go
- lock_get.go
- lock_release.go
- meta_data_exists.go
- meta_data_get.go
- meta_data_keys.go
- meta_data_set.go
- oidc_request_token.go
- pipeline_upload.go
- profiler.go
- step_get.go
- step_update.go