Documentation ¶
Index ¶
- Constants
- Variables
- func DefaultConfigFilePaths() (paths []string)
- func HandleGlobalFlags(cfg interface{})
- type AgentStartConfig
- type ArtifactDownloadConfig
- type ArtifactShasumConfig
- type ArtifactUploadConfig
- type MetaDataExistsConfig
- type MetaDataGetConfig
- type MetaDataSetConfig
- type PipelineUploadConfig
Constants ¶
const (
    DefaultEndpoint = "https://agent.buildkite.com/v3"
)
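DefaultEndpoint is the Agent API URL used whenever the --endpoint flag and BUILDKITE_AGENT_ENDPOINT are left unset. The sketch below shows how it is typically combined with an agent access token to build an API client, following the agent.APIClient{...}.Create() pattern used by the commands in the Variables section; the import paths and the token value are assumptions for illustration only.

package main

import (
    "github.com/buildkite/agent/agent"
    "github.com/buildkite/agent/clicommand"
)

func main() {
    // Create an Agent API client pointed at the default endpoint. The token
    // is a placeholder; the real commands read it from --agent-access-token
    // or BUILDKITE_AGENT_ACCESS_TOKEN.
    client := agent.APIClient{
        Endpoint: clicommand.DefaultEndpoint,
        Token: "placeholder-agent-access-token",
    }.Create()

    // client exposes the services the commands below use, such as
    // client.MetaData and client.Pipelines.
    _ = client
}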
Variables ¶
var AgentAccessTokenFlag = cli.StringFlag{
    Name: "agent-access-token",
    Value: "",
    Usage: "The access token used to identify the agent",
    EnvVar: "BUILDKITE_AGENT_ACCESS_TOKEN",
}
var AgentStartCommand = cli.Command{
    Name: "start",
    Usage: "Starts a Buildkite agent",
    Description: StartDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "config",
            Value: "",
            Usage: "Path to a configuration file",
            EnvVar: "BUILDKITE_AGENT_CONFIG",
        },
        cli.StringFlag{
            Name: "token",
            Value: "",
            Usage: "Your account agent token",
            EnvVar: "BUILDKITE_AGENT_TOKEN",
        },
        cli.StringFlag{
            Name: "name",
            Value: "",
            Usage: "The name of the agent",
            EnvVar: "BUILDKITE_AGENT_NAME",
        },
        cli.StringFlag{
            Name: "priority",
            Value: "",
            Usage: "The priority of the agent (higher priorities are assigned work first)",
            EnvVar: "BUILDKITE_AGENT_PRIORITY",
        },
        cli.StringSliceFlag{
            Name: "meta-data",
            Value: &cli.StringSlice{},
            Usage: "Meta data for the agent (default is \"queue=default\")",
            EnvVar: "BUILDKITE_AGENT_META_DATA",
        },
        cli.BoolFlag{
            Name: "meta-data-ec2-tags",
            Usage: "Populate the meta data from the current instance's EC2 tags",
        },
        cli.StringFlag{
            Name: "bootstrap-script",
            Value: "",
            Usage: "Path to the bootstrap script",
            EnvVar: "BUILDKITE_BOOTSTRAP_SCRIPT_PATH",
        },
        cli.StringFlag{
            Name: "build-path",
            Value: "",
            Usage: "Path to where the builds will run from",
            EnvVar: "BUILDKITE_BUILD_PATH",
        },
        cli.StringFlag{
            Name: "hooks-path",
            Value: "",
            Usage: "Directory where the hook scripts are found",
            EnvVar: "BUILDKITE_HOOKS_PATH",
        },
        cli.BoolFlag{
            Name: "no-pty",
            Usage: "Do not run jobs within a pseudo terminal",
            EnvVar: "BUILDKITE_NO_PTY",
        },
        cli.BoolFlag{
            Name: "no-automatic-ssh-fingerprint-verification",
            Usage: "Don't automatically verify SSH fingerprints",
            EnvVar: "BUILDKITE_NO_AUTOMATIC_SSH_FINGERPRINT_VERIFICATION",
        },
        cli.BoolFlag{
            Name: "no-command-eval",
            Usage: "Don't allow this agent to run arbitrary console commands",
            EnvVar: "BUILDKITE_NO_COMMAND_EVAL",
        },
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := AgentStartConfig{}

        loader := cliconfig.Loader{
            CLI: c,
            Config: &cfg,
            DefaultConfigFilePaths: DefaultConfigFilePaths(),
        }
        if err := loader.Load(); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        if runtime.GOOS == "windows" {
            cfg.NoAutoSSHFingerprintVerification = true
            cfg.NoPTY = true
            cfg.NoCommandEval = true
        }

        pool := agent.AgentPool{
            Token: cfg.Token,
            Name: cfg.Name,
            Priority: cfg.Priority,
            MetaData: cfg.MetaData,
            MetaDataEC2Tags: cfg.MetaDataEC2Tags,
            Endpoint: cfg.Endpoint,
            AgentConfiguration: &agent.AgentConfiguration{
                BootstrapScript: cfg.BootstrapScript,
                BuildPath: cfg.BuildPath,
                HooksPath: cfg.HooksPath,
                AutoSSHFingerprintVerification: !cfg.NoAutoSSHFingerprintVerification,
                CommandEval: !cfg.NoCommandEval,
                RunInPty: !cfg.NoPTY,
            },
        }

        if loader.File != nil {
            pool.ConfigFilePath = loader.File.Path
        }

        if err := pool.Start(); err != nil {
            logger.Fatal("%s", err)
        }
    },
}
var ArtifactDownloadCommand = cli.Command{
    Name: "download",
    Usage: "Downloads artifacts from Buildkite to the local machine",
    Description: DownloadHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "step",
            Value: "",
            Usage: "Scope the search to a particular step by using either its name or job ID",
        },
        cli.StringFlag{
            Name: "build",
            Value: "",
            EnvVar: "BUILDKITE_BUILD_ID",
            Usage: "The build that the artifacts were uploaded to",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := ArtifactDownloadConfig{}

        if err := cliconfig.Load(c, &cfg); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        downloader := agent.ArtifactDownloader{
            APIClient: agent.APIClient{
                Endpoint: cfg.Endpoint,
                Token: cfg.AgentAccessToken,
            }.Create(),
            Query: cfg.Query,
            Destination: cfg.Destination,
            BuildID: cfg.Build,
            Step: cfg.Step,
        }

        if err := downloader.Download(); err != nil {
            logger.Fatal("Failed to download artifacts: %s", err)
        }
    },
}
var ArtifactShasumCommand = cli.Command{
    Name: "shasum",
    Usage: "Prints the SHA-1 checksum for the artifact provided to STDOUT",
    Description: ShasumHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "step",
            Value: "",
            Usage: "Scope the search to a particular step by using either its name or job ID",
        },
        cli.StringFlag{
            Name: "build",
            Value: "",
            EnvVar: "BUILDKITE_BUILD_ID",
            Usage: "The build that the artifacts were uploaded to",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := ArtifactShasumConfig{}

        if err := cliconfig.Load(c, &cfg); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        searcher := agent.ArtifactSearcher{
            APIClient: agent.APIClient{
                Endpoint: cfg.Endpoint,
                Token: cfg.AgentAccessToken,
            }.Create(),
            BuildID: cfg.Build,
        }

        artifacts, err := searcher.Search(cfg.Query, cfg.Step)
        if err != nil {
            logger.Fatal("Failed to find artifacts: %s", err)
        }

        artifactsFoundLength := len(artifacts)

        if artifactsFoundLength == 0 {
            logger.Fatal("No artifacts found for downloading")
        } else if artifactsFoundLength > 1 {
            logger.Fatal("Multiple artifacts were found. Try being more specific with the search or scope by step")
        } else {
            logger.Debug("Artifact \"%s\" found", artifacts[0].Path)

            fmt.Printf("%s\n", artifacts[0].Sha1Sum)
        }
    },
}
var ArtifactUploadCommand = cli.Command{
    Name: "upload",
    Usage: "Uploads files to a job as artifacts",
    Description: UploadHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "job",
            Value: "",
            Usage: "Which job should the artifacts be uploaded to",
            EnvVar: "BUILDKITE_JOB_ID",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := ArtifactUploadConfig{}

        if err := cliconfig.Load(c, &cfg); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        uploader := agent.ArtifactUploader{
            APIClient: agent.APIClient{
                Endpoint: cfg.Endpoint,
                Token: cfg.AgentAccessToken,
            }.Create(),
            JobID: cfg.Job,
            Paths: cfg.UploadPaths,
            Destination: cfg.Destination,
        }

        if err := uploader.Upload(); err != nil {
            logger.Fatal("Failed to upload artifacts: %s", err)
        }
    },
}
var DebugFlag = cli.BoolFlag{
    Name: "debug",
    Usage: "Enable debug mode",
    EnvVar: "BUILDKITE_AGENT_DEBUG",
}
var DebugHTTPFlag = cli.BoolFlag{
    Name: "debug-http",
    Usage: "Enable HTTP debug mode, which dumps all request and response bodies to the log",
    EnvVar: "BUILDKITE_AGENT_DEBUG_HTTP",
}
var DownloadHelpDescription = `` /* 954-byte string literal not displayed */
var EndpointFlag = cli.StringFlag{
    Name: "endpoint",
    Value: DefaultEndpoint,
    Usage: "The Agent API endpoint",
    EnvVar: "BUILDKITE_AGENT_ENDPOINT",
}
var MetaDataExistsCommand = cli.Command{
    Name: "exists",
    Usage: "Check to see if the meta data key exists for a build",
    Description: MetaDataExistsHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "job",
            Value: "",
            Usage: "Which job should the meta-data be checked for",
            EnvVar: "BUILDKITE_JOB_ID",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := MetaDataExistsConfig{}

        if err := cliconfig.Load(c, &cfg); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        client := agent.APIClient{
            Endpoint: cfg.Endpoint,
            Token: cfg.AgentAccessToken,
        }.Create()

        // Find the meta data value
        var err error
        var exists *api.MetaDataExists
        var resp *api.Response
        err = retry.Do(func(s *retry.Stats) error {
            exists, resp, err = client.MetaData.Exists(cfg.Job, cfg.Key)
            if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404) {
                s.Break()
            }
            if err != nil {
                logger.Warn("%s (%s)", err, s)
            }

            return err
        }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
        if err != nil {
            logger.Fatal("Failed to see if meta-data exists: %s", err)
        }

        if !exists.Exists {
            os.Exit(100)
        }
    },
}
var MetaDataExistsHelpDescription = `` /* 265-byte string literal not displayed */
var MetaDataGetCommand = cli.Command{
    Name: "get",
    Usage: "Get data from a build",
    Description: MetaDataGetHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "job",
            Value: "",
            Usage: "Which job should the meta-data be retrieved from",
            EnvVar: "BUILDKITE_JOB_ID",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := MetaDataGetConfig{}

        if err := cliconfig.Load(c, &cfg); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        client := agent.APIClient{
            Endpoint: cfg.Endpoint,
            Token: cfg.AgentAccessToken,
        }.Create()

        // Find the meta data value
        var metaData *api.MetaData
        var err error
        var resp *api.Response
        err = retry.Do(func(s *retry.Stats) error {
            metaData, resp, err = client.MetaData.Get(cfg.Job, cfg.Key)
            if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404 || resp.StatusCode == 400) {
                s.Break()
            }
            if err != nil {
                logger.Warn("%s (%s)", err, s)
            }

            return err
        }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
        if err != nil {
            logger.Fatal("Failed to get meta-data: %s", err)
        }

        fmt.Print(metaData.Value)
    },
}
var MetaDataGetHelpDescription = `` /* 171-byte string literal not displayed */
var MetaDataSetCommand = cli.Command{
    Name: "set",
    Usage: "Set data on a build",
    Description: MetaDataSetHelpDescription,
    Flags: []cli.Flag{
        cli.StringFlag{
            Name: "job",
            Value: "",
            Usage: "Which job should the meta-data be set on",
            EnvVar: "BUILDKITE_JOB_ID",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := MetaDataSetConfig{}

        if err := cliconfig.Load(c, &cfg); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        client := agent.APIClient{
            Endpoint: cfg.Endpoint,
            Token: cfg.AgentAccessToken,
        }.Create()

        metaData := &api.MetaData{
            Key: cfg.Key,
            Value: cfg.Value,
        }

        err := retry.Do(func(s *retry.Stats) error {
            resp, err := client.MetaData.Set(cfg.Job, metaData)
            if resp != nil && (resp.StatusCode == 401 || resp.StatusCode == 404) {
                s.Break()
            }
            if err != nil {
                logger.Warn("%s (%s)", err, s)
            }

            return err
        }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
        if err != nil {
            logger.Fatal("Failed to set meta-data: %s", err)
        }
    },
}
var MetaDataSetHelpDescription = `` /* 206-byte string literal not displayed */
var NoColorFlag = cli.BoolFlag{
    Name: "no-color",
    Usage: "Don't show colors in logging",
    EnvVar: "BUILDKITE_AGENT_NO_COLOR",
}
var PipelineUploadCommand = cli.Command{
    Name: "upload",
    Usage: "Uploads a description of a build pipeline and adds it to the currently running build after the current job.",
    Description: PipelineUploadHelpDescription,
    Flags: []cli.Flag{
        cli.BoolFlag{
            Name: "replace",
            Usage: "Replace the rest of the existing pipeline with the steps uploaded. Jobs that are already running are not removed.",
            EnvVar: "BUILDKITE_PIPELINE_REPLACE",
        },
        cli.StringFlag{
            Name: "job",
            Value: "",
            Usage: "The job that is making the changes to its build",
            EnvVar: "BUILDKITE_JOB_ID",
        },
        AgentAccessTokenFlag,
        EndpointFlag,
        NoColorFlag,
        DebugFlag,
        DebugHTTPFlag,
    },
    Action: func(c *cli.Context) {
        cfg := PipelineUploadConfig{}

        loader := cliconfig.Loader{CLI: c, Config: &cfg}
        if err := loader.Load(); err != nil {
            logger.Fatal("%s", err)
        }

        HandleGlobalFlags(cfg)

        // Find the pipeline file either from STDIN or the first
        // argument
        var input []byte
        var err error
        var filename string

        if cfg.FilePath != "" {
            filename = filepath.Base(cfg.FilePath)
            input, err = ioutil.ReadFile(cfg.FilePath)
            if err != nil {
                logger.Fatal("Failed to read file: %s", err)
            }
        } else if !termutil.Isatty(os.Stdin.Fd()) {
            input, err = ioutil.ReadAll(os.Stdin)
            if err != nil {
                logger.Fatal("Failed to read from STDIN: %s", err)
            }
        } else {
            paths := []string{
                "buildkite.yml",
                "buildkite.json",
                ".buildkite/pipeline.yml",
                ".buildkite/pipeline.json",
                ".buildkite/steps.json",
            }

            exists := []string{}
            for _, path := range paths {
                if _, err := os.Stat(path); err == nil {
                    exists = append(exists, path)
                }
            }

            if len(exists) > 1 {
                logger.Fatal("Found multiple configuration files: %s. Please only have 1 configuration file present.", strings.Join(exists, ", "))
            } else if len(exists) == 0 {
                logger.Fatal("Could not find a default pipeline configuration file. See `buildkite-agent pipeline upload --help` for more information.")
            }

            found := exists[0]

            if found == ".buildkite/steps.json" {
                logger.Warn("The default steps.json file has been deprecated and will be removed in v2.2. Please rename to .buildkite/pipeline.json and wrap the steps array in a `steps` property: { \"steps\": [ ... ] }")
            }

            filename = path.Base(found)
            input, err = ioutil.ReadFile(found)
            if err != nil {
                logger.Fatal("Failed to read file %s: %s", found, err)
            }
        }

        client := agent.APIClient{
            Endpoint: cfg.Endpoint,
            Token: cfg.AgentAccessToken,
        }.Create()

        uuid := api.NewUUID()

        err = retry.Do(func(s *retry.Stats) error {
            _, err = client.Pipelines.Upload(cfg.Job, &api.Pipeline{UUID: uuid, Data: input, FileName: filename, Replace: cfg.Replace})
            if err != nil {
                logger.Warn("%s (%s)", err, s)
            }

            return err
        }, &retry.Config{Maximum: 5, Interval: 1 * time.Second})
        if err != nil {
            logger.Fatal("Failed to upload and process pipeline: %s", err)
        }

        logger.Info("Successfully uploaded and parsed pipeline config")
    },
}
var PipelineUploadHelpDescription = `` /* 737-byte string literal not displayed */
var ShasumHelpDescription = `` /* 926-byte string literal not displayed */
var StartDescription = `` /* 449-byte string literal not displayed */
var UploadHelpDescription = `` /* 784-byte string literal not displayed */
Functions ¶
func DefaultConfigFilePaths ¶
func DefaultConfigFilePaths() (paths []string)
func HandleGlobalFlags ¶
func HandleGlobalFlags(cfg interface{})
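Every command Action in the Variables section follows the same two steps: populate its config struct (from CLI flags, environment variables and, for agent start, the files returned by DefaultConfigFilePaths), then hand the result to HandleGlobalFlags, which is assumed to apply the shared NoColor, Debug and DebugHTTP settings. A condensed sketch of that flow, using only calls shown elsewhere on this page:

Action: func(c *cli.Context) {
    cfg := AgentStartConfig{}

    // Load values from flags, environment variables and any config file found
    // in DefaultConfigFilePaths().
    loader := cliconfig.Loader{
        CLI: c,
        Config: &cfg,
        DefaultConfigFilePaths: DefaultConfigFilePaths(),
    }
    if err := loader.Load(); err != nil {
        logger.Fatal("%s", err)
    }

    // Apply the global no-color/debug/debug-http settings carried on cfg.
    HandleGlobalFlags(cfg)

    // ...command-specific work follows...
},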
Types ¶
type AgentStartConfig ¶
type AgentStartConfig struct {
    Config string `cli:"config"`
    Token string `cli:"token" validate:"required"`
    Name string `cli:"name"`
    Priority string `cli:"priority"`
    BootstrapScript string `cli:"bootstrap-script" normalize:"filepath" validate:"required,file-exists"`
    BuildPath string `cli:"build-path" normalize:"filepath" validate:"required"`
    HooksPath string `cli:"hooks-path" normalize:"filepath"`
    MetaData []string `cli:"meta-data"`
    MetaDataEC2Tags bool `cli:"meta-data-ec2-tags"`
    NoColor bool `cli:"no-color"`
    NoAutoSSHFingerprintVerification bool `cli:"no-automatic-ssh-fingerprint-verification"`
    NoCommandEval bool `cli:"no-command-eval"`
    NoPTY bool `cli:"no-pty"`
    Endpoint string `cli:"endpoint" validate:"required"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type ArtifactDownloadConfig ¶
type ArtifactDownloadConfig struct {
    Query string `cli:"arg:0" label:"artifact search query" validate:"required"`
    Destination string `cli:"arg:1" label:"artifact download path" validate:"required"`
    Step string `cli:"step"`
    Build string `cli:"build" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type ArtifactShasumConfig ¶
type ArtifactShasumConfig struct {
    Query string `cli:"arg:0" label:"artifact search query" validate:"required"`
    Step string `cli:"step"`
    Build string `cli:"build" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type ArtifactUploadConfig ¶
type ArtifactUploadConfig struct {
    UploadPaths string `cli:"arg:0" label:"upload paths" validate:"required"`
    Destination string `cli:"arg:1" label:"destination"`
    Job string `cli:"job" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type MetaDataExistsConfig ¶
type MetaDataExistsConfig struct {
    Key string `cli:"arg:0" label:"meta-data key" validate:"required"`
    Job string `cli:"job" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type MetaDataGetConfig ¶
type MetaDataGetConfig struct {
    Key string `cli:"arg:0" label:"meta-data key" validate:"required"`
    Job string `cli:"job" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type MetaDataSetConfig ¶
type MetaDataSetConfig struct {
    Key string `cli:"arg:0" label:"meta-data key" validate:"required"`
    Value string `cli:"arg:1" label:"meta-data value" validate:"required"`
    Job string `cli:"job" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
type PipelineUploadConfig ¶
type PipelineUploadConfig struct {
    FilePath string `cli:"arg:0" label:"upload paths"`
    Replace bool `cli:"replace"`
    Job string `cli:"job" validate:"required"`
    AgentAccessToken string `cli:"agent-access-token" validate:"required"`
    Endpoint string `cli:"endpoint" validate:"required"`
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}
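All of the config types above share one tag convention: `cli:"arg:N"` binds the Nth positional argument, `cli:"flag-name"` binds the flag of that name (and its EnvVar), `label` appears to name the value for error reporting, `normalize:"filepath"` marks the value for path normalization, and `validate:"required"` makes loading fail when the value is missing. The struct below is hypothetical and exists only to illustrate those tags; it does not belong to any real command in this package.

// Hypothetical struct, shown only to illustrate the tag convention used by
// the real config types above.
type ExampleConfig struct {
    // First positional argument; loading fails if it is not supplied.
    Path string `cli:"arg:0" label:"example path" validate:"required"`

    // Bound to the --job flag (and BUILDKITE_JOB_ID via the flag definition).
    Job string `cli:"job" validate:"required"`

    // The global flags shared by every command in this package.
    NoColor bool `cli:"no-color"`
    Debug bool `cli:"debug"`
    DebugHTTP bool `cli:"debug-http"`
}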