Documentation ¶
Index ¶
- Constants
- func CreateEC2IMDSHTTPClient(conf ServerConfig) *http.Client
- func CreateHTTPClient(conf ServerConfig, logger *util.Logger, retry bool) *http.Client
- type Config
- type ServerConfig
- func (config ServerConfig) GetDbHost() string
- func (config ServerConfig) GetDbName() string
- func (config ServerConfig) GetDbPort() int
- func (config ServerConfig) GetDbURLRedacted() string
- func (config ServerConfig) GetDbUsername() string
- func (config ServerConfig) GetPqOpenString(dbNameOverride string, passwordOverride string) (string, error)
- func (config ServerConfig) SupportsLogDownload() bool
- type ServerIdentifier
Constants ¶
View Source
const DefaultAPIBaseURL = "https://api.pganalyze.com"
Variables ¶
This section is empty.
Functions ¶
func CreateEC2IMDSHTTPClient ¶
func CreateEC2IMDSHTTPClient(conf ServerConfig) *http.Client
CreateEC2IMDSHTTPClient - Create HTTP client for EC2 Instance Metadata Service (IMDS)
func CreateHTTPClient ¶
Types ¶
type Config ¶
type Config struct {
Servers []ServerConfig
}
type ServerConfig ¶
type ServerConfig struct { APIKey string `ini:"api_key"` APIBaseURL string `ini:"api_base_url"` ErrorCallback string `ini:"error_callback"` SuccessCallback string `ini:"success_callback"` EnableReports bool `ini:"enable_reports"` DisableLogs bool `ini:"disable_logs"` DisableActivity bool `ini:"disable_activity"` EnableLogExplain bool `ini:"enable_log_explain"` DbURL string `ini:"db_url"` DbName string `ini:"db_name"` DbUsername string `ini:"db_username"` DbPassword string `ini:"db_password"` DbHost string `ini:"db_host"` DbPort int `ini:"db_port"` DbSslMode string `ini:"db_sslmode"` DbSslRootCert string `ini:"db_sslrootcert"` DbSslRootCertContents string `ini:"db_sslrootcert_contents"` DbSslCert string `ini:"db_sslcert"` DbSslCertContents string `ini:"db_sslcert_contents"` DbSslKey string `ini:"db_sslkey"` DbSslKeyContents string `ini:"db_sslkey_contents"` DbUseIamAuth bool `ini:"db_use_iam_auth"` // We have to do some tricks to support sslmode=prefer, namely we have to // first try an SSL connection (= require), and if that fails change the // sslmode to none DbSslModePreferFailed bool DbExtraNames []string // Additional databases that should be fetched (determined by additional databases in db_name) DbAllNames bool // All databases except template databases should be fetched (determined by * in the db_name list) AwsRegion string `ini:"aws_region"` AwsAccountID string `ini:"aws_account_id"` AwsDbInstanceID string `ini:"aws_db_instance_id"` AwsDbClusterID string `ini:"aws_db_cluster_id"` AwsDbClusterReadonly bool `ini:"aws_db_cluster_readonly"` AwsAccessKeyID string `ini:"aws_access_key_id"` AwsSecretAccessKey string `ini:"aws_secret_access_key"` AwsAssumeRole string `ini:"aws_assume_role"` AwsWebIdentityTokenFile string `ini:"aws_web_identity_token_file"` AwsRoleArn string `ini:"aws_role_arn"` // Support for custom AWS endpoints // See https://docs.aws.amazon.com/sdk-for-go/api/aws/endpoints/ AwsEndpointSigningRegion string `ini:"aws_endpoint_signing_region"` 
AwsEndpointSigningRegionLegacy string `ini:"aws_endpoint_rds_signing_region"` AwsEndpointRdsURL string `ini:"aws_endpoint_rds_url"` AwsEndpointEc2URL string `ini:"aws_endpoint_ec2_url"` AwsEndpointCloudwatchURL string `ini:"aws_endpoint_cloudwatch_url"` AwsEndpointCloudwatchLogsURL string `ini:"aws_endpoint_cloudwatch_logs_url"` AzureDbServerName string `ini:"azure_db_server_name"` AzureEventhubNamespace string `ini:"azure_eventhub_namespace"` AzureEventhubName string `ini:"azure_eventhub_name"` AzureADTenantID string `ini:"azure_ad_tenant_id"` AzureADClientID string `ini:"azure_ad_client_id"` AzureADClientSecret string `ini:"azure_ad_client_secret"` AzureADCertificatePath string `ini:"azure_ad_certificate_path"` AzureADCertificatePassword string `ini:"azure_ad_certificate_password"` GcpProjectID string `ini:"gcp_project_id"` // Optional for CloudSQL (you can pass the full "Connection name" as the instance ID) GcpCloudSQLInstanceID string `ini:"gcp_cloudsql_instance_id"` GcpAlloyDBClusterID string `ini:"gcp_alloydb_cluster_id"` GcpAlloyDBInstanceID string `ini:"gcp_alloydb_instance_id"` GcpPubsubSubscription string `ini:"gcp_pubsub_subscription"` GcpCredentialsFile string `ini:"gcp_credentials_file"` CrunchyBridgeClusterID string `ini:"crunchy_bridge_cluster_id"` AivenProjectID string `ini:"aiven_project_id"` AivenServiceID string `ini:"aiven_service_id"` SectionName string Identifier ServerIdentifier SystemID string `ini:"api_system_id"` SystemType string `ini:"api_system_type"` SystemScope string `ini:"api_system_scope"` SystemIDFallback string `ini:"api_system_id_fallback"` SystemTypeFallback string `ini:"api_system_type_fallback"` SystemScopeFallback string `ini:"api_system_scope_fallback"` AlwaysCollectSystemData bool `ini:"always_collect_system_data"` // Configures the location where logfiles are - this can either be a directory, // or a file - needs to readable by the regular pganalyze user LogLocation string `ini:"db_log_location"` // Configures the 
collector to tail a local docker container using // "docker logs -t" - this is currently experimental and mostly intended for // development and debugging. The value needs to be the name of the container. LogDockerTail string `ini:"db_log_docker_tail"` // Configures the collector to start a built-in syslog server that listens // on the specified "hostname:port" for Postgres log messages LogSyslogServer string `ini:"db_log_syslog_server"` // Configures the collector to use the "pg_read_file" (superuser) or // "pganalyze.read_log_file" (helper) function to retrieve log data // directly over the Postgres connection. This only works when superuser // access to the server is possible, either directly, or via the helper // function. Used by default for Crunchy Bridge. LogPgReadFile bool `ini:"db_log_pg_read_file"` // Specifies a table pattern to ignore - no statistics will be collected for // tables that match the name. This uses Golang's filepath.Match function for // comparison, so you can e.g. use "*" for wildcard matching. // // Deprecated: Please use ignore_schema_regexp instead, since that uses an // optimized code path in the collector and can avoid long-running queries. IgnoreTablePattern string `ini:"ignore_table_pattern"` // Specifies a regular expression to ignore - no statistics will be collected for // tables, views, functions, or schemas that match the name. Note that the match // is applied to the '.'-joined concatenation of schema name and object name. // E.g., to ignore tables that start with "ignored_", set this to "^ignored_". To // ignore table "foo" only in the public schema, set to "^public\.foo$" (N.B.: you // should escape the dot since that has special meaning in a regexp). 
IgnoreSchemaRegexp string `ini:"ignore_schema_regexp"` // Specifies the frequency of query statistics collection in seconds // // Currently supported values: 600 (10 minutes), 60 (1 minute) // // Defaults to once per minute (60) QueryStatsInterval int `ini:"query_stats_interval"` // Maximum connections allowed to the database with the collector // application_name, in order to protect against accidental connection leaks // in the collector // // This defaults to 10 connections, but you may want to raise this when running // the collector multiple times against the same database server MaxCollectorConnections int `ini:"max_collector_connections"` // Do not monitor this server while it is a replica (according to pg_is_in_recovery), // but keep checking on standard snapshot intervals and automatically start monitoring // once the server is promoted SkipIfReplica bool `ini:"skip_if_replica"` // Configuration for PII filtering FilterLogSecret string `ini:"filter_log_secret"` // none/all/credential/parsing_error/statement_text/statement_parameter/table_data/ops/unidentified (comma separated) FilterQuerySample string `ini:"filter_query_sample"` // none/normalize/all (defaults to "none") FilterQueryText string `ini:"filter_query_text"` // none/unparsable (defaults to "unparsable") // HTTP proxy overrides HTTPProxy string `ini:"http_proxy"` HTTPSProxy string `ini:"https_proxy"` NoProxy string `ini:"no_proxy"` // HTTP clients to be used for API connections HTTPClient *http.Client HTTPClientWithRetry *http.Client }
ServerConfig -
Contains the information how to connect to a Postgres instance, with optional AWS credentials to get metrics from AWS CloudWatch as well as RDS logfiles
func (ServerConfig) GetDbHost ¶
func (config ServerConfig) GetDbHost() string
GetDbHost - Gets the database hostname from the given configuration
func (ServerConfig) GetDbName ¶
func (config ServerConfig) GetDbName() string
GetDbName - Gets the database name from the given configuration
func (ServerConfig) GetDbPort ¶
func (config ServerConfig) GetDbPort() int
GetDbPort - Gets the database port from the given configuration
func (ServerConfig) GetDbURLRedacted ¶
func (config ServerConfig) GetDbURLRedacted() string
func (ServerConfig) GetDbUsername ¶
func (config ServerConfig) GetDbUsername() string
GetDbUsername - Gets the database username from the given configuration
func (ServerConfig) GetPqOpenString ¶
func (config ServerConfig) GetPqOpenString(dbNameOverride string, passwordOverride string) (string, error)
GetPqOpenString - Gets the database configuration as a string that can be passed to lib/pq for connecting
func (ServerConfig) SupportsLogDownload ¶ added in v0.43.0
func (config ServerConfig) SupportsLogDownload() bool
SupportsLogDownload - Determines whether the specified config can download logs
type ServerIdentifier ¶
type ServerIdentifier struct { APIKey string APIBaseURL string SystemID string SystemType string SystemScope string }
ServerIdentifier -
Unique identity of each configured server, for deduplication inside the collector. Note we intentionally don't include the Fallback variables in the identifier, since that is mostly intended to help transition systems when their "identity" is altered due to collector changes - in the collector we rely on the non-Fallback values only.
Click to show internal directories.
Click to hide internal directories.