Documentation ¶
Index ¶
- Constants
- func GetDriverAnnotationOption(key string, value string) string
- func GetDriverEnvVarConfOptions(app *v1beta2.SparkApplication) []string
- func GetDriverSecretConfOptions(app *v1beta2.SparkApplication) []string
- func GetExecutorAnnotationOption(key string, value string) string
- func GetExecutorEnvVarConfOptions(app *v1beta2.SparkApplication) []string
- func GetExecutorSecretConfOptions(app *v1beta2.SparkApplication) []string
- func GetPrometheusConfigMapName(app *v1beta2.SparkApplication) string
Constants ¶
const (
	// DefaultSparkConfDir is the default directory for Spark configuration files if not specified.
	// This directory is where the Spark ConfigMap is mounted in the driver and executor containers.
	DefaultSparkConfDir = "/etc/spark/conf"
	// SparkConfigMapVolumeName is the name of the ConfigMap volume of Spark configuration files.
	SparkConfigMapVolumeName = "spark-configmap-volume"
	// DefaultHadoopConfDir is the default directory for Hadoop configuration files if not specified.
	// This directory is where the Hadoop ConfigMap is mounted in the driver and executor containers.
	DefaultHadoopConfDir = "/etc/hadoop/conf"
	// HadoopConfigMapVolumeName is the name of the ConfigMap volume of Hadoop configuration files.
	HadoopConfigMapVolumeName = "hadoop-configmap-volume"
	// SparkConfDirEnvVar is the environment variable to add to the driver and executor Pods that points
	// to the directory where the Spark ConfigMap is mounted.
	SparkConfDirEnvVar = "SPARK_CONF_DIR"
	// HadoopConfDirEnvVar is the environment variable to add to the driver and executor Pods that points
	// to the directory where the Hadoop ConfigMap is mounted.
	HadoopConfDirEnvVar = "HADOOP_CONF_DIR"
)
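For orientation, here is a minimal sketch (not the operator's own code) of how these constants could be combined to mount a Spark ConfigMap into a pod and point SPARK_CONF_DIR at it. The ConfigMap name "my-app-spark-conf" is hypothetical, and the import path of this package may differ by version.

package example

import (
	corev1 "k8s.io/api/core/v1"

	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config" // import path may differ by version
)

// addSparkConfMount mounts a hypothetical Spark ConfigMap ("my-app-spark-conf")
// at DefaultSparkConfDir in the pod's first container and exports SPARK_CONF_DIR.
// It assumes the pod already has at least one container.
func addSparkConfMount(pod *corev1.Pod) {
	pod.Spec.Volumes = append(pod.Spec.Volumes, corev1.Volume{
		Name: config.SparkConfigMapVolumeName,
		VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{Name: "my-app-spark-conf"},
			},
		},
	})
	c := &pod.Spec.Containers[0]
	c.VolumeMounts = append(c.VolumeMounts, corev1.VolumeMount{
		Name:      config.SparkConfigMapVolumeName,
		MountPath: config.DefaultSparkConfDir,
	})
	c.Env = append(c.Env, corev1.EnvVar{
		Name:  config.SparkConfDirEnvVar,
		Value: config.DefaultSparkConfDir,
	})
}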
const (
	// LabelAnnotationPrefix is the prefix of every label and annotation added by the controller.
	LabelAnnotationPrefix = "sparkoperator.k8s.io/"
	// SparkAppNameLabel is the name of the label for the SparkApplication object name.
	SparkAppNameLabel = LabelAnnotationPrefix + "app-name"
	// ScheduledSparkAppNameLabel is the name of the label for the ScheduledSparkApplication object name.
	ScheduledSparkAppNameLabel = LabelAnnotationPrefix + "scheduled-app-name"
	// LaunchedBySparkOperatorLabel is a label on Spark pods launched through the Spark Operator.
	LaunchedBySparkOperatorLabel = LabelAnnotationPrefix + "launched-by-spark-operator"
	// SparkApplicationSelectorLabel is the AppID set by the Spark distribution on the driver/executor Pods.
	SparkApplicationSelectorLabel = "spark-app-selector"
	// SparkRoleLabel is the driver/executor label set by the operator/Spark distribution on the driver/executor Pods.
	SparkRoleLabel = "spark-role"
	// SparkDriverRole is the value of the spark-role label for the driver.
	SparkDriverRole = "driver"
	// SparkExecutorRole is the value of the spark-role label for the executors.
	SparkExecutorRole = "executor"
	// SubmissionIDLabel is the label that records the submission ID of the current run of an application.
	SubmissionIDLabel = LabelAnnotationPrefix + "submission-id"
)
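As an illustration only (assuming these constants are imported from this package; the import path below is an assumption), the labels can be combined into a selector that matches the driver pod of a given application:

package example

import (
	"k8s.io/apimachinery/pkg/labels"

	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config" // import path may differ by version
)

// driverSelector builds a label selector matching the driver pod of the
// SparkApplication with the given name, using the labels defined above.
func driverSelector(appName string) labels.Selector {
	return labels.SelectorFromSet(labels.Set{
		config.SparkAppNameLabel: appName,
		config.SparkRoleLabel:    config.SparkDriverRole,
	})
}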
const (
	// SparkAppNameKey is the configuration property for application name.
	SparkAppNameKey = "spark.app.name"
	// SparkAppNamespaceKey is the configuration property for application namespace.
	SparkAppNamespaceKey = "spark.kubernetes.namespace"
	// SparkContainerImageKey is the configuration property for specifying the unified container image.
	SparkContainerImageKey = "spark.kubernetes.container.image"
	// SparkImagePullSecretKey is the configuration property for specifying the comma-separated list of image-pull
	// secrets.
	SparkImagePullSecretKey = "spark.kubernetes.container.image.pullSecrets"
	// SparkContainerImagePullPolicyKey is the configuration property for specifying the container image pull policy.
	SparkContainerImagePullPolicyKey = "spark.kubernetes.container.image.pullPolicy"
	// SparkNodeSelectorKeyPrefix is the configuration property prefix for specifying the node selector for the pods.
	SparkNodeSelectorKeyPrefix = "spark.kubernetes.node.selector."
	// SparkDriverContainerImageKey is the configuration property for specifying a custom driver container image.
	SparkDriverContainerImageKey = "spark.kubernetes.driver.container.image"
	// SparkExecutorContainerImageKey is the configuration property for specifying a custom executor container image.
	SparkExecutorContainerImageKey = "spark.kubernetes.executor.container.image"
	// SparkDriverCoreRequestKey is the configuration property for specifying the physical CPU request for the driver.
	SparkDriverCoreRequestKey = "spark.kubernetes.driver.request.cores"
	// SparkExecutorCoreRequestKey is the configuration property for specifying the physical CPU request for executors.
	SparkExecutorCoreRequestKey = "spark.kubernetes.executor.request.cores"
	// SparkDriverCoreLimitKey is the configuration property for specifying the hard CPU limit for the driver pod.
	SparkDriverCoreLimitKey = "spark.kubernetes.driver.limit.cores"
	// SparkExecutorCoreLimitKey is the configuration property for specifying the hard CPU limit for the executor pods.
	SparkExecutorCoreLimitKey = "spark.kubernetes.executor.limit.cores"
	// SparkDriverSecretKeyPrefix is the configuration property prefix for specifying secrets to be mounted into the
	// driver.
	SparkDriverSecretKeyPrefix = "spark.kubernetes.driver.secrets."
	// SparkExecutorSecretKeyPrefix is the configuration property prefix for specifying secrets to be mounted into the
	// executors.
	SparkExecutorSecretKeyPrefix = "spark.kubernetes.executor.secrets."
	// SparkDriverSecretKeyRefKeyPrefix is the configuration property prefix for specifying environment variables
	// from SecretKeyRefs for the driver.
	SparkDriverSecretKeyRefKeyPrefix = "spark.kubernetes.driver.secretKeyRef."
	// SparkExecutorSecretKeyRefKeyPrefix is the configuration property prefix for specifying environment variables
	// from SecretKeyRefs for the executors.
	SparkExecutorSecretKeyRefKeyPrefix = "spark.kubernetes.executor.secretKeyRef."
	// SparkDriverEnvVarConfigKeyPrefix is the Spark configuration prefix for setting environment variables
	// into the driver.
	SparkDriverEnvVarConfigKeyPrefix = "spark.kubernetes.driverEnv."
	// SparkExecutorEnvVarConfigKeyPrefix is the Spark configuration prefix for setting environment variables
	// into the executors.
	SparkExecutorEnvVarConfigKeyPrefix = "spark.executorEnv."
	// SparkDriverAnnotationKeyPrefix is the Spark configuration key prefix for annotations on the driver Pod.
	SparkDriverAnnotationKeyPrefix = "spark.kubernetes.driver.annotation."
	// SparkExecutorAnnotationKeyPrefix is the Spark configuration key prefix for annotations on the executor Pods.
	SparkExecutorAnnotationKeyPrefix = "spark.kubernetes.executor.annotation."
	// SparkDriverLabelKeyPrefix is the Spark configuration key prefix for labels on the driver Pod.
	SparkDriverLabelKeyPrefix = "spark.kubernetes.driver.label."
	// SparkExecutorLabelKeyPrefix is the Spark configuration key prefix for labels on the executor Pods.
	SparkExecutorLabelKeyPrefix = "spark.kubernetes.executor.label."
	// SparkDriverVolumesPrefix is the Spark configuration prefix for mounting a volume into the driver pod.
	SparkDriverVolumesPrefix = "spark.kubernetes.driver.volumes."
	// SparkExecutorVolumesPrefix is the Spark configuration prefix for mounting a volume into the executor pods.
	SparkExecutorVolumesPrefix = "spark.kubernetes.executor.volumes."
	// SparkDriverPodNameKey is the Spark configuration key for the driver pod name.
	SparkDriverPodNameKey = "spark.kubernetes.driver.pod.name"
	// SparkDriverServiceAccountName is the Spark configuration key for specifying the name of the Kubernetes service
	// account used by the driver pod.
	SparkDriverServiceAccountName = "spark.kubernetes.authenticate.driver.serviceAccountName"
	// SparkInitContainerImage is the Spark configuration key for specifying a custom init-container image.
	SparkInitContainerImage = "spark.kubernetes.initContainer.image"
	// SparkJarsDownloadDir is the Spark configuration key for specifying the download path in the driver and
	// executors for remote jars.
	SparkJarsDownloadDir = "spark.kubernetes.mountDependencies.jarsDownloadDir"
	// SparkFilesDownloadDir is the Spark configuration key for specifying the download path in the driver and
	// executors for remote files.
	SparkFilesDownloadDir = "spark.kubernetes.mountDependencies.filesDownloadDir"
	// SparkDownloadTimeout is the Spark configuration key for specifying the timeout in seconds for downloading
	// remote dependencies.
	SparkDownloadTimeout = "spark.kubernetes.mountDependencies.timeout"
	// SparkMaxSimultaneousDownloads is the Spark configuration key for specifying the maximum number of remote
	// dependencies to download simultaneously.
	SparkMaxSimultaneousDownloads = "spark.kubernetes.mountDependencies.maxSimultaneousDownloads"
	// SparkWaitAppCompletion is the Spark configuration key for specifying whether to wait for the application to complete.
	SparkWaitAppCompletion = "spark.kubernetes.submission.waitAppCompletion"
	// SparkPythonVersion is the Spark configuration key for specifying the Python version used.
	SparkPythonVersion = "spark.kubernetes.pyspark.pythonVersion"
	// SparkMemoryOverheadFactor is the Spark configuration key for specifying the memory overhead factor used for non-JVM memory.
	SparkMemoryOverheadFactor = "spark.kubernetes.memoryOverheadFactor"
	// SparkDriverJavaOptions is the Spark configuration key for a string of extra JVM options to pass to the driver.
	SparkDriverJavaOptions = "spark.driver.extraJavaOptions"
	// SparkExecutorJavaOptions is the Spark configuration key for a string of extra JVM options to pass to executors.
	SparkExecutorJavaOptions = "spark.executor.extraJavaOptions"
	// SparkExecutorDeleteOnTermination is the Spark configuration key for specifying whether executor pods should be
	// deleted in case of failure or normal termination.
	SparkExecutorDeleteOnTermination = "spark.kubernetes.executor.deleteOnTermination"
)
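These keys are typically passed to spark-submit as --conf options. The sketch below is not the operator's own submission code; it only shows how one such option could be assembled from a prefix constant, with a made-up node label and an assumed import path.

package example

import (
	"fmt"

	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config" // import path may differ by version
)

// nodeSelectorConf builds a spark-submit --conf option that pins pods to nodes
// carrying the given label. For example, nodeSelectorConf("disktype", "ssd")
// yields "--conf spark.kubernetes.node.selector.disktype=ssd".
func nodeSelectorConf(labelKey, labelValue string) string {
	return fmt.Sprintf("--conf %s%s=%s", config.SparkNodeSelectorKeyPrefix, labelKey, labelValue)
}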
const (
	// GoogleApplicationCredentialsEnvVar is the environment variable used by the
	// Application Default Credentials mechanism. More details can be found at
	// https://developers.google.com/identity/protocols/application-default-credentials.
	GoogleApplicationCredentialsEnvVar = "GOOGLE_APPLICATION_CREDENTIALS"
	// ServiceAccountJSONKeyFileName is the assumed name of the service account
	// JSON key file. This name is added to the service account secret mount path to
	// form the path to the JSON key file referred to by GOOGLE_APPLICATION_CREDENTIALS.
	ServiceAccountJSONKeyFileName = "key.json"
	// HadoopTokenFileLocationEnvVar is the environment variable for specifying the location
	// of the file storing the Hadoop delegation token.
	HadoopTokenFileLocationEnvVar = "HADOOP_TOKEN_FILE_LOCATION"
	// HadoopDelegationTokenFileName is the assumed name of the file storing the Hadoop
	// delegation token. This name is added to the delegation token secret mount path to
	// form the path to the file referred to by HADOOP_TOKEN_FILE_LOCATION.
	HadoopDelegationTokenFileName = "hadoop.token"
)
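A minimal sketch of the convention described above: the key file name is appended to the secret's mount path, and the resulting path is exported through the environment variable. The secret mount path passed in (e.g. "/mnt/secrets/gcp") and the import path are assumptions.

package example

import (
	"path/filepath"

	corev1 "k8s.io/api/core/v1"

	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config" // import path may differ by version
)

// gcpCredentialsEnvVar points GOOGLE_APPLICATION_CREDENTIALS at the key.json file
// under a hypothetical secret mount path such as "/mnt/secrets/gcp".
func gcpCredentialsEnvVar(secretMountPath string) corev1.EnvVar {
	return corev1.EnvVar{
		Name:  config.GoogleApplicationCredentialsEnvVar,
		Value: filepath.Join(secretMountPath, config.ServiceAccountJSONKeyFileName),
	}
}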
const (
	// PrometheusConfigMapNameSuffix is the name suffix of the Prometheus ConfigMap.
	PrometheusConfigMapNameSuffix = "prom-conf"
	// PrometheusConfigMapMountPath is the mount path of the Prometheus ConfigMap.
	PrometheusConfigMapMountPath = "/etc/metrics/conf"
)
const (
	// SparkDriverContainerName is the name of the driver container in the Spark driver pod.
	SparkDriverContainerName = "spark-kubernetes-driver"
	// SparkExecutorContainerName is the name of the executor container in the Spark executor pod.
	SparkExecutorContainerName = "executor"
	// Spark3DefaultExecutorContainerName is the default executor container name in
	// Spark 3.x, which allows the container name to be configured through the pod
	// template support.
	Spark3DefaultExecutorContainerName = "spark-kubernetes-executor"
	// SparkLocalDirVolumePrefix is the volume name prefix for "scratch" space directories.
	SparkLocalDirVolumePrefix = "spark-local-dir-"
)
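For example, a sketch (assuming the constants are imported from this package; the import path is a guess) of locating the driver container in a driver pod by name:

package example

import (
	corev1 "k8s.io/api/core/v1"

	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config" // import path may differ by version
)

// findDriverContainer returns the driver container of a driver pod, or nil if
// no container carries the expected name.
func findDriverContainer(pod *corev1.Pod) *corev1.Container {
	for i := range pod.Spec.Containers {
		if pod.Spec.Containers[i].Name == config.SparkDriverContainerName {
			return &pod.Spec.Containers[i]
		}
	}
	return nil
}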
const DefaultMetricsProperties = `` /* 189-byte string literal not displayed */
DefaultMetricsProperties is the default content of metrics.properties.
const DefaultPrometheusConfiguration = `` /* 2764-byte string literal not displayed */
DefaultPrometheusConfiguration is the default content of prometheus.yaml.
const DefaultPrometheusJavaAgentPort int32 = 8090
DefaultPrometheusJavaAgentPort is the default port used by the Prometheus JMX exporter.
Variables ¶
This section is empty.
Functions ¶
func GetDriverAnnotationOption ¶
func GetDriverAnnotationOption(key string, value string) string
GetDriverAnnotationOption returns a spark-submit option for a driver annotation of the given key and value.
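Given the SparkDriverAnnotationKeyPrefix constant above, the returned option presumably takes the form "--conf spark.kubernetes.driver.annotation.<key>=<value>" (an assumption, not confirmed by this page). A hedged usage sketch, with a purely illustrative annotation:

package example

import "github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config" // import path may differ by version

// driverAnnotationArgs appends a driver annotation option to a spark-submit
// argument list; the annotation key/value pair here is made up for illustration.
func driverAnnotationArgs(args []string) []string {
	return append(args, config.GetDriverAnnotationOption("prometheus.io/scrape", "true"))
}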
func GetDriverEnvVarConfOptions ¶
func GetDriverEnvVarConfOptions(app *v1beta2.SparkApplication) []string
GetDriverEnvVarConfOptions returns a list of spark-submit options for setting driver environment variables.
func GetDriverSecretConfOptions ¶
func GetDriverSecretConfOptions(app *v1beta2.SparkApplication) []string
GetDriverSecretConfOptions returns a list of spark-submit options for mounting driver secrets.
func GetExecutorAnnotationOption ¶
func GetExecutorAnnotationOption(key string, value string) string
GetExecutorAnnotationOption returns a spark-submit option for an executor annotation of the given key and value.
func GetExecutorEnvVarConfOptions ¶
func GetExecutorEnvVarConfOptions(app *v1beta2.SparkApplication) []string
GetExecutorEnvVarConfOptions returns a list of spark-submit options for setting executor environment variables.
func GetExecutorSecretConfOptions ¶
func GetExecutorSecretConfOptions(app *v1beta2.SparkApplication) []string
GetExecutorSecretConfOptions returns a list of spark-submit options for mounting executor secrets.
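Taken together, these helpers produce spark-submit arguments. A hedged sketch (not the operator's submission code) of collecting the env-var and secret-mount options for both roles of an application; the import paths are assumptions:

package example

import (
	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/apis/sparkoperator.k8s.io/v1beta2" // import path may differ by version
	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config"                            // import path may differ by version
)

// envAndSecretArgs collects the env-var and secret-mount options for both the
// driver and the executors of the given application.
func envAndSecretArgs(app *v1beta2.SparkApplication) []string {
	var args []string
	args = append(args, config.GetDriverEnvVarConfOptions(app)...)
	args = append(args, config.GetExecutorEnvVarConfOptions(app)...)
	args = append(args, config.GetDriverSecretConfOptions(app)...)
	args = append(args, config.GetExecutorSecretConfOptions(app)...)
	return args
}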
func GetPrometheusConfigMapName ¶ added in v0.1.2
func GetPrometheusConfigMapName(app *v1beta2.SparkApplication) string
GetPrometheusConfigMapName returns the name of the ConfigMap for Prometheus configuration.
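Given PrometheusConfigMapNameSuffix above, the returned name is presumably derived from the application name (for example "<app-name>-prom-conf"; this is an assumption). A hedged sketch of building a volume backed by that ConfigMap, to be mounted at PrometheusConfigMapMountPath; the volume name and import paths are assumptions:

package example

import (
	corev1 "k8s.io/api/core/v1"

	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/apis/sparkoperator.k8s.io/v1beta2" // import path may differ by version
	"github.com/GoogleCloudPlatform/spark-on-k8s-operator/pkg/config"                            // import path may differ by version
)

// prometheusConfVolume builds a volume backed by the application's Prometheus
// ConfigMap; it would be mounted at PrometheusConfigMapMountPath ("/etc/metrics/conf").
func prometheusConfVolume(app *v1beta2.SparkApplication) corev1.Volume {
	return corev1.Volume{
		Name: "prometheus-conf", // hypothetical volume name
		VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{
					Name: config.GetPrometheusConfigMapName(app),
				},
			},
		},
	}
}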
Types ¶
This section is empty.