diff --git a/CHANGELOG.md b/CHANGELOG.md
index 342467ba..5b4db3b9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,6 +19,12 @@ All notable changes to this project will be documented in this file.
   We now correctly handle multiple certificates in this cases. See
   [this GitHub issue](https://github.com/stackabletech/issues/issues/764) for details
 
+- The service account of Spark applications can now be overridden with pod overrides ([#617]).
+
+  Previously, the application service account was passed as a command line argument to spark-submit
+  and thus could not be overridden with pod overrides for the driver and executors.
+  This CLI argument has now been moved to the pod templates of the individual roles.
+
 ### Removed
 
 - Support for Spark versions 3.5.5 has been dropped ([#610]).
@@ -28,6 +34,7 @@ All notable changes to this project will be documented in this file.
 
 [#608]: https://github.com/stackabletech/spark-k8s-operator/pull/608
 [#610]: https://github.com/stackabletech/spark-k8s-operator/pull/610
 [#611]: https://github.com/stackabletech/spark-k8s-operator/pull/611
+[#617]: https://github.com/stackabletech/spark-k8s-operator/pull/617
 
 ## [25.7.0] - 2025-07-23
diff --git a/rust/operator-binary/src/crd/mod.rs b/rust/operator-binary/src/crd/mod.rs
index 250c8733..88daf7b2 100644
--- a/rust/operator-binary/src/crd/mod.rs
+++ b/rust/operator-binary/src/crd/mod.rs
@@ -543,7 +543,6 @@ impl v1alpha1::SparkApplication {
     pub fn build_command(
         &self,
-        serviceaccount_name: &str,
         s3conn: &Option,
         log_dir: &Option,
         spark_image: &str,
@@ -585,10 +584,6 @@ impl v1alpha1::SparkApplication {
                 "--conf spark.kubernetes.executor.container.image={}",
                 spark_image.to_string()
             ),
-            format!(
-                "--conf spark.kubernetes.authenticate.driver.serviceAccountName={}",
-                serviceaccount_name
-            ),
             format!(
                 "--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"
             ),
diff --git a/rust/operator-binary/src/spark_k8s_controller.rs b/rust/operator-binary/src/spark_k8s_controller.rs
index 6619d813..2f15f6d0 100644
--- a/rust/operator-binary/src/spark_k8s_controller.rs
+++ b/rust/operator-binary/src/spark_k8s_controller.rs
@@ -324,6 +324,7 @@ pub async fn reconcile(
         &opt_s3conn,
         &logdir,
         &resolved_product_image,
+        &serviceaccount,
     )?;
     client
         .apply_patch(
@@ -352,6 +353,7 @@ pub async fn reconcile(
         &opt_s3conn,
         &logdir,
         &resolved_product_image,
+        &serviceaccount,
     )?;
     client
         .apply_patch(
@@ -363,13 +365,7 @@ pub async fn reconcile(
         .context(ApplyApplicationSnafu)?;
 
     let job_commands = spark_application
-        .build_command(
-            // TODO (@NickLarsenNZ): Explain this unwrap. Either convert to expect, or gracefully handle the error.
-            serviceaccount.metadata.name.as_ref().unwrap(),
-            &opt_s3conn,
-            &logdir,
-            &resolved_product_image.image,
-        )
+        .build_command(&opt_s3conn, &logdir, &resolved_product_image.image)
         .context(BuildCommandSnafu)?;
 
     let submit_config = spark_application
@@ -593,6 +589,7 @@ fn pod_template(
     s3conn: &Option,
     logdir: &Option,
     spark_image: &ResolvedProductImage,
+    service_account: &ServiceAccount,
 ) -> Result {
     let container_name = SparkContainer::Spark.to_string();
     let mut cb = ContainerBuilder::new(&container_name).context(IllegalContainerNameSnafu)?;
@@ -641,7 +638,8 @@ fn pod_template(
         .context(AddVolumeSnafu)?
         .security_context(security_context())
         .image_pull_secrets_from_product_image(spark_image)
-        .affinity(&config.affinity);
+        .affinity(&config.affinity)
+        .service_account_name(service_account.name_any());
 
     let init_containers = init_containers(
         spark_application,
@@ -700,6 +698,7 @@ fn pod_template_config_map(
     s3conn: &Option,
     logdir: &Option,
     spark_image: &ResolvedProductImage,
+    service_account: &ServiceAccount,
 ) -> Result {
     let cm_name = spark_application.pod_template_config_map_name(role.clone());
 
@@ -741,6 +740,7 @@ fn pod_template_config_map(
         s3conn,
         logdir,
         spark_image,
+        service_account,
     )?;
 
     let mut cm_builder = ConfigMapBuilder::new();
diff --git a/tests/templates/kuttl/overrides/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/overrides/10-deploy-spark-app.yaml.j2
index d59ceaf1..85080363 100644
--- a/tests/templates/kuttl/overrides/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/overrides/10-deploy-spark-app.yaml.j2
@@ -1,4 +1,21 @@
 ---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: override-sa
+---
+kind: RoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+  name: override-sa-binding
+subjects:
+  - kind: ServiceAccount
+    name: override-sa
+roleRef:
+  kind: ClusterRole
+  name: spark-k8s-clusterrole
+  apiGroup: rbac.authorization.k8s.io
+---
 apiVersion: spark.stackable.tech/v1alpha1
 kind: SparkApplication
 metadata:
@@ -39,6 +56,7 @@ spec:
       TEST_JOB_SPARK-ENV-SH: TEST
     podOverrides:
       spec:
+        serviceAccountName: override-sa
         containers:
           - name: spark-submit
             resources:
@@ -57,6 +75,7 @@ spec:
       TEST_DRIVER_SPARK-ENV-SH: TEST
     podOverrides:
       spec:
+        serviceAccountName: override-sa
         containers:
           - name: spark
             resources:
@@ -76,6 +95,7 @@ spec:
       TEST_EXECUTOR_SPARK-ENV-SH: TEST
     podOverrides:
       spec:
+        serviceAccountName: override-sa
         containers:
           - name: spark
             resources:
diff --git a/tests/templates/kuttl/overrides/11-assert.yaml b/tests/templates/kuttl/overrides/11-assert.yaml
index 8020c1e7..af5d33b4 100644
--- a/tests/templates/kuttl/overrides/11-assert.yaml
+++ b/tests/templates/kuttl/overrides/11-assert.yaml
@@ -13,6 +13,13 @@ commands:
       POD=$(kubectl -n $NAMESPACE get pod -l app.kubernetes.io/instance=spark-pi-s3-1 -o name | head -n 1 | sed -e 's#pod/##')
       kubectl -n $NAMESPACE get pod $POD -o yaml | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_0").value' | grep 'REPLACED'
       kubectl -n $NAMESPACE get pod $POD -o yaml | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_1").value' | grep 'DONOTREPLACE'
+  - script: |
+      for POD_SA_NAME in $(kubectl get pods -n $NAMESPACE -l app.kubernetes.io/instance=spark-pi-s3-1 -o=jsonpath='{.items[*].spec.serviceAccountName}'); do
+        if [ "$POD_SA_NAME" != "override-sa" ]; then
+          echo "Expected Pod service account [override-sa], but got [$POD_SA_NAME]"
+          exit 1
+        fi
+      done
 ---
 apiVersion: v1
 kind: ConfigMap
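
For reviewers, a minimal sketch of the user-facing override exercised by the kuttl test above. The ServiceAccount name `override-sa` and the instance name `spark-pi-s3-1` come from that test; all other required SparkApplication fields are omitted for brevity.

# override-sa must be an existing ServiceAccount bound to the spark-k8s-clusterrole
# (see the ServiceAccount and RoleBinding added in 10-deploy-spark-app.yaml.j2).
apiVersion: spark.stackable.tech/v1alpha1
kind: SparkApplication
metadata:
  name: spark-pi-s3-1
spec:
  job:
    podOverrides:
      spec:
        serviceAccountName: override-sa
  driver:
    podOverrides:
      spec:
        serviceAccountName: override-sa
  executor:
    podOverrides:
      spec:
        serviceAccountName: override-sa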