Skip to content

Commit eb96db5

Browse files
committed
[KYUUBI #3087] Convert the kyuubi batch conf with spark. prefix so that spark could identify
### _Why are the changes needed?_ We might need to transfer some parameters like `kyuubi.client.ipAddress`, but they cannot be identified by Spark, so we need to convert them. ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before make a pull request Closes #3087 from turboFei/batch_spark. Closes #3087 bf614e4 [Fei Wang] comment 58c651d [Fei Wang] refactor 2c39c51 [Fei Wang] convert the kyuubi conf with spark. prefix, so that we can expose some info Authored-by: Fei Wang <fwang12@ebay.com> Signed-off-by: Fei Wang <fwang12@ebay.com>
1 parent 976af3d commit eb96db5

File tree

2 files changed

+18
-16
lines changed

2 files changed

+18
-16
lines changed

kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ class SparkBatchProcessBuilder(
5151

5252
(batchKyuubiConf.getAll ++ sparkAppNameConf()).foreach { case (k, v) =>
5353
buffer += CONF
54-
buffer += s"$k=$v"
54+
buffer += s"${convertConfigKey(k)}=$v"
5555
}
5656

5757
buffer += PROXY_USER

kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,22 @@ class SparkProcessBuilder(
5454

5555
override def mainClass: String = "org.apache.kyuubi.engine.spark.SparkSQLEngine"
5656

57+
/**
58+
* Converts kyuubi config key so that Spark could identify.
59+
* - If the key is start with `spark.`, keep it AS IS as it is a Spark Conf
60+
* - If the key is start with `hadoop.`, it will be prefixed with `spark.hadoop.`
61+
* - Otherwise, the key will be added a `spark.` prefix
62+
*/
63+
protected def convertConfigKey(key: String): String = {
64+
if (key.startsWith("spark.")) {
65+
key
66+
} else if (key.startsWith("hadoop.")) {
67+
"spark.hadoop." + key
68+
} else {
69+
"spark." + key
70+
}
71+
}
72+
5773
override protected val commands: Array[String] = {
5874
KyuubiApplicationManager.tagApplication(engineRefId, shortName, clusterManager(), conf)
5975
val buffer = new ArrayBuffer[String]()
@@ -69,23 +85,9 @@ class SparkProcessBuilder(
6985
allConf = allConf ++ zkAuthKeytabFileConf(allConf)
7086
}
7187

72-
/**
73-
* Converts kyuubi configs to configs that Spark could identify.
74-
* - If the key is start with `spark.`, keep it AS IS as it is a Spark Conf
75-
* - If the key is start with `hadoop.`, it will be prefixed with `spark.hadoop.`
76-
* - Otherwise, the key will be added a `spark.` prefix
77-
*/
7888
allConf.foreach { case (k, v) =>
79-
val newKey =
80-
if (k.startsWith("spark.")) {
81-
k
82-
} else if (k.startsWith("hadoop.")) {
83-
"spark.hadoop." + k
84-
} else {
85-
"spark." + k
86-
}
8789
buffer += CONF
88-
buffer += s"$newKey=$v"
90+
buffer += s"${convertConfigKey(k)}=$v"
8991
}
9092

9193
// iff the keytab is specified, PROXY_USER is not supported

0 commit comments

Comments (0)