diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index ee641bdfeb2d7..5ed3fe31c7f6a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -199,8 +199,15 @@ private[sql] class SQLConf extends Serializable {
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */
-  def setConf(props: Properties): Unit = settings.synchronized {
-    props.foreach { case (k, v) => settings.put(k, v) }
+  def setConf(props: Properties, overwrite: Boolean = true): Unit = settings.synchronized {
+    if (overwrite) {
+      props.foreach { case (k, v) => settings.put(k, v) }
+    } else {
+      // Keep any value that is already set; only fill in missing keys.
+      props
+        .filterNot { case (k, _) => settings.containsKey(k) }
+        .foreach { case (k, v) => settings.put(k, v) }
+    }
   }
 
   /** Set the given Spark SQL configuration property. */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index c25ef58e6f62a..e761009f7db79 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -73,7 +73,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
    *
    * @group config
    */
-  def setConf(props: Properties): Unit = conf.setConf(props)
+  def setConf(props: Properties, overwrite: Boolean = true): Unit = conf.setConf(props, overwrite)
 
   /**
    * Set the given Spark SQL configuration property.
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 7c6a7df2bd01e..e13f0e98e0f65 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -264,7 +264,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   }
 
   protected[hive] lazy val hiveconf: HiveConf = {
-    setConf(sessionState.getConf.getAllProperties)
+    setConf(sessionState.getConf.getAllProperties, overwrite = false)
     sessionState.getConf
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextInitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextInitSuite.scala
new file mode 100644
index 0000000000000..4f06d673e97b6
--- /dev/null
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextInitSuite.scala
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.sql.hive.test.TestHive
+
+class HiveContextInitSuite extends FunSuite {
+  test("SPARK-6675 Hive setConf") {
+    val hc = new HiveContext(TestHive.sparkContext)
+    hc.setConf("hive.metastore.warehouse.dir", "/home/spark/hive/warehouse_test")
+    hc.setConf("spark.sql.shuffle.partitions", "10")
+    assert(hc.getAllConfs.get("hive.metastore.warehouse.dir") ===
+      Some("/home/spark/hive/warehouse_test"))
+    assert(hc.getAllConfs.get("spark.sql.shuffle.partitions") === Some("10"))
+  }
+}