Commit

Move conf into sparkConf function
hvanhovell committed Apr 20, 2017
1 parent 638ac34 commit e2401f9
Showing 4 changed files with 7 additions and 6 deletions.
FileSourceStrategySuite.scala
@@ -42,7 +42,7 @@ import org.apache.spark.util.Utils
 class FileSourceStrategySuite extends QueryTest with SharedSQLContext with PredicateHelper {
   import testImplicits._

-  protected override def sparkConf = new SparkConf().set("spark.default.parallelism", "1")
+  protected override def sparkConf = super.sparkConf.set("spark.default.parallelism", "1")

   test("unpartitioned table, single partition") {
     val table =
CompactibleFileStreamLogSuite.scala
@@ -29,7 +29,7 @@ class CompactibleFileStreamLogSuite extends SparkFunSuite with SharedSQLContext

   /** To avoid caching of FS objects */
   override protected def sparkConf =
-    new SparkConf().set(s"spark.hadoop.fs.$scheme.impl.disable.cache", "true")
+    super.sparkConf.set(s"spark.hadoop.fs.$scheme.impl.disable.cache", "true")

   import CompactibleFileStreamLog._
HDFSMetadataLogSuite.scala
@@ -39,7 +39,7 @@ class HDFSMetadataLogSuite extends SparkFunSuite with SharedSQLContext {

   /** To avoid caching of FS objects */
   override protected def sparkConf =
-    new SparkConf().set(s"spark.hadoop.fs.$scheme.impl.disable.cache", "true")
+    super.sparkConf.set(s"spark.hadoop.fs.$scheme.impl.disable.cache", "true")

   private implicit def toOption[A](a: A): Option[A] = Option(a)
SharedSQLContext.scala
@@ -29,7 +29,9 @@ import org.apache.spark.sql.internal.SQLConf
  */
 trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach {

-  protected def sparkConf = new SparkConf()
+  protected def sparkConf = {
+    new SparkConf().set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName)
+  }

   /**
    * The [[TestSparkSession]] to use for all tests in this suite.
@@ -50,8 +52,7 @@
   protected implicit def sqlContext: SQLContext = _spark.sqlContext

   protected def createSparkSession: TestSparkSession = {
-    new TestSparkSession(
-      sparkConf.set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName))
+    new TestSparkSession(sparkConf)
   }

   /**
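Taken together, these diffs change how test suites configure Spark: the DebugFilesystem setting now lives in the base SharedSQLContext.sparkConf, and suites that need extra settings chain them onto super.sparkConf instead of building a fresh SparkConf (which previously dropped the base settings). A minimal sketch of a suite following this pattern; MyQuerySuite and the extra keys shown are illustrative assumptions, not part of this commit:

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.SharedSQLContext

// Hypothetical suite extending the base configuration via super.sparkConf
// rather than creating a new SparkConf, so the DebugFilesystem setting from
// SharedSQLContext is preserved.
class MyQuerySuite extends QueryTest with SharedSQLContext {

  protected override def sparkConf =
    super.sparkConf
      .set("spark.default.parallelism", "1")      // example extra setting
      .set("spark.sql.shuffle.partitions", "4")   // example extra setting
}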
