Also run spark distribution based tests in local mode
alexarchambault committed Nov 13, 2018
1 parent 2a46377 commit b713ca9
Showing 8 changed files with 55 additions and 15 deletions.
2 changes: 2 additions & 0 deletions .travis.sh
@@ -4,6 +4,8 @@ set -e
case "${MASTER:-"local"}" in
local)
sbt ++$TRAVIS_SCALA_VERSION'!' publishLocal test ;;
local-distrib)
./with-spark-home.sh sbt ++$TRAVIS_SCALA_VERSION'!' publishLocal local-spark-distrib-tests/test ;;
standalone)
./sbt-with-standalone-cluster.sh ++$TRAVIS_SCALA_VERSION'!' publishLocal standalone-tests/test ;;
yarn)
2 changes: 2 additions & 0 deletions .travis.yml
@@ -17,6 +17,8 @@ stages:
if: (branch = master AND type = push) OR (tag IS present)
jobs:
include:
- env: MASTER=local-distrib
scala: 2.11.12
- env: MASTER=local
scala: 2.11.12
- env: MASTER=local
9 changes: 9 additions & 0 deletions build.sbt
@@ -70,6 +70,15 @@ lazy val tests = project
)
)

lazy val `local-spark-distrib-tests` = project
.dependsOn(tests)
.underModules
.settings(
shared,
dontPublish,
testSettings
)

lazy val `standalone-tests` = project
.dependsOn(tests)
.underModules
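The underModules helper used above comes from this repository's own build plumbing and is not part of this diff. Judging from paths like modules/tests/src/main/scala/..., it most likely just re-bases a project's directory under modules/. A hypothetical reconstruction, not the actual definition:

import sbt._

implicit class ProjectOps(val project: Project) extends AnyVal {
  // Hypothetical: place the project's base directory under modules/<id>.
  def underModules: Project =
    project.in(file("modules") / project.id)
}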
9 changes: 9 additions & 0 deletions LocalSparkHomeTests.scala
@@ -0,0 +1,9 @@
package ammonite.spark

object LocalSparkHomeTests extends SparkReplTests(
sys.env("SPARK_VERSION"),
Local.master
) {
override def sparkHomeBased =
true
}
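LocalSparkHomeTests is a thin subclass: it runs the full SparkReplTests session against the local master but flips the sparkHomeBased hook (added below) so that initialization goes through Init.sparkHomeInit. The suite assumes SPARK_VERSION and SPARK_HOME are both exported, presumably what with-spark-home.sh arranges. An illustrative precondition check, not part of the commit:

// Both env vars must be set, or sys.env("SPARK_VERSION") and the
// $SPARK_HOME lookup in sparkHomeInit will throw at session start.
for (key <- Seq("SPARK_VERSION", "SPARK_HOME"))
  require(sys.env.contains(key), s"$key must be set for LocalSparkHomeTests")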
19 changes: 19 additions & 0 deletions modules/tests/src/main/scala/ammonite/spark/Init.scala
@@ -32,6 +32,25 @@ object Init {
@ def sc = spark.sparkContext"""
}

def sparkHomeInit(
master: String,
sparkVersion: String,
conf: Seq[(String, String)],
prependBuilderCalls: Seq[String] = Nil
): String =
s"""
@ interp.load.cp {
@ import java.nio.file.{Files, Paths}, scala.collection.JavaConverters._
@ Files.list(Paths.get(s"$${sys.env("SPARK_HOME")}/jars"))
@ .iterator()
@ .asScala
@ .toVector
@ .filter(f => !f.getFileName.toString.startsWith("scala-compiler") && !f.getFileName.toString.startsWith("scala-reflect") && !f.getFileName.toString.startsWith("scala-library"))
@ .sortBy(_.getFileName.toString)
@ .map(ammonite.ops.Path(_))
@ }
""" ++ init(master, sparkVersion, conf, loadSparkSql = false)

def end = "@ spark.sparkContext.stop()"

def setupLog4j(): Unit =
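sparkHomeInit prepends a REPL snippet to the regular init: the snippet puts the Spark distribution's jars on the interpreter classpath through interp.load.cp, and it passes loadSparkSql = false because spark-sql already ships with the distribution. Unrolled out of the quoted session for readability, the jar selection amounts to the following (a sketch, not part of the commit; the real code maps the paths to ammonite.ops.Path before handing them to interp.load.cp):

import java.nio.file.{Files, Path, Paths}
import scala.collection.JavaConverters._

def sparkDistribJars(sparkHome: String): Vector[Path] =
  Files.list(Paths.get(s"$sparkHome/jars"))
    .iterator()
    .asScala
    .toVector
    // Ammonite already provides its own Scala runtime, so a second
    // scala-compiler / scala-reflect / scala-library would clash with it.
    .filter { f =>
      val name = f.getFileName.toString
      !name.startsWith("scala-compiler") &&
      !name.startsWith("scala-reflect") &&
      !name.startsWith("scala-library")
    }
    // Sort for a deterministic classpath order.
    .sortBy(_.getFileName.toString)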
14 changes: 12 additions & 2 deletions modules/tests/src/main/scala/ammonite/spark/SparkReplTests.scala
@@ -3,16 +3,26 @@ package ammonite.spark
import ammonite.spark.fromammonite.TestRepl
import utest._

class SparkReplTests(val sparkVersion: String, val master: String, val conf: (String, String)*) extends TestSuite {
class SparkReplTests(
val sparkVersion: String,
val master: String,
val conf: (String, String)*
) extends TestSuite {

// Most of the tests here were adapted from https://github.com/apache/spark/blob/ab18b02e66fd04bc8f1a4fb7b6a7f2773902a494/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala

Init.setupLog4j()

val check = new TestRepl

def sparkHomeBased: Boolean =
false

def init =
Init.init(master, sparkVersion, conf)
if (sparkHomeBased)
Init.sparkHomeInit(master, sparkVersion, conf)
else
Init.init(master, sparkVersion, conf)

check.session(init)

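sparkHomeBased acts as a template-method hook: the base suite defaults to false and initializes through Init.init (which, per the loadSparkSql flag above, loads spark-sql itself), while distribution-based suites override it to true so the jars come from $SPARK_HOME instead. A minimal opt-in looks like this (hypothetical suite name and master URL; it mirrors LocalSparkHomeTests above):

object MyDistribSuite extends SparkReplTests(
  sys.env("SPARK_VERSION"),
  "local[*]"  // illustrative master URL
) {
  // Routes init through Init.sparkHomeInit instead of Init.init.
  override def sparkHomeBased = true
}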
15 changes: 2 additions & 13 deletions YarnSparkDistribTests.scala
@@ -12,19 +12,8 @@ object YarnSparkDistribTests extends SparkReplTests(
if (!sys.env.contains("SPARK_HOME"))
sys.error("SPARK_HOME not set")

override def init =
s"""
@ interp.load.cp {
@ import java.nio.file.{Files, Paths}, scala.collection.JavaConverters._
@ Files.list(Paths.get("/spark/jars"))
@ .iterator()
@ .asScala
@ .toVector
@ .filter(f => !f.getFileName.toString.startsWith("scala-compiler") && !f.getFileName.toString.startsWith("scala-reflect") && !f.getFileName.toString.startsWith("scala-library"))
@ .sortBy(_.getFileName.toString)
@ .map(ammonite.ops.Path(_))
@ }
""" ++ Init.init(master, sparkVersion, conf, loadSparkSql = false)
override def sparkHomeBased =
true

override def inputUrlOpt =
Some(
