Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Merge pull request #9 from MrPowers/remove-test-sugar
Remove spark-test-sugar dependency
  • Loading branch information
ssimeonov committed Oct 16, 2020
2 parents b9e65b9 + 57aa9b7 commit 5ea2935
Show file tree
Hide file tree
Showing 5 changed files with 33 additions and 12 deletions.
1 change: 0 additions & 1 deletion build.sbt
Expand Up @@ -25,7 +25,6 @@ libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-sql" % vSpark % "provided" withSources() excludeAll ExclusionRule(organization = "org.mortbay.jetty"),
"org.apache.logging.log4j" % "log4j-core" % "2.7" % "provided" withSources(),
"org.apache.logging.log4j" % "log4j-api" % "2.7" % "provided" withSources(),
"com.swoop" %% "spark-test-sugar" % "1.5.0" % "test" withSources(),
"org.scalatest" %% "scalatest" % "3.0.4" % "test" withSources()
)

Expand Down
12 changes: 12 additions & 0 deletions src/test/resources/log4j.properties
@@ -0,0 +1,12 @@
# Test-time Log4j 1.x configuration: route all logging to the console,
# suppressing everything below ERROR so test output stays readable.
# Set everything to be logged to the console
log4j.rootCategory=ERROR, console
# Console appender writes to stderr with a timestamped one-line pattern:
# "yy/MM/dd HH:mm:ss LEVEL LoggerName: message"
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

# Settings to quiet third party logs that are too verbose
# (Jetty and the Spark REPL interpreters log noisily at INFO/DEBUG.)
log4j.logger.org.eclipse.jetty=WARN
log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN
11 changes: 11 additions & 0 deletions src/test/scala/com/swoop/spark/SparkSessionTestWrapper.scala
@@ -0,0 +1,11 @@
package com.swoop.spark

import org.apache.spark.sql.SparkSession

/** Mixin that supplies a shared local [[SparkSession]] to test suites.
  *
  * The session runs with two local cores and only four shuffle
  * partitions to keep test execution fast on a single machine.
  */
trait SparkSessionTestWrapper {

  // Lazy so the (expensive) session is only started when a test first
  // touches `spark`; getOrCreate() reuses any session already running
  // in the JVM, so mixing this trait into many suites is cheap.
  lazy val spark: SparkSession = {
    val builder = SparkSession
      .builder()
      .master("local[2]")
      .appName("spark-records")
      .config("spark.sql.shuffle.partitions", "4")
    builder.getOrCreate()
  }

}
@@ -1,19 +1,19 @@
package com.swoop.spark.accumulators

import com.swoop.spark.test.SparkSqlSpec
import com.swoop.spark.SparkSessionTestWrapper
import org.scalatest.{Matchers, WordSpec}

import scala.collection.JavaConversions._


class ByKeyAdditiveAccumulatorTest extends WordSpec with Matchers with SparkSqlSpec {
class ByKeyAdditiveAccumulatorTest extends WordSpec with Matchers with SparkSessionTestWrapper {

"the accumulator" should {
"sum values into a map" in {
val acc = new ByKeyAdditiveAccumulator[String, Int]
sc.register(acc)
spark.sparkContext.register(acc)

sc.parallelize(1 to 100)
spark.sparkContext.parallelize(1 to 100)
.foreach { value =>
val category = if (value % 2 == 0) "even" else "odd"
acc.add(category, 1)
Expand Down
13 changes: 6 additions & 7 deletions src/test/scala/examples/fancy_numbers/SparkTest.scala
@@ -1,22 +1,22 @@
package examples.fancy_numbers

import com.swoop.spark.SparkSessionTestWrapper
import com.swoop.spark.records._
import com.swoop.spark.test.SparkSqlSpec
import org.apache.spark.sql.Dataset
import org.apache.spark.storage.StorageLevel


class SparkTest extends ExampleSpec with SparkSqlSpec with TestNegative5To100 {
class SparkTest extends ExampleSpec with SparkSessionTestWrapper with TestNegative5To100 {

val sc = spark.sparkContext
lazy val dc = SimpleDriverContext(sc)
lazy val jc = dc.jobContext(SimpleJobContext)
lazy val ds = recordsDataset(-5 to 100, jc)
lazy val records = ds.collect

"in an integration test" - {
implicit val env = FlatRecordEnvironment()
val sqlContext = sqlc
import sqlContext.implicits._
import spark.implicits._

behave like fancyRecordBuilder(records, jc)

Expand Down Expand Up @@ -60,9 +60,8 @@ class SparkTest extends ExampleSpec with SparkSqlSpec with TestNegative5To100 {
}

def recordsDataset(numbers: Seq[Int], jc: JobContext): Dataset[FancyNumberRecord] = {
val sqlContext = sqlc
import sqlContext.implicits._
sqlc.createDataset(numbers)
import spark.implicits._
spark.createDataset(numbers)
.mapPartitions(inputs => Example.buildRecords(inputs, jc))
.persist(StorageLevel.MEMORY_ONLY)
}
Expand Down

0 comments on commit 5ea2935

Please sign in to comment.