diff --git a/core/pom.xml b/core/pom.xml
index e31d90f608892..8a20181096223 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -331,16 +331,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
org.apache.curator
curator-test
diff --git a/dev/run-tests.py b/dev/run-tests.py
index d8b22e1665e7b..1a816585187d9 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -118,6 +118,14 @@ def determine_modules_to_test(changed_modules):
return modules_to_test.union(set(changed_modules))
+def determine_tags_to_exclude(changed_modules):
+ tags = []
+ for m in modules.all_modules:
+ if m not in changed_modules:
+ tags += m.test_tags
+ return tags
+
+
# -------------------------------------------------------------------------------------------------
# Functions for working with subprocesses and shell tools
# -------------------------------------------------------------------------------------------------
@@ -369,6 +377,7 @@ def detect_binary_inop_with_mima():
def run_scala_tests_maven(test_profiles):
mvn_test_goals = ["test", "--fail-at-end"]
+
profiles_and_goals = test_profiles + mvn_test_goals
print("[info] Running Spark tests using Maven with these arguments: ",
@@ -392,7 +401,7 @@ def run_scala_tests_sbt(test_modules, test_profiles):
exec_sbt(profiles_and_goals)
-def run_scala_tests(build_tool, hadoop_version, test_modules):
+def run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags):
"""Function to properly execute all tests passed in as a set from the
`determine_test_suites` function"""
set_title_and_block("Running Spark unit tests", "BLOCK_SPARK_UNIT_TESTS")
@@ -401,6 +410,10 @@ def run_scala_tests(build_tool, hadoop_version, test_modules):
test_profiles = get_hadoop_profiles(hadoop_version) + \
list(set(itertools.chain.from_iterable(m.build_profile_flags for m in test_modules)))
+
+ if excluded_tags:
+ test_profiles += ['-Dtest.exclude.tags=' + ",".join(excluded_tags)]
+
if build_tool == "maven":
run_scala_tests_maven(test_profiles)
else:
@@ -500,8 +513,10 @@ def main():
target_branch = os.environ["ghprbTargetBranch"]
changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch)
changed_modules = determine_modules_for_files(changed_files)
+ excluded_tags = determine_tags_to_exclude(changed_modules)
if not changed_modules:
changed_modules = [modules.root]
+ excluded_tags = []
print("[info] Found the following changed modules:",
", ".join(x.name for x in changed_modules))
@@ -541,7 +556,7 @@ def main():
detect_binary_inop_with_mima()
# run the test suites
- run_scala_tests(build_tool, hadoop_version, test_modules)
+ run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags)
modules_with_python_tests = [m for m in test_modules if m.python_test_goals]
if modules_with_python_tests:
diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 346452f3174e4..65397f1f3e0bc 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -31,7 +31,7 @@ class Module(object):
def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=(), environ={},
sbt_test_goals=(), python_test_goals=(), blacklisted_python_implementations=(),
- should_run_r_tests=False):
+ test_tags=(), should_run_r_tests=False):
"""
Define a new module.
@@ -50,6 +50,8 @@ def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=
:param blacklisted_python_implementations: A set of Python implementations that are not
supported by this module's Python components. The values in this set should match
strings returned by Python's `platform.python_implementation()`.
+ :param test_tags: A set of tags that will be excluded when running unit tests if the module
+ is not explicitly changed.
:param should_run_r_tests: If true, changes in this module will trigger all R tests.
"""
self.name = name
@@ -60,6 +62,7 @@ def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=
self.environ = environ
self.python_test_goals = python_test_goals
self.blacklisted_python_implementations = blacklisted_python_implementations
+ self.test_tags = test_tags
self.should_run_r_tests = should_run_r_tests
self.dependent_modules = set()
@@ -85,6 +88,9 @@ def contains_file(self, filename):
"catalyst/test",
"sql/test",
"hive/test",
+ ],
+ test_tags=[
+ "org.apache.spark.sql.hive.ExtendedHiveTest"
]
)
@@ -398,6 +404,22 @@ def contains_file(self, filename):
)
+yarn = Module(
+ name="yarn",
+ dependencies=[],
+ source_file_regexes=[
+ "yarn/",
+ "network/yarn/",
+ ],
+ sbt_test_goals=[
+ "yarn/test",
+ "network-yarn/test",
+ ],
+ test_tags=[
+ "org.apache.spark.deploy.yarn.ExtendedYarnTest"
+ ]
+)
+
# The root module is a dummy module which is used to run all of the tests.
# No other modules should directly depend on this module.
root = Module(
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index 132062f94fb45..3154e36c21ef5 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -66,16 +66,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
target/scala-${scala.binary.version}/classes
diff --git a/external/kafka/pom.xml b/external/kafka/pom.xml
index 05abd9e2e6810..7d0d46dadc727 100644
--- a/external/kafka/pom.xml
+++ b/external/kafka/pom.xml
@@ -86,16 +86,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
target/scala-${scala.binary.version}/classes
diff --git a/external/mqtt/pom.xml b/external/mqtt/pom.xml
index 05e6338a08b0a..913c47d33f488 100644
--- a/external/mqtt/pom.xml
+++ b/external/mqtt/pom.xml
@@ -58,16 +58,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
org.apache.activemq
activemq-core
diff --git a/external/twitter/pom.xml b/external/twitter/pom.xml
index 244ad58ae9593..9137bf25ee8ae 100644
--- a/external/twitter/pom.xml
+++ b/external/twitter/pom.xml
@@ -58,16 +58,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
target/scala-${scala.binary.version}/classes
diff --git a/external/zeromq/pom.xml b/external/zeromq/pom.xml
index 171df8682c848..6fec4f0e8a0f9 100644
--- a/external/zeromq/pom.xml
+++ b/external/zeromq/pom.xml
@@ -57,16 +57,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
target/scala-${scala.binary.version}/classes
diff --git a/extras/java8-tests/pom.xml b/extras/java8-tests/pom.xml
index 81794a8536318..dba3dda8a9562 100644
--- a/extras/java8-tests/pom.xml
+++ b/extras/java8-tests/pom.xml
@@ -58,16 +58,6 @@
test-jar
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
diff --git a/extras/kinesis-asl/pom.xml b/extras/kinesis-asl/pom.xml
index 6dd8ff69c2943..760f183a2ef37 100644
--- a/extras/kinesis-asl/pom.xml
+++ b/extras/kinesis-asl/pom.xml
@@ -74,11 +74,6 @@
scalacheck_${scala.binary.version}
test
-
- com.novocode
- junit-interface
- test
-
target/scala-${scala.binary.version}/classes
diff --git a/launcher/pom.xml b/launcher/pom.xml
index ed38e66aa2467..80696280a1d18 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -42,11 +42,6 @@
log4j
test
-
- junit
- junit
- test
-
org.mockito
mockito-core
diff --git a/mllib/pom.xml b/mllib/pom.xml
index 22c0c6008ba37..5dedacb38874e 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -94,16 +94,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
org.mockito
mockito-core
diff --git a/network/common/pom.xml b/network/common/pom.xml
index 1cc054a8936c5..9c12cca0df609 100644
--- a/network/common/pom.xml
+++ b/network/common/pom.xml
@@ -64,16 +64,6 @@
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
log4j
log4j
diff --git a/network/shuffle/pom.xml b/network/shuffle/pom.xml
index 7a66c968041ce..e4f4c57b683c8 100644
--- a/network/shuffle/pom.xml
+++ b/network/shuffle/pom.xml
@@ -78,16 +78,6 @@
test-jar
test
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
log4j
log4j
diff --git a/pom.xml b/pom.xml
index 6535994641145..2927d3e107563 100644
--- a/pom.xml
+++ b/pom.xml
@@ -181,6 +181,7 @@
0.9.2
${java.home}
+
@@ -1952,6 +1964,7 @@
__not_used__
+ ${test.exclude.tags}
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 901cfa538d23e..d80d300f1c3b2 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -567,11 +567,20 @@ object TestSettings {
javaOptions in Test ++= "-Xmx3g -Xss4096k -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g"
.split(" ").toSeq,
javaOptions += "-Xmx3g",
+ // Exclude tags defined in a system property
+ testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest,
+ sys.props.get("test.exclude.tags").map { tags =>
+ tags.split(",").flatMap { tag => Seq("-l", tag) }.toSeq
+ }.getOrElse(Nil): _*),
+ testOptions in Test += Tests.Argument(TestFrameworks.JUnit,
+ sys.props.get("test.exclude.tags").map { tags =>
+ Seq("--exclude-categories=" + tags)
+ }.getOrElse(Nil): _*),
// Show full stack trace and duration in test cases.
testOptions in Test += Tests.Argument("-oDF"),
- testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
+ testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
// Enable Junit testing.
- libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test",
+ libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test",
// Only allow one test at a time, even across projects, since they run in the same JVM
parallelExecution in Test := false,
// Make sure the test temp directory exists.
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index 465aa3a3888c2..fa6732db183d8 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -73,11 +73,6 @@
jackson-databind
${fasterxml.jackson.version}
-
- junit
- junit
- test
-
org.scalacheck
scalacheck_${scala.binary.version}
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
index ab309e0a1d36b..ffc4c32794ca4 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
@@ -24,11 +24,13 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.scalatest.BeforeAndAfter
import org.apache.spark.sql.SQLConf
+import org.apache.spark.sql.hive.ExtendedHiveTest
import org.apache.spark.sql.hive.test.TestHive
/**
* Runs the test cases that are included in the hive distribution.
*/
+@ExtendedHiveTest
class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
// TODO: bundle in jar files... get from classpath
private lazy val hiveQueryDir = TestHive.getHiveFile(
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index ac67fe5f47be9..82cfeb2bb95d3 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -160,11 +160,6 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
org.apache.spark
spark-sql_${scala.binary.version}
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java
new file mode 100644
index 0000000000000..e2183183fb559
--- /dev/null
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive;
+
+import java.lang.annotation.*;
+import org.scalatest.TagAnnotation;
+
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface ExtendedHiveTest { }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index f0bb77092c0cf..888d1b7b45532 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -23,6 +23,7 @@ import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.sql.catalyst.expressions.{NamedExpression, Literal, AttributeReference, EqualTo}
import org.apache.spark.sql.catalyst.util.quietly
+import org.apache.spark.sql.hive.ExtendedHiveTest
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.util.Utils
@@ -32,6 +33,7 @@ import org.apache.spark.util.Utils
* sure that reflective calls are not throwing NoSuchMethod error, but the actually functionality
* is not fully tested.
*/
+@ExtendedHiveTest
class VersionsSuite extends SparkFunSuite with Logging {
// Do not use a temp path here to speed up subsequent executions of the unit test during
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 5cc9001b0e9ab..1e6ee009ca6d5 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -84,21 +84,11 @@
scalacheck_${scala.binary.version}
test
-
- junit
- junit
- test
-
org.seleniumhq.selenium
selenium-java
test
-
- com.novocode
- junit-interface
- test
-
target/scala-${scala.binary.version}/classes
diff --git a/unsafe/pom.xml b/unsafe/pom.xml
index 066abe92e51c0..4e8b9a84bb67f 100644
--- a/unsafe/pom.xml
+++ b/unsafe/pom.xml
@@ -55,16 +55,6 @@
-
- junit
- junit
- test
-
-
- com.novocode
- junit-interface
- test
-
org.mockito
mockito-core
diff --git a/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java b/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java
new file mode 100644
index 0000000000000..7a8f2fe979c1f
--- /dev/null
+++ b/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.deploy.yarn;
+
+import java.lang.annotation.*;
+import org.scalatest.TagAnnotation;
+
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface ExtendedYarnTest { }
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index b5a42fd6afd98..105c3090d489d 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -39,6 +39,7 @@ import org.apache.spark.util.Utils
* applications, and require the Spark assembly to be built before they can be successfully
* run.
*/
+@ExtendedYarnTest
class YarnClusterSuite extends BaseYarnClusterSuite {
override def newYarnConfig(): YarnConfiguration = new YarnConfiguration()
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
index 8d9c9b3004eda..4700e2428df08 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
@@ -32,6 +32,7 @@ import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor}
/**
* Integration test for the external shuffle service with a yarn mini-cluster
*/
+@ExtendedYarnTest
class YarnShuffleIntegrationSuite extends BaseYarnClusterSuite {
override def newYarnConfig(): YarnConfiguration = {