diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
index f54eff90a5e07..3a400c657bab3 100644
--- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
+++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
@@ -27,7 +27,6 @@ import org.apache.kafka.clients.producer.ProducerConfig
 import org.apache.kafka.clients.producer.internals.DefaultPartitioner
 import org.apache.kafka.common.Cluster
 import org.apache.kafka.common.serialization.ByteArraySerializer
-import org.scalatest.concurrent.TimeLimits.failAfter
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.{SparkConf, SparkContext, SparkException, TestUtils}
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index ff12f643497d0..692e3215aeff7 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -33,7 +33,9 @@ import org.apache.logging.log4j.core.appender.AbstractAppender
 import org.apache.logging.log4j.core.config.Property
 import org.scalactic.source.Position
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, Failed, Outcome, Tag}
+import org.scalatest.concurrent.TimeLimits
 import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
+import org.scalatest.time._ // scalastyle:ignore
 
 import org.apache.spark.deploy.LocalSparkCluster
 import org.apache.spark.internal.Logging
@@ -69,6 +71,7 @@ abstract class SparkFunSuite
   with BeforeAndAfterAll
   with BeforeAndAfterEach
   with ThreadAudit
+  with TimeLimits
   with Logging {
 // scalastyle:on
 
@@ -147,7 +150,10 @@ abstract class SparkFunSuite
     if (excluded.contains(testName)) {
       ignore(s"$testName (excluded)")(testBody)
     } else {
-      super.test(testName, testTags: _*)(testBody)
+      val timeout = sys.props.getOrElse("spark.test.timeout", "20").toLong
+      super.test(testName, testTags: _*)(
+        failAfter(Span(timeout, Minutes))(testBody)
+      )
     }
   }
 
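
The net effect of the `SparkFunSuite` change is that every test registered through `test(...)` is wrapped in ScalaTest's `failAfter`, with the limit read from the `spark.test.timeout` system property (in minutes, defaulting to 20). A minimal, self-contained sketch of the same pattern is below; the suite name `TimeBoundedSuite` is illustrative and not part of the patch.

```scala
import org.scalatest.concurrent.TimeLimits
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.time.{Minutes, Span}

// Illustrative suite name; not part of the patch.
class TimeBoundedSuite extends AnyFunSuite with TimeLimits {

  // Same lookup as the patch: override with -Dspark.test.timeout=<minutes> on the test JVM.
  private val timeoutMinutes = sys.props.getOrElse("spark.test.timeout", "20").toLong

  test("completes within the configured limit") {
    // failAfter fails the test with a timeout exception if the body exceeds the limit.
    failAfter(Span(timeoutMinutes, Minutes)) {
      assert(Seq(1, 2, 3).sum == 6)
    }
  }
}
```

Because `TimeLimits` is now mixed into `SparkFunSuite`, `failAfter` is in scope for every subclass, which is why the explicit `TimeLimits.failAfter` import in `KafkaSinkSuite` becomes redundant and is removed.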