From 5b59a449dc2535242e218ce2b8b316beae410564 Mon Sep 17 00:00:00 2001
From: Bryan Cutler
Date: Fri, 15 May 2015 17:03:23 -0700
Subject: [PATCH] [SPARK-6980] Add RpcTimeout unit tests

Covers two cases: a TimeoutException raised by askWithRetry should name
the timeout property that controls the wait, and an RpcTimeout should be
constructible from a timeout property set in SparkConf.
---
 .../org/apache/spark/rpc/RpcEnvSuite.scala | 28 +++++++++++++++++++++++++++-
 1 file changed, 27 insertions(+), 1 deletion(-)

diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index ae3339d80f9c6..93f46ca8a4ea9 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -155,6 +155,7 @@ abstract class RpcEnvSuite extends FunSuite with BeforeAndAfterAll {
     })
 
     val conf = new SparkConf()
+    val shortProp = "spark.rpc.short.timeout"
     conf.set("spark.rpc.retry.wait", "0")
     conf.set("spark.rpc.numRetries", "1")
     val anotherEnv = createRpcEnv(conf, "remote", 13345)
@@ -162,9 +163,16 @@
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-timeout")
     try {
       val e = intercept[Exception] {
-        rpcEndpointRef.askWithRetry[String]("hello", 1 millis)
+        rpcEndpointRef.askWithRetry[String]("hello", new RpcTimeout(1 millis, shortProp))
       }
       assert(e.isInstanceOf[TimeoutException] || e.getCause.isInstanceOf[TimeoutException])
+      // The exception message should name the property that controls this timeout
+      e match {
+        case te: TimeoutException =>
+          assert(te.getMessage().contains(shortProp))
+        case ex: Exception =>
+          assert(ex.getCause().getMessage().contains(shortProp))
+      }
     } finally {
       anotherEnv.shutdown()
       anotherEnv.awaitTermination()
@@ -539,6 +547,24 @@ abstract class RpcEnvSuite extends FunSuite with BeforeAndAfterAll {
     }
   }
 
+  test("construction of RpcTimeout using properties") {
+    val conf = new SparkConf
+
+    val testProp = "spark.ask.test.timeout"
+    val testDurationSeconds = 30
+
+    conf.set(testProp, testDurationSeconds.toString + "s")
+
+    // The configured value should parse to the expected duration
+    val rt = RpcTimeout(conf, testProp)
+    assert(testDurationSeconds === rt.duration.toSeconds)
+
+    // Construction from a property that has not been set should fail
+    intercept[Throwable] {
+      RpcTimeout(conf, "spark.ask.invalid.timeout")
+    }
+  }
+
 }
 
 class UnserializableClass
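
Note: the tests above assume an RpcTimeout along these lines. This is only a
minimal sketch inferred from the calls in the diff (new RpcTimeout(1 millis,
shortProp), RpcTimeout(conf, testProp), rt.duration), not the implementation
under review:

import scala.concurrent.duration._
import org.apache.spark.SparkConf

// Sketch only: couples a timeout duration with the configuration property
// that set it, so a timeout error can name the setting to tune.
class RpcTimeout(val duration: FiniteDuration, val timeoutProp: String)

object RpcTimeout {
  // Builds an RpcTimeout from a SparkConf entry such as
  // "spark.ask.test.timeout" -> "30s". SparkConf.getTimeAsSeconds throws
  // NoSuchElementException for an unset key, which would satisfy the
  // intercept[Throwable] in the new test.
  def apply(conf: SparkConf, timeoutProp: String): RpcTimeout =
    new RpcTimeout(conf.getTimeAsSeconds(timeoutProp).seconds, timeoutProp)
}

With this shape, the message check in the ask-timeout test corresponds to the
timeout exception embedding timeoutProp, e.g. a message that mentions
"spark.rpc.short.timeout".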