From c6cfd33ccf34dd6c19ba8263eec0061f1034ba28 Mon Sep 17 00:00:00 2001 From: Bryan Cutler Date: Tue, 23 Jun 2015 11:52:16 -0700 Subject: [PATCH] [SPARK-6980] Changed UT ask message timeout to explicitly intercept a SparkException --- .../scala/org/apache/spark/rpc/RpcEnvSuite.scala | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala index 7b728334e9282..4087a7fb5bcc6 100644 --- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala +++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala @@ -162,16 +162,14 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll { // Use anotherEnv to find out the RpcEndpointRef val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-timeout") try { - val e = intercept[Exception] { + // Any exception thrown in askWithRetry is wrapped in a SparkException and set as its cause + val e = intercept[SparkException] { rpcEndpointRef.askWithRetry[String]("hello", new RpcTimeout(1 millis, shortProp)) } - assert(e.isInstanceOf[TimeoutException] || e.getCause.isInstanceOf[TimeoutException]) - e match { - case te: TimeoutException => - assert(te.getMessage().contains(shortProp)) - case e: Exception => - assert(e.getCause().getMessage().contains(shortProp)) - } + // The SparkException cause should be an RpcTimeoutException with a message indicating the + // controlling timeout property + assert(e.getCause.isInstanceOf[RpcTimeoutException]) + assert(e.getCause().getMessage().contains(shortProp)) } finally { anotherEnv.shutdown() anotherEnv.awaitTermination()