From 34cab3a7afafacc96656d7e0ff735cb642d8b683 Mon Sep 17 00:00:00 2001
From: echo2mei <534384876@qq.com>
Date: Wed, 23 Dec 2015 20:23:37 +0800
Subject: [PATCH] [SPARK-12396][Core] Change the registration retry timer from scheduleAtFixedRate to schedule.

Instead of relying on cancelling the registrationRetryTimer to keep the
driver from retrying the connection to the master, change the call from
scheduleAtFixedRate to schedule. There is no need to register with the
master repeatedly.
---
 .../main/scala/org/apache/spark/deploy/client/AppClient.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
index 1e2f469214b84..a5753e1053649 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
@@ -124,7 +124,7 @@ private[spark] class AppClient(
    */
   private def registerWithMaster(nthRetry: Int) {
     registerMasterFutures.set(tryRegisterAllMasters())
-    registrationRetryTimer.set(registrationRetryThread.scheduleAtFixedRate(new Runnable {
+    registrationRetryTimer.set(registrationRetryThread.schedule(new Runnable {
       override def run(): Unit = {
         Utils.tryOrExit {
           if (registered.get) {
@@ -138,7 +138,7 @@ private[spark] class AppClient(
           }
         }
       }
-    }, REGISTRATION_TIMEOUT_SECONDS, REGISTRATION_TIMEOUT_SECONDS, TimeUnit.SECONDS))
+    }, REGISTRATION_TIMEOUT_SECONDS, TimeUnit.SECONDS))
   }
 
   /**
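
Reviewer note (not part of the patch): the change swaps a repeating timer for a one-shot one. scheduleAtFixedRate re-runs the registration check every period until the returned future is cancelled, while schedule fires it exactly once and leaves any further attempt to the recursive registerWithMaster(nthRetry + 1) call. A minimal standalone Scala sketch of the difference, using java.util.concurrent.ScheduledExecutorService directly; retryThread and tryRegister are hypothetical placeholders, not Spark code, and the 20-second delay is only an example value.

import java.util.concurrent.{Executors, TimeUnit}

object SchedulingModes {
  def main(args: Array[String]): Unit = {
    val retryThread = Executors.newSingleThreadScheduledExecutor()

    // Hypothetical stand-in for a registration attempt.
    def tryRegister(): Unit = println("attempting registration...")

    // scheduleAtFixedRate: the task re-fires every 20 seconds until the
    // returned ScheduledFuture is cancelled, so the retry keeps running
    // after a successful registration unless it is cancelled explicitly.
    val repeating = retryThread.scheduleAtFixedRate(new Runnable {
      override def run(): Unit = tryRegister()
    }, 20, 20, TimeUnit.SECONDS)

    // schedule: the task fires exactly once after 20 seconds; another attempt
    // happens only if the task re-schedules itself (as registerWithMaster
    // does by calling itself with nthRetry + 1).
    val oneShot = retryThread.schedule(new Runnable {
      override def run(): Unit = tryRegister()
    }, 20, TimeUnit.SECONDS)

    repeating.cancel(true)
    oneShot.cancel(true)
    retryThread.shutdownNow()
  }
}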