From de5b5fe5942bdea0fbd0a98ee11fcca035dccaf0 Mon Sep 17 00:00:00 2001
From: Yuming Wang
Date: Fri, 7 Apr 2017 12:51:01 +0800
Subject: [PATCH] Catch exception when jar is missing.

---
 .../org/apache/spark/executor/Executor.scala  | 23 +++++++++++--------
 .../org/apache/spark/SparkContextSuite.scala  | 11 +++++++++
 2 files changed, 25 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 83469c5ff0600..3d36fd4b402eb 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -675,15 +675,20 @@ private[spark] class Executor(
           .getOrElse(-1L)
         if (currentTimeStamp < timestamp) {
           logInfo("Fetching " + name + " with timestamp " + timestamp)
-          // Fetch file with useCache mode, close cache for local mode.
-          Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
-            env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
-          currentJars(name) = timestamp
-          // Add it to our class loader
-          val url = new File(SparkFiles.getRootDirectory(), localName).toURI.toURL
-          if (!urlClassLoader.getURLs().contains(url)) {
-            logInfo("Adding " + url + " to class loader")
-            urlClassLoader.addURL(url)
+          try {
+            // Fetch file with useCache mode, close cache for local mode.
+            Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
+              env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
+            currentJars(name) = timestamp
+            // Add it to our class loader
+            val url = new File(SparkFiles.getRootDirectory(), localName).toURI.toURL
+            if (!urlClassLoader.getURLs().contains(url)) {
+              logInfo("Adding " + url + " to class loader")
+              urlClassLoader.addURL(url)
+            }
+          } catch {
+            case e: RuntimeException =>
+              logWarning(s"Failed to add ${name} to class loader, ${e}")
           }
         }
       }
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 735f4454e299e..678c2a7328174 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -310,6 +310,17 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
     sc.listJars().head should include (tmpJar.getName)
   }
 
+  test("add jar but this jar is missing later") {
+    val tmpDir = Utils.createTempDir()
+    val tmpJar = File.createTempFile("test-1.0.0", ".jar", tmpDir)
+
+    sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
+    sc.addJar(tmpJar.getAbsolutePath)
+    Utils.deleteRecursively(tmpJar)
+
+    assert(sc.parallelize(Array(1, 2, 3)).count === 3)
+  }
+
   test("Cancelling job group should not cause SparkContext to shutdown (SPARK-6414)") {
     try {
       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))