From d8cf2b493a589b745d54b3b903848d4d0827e642 Mon Sep 17 00:00:00 2001
From: sharkd
Date: Wed, 13 Jul 2016 07:59:26 +0800
Subject: [PATCH 1/5] rebase apache/master

---
 .../spark/deploy/yarn/YarnClusterSuite.scala  | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 8ab7b21c22139..3c5b204d69705 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -204,8 +204,13 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     val result = File.createTempFile("result", null, tempDir)
     val finalState = runSpark(false,
       mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
       appArgs = Seq("key=value", result.getAbsolutePath()),
       extraConf = Map("spark.hadoop.key" -> "value"))
+=======
+      appArgs = Seq("spark.hadoop.key=value", result.getAbsolutePath()),
+      extraConf = Map(("spark.hadoop.key", "value")))
+>>>>>>> add a unit test
     checkResult(finalState, result)
   }
 
@@ -312,9 +317,15 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       // scalastyle:off println
       System.err.println(
         s"""
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
         |Invalid command line: ${args.mkString(" ")}
         |
         |Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
+=======
+        |Invalid command line: ${args.mkString(" ")}
+        |
+        |Usage: YarnClusterDriverUseSparkHadoopUtilConf [propertyKey=value] [result file]
+>>>>>>> add a unit test
         """.stripMargin)
       // scalastyle:on println
       System.exit(1)
@@ -324,11 +335,19 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
       .setAppName("yarn test using SparkHadoopUtil's conf"))
 
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
     val kv = args(0).split("=")
     val status = new File(args(1))
     var result = "failure"
     try {
       SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
+=======
+    val propertyKeyValue = args(0).split("=")
+    val status = new File(args(1))
+    var result = "failure"
+    try {
+      SparkHadoopUtil.get.conf.get(propertyKeyValue(0)) should be (propertyKeyValue(1))
+>>>>>>> add a unit test
       result = "success"
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)

From 8b0c40ab555336899b684fc2a1d6cc1c0886cd11 Mon Sep 17 00:00:00 2001
From: sharkd
Date: Tue, 12 Jul 2016 00:49:56 +0800
Subject: [PATCH 2/5] fix style

---
 .../spark/deploy/yarn/YarnClusterSuite.scala  | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 3c5b204d69705..8ab7b21c22139 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -204,13 +204,8 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     val result = File.createTempFile("result", null, tempDir)
     val finalState = runSpark(false,
       mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
       appArgs = Seq("key=value", result.getAbsolutePath()),
       extraConf = Map("spark.hadoop.key" -> "value"))
-=======
-      appArgs = Seq("spark.hadoop.key=value", result.getAbsolutePath()),
-      extraConf = Map(("spark.hadoop.key", "value")))
->>>>>>> add a unit test
     checkResult(finalState, result)
   }
 
@@ -317,15 +312,9 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       // scalastyle:off println
       System.err.println(
         s"""
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
         |Invalid command line: ${args.mkString(" ")}
         |
         |Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
-=======
-        |Invalid command line: ${args.mkString(" ")}
-        |
-        |Usage: YarnClusterDriverUseSparkHadoopUtilConf [propertyKey=value] [result file]
->>>>>>> add a unit test
         """.stripMargin)
       // scalastyle:on println
       System.exit(1)
@@ -335,19 +324,11 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
       .setAppName("yarn test using SparkHadoopUtil's conf"))
 
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
     val kv = args(0).split("=")
     val status = new File(args(1))
     var result = "failure"
     try {
       SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
-=======
-    val propertyKeyValue = args(0).split("=")
-    val status = new File(args(1))
-    var result = "failure"
-    try {
-      SparkHadoopUtil.get.conf.get(propertyKeyValue(0)) should be (propertyKeyValue(1))
->>>>>>> add a unit test
       result = "success"
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)

From 888cf1fa2187e4f92286c74ba6a05196348eff79 Mon Sep 17 00:00:00 2001
From: sharkd
Date: Wed, 13 Jul 2016 07:59:26 +0800
Subject: [PATCH 3/5] rebase apache/master

---
 .../spark/deploy/yarn/YarnClusterSuite.scala  | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 8ab7b21c22139..3c5b204d69705 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -204,8 +204,13 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     val result = File.createTempFile("result", null, tempDir)
     val finalState = runSpark(false,
       mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
       appArgs = Seq("key=value", result.getAbsolutePath()),
       extraConf = Map("spark.hadoop.key" -> "value"))
+=======
+      appArgs = Seq("spark.hadoop.key=value", result.getAbsolutePath()),
+      extraConf = Map(("spark.hadoop.key", "value")))
+>>>>>>> add a unit test
     checkResult(finalState, result)
   }
 
@@ -312,9 +317,15 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       // scalastyle:off println
       System.err.println(
         s"""
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
         |Invalid command line: ${args.mkString(" ")}
         |
         |Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
+=======
+        |Invalid command line: ${args.mkString(" ")}
+        |
+        |Usage: YarnClusterDriverUseSparkHadoopUtilConf [propertyKey=value] [result file]
+>>>>>>> add a unit test
         """.stripMargin)
       // scalastyle:on println
       System.exit(1)
@@ -324,11 +335,19 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
       .setAppName("yarn test using SparkHadoopUtil's conf"))
 
+<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
     val kv = args(0).split("=")
     val status = new File(args(1))
     var result = "failure"
     try {
       SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
+=======
+    val propertyKeyValue = args(0).split("=")
+    val status = new File(args(1))
+    var result = "failure"
+    try {
+      SparkHadoopUtil.get.conf.get(propertyKeyValue(0)) should be (propertyKeyValue(1))
+>>>>>>> add a unit test
       result = "success"
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)

From c470ab74b1bfc4814f0ca683102ed55b6c2a1410 Mon Sep 17 00:00:00 2001
From: sharkd
Date: Tue, 12 Jul 2016 00:49:56 +0800
Subject: [PATCH 4/5] fix style

---
 .../spark/deploy/yarn/YarnClusterSuite.scala  | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 3c5b204d69705..8ab7b21c22139 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -204,13 +204,8 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     val result = File.createTempFile("result", null, tempDir)
     val finalState = runSpark(false,
       mainClassName(YarnClusterDriverUseSparkHadoopUtilConf.getClass),
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
       appArgs = Seq("key=value", result.getAbsolutePath()),
       extraConf = Map("spark.hadoop.key" -> "value"))
-=======
-      appArgs = Seq("spark.hadoop.key=value", result.getAbsolutePath()),
-      extraConf = Map(("spark.hadoop.key", "value")))
->>>>>>> add a unit test
     checkResult(finalState, result)
   }
 
@@ -317,15 +312,9 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
      // scalastyle:off println
       System.err.println(
         s"""
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
         |Invalid command line: ${args.mkString(" ")}
         |
         |Usage: YarnClusterDriverUseSparkHadoopUtilConf [hadoopConfKey=value] [result file]
-=======
-        |Invalid command line: ${args.mkString(" ")}
-        |
-        |Usage: YarnClusterDriverUseSparkHadoopUtilConf [propertyKey=value] [result file]
->>>>>>> add a unit test
         """.stripMargin)
       // scalastyle:on println
       System.exit(1)
@@ -335,19 +324,11 @@ private object YarnClusterDriverUseSparkHadoopUtilConf extends Logging with Matc
       .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
       .setAppName("yarn test using SparkHadoopUtil's conf"))
 
-<<<<<<< 7f968867ff61c6b1a007874ee7e3a7421d94d373
     val kv = args(0).split("=")
     val status = new File(args(1))
     var result = "failure"
     try {
       SparkHadoopUtil.get.conf.get(kv(0)) should be (kv(1))
-=======
-    val propertyKeyValue = args(0).split("=")
-    val status = new File(args(1))
-    var result = "failure"
-    try {
-      SparkHadoopUtil.get.conf.get(propertyKeyValue(0)) should be (propertyKeyValue(1))
->>>>>>> add a unit test
       result = "success"
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)

From 8ae5ec71c9e12b4004d0563c9b581b590890369f Mon Sep 17 00:00:00 2001
From: sharkdtu
Date: Wed, 3 Aug 2016 19:51:45 +0800
Subject: [PATCH 5/5] SpillReader NPE when spillFile has no data

---
 .../org/apache/spark/util/collection/ExternalSorter.scala | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
index 708a0070e2253..7c98e8cabb229 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
@@ -611,7 +611,9 @@ private[spark] class ExternalSorter[K, V, C](
       val ds = deserializeStream
       deserializeStream = null
       fileStream = null
-      ds.close()
+      if (ds != null) {
+        ds.close()
+      }
       // NOTE: We don't do file.delete() here because that is done in ExternalSorter.stop().
       // This should also be fixed in ExternalAppendOnlyMap.
     }
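
PATCH 5/5 guards SpillReader's cleanup against a deserializeStream that was never
opened because the spill file contained no data. The following is a minimal,
self-contained Scala sketch of that null-safe cleanup pattern; SpillFileReader and
its members are hypothetical names for illustration only, not Spark's actual
SpillReader internals.

    import java.io.{ByteArrayInputStream, InputStream}

    // Hypothetical stand-in for Spark's SpillReader, reduced to the part the
    // patch touches: a stream field that may legitimately be null.
    class SpillFileReader(batches: Seq[Array[Byte]]) {
      // Stays null when the spill file has no data -- the case that used to NPE.
      private var deserializeStream: InputStream =
        if (batches.isEmpty) null else new ByteArrayInputStream(batches.head)

      def cleanup(): Unit = {
        val ds = deserializeStream
        deserializeStream = null // clear the field first, as the patched code does
        if (ds != null) {        // the guard added by the patch
          ds.close()
        }
      }
    }

    object SpillFileReaderExample extends App {
      new SpillFileReader(Nil).cleanup()                       // empty spill: no NPE
      new SpillFileReader(Seq(Array[Byte](1, 2, 3))).cleanup() // normal path closes
    }

Snapshotting the field into a local and nulling it before the close keeps cleanup
idempotent, so the added null check only has to cover the never-opened case.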