From f8d411e4c7332967914b930c120e6629a1a17684 Mon Sep 17 00:00:00 2001
From: x00228947
Date: Wed, 18 Jan 2017 17:38:08 +0800
Subject: [PATCH 1/2] shuffle stage not retry

---
 .../org/apache/spark/sql/hive/hiveWriterContainers.scala | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
index 16cfa9d1cc5c4..d1f290f0e89a5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
@@ -21,7 +21,6 @@ import java.text.NumberFormat
 import java.util.{Date, Locale}
 
 import scala.collection.JavaConverters._
-
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.common.FileUtils
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
@@ -34,10 +33,10 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.Object
 import org.apache.hadoop.io.Writable
 import org.apache.hadoop.mapred._
 import org.apache.hadoop.mapreduce.TaskType
-
 import org.apache.spark._
 import org.apache.spark.internal.Logging
 import org.apache.spark.mapred.SparkHadoopMapRedUtil
+import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.execution.UnsafeKVExternalSorter
@@ -322,6 +321,10 @@ private[spark] class SparkHiveDynamicPartitionWriterContainer(
       }
       commit()
     } catch {
+      case ffe: FetchFailedException =>
+        logError("Aborting task because of FetchFailedException.", ffe)
+        abortTask()
+        throw ffe
       case cause: Throwable =>
         logError("Aborting task.", cause)
         abortTask()

From dd04ceb766f2e2654c6d14e79a197ba55f0ea4b0 Mon Sep 17 00:00:00 2001
From: xukun
Date: Wed, 18 Jan 2017 17:46:22 +0800
Subject: [PATCH 2/2] Update hiveWriterContainers.scala

---
 .../scala/org/apache/spark/sql/hive/hiveWriterContainers.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
index d1f290f0e89a5..556aebb069d25 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
@@ -21,6 +21,7 @@ import java.text.NumberFormat
 import java.util.{Date, Locale}
 
 import scala.collection.JavaConverters._
+
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.common.FileUtils
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
@@ -33,6 +34,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.Object
 import org.apache.hadoop.io.Writable
 import org.apache.hadoop.mapred._
 import org.apache.hadoop.mapreduce.TaskType
+
 import org.apache.spark._
 import org.apache.spark.internal.Logging
 import org.apache.spark.mapred.SparkHadoopMapRedUtil
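
Note on the change (reviewer context, not part of the patch): the new `case ffe: FetchFailedException` branch is placed before the generic `case cause: Throwable` handler so that a shuffle fetch failure raised while writing Hive output is rethrown as-is instead of being wrapped. A `FetchFailedException` that reaches the executor's top-level handler is reported to the driver as a fetch failure, which lets the scheduler resubmit the parent shuffle stage; once wrapped in a generic exception it would be counted as an ordinary task failure. The second commit only restores the two blank import-group separator lines that the first commit dropped.

Below is a minimal, self-contained sketch of that catch-ordering pattern. It deliberately uses placeholder names (`FetchFailed`, `WrappedTaskException`, `writeToFile`) rather than Spark's real `FetchFailedException`/`SparkException` so it compiles and runs on its own; treat it as an illustration of the pattern, not as the patched Spark code.

```scala
// Stand-in for org.apache.spark.shuffle.FetchFailedException (placeholder).
class FetchFailed(msg: String) extends RuntimeException(msg)

// Stand-in for the generic wrapper exception (placeholder).
class WrappedTaskException(msg: String, cause: Throwable)
  extends RuntimeException(msg, cause)

object CatchOrderingSketch {

  private def abortTask(): Unit =
    println("abortTask(): cleaning up partial output")

  // Mirrors the shape of the patched write path: the retry-relevant
  // exception is caught first and rethrown unwrapped; everything else
  // is still wrapped, as before the patch.
  def writeToFile(rows: Iterator[String]): Unit = {
    try {
      rows.foreach { row =>
        // Simulate a shuffle fetch failure surfacing mid-write.
        if (row == "bad") throw new FetchFailed(s"could not fetch block for '$row'")
        println(s"wrote $row")
      }
    } catch {
      case ffe: FetchFailed =>
        abortTask()
        throw ffe // propagate unchanged so the caller can identify it
      case cause: Throwable =>
        abortTask()
        throw new WrappedTaskException("Task failed while writing rows.", cause)
    }
  }

  def main(args: Array[String]): Unit = {
    try writeToFile(Iterator("a", "bad", "b"))
    catch {
      case e: FetchFailed =>
        println(s"caller saw an unwrapped fetch failure: ${e.getMessage}")
    }
  }
}
```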