checkOutputSpecs not applicable to FSOutputFormat

1 parent ec490e8 commit ac631366c22c8191ebc2f31b92905e165f6ad210 @CodingCat committed Mar 1, 2014
Showing with 10 additions and 4 deletions.
  1. +10 −4 core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -713,10 +713,16 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)])
     logDebug("Saving as hadoop file of type (" + keyClass.getSimpleName + ", " +
       valueClass.getSimpleName + ")")
-    val path = new Path(conf.get("mapred.output.dir"))
-    val fs = path.getFileSystem(conf)
-    conf.getOutputFormat.checkOutputSpecs(fs, conf)
-
+    if (outputFormatClass.isInstanceOf[FileOutputFormat[_, _]]) {
+      val outputPath = conf.get("mapred.output.dir")
+      if (outputPath == null) {
+        throw new SparkException("mapred.output.dir not set")
+      }
+      val path = new Path(outputPath)
+      val fs = path.getFileSystem(conf)
+      conf.getOutputFormat.checkOutputSpecs(fs, conf)
+    }
+
     val writer = new SparkHadoopWriter(conf)
     writer.preSetup()

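For readers who want to try the guard outside Spark, here is a minimal standalone sketch of the same idea: invoke the old `mapred` API's `checkOutputSpecs` only when the configured `OutputFormat` is a `FileOutputFormat`, and fail fast if `mapred.output.dir` is missing. This is not the Spark source; the object and method names and the `/tmp` output path are illustrative, and it assumes a plain Hadoop `JobConf` configured with `TextOutputFormat`.

```scala
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapred.{FileOutputFormat, JobConf, TextOutputFormat}

// Illustrative sketch, not Spark code: mirrors the guard added in this commit.
object OutputSpecCheckSketch {

  // Run checkOutputSpecs only for file-based output formats; other
  // OutputFormats (e.g. database writers) have no output directory to verify.
  def validate(conf: JobConf): Unit = {
    conf.getOutputFormat match {
      case _: FileOutputFormat[_, _] =>
        val outputPath = conf.get("mapred.output.dir")
        if (outputPath == null) {
          throw new IllegalArgumentException("mapred.output.dir not set")
        }
        val fs = new Path(outputPath).getFileSystem(conf)
        // Throws FileAlreadyExistsException if the output directory exists.
        conf.getOutputFormat.checkOutputSpecs(fs, conf)
      case _ => // skip the check for non-file output formats
    }
  }

  def main(args: Array[String]): Unit = {
    val conf = new JobConf()
    conf.setOutputFormat(classOf[TextOutputFormat[String, String]])
    conf.set("mapred.output.dir", "/tmp/output-spec-demo") // hypothetical path
    validate(conf) // passes only if the directory does not already exist
  }
}
```

The design choice mirrored here is that output-spec validation (directory set, directory not already present) only makes sense for formats that write to a filesystem path; wrapping the call in a `FileOutputFormat` check avoids rejecting jobs that use non-file output formats.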