From b7bd0ed458e999614e2d0a6a6a96adeaed44d9e3 Mon Sep 17 00:00:00 2001
From: Cheng Lian
Date: Fri, 7 Aug 2015 12:32:38 +0800
Subject: [PATCH] Fixes logs

---
 .../org/apache/spark/sql/hive/HiveMetastoreCatalog.scala | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 1523ebe9d5493..7198a32df4a02 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -317,19 +317,17 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
       case (Some(serde), relation: HadoopFsRelation) if relation.partitionColumns.nonEmpty =>
         logWarning {
-          val paths = relation.paths.mkString(", ")
           "Persisting partitioned data source relation into Hive metastore in " +
             s"Spark SQL specific format, which is NOT compatible with Hive. Input path(s): " +
-            paths.mkString("\n", "\n", "")
+            relation.paths.mkString("\n", "\n", "")
         }
         newSparkSQLSpecificMetastoreTable()

       case (Some(serde), relation: HadoopFsRelation) =>
         logWarning {
-          val paths = relation.paths.mkString(", ")
           "Persisting data source relation with multiple input paths into Hive metastore in " +
             s"Spark SQL specific format, which is NOT compatible with Hive. Input paths: " +
-            paths.mkString("\n", "\n", "")
+            relation.paths.mkString("\n", "\n", "")
         }
         newSparkSQLSpecificMetastoreTable()
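
Reviewer note, not part of the patch above: the removed intermediate value is where the
bug lived. relation.paths.mkString(", ") already produces a single String, and calling
mkString("\n", "\n", "") on a String iterates over its characters rather than over the
paths, so the warning printed every character of the joined path list on its own line.
The stand-alone Scala sketch below reproduces the old and new behavior; the object name
MkStringDemo and the sample paths are hypothetical, purely for illustration:

    // Illustrates the behavior this patch fixes: mkString on a String
    // iterates over characters, mkString on a Seq iterates over elements.
    object MkStringDemo extends App {
      val paths = Seq("/data/part=1", "/data/part=2")  // hypothetical input paths

      // Old pattern: pre-join into a String, then mkString over that String.
      val joined = paths.mkString(", ")
      println(joined.mkString("\n", "\n", ""))         // one character per line

      // New pattern: mkString directly over the Seq of paths.
      println(paths.mkString("\n", "\n", ""))          // one path per line
    }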