diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index cf72345efa63f..91f9da35abeee 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -56,7 +56,12 @@ case class DropTable(
     try {
       hiveContext.tryUncacheQuery(hiveContext.table(tableName))
     } catch {
+      // This table's metadata is not in Hive metastore (e.g. the table does not exist).
       case _: org.apache.hadoop.hive.ql.metadata.InvalidTableException =>
+      // Other exceptions can be caused by users providing wrong parameters in OPTIONS
+      // (e.g. invalid paths). We catch it and log a warning message.
+      // Users should be able to drop such kinds of tables regardless if there is an exception.
+      case e: Exception => log.warn(s"${e.getMessage}")
     }
     hiveContext.invalidateTable(tableName)
     hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tableName")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 8ff833e0d60d1..53d8aa7739bc2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -242,4 +242,17 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
 
     assert(expectedSchema == table("jsonTable").schema)
   }
+
+  test("SPARK-5286 Fail to drop an invalid table when using the data source API") {
+    sql(
+      s"""
+        |CREATE TABLE jsonTable
+        |USING org.apache.spark.sql.json.DefaultSource
+        |OPTIONS (
+        |  path 'it is not a path at all!'
+        |)
+      """.stripMargin)
+
+    sql("DROP TABLE jsonTable").collect.foreach(println)
+  }
 }