From 2bc3e7ae215772008054e3d0f3eb2e16c61811b7 Mon Sep 17 00:00:00 2001
From: baishuo
Date: Tue, 23 Jun 2015 03:45:03 -0700
Subject: [PATCH 1/4] drop table under specific database

---
 .../spark/sql/hive/execution/commands.scala    |  5 +++--
 .../sql/hive/MetastoreDataSourcesSuite.scala   | 18 ++++++++++++++++++
 2 files changed, 21 insertions(+), 2 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index aad58bfa2e6e0..f401ef3bb2062 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -56,6 +56,7 @@ case class DropTable(
   override def run(sqlContext: SQLContext): Seq[InternalRow] = {
     val hiveContext = sqlContext.asInstanceOf[HiveContext]
     val ifExistsClause = if (ifExists) "IF EXISTS " else ""
+    val databaseName = hiveContext.catalog.client.currentDatabase
     try {
       hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(tableName))
     } catch {
@@ -68,8 +69,8 @@ case class DropTable(
       case e: Throwable => log.warn(s"${e.getMessage}", e)
     }
     hiveContext.invalidateTable(tableName)
-    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tableName")
-    hiveContext.catalog.unregisterTable(Seq(tableName))
+    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$databaseName.$tableName")
+    hiveContext.catalog.unregisterTable(Seq(databaseName, tableName))
     Seq.empty[InternalRow]
   }
 }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index cc294bc3e8bc3..c59b25f0ecb14 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -850,4 +850,22 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with BeforeA
     sqlContext.sql("""use default""")
     sqlContext.sql("""drop database if exists testdb8156 CASCADE""")
   }
+
+  test("SPARK-8561:drop table under specific database ") {
+
+    val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
+    sqlContext.sql("""create database if not exists testdb8561""")
+    sqlContext.sql("""use testdb8561""")
+    df.write
+      .format("parquet")
+      .mode(SaveMode.Overwrite)
+      .saveAsTable("testdb8561_tbl1")
+
+    assert(sqlContext.sql("show TABLES in testdb8561").collect().size === 1)
+    sqlContext.sql("drop TABLE testdb8561_tbl1")
+    assert(sqlContext.sql("show TABLES in testdb8561").collect().size === 0)
+
+    sqlContext.sql("""use default""")
+    sqlContext.sql("""drop database if exists testdb8561 CASCADE""")
+  }
 }

From 2893b7749f6c5cbfbd96dfa5f853cd25f4c69388 Mon Sep 17 00:00:00 2001
From: baishuo
Date: Tue, 23 Jun 2015 07:59:24 -0700
Subject: [PATCH 2/4] handle dbname.tablename

---
 .../apache/spark/sql/hive/execution/commands.scala | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index f401ef3bb2062..8951dcb1bd91a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -56,9 +56,12 @@ case class DropTable(
   override def run(sqlContext: SQLContext): Seq[InternalRow] = {
     val hiveContext = sqlContext.asInstanceOf[HiveContext]
     val ifExistsClause = if (ifExists) "IF EXISTS " else ""
-    val databaseName = hiveContext.catalog.client.currentDatabase
+    val dbAndTableName = tableName.split("\\.")
+    val databaseName = dbAndTableName
+      .lift(dbAndTableName.size -2)
+      .getOrElse(hiveContext.catalog.client.currentDatabase)
     try {
-      hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(tableName))
+      hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(dbAndTableName.last))
     } catch {
       // This table's metadata is not in Hive metastore (e.g. the table does not exist).
       case _: org.apache.hadoop.hive.ql.metadata.InvalidTableException =>
@@ -68,9 +71,9 @@ case class DropTable(
       // Users should be able to drop such kinds of tables regardless if there is an error.
       case e: Throwable => log.warn(s"${e.getMessage}", e)
     }
-    hiveContext.invalidateTable(tableName)
-    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$databaseName.$tableName")
-    hiveContext.catalog.unregisterTable(Seq(databaseName, tableName))
+    hiveContext.invalidateTable(dbAndTableName.last)
+    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$databaseName.${dbAndTableName.last}")
+    hiveContext.catalog.unregisterTable(Seq(databaseName, dbAndTableName.last))
     Seq.empty[InternalRow]
   }
 }

From 3ba7913d0ebca59a4465c28cb46ad23c252e78c5 Mon Sep 17 00:00:00 2001
From: baishuo
Date: Wed, 24 Jun 2015 21:09:07 -0700
Subject: [PATCH 3/4] pass test drop_partitions_filter

---
 .../org/apache/spark/sql/hive/execution/commands.scala | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index 8951dcb1bd91a..566b1cd0d3a4b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.execution.RunnableCommand
 import org.apache.spark.sql.hive.HiveContext
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
+import org.apache.hadoop.hive.conf.HiveConf
 
 /**
  * Analyzes the given table in the current database to generate statistics, which will be
@@ -60,6 +61,12 @@ case class DropTable(
     val databaseName = dbAndTableName
       .lift(dbAndTableName.size -2)
       .getOrElse(hiveContext.catalog.client.currentDatabase)
+    //tempDbname is used to pass the test "drop_partitions_filter"
+    //when we set hive.exec.drop.ignorenonexistent=false and run "drop table dbname.tablename"
+    //Hive will throws out Exception (This is a bug of Hive)
+    val tempDbname =
+      if (hiveContext.hiveconf.getBoolVar(HiveConf.ConfVars.DROPIGNORESNONEXISTENT))
+        s"$databaseName."else ""
     try {
       hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(dbAndTableName.last))
     } catch {
@@ -72,7 +79,7 @@ case class DropTable(
       case e: Throwable => log.warn(s"${e.getMessage}", e)
     }
     hiveContext.invalidateTable(dbAndTableName.last)
-    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$databaseName.${dbAndTableName.last}")
+    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tempDbname${dbAndTableName.last}")
     hiveContext.catalog.unregisterTable(Seq(databaseName, dbAndTableName.last))
     Seq.empty[InternalRow]
   }
 }

From c09cb9fdd153db01132159dd6b83a4c08b596692 Mon Sep 17 00:00:00 2001
From: baishuo
Date: Wed, 24 Jun 2015 22:19:28 -0700
Subject: [PATCH 4/4] modify style

---
 .../apache/spark/sql/hive/execution/commands.scala | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index 566b1cd0d3a4b..7806ced6ae834 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -61,12 +61,15 @@ case class DropTable(
     val databaseName = dbAndTableName
       .lift(dbAndTableName.size -2)
       .getOrElse(hiveContext.catalog.client.currentDatabase)
-    //tempDbname is used to pass the test "drop_partitions_filter"
-    //when we set hive.exec.drop.ignorenonexistent=false and run "drop table dbname.tablename"
-    //Hive will throws out Exception (This is a bug of Hive)
+    // tempDbname is used to pass the test "drop_partitions_filter"
+    // when we set hive.exec.drop.ignorenonexistent=false and run "drop table dbname.tablename"
+    // Hive will throws out Exception (This is a bug of Hive)
     val tempDbname =
-      if (hiveContext.hiveconf.getBoolVar(HiveConf.ConfVars.DROPIGNORESNONEXISTENT))
-        s"$databaseName."else ""
+      if (hiveContext.hiveconf.getBoolVar(HiveConf.ConfVars.DROPIGNORESNONEXISTENT)) {
+        s"$databaseName."
+      } else {
+        ""
+      }
     try {
       hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(dbAndTableName.last))
     } catch {
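
For reference, the following is a minimal standalone Scala sketch (not part of the diffs above) of the table-name resolution that patches 2 and 3 add to DropTable.run. The object name DropTableNameResolution, the resolve helper, and the currentDatabase parameter are illustrative stand-ins for hiveContext.catalog.client.currentDatabase, introduced only so the snippet compiles on its own.

// Standalone sketch of the qualified-name handling from patches 2-3.
// `resolve` and `currentDatabase` are hypothetical stand-ins, not patch code.
object DropTableNameResolution {
  /** Returns (databaseName, tableName) for inputs like "db.tbl" or "tbl". */
  def resolve(tableName: String, currentDatabase: String): (String, String) = {
    val dbAndTableName = tableName.split("\\.")
    // lift(size - 2) yields the qualifier only when the name contains a dot;
    // for an unqualified name it is None and we fall back to the current database.
    val databaseName = dbAndTableName
      .lift(dbAndTableName.size - 2)
      .getOrElse(currentDatabase)
    (databaseName, dbAndTableName.last)
  }

  def main(args: Array[String]): Unit = {
    println(resolve("testdb8561.testdb8561_tbl1", "default"))  // (testdb8561,testdb8561_tbl1)
    println(resolve("testdb8561_tbl1", "testdb8561"))          // (testdb8561,testdb8561_tbl1)
  }
}

Patch 3 then prepends the resolved qualifier to the generated DROP TABLE statement only when hive.exec.drop.ignorenonexistent is true, working around the Hive behavior described in the in-code comment.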