[SPARK-8561][SQL]drop table under specific database #6951

Closed
wants to merge 4 commits
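The change lets DROP TABLE resolve a database-qualified table name. A minimal usage sketch of the intended behavior, assuming a HiveContext instance named hiveContext; the database and table names are illustrative, not taken from the patch:

hiveContext.sql("CREATE DATABASE IF NOT EXISTS mydb")
hiveContext.sql("CREATE TABLE mydb.tbl (key INT, value STRING)")
// With this patch the qualified form below resolves "mydb" rather than the current database.
hiveContext.sql("DROP TABLE mydb.tbl")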
@@ -28,6 +28,7 @@ import org.apache.spark.sql.execution.RunnableCommand
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
import org.apache.hadoop.hive.conf.HiveConf

/**
* Analyzes the given table in the current database to generate statistics, which will be
@@ -56,8 +57,21 @@ case class DropTable(
override def run(sqlContext: SQLContext): Seq[InternalRow] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
val ifExistsClause = if (ifExists) "IF EXISTS " else ""
val dbAndTableName = tableName.split("\\.")
val databaseName = dbAndTableName
.lift(dbAndTableName.size - 2)
.getOrElse(hiveContext.catalog.client.currentDatabase)
// tempDbname is used to pass the test "drop_partitions_filter":
// when hive.exec.drop.ignorenonexistent=false and we run "drop table dbname.tablename",
// Hive throws an exception (this is a Hive bug).
val tempDbname =
if (hiveContext.hiveconf.getBoolVar(HiveConf.ConfVars.DROPIGNORESNONEXISTENT)) {
s"$databaseName."
} else {
""
}
try {
hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(tableName))
hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(dbAndTableName.last))
} catch {
// This table's metadata is not in Hive metastore (e.g. the table does not exist).
case _: org.apache.hadoop.hive.ql.metadata.InvalidTableException =>
@@ -67,9 +81,9 @@
// Users should be able to drop such kinds of tables regardless if there is an error.
case e: Throwable => log.warn(s"${e.getMessage}", e)
}
hiveContext.invalidateTable(tableName)
hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tableName")
hiveContext.catalog.unregisterTable(Seq(tableName))
hiveContext.invalidateTable(dbAndTableName.last)
hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tempDbname${dbAndTableName.last}")
hiveContext.catalog.unregisterTable(Seq(databaseName, dbAndTableName.last))
Seq.empty[InternalRow]
}
}
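For reference, the name-resolution logic added above (split on the dot, then lift) can be read as the following self-contained Scala sketch; the helper name resolve and the sample inputs are illustrative only:

// Plain Scala, no Spark dependencies: mirrors the split/lift pattern in DropTable.run.
def resolve(tableName: String, currentDb: String): (String, String) = {
  val parts = tableName.split("\\.")
  // lift(parts.size - 2) is Some(db) for "db.table" and None for a bare "table",
  // so an unqualified name falls back to the current database.
  val db = parts.lift(parts.size - 2).getOrElse(currentDb)
  (db, parts.last)
}

resolve("testdb8561.testdb8561_tbl1", "default") // ("testdb8561", "testdb8561_tbl1")
resolve("testdb8561_tbl1", "default")            // ("default", "testdb8561_tbl1")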
@@ -850,4 +850,22 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with BeforeA
sqlContext.sql("""use default""")
sqlContext.sql("""drop database if exists testdb8156 CASCADE""")
}

test("SPARK-8561:drop table under specific database ") {

val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
sqlContext.sql("""create database if not exists testdb8561""")
sqlContext.sql("""use testdb8561""")
df.write
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("testdb8561_tbl1")

assert(sqlContext.sql("show TABLES in testdb8561").collect().size === 1)
sqlContext.sql("drop TABLE testdb8561_tbl1")
assert(sqlContext.sql("show TABLES in testdb8561").collect().size === 0)

sqlContext.sql("""use default""")
sqlContext.sql("""drop database if exists testdb8561 CASCADE""")
}
}
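A possible variant of the test body (not part of this patch) that exercises the qualified form directly, reusing the same database and table names:

sqlContext.sql("use default")
// Drop by the fully qualified name while the session points at another database.
sqlContext.sql("drop TABLE testdb8561.testdb8561_tbl1")
assert(sqlContext.sql("show TABLES in testdb8561").collect().size === 0)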