Skip to content

Commit

Permalink
[SPARK-35629][SQL] Use better exception type if database doesn't exis…
Browse files Browse the repository at this point in the history
…t on `drop database`

### What changes were proposed in this pull request?

Add a database-exists check in `SessionCatalog`.

### Why are the changes needed?

Currently, executing `drop database test` when the database does not exist throws an unfriendly error message.

```
Error in query: org.apache.hadoop.hive.metastore.api.NoSuchObjectException: test
org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.metastore.api.NoSuchObjectException: test
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:112)
	at org.apache.spark.sql.hive.HiveExternalCatalog.dropDatabase(HiveExternalCatalog.scala:200)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.dropDatabase(ExternalCatalogWithListener.scala:53)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.dropDatabase(SessionCatalog.scala:273)
	at org.apache.spark.sql.execution.command.DropDatabaseCommand.run(ddl.scala:111)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
	at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:228)
	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3707)
```

### Does this PR introduce _any_ user-facing change?

Yes, a cleaner error message.

### How was this patch tested?

Add test.

Closes #32768 from ulysses-you/SPARK-35629.

Authored-by: ulysses-you <ulyssesyou18@gmail.com>
Signed-off-by: Gengliang Wang <gengliang@apache.org>
  • Loading branch information
ulysses-you authored and gengliangwang committed Jun 4, 2021
1 parent 53a758b commit c7fb0e1
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -265,6 +265,9 @@ class SessionCatalog(
if (dbName == DEFAULT_DATABASE) {
throw QueryCompilationErrors.cannotDropDefaultDatabaseError
}
if (!ignoreIfNotExists) {
requireDbExists(dbName)
}
if (cascade && databaseExists(dbName)) {
listTables(dbName).foreach { t =>
invalidateCachedTable(QualifiedTableName(dbName, t.table))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,17 +191,8 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {

test("drop database when the database does not exist") {
withBasicCatalog { catalog =>
// TODO: fix this inconsistent between HiveExternalCatalog and InMemoryCatalog
if (isHiveExternalCatalog) {
val e = intercept[AnalysisException] {
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false)
}.getMessage
assert(e.contains(
"org.apache.hadoop.hive.metastore.api.NoSuchObjectException: db_that_does_not_exist"))
} else {
intercept[NoSuchDatabaseException] {
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false)
}
intercept[NoSuchDatabaseException] {
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false)
}
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -823,12 +823,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
var message = intercept[AnalysisException] {
sql(s"DROP DATABASE $dbName")
}.getMessage
// TODO: Unify the exception.
if (isUsingHiveMetastore) {
assert(message.contains(s"NoSuchObjectException: $dbNameWithoutBackTicks"))
} else {
assert(message.contains(s"Database '$dbNameWithoutBackTicks' not found"))
}
assert(message.contains(s"Database '$dbNameWithoutBackTicks' not found"))

message = intercept[AnalysisException] {
sql(s"ALTER DATABASE $dbName SET DBPROPERTIES ('d'='d')")
Expand Down

0 comments on commit c7fb0e1

Please sign in to comment.