From 76c1813cf6e0e0e0d085cd31dcf1633c80829eff Mon Sep 17 00:00:00 2001
From: xubo245 <601450868@qq.com>
Date: Sat, 13 Jan 2018 21:53:52 +0800
Subject: [PATCH] [SPARK-23039][SQL] Fix the bug in alter table set location.

Fix the bug in ALTER TABLE SET LOCATION: HiveExternalCatalog removed the
`path` option from the storage properties when restoring table metadata,
so the property never reflected the location set by
ALTER TABLE ... SET LOCATION. Keep the storage properties intact and
enable the check that was left as a TODO in
org.apache.spark.sql.execution.command.DDLSuite#testSetLocation:

    // TODO(gatorsmile): fix the bug in alter table set location.
    // if (isUsingHiveMetastore) {
    //   assert(storageFormat.properties.get("path") === expected)
    // }
---
 .../sql/execution/command/DDLSuite.scala      | 38 +++++++++++++++++--
 .../spark/sql/hive/HiveExternalCatalog.scala  |  2 +-
 2 files changed, 36 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 591510c1d8283..85d6d9b519622 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1141,9 +1141,9 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
         .map { s => catalog.getPartition(tableIdent, s).storage }
         .getOrElse { catalog.getTableMetadata(tableIdent).storage }
       // TODO(gatorsmile): fix the bug in alter table set location.
-      // if (isUsingHiveMetastore) {
-      //   assert(storageFormat.properties.get("path") === expected)
-      // }
+      if (isUsingHiveMetastore) {
+        assert(storageFormat.properties.get("path").get === expected.toString)
+      }
       assert(storageFormat.locationUri === Some(expected))
     }
     // set table location
@@ -1869,6 +1869,38 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     }
   }
 
+  test("SPARK-23039: check path after SET LOCATION") {
+    withTable("tbl") {
+      withTempDir { dir =>
+        sql("CREATE TABLE tbl(i INT) USING parquet")
+        sql("INSERT INTO tbl SELECT 1")
+        checkAnswer(spark.table("tbl"), Row(1))
+        val defaultTablePath = spark.sessionState.catalog
+          .getTableMetadata(TableIdentifier("tbl")).storage.locationUri.get
+        try {
+          val catalog = spark.sessionState.catalog
+          val tableIdent = TableIdentifier("tbl")
+          // before set location
+          if (isUsingHiveMetastore) {
+            assert(catalog.getTableMetadata(tableIdent).storage
+              .properties.get("path").get === defaultTablePath.toString)
+          }
+
+          sql(s"ALTER TABLE tbl SET LOCATION '${dir.getCanonicalPath}'")
+          spark.catalog.refreshTable("tbl")
+
+          // after set location
+          if (isUsingHiveMetastore) {
+            assert(catalog.getTableMetadata(tableIdent).storage
+              .properties.get("path").get === dir.getCanonicalPath)
+          }
+        } finally {
+          Utils.deleteRecursively(new File(defaultTablePath))
+        }
+      }
+    }
+  }
+
   test("insert data to a data source table which has a non-existing location should succeed") {
     withTable("t") {
       withTempDir { dir =>
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 632e3e0c4c3f9..8d81172ff4830 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -787,7 +787,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
     val storageWithLocation = {
       val tableLocation = getLocationFromStorageProps(table)
       // We pass None as `newPath` here, to remove the path option in storage properties.
-      updateLocationInStorageProps(table, newPath = None).copy(
+      table.storage.copy(
         locationUri = tableLocation.map(CatalogUtils.stringToURI(_)))
     }
     val partitionProvider = table.properties.get(TABLE_PARTITION_PROVIDER)
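
Note for reviewers: the sketch below is one way to observe the behavior this
patch changes outside the test suite; it is not part of the patch. It assumes a
Hive-enabled Spark build (`enableHiveSupport()` requires spark-hive on the
classpath) and must be compiled under the org.apache.spark.sql package, since
SparkSession.sessionState is private[sql]. The object name, the table name
`tbl`, and the temp directory are arbitrary choices for illustration.

    package org.apache.spark.sql

    import java.nio.file.Files

    import org.apache.spark.sql.catalyst.TableIdentifier

    object Spark23039Repro {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[1]")
          .enableHiveSupport() // the fix only matters with the Hive metastore
          .getOrCreate()

        spark.sql("CREATE TABLE tbl(i INT) USING parquet")
        val newLocation = Files.createTempDirectory("spark-23039-").toFile.getCanonicalPath
        spark.sql(s"ALTER TABLE tbl SET LOCATION '$newLocation'")
        spark.catalog.refreshTable("tbl")

        val storage = spark.sessionState.catalog
          .getTableMetadata(TableIdentifier("tbl")).storage
        // Without this patch, HiveExternalCatalog strips the "path" storage
        // property while restoring table metadata, so the first line prints
        // None; with it, the property should match the location set above.
        println(storage.properties.get("path"))
        println(storage.locationUri)

        spark.sql("DROP TABLE tbl")
        spark.stop()
      }
    }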