From 2ca37061c37edb400c5779009c86abfb492032ac Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Tue, 30 Oct 2018 15:33:46 +0900
Subject: [PATCH 1/6] Split ShowCreateTableSuite.

---
 sql/core/src/test/resources/sample.json      |   2 +
 .../spark/sql}/ShowCreateTableSuite.scala    | 181 +---------------
 .../sql/hive/HiveShowCreateTableSuite.scala  | 198 ++++++++++++++++++
 3 files changed, 211 insertions(+), 170 deletions(-)
 create mode 100644 sql/core/src/test/resources/sample.json
 rename sql/{hive/src/test/scala/org/apache/spark/sql/hive => core/src/test/scala/org/apache/spark/sql}/ShowCreateTableSuite.scala (55%)
 create mode 100644 sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala

diff --git a/sql/core/src/test/resources/sample.json b/sql/core/src/test/resources/sample.json
new file mode 100644
index 0000000000000..a2c2ffd5e0330
--- /dev/null
+++ b/sql/core/src/test/resources/sample.json
@@ -0,0 +1,2 @@
+{"a" : "2" ,"b" : "blah", "c_!@(3)":1}
+{"" : {"d!" : [4, 5], "=" : [{"Dd2": null}, {"Dd2" : true}]}}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
similarity index 55%
rename from sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
index 34ca790299859..0d90199cb6b55 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.hive
+package org.apache.spark.sql
 
-import org.apache.spark.sql.{AnalysisException, QueryTest}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.CatalogTable
-import org.apache.spark.sql.hive.test.TestHiveSingleton
-import org.apache.spark.sql.test.SQLTestUtils
+import org.apache.spark.sql.test.{SharedSQLContext, SQLTestUtils}
 import org.apache.spark.util.Utils
 
-class ShowCreateTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
+class SimpleShowCreateTableSuite extends ShowCreateTableSuite with SharedSQLContext
+
+abstract class ShowCreateTableSuite extends QueryTest with SQLTestUtils {
   import testImplicits._
 
   test("data source table with user specified schema") {
@@ -120,178 +120,24 @@ class ShowCreateTableSuite extends QueryTest with SQLTestUtils with TestHiveSing
     }
   }
 
-  test("simple hive table") {
-    withTable("t1") {
-      sql(
-        s"""CREATE TABLE t1 (
-           |  c1 INT COMMENT 'bla',
-           |  c2 STRING
-           |)
-           |TBLPROPERTIES (
-           |  'prop1' = 'value1',
-           |  'prop2' = 'value2'
-           |)
-         """.stripMargin
-      )
-
-      checkCreateTable("t1")
-    }
-  }
-
-  test("simple external hive table") {
-    withTempDir { dir =>
-      withTable("t1") {
-        sql(
-          s"""CREATE TABLE t1 (
-             |  c1 INT COMMENT 'bla',
-             |  c2 STRING
-             |)
-             |LOCATION '${dir.toURI}'
-             |TBLPROPERTIES (
-             |  'prop1' = 'value1',
-             |  'prop2' = 'value2'
-             |)
-           """.stripMargin
-        )
-
-        checkCreateTable("t1")
-      }
-    }
-  }
-
-  test("partitioned hive table") {
-    withTable("t1") {
-      sql(
-        s"""CREATE TABLE t1 (
-           |  c1 INT COMMENT 'bla',
-           |  c2 STRING
-           |)
-           |COMMENT 'bla'
-           |PARTITIONED BY (
-           |  p1 BIGINT COMMENT 'bla',
-           |  p2 STRING
-           |)
-         """.stripMargin
-      )
-
-      checkCreateTable("t1")
-    }
-  }
-
-  test("hive table with explicit storage info") {
-    withTable("t1") {
-      sql(
-        s"""CREATE TABLE t1 (
-           |  c1 INT COMMENT 'bla',
-           |  c2 STRING
-           |)
-           |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
-           |COLLECTION ITEMS TERMINATED BY '@'
-           |MAP KEYS TERMINATED BY '#'
-           |NULL DEFINED AS 'NaN'
-         """.stripMargin
-      )
-
-      checkCreateTable("t1")
-    }
-  }
-
-  test("hive table with STORED AS clause") {
-    withTable("t1") {
-      sql(
-        s"""CREATE TABLE t1 (
-           |  c1 INT COMMENT 'bla',
-           |  c2 STRING
-           |)
-           |STORED AS PARQUET
-         """.stripMargin
-      )
-
-      checkCreateTable("t1")
-    }
-  }
-
-  test("hive table with serde info") {
-    withTable("t1") {
-      sql(
-        s"""CREATE TABLE t1 (
-           |  c1 INT COMMENT 'bla',
-           |  c2 STRING
-           |)
-           |ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
-           |WITH SERDEPROPERTIES (
-           |  'mapkey.delim' = ',',
-           |  'field.delim' = ','
-           |)
-           |STORED AS
-           |  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
-           |  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
-         """.stripMargin
-      )
-
-      checkCreateTable("t1")
-    }
-  }
-
-  test("hive view") {
+  test("view") {
     withView("v1") {
       sql("CREATE VIEW v1 AS SELECT 1 AS a")
       checkCreateView("v1")
     }
   }
 
-  test("hive view with output columns") {
+  test("view with output columns") {
     withView("v1") {
       sql("CREATE VIEW v1 (b) AS SELECT 1 AS a")
       checkCreateView("v1")
     }
   }
 
-  test("hive bucketing is supported") {
-    withTable("t1") {
-      sql(
-        s"""CREATE TABLE t1 (a INT, b STRING)
-           |CLUSTERED BY (a)
-           |SORTED BY (b)
-           |INTO 2 BUCKETS
-         """.stripMargin
-      )
-      checkCreateTable("t1")
-    }
-  }
-
-  test("hive partitioned view is not supported") {
-    withTable("t1") {
-      withView("v1") {
-        sql(
-          s"""
-             |CREATE TABLE t1 (c1 INT, c2 STRING)
-             |PARTITIONED BY (
-             |  p1 BIGINT COMMENT 'bla',
-             |  p2 STRING )
-           """.stripMargin)
-
-        createRawHiveTable(
-          s"""
-             |CREATE VIEW v1
-             |PARTITIONED ON (p1, p2)
-             |AS SELECT * from t1
-           """.stripMargin
-        )
-
-        val cause = intercept[AnalysisException] {
-          sql("SHOW CREATE TABLE v1")
-        }
-
-        assert(cause.getMessage.contains(" - partitioned view"))
-      }
-    }
-  }
-
   test("SPARK-24911: keep quotes for nested fields") {
     withTable("t1") {
-      val createTable = "CREATE TABLE `t1`(`a` STRUCT<`b`: STRING>)"
-      sql(createTable)
+      val createTable = "CREATE TABLE `t1` (`a` STRUCT<`b`: STRING>)"
+      sql(s"$createTable USING json")
       val shownDDL = sql(s"SHOW CREATE TABLE t1")
         .head()
         .getString(0)
@@ -303,16 +149,11 @@ class ShowCreateTableSuite extends QueryTest with SQLTestUtils with TestHiveSing
     }
   }
 
-  private def createRawHiveTable(ddl: String): Unit = {
-    hiveContext.sharedState.externalCatalog.unwrapped.asInstanceOf[HiveExternalCatalog]
-      .client.runSqlHive(ddl)
-  }
-
-  private def checkCreateTable(table: String): Unit = {
+  protected def checkCreateTable(table: String): Unit = {
     checkCreateTableOrView(TableIdentifier(table, Some("default")), "TABLE")
   }
 
-  private def checkCreateView(table: String): Unit = {
+  protected def checkCreateView(table: String): Unit = {
     checkCreateTableOrView(TableIdentifier(table, Some("default")), "VIEW")
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala
new file mode 100644
index 0000000000000..0386dc79804c6
--- /dev/null
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala
@@ -0,0 +1,198 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive
+
+import org.apache.spark.sql.{AnalysisException, ShowCreateTableSuite}
+import org.apache.spark.sql.hive.test.TestHiveSingleton
+
+class HiveShowCreateTableSuite extends ShowCreateTableSuite with TestHiveSingleton {
+
+  test("simple hive table") {
+    withTable("t1") {
+      sql(
+        s"""CREATE TABLE t1 (
+           |  c1 INT COMMENT 'bla',
+           |  c2 STRING
+           |)
+           |TBLPROPERTIES (
+           |  'prop1' = 'value1',
+           |  'prop2' = 'value2'
+           |)
+         """.stripMargin
+      )
+
+      checkCreateTable("t1")
+    }
+  }
+
+  test("simple external hive table") {
+    withTempDir { dir =>
+      withTable("t1") {
+        sql(
+          s"""CREATE TABLE t1 (
+             |  c1 INT COMMENT 'bla',
+             |  c2 STRING
+             |)
+             |LOCATION '${dir.toURI}'
+             |TBLPROPERTIES (
+             |  'prop1' = 'value1',
+             |  'prop2' = 'value2'
+             |)
+           """.stripMargin
+        )
+
+        checkCreateTable("t1")
+      }
+    }
+  }
+
+  test("partitioned hive table") {
+    withTable("t1") {
+      sql(
+        s"""CREATE TABLE t1 (
+           |  c1 INT COMMENT 'bla',
+           |  c2 STRING
+           |)
+           |COMMENT 'bla'
+           |PARTITIONED BY (
+           |  p1 BIGINT COMMENT 'bla',
+           |  p2 STRING
+           |)
+         """.stripMargin
+      )
+
+      checkCreateTable("t1")
+    }
+  }
+
+  test("hive table with explicit storage info") {
+    withTable("t1") {
+      sql(
+        s"""CREATE TABLE t1 (
+           |  c1 INT COMMENT 'bla',
+           |  c2 STRING
+           |)
+           |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+           |COLLECTION ITEMS TERMINATED BY '@'
+           |MAP KEYS TERMINATED BY '#'
+           |NULL DEFINED AS 'NaN'
+         """.stripMargin
+      )
+
+      checkCreateTable("t1")
+    }
+  }
+
+  test("hive table with STORED AS clause") {
+    withTable("t1") {
+      sql(
+        s"""CREATE TABLE t1 (
+           |  c1 INT COMMENT 'bla',
+           |  c2 STRING
+           |)
+           |STORED AS PARQUET
+         """.stripMargin
+      )
+
+      checkCreateTable("t1")
+    }
+  }
+
+  test("hive table with serde info") {
+    withTable("t1") {
+      sql(
+        s"""CREATE TABLE t1 (
+           |  c1 INT COMMENT 'bla',
+           |  c2 STRING
+           |)
+           |ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+           |WITH SERDEPROPERTIES (
+           |  'mapkey.delim' = ',',
+           |  'field.delim' = ','
+           |)
+           |STORED AS
+           |  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+           |  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+         """.stripMargin
+      )
+
+      checkCreateTable("t1")
+    }
+  }
+
+  test("hive bucketing is supported") {
+    withTable("t1") {
+      sql(
+        s"""CREATE TABLE t1 (a INT, b STRING)
+           |CLUSTERED BY (a)
+           |SORTED BY (b)
+           |INTO 2 BUCKETS
+         """.stripMargin
+      )
+      checkCreateTable("t1")
+    }
+  }
+
+  test("hive partitioned view is not supported") {
+    withTable("t1") {
+      withView("v1") {
+        sql(
+          s"""
+             |CREATE TABLE t1 (c1 INT, c2 STRING)
+             |PARTITIONED BY (
+             |  p1 BIGINT COMMENT 'bla',
+             |  p2 STRING )
+           """.stripMargin)
+
+        createRawHiveTable(
+          s"""
+             |CREATE VIEW v1
+             |PARTITIONED ON (p1, p2)
+             |AS SELECT * from t1
+           """.stripMargin
+        )
+
+        val cause = intercept[AnalysisException] {
+          sql("SHOW CREATE TABLE v1")
+        }
+
+        assert(cause.getMessage.contains(" - partitioned view"))
+      }
+    }
+  }
+
+  test("SPARK-24911: keep quotes for nested fields in hive") {
for nested fields in hive") { + withTable("t1") { + val createTable = "CREATE TABLE `t1`(`a` STRUCT<`b`: STRING>)" + sql(createTable) + val shownDDL = sql(s"SHOW CREATE TABLE t1") + .head() + .getString(0) + .split("\n") + .head + assert(shownDDL == createTable) + + checkCreateTable("t1") + } + } + + private def createRawHiveTable(ddl: String): Unit = { + hiveContext.sharedState.externalCatalog.unwrapped.asInstanceOf[HiveExternalCatalog] + .client.runSqlHive(ddl) + } +} From c2dce69370415f64c4ed802e5bdc241f1c868596 Mon Sep 17 00:00:00 2001 From: Takuya UESHIN Date: Tue, 30 Oct 2018 18:55:06 +0900 Subject: [PATCH 2/6] Add TBLPROPERTIES and COMMENT, and use LOCATION. --- .../spark/sql/execution/command/tables.scala | 34 ++++++++++--------- .../spark/sql/ShowCreateTableSuite.scala | 28 +++++++++++++++ 2 files changed, 46 insertions(+), 16 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 64831e5089a67..62a9b3aed19d6 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -957,9 +957,10 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman builder ++= metadata.viewText.mkString(" AS\n", "", "\n") } else { showHiveTableHeader(metadata, builder) + showTableComment(metadata, builder) showHiveTableNonDataColumns(metadata, builder) showHiveTableStorageInfo(metadata, builder) - showHiveTableProperties(metadata, builder) + showTableProperties(metadata, builder) } builder.toString() @@ -973,14 +974,8 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman if (columns.nonEmpty) { builder ++= columns.mkString("(", ", ", ")\n") } - - metadata - .comment - .map("COMMENT '" + escapeSingleQuotedString(_) + "'\n") - .foreach(builder.append) } - private def showHiveTableNonDataColumns(metadata: CatalogTable, builder: StringBuilder): Unit = { if (metadata.partitionColumnNames.nonEmpty) { val partCols = metadata.partitionSchema.map(_.toDDL) @@ -1031,7 +1026,14 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman } } - private def showHiveTableProperties(metadata: CatalogTable, builder: StringBuilder): Unit = { + private def showTableComment(metadata: CatalogTable, builder: StringBuilder): Unit = { + metadata + .comment + .map("COMMENT '" + escapeSingleQuotedString(_) + "'\n") + .foreach(builder.append) + } + + private def showTableProperties(metadata: CatalogTable, builder: StringBuilder): Unit = { if (metadata.properties.nonEmpty) { val props = metadata.properties.map { case (key, value) => s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'" @@ -1048,6 +1050,8 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman showDataSourceTableDataColumns(metadata, builder) showDataSourceTableOptions(metadata, builder) showDataSourceTableNonDataColumns(metadata, builder) + showTableComment(metadata, builder) + showTableProperties(metadata, builder) builder.toString() } @@ -1063,14 +1067,6 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman val dataSourceOptions = metadata.storage.properties.map { case (key, value) => s"${quoteIdentifier(key)} '${escapeSingleQuotedString(value)}'" - } ++ metadata.storage.locationUri.flatMap { location => - if (metadata.tableType == MANAGED) { - // If it's a managed 
-        // when the table creation DDL contains the PATH option.
-        None
-      } else {
-        Some(s"path '${escapeSingleQuotedString(CatalogUtils.URIToString(location))}'")
-      }
     }
 
     if (dataSourceOptions.nonEmpty) {
       builder ++= dataSourceOptions.mkString("  ", ",\n  ", "\n")
       builder ++= ")\n"
     }
+
+    if (metadata.tableType == EXTERNAL) {
+      metadata.storage.locationUri.foreach { location =>
+        builder ++= s"LOCATION '${escapeSingleQuotedString(CatalogUtils.URIToString(location))}'\n"
+      }
+    }
   }
 
   private def showDataSourceTableNonDataColumns(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
index 0d90199cb6b55..5c347d2677d5e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
@@ -105,6 +105,34 @@ abstract class ShowCreateTableSuite extends QueryTest with SQLTestUtils {
     }
   }
 
+  test("data source table with a comment") {
+    withTable("ddl_test") {
+      sql(
+        s"""CREATE TABLE ddl_test
+           |USING json
+           |COMMENT 'This is a comment'
+           |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c
+         """.stripMargin
+      )
+
+      checkCreateTable("ddl_test")
+    }
+  }
+
+  test("data source table with table properties") {
+    withTable("ddl_test") {
+      sql(
+        s"""CREATE TABLE ddl_test
+           |USING json
+           |TBLPROPERTIES ('a' = '1')
+           |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c
+         """.stripMargin
+      )
+
+      checkCreateTable("ddl_test")
+    }
+  }
+
   test("data source table using Dataset API") {
     withTable("ddl_test") {
       spark

From 1cc1657d7a5cb6cf54580faeb57990b39e9f0f90 Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Tue, 30 Oct 2018 20:20:14 +0900
Subject: [PATCH 3/6] Remove DDL_TIME property.
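
Hive stamps every table it stores with the DDL_TIME property
("transient_lastDdlTime") when writing to the metastore. If we keep it when
restoring the table metadata, it leaks into SHOW CREATE TABLE output and makes
the generated DDL non-reproducible, e.g. (the timestamp is illustrative):

    TBLPROPERTIES (
      'transient_lastDdlTime' = '1540887226',
      'prop1' = 'value1'
    )

Filter such Hive-generated table properties out on restore, next to the
existing HIVE_GENERATED_STORAGE_PROPERTIES filter for Hive-generated storage
properties such as SERIALIZATION_FORMAT.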
---
 .../org/apache/spark/sql/hive/HiveExternalCatalog.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 445161d5de1c2..c1178ad4a84fb 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -27,6 +27,7 @@ import scala.util.control.NonFatal
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.DDL_TIME
 import org.apache.hadoop.hive.ql.metadata.HiveException
 import org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT
 import org.apache.thrift.TException
@@ -821,7 +822,8 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
       schema = reorderedSchema,
       partitionColumnNames = partColumnNames,
       bucketSpec = getBucketSpecFromTableProperties(table),
-      tracksPartitionsInCatalog = partitionProvider == Some(TABLE_PARTITION_PROVIDER_CATALOG))
+      tracksPartitionsInCatalog = partitionProvider == Some(TABLE_PARTITION_PROVIDER_CATALOG),
+      properties = table.properties.filterKeys(!HIVE_GENERATED_TABLE_PROPERTIES(_)))
   }
 
   override def tableExists(db: String, table: String): Boolean = withClient {
@@ -1328,6 +1330,7 @@ object HiveExternalCatalog {
 
   val CREATED_SPARK_VERSION = SPARK_SQL_PREFIX + "create.version"
 
+  val HIVE_GENERATED_TABLE_PROPERTIES = Set(DDL_TIME)
   val HIVE_GENERATED_STORAGE_PROPERTIES = Set(SERIALIZATION_FORMAT)
 
   // When storing data source tables in hive metastore, we need to set data schema to empty if the

From 0bb6aa4998043e75da5bf602c22659c0f6cecedd Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Tue, 30 Oct 2018 21:04:26 +0900
Subject: [PATCH 4/6] Remove an unneeded file.

---
 sql/hive/src/test/resources/sample.json | 2 --
 1 file changed, 2 deletions(-)
 delete mode 100644 sql/hive/src/test/resources/sample.json

diff --git a/sql/hive/src/test/resources/sample.json b/sql/hive/src/test/resources/sample.json
deleted file mode 100644
index a2c2ffd5e0330..0000000000000
--- a/sql/hive/src/test/resources/sample.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{"a" : "2" ,"b" : "blah", "c_!@(3)":1}
-{"" : {"d!" : [4, 5], "=" : [{"Dd2": null}, {"Dd2" : true}]}}

From 0a3b5f34d8c7952a54dbcdfe18217fc7634f2d3c Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Wed, 31 Oct 2018 18:35:53 +0900
Subject: [PATCH 5/6] Add a golden file.
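
The golden file exercises SHOW CREATE TABLE for data source tables created
with OPTIONS, a path option, LOCATION, PARTITIONED BY, CLUSTERED BY, COMMENT
and TBLPROPERTIES. Each case follows the same pattern, e.g.:

    CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet;
    SHOW CREATE TABLE tbl;
    DROP TABLE tbl;

The expected output in show-create-table.sql.out is produced by
SQLQueryTestSuite and should be regenerable with its
SPARK_GENERATE_GOLDEN_FILES=1 mode.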
---
 .../sql-tests/inputs/show-create-table.sql   |  61 +++++
 .../results/show-create-table.sql.out        | 222 ++++++++++++++++++
 2 files changed, 283 insertions(+)
 create mode 100644 sql/core/src/test/resources/sql-tests/inputs/show-create-table.sql
 create mode 100644 sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out

diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-create-table.sql b/sql/core/src/test/resources/sql-tests/inputs/show-create-table.sql
new file mode 100644
index 0000000000000..852bfbd63847d
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-create-table.sql
@@ -0,0 +1,61 @@
+-- simple
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet;
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- options
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+OPTIONS ('a' 1);
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- path option
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+OPTIONS ('path' '/path/to/table');
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- location
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+LOCATION '/path/to/table';
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- partition by
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+PARTITIONED BY (a);
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- clustered by
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+CLUSTERED BY (a) SORTED BY (b ASC) INTO 2 BUCKETS;
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- comment
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+COMMENT 'This is a comment';
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
+
+
+-- tblproperties
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+TBLPROPERTIES ('a' = '1');
+
+SHOW CREATE TABLE tbl;
+DROP TABLE tbl;
diff --git a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
new file mode 100644
index 0000000000000..1faf16cc30509
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
@@ -0,0 +1,222 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 24
+
+
+-- !query 0
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+-- !query 0 schema
+struct<>
+-- !query 0 output
+
+
+
+-- !query 1
+SHOW CREATE TABLE tbl
+-- !query 1 schema
+struct<createtab_stmt:string>
+-- !query 1 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+
+
+-- !query 2
+DROP TABLE tbl
+-- !query 2 schema
+struct<>
+-- !query 2 output
+
+
+
+-- !query 3
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+OPTIONS ('a' 1)
+-- !query 3 schema
+struct<>
+-- !query 3 output
+
+
+
+-- !query 4
+SHOW CREATE TABLE tbl
+-- !query 4 schema
+struct<createtab_stmt:string>
+-- !query 4 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+OPTIONS (
+  `a` '1'
+)
+
+
+-- !query 5
+DROP TABLE tbl
+-- !query 5 schema
+struct<>
+-- !query 5 output
+
+
+
+-- !query 6
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+OPTIONS ('path' '/path/to/table')
+-- !query 6 schema
+struct<>
+-- !query 6 output
+
+
+
+-- !query 7
+SHOW CREATE TABLE tbl
+-- !query 7 schema
+struct<createtab_stmt:string>
+-- !query 7 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+LOCATION 'file:/path/to/table'
+
+
+-- !query 8
+DROP TABLE tbl
+-- !query 8 schema
+struct<>
+-- !query 8 output
+
+
+
+-- !query 9
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+LOCATION '/path/to/table'
+-- !query 9 schema
+struct<>
+-- !query 9 output
+
+
+
+-- !query 10
+SHOW CREATE TABLE tbl
+-- !query 10 schema
+struct<createtab_stmt:string>
+-- !query 10 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+LOCATION 'file:/path/to/table'
+
+
+-- !query 11
+DROP TABLE tbl
+-- !query 11 schema
+struct<>
+-- !query 11 output
+
+
+
+-- !query 12
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+PARTITIONED BY (a)
+-- !query 12 schema
+struct<>
+-- !query 12 output
+
+
+
+-- !query 13
+SHOW CREATE TABLE tbl
+-- !query 13 schema
+struct<createtab_stmt:string>
+-- !query 13 output
+CREATE TABLE `tbl` (`b` STRING, `c` INT, `a` INT)
+USING parquet
+PARTITIONED BY (a)
+
+
+-- !query 14
+DROP TABLE tbl
+-- !query 14 schema
+struct<>
+-- !query 14 output
+
+
+
+-- !query 15
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+CLUSTERED BY (a) SORTED BY (b ASC) INTO 2 BUCKETS
+-- !query 15 schema
+struct<>
+-- !query 15 output
+
+
+
+-- !query 16
+SHOW CREATE TABLE tbl
+-- !query 16 schema
+struct<createtab_stmt:string>
+-- !query 16 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+CLUSTERED BY (a)
+SORTED BY (b)
+INTO 2 BUCKETS
+
+
+-- !query 17
+DROP TABLE tbl
+-- !query 17 schema
+struct<>
+-- !query 17 output
+
+
+
+-- !query 18
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+COMMENT 'This is a comment'
+-- !query 18 schema
+struct<>
+-- !query 18 output
+
+
+
+-- !query 19
+SHOW CREATE TABLE tbl
+-- !query 19 schema
+struct<createtab_stmt:string>
+-- !query 19 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+COMMENT 'This is a comment'
+
+
+-- !query 20
+DROP TABLE tbl
+-- !query 20 schema
+struct<>
+-- !query 20 output
+
+
+
+-- !query 21
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+TBLPROPERTIES ('a' = '1')
+-- !query 21 schema
+struct<>
+-- !query 21 output
+
+
+
+-- !query 22
+SHOW CREATE TABLE tbl
+-- !query 22 schema
+struct<createtab_stmt:string>
+-- !query 22 output
+CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
+USING parquet
+TBLPROPERTIES (
+  'a' = '1'
+)
+
+
+-- !query 23
+DROP TABLE tbl
+-- !query 23 schema
+struct<>
+-- !query 23 output
+

From 4f1c9c89b4a76e14d3c7ddd75334ad5584da9832 Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Thu, 1 Nov 2018 00:05:23 +0900
Subject: [PATCH 6/6] Use `showTableLocation`.
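
This deduplicates the LOCATION handling: both the Hive table and data source
table code paths of ShowCreateTableCommand now call the shared
showTableLocation helper for external tables, so both emit the clause in the
same escaped form, e.g.:

    CREATE TABLE `tbl` (`a` INT, `b` STRING, `c` INT)
    USING parquet
    LOCATION 'file:/path/to/table'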
---
 .../spark/sql/execution/command/tables.scala | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 62a9b3aed19d6..871eba49dfbd0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -960,6 +960,7 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
       showTableComment(metadata, builder)
       showHiveTableNonDataColumns(metadata, builder)
       showHiveTableStorageInfo(metadata, builder)
+      showTableLocation(metadata, builder)
       showTableProperties(metadata, builder)
     }
 
     builder.toString()
@@ -1018,10 +1019,12 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
         builder ++= s"  OUTPUTFORMAT '${escapeSingleQuotedString(format)}'\n"
       }
     }
+  }
 
+  private def showTableLocation(metadata: CatalogTable, builder: StringBuilder): Unit = {
     if (metadata.tableType == EXTERNAL) {
-      storage.locationUri.foreach { uri =>
-        builder ++= s"LOCATION '$uri'\n"
+      metadata.storage.locationUri.foreach { location =>
+        builder ++= s"LOCATION '${escapeSingleQuotedString(CatalogUtils.URIToString(location))}'\n"
       }
     }
   }
@@ -1051,6 +1054,7 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
     showDataSourceTableOptions(metadata, builder)
     showDataSourceTableNonDataColumns(metadata, builder)
     showTableComment(metadata, builder)
+    showTableLocation(metadata, builder)
     showTableProperties(metadata, builder)
 
     builder.toString()
@@ -1074,12 +1078,6 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
       builder ++= dataSourceOptions.mkString("  ", ",\n  ", "\n")
       builder ++= ")\n"
     }
-
-    if (metadata.tableType == EXTERNAL) {
-      metadata.storage.locationUri.foreach { location =>
-        builder ++= s"LOCATION '${escapeSingleQuotedString(CatalogUtils.URIToString(location))}'\n"
-      }
-    }
   }
 
   private def showDataSourceTableNonDataColumns(