From 0201caecc4181bb0c884d0eae5febb33f1ba216a Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Fri, 2 Feb 2024 13:35:58 +0800
Subject: [PATCH] [SPARK-43742][TEST] Wrap withTable for a test in
 ResolveDefaultColumnsSuite

---
 .../sql/ResolveDefaultColumnsSuite.scala      | 104 +++++++++---------
 1 file changed, 53 insertions(+), 51 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala
index 29b2796d25aa4..00529559a4853 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala
@@ -76,57 +76,59 @@ class ResolveDefaultColumnsSuite extends QueryTest with SharedSparkSession {
   }
 
   test("INSERT into partitioned tables") {
-    sql("create table t(c1 int, c2 int, c3 int, c4 int) using parquet partitioned by (c3, c4)")
-
-    // INSERT without static partitions
-    checkError(
-      exception = intercept[AnalysisException] {
-        sql("insert into t values (1, 2, 3)")
-      },
-      errorClass = "INSERT_COLUMN_ARITY_MISMATCH.NOT_ENOUGH_DATA_COLUMNS",
-      parameters = Map(
-        "tableName" -> "`spark_catalog`.`default`.`t`",
-        "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
-        "dataColumns" -> "`col1`, `col2`, `col3`"))
-
-    // INSERT without static partitions but with column list
-    sql("truncate table t")
-    sql("insert into t (c2, c1, c4) values (1, 2, 3)")
-    checkAnswer(spark.table("t"), Row(2, 1, null, 3))
-
-    // INSERT with static partitions
-    sql("truncate table t")
-    checkError(
-      exception = intercept[AnalysisException] {
-        sql("insert into t partition(c3=3, c4=4) values (1)")
-      },
-      errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
-      parameters = Map(
-        "tableName" -> "`spark_catalog`.`default`.`t`",
-        "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
-        "dataColumns" -> "`col1`",
-        "staticPartCols" -> "`c3`, `c4`"))
-
-    // INSERT with static partitions and with column list
-    sql("truncate table t")
-    sql("insert into t partition(c3=3, c4=4) (c2) values (1)")
-    checkAnswer(spark.table("t"), Row(null, 1, 3, 4))
-
-    // INSERT with partial static partitions
-    sql("truncate table t")
-    checkError(
-      exception = intercept[AnalysisException] {
-        sql("insert into t partition(c3=3, c4) values (1, 2)")
-      },
-      errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
-      parameters = Map(
-        "tableName" -> "`spark_catalog`.`default`.`t`",
-        "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
-        "dataColumns" -> "`col1`, `col2`",
-        "staticPartCols" -> "`c3`"))
-
-    // INSERT with partial static partitions and with column list is not allowed
-    intercept[AnalysisException](sql("insert into t partition(c3=3, c4) (c1) values (1, 4)"))
+    withTable("t") {
+      sql("create table t(c1 int, c2 int, c3 int, c4 int) using parquet partitioned by (c3, c4)")
+
+      // INSERT without static partitions
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("insert into t values (1, 2, 3)")
+        },
+        errorClass = "INSERT_COLUMN_ARITY_MISMATCH.NOT_ENOUGH_DATA_COLUMNS",
+        parameters = Map(
+          "tableName" -> "`spark_catalog`.`default`.`t`",
+          "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
+          "dataColumns" -> "`col1`, `col2`, `col3`"))
+
+      // INSERT without static partitions but with column list
+      sql("truncate table t")
+      sql("insert into t (c2, c1, c4) values (1, 2, 3)")
+      checkAnswer(spark.table("t"), Row(2, 1, null, 3))
+
+      // INSERT with static partitions
+      sql("truncate table t")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("insert into t partition(c3=3, c4=4) values (1)")
+        },
+        errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
+        parameters = Map(
+          "tableName" -> "`spark_catalog`.`default`.`t`",
+          "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
+          "dataColumns" -> "`col1`",
+          "staticPartCols" -> "`c3`, `c4`"))
+
+      // INSERT with static partitions and with column list
+      sql("truncate table t")
+      sql("insert into t partition(c3=3, c4=4) (c2) values (1)")
+      checkAnswer(spark.table("t"), Row(null, 1, 3, 4))
+
+      // INSERT with partial static partitions
+      sql("truncate table t")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("insert into t partition(c3=3, c4) values (1, 2)")
+        },
+        errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
+        parameters = Map(
+          "tableName" -> "`spark_catalog`.`default`.`t`",
+          "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
+          "dataColumns" -> "`col1`, `col2`",
+          "staticPartCols" -> "`c3`"))
+
+      // INSERT with partial static partitions and with column list is not allowed
+      intercept[AnalysisException](sql("insert into t partition(c3=3, c4) (c1) values (1, 4)"))
+    }
   }
 
   test("SPARK-43085: Column DEFAULT assignment for target tables with multi-part names") {