Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-33667][SQL][3.0] Respect the spark.sql.caseSensitive config while resolving partition spec in v1 SHOW PARTITIONS #30626

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1012,20 +1012,18 @@ case class ShowPartitionsCommand(
DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "SHOW PARTITIONS")

/**
* Validate the partitioning spec by making sure all the referenced columns are
* Normalizes the partition spec w.r.t the partition columns and case sensitivity settings,
* and validates the spec by making sure all the referenced columns are
defined as partitioning columns in the table definition. An AnalysisException is
thrown if the partitioning spec is invalid.
*/
if (spec.isDefined) {
val badColumns = spec.get.keySet.filterNot(table.partitionColumnNames.contains)
if (badColumns.nonEmpty) {
val badCols = badColumns.mkString("[", ", ", "]")
throw new AnalysisException(
s"Non-partitioning column(s) $badCols are specified for SHOW PARTITIONS")
}
}
val normalizedSpec = spec.map(partitionSpec => PartitioningUtils.normalizePartitionSpec(
partitionSpec,
table.partitionColumnNames,
table.identifier.quotedString,
sparkSession.sessionState.conf.resolver))

val partNames = catalog.listPartitionNames(tableName, spec)
val partNames = catalog.listPartitionNames(tableName, normalizedSpec)
partNames.map(Row(_))
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3032,7 +3032,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
}
}

test("SPARK-33588: case sensitivity of partition spec") {
test("SPARK-33588: case sensitivity of partition spec in SHOW TABLE") {
val t = "part_table"
withTable(t) {
sql(s"""
Expand All @@ -3052,6 +3052,27 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
}
}
}

test("SPARK-33667: case sensitivity of partition spec in SHOW PARTITIONS") {
  // Verifies that the partition spec of SHOW PARTITIONS is resolved against the
  // table's partition columns according to the spark.sql.caseSensitive setting:
  // an exactly-matching spec must work in both modes, and a differently-cased
  // spec must still resolve when case sensitivity is off.
  val t = "part_table"
  withTable(t) {
    sql(
      s"""
         |CREATE TABLE $t (price int, qty int, year int, month int)
         |USING $dataSource
         |PARTITIONED BY (year, month)""".stripMargin)
    sql(s"INSERT INTO $t PARTITION(year = 2015, month = 1) SELECT 1, 1")
    // (caseSensitive setting, partition spec to test under it)
    val specsByMode = Seq(
      true -> "PARTITION(year = 2015, month = 1)",
      false -> "PARTITION(YEAR = 2015, Month = 1)")
    specsByMode.foreach { case (caseSensitive, partitionSpec) =>
      withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
        val shown = sql(s"SHOW PARTITIONS $t $partitionSpec")
        // Output is always rendered with the table's declared column casing.
        checkAnswer(shown, Row("year=2015/month=1"))
      }
    }
  }
}
}

object FakeLocalFsFileSystem {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -475,7 +475,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
val message2 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
}.getMessage
assert(message2.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
assert(message2.contains("abcd is not a valid partition column"))

val message3 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_view1")
Expand Down