From c4cdf18aad213c1de030cc5312487340c4074f7b Mon Sep 17 00:00:00 2001
From: Angerszhuuuu
Date: Thu, 19 Oct 2023 20:22:29 +0800
Subject: [PATCH] [KYUUBI #5478][AUTHZ] Support Hudi ShowHoodieTablePartitionsCommand

### _Why are the changes needed?_

To close #5478.
Kyuubi authz supports the Hudi ShowHoodieTablePartitionsCommand:
- ShowHoodieTablePartitionsCommand: https://github.com/apache/hudi/blob/master/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/ShowHoodieTablePartitionsCommand.scala

### _How was this patch tested?_

- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

### _Was this patch authored or co-authored using generative AI tooling?_

No

Closes #5481 from AngersZhuuuu/KYUUBI-5478.

Closes #5478

ef276f36c [Angerszhuuuu] [KYUUBI #5478][AUTHZ] Support Hudi ShowHoodieTablePartitionsCommand

Authored-by: Angerszhuuuu
Signed-off-by: Cheng Pan
---
 .../main/resources/table_command_spec.json    | 17 +++++++++
 .../plugin/spark/authz/gen/HudiCommands.scala | 12 ++++++
 ...HudiCatalogRangerSparkExtensionSuite.scala | 37 +++++++++++++++++++
 3 files changed, 66 insertions(+)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 2d7199ff92f..c739fe295de 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1657,6 +1657,23 @@
   } ],
   "opType" : "MSCK",
   "queryDescs" : [ ]
+}, {
+  "classname" : "org.apache.spark.sql.hudi.command.ShowHoodieTablePartitionsCommand",
+  "tableDescs" : [ {
+    "fieldName" : "tableIdentifier",
+    "fieldExtractor" : "TableIdentifierTableExtractor",
+    "columnDesc" : {
+      "fieldName" : "specOpt",
+      "fieldExtractor" : "PartitionOptionColumnExtractor"
+    },
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : true,
+    "setCurrentDatabaseIfMissing" : false
+  } ],
+  "opType" : "SHOWPARTITIONS",
+  "queryDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.Spark31AlterTableCommand",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
index 0b19204f530..d7e40237bfc 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
@@ -154,6 +154,17 @@ object HudiCommands {
     TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
   }
 
+  val ShowHoodieTablePartitionsCommand = {
+    val cmd = "org.apache.spark.sql.hudi.command.ShowHoodieTablePartitionsCommand"
+    val columnDesc = ColumnDesc("specOpt", classOf[PartitionOptionColumnExtractor])
+    val tableDesc = TableDesc(
+      "tableIdentifier",
+      classOf[TableIdentifierTableExtractor],
+      isInput = true,
+      columnDesc = Some(columnDesc))
+    TableCommandSpec(cmd, Seq(tableDesc), SHOWPARTITIONS)
+  }
+
   val data: Array[TableCommandSpec] = Array(
     AlterHoodieTableAddColumnsCommand,
     AlterHoodieTableChangeColumnCommand,
@@ -169,5 +180,6 @@ object HudiCommands {
     InsertIntoHoodieTableCommand,
     RepairHoodieTableCommand,
     TruncateHoodieTableCommand,
+    ShowHoodieTablePartitionsCommand,
     Spark31AlterTableCommand)
 }
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
index e707f0c9ed2..193446bb24f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
@@ -370,4 +370,41 @@ class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("ShowHoodieTablePartitionsCommand") {
+    withSingleCallEnabled {
+      withCleanTmpResources(Seq(
+        (s"$namespace1.$table1", "table"),
+        (s"$namespace1.$table2", "table"),
+        (namespace1, "database"))) {
+        doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1"))
+        doAs(
+          admin,
+          sql(
+            s"""
+               |CREATE TABLE IF NOT EXISTS $namespace1.$table1(id int, name string, city string)
+               |USING HUDI
+               |OPTIONS (
+               | type = 'cow',
+               | primaryKey = 'id',
+               | 'hoodie.datasource.hive_sync.enable' = 'false'
+               |)
+               |PARTITIONED BY(city)
+               |""".stripMargin))
+
+        val showPartitionsSql = s"SHOW PARTITIONS $namespace1.$table1"
+        interceptContains[AccessControlException] {
+          doAs(someone, sql(showPartitionsSql))
+        }(s"does not have [select] privilege on [$namespace1/$table1]")
+        doAs(admin, sql(showPartitionsSql))
+
+        val showPartitionSpecSql =
+          s"SHOW PARTITIONS $namespace1.$table1 PARTITION (city = 'hangzhou')"
+        interceptContains[AccessControlException] {
+          doAs(someone, sql(showPartitionSpecSql))
+        }(s"does not have [select] privilege on [$namespace1/$table1/city]")
+        doAs(admin, sql(showPartitionSpecSql))
+      }
+    }
+  }
 }
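
For context, below is a minimal sketch of how the new mapping is exercised from plain Spark SQL once the Kyuubi Ranger authz extension is enabled. It is illustrative only and not part of the patch: the session configs, the `default_ns.hudi_tbl` table (partitioned by `city`), and the assumption that Ranger policies grant `select` only to authorized users are all placeholders chosen for this example.

```scala
// Sketch: SHOW PARTITIONS on a Hudi table is now authorized by the Kyuubi
// authz plugin. Assumes the Hudi bundle and the kyuubi-spark-authz jar are on
// the classpath and a Ranger plugin configuration is present; table and
// database names are hypothetical.
import org.apache.spark.sql.SparkSession

object ShowHudiPartitionsAuthzSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("hudi-show-partitions-authz")
      // Enable Hudi SQL support together with the Kyuubi Ranger authz extension.
      .config(
        "spark.sql.extensions",
        "org.apache.spark.sql.hudi.HoodieSparkSessionExtension," +
          "org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // Listing all partitions now requires the [select] privilege on the table;
    // without it the plugin raises an AccessControlException.
    spark.sql("SHOW PARTITIONS default_ns.hudi_tbl").show()

    // With a partition spec, the PartitionOptionColumnExtractor also surfaces
    // the partition column (city), so column-level policies are enforced too.
    spark.sql("SHOW PARTITIONS default_ns.hudi_tbl PARTITION (city = 'hangzhou')").show()

    spark.stop()
  }
}
```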