[KYUUBI apache#5652][AUTHZ] CreateTable related command support path privilege

### _Why are the changes needed?_
To close apache#5652
Make CreateTable-related commands enforce path (URI) privileges on their target locations.
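For illustration (not part of the original patch), a hedged sketch of the statement shapes that now additionally require a location privilege; the session setup and all table/path names are made up:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("authz-demo").getOrCreate()

// CREATE TABLE variants that now also require a [write] privilege on the
// target location, in addition to the usual table-level privileges:
spark.sql("CREATE TABLE db1.t1 (id INT) LOCATION '/warehouse/ext/t1'")
spark.sql("CREATE TABLE db1.t2 LIKE db1.t1 LOCATION '/warehouse/ext/t2'")
spark.sql("CREATE TABLE db1.t3 LOCATION '/warehouse/ext/t3' AS SELECT * FROM db1.t1")
```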

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

### _Was this patch authored or co-authored using generative AI tooling?_
No

Closes apache#5656 from AngersZhuuuu/KYUUBI-5652.

Closes apache#5652

e89d208 [Angerszhuuuu] Update org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
002fe3b [Angerszhuuuu] Update org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
6a31edc [Angerszhuuuu] Merge branch 'master' into KYUUBI-5652
0d5b4fa [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
3e686cf [Angerszhuuuu] Revert "Update extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala"
8b4cbda [Angerszhuuuu] Update extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
1a9959c [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
97e6912 [Angerszhuuuu] Update table_command_spec.json
4b96799 [Angerszhuuuu] update
f34ebdb [Angerszhuuuu] Update PrivilegesBuilder.scala
239da7f [Angerszhuuuu] Merge branch 'master' into KYUUBI-5652
b45ecfc [Angerszhuuuu] [KYUUBI apache#5652][AUTHZ] CreateTable related command support path privilege

Authored-by: Angerszhuuuu <angers.zhu@gmail.com>
Signed-off-by: Cheng Pan <chengpan@apache.org>
AngersZhuuuu authored and pan3793 committed Nov 10, 2023
1 parent bad0ccb commit eb84e15
Showing 5 changed files with 179 additions and 20 deletions.
org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor (META-INF services registration):
@@ -17,8 +17,10 @@
 
 org.apache.kyuubi.plugin.spark.authz.serde.BaseRelationFileIndexURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.CatalogTableURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.PartitionLocsSeqURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.PropertiesLocationUriExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.PropertiesPathUriExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.StringSeqURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.TableSpecURIExtractor
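The registration above follows the standard `META-INF/services` convention. A hedged sketch of how such a registration can be consumed — the actual serde loading code is not part of this diff, and keying by simple class name is an assumption based on how the JSON specs below reference extractors:

```scala
// Hedged sketch (Scala 2.13 converters): resolve registered URIExtractors
// by simple class name, matching references like "TableSpecURIExtractor"
// in table_command_spec.json. The real loading code may differ.
import java.util.ServiceLoader
import scala.jdk.CollectionConverters._

import org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor

val uriExtractors: Map[String, URIExtractor] =
  ServiceLoader.load(classOf[URIExtractor]).asScala
    .map(e => e.getClass.getSimpleName -> e)
    .toMap
```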
table_command_spec.json:
@@ -148,7 +148,15 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableSpec",
+    "fieldExtractor" : "TableSpecURIExtractor",
+    "isInput" : false
+  }, {
+    "fieldName" : "properties",
+    "fieldExtractor" : "PropertiesLocationUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateTableAsSelect",
   "tableDescs" : [ {
@@ -187,7 +195,15 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableSpec",
+    "fieldExtractor" : "TableSpecURIExtractor",
+    "isInput" : false
+  }, {
+    "fieldName" : "properties",
+    "fieldExtractor" : "PropertiesLocationUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateV2Table",
   "tableDescs" : [ {
@@ -205,7 +221,11 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "properties",
+    "fieldExtractor" : "PropertiesLocationUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable",
   "tableDescs" : [ {
@@ -492,7 +512,15 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableSpec",
+    "fieldExtractor" : "TableSpecURIExtractor",
+    "isInput" : false
+  }, {
+    "fieldName" : "properties",
+    "fieldExtractor" : "PropertiesLocationUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceTableAsSelect",
   "tableDescs" : [ {
@@ -531,7 +559,15 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableSpec",
+    "fieldExtractor" : "TableSpecURIExtractor",
+    "isInput" : false
+  }, {
+    "fieldName" : "properties",
+    "fieldExtractor" : "PropertiesLocationUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.SetTableProperties",
   "tableDescs" : [ {
@@ -1012,7 +1048,11 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "table",
+    "fieldExtractor" : "CatalogTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateDataSourceTableCommand",
   "tableDescs" : [ {
@@ -1027,7 +1067,11 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "table",
+    "fieldExtractor" : "CatalogTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateTableCommand",
   "tableDescs" : [ {
@@ -1042,7 +1086,11 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "table",
+    "fieldExtractor" : "CatalogTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateTableLikeCommand",
   "tableDescs" : [ {
@@ -1066,7 +1114,11 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "fileFormat",
+    "fieldExtractor" : "CatalogStorageFormatURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateViewCommand",
   "tableDescs" : [ {
@@ -1329,7 +1381,11 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanOptionQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableDesc",
+    "fieldExtractor" : "CatalogTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.datasources.CreateTempViewUsing",
   "tableDescs" : [ ],
@@ -1431,7 +1487,11 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableDesc",
+    "fieldExtractor" : "CatalogTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
   "tableDescs" : [ ],
@@ -1490,7 +1550,11 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "tableDesc",
+    "fieldExtractor" : "CatalogTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.Call",
   "tableDescs" : [ {
URIExtractor.scala:
@@ -17,9 +17,11 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat
+import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable}
 import org.apache.spark.sql.execution.datasources.HadoopFsRelation
 
+import org.apache.kyuubi.util.reflect.ReflectUtils.invokeAs
+
 trait URIExtractor extends (AnyRef => Seq[Uri]) with Extractor
 
 object URIExtractor {
@@ -74,6 +76,18 @@ class BaseRelationFileIndexURIExtractor extends URIExtractor {
   }
 }
 
+class TableSpecURIExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Seq[Uri] = {
+    new StringURIExtractor().apply(invokeAs[Option[String]](v1, "location"))
+  }
+}
+
+class CatalogTableURIExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Seq[Uri] = {
+    v1.asInstanceOf[CatalogTable].storage.locationUri.map(_.toString).map(Uri).toSeq
+  }
+}
+
 class PartitionLocsSeqURIExtractor extends URIExtractor {
   override def apply(v1: AnyRef): Seq[Uri] = {
     v1.asInstanceOf[Seq[(_, Option[String])]].flatMap(_._2).map(Uri)
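A hedged usage sketch of the new `CatalogTableURIExtractor`, with a hand-built minimal `CatalogTable` (real invocations receive it from the analyzed plan). `TableSpecURIExtractor` goes through `invokeAs` reflection instead, presumably to stay source-compatible with `TableSpec` across Spark versions:

```scala
import java.net.URI

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.types.StructType

import org.apache.kyuubi.plugin.spark.authz.serde.CatalogTableURIExtractor

// Build a minimal external table whose storage carries a location.
val storage = CatalogStorageFormat.empty.copy(locationUri = Some(new URI("file:///tmp/t1")))
val table = CatalogTable(
  identifier = TableIdentifier("t1", Some("default")),
  tableType = CatalogTableType.EXTERNAL,
  storage = storage,
  schema = new StructType())

// The extractor surfaces that location as a URI for the authz check.
new CatalogTableURIExtractor().apply(table) // Seq(Uri("file:///tmp/t1"))
```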
TableCommands.scala:
@@ -214,10 +214,14 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
       "tableName",
       classOf[IdentifierTableExtractor],
       catalogDesc = Some(CatalogDesc()))
+    val uriDescs = Seq(
+      UriDesc("tableSpec", classOf[TableSpecURIExtractor]),
+      UriDesc("properties", classOf[PropertiesLocationUriExtractor]))
     TableCommandSpec(
       cmd,
       Seq(resolvedIdentifierTableDesc, tableDesc, resolvedDbObjectNameDesc),
-      CREATETABLE)
+      CREATETABLE,
+      uriDescs = uriDescs)
   }
 
   val CreateV2Table = {
@@ -226,7 +230,8 @@
       "tableName",
       classOf[IdentifierTableExtractor],
       catalogDesc = Some(CatalogDesc()))
-    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
+    val uriDescs = Seq(UriDesc("properties", classOf[PropertiesLocationUriExtractor]))
+    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE, uriDescs = uriDescs)
   }
 
   val CreateTableAsSelectV2 = {
@@ -235,14 +240,18 @@
       "tableName",
       classOf[IdentifierTableExtractor],
       catalogDesc = Some(CatalogDesc()))
+    val uriDescs = Seq(
+      UriDesc("tableSpec", classOf[TableSpecURIExtractor]),
+      UriDesc("properties", classOf[PropertiesLocationUriExtractor]))
     TableCommandSpec(
       cmd,
       Seq(
         resolvedIdentifierTableDesc.copy(fieldName = "name"),
         tableDesc,
         resolvedDbObjectNameDesc.copy(fieldName = "name")),
       CREATETABLE_AS_SELECT,
-      Seq(queryQueryDesc))
+      Seq(queryQueryDesc),
+      uriDescs = uriDescs)
   }
 
   val CommentOnTable = {
@@ -376,14 +385,21 @@
     val cmd = "org.apache.spark.sql.execution.datasources.CreateTable"
     val tableDesc = TableDesc("tableDesc", classOf[CatalogTableTableExtractor])
     val queryDesc = QueryDesc("query", "LogicalPlanOptionQueryExtractor")
-    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE, queryDescs = Seq(queryDesc))
+    val uriDesc = UriDesc("tableDesc", classOf[CatalogTableURIExtractor])
+    TableCommandSpec(
+      cmd,
+      Seq(tableDesc),
+      CREATETABLE,
+      queryDescs = Seq(queryDesc),
+      uriDescs = Seq(uriDesc))
   }
 
   val CreateDataSourceTable = {
     val cmd = "org.apache.spark.sql.execution.command.CreateDataSourceTableCommand"
     val tableDesc =
       TableDesc("table", classOf[CatalogTableTableExtractor], setCurrentDatabaseIfMissing = true)
-    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
+    val uriDesc = UriDesc("table", classOf[CatalogTableURIExtractor])
+    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE, uriDescs = Seq(uriDesc))
   }
 
   val CreateDataSourceTableAsSelect = {
@@ -399,8 +415,14 @@
     val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
     val tableDesc =
       TableDesc("tableDesc", classOf[CatalogTableTableExtractor], Some(columnDesc))
+    val uriDesc = UriDesc("tableDesc", classOf[CatalogTableURIExtractor])
     val queryDesc = queryQueryDesc
-    TableCommandSpec(cmd, Seq(tableDesc), "CREATETABLE_AS_SELECT", queryDescs = Seq(queryDesc))
+    TableCommandSpec(
+      cmd,
+      Seq(tableDesc),
+      "CREATETABLE_AS_SELECT",
+      queryDescs = Seq(queryDesc),
+      uriDescs = Seq(uriDesc))
   }
 
   val CreateTableLike = {
@@ -414,7 +436,8 @@
       classOf[TableIdentifierTableExtractor],
       isInput = true,
       setCurrentDatabaseIfMissing = true)
-    TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), CREATETABLE)
+    val uriDesc = UriDesc("fileFormat", classOf[CatalogStorageFormatURIExtractor])
+    TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), CREATETABLE, uriDescs = Seq(uriDesc))
   }
 
   val DescribeColumn = {
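Each `UriDesc` added here corresponds one-to-one to a `uriDescs` entry in the `table_command_spec.json` hunks shown earlier; a hedged sketch of that correspondence (the serialization machinery is assumed, not shown in this diff):

```scala
import org.apache.kyuubi.plugin.spark.authz.serde.{TableSpecURIExtractor, UriDesc}

// A UriDesc names the command field to read and the extractor to apply;
// isInput defaults to false, i.e. the URI is treated as a write target.
val uriDesc = UriDesc("tableSpec", classOf[TableSpecURIExtractor])
// Serializes to:
//   { "fieldName" : "tableSpec",
//     "fieldExtractor" : "TableSpecURIExtractor",
//     "isInput" : false }
```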
extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala:
@@ -1240,4 +1240,60 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("Table Command location privilege") {
+    val db1 = defaultDb
+    val table1 = "table1"
+    val table2 = "table2"
+    withSingleCallEnabled {
+      withTempDir { path =>
+        withCleanTmpResources(Seq((s"$db1.$table1", "table"), (s"$db1.$table2", "table"))) {
+          interceptContains[AccessControlException](doAs(
+            someone,
+            sql(
+              s"""
+                 |CREATE TABLE IF NOT EXISTS $db1.$table1(id int, scope int)
+                 |LOCATION '$path'""".stripMargin)))(
+            s"does not have [create] privilege on [$db1/$table1]")
+          doAs(
+            admin,
+            sql(
+              s"""
+                 |CREATE TABLE IF NOT EXISTS $db1.$table1(id int, scope int)
+                 |LOCATION '$path'""".stripMargin))
+          interceptContains[AccessControlException](
+            doAs(
+              someone,
+              sql(
+                s"""
+                   |CREATE TABLE $db1.$table2
+                   |LIKE $db1.$table1
+                   |LOCATION '$path'
+                   |""".stripMargin)))(
+            s"does not have [select] privilege on [$db1/$table1], " +
+              s"[create] privilege on [$db1/$table2], " +
+              s"[write] privilege on [[$path, $path/]]")
+          interceptContains[AccessControlException](
+            doAs(
+              someone,
+              sql(
+                s"""
+                   |CREATE TABLE $db1.$table2
+                   |LOCATION '$path'
+                   |AS
+                   |SELECT * FROM $db1.$table1
+                   |""".stripMargin)))(
+            if (!isSparkV35OrGreater) {
+              s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " +
+                s"[create] privilege on [$db1/$table2/id,$db1/$table2/scope], " +
+                s"[write] privilege on [[$path, $path/]]"
+            } else {
+              s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " +
+                s"[create] privilege on [$db1/$table2/id,$db1/$table2/scope], " +
+                s"[write] privilege on [[file://$path, file://$path/]]"
+            })
+        }
+      }
+    }
+  }
 }
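The `isSparkV35OrGreater` branch above reflects that newer Spark versions hand the authz plugin a scheme-qualified location. A hedged sketch of the qualification mechanism using Hadoop's `Path` API (illustrative only, not the plugin's actual code path):

```scala
// Hedged sketch: qualifying a bare path against the local filesystem
// attaches the "file" scheme, which is why the Spark 3.5+ branch of the
// test expects "file://"-prefixed locations in the error message.
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

val raw = new Path("/tmp/warehouse/t2")
val fs = raw.getFileSystem(new Configuration())
println(fs.makeQualified(raw)) // prints a file:-scheme URI on a local FS
```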
