# [KYUUBI apache#5816] Change spark rule class to object or case class
# 🔍 Description
## Issue References 🔗

This pull request fixes apache#5816

## Describe Your Solution 🔧

Spark rules that hold no constructor state are changed from `class` to `object`, and rules parameterized by a `SparkSession` are changed to `case class`, so that injection call sites can reference them directly instead of instantiating them with `new`.

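A minimal sketch of the pattern (`NoOpRule`, `NoOpSessionRule`, and `DemoExtension` are illustrative names, not part of this diff):

```scala
import org.apache.spark.sql.{SparkSession, SparkSessionExtensions}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

// Stateless rule: a singleton object is already a value, so no `new` is needed.
object NoOpRule extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan
}

// Session-scoped rule: the case-class companion is a `SparkSession => NoOpSessionRule`,
// which matches the builder type the injection methods expect.
case class NoOpSessionRule(spark: SparkSession) extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan
}

class DemoExtension extends (SparkSessionExtensions => Unit) {
  override def apply(extensions: SparkSessionExtensions): Unit = {
    extensions.injectOptimizerRule(_ => NoOpRule)   // was: _ => new NoOpRule
    extensions.injectOptimizerRule(NoOpSessionRule) // was: new NoOpSessionRule(_)
  }
}
```

Passing the companion object directly works because the synthetic companion of a one-parameter case class extends `Function1`, so it already has the `SparkSession => Rule[LogicalPlan]` builder type.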
## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚡️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests

---

# Checklists
## 📝 Author Self Checklist

- [ ] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
- [ ] I have performed a self-review
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

## 📝 Committer Pre-Merge Checklist

- [ ] Pull request title is okay.
- [ ] No license issues.
- [ ] Milestone correctly set?
- [ ] Test coverage is ok
- [ ] Assignees are selected.
- [ ] Minimum number of approvals
- [ ] No changes are requested

**Be nice. Be informative.**

Closes apache#5817 from zml1206/KYUUBI-5816.

Closes apache#5816

437dd1f [zml1206] Change spark rule class to object or case class

Authored-by: zml1206 <zhuml1206@gmail.com>
Signed-off-by: wforget <643348094@qq.com>
zml1206 authored and wForget committed Dec 6, 2023
1 parent 52d25c7 commit e779b42
Showing 19 changed files with 26 additions and 26 deletions.
@@ -44,7 +44,7 @@ object KyuubiSparkSQLCommonExtension {

     extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)

-    extensions.injectPostHocResolutionRule(session => MarkNumOutputColumnsRule(session))
+    extensions.injectPostHocResolutionRule(MarkNumOutputColumnsRule(_))
     extensions.injectQueryStagePrepRule(FinalStageConfigIsolation(_))
   }
 }
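The removed builder and its replacement are the same function: `MarkNumOutputColumnsRule(_)` is placeholder syntax for `session => MarkNumOutputColumnsRule(session)`. A self-contained sketch, with a hypothetical `DemoRule` standing in for the real rule:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

// Hypothetical stand-in for MarkNumOutputColumnsRule.
case class DemoRule(session: SparkSession) extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan
}

object BuilderForms {
  // All three values are the same SparkSession => Rule[LogicalPlan] builder.
  val verbose: SparkSession => Rule[LogicalPlan] = session => DemoRule(session)
  val placeholder: SparkSession => Rule[LogicalPlan] = DemoRule(_)
  val etaExpanded: SparkSession => Rule[LogicalPlan] = DemoRule.apply
}
```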
@@ -39,7 +39,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions => Unit) {
     extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)

     // watchdog extension
-    extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+    extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
     extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
     extensions.injectPlannerStrategy(MaxScanStrategy)
   }
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions => Unit) {
     extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)

     // watchdog extension
-    extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+    extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
     extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
     extensions.injectPlannerStrategy(MaxScanStrategy)
   }
@@ -44,7 +44,7 @@ object KyuubiSparkSQLCommonExtension {

     extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)

-    extensions.injectPostHocResolutionRule(session => MarkNumOutputColumnsRule(session))
+    extensions.injectPostHocResolutionRule(MarkNumOutputColumnsRule(_))
     extensions.injectQueryStagePrepRule(FinalStageConfigIsolation(_))
   }
 }
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions => Unit) {
     extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)

     // watchdog extension
-    extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+    extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
     extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
     extensions.injectPlannerStrategy(MaxScanStrategy)

@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions => Unit) {
     extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)

     // watchdog extension
-    extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+    extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
     extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
     extensions.injectPlannerStrategy(MaxScanStrategy)

@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ScriptTransform

 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}

-class KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with SQLConfHelper {
+object KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with SQLConfHelper {
   override def apply(plan: LogicalPlan): Unit =
     conf.getConf(KyuubiSQLConf.SCRIPT_TRANSFORMATION_ENABLED) match {
       case false => plan foreach {
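With the `object` form, `KyuubiUnsupportedOperationsCheck` is itself a `LogicalPlan => Unit` value, which is why the injection sites above can drop `new`. A minimal sketch of the same shape (`DemoOperationsCheck` is a hypothetical stand-in; the real check walks the plan and consults the SQL conf):

```scala
import org.apache.spark.sql.SparkSessionExtensions
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// A singleton check rule: the object is a shared function value.
object DemoOperationsCheck extends (LogicalPlan => Unit) {
  override def apply(plan: LogicalPlan): Unit = ()  // real check inspects `plan`
}

object DemoRegistration {
  def register(extensions: SparkSessionExtensions): Unit =
    extensions.injectCheckRule(_ => DemoOperationsCheck)
}
```

Sharing one instance across sessions is safe here only because the object keeps no per-session mutable state; `SQLConfHelper.conf` resolves the active session's configuration at call time.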
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions => Unit) {
     extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)

     // watchdog extension
-    extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+    extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
     extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
     extensions.injectPlannerStrategy(MaxScanStrategy)

@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ScriptTransform

 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}

-class KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with SQLConfHelper {
+object KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with SQLConfHelper {
   override def apply(plan: LogicalPlan): Unit =
     conf.getConf(KyuubiSQLConf.SCRIPT_TRANSFORMATION_ENABLED) match {
       case false => plan foreach {
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ScriptTransform

 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}

-class KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with SQLConfHelper {
+object KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with SQLConfHelper {
   override def apply(plan: LogicalPlan): Unit =
     conf.getConf(KyuubiSQLConf.SCRIPT_TRANSFORMATION_ENABLED) match {
       case false => plan foreach {
@@ -45,16 +45,16 @@ class RangerSparkExtension extends (SparkSessionExtensions => Unit) {

   override def apply(v1: SparkSessionExtensions): Unit = {
     v1.injectCheckRule(AuthzConfigurationChecker)
-    v1.injectResolutionRule(_ => new RuleReplaceShowObjectCommands())
-    v1.injectResolutionRule(_ => new RuleApplyPermanentViewMarker())
-    v1.injectResolutionRule(_ => new RuleApplyTypeOfMarker())
+    v1.injectResolutionRule(_ => RuleReplaceShowObjectCommands)
+    v1.injectResolutionRule(_ => RuleApplyPermanentViewMarker)
+    v1.injectResolutionRule(_ => RuleApplyTypeOfMarker)
     v1.injectResolutionRule(RuleApplyRowFilter)
     v1.injectResolutionRule(RuleApplyDataMaskingStage0)
     v1.injectResolutionRule(RuleApplyDataMaskingStage1)
-    v1.injectOptimizerRule(_ => new RuleEliminateMarker())
-    v1.injectOptimizerRule(new RuleAuthorization(_))
-    v1.injectOptimizerRule(new RuleEliminatePermanentViewMarker(_))
-    v1.injectOptimizerRule(_ => new RuleEliminateTypeOf())
-    v1.injectPlannerStrategy(new FilterDataSourceV2Strategy(_))
+    v1.injectOptimizerRule(_ => RuleEliminateMarker)
+    v1.injectOptimizerRule(RuleAuthorization)
+    v1.injectOptimizerRule(RuleEliminatePermanentViewMarker)
+    v1.injectOptimizerRule(_ => RuleEliminateTypeOf)
+    v1.injectPlannerStrategy(FilterDataSourceV2Strategy)
   }
 }
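The line `v1.injectOptimizerRule(RuleAuthorization)` deserves a note: it compiles because the companion object of a one-parameter case class conforms to `Function1`, here `SparkSession => RuleAuthorization`. A standalone sketch of the mechanism, using a hypothetical `DemoAuthRule`:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

// Hypothetical session-scoped rule mirroring RuleAuthorization's shape.
case class DemoAuthRule(spark: SparkSession) extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan
}

object CompanionAsBuilder {
  // The synthetic companion conforms to Function1, so both values are equivalent.
  val direct: SparkSession => Rule[LogicalPlan] = DemoAuthRule
  val wrapped: SparkSession => Rule[LogicalPlan] = spark => new DemoAuthRule(spark)
}
```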
@@ -29,7 +29,7 @@ import org.apache.kyuubi.plugin.spark.authz.ranger.SparkRangerAdminPlugin._
 import org.apache.kyuubi.plugin.spark.authz.rule.Authorization
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._

-class RuleAuthorization(spark: SparkSession) extends Authorization(spark) {
+case class RuleAuthorization(spark: SparkSession) extends Authorization(spark) {
   override def checkPrivileges(spark: SparkSession, plan: LogicalPlan): Unit = {
     val auditHandler = new SparkRangerAuditHandler
     val ugi = getAuthzUgi(spark.sparkContext)
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.kyuubi.plugin.spark.authz.rule.datamasking.{DataMaskingStage0Marker, DataMaskingStage1Marker}
 import org.apache.kyuubi.plugin.spark.authz.rule.rowfilter.RowFilterMarker

-class RuleEliminateMarker extends Rule[LogicalPlan] {
+object RuleEliminateMarker extends Rule[LogicalPlan] {
   override def apply(plan: LogicalPlan): LogicalPlan = {
     plan.transformUp { case p =>
       p.transformExpressionsUp {
@@ -27,7 +27,7 @@ import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMark
 /**
  * Transforming up [[PermanentViewMarker]]
  */
-class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends Rule[LogicalPlan] {
+case class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends Rule[LogicalPlan] {

   def eliminatePVM(plan: LogicalPlan): LogicalPlan = {
     plan.transformUp {
@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.rules.Rule

 import org.apache.kyuubi.plugin.spark.authz.rule.expression.TypeOfPlaceHolder

-class RuleEliminateTypeOf extends Rule[LogicalPlan] {
+object RuleEliminateTypeOf extends Rule[LogicalPlan] {
   override def apply(plan: LogicalPlan): LogicalPlan = {
     plan.transformUp { case p =>
       p.transformExpressionsUp {
@@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.expressions.TypeOf
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule

-class RuleApplyTypeOfMarker extends Rule[LogicalPlan] {
+object RuleApplyTypeOfMarker extends Rule[LogicalPlan] {

   override def apply(plan: LogicalPlan): LogicalPlan = {
     plan transformAllExpressions {
@@ -31,7 +31,7 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
  * [[PermanentViewMarker]] must be transformed up later
  * in [[org.apache.kyuubi.plugin.spark.authz.rule.RuleEliminatePermanentViewMarker]] optimizer.
  */
-class RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {
+object RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {

   private def resolveSubqueryExpression(
       plan: LogicalPlan,
@@ -20,7 +20,7 @@ import org.apache.spark.sql.{SparkSession, Strategy}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
 import org.apache.spark.sql.execution.SparkPlan

-class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
+case class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
   override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
     // For Spark 3.1 and below, `ColumnPruning` rule will set `ObjectFilterPlaceHolder#child` to
     // `Project`
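The same companion-as-builder shorthand applies to planner strategies, since `injectPlannerStrategy` expects a `SparkSession => Strategy`. A hypothetical sketch:

```scala
import org.apache.spark.sql.{SparkSession, SparkSessionExtensions, Strategy}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan

// Hypothetical strategy mirroring FilterDataSourceV2Strategy's shape.
case class DemoV2Strategy(spark: SparkSession) extends Strategy {
  // Returning Nil means this strategy offers no plan and the planner falls through.
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = Nil
}

object StrategyRegistration {
  def register(extensions: SparkSessionExtensions): Unit =
    // The companion conforms to SparkSession => Strategy, so no wrapping lambda is needed.
    extensions.injectPlannerStrategy(DemoV2Strategy)
}
```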
@@ -29,7 +29,7 @@ import org.apache.kyuubi.plugin.spark.authz.ranger.{AccessRequest, AccessResourc
 import org.apache.kyuubi.plugin.spark.authz.util.{AuthZUtils, WithInternalChildren}
 import org.apache.kyuubi.util.reflect.ReflectUtils._

-class RuleReplaceShowObjectCommands extends Rule[LogicalPlan] {
+object RuleReplaceShowObjectCommands extends Rule[LogicalPlan] {
   override def apply(plan: LogicalPlan): LogicalPlan = plan match {
     case r: RunnableCommand if r.nodeName == "ShowTablesCommand" => FilteredShowTablesCommand(r)
     case n: LogicalPlan if n.nodeName == "ShowTables" =>
