Skip to content

Commit bade427

Browse files
committed
[KYUUBI #5780][AUTHZ] Kyuubi treats PVM as LeafNode to make logic simpler
1 parent 7f02809 commit bade427

File tree

7 files changed

+21
-32
lines changed

7 files changed

+21
-32
lines changed

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@ import org.slf4j.LoggerFactory
2828
import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
2929
import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
3030
import org.apache.kyuubi.plugin.spark.authz.rule.Authorization._
31-
import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMarker
3231
import org.apache.kyuubi.plugin.spark.authz.rule.rowfilter._
3332
import org.apache.kyuubi.plugin.spark.authz.serde._
3433
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
@@ -68,13 +67,7 @@ object PrivilegesBuilder {
6867

6968
def mergeProjection(table: Table, plan: LogicalPlan): Unit = {
7069
if (projectionList.isEmpty) {
71-
plan match {
72-
case pvm: PermanentViewMarker
73-
if pvm.isSubqueryExpressionPlaceHolder || pvm.output.isEmpty =>
74-
privilegeObjects += PrivilegeObject(table, pvm.outputColNames)
75-
case _ =>
76-
privilegeObjects += PrivilegeObject(table, plan.output.map(_.name))
77-
}
70+
privilegeObjects += PrivilegeObject(table, plan.output.map(_.name))
7871
} else {
7972
val cols = (projectionList ++ conditionList).flatMap(collectLeaves)
8073
.filter(plan.outputSet.contains).map(_.name).distinct

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ class RangerSparkExtension extends (SparkSessionExtensions => Unit) {
5353
v1.injectResolutionRule(RuleApplyDataMaskingStage1)
5454
v1.injectOptimizerRule(_ => new RuleEliminateMarker())
5555
v1.injectOptimizerRule(new RuleAuthorization(_))
56-
v1.injectOptimizerRule(_ => new RuleEliminatePermanentViewMarker())
56+
v1.injectOptimizerRule(new RuleEliminatePermanentViewMarker(_))
5757
v1.injectOptimizerRule(_ => new RuleEliminateTypeOf())
5858
v1.injectPlannerStrategy(new FilterDataSourceV2Strategy(_))
5959
}

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/Authorization.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ import org.apache.spark.sql.catalyst.trees.TreeNodeTag
2424
import org.apache.spark.sql.execution.SQLExecution.EXECUTION_ID_KEY
2525

2626
import org.apache.kyuubi.plugin.spark.authz.rule.Authorization._
27-
import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMarker
2827
import org.apache.kyuubi.plugin.spark.authz.util.ReservedKeys._
2928

3029
abstract class Authorization(spark: SparkSession) extends Rule[LogicalPlan] {
@@ -51,11 +50,9 @@ object Authorization {
5150
}
5251
}
5352

54-
protected def markAuthChecked(plan: LogicalPlan): LogicalPlan = {
53+
def markAuthChecked(plan: LogicalPlan): LogicalPlan = {
5554
plan.setTagValue(KYUUBI_AUTHZ_TAG, ())
5655
plan transformDown {
57-
case pvm: PermanentViewMarker =>
58-
markAllNodesAuthChecked(pvm)
5956
case subquery: Subquery =>
6057
markAllNodesAuthChecked(subquery)
6158
}

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -20,18 +20,27 @@ package org.apache.kyuubi.plugin.spark.authz.rule
2020
import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
2121
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
2222
import org.apache.spark.sql.catalyst.rules.Rule
23-
2423
import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMarker
24+
import org.apache.spark.sql.SparkSession
2525

2626
/**
2727
* Transforming up [[PermanentViewMarker]]
2828
*/
29-
class RuleEliminatePermanentViewMarker extends Rule[LogicalPlan] {
29+
class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends Rule[LogicalPlan] {
3030
override def apply(plan: LogicalPlan): LogicalPlan = {
31-
plan.transformUp {
32-
case pvm: PermanentViewMarker => pvm.child.transformAllExpressions {
31+
var matched = false
32+
val eliminatedPVM = plan.transformUp {
33+
case pvm: PermanentViewMarker =>
34+
matched = true
35+
pvm.child.transformAllExpressions {
3336
case s: SubqueryExpression => s.withNewPlan(apply(s.plan))
3437
}
3538
}
39+
if (matched) {
40+
Authorization.markAuthChecked(eliminatedPVM)
41+
sparkSession.sessionState.optimizer.execute(eliminatedPVM)
42+
} else {
43+
eliminatedPVM
44+
}
3645
}
3746
}

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/PermanentViewMarker.scala

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -19,20 +19,15 @@ package org.apache.kyuubi.plugin.spark.authz.rule.permanentview
1919

2020
import org.apache.spark.sql.catalyst.catalog.CatalogTable
2121
import org.apache.spark.sql.catalyst.expressions.Attribute
22-
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryNode}
22+
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, LeafNode}
2323

2424
import org.apache.kyuubi.plugin.spark.authz.util.WithInternalChild
2525

26-
case class PermanentViewMarker(
27-
child: LogicalPlan,
28-
catalogTable: CatalogTable,
29-
outputColNames: Seq[String],
30-
isSubqueryExpressionPlaceHolder: Boolean = false) extends UnaryNode
26+
case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) extends LeafNode
3127
with WithInternalChild {
3228

3329
override def output: Seq[Attribute] = child.output
3430

3531
override def withNewChildInternal(newChild: LogicalPlan): LogicalPlan =
3632
copy(child = newChild)
37-
3833
}

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -38,14 +38,9 @@ class RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {
3838
case permanentView: View if hasResolvedPermanentView(permanentView) =>
3939
val resolved = permanentView.transformAllExpressions {
4040
case subquery: SubqueryExpression =>
41-
subquery.withNewPlan(plan =
42-
PermanentViewMarker(
43-
subquery.plan,
44-
permanentView.desc,
45-
permanentView.output.map(_.name),
46-
true))
41+
subquery.withNewPlan(plan = PermanentViewMarker(subquery.plan, permanentView.desc))
4742
}
48-
PermanentViewMarker(resolved, resolved.desc, resolved.output.map(_.name))
43+
PermanentViewMarker(resolved, resolved.desc)
4944
case other => apply(other)
5045
}
5146
}

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -889,7 +889,7 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
889889
sql(s"SELECT id as new_id, name, max_scope FROM $db1.$view1".stripMargin).show()))
890890
assert(e2.getMessage.contains(
891891
s"does not have [select] privilege on " +
892-
s"[$db1/$view1/id,$db1/$view1/name,$db1/$view1/max_scope,$db1/$view1/sum_age]"))
892+
s"[$db1/$view1/id,$db1/$view1/name,$db1/$view1/max_scope]"))
893893
}
894894
}
895895
}

0 commit comments

Comments
 (0)