Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[KYUUBI #3325] [FEATURE] [AUTHZ] Privilege checks for permanent views and skipping shadowed tables #3326

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ import org.apache.spark.sql.types.StructField
import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectType._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker

object PrivilegesBuilder {

Expand Down Expand Up @@ -130,6 +131,9 @@ object PrivilegesBuilder {
val db = quote(parts.init)
privilegeObjects += tablePrivileges(TableIdentifier(parts.last, Some(db)))

case permanentViewMarker: PermanentViewMarker =>
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

  case PermanentViewMarker(child, table) => mergeProjection(table, child)

It looks like we need to change it to the version above; we may still be projecting on top of a view.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

OK.
Changed to mergeProjection, and now checking column-level privileges for permanent views.

mergeProjection(permanentViewMarker.catalogTable, plan)

case p =>
for (child <- p.children) {
buildQuery(child, privilegeObjects, projectionList, conditionList)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package org.apache.kyuubi.plugin.spark.authz.ranger

import org.apache.spark.sql.SparkSessionExtensions

import org.apache.kyuubi.plugin.spark.authz.util.RuleEliminateMarker
import org.apache.kyuubi.plugin.spark.authz.util.{RuleEliminateMarker, RuleEliminateViewMarker}

/**
* ACL Management for Apache Spark SQL with Apache Ranger, enabling:
Expand All @@ -40,9 +40,11 @@ class RangerSparkExtension extends (SparkSessionExtensions => Unit) {

/**
 * Registers all authz extension rules into the Spark session. Resolution rules
 * run during analysis; optimizer rules run afterwards, so markers added at
 * resolution time are consumed (RuleAuthorization) and then removed
 * (RuleEliminateMarker / RuleEliminateViewMarker) before planning.
 */
override def apply(v1: SparkSessionExtensions): Unit = {
// Rewrites SHOW-like commands so their output is filtered by privileges.
v1.injectResolutionRule(_ => new RuleReplaceShowObjectCommands())
// Wraps resolved permanent views in PermanentViewMarker so privilege
// checks can see the view's catalogTable instead of the shadowed tables.
v1.injectResolutionRule(_ => new RuleApplyPermanentViewMarker())
// Applies Ranger row-filter and data-masking policies to scans.
v1.injectResolutionRule(new RuleApplyRowFilterAndDataMasking(_))
// Strips row-filter/masking markers once they have served their purpose.
v1.injectOptimizerRule(_ => new RuleEliminateMarker())
// Performs the actual Ranger privilege check on the optimized plan.
v1.injectOptimizerRule(new RuleAuthorization(_))
// Must run after RuleAuthorization: removes PermanentViewMarker nodes
// so the marker never reaches physical planning.
v1.injectOptimizerRule(_ => new RuleEliminateViewMarker())
v1.injectPlannerStrategy(new FilterDataSourceV2Strategy(_))
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.kyuubi.plugin.spark.authz.ranger

import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}
import org.apache.spark.sql.catalyst.rules.Rule

import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker

/**
* Adding [[org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker]] for permanent views
* for marking catalogTable of views used by privilege checking
* in [[org.apache.kyuubi.plugin.spark.authz.ranger.RuleAuthorization]].
* [[org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker]] must be transformed up later
* in [[org.apache.kyuubi.plugin.spark.authz.util.RuleEliminateViewMarker]] optimizer.
*/
/**
 * Wraps every resolved, non-temporary [[org.apache.spark.sql.catalyst.plans.logical.View]]
 * in a [[org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker]], carrying the
 * view's catalogTable for privilege checking in
 * [[org.apache.kyuubi.plugin.spark.authz.ranger.RuleAuthorization]].
 * The marker must later be removed by
 * [[org.apache.kyuubi.plugin.spark.authz.util.RuleEliminateViewMarker]] in the optimizer.
 */
class RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {

  override def apply(plan: LogicalPlan): LogicalPlan =
    plan mapChildren { child =>
      child match {
        // Already marked: return as-is so repeated applications are idempotent.
        case marker: PermanentViewMarker => marker
        // A resolved permanent view: attach its catalog metadata for authz.
        case view: View if hasResolvedPermanentView(view) =>
          PermanentViewMarker(view, view.desc)
        // Any other node: keep descending into its children.
        case other => apply(other)
      }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.{SPARK_VERSION, SparkContext}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}

private[authz] object AuthZUtils {

Expand Down Expand Up @@ -98,6 +98,15 @@ private[authz] object AuthZUtils {
}
}

/**
 * Returns true iff the given plan node is a resolved, permanent
 * (non-temporary) view.
 *
 * NOTE(review): the `isTempView` field only exists on `View` from Spark
 * 3.1.0 onwards, so on older Spark versions this always answers false.
 */
def hasResolvedPermanentView(plan: LogicalPlan): Boolean = plan match {
  case view: View =>
    // Short-circuits before reflection so `getFieldVal` is only reached
    // when the field is guaranteed to exist.
    view.resolved && isSparkVersionAtLeast("3.1.0") &&
      !getFieldVal[Boolean](view, "isTempView")
  case _ => false
}

def isSparkVersionAtMost(targetVersionString: String): Boolean = {
SemanticVersion(SPARK_VERSION).isVersionAtMost(targetVersionString)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.kyuubi.plugin.spark.authz.util

import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryNode}

/**
 * Marker node wrapping the resolved logical plan of a permanent view and
 * carrying the view's [[CatalogTable]], so privilege checks can authorize
 * against the view itself rather than the tables its definition reads.
 * Eliminated by RuleEliminateViewMarker before execution.
 *
 * @param child        the resolved logical plan of the view definition
 * @param catalogTable the catalog metadata of the permanent view
 */
case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) extends UnaryNode
with WithInternalChild {

// Transparent wrapper: exposes exactly the child's output attributes.
override def output: Seq[Attribute] = child.output

override def withNewChildInternal(newChild: LogicalPlan): LogicalPlan =
copy(child = newChild)

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.kyuubi.plugin.spark.authz.util

import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

/**
* Transforming up [[org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker]]
*/
/**
 * Removes every [[PermanentViewMarker]] from the plan by substituting the
 * marker with its wrapped child, bottom-up. Runs in the optimizer after
 * authorization has consumed the marker's metadata.
 */
class RuleEliminateViewMarker extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan =
    plan.transformUp {
      case marker: PermanentViewMarker => marker.child
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
case (t, "table") => doAs("admin", sql(s"DROP TABLE IF EXISTS $t"))
case (db, "database") => doAs("admin", sql(s"DROP DATABASE IF EXISTS $db"))
case (fn, "function") => doAs("admin", sql(s"DROP FUNCTION IF EXISTS $fn"))
case (view, "view") => doAs("admin", sql(s"DROP VIEW IF EXISTS $view"))
case (_, e) =>
throw new RuntimeException(s"the resource whose resource type is $e cannot be cleared")
}
Expand Down Expand Up @@ -526,12 +527,16 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {

test("[KYUUBI #3343] check persisted view creation") {
val table = "hive_src"
val adminPermView = "admin_perm_view"
val permView = "perm_view"

withCleanTmpResources(Seq((table, "table"))) {
withCleanTmpResources(Seq(
(adminPermView, "view"),
(permView, "view"),
(table, "table"))) {
doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $table (id int)"))

doAs("admin", sql(s"CREATE VIEW admin_perm_view AS SELECT * FROM $table"))
doAs("admin", sql(s"CREATE VIEW ${adminPermView} AS SELECT * FROM $table"))

val e1 = intercept[AccessControlException](
doAs("someone", sql(s"CREATE VIEW $permView AS SELECT 1 as a")))
Expand All @@ -546,4 +551,29 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
}
}
}

test("[KYUUBI #3326] check persisted view and skip shadowed table") {
  val table = "hive_src"
  val permView = "perm_view"
  val db1 = "default"
  val db2 = "db2"

  withCleanTmpResources(Seq(
    (s"$db1.$table", "table"),
    (s"$db2.$permView", "view"),
    (db2, "database"))) {
    doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int)"))

    doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS $db2"))
    doAs("admin", sql(s"CREATE VIEW $db2.$permView AS SELECT * FROM $table"))

    // `sql(...)` only builds a lazy DataFrame; the authz optimizer rule fires
    // at execution time. `.show(0)` therefore must run INSIDE doAs, otherwise
    // the privilege check would execute as the test-process user instead of
    // "someone" (original code had `.show(0)` outside the doAs body).
    val e1 = intercept[AccessControlException](
      doAs("someone", sql(s"select * from $db2.$permView").show(0)))
    if (isSparkV31OrGreater) {
      // Spark >= 3.1: the view is checked itself, shadowed table is skipped.
      assert(e1.getMessage.contains(s"does not have [select] privilege on [$db2/$permView/id]"))
    } else {
      // Older Spark: no permanent-view marker, so the underlying table is checked.
      assert(e1.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id]"))
    }
  }
}
}