Skip to content

Commit

Permalink
[KYUUBI #5726][AUTHZ] Support optimize path-based table for Delta Lake in Authz

Browse files Browse the repository at this point in the history

# 🔍 Description
## Issue References 🔗

This pull request fixes #5726.

## Describe Your Solution 🔧

Add `uriDescs` to the command spec for `org.apache.spark.sql.delta.commands.OptimizeTableCommand`, so path-based (URI) privileges are checked when the command is authorized.

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚰️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests

org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite.test("optimize path-based table")

---

# Checklists
## 📝 Author Self Checklist

- [x] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
- [x] I have performed a self-review
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [x] My changes generate no new warnings
- [x] I have added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

## 📝 Committer Pre-Merge Checklist

- [x] Pull request title is okay.
- [x] No license issues.
- [x] Milestone correctly set?
- [x] Test coverage is ok
- [x] Assignees are selected.
- [x] Minimum number of approvals
- [x] No changes are requested

**Be nice. Be informative.**

Closes #5732 from zml1206/KYUUBI-5726.

Closes #5726

e4cab7a [zml1206] update
62c52b2 [zml1206] Support optimize path-based table for Delta Lake in Authz

Authored-by: zml1206 <zhuml1206@gmail.com>
Signed-off-by: Kent Yao <yao@apache.org>
  • Loading branch information
zml1206 authored and yaooqinn committed Nov 21, 2023
1 parent c6accec commit 9306e57
Show file tree
Hide file tree
Showing 6 changed files with 78 additions and 13 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,10 @@ org.apache.kyuubi.plugin.spark.authz.serde.IdentifierURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.PartitionLocsSeqURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.PropertiesLocationUriExtractor
org.apache.kyuubi.plugin.spark.authz.serde.PropertiesPathUriExtractor
org.apache.kyuubi.plugin.spark.authz.serde.ResolvedTableURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringSeqURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.SubqueryAliasURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.TableIdentifierOptionURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.TableIdentifierURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.TableSpecURIExtractor
Original file line number Diff line number Diff line change
Expand Up @@ -2226,7 +2226,19 @@
} ],
"opType" : "ALTERTABLE_COMPACT",
"queryDescs" : [ ],
"uriDescs" : [ ]
"uriDescs" : [ {
"fieldName" : "child",
"fieldExtractor" : "ResolvedTableURIExtractor",
"isInput" : false
}, {
"fieldName" : "tableId",
"fieldExtractor" : "TableIdentifierOptionURIExtractor",
"isInput" : false
}, {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
"isInput" : false
} ]
}, {
"classname" : "org.apache.spark.sql.delta.commands.UpdateCommand",
"tableDescs" : [ {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@

package org.apache.kyuubi.plugin.spark.authz.serde

import java.util.{Map => JMap}
import java.util.LinkedHashMap
import java.util.{LinkedHashMap, Map => JMap}

import scala.collection.JavaConverters._

Expand Down Expand Up @@ -81,14 +80,18 @@ object TableExtractor {
/**
 * Extracts a [[Table]] from a `TableIdentifier`.
 *
 * Path-based identifiers (e.g. the table part of delta.`/some/path`) are not
 * catalog tables, so they yield `None` here; such commands are expected to be
 * authorized through URI extractors instead.
 */
class TableIdentifierTableExtractor extends TableExtractor {
  override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = {
    val identifier = v1.asInstanceOf[TableIdentifier]
    if (isPathIdentifier(identifier.table, spark)) {
      // A path, not a catalog table — no table-level privilege object.
      None
    } else {
      // Owner lookup is best-effort: the table may be absent from the session
      // catalog (temporary, or not yet created), in which case no owner is
      // attached rather than failing authorization.
      val owner =
        try {
          val catalogTable = spark.sessionState.catalog.getTableMetadata(identifier)
          Option(catalogTable.owner).filter(_.nonEmpty)
        } catch {
          case _: Exception => None
        }
      Some(Table(None, identifier.database, identifier.table, owner))
    }
  }
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.kyuubi.plugin.spark.authz.serde

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.connector.catalog.Identifier
Expand Down Expand Up @@ -133,3 +134,26 @@ class DataSourceV2RelationURIExtractor extends URIExtractor {
}
}
}

/**
 * URI extractor for `ResolvedTable` plan nodes: reflectively reads the node's
 * `identifier` field and delegates URI resolution to [[IdentifierURIExtractor]].
 */
class ResolvedTableURIExtractor extends URIExtractor {
  override def apply(spark: SparkSession, v1: AnyRef): Seq[Uri] =
    lookupExtractor[IdentifierURIExtractor]
      .apply(spark, invokeAs[AnyRef](v1, "identifier"))
}

/**
 * URI extractor for `TableIdentifier` values. When the table part is actually
 * a path (as reported by `isPathIdentifier`, e.g. delta.`/some/path`) it is
 * surfaced as a single [[Uri]]; any other value produces no URIs.
 */
class TableIdentifierURIExtractor extends URIExtractor {
  override def apply(spark: SparkSession, v1: AnyRef): Seq[Uri] = v1 match {
    case id: TableIdentifier if isPathIdentifier(id.table, spark) => Seq(Uri(id.table))
    case _ => Nil
  }
}

/**
 * URI extractor for `Option[TableIdentifier]` fields: unwraps a defined
 * identifier and delegates to [[TableIdentifierURIExtractor]]; `None` (or any
 * non-matching value) yields no URIs.
 */
class TableIdentifierOptionURIExtractor extends URIExtractor {
  override def apply(spark: SparkSession, v1: AnyRef): Seq[Uri] = v1 match {
    case Some(id: TableIdentifier) =>
      lookupExtractor[TableIdentifierURIExtractor].apply(spark, id)
    case _ => Nil
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,11 @@ object DeltaCommands extends CommandSpecs[TableCommandSpec] {
val cmd = "org.apache.spark.sql.delta.commands.OptimizeTableCommand"
val childDesc = TableDesc("child", classOf[ResolvedTableTableExtractor])
val tableDesc = TableDesc("tableId", classOf[TableIdentifierOptionTableExtractor])
TableCommandSpec(cmd, Seq(childDesc, tableDesc), ALTERTABLE_COMPACT)
val uriDescs = Seq(
UriDesc("child", classOf[ResolvedTableURIExtractor]),
UriDesc("tableId", classOf[TableIdentifierOptionURIExtractor]),
UriDesc("path", classOf[StringURIExtractor]))
TableCommandSpec(cmd, Seq(childDesc, tableDesc), ALTERTABLE_COMPACT, uriDescs = uriDescs)
}

val VacuumTableCommand = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -283,7 +283,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
}

test("optimize table") {
assume(isSparkV32OrGreater)
assume(isSparkV32OrGreater, "optimize table is available in Delta Lake 1.2.0 and above")

withCleanTmpResources(Seq((s"$namespace1.$table1", "table"), (s"$namespace1", "database"))) {
doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1"))
Expand Down Expand Up @@ -432,6 +432,25 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
}
}
}

// Regression test for KYUUBI #5726: OPTIMIZE on a path-based Delta table must
// be authorized via URI (write) privileges, for both the backtick and the
// quoted-string path syntaxes.
test("optimize path-based table") {
  // OPTIMIZE requires Delta Lake 1.2.0+, which pairs with Spark 3.2+.
  assume(isSparkV32OrGreater, "optimize table is available in Delta Lake 1.2.0 and above")

  withTempDir(path => {
    doAs(admin, sql(createPathBasedTableSql(path)))
    // Syntax 1: OPTIMIZE delta.`/path` — unprivileged user must be rejected
    // with a write-privilege error on the path; admin succeeds.
    val optimizeTableSql1 = s"OPTIMIZE delta.`$path`"
    interceptContains[AccessControlException](
      doAs(someone, sql(optimizeTableSql1)))(
      s"does not have [write] privilege on [[$path, $path/]]")
    doAs(admin, sql(optimizeTableSql1))

    // Syntax 2: OPTIMIZE '/path' — same privilege enforcement expected.
    val optimizeTableSql2 = s"OPTIMIZE '$path'"
    interceptContains[AccessControlException](
      doAs(someone, sql(optimizeTableSql2)))(
      s"does not have [write] privilege on [[$path, $path/]]")
    doAs(admin, sql(optimizeTableSql2))
  })
}
}

object DeltaCatalogRangerSparkExtensionSuite {
Expand Down

0 comments on commit 9306e57

Please sign in to comment.