[KYUUBI apache#4658] [Authz] [Bug] Fix InsertIntoHiveDirCommand classname so that the query inside it can be extracted during authorization.

### _Why are the changes needed?_

To fix apache#4658.
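
The authz plugin looks up each command spec by the command's fully qualified class name (the `classname` fields in `table_command_spec.json` below), so registering `InsertIntoHiveDirCommand` under the wrong package means the lookup never matches and the command's nested query is never authorized. A minimal sketch of that failure mode, using illustrative names rather than the actual Kyuubi API:

```scala
// Minimal sketch with illustrative (non-Kyuubi) names: the command spec is
// keyed by the command's fully qualified class name, so a wrong key means the
// nested query is never extracted and no SELECT privilege is checked on its
// source tables.
val querySpecs: Map[String, String] = Map(
  // correct key: the class really lives in org.apache.spark.sql.hive.execution;
  // before this fix it was registered under ...execution.datasources and never matched
  "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand" ->
    "LogicalPlanQueryExtractor"
)

def queryExtractorFor(command: AnyRef): Option[String] =
  querySpecs.get(command.getClass.getName) // None => authz silently skips the query
```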

### _How was this patch tested?_

Add a unit test that runs an InsertIntoHiveDirCommand whose query reads from a table the user has no permission on.
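
For context, a hedged sketch (the `SparkSession` setup, output paths, and `src` table below are assumptions) of which Spark command each `INSERT OVERWRITE DIRECTORY` form produces; the new test uses the Hive `ROW FORMAT DELIMITED` syntax because only that form is planned as `InsertIntoHiveDirCommand`:

```scala
import org.apache.spark.sql.SparkSession

// Illustrative local session with Hive support; assumes a table named `src` exists
// and that the statements below are allowed to run (commands execute eagerly).
val spark = SparkSession.builder()
  .master("local[*]")
  .enableHiveSupport()
  .getOrCreate()

// Hive directory syntax -> org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand
val hiveDirPlan = spark.sql(
  """INSERT OVERWRITE DIRECTORY '/tmp/hive_dir'
    |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    |SELECT * FROM src""".stripMargin).queryExecution.analyzed

// Datasource directory syntax -> org.apache.spark.sql.execution.datasources.InsertIntoDataSourceDirCommand
val dsDirPlan = spark.sql(
  """INSERT OVERWRITE DIRECTORY '/tmp/ds_dir'
    |USING parquet
    |SELECT * FROM src""".stripMargin).queryExecution.analyzed

println(hiveDirPlan.getClass.getName)
println(dsDirPlan.getClass.getName)
```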

- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request

Closes apache#4660 from Karsonnel/4658-authz-insert.

Closes apache#4658

1dfb60e [Karsonnel] fix style
8063ec0 [Karsonnel] Update PrivilegesBuilderSuite.scala
4c6c8e1 [Karsonnel] add a test in privilegeBuilderSuite
5c652d3 [root] fix InsertIntoHiveDirCommand classname

Lead-authored-by: Karsonnel <747100667@qq.com>
Co-authored-by: root <root@example.com>
Signed-off-by: Cheng Pan <chengpan@apache.org>
2 people authored and pan3793 committed Apr 4, 2023
1 parent 0c6ba94 commit 7a83901
Showing 4 changed files with 46 additions and 3 deletions.
@@ -1244,7 +1244,7 @@
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ]
}, {
"classname" : "org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand",
"classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
"tableDescs" : [ ],
"opType" : "QUERY",
"queryDescs" : [ {
@@ -1546,7 +1546,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
}

test("InsertIntoHiveDirCommand") {
test("InsertIntoDataSourceDirCommand") {
assume(!isSparkV2)
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
@@ -1572,6 +1572,32 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(out.isEmpty)
}

test("InsertIntoHiveDirCommand") {
assume(!isSparkV2)
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
val plan = sql(
s"""
|INSERT OVERWRITE DIRECTORY '$directory.path'
|ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
|SELECT * FROM $reusedPartTable""".stripMargin)
.queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === QUERY)
assert(in.size === 1)
val po0 = in.head
assert(po0.actionType === PrivilegeObjectActionType.OTHER)
assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assert(po0.dbname equalsIgnoreCase reusedDb)
assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last)
assert(po0.columns === Seq("key", "value", "pid"))
checkTableOwner(po0)
val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
assert(accessType0 === AccessType.SELECT)

assert(out.isEmpty)
}

test("InsertIntoHiveTableCommand") {
assume(!isSparkV2)
val tableName = "InsertIntoHiveTable"
@@ -637,7 +637,7 @@ object TableCommands {
"org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"),
InsertIntoHadoopFsRelationCommand,
InsertIntoDataSourceDir.copy(classname =
"org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand"),
"org.apache.spark.sql.execution.datasources.InsertIntoDataSourceDirCommand"),
InsertIntoHiveTable,
LoadData,
MergeIntoTable,
@@ -707,4 +707,21 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
sql(s"SHOW TABLES IN $db").queryExecution.optimizedPlan.stats
}
}

test("[KYUUBI #4658] INSERT OVERWRITE DIRECTORY did check query permission") {
val db1 = "default"
val table = "src"

withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
doAs("bob", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name string)"))
val e1 = intercept[AccessControlException](
doAs(
"someone",
sql(
s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir' ROW FORMAT DELIMITED FIELDS
| TERMINATED BY ','
| SELECT * FROM $db1.$table;""".stripMargin)))
assert(e1.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id"))
}
}
}
