From 7a83901ea24a8035fbbed8de1e20b05712becfef Mon Sep 17 00:00:00 2001
From: Karsonnel <747100667@qq.com>
Date: Tue, 4 Apr 2023 13:46:01 +0800
Subject: [PATCH] [KYUUBI #4658] [Authz] [Bug] Fix InsertIntoHiveDirCommand classname so that the query inside it can be extracted during authorization

### _Why are the changes needed?_

To fix https://github.com/apache/kyuubi/issues/4658: the command spec registered `InsertIntoHiveDirCommand` under `org.apache.spark.sql.execution.datasources` instead of its actual package `org.apache.spark.sql.hive.execution`, so the command was never matched and the query inside it was never authorized (a short sketch of this lookup is appended after the diff).

### _How was this patch tested?_

Added a unit test that runs an `InsertIntoHiveDirCommand` whose query selects from a table the user has no permission to read.

- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #4660 from Karsonnel/4658-authz-insert.

Closes #4658

1dfb60ea4 [Karsonnel] fix style
8063ec067 [Karsonnel] Update PrivilegesBuilderSuite.scala
4c6c8e1e2 [Karsonnel] add a test in privilegeBuilderSuite
5c652d3df [root] fix InsertIntoHiveDirCommand classname

Lead-authored-by: Karsonnel <747100667@qq.com>
Co-authored-by: root
Signed-off-by: Cheng Pan
---
 .../main/resources/table_command_spec.json    |  2 +-
 .../spark/authz/PrivilegesBuilderSuite.scala  | 28 ++++++++++++++++++-
 .../spark/authz/gen/TableCommands.scala       |  2 +-
 .../ranger/RangerSparkExtensionSuite.scala    | 17 +++++++++++
 4 files changed, 46 insertions(+), 3 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index f1c2297b38e..81ccd8da085 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1244,7 +1244,7 @@
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ]
 }, {
-  "classname" : "org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand",
+  "classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
   "tableDescs" : [ ],
   "opType" : "QUERY",
   "queryDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index 43929091769..e9483eb34ba 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1546,7 +1546,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
     }
   }
 
-  test("InsertIntoHiveDirCommand") {
+  test("InsertIntoDataSourceDirCommand") {
     assume(!isSparkV2)
     val tableDirectory = getClass.getResource("/").getPath + "table_directory"
     val directory = File(tableDirectory).createDirectory()
@@ -1572,6 +1572,32 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
     assert(out.isEmpty)
   }
 
+  test("InsertIntoHiveDirCommand") {
+    assume(!isSparkV2)
+    val tableDirectory = getClass.getResource("/").getPath + "table_directory"
+    val directory = File(tableDirectory).createDirectory()
+    val plan = sql(
+      s"""
+         |INSERT OVERWRITE DIRECTORY '$directory.path'
+         |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+         |SELECT * FROM $reusedPartTable""".stripMargin)
+      .queryExecution.analyzed
+    val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
+    assert(operationType === QUERY)
+    assert(in.size === 1)
+    val po0 = in.head
+    assert(po0.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assert(po0.dbname equalsIgnoreCase reusedDb)
+    assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last)
+    assert(po0.columns === Seq("key", "value", "pid"))
+    checkTableOwner(po0)
+    val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
+    assert(accessType0 === AccessType.SELECT)
+
+    assert(out.isEmpty)
+  }
+
   test("InsertIntoHiveTableCommand") {
     assume(!isSparkV2)
     val tableName = "InsertIntoHiveTable"
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index a8b8121e2b0..7bf01b43f89 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -637,7 +637,7 @@ object TableCommands {
       "org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"),
     InsertIntoHadoopFsRelationCommand,
     InsertIntoDataSourceDir.copy(classname =
-      "org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand"),
+      "org.apache.spark.sql.execution.datasources.InsertIntoDataSourceDirCommand"),
     InsertIntoHiveTable,
     LoadData,
     MergeIntoTable,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index 4ccf15cba98..2d108615e4c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -707,4 +707,21 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       sql(s"SHOW TABLES IN $db").queryExecution.optimizedPlan.stats
     }
   }
+
+  test("[KYUUBI #4658] INSERT OVERWRITE DIRECTORY did check query permission") {
+    val db1 = "default"
+    val table = "src"
+
+    withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
+      doAs("bob", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name string)"))
+      val e1 = intercept[AccessControlException](
+        doAs(
+          "someone",
+          sql(
+            s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir' ROW FORMAT DELIMITED FIELDS
+               | TERMINATED BY ','
+               | SELECT * FROM $db1.$table;""".stripMargin)))
+      assert(e1.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id"))
+    }
+  }
 }
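
### _Illustration: why the fully qualified classname matters_

The entries in `table_command_spec.json` are keyed by a command's fully qualified class name, which is why the `org.apache.spark.sql.execution.datasources` spelling left `InsertIntoHiveDirCommand` unmatched and its query unchecked. The sketch below is only an illustration of that lookup under the stated assumption; `ClassnameLookupSketch`, `preFixSpecs`, and `postFixSpecs` are hypothetical names, not the plugin's real API.

```scala
// Minimal sketch (hypothetical names): specs are looked up by the command's
// fully qualified class name, so an entry registered under the wrong package
// silently never matches and the SELECT inside INSERT OVERWRITE DIRECTORY is
// never handed to the privilege check.
object ClassnameLookupSketch extends App {

  // The class name Spark actually reports for the Hive dir-insert command.
  val runtimeClassname = "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand"

  // Spec entries: fully qualified class name -> query extractor name.
  val preFixSpecs = Map(
    "org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand" -> "LogicalPlanQueryExtractor")
  val postFixSpecs = Map(
    "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand" -> "LogicalPlanQueryExtractor")

  // Pre-fix: the lookup misses, so no query extractor runs for the command.
  println(preFixSpecs.get(runtimeClassname))  // None
  // Post-fix: the extractor is found and the embedded query can be authorized.
  println(postFixSpecs.get(runtimeClassname)) // Some(LogicalPlanQueryExtractor)
}
```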