Skip to content

Commit e0f28a6

Browse files
committed
Merge branch 'master' into KYUUBI-5594
2 parents 0ebdd5d + c1685c6 commit e0f28a6

File tree

26 files changed

+134
-56
lines changed

26 files changed

+134
-56
lines changed

.github/workflows/master.yml

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -398,9 +398,9 @@ jobs:
398398
uses: actions/checkout@v4
399399
# https://github.com/docker/build-push-action
400400
- name: Set up Docker Buildx
401-
uses: docker/setup-buildx-action@v2
401+
uses: docker/setup-buildx-action@v3
402402
- name: Build Kyuubi Docker Image
403-
uses: docker/build-push-action@v3
403+
uses: docker/build-push-action@v5
404404
with:
405405
# passthrough CI into build container
406406
build-args: |

.github/workflows/publish-snapshot-docker.yml

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -30,16 +30,16 @@ jobs:
3030
- name: Checkout
3131
uses: actions/checkout@v4
3232
- name: Set up QEMU
33-
uses: docker/setup-qemu-action@v2
33+
uses: docker/setup-qemu-action@v3
3434
- name: Set up Docker Buildx
35-
uses: docker/setup-buildx-action@v2
35+
uses: docker/setup-buildx-action@v3
3636
- name: Login to Docker Hub
37-
uses: docker/login-action@v2
37+
uses: docker/login-action@v3
3838
with:
3939
username: ${{ secrets.DOCKERHUB_USER }}
4040
password: ${{ secrets.DOCKERHUB_TOKEN }}
4141
- name: Build and Push Kyuubi Docker Image
42-
uses: docker/build-push-action@v4
42+
uses: docker/build-push-action@v5
4343
with:
4444
# build cache on Github Actions, See: https://docs.docker.com/build/cache/backends/gha/#using-dockerbuild-push-action
4545
cache-from: type=gha

extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -215,4 +215,4 @@
215215
} ],
216216
"opType" : "SWITCHDATABASE",
217217
"uriDescs" : [ ]
218-
} ]
218+
} ]

extensions/spark/kyuubi-spark-authz/src/main/resources/function_command_spec.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -111,4 +111,4 @@
111111
"comment" : ""
112112
} ],
113113
"opType" : "RELOADFUNCTION"
114-
} ]
114+
} ]

extensions/spark/kyuubi-spark-authz/src/main/resources/scan_command_spec.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -111,4 +111,4 @@
111111
"comment" : ""
112112
} ],
113113
"uriDescs" : [ ]
114-
} ]
114+
} ]

extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2528,4 +2528,4 @@
25282528
"isInput" : false,
25292529
"comment" : "Delta"
25302530
} ]
2531-
} ]
2531+
} ]

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/Authorization.scala

Lines changed: 6 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -18,13 +18,12 @@
1818
package org.apache.kyuubi.plugin.spark.authz.rule
1919

2020
import org.apache.spark.sql.SparkSession
21-
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
21+
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}
2222
import org.apache.spark.sql.catalyst.rules.Rule
2323
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
2424
import org.apache.spark.sql.execution.SQLExecution.EXECUTION_ID_KEY
2525

2626
import org.apache.kyuubi.plugin.spark.authz.rule.Authorization._
27-
import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMarker
2827
import org.apache.kyuubi.plugin.spark.authz.util.ReservedKeys._
2928

3029
abstract class Authorization(spark: SparkSession) extends Rule[LogicalPlan] {
@@ -54,18 +53,15 @@ object Authorization {
5453
def markAuthChecked(plan: LogicalPlan): LogicalPlan = {
5554
plan.setTagValue(KYUUBI_AUTHZ_TAG, ())
5655
plan transformDown {
57-
case pvm: PermanentViewMarker =>
58-
markAllNodesAuthChecked(pvm)
59-
case subquery: Subquery =>
60-
markAllNodesAuthChecked(subquery)
56+
// TODO: This line is added to support Spark 3.1; we can remove it
57
// after Spark 3.2, since https://issues.apache.org/jira/browse/SPARK-34269
58+
case view: View =>
59+
markAllNodesAuthChecked(view.child)
6160
}
6261
}
6362

6463
protected def isAuthChecked(plan: LogicalPlan): Boolean = {
65-
plan match {
66-
case subquery: Subquery => isAuthChecked(subquery.child)
67-
case p => p.getTagValue(KYUUBI_AUTHZ_TAG).nonEmpty
68-
}
64+
plan.getTagValue(KYUUBI_AUTHZ_TAG).nonEmpty
6965
}
7066

7167
def setExplainCommandExecutionId(sparkSession: SparkSession): Unit = {

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/PermanentViewMarker.scala

Lines changed: 1 addition & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -21,13 +21,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTable
2121
import org.apache.spark.sql.catalyst.expressions.Attribute
2222
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan}
2323

24-
import org.apache.kyuubi.plugin.spark.authz.util.WithInternalChild
25-
26-
case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) extends LeafNode
27-
with WithInternalChild {
24+
case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) extends LeafNode {
2825

2926
override def output: Seq[Attribute] = child.output
30-
31-
override def withNewChildInternal(newChild: LogicalPlan): LogicalPlan =
32-
copy(child = newChild)
3327
}

externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/session/FlinkSessionImpl.scala

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -109,6 +109,7 @@ class FlinkSessionImpl(
109109
case TGetInfoType.CLI_SERVER_NAME | TGetInfoType.CLI_DBMS_NAME =>
110110
TGetInfoValue.stringValue("Apache Flink")
111111
case TGetInfoType.CLI_DBMS_VER => TGetInfoValue.stringValue(EnvironmentInformation.getVersion)
112+
case TGetInfoType.CLI_ODBC_KEYWORDS => TGetInfoValue.stringValue("Unimplemented")
112113
case _ => throw KyuubiSQLException(s"Unrecognized GetInfoType value: $infoType")
113114
}
114115
}

externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala

Lines changed: 9 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -130,7 +130,15 @@ object HiveSQLEngine extends Logging {
130130
} else {
131131
val effectiveUser = UserGroupInformation.createProxyUser(sessionUser.get, realUser)
132132
effectiveUser.doAs(new PrivilegedExceptionAction[Unit] {
133-
override def run(): Unit = startEngine()
133+
override def run(): Unit = {
134+
val engineCredentials =
135+
kyuubiConf.getOption(KyuubiReservedKeys.KYUUBI_ENGINE_CREDENTIALS_KEY)
136+
kyuubiConf.unset(KyuubiReservedKeys.KYUUBI_ENGINE_CREDENTIALS_KEY)
137+
engineCredentials.filter(_.nonEmpty).foreach { credentials =>
138+
HiveTBinaryFrontendService.renewDelegationToken(credentials)
139+
}
140+
startEngine()
141+
}
134142
})
135143
}
136144

0 commit comments

Comments (0)