Merge remote-tracking branch 'origin/5438_add_common_method_to_support_session_config' into 5438_add_common_method_to_support_session_config
davidyuan1223 committed Oct 26, 2023
2 parents 8011959 + 605ef16 commit 623200f
Showing 18 changed files with 519 additions and 41 deletions.
22 changes: 22 additions & 0 deletions LICENSE-binary
@@ -374,12 +374,16 @@ is auto-generated by `pnpm licenses list --prod`.
├────────────────────────────────────┼──────────────┤
│ typescript │ Apache-2.0 │
├────────────────────────────────────┼──────────────┤
│ moo │ BSD-3-Clause │
├────────────────────────────────────┼──────────────┤
│ normalize-wheel-es │ BSD-3-Clause │
├────────────────────────────────────┼──────────────┤
│ source-map │ BSD-3-Clause │
├────────────────────────────────────┼──────────────┤
│ source-map-js │ BSD-3-Clause │
├────────────────────────────────────┼──────────────┤
│ railroad-diagrams │ CC0-1.0 │
├────────────────────────────────────┼──────────────┤
│ picocolors │ ISC │
├────────────────────────────────────┼──────────────┤
│ @babel/helper-string-parser │ MIT │
@@ -452,6 +456,8 @@ is auto-generated by `pnpm licenses list --prod`.
├────────────────────────────────────┼──────────────┤
│ combined-stream │ MIT │
├────────────────────────────────────┼──────────────┤
│ commander │ MIT │
├────────────────────────────────────┼──────────────┤
│ csstype │ MIT │
├────────────────────────────────────┼──────────────┤
│ date-fns │ MIT │
@@ -460,6 +466,8 @@ is auto-generated by `pnpm licenses list --prod`.
├────────────────────────────────────┼──────────────┤
│ delayed-stream │ MIT │
├────────────────────────────────────┼──────────────┤
│ discontinuous-range │ MIT │
├────────────────────────────────────┼──────────────┤
│ element-plus │ MIT │
├────────────────────────────────────┼──────────────┤
│ escape-html │ MIT │
@@ -470,6 +478,8 @@ is auto-generated by `pnpm licenses list --prod`.
├────────────────────────────────────┼──────────────┤
│ form-data │ MIT │
├────────────────────────────────────┼──────────────┤
│ get-stdin │ MIT │
├────────────────────────────────────┼──────────────┤
│ lodash │ MIT │
├────────────────────────────────────┼──────────────┤
│ lodash-es │ MIT │
@@ -484,16 +494,26 @@ is auto-generated by `pnpm licenses list --prod`.
├────────────────────────────────────┼──────────────┤
│ mime-types │ MIT │
├────────────────────────────────────┼──────────────┤
│ monaco-editor │ MIT │
├────────────────────────────────────┼──────────────┤
│ nanoid │ MIT │
├────────────────────────────────────┼──────────────┤
│ nearley │ MIT │
├────────────────────────────────────┼──────────────┤
│ pinia │ MIT │
├────────────────────────────────────┼──────────────┤
│ pinia-plugin-persistedstate │ MIT │
├────────────────────────────────────┼──────────────┤
│ postcss │ MIT │
├────────────────────────────────────┼──────────────┤
│ randexp │ MIT │
├────────────────────────────────────┼──────────────┤
│ ret │ MIT │
├────────────────────────────────────┼──────────────┤
│ sourcemap-codec │ MIT │
├────────────────────────────────────┼──────────────┤
│ sql-formatter │ MIT │
├────────────────────────────────────┼──────────────┤
│ to-fast-properties │ MIT │
├────────────────────────────────────┼──────────────┤
│ vue │ MIT │
@@ -503,4 +523,6 @@
│ vue-i18n │ MIT │
├────────────────────────────────────┼──────────────┤
│ vue-router │ MIT │
├────────────────────────────────────┼──────────────┤
│ argparse │ Python-2.0 │
└────────────────────────────────────┴──────────────┘
12 changes: 12 additions & 0 deletions build/release/release.sh
@@ -109,15 +109,27 @@ upload_svn_staging() {
}

upload_nexus_staging() {
# Spark Extension Plugin for Spark 3.1
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.1 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-extension-spark-3-1 -am

# Spark Extension Plugin for Spark 3.2
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.2 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-extension-spark-3-2 -am

# Spark Extension Plugin for Spark 3.3
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.3 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-extension-spark-3-3 -am

# Spark TPC-DS/TPC-H Connector build with default Spark version (3.4) and Scala 2.13
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-connector-tpcds,extensions/spark/kyuubi-connector-tpch

# All modules including Spark Extension Plugin and Connectors build with default Spark version (3.4) and default Scala version (2.12)
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml"
}
6 changes: 3 additions & 3 deletions dev/dependencyList
@@ -106,7 +106,7 @@ jetty-util-ajax/9.4.52.v20230823//jetty-util-ajax-9.4.52.v20230823.jar
jetty-util/9.4.52.v20230823//jetty-util-9.4.52.v20230823.jar
jline/0.9.94//jline-0.9.94.jar
jul-to-slf4j/1.7.36//jul-to-slf4j-1.7.36.jar
kafka-clients/3.4.0//kafka-clients-3.4.0.jar
kafka-clients/3.5.1//kafka-clients-3.5.1.jar
kubernetes-client-api/6.8.1//kubernetes-client-api-6.8.1.jar
kubernetes-client/6.8.1//kubernetes-client-6.8.1.jar
kubernetes-httpclient-okhttp/6.8.1//kubernetes-httpclient-okhttp-6.8.1.jar
@@ -184,7 +184,7 @@ simpleclient_tracer_otel_agent/0.16.0//simpleclient_tracer_otel_agent-0.16.0.jar
slf4j-api/1.7.36//slf4j-api-1.7.36.jar
snakeyaml-engine/2.6//snakeyaml-engine-2.6.jar
snakeyaml/2.2//snakeyaml-2.2.jar
snappy-java/1.1.8.4//snappy-java-1.1.8.4.jar
snappy-java/1.1.10.1//snappy-java-1.1.10.1.jar
sqlite-jdbc/3.42.0.0//sqlite-jdbc-3.42.0.0.jar
swagger-annotations/2.2.1//swagger-annotations-2.2.1.jar
swagger-core/2.2.1//swagger-core-2.2.1.jar
@@ -196,4 +196,4 @@ units/1.6//units-1.6.jar
vertx-core/4.3.2//vertx-core-4.3.2.jar
vertx-grpc/4.3.2//vertx-grpc-4.3.2.jar
zjsonpatch/0.3.0//zjsonpatch-0.3.0.jar
zstd-jni/1.5.2-1//zstd-jni-1.5.2-1.jar
zstd-jni/1.5.5-1//zstd-jni-1.5.5-1.jar
6 changes: 3 additions & 3 deletions docs/quick_start/quick_start.rst
@@ -34,17 +34,17 @@ For quick start deployment, we need to prepare the following stuffs:
use Spark for demonstration.

These essential components are JVM-based applications. So, the JRE needs to be
pre-installed and the `JAVA_HOME` is correctly set to each component.
pre-installed and the ``JAVA_HOME`` is correctly set to each component.

================ ============ =============== ===========================================
Component Role Version Remarks
================ ============ =============== ===========================================
**Java** JRE 8/11 Officially released against JDK8
**Java** JRE 8/11/17 Officially released against JDK8
**Kyuubi** Gateway \ |release| \ - Kyuubi Server
Engine lib - Kyuubi Engine
Beeline - Kyuubi Hive Beeline
**Spark** Engine >=3.1 A Spark distribution
**Flink** Engine 1.16/1.17 A Flink distribution
**Flink** Engine 1.16/1.17/1.18 A Flink distribution
**Trino** Engine >=363 A Trino cluster
**Doris** Engine N/A A Doris cluster
**Hive** Engine - 3.1.x - A Hive distribution
@@ -84,22 +84,21 @@ class BatchJobSubmission(

@VisibleForTesting
private[kyuubi] val builder: ProcBuilder = {
Option(batchType).map(_.toUpperCase(Locale.ROOT)) match {
case Some("SPARK") | Some("PYSPARK") =>
new SparkBatchProcessBuilder(
session.user,
session.sessionConf,
batchId,
batchName,
Option(resource),
className,
batchConf,
batchArgs,
getOperationLog)

case _ =>
throw new UnsupportedOperationException(s"Batch type $batchType unsupported")
val mainClass = Option(batchType).map(_.toUpperCase(Locale.ROOT)) match {
case Some("SPARK") => className
case Some("PYSPARK") => null
case _ => throw new UnsupportedOperationException(s"Batch type $batchType unsupported")
}
new SparkBatchProcessBuilder(
session.user,
session.sessionConf,
batchId,
batchName,
Option(resource),
mainClass,
batchConf,
batchArgs,
getOperationLog)
}

override def currentApplicationInfo(): Option[ApplicationInfo] = {
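The hunk above collapses the two builder branches into one: the batch type now only selects the main class, and a single SparkBatchProcessBuilder is constructed afterwards (PYSPARK batches carry no main class). A minimal, self-contained sketch of that selection step, assuming a hypothetical `resolveMainClass` helper that is not part of the Kyuubi code base:

```scala
import java.util.Locale

object MainClassSelection {
  // Stand-in for the refactored logic: derive the main class from the batch type
  // first, then hand it to a single process-builder call.
  def resolveMainClass(batchType: String, className: String): String =
    Option(batchType).map(_.toUpperCase(Locale.ROOT)) match {
      case Some("SPARK")   => className // JVM batches keep the user-supplied class
      case Some("PYSPARK") => null      // Python batches have no main class
      case _ =>
        throw new UnsupportedOperationException(s"Batch type $batchType unsupported")
    }

  def main(args: Array[String]): Unit = {
    println(resolveMainClass("spark", "org.example.Main")) // org.example.Main
    println(resolveMainClass("PySpark", "ignored"))        // null
  }
}
```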
@@ -477,16 +477,15 @@ private[v1] class BatchesResource extends ApiRequestContext with Logging {
checkPermission(userName, metadata.username)
if (OperationState.isTerminal(OperationState.withName(metadata.state))) {
new CloseBatchResponse(false, s"The batch[$metadata] has been terminated.")
} else if (batchV2Enabled(metadata.requestConf) && metadata.state == "INITIALIZED") {
if (batchService.get.cancelUnscheduledBatch(batchId)) {
new CloseBatchResponse(true, s"Unscheduled batch $batchId is canceled.")
} else if (OperationState.isTerminal(OperationState.withName(metadata.state))) {
new CloseBatchResponse(false, s"The batch[$metadata] has been terminated.")
} else {
info(s"Cancel batch[$batchId] with state ${metadata.state} by killing application")
val (killed, msg) = forceKill(metadata.appMgrInfo, batchId, userName)
new CloseBatchResponse(killed, msg)
}
} else if (batchV2Enabled(metadata.requestConf) && metadata.state == "INITIALIZED" &&
// there is a chance that metadata is outdated, then `cancelUnscheduledBatch` fails
// and returns false
batchService.get.cancelUnscheduledBatch(batchId)) {
new CloseBatchResponse(true, s"Unscheduled batch $batchId is canceled.")
} else if (batchV2Enabled(metadata.requestConf) && metadata.kyuubiInstance == null) {
// code goes here indicates metadata is outdated, recursively calls itself to refresh
// the metadata
closeBatchSession(batchId, hs2ProxyUser)
} else if (metadata.kyuubiInstance != fe.connectionUrl) {
info(s"Redirecting delete batch[$batchId] to ${metadata.kyuubiInstance}")
val internalRestClient = getInternalRestClient(metadata.kyuubiInstance)
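The restructured chain above folds `cancelUnscheduledBatch` into its guard, so a failed cancellation (for example, when the metadata is outdated) falls through to the later branches, and a null `kyuubiInstance` triggers a recursive call that re-reads the metadata. A condensed sketch of that control flow, using hypothetical types and function parameters rather than the real Kyuubi classes; the trailing branches of the method are cut off in the diff, so the final fallback here only mirrors the removed code:

```scala
// Hypothetical, simplified stand-ins for Kyuubi's Metadata and response types.
final case class BatchMeta(state: String, v2Enabled: Boolean, kyuubiInstance: String)

object CloseBatchSketch {
  private val terminalStates = Set("FINISHED", "CANCELED", "ERROR")

  def close(
      batchId: String,
      meta: BatchMeta,
      thisInstance: String,
      cancelUnscheduled: String => Boolean, // may return false when metadata is stale
      refreshAndRetry: String => String,    // re-reads metadata and calls close again
      redirect: String => String,
      forceKill: String => String): String =
    if (terminalStates.contains(meta.state)) {
      s"The batch[$meta] has been terminated."
    } else if (meta.v2Enabled && meta.state == "INITIALIZED" && cancelUnscheduled(batchId)) {
      s"Unscheduled batch $batchId is canceled."
    } else if (meta.v2Enabled && meta.kyuubiInstance == null) {
      refreshAndRetry(batchId) // stale metadata: refresh and try again
    } else if (meta.kyuubiInstance != thisInstance) {
      redirect(batchId)        // another Kyuubi instance owns the batch
    } else {
      forceKill(batchId)       // last resort: kill the application
    }
}
```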
@@ -104,10 +104,10 @@ abstract class ServerKafkaLoggingEventHandlerSuite extends WithKyuubiServer with

class ServerKafkaLoggingEventHandlerSuiteForKafka2 extends ServerKafkaLoggingEventHandlerSuite {
// equivalent to Apache Kafka 2.8.x
override val imageTag = "6.2.10"
override val imageTag = "6.2.12"
}

class ServerKafkaLoggingEventHandlerSuiteForKafka3 extends ServerKafkaLoggingEventHandlerSuite {
// equivalent to Apache Kafka 3.3.x
override val imageTag = "7.3.3"
// equivalent to Apache Kafka 3.5.x
override val imageTag = "7.5.1"
}
2 changes: 2 additions & 0 deletions kyuubi-server/web-ui/package.json
@@ -19,8 +19,10 @@
"axios": "^0.27.2",
"date-fns": "^2.29.3",
"element-plus": "^2.2.12",
"monaco-editor": "^0.44.0",
"pinia": "^2.0.18",
"pinia-plugin-persistedstate": "^2.1.1",
"sql-formatter": "^13.0.1",
"swagger-ui-dist": "^4.9.1",
"vue": "^3.2.37",
"vue-i18n": "^9.2.2",