From cee18e5ea5522b92eb03ce6ebb042fbfe7dd1851 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Tue, 1 Oct 2024 21:14:24 -0700
Subject: [PATCH] [SPARK-49845][CORE] Make `appArgs` and `environmentVariables`
 optional in REST API

### What changes were proposed in this pull request?

This PR aims to make the `appArgs` and `environmentVariables` fields optional in the REST API.

### Why are the changes needed?

`appArgs` and `environmentVariables` became mandatory in Spark 2.2.2 due to an Apache Mesos limitation. Technically, this is a revert of SPARK-22574.
- https://github.com/apache/spark/pull/19966

Since Apache Spark 4.0 removed Mesos support, these requirements are no longer needed.
- https://github.com/apache/spark/pull/43135

### Does this PR introduce _any_ user-facing change?

No, because this only relaxes an existing validation.

### How was this patch tested?

Pass the CIs with the revised test case.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #48316 from dongjoon-hyun/SPARK-49845.

Authored-by: Dongjoon Hyun
Signed-off-by: Dongjoon Hyun
---
 .../org/apache/spark/deploy/rest/StandaloneRestServer.scala  | 5 +++--
 .../apache/spark/deploy/rest/SubmitRestProtocolRequest.scala | 2 --
 .../apache/spark/deploy/rest/SubmitRestProtocolSuite.scala   | 2 --
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
index 31673f666173a..c92e79381ca9b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
@@ -218,11 +218,12 @@ private[rest] class StandaloneSubmitRequestServlet(
     val (_, masterPort) = Utils.extractHostPortFromSparkUrl(masterUrl)
     val updatedMasters = masters.map(
       _.replace(s":$masterRestPort", s":$masterPort")).getOrElse(masterUrl)
-    val appArgs = request.appArgs
+    val appArgs = Option(request.appArgs).getOrElse(Array[String]())
     // Filter SPARK_LOCAL_(IP|HOSTNAME) environment variables from being set on the remote system.
     // In addition, the placeholders are replaced into the values of environment variables.
     val environmentVariables =
-      request.environmentVariables.filterNot(x => x._1.matches("SPARK_LOCAL_(IP|HOSTNAME)"))
+      Option(request.environmentVariables).getOrElse(Map.empty[String, String])
+        .filterNot(x => x._1.matches("SPARK_LOCAL_(IP|HOSTNAME)"))
         .map(x => (x._1, replacePlaceHolder(x._2)))

     // Construct driver description
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala b/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
index 7f462148c71a1..63882259adcb5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
@@ -47,8 +47,6 @@ private[rest] class CreateSubmissionRequest extends SubmitRestProtocolRequest {
     super.doValidate()
     assert(sparkProperties != null, "No Spark properties set!")
     assertFieldIsSet(appResource, "appResource")
-    assertFieldIsSet(appArgs, "appArgs")
-    assertFieldIsSet(environmentVariables, "environmentVariables")
     assertPropertyIsSet("spark.app.name")
     assertPropertyIsBoolean(config.DRIVER_SUPERVISE.key)
     assertPropertyIsNumeric(config.DRIVER_CORES.key)
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
index 9eb5172583120..f2807f258f2d1 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
@@ -87,8 +87,6 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
     message.clientSparkVersion = "1.2.3"
     message.appResource = "honey-walnut-cherry.jar"
     message.mainClass = "org.apache.spark.examples.SparkPie"
-    message.appArgs = Array("two slices")
-    message.environmentVariables = Map("PATH" -> "/dev/null")
     val conf = new SparkConf(false)
     conf.set("spark.app.name", "SparkPie")
     message.sparkProperties = conf.getAll.toMap
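For illustration, here is a minimal, self-contained sketch of the null-tolerant defaulting pattern the patch applies in `StandaloneRestServer.scala`. The `OptionalFieldsSketch` object and its `Request` case class are hypothetical stand-ins for the deserialized `CreateSubmissionRequest`, whose now-optional fields may arrive as `null` from JSON once the `assertFieldIsSet` checks are removed:

```scala
object OptionalFieldsSketch {
  // Hypothetical stand-in for CreateSubmissionRequest: with the validation
  // relaxed, these fields may be null after JSON deserialization.
  final case class Request(
      appArgs: Array[String],
      environmentVariables: Map[String, String])

  def main(args: Array[String]): Unit = {
    val request = Request(appArgs = null, environmentVariables = null)

    // Option(x) is None when x is null, so getOrElse supplies a safe default
    // instead of letting a NullPointerException propagate to the servlet.
    val appArgs = Option(request.appArgs).getOrElse(Array[String]())
    val environmentVariables =
      Option(request.environmentVariables).getOrElse(Map.empty[String, String])
        .filterNot { case (k, _) => k.matches("SPARK_LOCAL_(IP|HOSTNAME)") }

    val shownArgs = appArgs.mkString("[", ", ", "]")
    println(s"appArgs=$shownArgs env=$environmentVariables")
    // prints: appArgs=[] env=Map()
  }
}
```

With the SPARK-22574 checks removed from `CreateSubmissionRequest.doValidate()`, this defaulting is what keeps the driver-description construction safe for clients that omit either field.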