Skip to content

Commit aeb7cce

Browse files
committed
Review fixes:
* Removed unused config entries
* Added a fallback test
* Fixed Mesos and REST server property handling
1 parent ea50aef commit aeb7cce

File tree

4 files changed

+32
-12
lines changed

4 files changed

+32
-12
lines changed

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf}
2424
import org.apache.spark.deploy.{Command, DeployMessages, DriverDescription}
2525
import org.apache.spark.deploy.ClientArguments._
2626
import org.apache.spark.internal.config
27+
import org.apache.spark.launcher.SparkLauncher
2728
import org.apache.spark.rpc.RpcEndpointRef
2829
import org.apache.spark.util.Utils
2930

@@ -135,6 +136,7 @@ private[rest] class StandaloneSubmitRequestServlet(
135136
val sparkProperties = request.sparkProperties
136137
val driverMemory = sparkProperties.get(config.DRIVER_MEMORY.key)
137138
val driverCores = sparkProperties.get(config.DRIVER_CORES.key)
139+
val driverDefaultJavaOptions = sparkProperties.get(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS)
138140
val driverExtraJavaOptions = sparkProperties.get(config.DRIVER_JAVA_OPTIONS.key)
139141
val driverExtraClassPath = sparkProperties.get(config.DRIVER_CLASS_PATH.key)
140142
val driverExtraLibraryPath = sparkProperties.get(config.DRIVER_LIBRARY_PATH.key)
@@ -160,9 +162,11 @@ private[rest] class StandaloneSubmitRequestServlet(
160162
.set("spark.master", updatedMasters)
161163
val extraClassPath = driverExtraClassPath.toSeq.flatMap(_.split(File.pathSeparator))
162164
val extraLibraryPath = driverExtraLibraryPath.toSeq.flatMap(_.split(File.pathSeparator))
165+
val defaultJavaOpts = driverDefaultJavaOptions.map(Utils.splitCommandString)
166+
.getOrElse(Seq.empty)
163167
val extraJavaOpts = driverExtraJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
164168
val sparkJavaOpts = Utils.sparkJavaOpts(conf)
165-
val javaOpts = sparkJavaOpts ++ extraJavaOpts
169+
val javaOpts = sparkJavaOpts ++ defaultJavaOpts ++ extraJavaOpts
166170
val command = new Command(
167171
"org.apache.spark.deploy.worker.DriverWrapper",
168172
Seq("{{WORKER_URL}}", "{{USER_JAR}}", mainClass) ++ appArgs, // args to the DriverWrapper

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -51,11 +51,6 @@ package object config {
5151
private[spark] val DRIVER_CLASS_PATH =
5252
ConfigBuilder(SparkLauncher.DRIVER_EXTRA_CLASSPATH).stringConf.createOptional
5353

54-
private[spark] val DRIVER_DEFAULT_JAVA_OPTIONS =
55-
ConfigBuilder(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS)
56-
.stringConf
57-
.createOptional
58-
5954
private[spark] val DRIVER_JAVA_OPTIONS =
6055
ConfigBuilder(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)
6156
.withPrepended(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS)
@@ -185,11 +180,6 @@ package object config {
185180
private[spark] val EXECUTOR_HEARTBEAT_MAX_FAILURES =
186181
ConfigBuilder("spark.executor.heartbeat.maxFailures").internal().intConf.createWithDefault(60)
187182

188-
private[spark] val EXECUTOR_DEFAULT_JAVA_OPTIONS =
189-
ConfigBuilder(SparkLauncher.EXECUTOR_DEFAULT_JAVA_OPTIONS)
190-
.stringConf
191-
.createOptional
192-
193183
private[spark] val EXECUTOR_JAVA_OPTIONS =
194184
ConfigBuilder(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS)
195185
.withPrepended(SparkLauncher.EXECUTOR_DEFAULT_JAVA_OPTIONS)

core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -321,6 +321,28 @@ class ConfigEntrySuite extends SparkFunSuite {
321321
assert(conf.get(derivedConf) === Some("2,1"))
322322
}
323323

324+
test("conf entry: prepend with fallback") {
325+
val conf = new SparkConf()
326+
val prependedKey = testKey("prepended3")
327+
val prependedConf = ConfigBuilder(prependedKey).stringConf.createOptional
328+
val derivedConf = ConfigBuilder(testKey("prepend3"))
329+
.withPrepended(prependedKey)
330+
.stringConf
331+
.createOptional
332+
val confWithFallback = ConfigBuilder(testKey("fallback")).fallbackConf(derivedConf)
333+
334+
assert(conf.get(confWithFallback) === None)
335+
336+
conf.set(derivedConf, "1")
337+
assert(conf.get(confWithFallback) === Some("1"))
338+
339+
conf.set(prependedConf, "2")
340+
assert(conf.get(confWithFallback) === Some("2 1"))
341+
342+
conf.set(confWithFallback, Some("3"))
343+
assert(conf.get(confWithFallback) === Some("3"))
344+
}
345+
324346
test("conf entry: prepend should work only with string type") {
325347
var i = 0
326348
def testPrependFail(createConf: (String, String) => Unit): Unit = {

resource-managers/mesos/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ import org.apache.spark.deploy.Command
2828
import org.apache.spark.deploy.mesos.MesosDriverDescription
2929
import org.apache.spark.deploy.rest._
3030
import org.apache.spark.internal.config
31+
import org.apache.spark.launcher.SparkLauncher
3132
import org.apache.spark.scheduler.cluster.mesos.MesosClusterScheduler
3233
import org.apache.spark.util.Utils
3334

@@ -97,6 +98,7 @@ private[mesos] class MesosSubmitRequestServlet(
9798

9899
// Optional fields
99100
val sparkProperties = request.sparkProperties
101+
val driverDefaultJavaOptions = sparkProperties.get(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS)
100102
val driverExtraJavaOptions = sparkProperties.get(config.DRIVER_JAVA_OPTIONS.key)
101103
val driverExtraClassPath = sparkProperties.get(config.DRIVER_CLASS_PATH.key)
102104
val driverExtraLibraryPath = sparkProperties.get(config.DRIVER_LIBRARY_PATH.key)
@@ -110,9 +112,11 @@ private[mesos] class MesosSubmitRequestServlet(
110112
val conf = new SparkConf(false).setAll(sparkProperties)
111113
val extraClassPath = driverExtraClassPath.toSeq.flatMap(_.split(File.pathSeparator))
112114
val extraLibraryPath = driverExtraLibraryPath.toSeq.flatMap(_.split(File.pathSeparator))
115+
val defaultJavaOpts = driverDefaultJavaOptions.map(Utils.splitCommandString)
116+
.getOrElse(Seq.empty)
113117
val extraJavaOpts = driverExtraJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
114118
val sparkJavaOpts = Utils.sparkJavaOpts(conf)
115-
val javaOpts = sparkJavaOpts ++ extraJavaOpts
119+
val javaOpts = sparkJavaOpts ++ defaultJavaOpts ++ extraJavaOpts
116120
val command = new Command(
117121
mainClass, appArgs, environmentVariables, extraClassPath, extraLibraryPath, javaOpts)
118122
val actualSuperviseDriver = superviseDriver.map(_.toBoolean).getOrElse(DEFAULT_SUPERVISE)

0 commit comments

Comments (0)