
Commit 696b75a

mpmolek authored and srowen committed
[SPARK-25934][MESOS] Don't propagate SPARK_CONF_DIR from spark submit
## What changes were proposed in this pull request?

Don't propagate SPARK_CONF_DIR to the driver in mesos cluster mode.

## How was this patch tested?

I built the 2.3.2 tag with this patch added and deployed a test job to a mesos cluster to confirm that the incorrect SPARK_CONF_DIR was no longer passed from the submit command.

Closes apache#22937 from mpmolek/fix-conf-dir.

Authored-by: Matt Molek <mpmolek@gmail.com>
Signed-off-by: Sean Owen <sean.owen@databricks.com>
1 parent 2aef79a commit 696b75a
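
To make the behavioral change concrete, here is a minimal, self-contained sketch (not Spark code) that applies the old and new filtering predicates from the diff below to a hypothetical submitter-side environment. The variable names and paths are invented for illustration only.

```scala
// Sketch: compare the old and new filtering predicates on a hypothetical
// environment of the machine running spark-submit.
object FilterSketch {
  // Old predicate: only SPARK_ENV_LOADED and SPARK_HOME were dropped.
  def oldFilter(env: Map[String, String]): Map[String, String] =
    env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
        k.startsWith("MESOS_")
    }

  // New predicate: SPARK_CONF_DIR is also dropped (SPARK-25934).
  private val BLACKLISTED_SPARK_ENV_VARS =
    Set("SPARK_ENV_LOADED", "SPARK_HOME", "SPARK_CONF_DIR")

  def newFilter(env: Map[String, String]): Map[String, String] =
    env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && !BLACKLISTED_SPARK_ENV_VARS.contains(k)) ||
        k.startsWith("MESOS_")
    }

  def main(args: Array[String]): Unit = {
    // Hypothetical submitter-side environment.
    val env = Map(
      "SPARK_CONF_DIR" -> "/etc/spark/conf.client", // client-local path, wrong on the Mesos agent
      "SPARK_HOME"     -> "/opt/spark-client",
      "SPARK_VAR"      -> "1",
      "MESOS_VAR"      -> "1",
      "OTHER_VAR"      -> "1")
    println(oldFilter(env)) // keeps SPARK_CONF_DIR, so it reaches the driver (the bug)
    println(newFilter(env)) // drops SPARK_CONF_DIR and SPARK_HOME; keeps SPARK_VAR and MESOS_VAR
  }
}
```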

File tree

2 files changed: +17 -3 lines changed


core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala

Lines changed: 5 additions & 3 deletions
```diff
@@ -408,6 +408,10 @@ private[spark] class RestSubmissionClient(master: String) extends Logging {
 }
 
 private[spark] object RestSubmissionClient {
+
+  // SPARK_HOME and SPARK_CONF_DIR are filtered out because they are usually wrong
+  // on the remote machine (SPARK-12345) (SPARK-25934)
+  private val BLACKLISTED_SPARK_ENV_VARS = Set("SPARK_ENV_LOADED", "SPARK_HOME", "SPARK_CONF_DIR")
   private val REPORT_DRIVER_STATUS_INTERVAL = 1000
   private val REPORT_DRIVER_STATUS_MAX_TRIES = 10
   val PROTOCOL_VERSION = "v1"
@@ -417,9 +421,7 @@ private[spark] object RestSubmissionClient {
    */
   private[rest] def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
     env.filterKeys { k =>
-      // SPARK_HOME is filtered out because it is usually wrong on the remote machine (SPARK-12345)
-      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
-        k.startsWith("MESOS_")
+      (k.startsWith("SPARK_") && !BLACKLISTED_SPARK_ENV_VARS.contains(k)) || k.startsWith("MESOS_")
     }
   }
 }
```
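
As a follow-up to the change above, the sketch below applies the same new rule to the current process environment (`sys.env`) to show which variables a REST submission would forward after this patch. It reimplements the predicate locally, since `filterSystemEnvironment` is `private[rest]` and not callable from application code; this is a sketch, not part of the commit.

```scala
// Sketch: apply the new filtering rule to the current process environment
// to see what a REST submission would forward after this change.
object EnvCheck {
  private val blacklisted = Set("SPARK_ENV_LOADED", "SPARK_HOME", "SPARK_CONF_DIR")

  def main(args: Array[String]): Unit = {
    val forwarded = sys.env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && !blacklisted.contains(k)) || k.startsWith("MESOS_")
    }
    // Print the surviving variables in a stable order.
    forwarded.toSeq.sortBy(_._1).foreach { case (k, v) => println(s"$k=$v") }
  }
}
```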

core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala

Lines changed: 12 additions & 0 deletions
```diff
@@ -396,6 +396,18 @@ class StandaloneRestSubmitSuite extends SparkFunSuite with BeforeAndAfterEach {
     assert(filteredVariables == Map("SPARK_VAR" -> "1"))
   }
 
+  test("client does not send 'SPARK_HOME' env var by default") {
+    val environmentVariables = Map("SPARK_VAR" -> "1", "SPARK_HOME" -> "1")
+    val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
+    assert(filteredVariables == Map("SPARK_VAR" -> "1"))
+  }
+
+  test("client does not send 'SPARK_CONF_DIR' env var by default") {
+    val environmentVariables = Map("SPARK_VAR" -> "1", "SPARK_CONF_DIR" -> "1")
+    val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
+    assert(filteredVariables == Map("SPARK_VAR" -> "1"))
+  }
+
   test("client includes mesos env vars") {
     val environmentVariables = Map("SPARK_VAR" -> "1", "MESOS_VAR" -> "1", "OTHER_VAR" -> "1")
     val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
```
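
The two new tests mirror the existing checks line for line. Purely as a sketch (not part of this commit), the same assertions could be written in a table-driven style; the fragment below assumes it is placed inside the body of StandaloneRestSubmitSuite, which uses ScalaTest's test registration.

```scala
// Sketch only: a table-driven form of the new assertions. This fragment
// assumes it sits inside the body of StandaloneRestSubmitSuite.
Seq("SPARK_ENV_LOADED", "SPARK_HOME", "SPARK_CONF_DIR").foreach { name =>
  test(s"client does not send '$name' env var by default") {
    val environmentVariables = Map("SPARK_VAR" -> "1", name -> "1")
    val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
    assert(filteredVariables == Map("SPARK_VAR" -> "1"))
  }
}
```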
