Commit 03da61b

SPARK-5425: Modified Utils.getSystemProperties to return a map of all system properties - explicit + defaults
1 parent 8faf2ea commit 03da61b

3 files changed: +14, -7 lines

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 3 additions & 3 deletions
@@ -23,6 +23,7 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.LinkedHashSet
 
 import org.apache.spark.serializer.KryoSerializer
+import org.apache.spark.util.Utils
 
 /**
  * Configuration for a Spark application. Used to set various Spark parameters as key-value pairs.
@@ -53,9 +54,8 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   if (loadDefaults) {
     // Load any spark.* system properties
-    val propNames = System.getProperties.stringPropertyNames().asScala
-    for (k <- propNames if k.startsWith("spark.")) {
-      set(k, System.getProperty(k))
+    for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) {
+      set(key, value)
     }
   }
 
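With this change, a spark.* property that is reachable only through a Properties defaults chain is now picked up when a SparkConf is constructed with loadDefaults = true. The sketch below is not part of the commit: the key spark.demo.flag and the demo object are invented, and it assumes the patched Utils.getSystemProperties is on the classpath.

import java.util.Properties

import org.apache.spark.SparkConf

object SparkConfDefaultsDemo {
  def main(args: Array[String]): Unit = {
    // Re-install the system properties so that every existing key, plus one
    // extra spark.* key, lives only in the *defaults* layer of a Properties
    // object; nothing is set explicitly on the top-level object.
    val defaultsLayer = new Properties()
    defaultsLayer.putAll(System.getProperties)
    defaultsLayer.setProperty("spark.demo.flag", "true") // hypothetical key
    System.setProperties(new Properties(defaultsLayer))

    // Before this commit SparkConf iterated a clone of the system properties
    // and never saw defaulted keys; with Utils.getSystemProperties it does.
    val conf = new SparkConf() // loadDefaults = true
    println(conf.contains("spark.demo.flag")) // expected: true with this patch
  }
}
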
core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 8 additions & 3 deletions
@@ -1312,9 +1312,14 @@ private[spark] object Utils extends Logging {
     hashAbs
   }
 
-  /** Returns a copy of the system properties that is thread-safe to iterator over. */
-  def getSystemProperties(): Map[String, String] = {
-    System.getProperties.clone().asInstanceOf[java.util.Properties].toMap[String, String]
+  /** Returns the system properties map that is thread-safe to iterator over. It gets the
+   * properties which have been set explicitly, as well as those for which only a default value
+   * has been defined. */
+  def getSystemProperties: Map[String, String] = {
+    val sysProps = for (key <- System.getProperties.stringPropertyNames()) yield
+      (key, System.getProperty(key))
+
+    sysProps.toMap
   }
 
   /**
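The rewrite matters because of a detail of java.util.Properties: cloning a Properties object and iterating it as a map only visits the explicitly set entries, whereas stringPropertyNames() also walks the defaults chain. A minimal, self-contained sketch (the keys spark.foo and spark.bar are made up for illustration):

import java.util.Properties

import scala.collection.JavaConverters._

object PropertiesDefaultsDemo {
  def main(args: Array[String]): Unit = {
    // "spark.foo" lives only in the defaults layer; "spark.bar" is explicit.
    val defaults = new Properties()
    defaults.setProperty("spark.foo", "from-default")
    val props = new Properties(defaults)
    props.setProperty("spark.bar", "explicit")

    // Old approach: the clone's entry set contains only explicit entries,
    // so the defaulted key is invisible when converted to a Scala map.
    val cloned = props.clone().asInstanceOf[Properties].asScala.toMap
    println(cloned.keySet) // Set(spark.bar)

    // New approach: stringPropertyNames() includes defaulted keys as well.
    val all = props.stringPropertyNames().asScala
      .map(key => key -> props.getProperty(key)).toMap
    println(all.keySet) // Set(spark.foo, spark.bar)
  }
}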

examples/src/main/scala/org/apache/spark/examples/DriverSubmissionTest.scala

Lines changed: 3 additions & 1 deletion
@@ -19,6 +19,8 @@ package org.apache.spark.examples
 
 import scala.collection.JavaConversions._
 
+import org.apache.spark.util.Utils
+
 /** Prints out environmental information, sleeps, and then exits. Made to
   * test driver submission in the standalone scheduler. */
 object DriverSubmissionTest {
@@ -30,7 +32,7 @@ object DriverSubmissionTest {
     val numSecondsToSleep = args(0).toInt
 
     val env = System.getenv()
-    val properties = System.getProperties()
+    val properties = Utils.getSystemProperties
 
     println("Environment variables containing SPARK_TEST:")
     env.filter{case (k, v) => k.contains("SPARK_TEST")}.foreach(println)
