
Commit fc59a02

Merge remote-tracking branch 'apache/master' into SPARK-4586
2 parents: 0882513 + 119f45d


155 files changed: +6392 −2104 lines


assembly/pom.xml

Lines changed: 2 additions & 0 deletions
@@ -142,8 +142,10 @@
       </includes>
       <excludes>
         <exclude>com/google/common/base/Absent*</exclude>
+        <exclude>com/google/common/base/Function</exclude>
         <exclude>com/google/common/base/Optional*</exclude>
         <exclude>com/google/common/base/Present*</exclude>
+        <exclude>com/google/common/base/Supplier</exclude>
       </excludes>
     </relocation>
   </relocations>
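The two new excludes (mirrored in core/pom.xml below) keep com.google.common.base.Function and com.google.common.base.Supplier un-relocated alongside the already-excluded Optional, Absent, and Present. A plausible reading, shown as a hedged sketch below, is that Guava's Optional exposes Function and Supplier in its public method signatures (transform and or), so shading them while leaving Optional at its original package would break linkage for user code:

```scala
// Hypothetical user code illustrating the coupling: Optional's public API
// references Function and Supplier, so all three must survive shading in the
// same package namespace. (Demo only; not part of this commit.)
import com.google.common.base.{Function, Optional, Supplier}

object GuavaOptionalDemo {
  def main(args: Array[String]): Unit = {
    val name: Optional[String] = Optional.of("spark")

    // Optional.transform takes a com.google.common.base.Function; if Function
    // were relocated but Optional were not, this call would fail to link.
    val length: Optional[Integer] = name.transform(new Function[String, Integer] {
      override def apply(s: String): Integer = s.length
    })

    // Optional.or takes a com.google.common.base.Supplier, hence the second exclude.
    val fallback: String = Optional.absent[String]().or(new Supplier[String] {
      override def get(): String = "default"
    })

    println(length.get()) // 5
    println(fallback)     // default
  }
}
```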

bin/spark-class

Lines changed: 3 additions & 2 deletions
@@ -29,6 +29,7 @@ FWDIR="$(cd "`dirname "$0"`"/..; pwd)"

 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"$SPARK_HOME/conf"}"

 . "$FWDIR"/bin/load-spark-env.sh

@@ -120,8 +121,8 @@ fi
 JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"

 # Load extra JAVA_OPTS from conf/java-opts, if it exists
-if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
+if [ -e "$SPARK_CONF_DIR/java-opts" ] ; then
+  JAVA_OPTS="$JAVA_OPTS `cat "$SPARK_CONF_DIR"/java-opts`"
 fi

 # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
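The script now resolves a configuration directory once, defaulting to $SPARK_HOME/conf when SPARK_CONF_DIR is unset, and the java-opts lookup follows it. A minimal Scala sketch of the same fallback rule (the helper and object names are hypothetical):

```scala
// Hypothetical helper mirroring the shell fallback:
// SPARK_CONF_DIR if set, otherwise $SPARK_HOME/conf.
object ConfDirDemo {
  def resolveConfDir(env: Map[String, String]): String =
    env.getOrElse("SPARK_CONF_DIR", env("SPARK_HOME") + "/conf")

  def main(args: Array[String]): Unit = {
    println(resolveConfDir(Map("SPARK_HOME" -> "/opt/spark")))
    // /opt/spark/conf
    println(resolveConfDir(Map(
      "SPARK_HOME" -> "/opt/spark",
      "SPARK_CONF_DIR" -> "/etc/spark/conf")))
    // /etc/spark/conf
  }
}
```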

build/mvn

Lines changed: 4 additions & 4 deletions
@@ -68,10 +68,10 @@ install_app() {
 # Install maven under the build/ folder
 install_mvn() {
   install_app \
-    "http://apache.claz.org/maven/maven-3/3.2.3/binaries" \
-    "apache-maven-3.2.3-bin.tar.gz" \
-    "apache-maven-3.2.3/bin/mvn"
-  MVN_BIN="${_DIR}/apache-maven-3.2.3/bin/mvn"
+    "http://archive.apache.org/dist/maven/maven-3/3.2.5/binaries" \
+    "apache-maven-3.2.5-bin.tar.gz" \
+    "apache-maven-3.2.5/bin/mvn"
+  MVN_BIN="${_DIR}/apache-maven-3.2.5/bin/mvn"
 }

 # Install zinc under the build/ folder

core/pom.xml

Lines changed: 2 additions & 0 deletions
@@ -372,8 +372,10 @@
       <artifact>com.google.guava:guava</artifact>
       <includes>
         <include>com/google/common/base/Absent*</include>
+        <include>com/google/common/base/Function</include>
         <include>com/google/common/base/Optional*</include>
         <include>com/google/common/base/Present*</include>
+        <include>com/google/common/base/Supplier</include>
       </includes>
     </filter>
   </filters>

core/src/main/resources/org/apache/spark/ui/static/webui.css

Lines changed: 2 additions & 1 deletion
@@ -190,6 +190,7 @@ span.additional-metric-title {

 /* Hide all additional metrics by default. This is done here rather than using JavaScript to
  * avoid slow page loads for stage pages with large numbers (e.g., thousands) of tasks. */
-.scheduler_delay, .deserialization_time, .serialization_time, .getting_result_time {
+.scheduler_delay, .deserialization_time, .fetch_wait_time, .serialization_time,
+.getting_result_time {
   display: none;
 }

core/src/main/scala/org/apache/spark/Logging.scala

Lines changed: 11 additions & 9 deletions
@@ -118,15 +118,17 @@ trait Logging {
     // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
     // org.apache.logging.slf4j.Log4jLoggerFactory
     val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
-    val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
-    if (!log4j12Initialized && usingLog4j12) {
-      val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
-      Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
-        case Some(url) =>
-          PropertyConfigurator.configure(url)
-          System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
-        case None =>
-          System.err.println(s"Spark was unable to load $defaultLogProps")
+    if (usingLog4j12) {
+      val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+      if (!log4j12Initialized) {
+        val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
+        Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
+          case Some(url) =>
+            PropertyConfigurator.configure(url)
+            System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
+          case None =>
+            System.err.println(s"Spark was unable to load $defaultLogProps")
+        }
       }
     }
     Logging.initialized = true
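The effect of the restructuring is that LogManager (a log4j 1.2 API) is only consulted after the cheap SLF4J binder check confirms the log4j 1.2 binding is in use; previously getAllAppenders ran unconditionally, even under other bindings. A simplified sketch of the guard ordering (the argument names here are hypothetical stand-ins for what Logging.scala derives from SLF4J and the classpath):

```scala
// Simplified sketch of the reordered guard; not the actual Spark code.
import org.apache.log4j.LogManager

object GuardOrderingDemo {
  def maybeInitialize(binderClass: String, configureDefaults: () => Unit): Unit = {
    val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
    if (usingLog4j12) {
      // Only now do we touch log4j; before this change the call below ran
      // even when a non-log4j-1.2 binding was active.
      val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
      if (!log4j12Initialized) {
        configureDefaults()
      }
    }
  }
}
```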

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 20 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,11 @@
1717

1818
package org.apache.spark
1919

20+
import java.util.concurrent.ConcurrentHashMap
21+
2022
import scala.collection.JavaConverters._
21-
import scala.collection.mutable.{HashMap, LinkedHashSet}
23+
import scala.collection.mutable.LinkedHashSet
24+
2225
import org.apache.spark.serializer.KryoSerializer
2326

2427
/**
@@ -46,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
4649
/** Create a SparkConf that loads defaults from system properties and the classpath */
4750
def this() = this(true)
4851

49-
private[spark] val settings = new HashMap[String, String]()
52+
private val settings = new ConcurrentHashMap[String, String]()
5053

5154
if (loadDefaults) {
5255
// Load any spark.* system properties
5356
for ((k, v) <- System.getProperties.asScala if k.startsWith("spark.")) {
54-
settings(k) = v
57+
set(k, v)
5558
}
5659
}
5760

@@ -63,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
6366
if (value == null) {
6467
throw new NullPointerException("null value for " + key)
6568
}
66-
settings(key) = value
69+
settings.put(key, value)
6770
this
6871
}
6972

@@ -129,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
129132

130133
/** Set multiple parameters together */
131134
def setAll(settings: Traversable[(String, String)]) = {
132-
this.settings ++= settings
135+
this.settings.putAll(settings.toMap.asJava)
133136
this
134137
}
135138

136139
/** Set a parameter if it isn't already configured */
137140
def setIfMissing(key: String, value: String): SparkConf = {
138-
if (!settings.contains(key)) {
139-
settings(key) = value
140-
}
141+
settings.putIfAbsent(key, value)
141142
this
142143
}
143144

@@ -163,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
163164

164165
/** Get a parameter; throws a NoSuchElementException if it's not set */
165166
def get(key: String): String = {
166-
settings.getOrElse(key, throw new NoSuchElementException(key))
167+
getOption(key).getOrElse(throw new NoSuchElementException(key))
167168
}
168169

169170
/** Get a parameter, falling back to a default if not set */
170171
def get(key: String, defaultValue: String): String = {
171-
settings.getOrElse(key, defaultValue)
172+
getOption(key).getOrElse(defaultValue)
172173
}
173174

174175
/** Get a parameter as an Option */
175176
def getOption(key: String): Option[String] = {
176-
settings.get(key)
177+
Option(settings.get(key))
177178
}
178179

179180
/** Get all parameters as a list of pairs */
180-
def getAll: Array[(String, String)] = settings.clone().toArray
181+
def getAll: Array[(String, String)] = {
182+
settings.entrySet().asScala.map(x => (x.getKey, x.getValue)).toArray
183+
}
181184

182185
/** Get a parameter as an integer, falling back to a default if not set */
183186
def getInt(key: String, defaultValue: Int): Int = {
@@ -224,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
224227
def getAppId: String = get("spark.app.id")
225228

226229
/** Does the configuration contain a given parameter? */
227-
def contains(key: String): Boolean = settings.contains(key)
230+
def contains(key: String): Boolean = settings.containsKey(key)
228231

229232
/** Copy this object */
230233
override def clone: SparkConf = {
231-
new SparkConf(false).setAll(settings)
234+
new SparkConf(false).setAll(getAll)
232235
}
233236

234237
/**
@@ -240,7 +243,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
240243
/** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
241244
* idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
242245
private[spark] def validateSettings() {
243-
if (settings.contains("spark.local.dir")) {
246+
if (contains("spark.local.dir")) {
244247
val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " +
245248
"the cluster manager (via SPARK_LOCAL_DIRS in mesos/standalone and LOCAL_DIRS in YARN)."
246249
logWarning(msg)
@@ -265,7 +268,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
265268
}
266269

267270
// Validate spark.executor.extraJavaOptions
268-
settings.get(executorOptsKey).map { javaOpts =>
271+
getOption(executorOptsKey).map { javaOpts =>
269272
if (javaOpts.contains("-Dspark")) {
270273
val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " +
271274
"Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
@@ -345,7 +348,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
345348
* configuration out for debugging.
346349
*/
347350
def toDebugString: String = {
348-
settings.toArray.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
351+
getAll.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
349352
}
350353
}
351354
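The switch from a Scala mutable.HashMap to java.util.concurrent.ConcurrentHashMap makes concurrent reads and writes safe, but ConcurrentHashMap.get returns null for missing keys, which is why getOption now wraps the lookup in Option(...) and why setIfMissing collapses to a single atomic putIfAbsent. A standalone sketch of the pattern (the demo object name is hypothetical):

```scala
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

// Standalone sketch of the pattern SparkConf adopts above.
object ConcurrentSettingsDemo {
  private val settings = new ConcurrentHashMap[String, String]()

  // Option(null) == None, so a missing key surfaces as None rather than null.
  def getOption(key: String): Option[String] = Option(settings.get(key))

  // putIfAbsent is atomic, replacing the racy check-then-set on a plain HashMap.
  def setIfMissing(key: String, value: String): Unit = settings.putIfAbsent(key, value)

  def getAll: Array[(String, String)] =
    settings.entrySet().asScala.map(e => (e.getKey, e.getValue)).toArray

  def main(args: Array[String]): Unit = {
    settings.put("spark.app.name", "demo")
    setIfMissing("spark.app.name", "ignored") // no-op: key already present
    println(getOption("spark.app.name"))      // Some(demo)
    println(getOption("spark.missing"))       // None
    println(getAll.mkString(", "))            // (spark.app.name,demo)
  }
}
```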
