
Commit d7a1f9f

Author: Andrew Or

Fix local cluster tests

Parent: efa5e18

File tree

6 files changed: 13 additions & 10 deletions


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion
@@ -1945,7 +1945,7 @@ object SparkContext extends Logging {

         val scheduler = new TaskSchedulerImpl(sc)
         val localCluster = new LocalSparkCluster(
-          numSlaves.toInt, coresPerSlave.toInt, memoryPerSlaveInt)
+          numSlaves.toInt, coresPerSlave.toInt, memoryPerSlaveInt, sc.conf)
         val masterUrls = localCluster.start()
         val backend = new SparkDeploySchedulerBackend(scheduler, sc, masterUrls)
         scheduler.initialize(backend)
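
Note: this call site is reached when an application uses a "local-cluster[N,cores,MB]" master URL. The following is a minimal, hypothetical sketch of that path, not code from this commit: the pattern name and wrapper object are illustrative, and a fresh SparkConf stands in for sc.conf.

package org.apache.spark  // LocalSparkCluster is private[spark]

import org.apache.spark.deploy.LocalSparkCluster

object LocalClusterSketch {
  // Illustrative pattern; SparkContext matches "local-cluster[N, cores, MB]"
  // master strings with a similar regex.
  private val LocalClusterPattern =
    """local-cluster\[\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\]""".r

  def main(args: Array[String]): Unit = {
    "local-cluster[2,1,512]" match {
      case LocalClusterPattern(numSlaves, coresPerSlave, memoryPerSlave) =>
        // After this commit, the caller's SparkConf is threaded through so the
        // embedded Master and Workers inherit the application's settings.
        val cluster = new LocalSparkCluster(
          numSlaves.toInt, coresPerSlave.toInt, memoryPerSlave.toInt, new SparkConf(false))
        val masterUrls = cluster.start()
        println("Master URLs: " + masterUrls.mkString(", "))
        cluster.stop()
    }
  }
}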

core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala

Lines changed: 9 additions & 3 deletions
@@ -33,7 +33,11 @@ import org.apache.spark.util.Utils
  * fault recovery without spinning up a lot of processes.
  */
 private[spark]
-class LocalSparkCluster(numWorkers: Int, coresPerWorker: Int, memoryPerWorker: Int)
+class LocalSparkCluster(
+    numWorkers: Int,
+    coresPerWorker: Int,
+    memoryPerWorker: Int,
+    conf: SparkConf)
   extends Logging {

   private val localHostname = Utils.localHostName()

@@ -43,9 +47,11 @@ class LocalSparkCluster(numWorkers: Int, coresPerWorker: Int, memoryPerWorker: I
   def start(): Array[String] = {
     logInfo("Starting a local Spark cluster with " + numWorkers + " workers.")

+    // Disable REST server on Master in this mode unless otherwise specified
+    val _conf = conf.clone().setIfMissing("spark.master.rest.enabled", "false")
+
     /* Start the Master */
-    val conf = new SparkConf(false)
-    val (masterSystem, masterPort, _, _) = Master.startSystemAndActor(localHostname, 0, 0, conf)
+    val (masterSystem, masterPort, _, _) = Master.startSystemAndActor(localHostname, 0, 0, _conf)
     masterActorSystems += masterSystem
     val masterUrl = "spark://" + localHostname + ":" + masterPort
     val masters = Array(masterUrl)
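
Note: the clone-then-setIfMissing idiom above applies a default without mutating the caller's conf. A small self-contained sketch of that behavior (the object name here is illustrative):

import org.apache.spark.SparkConf

object SetIfMissingSketch {
  def main(args: Array[String]): Unit = {
    // Caller explicitly enabled the REST server: the explicit value wins.
    val appConf = new SparkConf(false).set("spark.master.rest.enabled", "true")
    val clusterConf = appConf.clone().setIfMissing("spark.master.rest.enabled", "false")
    assert(clusterConf.get("spark.master.rest.enabled") == "true")

    // clone() means the application's own conf is never mutated.
    assert(appConf.get("spark.master.rest.enabled") == "true")

    // With no explicit setting, the default "false" is applied.
    val defaulted = new SparkConf(false).setIfMissing("spark.master.rest.enabled", "false")
    assert(defaulted.get("spark.master.rest.enabled") == "false")
  }
}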

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 0 additions & 1 deletion
@@ -306,7 +306,6 @@ class SparkSubmitSuite extends FunSuite with Matchers with ResetSystemProperties
       "--master", "local-cluster[2,1,512]",
       "--jars", jarsString,
       "--conf", "spark.ui.enabled=false",
-      "--conf", "spark.master.rest.enabled=false",
       unusedJar.toString)
     runSparkSubmit(args)
   }

core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -29,9 +29,9 @@ class KryoSerializerDistributedSuite extends FunSuite {

   test("kryo objects are serialised consistently in different processes") {
     val conf = new SparkConf(false)
-    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-    conf.set("spark.kryo.registrator", classOf[AppJarRegistrator].getName)
-    conf.set("spark.task.maxFailures", "1")
+      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+      .set("spark.kryo.registrator", classOf[AppJarRegistrator].getName)
+      .set("spark.task.maxFailures", "1")

     val jar = TestUtils.createJarWithClasses(List(AppJarRegistrator.customClassName))
     conf.setJars(List(jar.getPath))
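
Note: this refactor is behavior-preserving because SparkConf.set returns the conf itself, so the chained calls configure the same object that `conf` names. A quick sketch (the wrapper object is illustrative):

import org.apache.spark.SparkConf

object ChainedConfSketch {
  def main(args: Array[String]): Unit = {
    // set() returns `this`, so chained and unchained forms are equivalent
    // and `conf` refers to the fully configured object either way.
    val conf = new SparkConf(false)
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.task.maxFailures", "1")
    assert(conf.get("spark.task.maxFailures") == "1")
  }
}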

pom.xml

Lines changed: 0 additions & 1 deletion
@@ -1127,7 +1127,6 @@
         <spark.testing>1</spark.testing>
         <spark.ui.enabled>false</spark.ui.enabled>
         <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
-        <spark.master.rest.enabled>false</spark.master.rest.enabled>
         <spark.executor.extraClassPath>${test_classpath}</spark.executor.extraClassPath>
         <spark.driver.allowMultipleContexts>true</spark.driver.allowMultipleContexts>
       </systemProperties>

project/SparkBuild.scala

Lines changed: 0 additions & 1 deletion
@@ -380,7 +380,6 @@ object TestSettings {
     javaOptions in Test += "-Dspark.port.maxRetries=100",
     javaOptions in Test += "-Dspark.ui.enabled=false",
     javaOptions in Test += "-Dspark.ui.showConsoleProgress=false",
-    javaOptions in Test += "-Dspark.master.rest.enabled=false",
     javaOptions in Test += "-Dspark.driver.allowMultipleContexts=true",
     javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
     javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")
