Skip to content

Commit 43008a7

Browse files
author
Marcelo Vanzin
committed
Don't make builder extend SparkLauncher.
Instead, make SparkLauncher a thin layer on top of SparkSubmitCommandBuilder, and make all the builder code internal to the library.
1 parent b4d6912 commit 43008a7

File tree

10 files changed

+528
-500
lines changed

10 files changed

+528
-500
lines changed

core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.worker
1919

2020
import java.io.{File, FileOutputStream, InputStream, IOException}
2121
import java.lang.System._
22-
import java.util.{List => JList, Map => JMap}
2322

2423
import scala.collection.JavaConversions._
2524
import scala.collection.Map

core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package org.apache.spark.launcher
1919

2020
import java.io.File
21-
import java.util.{List => JList}
21+
import java.util.{HashMap => JHashMap, List => JList, Map => JMap}
2222

2323
import scala.collection.JavaConversions._
2424

@@ -29,11 +29,12 @@ import org.apache.spark.deploy.Command
2929
* needs to live in the same package as the rest of the library.
3030
*/
3131
private[spark] class WorkerCommandBuilder(sparkHome: String, memoryMb: Int, command: Command)
32-
extends SparkLauncher(command.environment) {
32+
extends AbstractCommandBuilder {
3333

34-
setSparkHome(sparkHome)
34+
childEnv.putAll(command.environment)
35+
childEnv.put(CommandBuilderUtils.ENV_SPARK_HOME, sparkHome)
3536

36-
def buildCommand(): JList[String] = {
37+
override def buildCommand(env: JMap[String, String]): JList[String] = {
3738
val cmd = buildJavaCommand(command.classPathEntries.mkString(File.pathSeparator))
3839
cmd.add(s"-Xms${memoryMb}M")
3940
cmd.add(s"-Xmx${memoryMb}M")
@@ -43,4 +44,6 @@ private[spark] class WorkerCommandBuilder(sparkHome: String, memoryMb: Int, comm
4344
cmd
4445
}
4546

47+
def buildCommand(): JList[String] = buildCommand(new JHashMap[String, String]())
48+
4649
}

0 commit comments

Comments (0)