Skip to content

Commit 7da53f2

Browse files
committed
Update SparkSubmit.scala
1 parent 23d4451 commit 7da53f2

File tree

1 file changed

+6
-17
lines changed

1 file changed

+6
-17
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 6 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -588,7 +588,7 @@ private[spark] class SparkSubmit extends Logging {
588588
OptionAssigner(args.deployMode, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
589589
confKey = SUBMIT_DEPLOY_MODE.key),
590590
OptionAssigner(args.name, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, confKey = "spark.app.name"),
591-
OptionAssigner(args.ivyRepoPath, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
591+
OptionAssigner(args.ivyRepoPath.orNull, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
592592
OptionAssigner(args.driverMemory, ALL_CLUSTER_MGRS, CLIENT,
593593
confKey = DRIVER_MEMORY.key),
594594
OptionAssigner(args.driverExtraClassPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
@@ -604,13 +604,13 @@ private[spark] class SparkSubmit extends Logging {
604604
OptionAssigner(args.pyFiles, ALL_CLUSTER_MGRS, CLUSTER, confKey = SUBMIT_PYTHON_FILES.key),
605605

606606
// Propagate attributes for dependency resolution at the driver side
607-
OptionAssigner(args.packages, STANDALONE | MESOS | KUBERNETES,
607+
OptionAssigner(args.packages.orNull, STANDALONE | MESOS | KUBERNETES,
608608
CLUSTER, confKey = "spark.jars.packages"),
609-
OptionAssigner(args.repositories, STANDALONE | MESOS | KUBERNETES,
609+
OptionAssigner(args.repositories.orNull, STANDALONE | MESOS | KUBERNETES,
610610
CLUSTER, confKey = "spark.jars.repositories"),
611-
OptionAssigner(args.ivyRepoPath, STANDALONE | MESOS | KUBERNETES,
611+
OptionAssigner(args.ivyRepoPath.orNull, STANDALONE | MESOS | KUBERNETES,
612612
CLUSTER, confKey = "spark.jars.ivy"),
613-
OptionAssigner(args.packagesExclusions, STANDALONE | MESOS | KUBERNETES,
613+
OptionAssigner(args.packagesExclusions.orNull, STANDALONE | MESOS | KUBERNETES,
614614
CLUSTER, confKey = "spark.jars.excludes"),
615615

616616
// Yarn only
@@ -646,7 +646,7 @@ private[spark] class SparkSubmit extends Logging {
646646
confKey = DRIVER_CORES.key),
647647
OptionAssigner(args.supervise.toString, STANDALONE | MESOS, CLUSTER,
648648
confKey = DRIVER_SUPERVISE.key),
649-
OptionAssigner(args.ivyRepoPath, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
649+
OptionAssigner(args.ivyRepoPath.orNull, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
650650

651651
// An internal option used only for spark-shell to add user jars to repl's classloader,
652652
// previously it uses "spark.jars" or "spark.yarn.dist.jars" which now may be pointed to
@@ -1483,17 +1483,6 @@ private case class OptionAssigner(
14831483
confKey: String = null,
14841484
mergeFn: Option[(String, String) => String] = None)
14851485

1486-
private object OptionAssigner {
1487-
def apply(
1488-
value: Option[String],
1489-
clusterManager: Int,
1490-
deployMode: Int,
1491-
clOption: String = null,
1492-
confKey: String = null,
1493-
mergeFn: Option[(String, String) => String] = None): OptionAssigner =
1494-
new OptionAssigner(value.get, clusterManager, deployMode, clOption, confKey, mergeFn)
1495-
}
1496-
14971486
private[spark] trait SparkSubmitOperation {
14981487

14991488
def kill(submissionId: String, conf: SparkConf): Unit

0 commit comments

Comments (0)