@@ -588,7 +588,7 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.deployMode, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
         confKey = SUBMIT_DEPLOY_MODE.key),
       OptionAssigner(args.name, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, confKey = "spark.app.name"),
-      OptionAssigner(args.ivyRepoPath, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
+      OptionAssigner(args.ivyRepoPath.orNull, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
       OptionAssigner(args.driverMemory, ALL_CLUSTER_MGRS, CLIENT,
         confKey = DRIVER_MEMORY.key),
       OptionAssigner(args.driverExtraClassPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
@@ -604,13 +604,13 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.pyFiles, ALL_CLUSTER_MGRS, CLUSTER, confKey = SUBMIT_PYTHON_FILES.key),
 
       // Propagate attributes for dependency resolution at the driver side
-      OptionAssigner(args.packages, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.packages.orNull, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.packages"),
-      OptionAssigner(args.repositories, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.repositories.orNull, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.repositories"),
-      OptionAssigner(args.ivyRepoPath, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.ivyRepoPath.orNull, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.ivy"),
-      OptionAssigner(args.packagesExclusions, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.packagesExclusions.orNull, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.excludes"),
 
       // Yarn only
@@ -646,7 +646,7 @@ private[spark] class SparkSubmit extends Logging {
         confKey = DRIVER_CORES.key),
       OptionAssigner(args.supervise.toString, STANDALONE | MESOS, CLUSTER,
         confKey = DRIVER_SUPERVISE.key),
-      OptionAssigner(args.ivyRepoPath, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
+      OptionAssigner(args.ivyRepoPath.orNull, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
 
       // An internal option used only for spark-shell to add user jars to repl's classloader,
       // previously it uses "spark.jars" or "spark.yarn.dist.jars" which now may be pointed to
@@ -1483,17 +1483,6 @@ private case class OptionAssigner(
     confKey: String = null,
     mergeFn: Option[(String, String) => String] = None)
 
-private object OptionAssigner {
-  def apply(
-      value: Option[String],
-      clusterManager: Int,
-      deployMode: Int,
-      clOption: String = null,
-      confKey: String = null,
-      mergeFn: Option[(String, String) => String] = None): OptionAssigner =
-    new OptionAssigner(value.get, clusterManager, deployMode, clOption, confKey, mergeFn)
-}
-
 private[spark] trait SparkSubmitOperation {
 
   def kill(submissionId: String, conf: SparkConf): Unit
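Context for the call-site changes above, as a sketch outside the patch: in the Scala standard library, Option#orNull returns the wrapped value, or null when the option is empty, whereas Option#get throws NoSuchElementException on None (the removed companion apply forwarded value.get). The snippet below only illustrates that standard-library behaviour; OrNullSketch and repoPath are hypothetical names, not part of SparkSubmit.

// Hypothetical, self-contained sketch of Option#orNull vs Option#get
// for a String-valued argument such as an unset repository path.
object OrNullSketch {
  def main(args: Array[String]): Unit = {
    val repoPath: Option[String] = None

    // orNull: an empty option becomes a plain null String.
    val viaOrNull: String = repoPath.orNull
    println(viaOrNull)                      // prints "null"

    // get: an empty option throws, which is what the removed
    // Option-accepting apply overload would have done via value.get.
    println(scala.util.Try(repoPath.get))   // Failure(java.util.NoSuchElementException: None.get)
  }
}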