@@ -304,8 +304,8 @@ private[spark] class SparkSubmit extends Logging {
     // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
     // too for packages that include Python code
     val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(
-      packagesTransitive = true, args.packagesExclusions, args.packages,
-      args.repositories, args.ivyRepoPath, args.ivySettingsPath)
+      packagesTransitive = true, Option(args.packagesExclusions), Option(args.packages),
+      Option(args.repositories), Option(args.ivyRepoPath), args.ivySettingsPath)

     if (resolvedMavenCoordinates.nonEmpty) {
       // In K8s client mode, when in the driver, add resolved jars early as we might need
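A minimal sketch of the type change driving the wrapping above, assuming this commit turns fields like args.packages and args.ivyRepoPath from Option[String] into plain (nullable) String values: Option(...) rebuilds the Option at call sites such as resolveMavenDependencies that still expect one, since Option(null) is None. The names below are simplified stand-ins, not the real Spark classes:

    // Hypothetical stand-in for the affected SparkSubmitArguments fields;
    // assumption: after this change they are nullable Strings, not Option[String].
    class Args(val packages: String, val ivyRepoPath: String)

    object OptionWrappingSketch extends App {
      val args = new Args(packages = null, ivyRepoPath = "/tmp/ivy")
      // Option(x) maps null to None and anything else to Some(x), so call sites
      // that still take Option[String] simply wrap the raw field.
      assert(Option(args.packages).isEmpty)
      assert(Option(args.ivyRepoPath).contains("/tmp/ivy"))
    }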
@@ -589,7 +589,7 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.deployMode, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
         confKey = SUBMIT_DEPLOY_MODE.key),
       OptionAssigner(args.name, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, confKey = "spark.app.name"),
-      OptionAssigner(args.ivyRepoPath.orNull, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
+      OptionAssigner(args.ivyRepoPath, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
       OptionAssigner(args.driverMemory, ALL_CLUSTER_MGRS, CLIENT,
         confKey = DRIVER_MEMORY.key),
       OptionAssigner(args.driverExtraClassPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
@@ -605,13 +605,13 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.pyFiles, ALL_CLUSTER_MGRS, CLUSTER, confKey = SUBMIT_PYTHON_FILES.key),

       // Propagate attributes for dependency resolution at the driver side
-      OptionAssigner(args.packages.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.packages, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.packages"),
-      OptionAssigner(args.repositories.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.repositories, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.repositories"),
-      OptionAssigner(args.ivyRepoPath.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.ivyRepoPath, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.ivy"),
-      OptionAssigner(args.packagesExclusions.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.packagesExclusions, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.excludes"),

       // Yarn only
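The .orNull removals in this hunk follow from the same assumption: once the fields are raw nullable Strings, they can feed OptionAssigner directly. A hedged sketch of that pattern, where the simplified stand-in below mirrors only the null check SparkSubmit applies before writing a value into the conf (the real private class also matches on cluster manager and deploy mode):

    // Simplified stand-in for SparkSubmit's private OptionAssigner.
    case class OptionAssigner(value: String, confKey: String)

    object AssignerSketch extends App {
      val assigners = Seq(
        OptionAssigner(null, "spark.jars.ivy"),             // unset, skipped below
        OptionAssigner("central", "spark.jars.repositories"))
      // Only non-null values reach the conf, mirroring SparkSubmit's loop.
      val conf = assigners.collect {
        case a if a.value != null => a.confKey -> a.value
      }.toMap
      assert(conf == Map("spark.jars.repositories" -> "central"))
    }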
@@ -647,7 +647,7 @@ private[spark] class SparkSubmit extends Logging {
         confKey = DRIVER_CORES.key),
       OptionAssigner(args.supervise.toString, STANDALONE | MESOS, CLUSTER,
         confKey = DRIVER_SUPERVISE.key),
-      OptionAssigner(args.ivyRepoPath.orNull, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
+      OptionAssigner(args.ivyRepoPath, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),

       // An internal option used only for spark-shell to add user jars to repl's classloader,
       // previously it uses "spark.jars" or "spark.yarn.dist.jars" which now may be pointed to