@@ -29,7 +29,9 @@ import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.io.Source
 import scala.util.Try
 
+import org.apache.spark.{SparkException, SparkUserAppException}
 import org.apache.spark.deploy.SparkSubmitAction._
+import org.apache.spark.internal.Logging
 import org.apache.spark.launcher.SparkSubmitArgumentsParser
 import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.util.Utils
@@ -40,7 +42,7 @@ import org.apache.spark.util.Utils
  * The env argument is used for testing.
  */
 private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
-  extends SparkSubmitArgumentsParser {
+  extends SparkSubmitArgumentsParser with Logging {
   var master: String = null
   var deployMode: String = null
   var executorMemory: String = null
@@ -85,8 +87,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   /** Default properties present in the currently defined defaults file. */
   lazy val defaultSparkProperties: HashMap[String, String] = {
     val defaultProperties = new HashMap[String, String]()
-    // scalastyle:off println
-    if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
+    if (verbose) {
+      logInfo(s"Using properties file: $propertiesFile")
+    }
     Option(propertiesFile).foreach { filename =>
       val properties = Utils.getPropertiesFromFile(filename)
       properties.foreach { case (k, v) =>
@@ -95,21 +98,16 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       // Property files may contain sensitive information, so redact before printing
       if (verbose) {
         Utils.redact(properties).foreach { case (k, v) =>
-          SparkSubmit.printStream.println(s"Adding default property: $k=$v")
+          logInfo(s"Adding default property: $k=$v")
         }
       }
     }
-    // scalastyle:on println
     defaultProperties
   }
 
   // Set parameters from command line arguments
-  try {
-    parse(args.asJava)
-  } catch {
-    case e: IllegalArgumentException =>
-      SparkSubmit.printErrorAndExit(e.getMessage())
-  }
+  parse(args.asJava)
+
   // Populate `sparkProperties` map from properties file
   mergeDefaultSparkProperties()
   // Remove keys that don't start with "spark." from `sparkProperties`.
@@ -141,7 +139,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     sparkProperties.foreach { case (k, v) =>
       if (!k.startsWith("spark.")) {
         sparkProperties -= k
-        SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
+        logWarning(s"Ignoring non-spark config property: $k=$v")
       }
     }
   }
@@ -215,10 +213,10 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
             }
           } catch {
             case _: Exception =>
-              SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
+              error(s"Cannot load main class from JAR $primaryResource")
           }
         case _ =>
-          SparkSubmit.printErrorAndExit(
+          error(
             s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
             "Please specify a class through --class.")
       }
@@ -248,6 +246,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case SUBMIT => validateSubmitArguments()
       case KILL => validateKillArguments()
       case REQUEST_STATUS => validateStatusRequestArguments()
+      case PRINT_VERSION =>
     }
   }
 
@@ -256,62 +255,61 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       printUsageAndExit(-1)
     }
     if (primaryResource == null) {
-      SparkSubmit.printErrorAndExit("Must specify a primary resource (JAR or Python or R file)")
+      error("Must specify a primary resource (JAR or Python or R file)")
     }
     if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) {
-      SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class")
+      error("No main class set in JAR; please specify one with --class")
     }
     if (driverMemory != null
        && Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) {
-      SparkSubmit.printErrorAndExit("Driver Memory must be a positive number")
+      error("Driver memory must be a positive number")
     }
     if (executorMemory != null
        && Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) {
-      SparkSubmit.printErrorAndExit("Executor Memory cores must be a positive number")
+      error("Executor memory must be a positive number")
     }
     if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) {
-      SparkSubmit.printErrorAndExit("Executor cores must be a positive number")
+      error("Executor cores must be a positive number")
     }
     if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) {
-      SparkSubmit.printErrorAndExit("Total executor cores must be a positive number")
+      error("Total executor cores must be a positive number")
     }
     if (numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
-      SparkSubmit.printErrorAndExit("Number of executors must be a positive number")
+      error("Number of executors must be a positive number")
     }
     if (pyFiles != null && !isPython) {
-      SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script")
+      error("--py-files given but primary resource is not a Python script")
     }
 
     if (master.startsWith("yarn")) {
       val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
       if (!hasHadoopEnv && !Utils.isTesting) {
-        throw new Exception(s"When running with master '$master' " +
+        error(s"When running with master '$master' " +
           "either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
       }
     }
 
     if (proxyUser != null && principal != null) {
-      SparkSubmit.printErrorAndExit("Only one of --proxy-user or --principal can be provided.")
+      error("Only one of --proxy-user or --principal can be provided.")
     }
   }
 
   private def validateKillArguments(): Unit = {
     if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
-      SparkSubmit.printErrorAndExit(
-        "Killing submissions is only supported in standalone or Mesos mode!")
+      error("Killing submissions is only supported in standalone or Mesos mode!")
     }
     if (submissionToKill == null) {
-      SparkSubmit.printErrorAndExit("Please specify a submission to kill.")
+      error("Please specify a submission to kill.")
     }
   }
 
   private def validateStatusRequestArguments(): Unit = {
     if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
-      SparkSubmit.printErrorAndExit(
+      error(
         "Requesting submission statuses is only supported in standalone or Mesos mode!")
     }
     if (submissionToRequestStatusFor == null) {
-      SparkSubmit.printErrorAndExit("Please specify a submission to request status for.")
+      error("Please specify a submission to request status for.")
     }
   }
 
@@ -368,7 +366,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
 
       case DEPLOY_MODE =>
         if (value != "client" && value != "cluster") {
-          SparkSubmit.printErrorAndExit("--deploy-mode must be either \"client\" or \"cluster\"")
+          error("--deploy-mode must be either \"client\" or \"cluster\"")
         }
         deployMode = value
 
@@ -405,14 +403,14 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case KILL_SUBMISSION =>
         submissionToKill = value
         if (action != null) {
-          SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
+          error(s"Action cannot be both $action and $KILL.")
         }
         action = KILL
 
       case STATUS =>
         submissionToRequestStatusFor = value
         if (action != null) {
-          SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
+          error(s"Action cannot be both $action and $REQUEST_STATUS.")
         }
         action = REQUEST_STATUS
 
@@ -444,7 +442,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         repositories = value
 
       case CONF =>
-        val (confName, confValue) = SparkSubmit.parseSparkConfProperty(value)
+        val (confName, confValue) = SparkSubmitUtils.parseSparkConfProperty(value)
         sparkProperties(confName) = confValue
 
       case PROXY_USER =>
@@ -463,15 +461,15 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         verbose = true
 
       case VERSION =>
-        SparkSubmit.printVersionAndExit()
+        action = SparkSubmitAction.PRINT_VERSION
 
       case USAGE_ERROR =>
        printUsageAndExit(1)
 
       case _ =>
-        throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
+        error(s"Unexpected argument '$opt'.")
     }
-    true
+    action != SparkSubmitAction.PRINT_VERSION
   }
 
   /**
@@ -482,7 +480,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
    */
   override protected def handleUnknown(opt: String): Boolean = {
     if (opt.startsWith("-")) {
-      SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
+      error(s"Unrecognized option '$opt'.")
     }
 
     primaryResource =
@@ -501,20 +499,18 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   }
 
   private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
-    // scalastyle:off println
-    val outStream = SparkSubmit.printStream
     if (unknownParam != null) {
-      outStream.println("Unknown/unsupported param " + unknownParam)
+      logInfo("Unknown/unsupported param " + unknownParam)
     }
     val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
       """Usage: spark-submit [options] <app jar | python file | R file> [app arguments]
        |Usage: spark-submit --kill [submission ID] --master [spark://...]
        |Usage: spark-submit --status [submission ID] --master [spark://...]
        |Usage: spark-submit run-example [options] example-class [example args]""".stripMargin)
-    outStream.println(command)
+    logInfo(command)
 
     val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
-    outStream.println(
+    logInfo(
       s"""
        |Options:
        |  --master MASTER_URL         spark://host:port, mesos://host:port, yarn,
@@ -596,12 +592,11 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     )
 
     if (SparkSubmit.isSqlShell(mainClass)) {
-      outStream.println("CLI options:")
-      outStream.println(getSqlShellOptions())
+      logInfo("CLI options:")
+      logInfo(getSqlShellOptions())
     }
-    // scalastyle:on println
 
-    SparkSubmit.exitFn(exitCode)
+    throw new SparkUserAppException(exitCode)
   }
 
   /**
@@ -655,4 +650,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       System.setErr(currentErr)
     }
   }
+
+  private def error(msg: String): Unit = throw new SparkException(msg)
+
 }
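With this change, argument problems surface as a `SparkException` thrown by the new `error()` helper, and `printUsageAndExit` ends with `SparkUserAppException(exitCode)` instead of calling `SparkSubmit.exitFn`, so the caller decides whether the JVM exits. Below is a minimal caller sketch, not part of this patch: it assumes it sits in package `org.apache.spark.deploy` (the class is `private[deploy]`), that `SparkUserAppException` carries the exit code, and the object name is made up for illustration.

```scala
// Hypothetical caller of the patched SparkSubmitArguments.
package org.apache.spark.deploy

import org.apache.spark.{SparkException, SparkUserAppException}

object SubmitArgsHandlingSketch {
  def main(args: Array[String]): Unit = {
    try {
      // Parsing and validation run in the constructor; invalid arguments now
      // raise SparkException instead of exiting the JVM directly.
      val parsedArgs = new SparkSubmitArguments(args.toSeq)
      println(s"action: ${parsedArgs.action}")
    } catch {
      case e: SparkUserAppException =>
        // printUsageAndExit signals the requested exit code via this exception.
        sys.exit(e.exitCode)
      case e: SparkException =>
        // Validation or parse error: report it and choose our own exit code.
        System.err.println(e.getMessage)
        sys.exit(1)
    }
  }
}
```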