@@ -29,7 +29,9 @@ import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.io.Source
 import scala.util.Try
 
+import org.apache.spark.{SparkException, SparkUserAppException}
 import org.apache.spark.deploy.SparkSubmitAction._
+import org.apache.spark.internal.Logging
 import org.apache.spark.launcher.SparkSubmitArgumentsParser
 import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.util.Utils
@@ -40,7 +42,7 @@ import org.apache.spark.util.Utils
  * The env argument is used for testing.
  */
 private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
-  extends SparkSubmitArgumentsParser {
+  extends SparkSubmitArgumentsParser with Logging {
   var master: String = null
   var deployMode: String = null
   var executorMemory: String = null
@@ -84,8 +86,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   /** Default properties present in the currently defined defaults file. */
   lazy val defaultSparkProperties: HashMap[String, String] = {
     val defaultProperties = new HashMap[String, String]()
-    // scalastyle:off println
-    if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
+    if (verbose) {
+      logInfo(s"Using properties file: $propertiesFile")
+    }
     Option(propertiesFile).foreach { filename =>
       val properties = Utils.getPropertiesFromFile(filename)
       properties.foreach { case (k, v) =>
@@ -94,21 +97,16 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       // Property files may contain sensitive information, so redact before printing
       if (verbose) {
         Utils.redact(properties).foreach { case (k, v) =>
-          SparkSubmit.printStream.println(s"Adding default property: $k=$v")
+          logInfo(s"Adding default property: $k=$v")
         }
       }
     }
-    // scalastyle:on println
     defaultProperties
   }
 
   // Set parameters from command line arguments
-  try {
-    parse(args.asJava)
-  } catch {
-    case e: IllegalArgumentException =>
-      SparkSubmit.printErrorAndExit(e.getMessage())
-  }
+  parse(args.asJava)
+
   // Populate `sparkProperties` map from properties file
   mergeDefaultSparkProperties()
   // Remove keys that don't start with "spark." from `sparkProperties`.
@@ -140,7 +138,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     sparkProperties.foreach { case (k, v) =>
       if (!k.startsWith("spark.")) {
         sparkProperties -= k
-        SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
+        logWarning(s"Ignoring non-spark config property: $k=$v")
       }
     }
   }
@@ -213,10 +211,10 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
            }
          } catch {
            case _: Exception =>
-             SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
+             error(s"Cannot load main class from JAR $primaryResource")
          }
        case _ =>
-         SparkSubmit.printErrorAndExit(
+         error(
            s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
            "Please specify a class through --class.")
      }
@@ -246,6 +244,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case SUBMIT => validateSubmitArguments()
       case KILL => validateKillArguments()
       case REQUEST_STATUS => validateStatusRequestArguments()
+      case PRINT_VERSION =>
     }
   }
 
@@ -254,30 +253,30 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       printUsageAndExit(-1)
     }
     if (primaryResource == null) {
-      SparkSubmit.printErrorAndExit("Must specify a primary resource (JAR or Python or R file)")
+      error("Must specify a primary resource (JAR or Python or R file)")
     }
     if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) {
-      SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class")
+      error("No main class set in JAR; please specify one with --class")
     }
     if (driverMemory != null
         && Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) {
-      SparkSubmit.printErrorAndExit("Driver Memory must be a positive number")
+      error("Driver Memory must be a positive number")
     }
     if (executorMemory != null
         && Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) {
-      SparkSubmit.printErrorAndExit("Executor Memory cores must be a positive number")
+      error("Executor Memory cores must be a positive number")
     }
     if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) {
-      SparkSubmit.printErrorAndExit("Executor cores must be a positive number")
+      error("Executor cores must be a positive number")
     }
     if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) {
-      SparkSubmit.printErrorAndExit("Total executor cores must be a positive number")
+      error("Total executor cores must be a positive number")
     }
     if (numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
-      SparkSubmit.printErrorAndExit("Number of executors must be a positive number")
+      error("Number of executors must be a positive number")
     }
     if (pyFiles != null && !isPython) {
-      SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script")
+      error("--py-files given but primary resource is not a Python script")
     }
 
     if (master.startsWith("yarn")) {
@@ -289,27 +288,26 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     }
 
     if (proxyUser != null && principal != null) {
-      SparkSubmit.printErrorAndExit("Only one of --proxy-user or --principal can be provided.")
+      error("Only one of --proxy-user or --principal can be provided.")
     }
   }
 
   private def validateKillArguments(): Unit = {
     if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
-      SparkSubmit.printErrorAndExit(
-        "Killing submissions is only supported in standalone or Mesos mode!")
+      error("Killing submissions is only supported in standalone or Mesos mode!")
     }
     if (submissionToKill == null) {
-      SparkSubmit.printErrorAndExit("Please specify a submission to kill.")
+      error("Please specify a submission to kill.")
     }
   }
 
   private def validateStatusRequestArguments(): Unit = {
     if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
-      SparkSubmit.printErrorAndExit(
+      error(
         "Requesting submission statuses is only supported in standalone or Mesos mode!")
     }
     if (submissionToRequestStatusFor == null) {
-      SparkSubmit.printErrorAndExit("Please specify a submission to request status for.")
+      error("Please specify a submission to request status for.")
     }
   }
 
@@ -366,7 +364,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
 
       case DEPLOY_MODE =>
         if (value != "client" && value != "cluster") {
-          SparkSubmit.printErrorAndExit("--deploy-mode must be either \"client\" or \"cluster\"")
+          error("--deploy-mode must be either \"client\" or \"cluster\"")
         }
         deployMode = value
 
@@ -403,14 +401,14 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case KILL_SUBMISSION =>
         submissionToKill = value
         if (action != null) {
-          SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
+          error(s"Action cannot be both $action and $KILL.")
         }
         action = KILL
 
       case STATUS =>
         submissionToRequestStatusFor = value
         if (action != null) {
-          SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
+          error(s"Action cannot be both $action and $REQUEST_STATUS.")
         }
         action = REQUEST_STATUS
 
@@ -442,7 +440,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         repositories = value
 
       case CONF =>
-        val (confName, confValue) = SparkSubmit.parseSparkConfProperty(value)
+        val (confName, confValue) = SparkSubmitUtils.parseSparkConfProperty(value)
         sparkProperties(confName) = confValue
 
       case PROXY_USER =>
@@ -461,15 +459,15 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         verbose = true
 
       case VERSION =>
-        SparkSubmit.printVersionAndExit()
+        action = SparkSubmitAction.PRINT_VERSION
 
       case USAGE_ERROR =>
         printUsageAndExit(1)
 
      case _ =>
-        throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
+        error(s"Unexpected argument '$opt'.")
     }
-    true
+    action != SparkSubmitAction.PRINT_VERSION
   }
 
   /**
@@ -480,7 +478,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
    */
   override protected def handleUnknown(opt: String): Boolean = {
     if (opt.startsWith("-")) {
-      SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
+      error(s"Unrecognized option '$opt'.")
     }
 
     primaryResource =
@@ -499,20 +497,18 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   }
 
   private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
-    // scalastyle:off println
-    val outStream = SparkSubmit.printStream
     if (unknownParam != null) {
-      outStream.println("Unknown/unsupported param " + unknownParam)
+      logInfo("Unknown/unsupported param " + unknownParam)
     }
     val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
       """Usage: spark-submit [options] <app jar | python file | R file> [app arguments]
         |Usage: spark-submit --kill [submission ID] --master [spark://...]
         |Usage: spark-submit --status [submission ID] --master [spark://...]
         |Usage: spark-submit run-example [options] example-class [example args]""".stripMargin)
-    outStream.println(command)
+    logInfo(command)
 
     val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
-    outStream.println(
+    logInfo(
       s"""
         |Options:
         |  --master MASTER_URL         spark://host:port, mesos://host:port, yarn,
@@ -594,12 +590,11 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     )
 
     if (SparkSubmit.isSqlShell(mainClass)) {
-      outStream.println("CLI options:")
-      outStream.println(getSqlShellOptions())
+      logInfo("CLI options:")
+      logInfo(getSqlShellOptions())
     }
-    // scalastyle:on println
 
-    SparkSubmit.exitFn(exitCode)
+    throw new SparkUserAppException(exitCode)
   }
 
   /**
@@ -653,4 +648,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       System.setErr(currentErr)
     }
   }
+
+  private def error(msg: String): Unit = throw new SparkException(msg)
+
 }
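
Note on the new control flow: with this patch, SparkSubmitArguments no longer exits the JVM itself. Invalid arguments surface as a SparkException (via the new error helper) and printUsageAndExit signals its exit code through SparkUserAppException, so the caller decides how to terminate. The sketch below is not part of the patch; it is a minimal, hypothetical caller (the SubmitRunnerSketch object and its wiring are assumptions for illustration) showing how those exceptions could be mapped back to process exit codes.

    // Hypothetical caller sketch; lives in the deploy package because
    // SparkSubmitArguments is private[deploy].
    package org.apache.spark.deploy

    import org.apache.spark.{SparkException, SparkUserAppException}

    object SubmitRunnerSketch {
      def main(args: Array[String]): Unit = {
        try {
          // Parsing and validation may now throw instead of exiting directly.
          val parsed = new SparkSubmitArguments(args)
          // ... hand `parsed` off to the actual submission logic ...
        } catch {
          // Checked first: SparkUserAppException extends SparkException and
          // carries the exit code requested by printUsageAndExit.
          case e: SparkUserAppException =>
            sys.exit(e.exitCode)
          // error(msg) throws a plain SparkException for invalid arguments.
          case e: SparkException =>
            Console.err.println(e.getMessage)
            sys.exit(1)
        }
      }
    }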