@@ -23,6 +23,7 @@ import java.util.jar.JarFile
 import scala.collection.mutable.{ArrayBuffer, HashMap}
 
 import org.apache.spark.util.Utils
+import org.apache.spark.deploy.Action.Action
 
 /**
  * Parses and encapsulates arguments from the spark-submit script.
@@ -39,8 +40,6 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
   var driverExtraClassPath: String = null
   var driverExtraLibraryPath: String = null
   var driverExtraJavaOptions: String = null
-  var driverCores: String = null
-  var supervise: Boolean = false
   var queue: String = null
   var numExecutors: String = null
   var files: String = null
@@ -55,6 +54,23 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
   var pyFiles: String = null
   val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
 
+  // Standalone cluster mode only
+  var supervise: Boolean = false
+  var driverCores: String = null
+  var driverToKill: String = null
+  var driverToRequestStatusFor: String = null
+
+  def action: Action = {
+    (driverToKill, driverToRequestStatusFor) match {
+      case (null, null) => Action.SUBMIT
+      case (_, null) => Action.KILL
+      case (null, _) => Action.REQUEST_STATUS
+      case _ => SparkSubmit.printErrorAndExit(
+        "Requested to both kill and request status for a driver. Choose only one.")
+        null // never reached
+    }
+  }
+
   /** Default properties present in the currently defined defaults file. */
   lazy val defaultSparkProperties: HashMap[String, String] = {
     val defaultProperties = new HashMap[String, String]()
@@ -79,7 +95,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
   // Use `sparkProperties` map along with env vars to fill in any missing parameters
   loadEnvironmentArguments()
 
-  checkRequiredArguments()
+  validateArguments()
 
   /**
   * Merge values from the default properties file with those specified through --conf.
@@ -171,7 +187,15 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
   }
 
   /** Ensure that required fields exists. Call this only once all defaults are loaded. */
-  private def checkRequiredArguments(): Unit = {
+  private def validateArguments(): Unit = {
+    action match {
+      case Action.SUBMIT => validateSubmitArguments()
+      case Action.KILL => validateKillArguments()
+      case Action.REQUEST_STATUS => validateStatusRequestArguments()
+    }
+  }
+
+  private def validateSubmitArguments(): Unit = {
     if (args.length == 0) {
       printUsageAndExit(-1)
     }
@@ -206,6 +230,25 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     }
   }
 
+  private def validateKillArguments(): Unit = {
+    if (!master.startsWith("spark://") || deployMode != "cluster") {
+      SparkSubmit.printErrorAndExit("Killing drivers is only supported in standalone cluster mode")
+    }
+    if (driverToKill == null) {
+      SparkSubmit.printErrorAndExit("Please specify a driver to kill")
+    }
+  }
+
+  private def validateStatusRequestArguments(): Unit = {
+    if (!master.startsWith("spark://") || deployMode != "cluster") {
+      SparkSubmit.printErrorAndExit(
+        "Requesting driver statuses is only supported in standalone cluster mode")
+    }
+    if (driverToRequestStatusFor == null) {
+      SparkSubmit.printErrorAndExit("Please specify a driver to request status for")
+    }
+  }
+
   override def toString = {
     s"""Parsed arguments:
     |  master                  $master
@@ -312,6 +355,14 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
         propertiesFile = value
         parse(tail)
 
+      case ("--kill") :: value :: tail =>
+        driverToKill = value
+        parse(tail)
+
+      case ("--status") :: value :: tail =>
+        driverToRequestStatusFor = value
+        parse(tail)
+
       case ("--supervise") :: tail =>
         supervise = true
         parse(tail)
@@ -410,6 +461,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
        | Spark standalone with cluster deploy mode only:
        |  --driver-cores NUM          Cores for driver (Default: 1).
        |  --supervise                 If given, restarts the driver on failure.
+       |  --kill DRIVER_ID            If given, kills the driver specified.
+       |  --status DRIVER_ID          If given, requests the status of the driver specified.
        |
        | Spark standalone and Mesos only:
        |  --total-executor-cores NUM  Total cores for all executors.
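
For reviewers, a rough usage sketch of how the new `--kill` / `--status` handling is expected to resolve into an `Action` once this patch is applied. Everything below is illustrative and not part of the patch: the driver IDs and master URL are made up, the snippet assumes it runs inside the `org.apache.spark` package (the class is `private[spark]`), and it assumes the two-argument `(args, env)` constructor shown in the hunk headers.

```scala
// Hypothetical sketch only: exercise the new --kill / --status flags and check
// which Action they resolve to via the `action` method added in this patch.
import org.apache.spark.deploy.{Action, SparkSubmitArguments}

// --kill sets driverToKill, so `action` should resolve to Action.KILL
val killArgs = new SparkSubmitArguments(
  Seq("--master", "spark://host:7077", "--deploy-mode", "cluster",
    "--kill", "driver-20150101-0000"),
  sys.env)
assert(killArgs.action == Action.KILL)

// --status sets driverToRequestStatusFor, so `action` should resolve to REQUEST_STATUS
val statusArgs = new SparkSubmitArguments(
  Seq("--master", "spark://host:7077", "--deploy-mode", "cluster",
    "--status", "driver-20150101-0000"),
  sys.env)
assert(statusArgs.action == Action.REQUEST_STATUS)
```

Passing both flags at once would hit the `case _` branch of `action` and exit with the "Requested to both kill and request status for a driver" error.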