@@ -31,16 +31,17 @@ private[spark] class SparkDeploySchedulerBackend(
   with AppClientListener
   with Logging {
 
-  var client: AppClient = null
-  var stopping = false
-  var shutdownCallback: (SparkDeploySchedulerBackend) => Unit = _
-  @volatile var appId: String = _
+  private var client: AppClient = null
+  private var stopping = false
+  private val shutdownCallbackLock = new Object()
+  private var shutdownCallback: (SparkDeploySchedulerBackend) => Unit = _
+  @volatile private var appId: String = _
 
-  val registrationLock = new Object()
-  var registrationDone = false
+  private val registrationLock = new Object()
+  private var registrationDone = false
 
-  val maxCores = conf.getOption("spark.cores.max").map(_.toInt)
-  val totalExpectedCores = maxCores.getOrElse(0)
+  private val maxCores = conf.getOption("spark.cores.max").map(_.toInt)
+  private val totalExpectedCores = maxCores.getOrElse(0)
 
   override def start() {
     super.start()
@@ -82,8 +83,11 @@ private[spark] class SparkDeploySchedulerBackend(
     stopping = true
     super.stop()
     client.stop()
-    if (shutdownCallback != null) {
-      shutdownCallback(this)
+
+    shutdownCallbackLock.synchronized {
+      if (shutdownCallback != null) {
+        shutdownCallback(this)
+      }
     }
   }
 
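The stop() hunk above now reads and invokes the callback inside shutdownCallbackLock.synchronized, matching the synchronized write added to setShutdownCallback in the next hunk. A minimal standalone sketch of that lock-guarded callback idiom, with hypothetical class and member names (not code from this PR):

// Sketch of the lock-guarded callback idiom; names are illustrative only.
class ShutdownNotifier {
  private val callbackLock = new Object()
  private var callback: ShutdownNotifier => Unit = _   // set lazily, may remain null

  def setCallback(f: ShutdownNotifier => Unit): Unit = {
    callbackLock.synchronized {
      callback = f
    }
  }

  def stop(): Unit = {
    // Read and invoke under the same lock that guards writes, so stop()
    // never races a concurrent setCallback or sees a stale reference.
    callbackLock.synchronized {
      if (callback != null) {
        callback(this)
      }
    }
  }
}

Publishing and invoking the field under the same monitor makes the callback reference safely visible to whichever thread calls stop(), without requiring the field itself to be @volatile.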
@@ -135,6 +139,12 @@ private[spark] class SparkDeploySchedulerBackend(
     super.applicationId
   }
 
+  def setShutdownCallback(f: SparkDeploySchedulerBackend => Unit) {
+    shutdownCallbackLock.synchronized {
+      shutdownCallback = f
+    }
+  }
+
   private def waitForRegistration() = {
     registrationLock.synchronized {
       while (!registrationDone) {
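A hypothetical caller-side sketch of the new setter (assumed usage, not part of this diff; since the class is private[spark], such a caller would have to live under the org.apache.spark package):

// Assumed wiring example: register cleanup work through setShutdownCallback
// before the backend is stopped; stop() later runs it inside
// shutdownCallbackLock.synchronized.
def wireShutdownHook(backend: SparkDeploySchedulerBackend): Unit = {
  backend.setShutdownCallback { _ =>
    // Cleanup that must run exactly when the backend shuts down.
    println("SparkDeploySchedulerBackend is shutting down")
  }
}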