
Commit 934f237

Avoid potential deadlock with a user app's shutdown hook thread by more narrowly synchronizing access to 'hooks'
1 parent 26c3581 commit 934f237

1 file changed: 9 additions, 8 deletions

core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala

Lines changed: 9 additions & 8 deletions
@@ -206,7 +206,7 @@ private[spark] object ShutdownHookManager extends Logging {
 private [util] class SparkShutdownHookManager {
 
   private val hooks = new PriorityQueue[SparkShutdownHook]()
-  private var shuttingDown = false
+  @volatile private var shuttingDown = false
 
   /**
    * Install a hook to run at shutdown and run all registered hooks in order. Hadoop 1.x does not
@@ -232,22 +232,23 @@ private [util] class SparkShutdownHookManager {
     }
   }
 
-  def runAll(): Unit = synchronized {
+  def runAll(): Unit = {
     shuttingDown = true
-    while (!hooks.isEmpty()) {
-      Try(Utils.logUncaughtExceptions(hooks.poll().run()))
+    var nextHook: SparkShutdownHook = null
+    while ({nextHook = hooks synchronized { hooks.poll() }; nextHook != null}) {
+      Try(Utils.logUncaughtExceptions(nextHook.run()))
     }
   }
 
-  def add(priority: Int, hook: () => Unit): AnyRef = synchronized {
+  def add(priority: Int, hook: () => Unit): AnyRef = {
     checkState()
     val hookRef = new SparkShutdownHook(priority, hook)
-    hooks.add(hookRef)
+    hooks synchronized { hooks.add(hookRef) }
     hookRef
   }
 
-  def remove(ref: AnyRef): Boolean = synchronized {
-    hooks.remove(ref)
+  def remove(ref: AnyRef): Boolean = {
+    hooks synchronized { hooks.remove(ref) }
   }
 
   private def checkState(): Unit = {
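To make the intent of the change easier to see, here is a minimal, self-contained sketch of the same narrow-locking pattern. The names (NarrowSyncSketch, HookManager, Hook) are invented for illustration and are not Spark's API. The key points mirror the diff above: shuttingDown is @volatile so it is visible across threads without a lock, and the hooks queue is locked only for the individual poll/add/remove operations, so runAll() never holds a monitor while a hook body is executing and a concurrently registering thread (such as a user app's own shutdown hook thread) cannot end up blocked behind the whole shutdown sequence.

import java.util.PriorityQueue
import scala.util.Try

// Illustrative sketch only; not Spark's SparkShutdownHookManager.
object NarrowSyncSketch {

  // Higher priority runs first, mirroring the ordering idea in the real class.
  class Hook(val priority: Int, body: () => Unit) extends Comparable[Hook] {
    def run(): Unit = body()
    override def compareTo(other: Hook): Int = other.priority - priority
  }

  class HookManager {
    private val hooks = new PriorityQueue[Hook]()
    @volatile private var shuttingDown = false // visible without holding any lock

    def add(priority: Int)(body: => Unit): Hook = {
      // Stand-in for checkState(), which in Spark throws once shutdown has begun.
      require(!shuttingDown, "Shutdown hooks cannot be added during shutdown.")
      val ref = new Hook(priority, () => body)
      hooks.synchronized { hooks.add(ref) } // lock only around the queue update
      ref
    }

    def remove(ref: Hook): Boolean =
      hooks.synchronized { hooks.remove(ref) }

    def runAll(): Unit = {
      shuttingDown = true
      var next: Hook = null
      // Hold the lock only long enough to poll; each hook runs outside the lock,
      // so other threads can still call add/remove without waiting on a monitor
      // that runAll() would otherwise hold for the entire shutdown sequence.
      while ({ next = hooks.synchronized(hooks.poll()); next != null }) {
        Try(next.run()).failed.foreach(e => println(s"hook failed: $e"))
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val mgr = new HookManager
    mgr.add(10) { println("high-priority hook") }
    mgr.add(1)  { println("low-priority hook") }
    mgr.runAll() // prints the high-priority hook first, then the low-priority one
  }
}

In the real class, failures are routed through Utils.logUncaughtExceptions and checkState() throws IllegalStateException; the println and require above are stand-ins for those details.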
