Commit 1514d0b

Fixed shutdown hook priorities

1 parent 852f4de

2 files changed (+13, -4 lines)

core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala

Lines changed: 2 additions & 2 deletions

@@ -139,8 +139,8 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkConf)
   }
 
   private def addShutdownHook(): AnyRef = {
-    Utils.addShutdownHook { () =>
-      logDebug("Shutdown hook called")
+    Utils.addShutdownHook(Utils.TEMP_DIR_SHUTDOWN_PRIORITY + 1) { () =>
+      logInfo("Shutdown hook called")
       DiskBlockManager.this.doStop()
     }
   }
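
The new priority is TEMP_DIR_SHUTDOWN_PRIORITY + 1 because Spark's shutdown hook manager runs hooks with higher priorities first: DiskBlockManager.doStop() therefore fires just before the priority-25 hook that deletes the registered temp directories, and after the SparkContext hook at priority 50. A minimal standalone sketch of that ordering (illustrative only, not Spark's hook manager):

// Standalone sketch (not Spark code): hooks with higher priority run
// first, so priority 26 (TEMP_DIR_SHUTDOWN_PRIORITY + 1) stops the
// DiskBlockManager before the priority-25 hook deletes its temp dirs.
object ShutdownOrderSketch {
  private val hooks = scala.collection.mutable.ArrayBuffer.empty[(Int, () => Unit)]

  def addShutdownHook(priority: Int)(hook: () => Unit): Unit =
    hooks += ((priority, hook))

  // Run in descending priority order, mirroring Spark's hook manager.
  def runAll(): Unit =
    hooks.sortBy { case (priority, _) => -priority }.foreach { case (_, hook) => hook() }

  def main(args: Array[String]): Unit = {
    addShutdownHook(50) { () => println("50: SparkContext.stop()") }
    addShutdownHook(25) { () => println("25: delete registered temp dirs") }
    addShutdownHook(26) { () => println("26: DiskBlockManager.doStop()") }
    runAll() // prints the 50, 26, 25 lines in that order
  }
}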

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 11 additions & 2 deletions

@@ -73,6 +73,13 @@ private[spark] object Utils extends Logging {
    */
   val SPARK_CONTEXT_SHUTDOWN_PRIORITY = 50
 
+  /**
+   * The shutdown priority of temp directory must be lower than the SparkContext shutdown
+   * priority. Otherwise cleaning the temp directories while Spark jobs are running can
+   * throw undesirable errors at the time of shutdown.
+   */
+  val TEMP_DIR_SHUTDOWN_PRIORITY = 25
+
   private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
   @volatile private var localRootDirs: Array[String] = null
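
Nothing in the code enforces the ordering the new scaladoc describes; it holds by convention between the two constants. A hedged sketch of the intended invariant (the require is illustrative and not part of this commit):

// Illustrative only: higher priorities run first at shutdown, so the
// temp-dir cleanup (25) must stay strictly below the SparkContext hook
// (50) so directories are deleted only after running jobs have stopped.
object PrioritySketch {
  val SPARK_CONTEXT_SHUTDOWN_PRIORITY = 50
  val TEMP_DIR_SHUTDOWN_PRIORITY = 25
  require(TEMP_DIR_SHUTDOWN_PRIORITY < SPARK_CONTEXT_SHUTDOWN_PRIORITY)
}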

@@ -189,10 +196,11 @@ private[spark] object Utils extends Logging {
   private val shutdownDeleteTachyonPaths = new scala.collection.mutable.HashSet[String]()
 
   // Add a shutdown hook to delete the temp dirs when the JVM exits
-  addShutdownHook { () =>
-    logDebug("Shutdown hook called")
+  addShutdownHook(TEMP_DIR_SHUTDOWN_PRIORITY) { () =>
+    logInfo("Shutdown hook called")
     shutdownDeletePaths.foreach { dirPath =>
       try {
+        logInfo("Deleting directory " + dirPath)
         Utils.deleteRecursively(new File(dirPath))
       } catch {
         case e: Exception => logError(s"Exception while deleting Spark temp dir: $dirPath", e)
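
Taken together with registerShutdownDeleteDir in the next hunk, this hook implements a simple register-then-sweep pattern. A self-contained sketch of that pattern (names mirror Utils, but this is not Spark code):

import java.io.File

// Standalone sketch of the register-then-sweep pattern in this hunk:
// paths accumulate in a synchronized set, and one prioritized hook
// deletes them all at JVM exit, logging each path as it goes.
object TempDirSweepSketch {
  private val shutdownDeletePaths = scala.collection.mutable.HashSet.empty[String]

  def registerShutdownDeleteDir(file: File): Unit =
    shutdownDeletePaths.synchronized { shutdownDeletePaths += file.getAbsolutePath }

  // Body of the priority-25 hook; a failure on one path must not stop the sweep.
  def sweep(): Unit = shutdownDeletePaths.foreach { dirPath =>
    try {
      println(s"Deleting directory $dirPath")
      // Utils.deleteRecursively(new File(dirPath)) in the real code
    } catch {
      case e: Exception => Console.err.println(s"Exception while deleting Spark temp dir: $dirPath ($e)")
    }
  }
}

Catching the exception per path is what keeps one undeletable directory from aborting the rest of the sweep.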
@@ -202,6 +210,7 @@
 
   // Register the path to be deleted via shutdown hook
   def registerShutdownDeleteDir(file: File) {
+    logInfo("Registering shutdown hook for deleting dir " + file + ": " + Thread.currentThread().getStackTrace.toSeq.mkString("\n\t"))
     val absolutePath = file.getAbsolutePath()
     shutdownDeletePaths.synchronized {
       shutdownDeletePaths += absolutePath
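
The new log line in registerShutdownDeleteDir records the caller's stack trace, so unexpected temp-dir registrations can be traced back to their origin. A minimal sketch of that idiom (the callerTrace name is illustrative):

// Format the current thread's stack trace one frame per line, as the
// added log line above does; useful for attributing registrations.
def callerTrace(): String =
  Thread.currentThread().getStackTrace.toSeq.mkString("\n\t")

// e.g. println("Registering shutdown delete for /tmp/spark-abc: " + callerTrace())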
