@@ -73,6 +73,13 @@ private[spark] object Utils extends Logging {
    */
   val SPARK_CONTEXT_SHUTDOWN_PRIORITY = 50
 
+  /**
+   * The shutdown priority of temp directory must be lower than the SparkContext shutdown
+   * priority. Otherwise cleaning the temp directories while Spark jobs are running can
+   * throw undesirable errors at the time of shutdown.
+   */
+  val TEMP_DIR_SHUTDOWN_PRIORITY = 25
+
   private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
   @volatile private var localRootDirs: Array[String] = null
 
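For context, here is a minimal sketch of how priority-ordered shutdown hooks behave. It is an illustration only, not Spark's actual implementation; the PriorityShutdownHooks object and its members are hypothetical. It shows why TEMP_DIR_SHUTDOWN_PRIORITY (25) must stay below SPARK_CONTEXT_SHUTDOWN_PRIORITY (50): higher-priority hooks run first, so the SparkContext is stopped before its temp directories are deleted.

object PriorityShutdownHooks {
  private case class Hook(priority: Int, body: () => Unit) extends Comparable[Hook] {
    // Reverse the natural ordering so the highest priority is polled first.
    override def compareTo(other: Hook): Int =
      java.lang.Integer.compare(other.priority, priority)
  }

  private val hooks = new java.util.PriorityQueue[Hook]()

  def addShutdownHook(priority: Int)(body: () => Unit): Unit = synchronized {
    hooks.add(Hook(priority, body))
  }

  // A single real JVM hook drains the queue in descending priority order.
  Runtime.getRuntime.addShutdownHook(new Thread {
    override def run(): Unit = {
      var hook = hooks.poll()
      while (hook != null) {
        hook.body()
        hook = hooks.poll()
      }
    }
  })
}

Under this ordering, a hook registered with addShutdownHook(TEMP_DIR_SHUTDOWN_PRIORITY) { ... } always runs after one registered at priority 50, which is exactly what the hunk below relies on.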
@@ -189,10 +196,11 @@ private[spark] object Utils extends Logging {
   private val shutdownDeleteTachyonPaths = new scala.collection.mutable.HashSet[String]()
 
   // Add a shutdown hook to delete the temp dirs when the JVM exits
-  addShutdownHook { () =>
-    logDebug("Shutdown hook called")
+  addShutdownHook(TEMP_DIR_SHUTDOWN_PRIORITY) { () =>
+    logInfo("Shutdown hook called")
     shutdownDeletePaths.foreach { dirPath =>
       try {
+        logInfo("Deleting directory " + dirPath)
         Utils.deleteRecursively(new File(dirPath))
       } catch {
         case e: Exception => logError(s"Exception while deleting Spark temp dir: $dirPath", e)
@@ -202,6 +210,7 @@ private[spark] object Utils extends Logging {
 
   // Register the path to be deleted via shutdown hook
   def registerShutdownDeleteDir(file: File) {
+    logInfo("Registering shutdown hook for deleting dir " + file + " : " + Thread.currentThread().getStackTrace.toSeq.mkString("\n\t"))
     val absolutePath = file.getAbsolutePath()
     shutdownDeletePaths.synchronized {
       shutdownDeletePaths += absolutePath
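After this change, any directory registered through registerShutdownDeleteDir is removed by the TEMP_DIR_SHUTDOWN_PRIORITY hook, i.e. only once the higher-priority SparkContext hook has completed. A hedged usage sketch (the directory name here is arbitrary; in Spark itself, Utils.createTempDir() performs this registration automatically):

import java.io.File
import java.nio.file.Files

// Create a scratch directory and register it for deletion at JVM shutdown.
val scratch: File = Files.createTempDirectory("spark-scratch").toFile
Utils.registerShutdownDeleteDir(scratch)
// No explicit cleanup is needed: the priority-25 hook deletes the directory
// recursively after the SparkContext hook (priority 50) has finished.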