Skip to content

[SPARK-3410] The priority of shutdownhook for ApplicationMaster should not be integer literal #2283

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 8 commits into from
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,8 @@ import java.io.IOException
import java.net.Socket
import java.util.concurrent.atomic.AtomicReference

import scala.collection.JavaConversions._
import scala.util.Try

import akka.actor._
import akka.remote._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.util.ShutdownHookManager
import org.apache.hadoop.yarn.api._
Expand Down Expand Up @@ -107,8 +103,11 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,
}
}
}
// Use priority 30 as it's higher than HDFS. It's the same priority MapReduce is using.
ShutdownHookManager.get().addShutdownHook(cleanupHook, 30)

// Use higher priority than FileSystem.
assert(ApplicationMaster.SHUTDOWN_HOOK_PRIORITY > FileSystem.SHUTDOWN_HOOK_PRIORITY)
ShutdownHookManager
.get().addShutdownHook(cleanupHook, ApplicationMaster.SHUTDOWN_HOOK_PRIORITY)

// Call this to force generation of secret so it gets populated into the
// Hadoop UGI. This has to happen before the startUserClass which does a
Expand Down Expand Up @@ -407,6 +406,8 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,

object ApplicationMaster extends Logging {

val SHUTDOWN_HOOK_PRIORITY: Int = 30

private var master: ApplicationMaster = _

def main(args: Array[String]) = {
Expand Down