Commit b566b66

SPARK-2645: Fix for SparkContext stop behavior
1 parent 0be142d commit b566b66

1 file changed: +18 -10 lines changed

core/src/main/scala/org/apache/spark/SparkEnv.scala

Lines changed: 18 additions & 10 deletions
@@ -90,17 +90,25 @@ class SparkEnv (
   private var driverTmpDirToDelete: Option[String] = None
 
   private[spark] def stop() {
+
+    if(isStopped) return
+
     isStopped = true
-    pythonWorkers.foreach { case(key, worker) => worker.stop() }
-    Option(httpFileServer).foreach(_.stop())
-    mapOutputTracker.stop()
-    shuffleManager.stop()
-    broadcastManager.stop()
-    blockManager.stop()
-    blockManager.master.stop()
-    metricsSystem.stop()
-    outputCommitCoordinator.stop()
-    rpcEnv.shutdown()
+    try {
+      pythonWorkers.foreach { case (key, worker) => worker.stop()}
+      Option(httpFileServer).foreach(_.stop())
+      mapOutputTracker.stop()
+      shuffleManager.stop()
+      broadcastManager.stop()
+      blockManager.stop()
+      blockManager.master.stop()
+      metricsSystem.stop()
+      outputCommitCoordinator.stop()
+      rpcEnv.shutdown()
+    } catch {
+      case e: Exception =>
+        logInfo("Exception while SparkEnv stop", e)
+    }
 
     // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
     // down, but let's call it anyway in case it gets fixed in a later release
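
The change guards SparkEnv.stop() against repeated invocation and wraps the subsystem shutdown calls in a try/catch so a failure during cleanup is logged instead of propagating to the caller. Below is a minimal, self-contained Scala sketch of that idempotent-stop pattern; the Service trait, Env class, and println-based logging are hypothetical stand-ins for illustration, not Spark's actual API.

object StopBehaviorSketch {
  trait Service { def stop(): Unit }

  class Env(resources: Seq[Service]) {
    // Guard flag so that calling stop() more than once is a no-op.
    @volatile private var isStopped = false

    def stop(): Unit = {
      if (isStopped) return
      isStopped = true
      try {
        // Shut down each subsystem in order.
        resources.foreach(_.stop())
      } catch {
        case e: Exception =>
          // Log and swallow so shutdown failures do not escape to the caller.
          println(s"Exception while Env stop: ${e.getMessage}")
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val quiet = new Service { def stop(): Unit = println("stopped cleanly") }
    val flaky = new Service { def stop(): Unit = throw new RuntimeException("boom") }
    val env = new Env(Seq(quiet, flaky))
    env.stop() // logs the failure from flaky instead of throwing
    env.stop() // second call returns immediately because of the guard flag
  }
}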
