@@ -90,19 +90,19 @@ import org.apache.spark.util.Utils
   val conf = new SparkConf()

   val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
-  /** Local directory to save .class files too */
-  val outputDir = {
-    val tmp = System.getProperty("java.io.tmpdir")
-    val rootDir = conf.get("spark.repl.classdir", tmp)
-    Utils.createTempDir(rootDir)
-  }
-  if (SPARK_DEBUG_REPL) {
-    echo("Output directory: " + outputDir)
-  }
+  /** Local directory to save .class files too */
+  lazy val outputDir = {
+    val tmp = System.getProperty("java.io.tmpdir")
+    val rootDir = conf.get("spark.repl.classdir", tmp)
+    Utils.createTempDir(rootDir)
+  }
+  if (SPARK_DEBUG_REPL) {
+    echo("Output directory: " + outputDir)
+  }

   val virtualDirectory = new PlainFile(outputDir) // "directory" for classfiles
-  val classServer = new HttpServer(outputDir,
-    new SecurityManager(conf)) /** Jetty server that will serve our classes to worker nodes */
+  /** Jetty server that will serve our classes to worker nodes */
+  val classServer = new HttpServer(outputDir, new SecurityManager(conf))
   private var currentSettings: Settings = initialSettings
   var printResults = true // whether to print result lines
   var totalSilence = false // whether to print anything
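
Note: the only functional change in the hunk above is val outputDir becoming lazy val outputDir; the rest is indentation cleanup and moving the Jetty-server comment onto its own line. The following minimal, self-contained sketch (plain Scala; createTempDir below is a hypothetical stand-in for Utils.createTempDir, not Spark code) shows what the laziness buys: the directory is created on first access rather than at construction time, and at most once.

object LazyDirDemo extends App {
  var calls = 0

  // Hypothetical stand-in for Utils.createTempDir; it only counts invocations.
  def createTempDir(root: String): String = { calls += 1; root + "/repl-classes" }

  // Mirrors the commit: initialization is deferred until first use.
  lazy val outputDir: String = createTempDir(System.getProperty("java.io.tmpdir"))

  println(calls)     // 0: nothing has been created yet
  println(outputDir) // first access runs createTempDir
  println(calls)     // 1
  println(outputDir) // cached thereafter
  println(calls)     // still 1
}
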
@@ -112,12 +112,12 @@ import org.apache.spark.util.Utils
   private var _executionWrapper = "" // code to be wrapped around all lines


-  // Start the classServer and store its URI in a spark system property
+  // Start the classServer and store its URI in a spark system property
   // (which will be passed to executors so that they can connect to it)
-  classServer.start()
-  if (SPARK_DEBUG_REPL) {
-    echo("Class server started, URI = " + classServer.uri)
-  }
+  classServer.start()
+  if (SPARK_DEBUG_REPL) {
+    echo("Class server started, URI = " + classServer.uri)
+  }

   /** We're going to go to some trouble to initialize the compiler asynchronously.
    *  It's critical that nothing call into it until it's been initialized or we will
@@ -138,7 +138,7 @@ import org.apache.spark.util.Utils
     if (isInitializeComplete) global.classPath.asURLs
     else new PathResolver(settings).result.asURLs // the compiler's classpath
   )
-  def settings = currentSettings
+  def settings = currentSettings
   def mostRecentLine = prevRequestList match {
     case Nil => ""
     case req :: _ => req.originalLine
@@ -725,6 +725,17 @@ import org.apache.spark.util.Utils
     classServer.stop()
   }

+  /**
+   * Captures the session names (which are set by system properties) once, instead of for each line.
+   */
+  object FixedSessionNames {
+    val lineName = sessionNames.line
+    val readName = sessionNames.read
+    val evalName = sessionNames.eval
+    val printName = sessionNames.print
+    val resultName = sessionNames.result
+  }
+
   /** Here is where we:
    *
    *  1) Read some source code, and put it in the "read" object.
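
The new doc comment states the motivation: in the underlying Scala interpreter, sessionNames.line and friends are defs whose values come from system-property lookups, so building names for every interpreted line repeated those lookups. A rough self-contained sketch of the pattern follows; the property key and defaults are illustrative, modeled on the interpreter's naming scheme rather than copied from Spark.

object SessionNamesSketch extends App {
  object sessionNames {
    // Each name is a def, so every call re-reads the system property.
    private def propOr(name: String): String =
      sys.props.getOrElse("scala.repl.name." + name, "$" + name)
    def line: String = propOr("line")
    def read: String = propOr("read")
  }

  // Capture the values once per session, as FixedSessionNames does above.
  object FixedSessionNames {
    val lineName = sessionNames.line
    val readName = sessionNames.read
  }

  // Later per-line work reuses the cached vals; no further property reads.
  println(FixedSessionNames.lineName + 1) // e.g. "$line1"
}
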
@@ -740,11 +751,11 @@ import org.apache.spark.util.Utils
     private var evalCaught: Option[Throwable] = None
     private var conditionalWarnings: List[ConditionalWarning] = Nil

-    val packageName = sessionNames.line + lineId
-    val readName = sessionNames.read
-    val evalName = sessionNames.eval
-    val printName = sessionNames.print
-    val resultName = sessionNames.result
+    val packageName = FixedSessionNames.lineName + lineId
+    val readName = FixedSessionNames.readName
+    val evalName = FixedSessionNames.evalName
+    val printName = FixedSessionNames.printName
+    val resultName = FixedSessionNames.resultName

     def bindError(t: Throwable) = {
       if (!bindExceptions) // avoid looping if already binding
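
With the hunk above, constructing a Request (one is built per interpreted line) reads the five cached vals from FixedSessionNames instead of performing five system-property lookups each time; only packageName still differs from line to line, via the lineId suffix.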