Skip to content

Commit 32d5c62

Browse files
vlyubin authored and yhuai committed
Make various things public
## What changes were proposed in this pull request?

This patch is originally authored by vlyubin.

## How was this patch tested?

N/A

Author: Volodymyr Lyubinets <vlyubin@gmail.com>

Closes apache#45 from rxin/rxin-make-various-things-public.
1 parent 8afe345 commit 32d5c62

File tree

4 files changed

+12
-12
lines changed

4 files changed

+12
-12
lines changed

core/src/main/scala/org/apache/spark/api/r/RBackend.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ import org.apache.spark.internal.Logging
3535
/**
3636
* Netty-based backend server that is used to communicate between R and Java.
3737
*/
38-
private[spark] class RBackend {
38+
class RBackend {
3939

4040
private[this] var channelFuture: ChannelFuture = null
4141
private[this] var bootstrap: ServerBootstrap = null

core/src/main/scala/org/apache/spark/api/r/RRDD.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ private[r] object RRDD {
126126
sparkConf.setExecutorEnv(name.toString, value.toString)
127127
}
128128

129-
val jsc = new JavaSparkContext(sparkConf)
129+
val jsc = new JavaSparkContext(SparkContext.getOrCreate(sparkConf))
130130
jars.foreach { jar =>
131131
jsc.addJar(jar)
132132
}

repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -71,9 +71,9 @@ class SparkILoop(
7171
private var in: InteractiveReader = _ // the input stream from which commands come
7272

7373
// NOTE: Exposed in package for testing
74-
private[repl] var settings: Settings = _
74+
var settings: Settings = _
7575

76-
private[repl] var intp: SparkIMain = _
76+
var intp: SparkIMain = _
7777

7878
@deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
7979
@deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: SparkIMain): Unit = intp = i
@@ -143,7 +143,7 @@ class SparkILoop(
143143
protected val originalClassLoader = Utils.getContextOrSparkClassLoader
144144

145145
// classpath entries added via :cp
146-
private var addedClasspath: String = ""
146+
var addedClasspath: String = ""
147147

148148
/** A reverse list of commands to replay if the user requests a :replay */
149149
private var replayCommandStack: List[String] = Nil
@@ -738,7 +738,7 @@ class SparkILoop(
738738
}
739739
}
740740

741-
private def addClasspath(arg: String): Unit = {
741+
def addClasspath(arg: String): Unit = {
742742
val f = File(arg).normalize
743743
if (f.exists) {
744744
addedClasspath = ClassPath.join(addedClasspath, f.path)

repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ import org.apache.spark.annotation.DeveloperApi
9797

9898
private val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
9999
/** Local directory to save .class files too */
100-
private[repl] val outputDir = {
100+
lazy val outputDir = {
101101
val rootDir = conf.getOption("spark.repl.classdir").getOrElse(Utils.getLocalDir(conf))
102102
Utils.createTempDir(root = rootDir, namePrefix = "repl")
103103
}
@@ -206,7 +206,7 @@ import org.apache.spark.annotation.DeveloperApi
206206

207207
// argument is a thunk to execute after init is done
208208
// NOTE: Exposed to repl package since used by SparkILoop
209-
private[repl] def initialize(postInitSignal: => Unit) {
209+
def initialize(postInitSignal: => Unit) {
210210
synchronized {
211211
if (_isInitialized == null) {
212212
_isInitialized = io.spawn {
@@ -371,7 +371,7 @@ import org.apache.spark.annotation.DeveloperApi
371371
def clearExecutionWrapper() = _executionWrapper = ""
372372

373373
/** interpreter settings */
374-
private lazy val isettings = new SparkISettings(this)
374+
lazy val isettings = new SparkISettings(this)
375375

376376
/**
377377
* Instantiates a new compiler used by SparkIMain. Overridable to provide
@@ -477,7 +477,7 @@ import org.apache.spark.annotation.DeveloperApi
477477
}
478478

479479
// NOTE: Exposed to repl package since used by SparkILoop
480-
private[repl] def classLoader: AbstractFileClassLoader = {
480+
def classLoader: AbstractFileClassLoader = {
481481
ensureClassLoader()
482482
_classLoader
483483
}
@@ -504,11 +504,11 @@ import org.apache.spark.annotation.DeveloperApi
504504
_runtimeClassLoader
505505
})
506506

507-
private def getInterpreterClassLoader() = classLoader
507+
def getInterpreterClassLoader() = classLoader
508508

509509
// Set the current Java "context" class loader to this interpreter's class loader
510510
// NOTE: Exposed to repl package since used by SparkILoopInit
511-
private[repl] def setContextClassLoader() = classLoader.setAsContext()
511+
def setContextClassLoader() = classLoader.setAsContext()
512512

513513
/**
514514
* Returns the real name of a class based on its repl-defined name.

0 commit comments

Comments (0)