Make various things public
## What changes were proposed in this pull request?
This patch makes several previously `private[spark]` / `private[repl]` members public: the `RBackend` class, plus a handful of fields and methods on `SparkILoop` and `SparkIMain`. It also changes `RRDD` to obtain its `SparkContext` via `SparkContext.getOrCreate(sparkConf)`, so an existing context is reused instead of a new one being constructed. This patch was originally authored by vlyubin.

## How was this patch tested?
N/A

Author: Volodymyr Lyubinets <vlyubin@gmail.com>

Closes apache#45 from rxin/rxin-make-various-things-public.
vlyubin authored and yhuai committed Aug 9, 2016
1 parent 8afe345 commit 32d5c62
Showing 4 changed files with 12 additions and 12 deletions.
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/api/r/RBackend.scala
@@ -35,7 +35,7 @@ import org.apache.spark.internal.Logging
 /**
  * Netty-based backend server that is used to communicate between R and Java.
  */
-private[spark] class RBackend {
+class RBackend {
 
   private[this] var channelFuture: ChannelFuture = null
   private[this] var bootstrap: ServerBootstrap = null
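Making `RBackend` public means the Netty-based R backend can be constructed from code outside the `org.apache.spark` package. A minimal sketch of what this enables, assuming a Spark build that includes this patch (the object and its contents are illustrative, not part of the diff):

```scala
import org.apache.spark.api.r.RBackend

// Hypothetical illustration: before this patch, `new RBackend()` outside the
// org.apache.spark package failed to compile because the class was private[spark].
object RBackendAccessSketch {
  def main(args: Array[String]): Unit = {
    val backend = new RBackend() // now visible to external callers
    println(s"constructed ${backend.getClass.getName}")
  }
}
```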
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/api/r/RRDD.scala
@@ -126,7 +126,7 @@ private[r] object RRDD {
       sparkConf.setExecutorEnv(name.toString, value.toString)
     }
 
-    val jsc = new JavaSparkContext(sparkConf)
+    val jsc = new JavaSparkContext(SparkContext.getOrCreate(sparkConf))
     jars.foreach { jar =>
       jsc.addJar(jar)
     }
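The `RRDD` change swaps the `JavaSparkContext(SparkConf)` constructor, which always builds a fresh `SparkContext`, for `SparkContext.getOrCreate(sparkConf)`, which returns the already-running context when one exists instead of failing because only one context may be active. A standalone sketch of the difference (not part of the patch):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.api.java.JavaSparkContext

object GetOrCreateSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("getOrCreate-demo")
    val first  = SparkContext.getOrCreate(conf) // no active context yet: creates one
    val second = SparkContext.getOrCreate(conf) // context exists: returns the same instance
    assert(first eq second)
    // JavaSparkContext can wrap an existing SparkContext instead of constructing one,
    // which is the pattern the patched RRDD code uses.
    val jsc = new JavaSparkContext(second)
    jsc.stop()
  }
}
```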
8 changes: 4 additions & 4 deletions .../org/apache/spark/repl/SparkILoop.scala
@@ -71,9 +71,9 @@ class SparkILoop(
   private var in: InteractiveReader = _ // the input stream from which commands come
 
   // NOTE: Exposed in package for testing
-  private[repl] var settings: Settings = _
+  var settings: Settings = _
 
-  private[repl] var intp: SparkIMain = _
+  var intp: SparkIMain = _
 
   @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
   @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: SparkIMain): Unit = intp = i
@@ -143,7 +143,7 @@ class SparkILoop(
   protected val originalClassLoader = Utils.getContextOrSparkClassLoader
 
   // classpath entries added via :cp
-  private var addedClasspath: String = ""
+  var addedClasspath: String = ""
 
   /** A reverse list of commands to replay if the user requests a :replay */
   private var replayCommandStack: List[String] = Nil
@@ -738,7 +738,7 @@ class SparkILoop(
     }
   }
 
-  private def addClasspath(arg: String): Unit = {
+  def addClasspath(arg: String): Unit = {
     val f = File(arg).normalize
     if (f.exists) {
       addedClasspath = ClassPath.join(addedClasspath, f.path)
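With `settings`, `intp`, `addedClasspath`, and `addClasspath` public, an embedding application can manage the REPL's classpath programmatically rather than through the `:cp` command. A hedged sketch (the no-arg `SparkILoop` constructor is assumed and not shown in this diff):

```scala
import org.apache.spark.repl.SparkILoop

object ReplClasspathSketch {
  def main(args: Array[String]): Unit = {
    val repl = new SparkILoop()         // assumed no-arg constructor
    repl.addClasspath("/tmp/extra.jar") // previously private; appends to addedClasspath if the file exists
    println(repl.addedClasspath)        // previously a private var, now readable
  }
}
```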
12 changes: 6 additions & 6 deletions .../org/apache/spark/repl/SparkIMain.scala
@@ -97,7 +97,7 @@ import org.apache.spark.annotation.DeveloperApi

   private val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
   /** Local directory to save .class files too */
-  private[repl] val outputDir = {
+  lazy val outputDir = {
     val rootDir = conf.getOption("spark.repl.classdir").getOrElse(Utils.getLocalDir(conf))
     Utils.createTempDir(root = rootDir, namePrefix = "repl")
   }
@@ -206,7 +206,7 @@ import org.apache.spark.annotation.DeveloperApi

   // argument is a thunk to execute after init is done
   // NOTE: Exposed to repl package since used by SparkILoop
-  private[repl] def initialize(postInitSignal: => Unit) {
+  def initialize(postInitSignal: => Unit) {
     synchronized {
       if (_isInitialized == null) {
         _isInitialized = io.spawn {
@@ -371,7 +371,7 @@ import org.apache.spark.annotation.DeveloperApi
   def clearExecutionWrapper() = _executionWrapper = ""
 
   /** interpreter settings */
-  private lazy val isettings = new SparkISettings(this)
+  lazy val isettings = new SparkISettings(this)
 
   /**
    * Instantiates a new compiler used by SparkIMain. Overridable to provide
@@ -477,7 +477,7 @@ import org.apache.spark.annotation.DeveloperApi
   }
 
   // NOTE: Exposed to repl package since used by SparkILoop
-  private[repl] def classLoader: AbstractFileClassLoader = {
+  def classLoader: AbstractFileClassLoader = {
     ensureClassLoader()
     _classLoader
   }
@@ -504,11 +504,11 @@ import org.apache.spark.annotation.DeveloperApi
     _runtimeClassLoader
   })
 
-  private def getInterpreterClassLoader() = classLoader
+  def getInterpreterClassLoader() = classLoader
 
   // Set the current Java "context" class loader to this interpreter's class loader
   // NOTE: Exposed to repl package since used by SparkILoopInit
-  private[repl] def setContextClassLoader() = classLoader.setAsContext()
+  def setContextClassLoader() = classLoader.setAsContext()
 
   /**
    * Returns the real name of a class based on its repl-defined name.
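Exposing `classLoader`, `getInterpreterClassLoader`, and `setContextClassLoader` lets external tooling resolve classes that were compiled inside the REPL session. A minimal sketch, assuming the caller already holds an initialized `SparkIMain` (the helper below is hypothetical):

```scala
import org.apache.spark.repl.SparkIMain

object ReplClassLoaderSketch {
  // Hedged sketch: resolve a class compiled inside the REPL session by name.
  // Both methods used here were private[repl] before this patch.
  def loadReplClass(intp: SparkIMain, replClassName: String): Class[_] = {
    intp.setContextClassLoader()              // installs the REPL loader as the thread's context loader
    intp.classLoader.loadClass(replClassName) // the AbstractFileClassLoader backed by outputDir
  }
}
```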
