[SPARK-7944][SPARK-8013] Remove most of the Spark REPL fork for Scala 2.11 #6903

Closed · wants to merge 11 commits
pom.xml (18 changes: 10 additions & 8 deletions)
@@ -341,11 +341,6 @@
   </dependencies>
   <dependencyManagement>
     <dependencies>
-      <dependency>
-        <groupId>${jline.groupid}</groupId>
-        <artifactId>jline</artifactId>
-        <version>${jline.version}</version>
-      </dependency>
       <dependency>
         <groupId>com.twitter</groupId>
         <artifactId>chill_${scala.binary.version}</artifactId>
@@ -1826,6 +1821,15 @@
       <jline.version>${scala.version}</jline.version>
       <jline.groupid>org.scala-lang</jline.groupid>
     </properties>
+    <dependencyManagement>
+      <dependencies>
+        <dependency>
+          <groupId>${jline.groupid}</groupId>
+          <artifactId>jline</artifactId>
+          <version>${jline.version}</version>
+        </dependency>
+      </dependencies>
+    </dependencyManagement>
   </profile>
 
   <profile>
@@ -1844,10 +1848,8 @@
       <property><name>scala-2.11</name></property>
     </activation>
     <properties>
-      <scala.version>2.11.6</scala.version>
+      <scala.version>2.11.7</scala.version>
       <scala.binary.version>2.11</scala.binary.version>
-      <jline.version>2.12.1</jline.version>
-      <jline.groupid>jline</jline.groupid>
     </properties>
   </profile>
 
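Taken together, the pom.xml hunks move jline out of the unconditional <dependencyManagement> block and pin it inside the scala-2.10 profile, which already sets jline.groupid to org.scala-lang and ties jline.version to ${scala.version}. The scala-2.11 profile drops its jline.version and jline.groupid overrides entirely, presumably because the stock Scala 2.11 REPL already pulls in its own jline transitively; it also bumps Scala from 2.11.6 to 2.11.7.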
repl/pom.xml (19 changes: 14 additions & 5 deletions)
@@ -38,11 +38,6 @@
   </properties>
 
   <dependencies>
-    <dependency>
-      <groupId>${jline.groupid}</groupId>
-      <artifactId>jline</artifactId>
-      <version>${jline.version}</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_${scala.binary.version}</artifactId>
@@ -161,6 +156,20 @@
     </plugins>
   </build>
   <profiles>
+    <profile>
+      <id>scala-2.10</id>
+      <activation>
+        <property><name>!scala-2.11</name></property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>${jline.groupid}</groupId>
+          <artifactId>jline</artifactId>
+          <version>${jline.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+
     <profile>
       <id>scala-2.11</id>
       <activation>
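The repl/pom.xml change mirrors the parent POM: the direct jline dependency becomes scala-2.10-only. The <property><name>!scala-2.11</name></property> activation is standard Maven property negation: the profile is active whenever the scala-2.11 property is undefined (a plain mvn package) and switches off under mvn -Dscala-2.11 ..., so exactly one of the two REPL profiles applies to any given build.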
repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala (16 changes: 8 additions & 8 deletions)
@@ -17,13 +17,14 @@
 
 package org.apache.spark.repl
 
+import java.io.File
+
+import scala.tools.nsc.Settings
+
 import org.apache.spark.util.Utils
 import org.apache.spark._
 import org.apache.spark.sql.SQLContext
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.SparkILoop
 
 object Main extends Logging {
 
   val conf = new SparkConf()
@@ -32,7 +33,8 @@ object Main extends Logging {
   val outputDir = Utils.createTempDir(rootDir)
   val s = new Settings()
   s.processArguments(List("-Yrepl-class-based",
-    "-Yrepl-outdir", s"${outputDir.getAbsolutePath}", "-Yrepl-sync"), true)
+    "-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
+    "-classpath", getAddedJars.mkString(File.pathSeparator)), true)
   val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
   var sparkContext: SparkContext = _
   var sqlContext: SQLContext = _
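Two things change in the Settings setup: the -Yrepl-sync flag is dropped, and the jars returned by getAddedJars are now handed to the interpreter through an ordinary -classpath argument. Below is a minimal, self-contained sketch of how scala.tools.nsc.Settings consumes such an argument list; the jar paths and output directory are invented for illustration.

import java.io.File
import scala.tools.nsc.Settings

object SettingsSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for Main.getAddedJars; these jar paths are invented.
    val addedJars = Array("/tmp/extra1.jar", "/tmp/extra2.jar")

    val settings = new Settings()
    val replArgs = List(
      "-Yrepl-class-based",                  // compile each REPL line into a class, not an object
      "-Yrepl-outdir", "/tmp/repl-classes",  // where REPL-generated class files land
      "-classpath", addedJars.mkString(File.pathSeparator))

    // processArguments returns (success, residual-unparsed-arguments).
    val (ok, residual) = settings.processArguments(replArgs, processAll = true)
    println(s"parsed ok = $ok, residual = $residual")
    println(s"classpath = ${settings.classpath.value}")
  }
}

Joining with File.pathSeparator rather than a hard-coded ':' keeps the classpath valid on Windows as well, which is presumably why the diff uses it.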
@@ -48,7 +50,6 @@ object Main extends Logging {
     Option(sparkContext).map(_.stop)
   }
 
-
   def getAddedJars: Array[String] = {
     val envJars = sys.env.get("ADD_JARS")
     if (envJars.isDefined) {
@@ -84,10 +85,9 @@ object Main extends Logging {
     val loader = Utils.getContextOrSparkClassLoader
     try {
       sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
-          .newInstance(sparkContext).asInstanceOf[SQLContext]
+        .newInstance(sparkContext).asInstanceOf[SQLContext]
       logInfo("Created sql context (with Hive support)..")
-    }
-    catch {
+    } catch {
       case _: java.lang.ClassNotFoundException | _: java.lang.NoClassDefFoundError =>
         sqlContext = new SQLContext(sparkContext)
         logInfo("Created sql context..")
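The edit above is cosmetic, indentation plus brace placement, but the pattern it touches deserves a standalone illustration: load a preferred class reflectively and fall back to a plain SQLContext when that class is not on the classpath (in Spark, the preferred class is the Hive-enabled context, hence the two log messages). Here is a self-contained sketch of the same fallback; the Greeter classes and the com.example name are invented stand-ins.

// PlainGreeter plays the role of SQLContext; FancyGreeter (the "Hive" analogue)
// deliberately does not exist, so the fallback branch runs.
class PlainGreeter(name: String) { def greet: String = s"hello, $name" }

object ReflectiveFallback {
  def greeter(preferredClass: String, name: String): PlainGreeter = {
    val loader = Thread.currentThread.getContextClassLoader
    try {
      loader.loadClass(preferredClass)
        .getConstructor(classOf[String])
        .newInstance(name)
        .asInstanceOf[PlainGreeter]
    } catch {
      case _: ClassNotFoundException | _: NoClassDefFoundError =>
        new PlainGreeter(name) // preferred class missing: fall back
    }
  }

  def main(args: Array[String]): Unit =
    println(greeter("com.example.FancyGreeter", "spark").greet) // prints "hello, spark"
}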

This file was deleted.
