-
Notifications
You must be signed in to change notification settings - Fork 28.1k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[SPARK-19810][BUILD][CORE] Remove support for Scala 2.10 #17150
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -35,21 +35,21 @@ if [%SPARK_ENV_LOADED%] == [] ( | |
|
||
rem Setting SPARK_SCALA_VERSION if not already set. | ||
|
||
set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11" | ||
set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10" | ||
rem set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11" | ||
rem set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.12" | ||
Review comment: I commented bits like this out, but also updated them for when 2.12 needs to be supported as an alternative. |
||
|
||
if [%SPARK_SCALA_VERSION%] == [] ( | ||
|
||
if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% ( | ||
echo "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." | ||
echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd." | ||
exit 1 | ||
) | ||
if exist %ASSEMBLY_DIR2% ( | ||
rem if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% ( | ||
rem echo "Presence of build for multiple Scala versions detected." | ||
rem echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd." | ||
rem exit 1 | ||
rem ) | ||
rem if exist %ASSEMBLY_DIR2% ( | ||
set SPARK_SCALA_VERSION=2.11 | ||
) else ( | ||
set SPARK_SCALA_VERSION=2.10 | ||
) | ||
rem ) else ( | ||
rem set SPARK_SCALA_VERSION=2.12 | ||
rem ) | ||
) | ||
exit /b 0 | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -91,13 +91,13 @@ install_mvn() { | |
|
||
# Install zinc under the build/ folder | ||
install_zinc() { | ||
local zinc_path="zinc-0.3.11/bin/zinc" | ||
local zinc_path="zinc-0.3.15/bin/zinc" | ||
Review comment: I went ahead and updated zinc along the way here. |
||
[ ! -f "${_DIR}/${zinc_path}" ] && ZINC_INSTALL_FLAG=1 | ||
local TYPESAFE_MIRROR=${TYPESAFE_MIRROR:-https://downloads.typesafe.com} | ||
|
||
install_app \ | ||
"${TYPESAFE_MIRROR}/zinc/0.3.11" \ | ||
"zinc-0.3.11.tgz" \ | ||
"${TYPESAFE_MIRROR}/zinc/0.3.15" \ | ||
"zinc-0.3.15.tgz" \ | ||
"${zinc_path}" | ||
ZINC_BIN="${_DIR}/${zinc_path}" | ||
} | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -183,8 +183,6 @@ class SparkContext(config: SparkConf) extends Logging { | |
// log out Spark Version in Spark driver log | ||
logInfo(s"Running Spark version $SPARK_VERSION") | ||
|
||
warnDeprecatedVersions() | ||
|
||
/* ------------------------------------------------------------------------------------- * | ||
| Private variables. These variables keep the internal state of the context, and are | | ||
| not accessible by the outside world. They're mutable since we want to initialize all | | ||
|
@@ -349,13 +347,6 @@ class SparkContext(config: SparkConf) extends Logging { | |
value | ||
} | ||
|
||
private def warnDeprecatedVersions(): Unit = { | ||
val javaVersion = System.getProperty("java.version").split("[+.\\-]+", 3) | ||
if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.10"))) { | ||
logWarning("Support for Scala 2.10 is deprecated as of Spark 2.1.0") | ||
} | ||
} | ||
|
||
/** Control our logLevel. This overrides any user-defined log settings. | ||
* @param logLevel The desired log level as a string. | ||
* Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN | ||
|
@@ -1396,6 +1387,8 @@ class SparkContext(config: SparkConf) extends Logging { | |
@deprecated("use AccumulatorV2", "2.0.0") | ||
def accumulableCollection[R <% Growable[T] with TraversableOnce[T] with Serializable: ClassTag, T] | ||
(initialValue: R): Accumulable[R, T] = { | ||
// TODO the context bound (<%) above should be replaced with simple type bound and implicit | ||
// conversion but is a breaking change. This should be fixed in Spark 3.x. | ||
Review comment: So view bounds (`<%`) are not recommended in Scala? Reply: It generates a warning in Scala 2.11, yeah. |
||
val param = new GrowableAccumulableParam[R, T] | ||
val acc = new Accumulable(initialValue, param) | ||
cleaner.foreach(_.registerAccumulatorForCleanup(acc.newAcc)) | ||
|
@@ -2605,9 +2598,9 @@ object SparkContext extends Logging { | |
*/ | ||
private[spark] val LEGACY_DRIVER_IDENTIFIER = "<driver>" | ||
|
||
private implicit def arrayToArrayWritable[T <% Writable: ClassTag](arr: Traversable[T]) | ||
private implicit def arrayToArrayWritable[T <: Writable : ClassTag](arr: Traversable[T]) | ||
: ArrayWritable = { | ||
def anyToWritable[U <% Writable](u: U): Writable = u | ||
def anyToWritable[U <: Writable](u: U): Writable = u | ||
|
||
new ArrayWritable(classTag[T].runtimeClass.asInstanceOf[Class[Writable]], | ||
arr.map(x => anyToWritable(x)).toArray) | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -125,9 +125,9 @@ private[spark] object RpcTimeout { | |
var foundProp: Option[(String, String)] = None | ||
while (itr.hasNext && foundProp.isEmpty) { | ||
val propKey = itr.next() | ||
conf.getOption(propKey).foreach { prop => foundProp = Some(propKey, prop) } | ||
conf.getOption(propKey).foreach { prop => foundProp = Some((propKey, prop)) } | ||
Review comment: Lots of the changes are of this form. Whereas scalac will still accept two args to a method that accepts one Tuple2 arg, it's a warning in Scala 2.11 in some cases. I actually also enabled all warnings of this form. |
||
} | ||
val finalProp = foundProp.getOrElse(timeoutPropList.head, defaultValue) | ||
val finalProp = foundProp.getOrElse((timeoutPropList.head, defaultValue)) | ||
val timeout = { Utils.timeStringAsSeconds(finalProp._2).seconds } | ||
new RpcTimeout(timeout, finalProp._1) | ||
} | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -54,7 +54,7 @@ private[spark] object JettyUtils extends Logging { | |
// implicit conversion from many types of functions to jetty Handlers. | ||
type Responder[T] = HttpServletRequest => T | ||
|
||
class ServletParams[T <% AnyRef](val responder: Responder[T], | ||
class ServletParams[T <: AnyRef](val responder: Responder[T], | ||
val contentType: String, | ||
val extractFn: T => String = (in: Any) => in.toString) {} | ||
|
||
|
@@ -68,7 +68,7 @@ private[spark] object JettyUtils extends Logging { | |
implicit def textResponderToServlet(responder: Responder[String]): ServletParams[String] = | ||
new ServletParams(responder, "text/plain") | ||
|
||
def createServlet[T <% AnyRef]( | ||
def createServlet[T <: AnyRef]( | ||
Review comment: These didn't need to be view bounds (`<%`) in the first place. |
||
servletParams: ServletParams[T], | ||
securityMgr: SecurityManager, | ||
conf: SparkConf): HttpServlet = { | ||
|
@@ -113,7 +113,7 @@ private[spark] object JettyUtils extends Logging { | |
} | ||
|
||
/** Create a context handler that responds to a request with the given path prefix */ | ||
def createServletHandler[T <% AnyRef]( | ||
def createServletHandler[T <: AnyRef]( | ||
path: String, | ||
servletParams: ServletParams[T], | ||
securityMgr: SecurityManager, | ||
|
Review comment: These changes to dummy artifacts used in examples aren't that important, but I figured it'd be tidy.