-
Notifications
You must be signed in to change notification settings - Fork 28.1k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[SPARK-13294] [PROJECT INFRA] Remove MiMa's dependency on spark-class / Spark assembly #11178
Changes from 7 commits
73b1550
b6f1ce8
5528c48
bef62eb
76a365e
9fc0f7a
31854eb
906d8c8
1f995a4
902b1b7
a73118e
64330d6
9a122ad
db8e532
cd7eb04
4756a1e
ae6b002
dae4725
373fd52
8ec1cc4
5d32e74
97b5d78
f9e2b42
4070c0d
86cd513
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -381,18 +381,19 @@ object OldDeps { | |
|
||
lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings) | ||
|
||
def versionArtifact(id: String): Option[sbt.ModuleID] = { | ||
val fullId = id + "_2.11" | ||
Some("org.apache.spark" % fullId % "1.2.0") | ||
} | ||
|
||
def oldDepsSettings() = Defaults.coreDefaultSettings ++ Seq( | ||
name := "old-deps", | ||
scalaVersion := "2.10.5", | ||
libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq", | ||
"spark-streaming-flume", "spark-streaming-twitter", | ||
"spark-streaming", "spark-mllib", "spark-graphx", | ||
"spark-core").map(versionArtifact(_).get intransitive()) | ||
libraryDependencies := Seq( | ||
"spark-streaming-mqtt", | ||
"spark-streaming-zeromq", | ||
"spark-streaming-flume", | ||
"spark-streaming-twitter", | ||
"spark-streaming", | ||
"spark-mllib", | ||
"spark-graphx", | ||
"spark-core" | ||
).map(id => "org.apache.spark" % (id + "_2.11") % "1.2.0") | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. intransitive at the end is no longer needed ? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I'm going to post a larger followup in a few minutes with diffs to explain this fully, but in a nutshell the GenerateMiMaIgnore was hitting many spurious errors because it couldn't load / reflect on classes since their dependencies were missing; pulling in these transitive deps fixes that. |
||
) | ||
} | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -18,15 +18,13 @@ | |
// scalastyle:off classforname | ||
package org.apache.spark.tools | ||
|
||
import java.io.File | ||
import java.util.jar.JarFile | ||
|
||
import scala.collection.mutable | ||
import scala.collection.JavaConverters._ | ||
import scala.reflect.runtime.{universe => unv} | ||
import scala.reflect.runtime.universe.runtimeMirror | ||
import scala.util.Try | ||
|
||
import org.clapper.classutil.ClassFinder | ||
|
||
/** | ||
* A tool for generating classes to be excluded during binary checking with MIMA. It is expected | ||
* that this tool is run with ./spark-class. | ||
|
@@ -42,12 +40,13 @@ object GenerateMIMAIgnore { | |
private val classLoader = Thread.currentThread().getContextClassLoader | ||
private val mirror = runtimeMirror(classLoader) | ||
|
||
private def isDeveloperApi(sym: unv.Symbol) = sym.annotations.exists { | ||
_.tpe =:= mirror.staticClass("org.apache.spark.annotation.DeveloperApi").toType | ||
} | ||
|
||
private def isDeveloperApi(sym: unv.Symbol) = | ||
sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) | ||
|
||
private def isExperimental(sym: unv.Symbol) = | ||
sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.Experimental]) | ||
private def isExperimental(sym: unv.Symbol) = sym.annotations.exists { | ||
_.tpe =:= mirror.staticClass("org.apache.spark.annotation.Experimental").toType | ||
} | ||
|
||
|
||
private def isPackagePrivate(sym: unv.Symbol) = | ||
|
@@ -160,35 +159,13 @@ object GenerateMIMAIgnore { | |
* and subpackages both from directories and jars present on the classpath. | ||
*/ | ||
private def getClasses(packageName: String): Set[String] = { | ||
val path = packageName.replace('.', '/') | ||
val resources = classLoader.getResources(path) | ||
|
||
val jars = resources.asScala.filter(_.getProtocol == "jar") | ||
.map(_.getFile.split(":")(1).split("!")(0)).toSeq | ||
|
||
jars.flatMap(getClassesFromJar(_, path)) | ||
.map(_.getName) | ||
.filterNot(shouldExclude).toSet | ||
} | ||
|
||
/** | ||
* Get all classes in a package from a jar file. | ||
*/ | ||
private def getClassesFromJar(jarPath: String, packageName: String) = { | ||
import scala.collection.mutable | ||
val jar = new JarFile(new File(jarPath)) | ||
val enums = jar.entries().asScala.map(_.getName).filter(_.startsWith(packageName)) | ||
val classes = mutable.HashSet[Class[_]]() | ||
for (entry <- enums if entry.endsWith(".class")) { | ||
try { | ||
classes += Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader) | ||
} catch { | ||
// scalastyle:off println | ||
case _: Throwable => println("Unable to load:" + entry) | ||
// scalastyle:on println | ||
} | ||
} | ||
classes | ||
val finder = ClassFinder() | ||
finder | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. These changes are just part of switching us to use ClassUtil for finding the Spark classes. |
||
.getClasses | ||
.map(_.name) | ||
.filter(_.startsWith(packageName)) | ||
.filterNot(shouldExclude) | ||
.toSet | ||
} | ||
} | ||
// scalastyle:on classforname |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Is a full assembly no longer needed? How do you configure the classpath?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
See https://issues.apache.org/jira/browse/SPARK-9284
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Understood — for tests, the assembly is no longer needed.