Revert "Check duplicated classes in dependencies tool" #70

Merged

merged 1 commit on May 3, 2015
5 changes: 0 additions & 5 deletions README.md
@@ -20,7 +20,6 @@ A sbt plugin for creating distributable Scala packages that include dependent ja
- The latest version is linked from `~/local/{project name}/current`
- You can add other resources in `src/pack` folder.
- All resources in this folder will be copied to `target/pack`.
- Check duplicated classes in dependencies.

* [Release Notes](ReleaseNotes.md)

@@ -169,10 +168,6 @@ export PATH=$(HOME)/local/bin:$PATH

$ sbt pack-archive

### Find duplicated classes in dependencies

This feature is documented in a [separate page](USAGE.md)

### Example projects

See also [examples](src/sbt-test/sbt-pack) folder
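For context, the feature being reverted exposed a `checkDuplicatedDependencies` task and a `checkDuplicatedExclude` setting, alongside the pre-existing `packDuplicateJarStrategy` setting (all visible in the removed Pack.scala below). A minimal sketch of how a build might have driven it, based only on the key definitions in this diff; the module IDs are made up for illustration:

```scala
// build.sbt (hypothetical usage; the keys come from the sbt-pack plugin's Pack object)

// Pairs of modules whose duplicated classes are known to be harmless and should be ignored.
checkDuplicatedExclude := Seq(
  ("org.example" % "lib-core" % "1.0") -> ("org.example" % "lib-core-shaded" % "1.0")
)

// How pack resolves two jars of the same artifact: "latest" keeps the higher version,
// "exit" aborts the task with an error.
packDuplicateJarStrategy := "latest"
```

The check itself was then run as an ordinary sbt task, e.g. `> checkDuplicatedDependencies` from the sbt shell.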
98 changes: 0 additions & 98 deletions USAGE.md

This file was deleted.

110 changes: 0 additions & 110 deletions src/main/scala/xerial/sbt/Pack.scala
@@ -7,10 +7,6 @@

package xerial.sbt

import java.io.InputStream
import java.security.{DigestInputStream, MessageDigest}
import java.util.zip.ZipFile

import sbt._
import org.fusesource.scalate.TemplateEngine
import Keys._
@@ -70,9 +66,6 @@ object Pack extends sbt.Plugin with PackArchive {
val packJarNameConvention = SettingKey[String]("pack-jarname-convention", "default: (artifact name)-(version).jar; original: original JAR name; full: (organization).(artifact name)-(version).jar; no-version: (organization).(artifact name).jar")
val packDuplicateJarStrategy = SettingKey[String]("deal with duplicate jars. default to use latest version", "latest: use the jar with a higher version; exit: exit the task with error")

val checkDuplicatedExclude = settingKey[Seq[(ModuleID, ModuleID)]]("List of pair of modules whose duplicated dependencies are ignored, because they are known to be harmless.")
val checkDuplicatedDependencies = taskKey[Unit]("Checks there are no duplicated dependencies, incompatible between them.")

import complete.DefaultParsers._
private val targetFolderParser: complete.Parser[Option[String]] =
(Space ~> token(StringBasic, "(target folder)")).?.!!!("invalid input. please input target folder name")
@@ -302,109 +295,6 @@ object Pack extends sbt.Plugin with PackArchive {

out.log.info("done.")
distDir
},

checkDuplicatedExclude := Seq.empty,

checkDuplicatedDependencies := {
val log = streams.value.log

val dependentJars =
for {
(r: sbt.UpdateReport, projectRef) <- packUpdateReports.value
c <- r.configurations if c.configuration == "runtime"
m <- c.modules
(artifact, file) <- m.artifacts if !packExcludeArtifactTypes.value.contains(artifact.`type`)
} yield {
val mid = m.module
val me = ModuleEntry(mid.organization, mid.name, VersionString(mid.revision), artifact.name, artifact.classifier, file.getName, projectRef)
me -> file
}

def hash(is: InputStream) = {
val md = MessageDigest.getInstance("MD5")
val dis = new DigestInputStream(is, md)
Iterator.continually(dis.read()).takeWhile(_ >= 0).foreach{_ ⇒ ()}
md.digest()
}

def modID(m: ModuleEntry) = m.org % m.artifactName % m.revision.toString

val distinctDpJars = dependentJars
.groupBy(_._1.noVersionModuleName)
.map {
case (key, entries) if entries.groupBy(_._1.revision).size == 1 ⇒
val e0 = entries(0)
(modID(e0._1), e0._2)
case (key, entries) ⇒
val revisions = entries.groupBy(_._1.revision).map(_._1).toList.sorted
val latestRevision = revisions.last
packDuplicateJarStrategy.value match {
case "latest" =>
log.warn(s"Version conflict on $key. Using ${latestRevision} (found ${revisions.mkString(", ")})")
val entry = entries.filter(_._1.revision == latestRevision)(0)
(modID(entry._1), entry._2)
case "exit" =>
sys.error(s"Version conflict on $key (found ${revisions.mkString(", ")})")
case x =>
sys.error("Unknown duplicate JAR strategy '%s'".format(x))
}
}.par

val allClasses = distinctDpJars.map { case (mod, file) ⇒
import scala.collection.JavaConversions._
log debug s"Scanning $file"
val jar = new ZipFile(file)
val classes = try {
jar.entries
.filter { e ⇒ !e.isDirectory && e.getName.endsWith(".class") }
.toList
.map { e ⇒
val h = hash(jar.getInputStream(e))
//log debug s"${e.getName} ⇒ ${h.map(a ⇒ f"$a%02X").mkString}"
(e.getName, h)
}
} finally
jar.close()
(mod, classes)
}

val conflicts = for {
((mod1, hashes1), index) ← allClasses.zipWithIndex
others = allClasses.seq.view(index+1, allClasses.size).par
(file1, hash1) ← hashes1
(mod2, hashes2) ← others
if !checkDuplicatedExclude.value.exists{ case (m1, m2) ⇒
m1 == mod1 && m2 == mod2 || m2 == mod1 && m1 == mod2
}
(file2, hash2) ← hashes2
if file1 == file2 && !(hash1 sameElements hash2)
} yield {
//log debug mod+" "+mod2+" "+file1
(mod1, mod2, file1)
}

if (conflicts.size > 0) {
val groupedConflicts = conflicts.groupBy { case (mod1, mod2, file) ⇒
(mod1, mod2)
}.mapValues { _.map{ case (mod1, mod2, file) ⇒ file } }
groupedConflicts.foreach { case ((m1, m2), files) ⇒
val f = files.map{ "\n "+_.replaceFirst(".class$", "")}.mkString
println(s"Conflict between $m1 and $m2:"+f)
}

def toStr(m: ModuleID) = s""""${m.organization}" % "${m.name}" % "${m.revision}""""
val excludes = groupedConflicts.map{ case ((m1, m2), _) ⇒ s" ${toStr(m1)} -> ${toStr(m2)}" }.mkString(",\n")

println(s"""
|If you consider these conflicts are inoffensive, in order to ignore them, use:
|set checkDuplicatedExclude := Seq(
|$excludes
|)
""".stripMargin)
sys.error(s"Detected ${conflicts.size} conflict(s)")
} else
log info s"No conflicts detected, scanned ${dependentJars.size} jar files."
}
) ++ packArchiveSettings
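Stepping back from the diff: the core of the removed check hashes every `.class` entry in each runtime jar and reports entries that share a path but differ in content. Below is a self-contained sketch of that idea, written against Scala 2.13's `scala.jdk.CollectionConverters` rather than the deprecated `JavaConversions` used above; the object and method names are hypothetical, not part of sbt-pack.

```scala
import java.io.{File, InputStream}
import java.security.{DigestInputStream, MessageDigest}
import java.util.zip.ZipFile
import scala.jdk.CollectionConverters._

object DuplicateClassCheck {

  // Drain the stream through an MD5 digest and return the digest bytes.
  private def md5(is: InputStream): Seq[Byte] = {
    val md  = MessageDigest.getInstance("MD5")
    val dis = new DigestInputStream(is, md)
    try Iterator.continually(dis.read()).takeWhile(_ >= 0).foreach(_ => ())
    finally dis.close()
    md.digest().toSeq
  }

  // Map every .class entry in the jar to the MD5 hash of its bytes.
  def classHashes(jar: File): Map[String, Seq[Byte]] = {
    val zip = new ZipFile(jar)
    try {
      zip.entries.asScala
        .filter(e => !e.isDirectory && e.getName.endsWith(".class"))
        .map(e => e.getName -> md5(zip.getInputStream(e)))
        .toMap
    } finally zip.close()
  }

  // Class files present in both jars whose bytecode differs.
  def conflicts(a: File, b: File): Seq[String] = {
    val ha = classHashes(a)
    val hb = classHashes(b)
    (ha.keySet intersect hb.keySet).toSeq.sorted.filter(name => ha(name) != hb(name))
  }
}
```

Applied pairwise over the distinct runtime jars (after version de-duplication via `packDuplicateJarStrategy`), this is essentially what the conflict loop above computes.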


Four additional files were deleted (file names not shown in this view).

58 changes: 0 additions & 58 deletions src/sbt-test/sbt-pack/duplicated-classes/project/Build.scala

This file was deleted.

3 changes: 0 additions & 3 deletions src/sbt-test/sbt-pack/duplicated-classes/project/plugins.sbt

This file was deleted.

8 changes: 0 additions & 8 deletions src/sbt-test/sbt-pack/duplicated-classes/test

This file was deleted.
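The deleted `test` script's contents are not shown here. For illustration only: an sbt scripted test script in this layout is a plain-text file where `>` runs a command that must succeed and `->` one that must fail, so a check like this would typically be exercised along these lines (hypothetical contents, with made-up module IDs):

```
# the project's dependencies are known to conflict, so the bare check must fail
-> checkDuplicatedDependencies

# after excluding the known-harmless pair, the check must pass
> set checkDuplicatedExclude := Seq(("org.example" % "a" % "1.0") -> ("org.example" % "b" % "1.0"))
> checkDuplicatedDependencies
```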