Merge pull request #944 from coursier/topic/bootstrap-sources
Allow adding classifier artifacts in bootstraps
alexarchambault committed Oct 25, 2018
2 parents f846020 + d3bf181 commit a23ee41
Showing 12 changed files with 279 additions and 88 deletions.
43 changes: 23 additions & 20 deletions modules/cli/src/main/scala-2.12/coursier/cli/Bootstrap.scala
@@ -25,9 +25,10 @@ object Bootstrap extends CaseApp[BootstrapOptions] {
): Unit = {

val files = helper.fetch(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
sources = options.artifactOptions.sources,
javadoc = options.artifactOptions.javadoc,
default = options.artifactOptions.default0(options.options.common.classifier0),
artifactTypes = options.artifactOptions.artifactTypes(options.options.common.classifier0)
)

val log: String => Unit =
@@ -159,27 +160,28 @@ object Bootstrap extends CaseApp[BootstrapOptions] {

val isolatedDeps = options.options.isolated.isolatedDeps(options.options.common.resolutionOptions.scalaVersion)

val (_, isolatedArtifactFiles) =
options.options.isolated.targets.foldLeft((Vector.empty[String], Map.empty[String, (Seq[String], Seq[File])])) {
val (done, isolatedArtifactFiles) =
options.options.isolated.targets.foldLeft((Set.empty[String], Map.empty[String, (Seq[String], Seq[File])])) {
case ((done, acc), target) =>

// TODO Add non regression test checking that optional artifacts indeed land in the isolated loader URLs

val m = helper.fetchMap(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false),
sources = options.artifactOptions.sources,
javadoc = options.artifactOptions.javadoc,
default = options.artifactOptions.default0(options.options.common.classifier0),
artifactTypes = options.artifactOptions.artifactTypes(options.options.common.classifier0),
subset = isolatedDeps.getOrElse(target, Seq.empty).toSet
)

val (done0, subUrls, subFiles) =
if (options.options.standalone) {
val subFiles0 = m.values.toSeq
(done, Nil, subFiles0)
} else {
val filteredSubArtifacts = m.keys.toSeq.diff(done)
(done ++ filteredSubArtifacts, filteredSubArtifacts, Nil)
}
val m0 = m.filterKeys(url => !done(url))
val done0 = done ++ m0.keys

val (subUrls, subFiles) =
if (options.options.standalone)
(Nil, m0.values.toSeq)
else
(m0.keys.toSeq, Nil)

val updatedAcc = acc + (target -> (subUrls, subFiles))

@@ -221,7 +223,7 @@ object Bootstrap extends CaseApp[BootstrapOptions] {
outputZip.closeEntry()
}

putStringEntry("bootstrap-jar-urls", urls.mkString("\n"))
putStringEntry("bootstrap-jar-urls", urls.filterNot(done).mkString("\n"))

if (options.options.isolated.anyIsolatedDep) {
putStringEntry("bootstrap-isolation-ids", options.options.isolated.targets.mkString("\n"))
@@ -351,9 +353,10 @@ object Bootstrap extends CaseApp[BootstrapOptions] {

val (urls, files) =
helper.fetchMap(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
sources = options.artifactOptions.sources,
javadoc = options.artifactOptions.javadoc,
default = options.artifactOptions.default0(options.options.common.classifier0),
artifactTypes = options.artifactOptions.artifactTypes(options.options.common.classifier0)
).toList.foldLeft((List.empty[String], List.empty[File])){
case ((urls, files), (url, file)) =>
if (options.options.assembly || options.options.standalone) (urls, file :: files)
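The reworked fold above de-duplicates artifact URLs across isolated class loaders: each target only records URLs not already claimed by an earlier target, and the main bootstrap-jar-urls entry then drops anything an isolated loader already owns. Below is a minimal, self-contained sketch of that bookkeeping, not the actual Bootstrap code — the target names and URLs are made up, and plain strings stand in for the maps returned by helper.fetchMap.

object IsolatedDedupSketch {
  def main(args: Array[String]): Unit = {
    // URLs each isolated target resolves to (illustrative values only)
    val perTarget = Seq(
      "base"   -> Seq("https://repo/a.jar", "https://repo/b.jar"),
      "plugin" -> Seq("https://repo/b.jar", "https://repo/c.jar") // b.jar already claimed by "base"
    )

    val (done, isolated) =
      perTarget.foldLeft((Set.empty[String], Map.empty[String, Seq[String]])) {
        case ((done, acc), (target, urls)) =>
          val fresh = urls.filterNot(done) // same idea as m.filterKeys(url => !done(url))
          (done ++ fresh, acc + (target -> fresh))
      }

    // Main loader URLs, minus anything an isolated loader already owns,
    // mirroring urls.filterNot(done) in the bootstrap-jar-urls entry.
    val mainUrls = Seq("https://repo/c.jar", "https://repo/d.jar").filterNot(done)

    println(isolated) // base -> a.jar, b.jar; plugin -> c.jar
    println(mainUrls) // only d.jar is left for the main loader
  }
}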
12 changes: 6 additions & 6 deletions modules/cli/src/main/scala-2.12/coursier/cli/Fetch.scala
@@ -11,13 +11,13 @@ final class Fetch(options: FetchOptions, args: RemainingArgs) {

val helper = new Helper(options.common, args.all, ignoreErrors = options.artifactOptions.force)

val default = options.artifactOptions.default0(options.common.classifier0)

val files0 = helper.fetch(
sources = options.sources,
javadoc = options.javadoc,
artifactTypes = options.artifactOptions.artifactTypes(
options.sources || options.common.classifier0(Classifier.sources),
options.javadoc || options.common.classifier0(Classifier.javadoc)
)
sources = options.artifactOptions.sources,
javadoc = options.artifactOptions.javadoc,
default = default,
artifactTypes = options.artifactOptions.artifactTypes(options.common.classifier0)
)

}
65 changes: 51 additions & 14 deletions modules/cli/src/main/scala-2.12/coursier/cli/Helper.scala
@@ -564,6 +564,7 @@ class Helper(
def artifacts(
sources: Boolean,
javadoc: Boolean,
default: Boolean,
artifactTypes: Set[Type],
subset: Set[Dependency] = null
): Seq[Artifact] = {
@@ -587,7 +588,7 @@ class Helper(

val res0 = Option(subset).fold(res)(res.subset)

val artifacts0 = getDepArtifactsForClassifier(sources, javadoc, res0).map(t => (t._2, t._3))
val artifacts0 = getDepArtifactsForClassifier(sources, javadoc, default, res0).map(t => (t._2, t._3))

if (artifactTypes(Type("*")))
artifacts0.map(_._2)
@@ -598,12 +599,28 @@ class Helper(
}
}

private def getDepArtifactsForClassifier(sources: Boolean, javadoc: Boolean, res0: Resolution): Seq[(Dependency, Attributes, Artifact)] = {
private def getDepArtifactsForClassifier(
sources: Boolean,
javadoc: Boolean,
default: Boolean,
res0: Resolution
): Seq[(Dependency, Attributes, Artifact)] = {

val raw =
if (hasOverrideClassifiers(sources, javadoc))
//TODO: this function somehow gives duplicated things
res0.dependencyArtifacts(Some(overrideClassifiers(sources, javadoc).toVector.sorted))
else
if (hasOverrideClassifiers(sources, javadoc)) {
val classifiers = overrideClassifiers(sources, javadoc, default)

val baseArtifacts =
if (classifiers(Classifier("_")))
res0.dependencyArtifacts(None)
else
Nil

val classifierArtifacts =
res0.dependencyArtifacts(Some(classifiers.filter(_ != Classifier("_")).toVector.sorted))

baseArtifacts ++ classifierArtifacts
} else
res0.dependencyArtifacts(None)

raw.map {
@@ -617,12 +634,18 @@ class Helper(
}
}

private def overrideClassifiers(sources: Boolean, javadoc:Boolean): Set[Classifier] = {
private def overrideClassifiers(
sources: Boolean,
javadoc: Boolean,
default: Boolean
): Set[Classifier] = {
var classifiers = classifier0
if (sources)
classifiers = classifiers + Classifier.sources
if (javadoc)
classifiers = classifiers + Classifier.javadoc
if (default)
classifiers = classifiers + Classifier("_")
classifiers
}

@@ -633,11 +656,12 @@ class Helper(
def fetchMap(
sources: Boolean,
javadoc: Boolean,
default: Boolean,
artifactTypes: Set[Type],
subset: Set[Dependency] = null
): Map[String, File] = {

val artifacts0 = artifacts(sources, javadoc, artifactTypes, subset).distinct
val artifacts0 = artifacts(sources, javadoc, default, artifactTypes, subset).distinct

val logger =
if (common.verbosityLevel >= 0)
@@ -718,13 +742,13 @@ class Helper(
}

val depToArtifacts: Map[Dependency, Vector[(Attributes, Artifact)]] =
getDepArtifactsForClassifier(sources, javadoc, res).groupBy(_._1).mapValues(_.map(t => (t._2, t._3)).toVector)
getDepArtifactsForClassifier(sources, javadoc, default, res).groupBy(_._1).mapValues(_.map(t => (t._2, t._3)).toVector)


if (!jsonOutputFile.isEmpty) {
// TODO(wisechengyi): This is not exactly the root dependencies we are asking for on the command line, but it should be
// a strict super set.
val deps: Seq[Dependency] = Set(getDepArtifactsForClassifier(sources, javadoc, res).map(_._1): _*).toSeq
val deps: Seq[Dependency] = Set(getDepArtifactsForClassifier(sources, javadoc, default, res).map(_._1): _*).toSeq

// A map from requested org:name:version to reconciled org:name:version
val conflictResolutionForRoots: Map[String, String] = allDependencies.map({ dep =>
@@ -743,7 +767,18 @@ class Helper(
}

val jsonReq = JsonPrintRequirement(artifactToFile, depToArtifacts)
val roots = deps.toVector.map(JsonElem(_, artifacts, Option(jsonReq), res, printExclusions = common.verbosityLevel >= 1, excluded = false, colors = false, overrideClassifiers = overrideClassifiers(sources, javadoc)))
val roots = deps.toVector.map(d =>
JsonElem(
d,
artifacts,
Option(jsonReq),
res,
printExclusions = common.verbosityLevel >= 1,
excluded = false,
colors = false,
overrideClassifiers = overrideClassifiers(sources, javadoc, default)
)
)
val jsonStr = JsonReport(
roots,
conflictResolutionForRoots
@@ -763,11 +798,11 @@ class Helper(
def fetch(
sources: Boolean,
javadoc: Boolean,
default: Boolean,
artifactTypes: Set[Type],
subset: Set[Dependency] = null
): Seq[File] = {
fetchMap(sources, javadoc, artifactTypes, subset).values.toSeq
}
): Seq[File] =
fetchMap(sources, javadoc, default, artifactTypes, subset).values.toSeq

def contextLoader = Thread.currentThread().getContextClassLoader

@@ -792,6 +827,7 @@ class Helper(
val files0 = fetch(
sources = false,
javadoc = false,
default = true,
artifactTypes = artifactTypes
)

@@ -808,6 +844,7 @@ class Helper(
val isolatedFiles = fetch(
sources = false,
javadoc = false,
default = true,
artifactTypes = artifactTypes,
subset = isolatedDeps.getOrElse(target, Seq.empty).toSet
)
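The Helper changes above hinge on a "_" sentinel classifier: overrideClassifiers collects the requested classifiers and adds "_" when the default (unclassified) artifacts are wanted, and getDepArtifactsForClassifier then asks the resolution for the default artifacts and the classified ones separately before concatenating them. The sketch below models only that selection step, under stated assumptions — plain strings stand in for coursier.core.Classifier, and a made-up map replaces Resolution.dependencyArtifacts.

object ClassifierSelectionSketch {

  def overrideClassifiers(
    base: Set[String],   // stands in for common.classifier0 (the --classifier values)
    sources: Boolean,
    javadoc: Boolean,
    default: Boolean
  ): Set[String] = {
    var classifiers = base
    if (sources) classifiers += "sources"
    if (javadoc) classifiers += "javadoc"
    if (default) classifiers += "_" // sentinel for the default (unclassified) artifacts
    classifiers
  }

  // Stand-in for res0.dependencyArtifacts: None -> default artifacts,
  // Some(classifier) -> artifacts carrying that classifier.
  private val available: Map[Option[String], Seq[String]] = Map(
    None            -> Seq("lib.jar"),
    Some("sources") -> Seq("lib-sources.jar"),
    Some("javadoc") -> Seq("lib-javadoc.jar")
  )

  def select(classifiers: Set[String]): Seq[String] = {
    val base       = if (classifiers("_")) available(None) else Nil
    val classified = (classifiers - "_").toSeq.sorted.flatMap(c => available.getOrElse(Some(c), Nil))
    base ++ classified
  }

  def main(args: Array[String]): Unit = {
    println(select(overrideClassifiers(Set.empty, sources = true, javadoc = false, default = false)))
    // List(lib-sources.jar): only the classified artifacts
    println(select(overrideClassifiers(Set.empty, sources = true, javadoc = false, default = true)))
    // List(lib.jar, lib-sources.jar): "_" brings the default artifacts back in
  }
}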
@@ -65,7 +65,8 @@ object SparkSubmit extends CaseApp[SparkSubmitOptions] {
helper.fetch(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
default = true,
artifactTypes = options.artifactOptions.artifactTypes()
) ++ options.extraJars.map(new File(_))

val (scalaVersion, sparkVersion) =
@@ -93,7 +94,7 @@ object SparkSubmit extends CaseApp[SparkSubmitOptions] {
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
options.common,
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
options.artifactOptions.artifactTypes()
)

val extraConf =
@@ -115,7 +116,7 @@ object SparkSubmit extends CaseApp[SparkSubmitOptions] {
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
options.common,
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
options.artifactOptions.artifactTypes()
)

val (assembly, assemblyJars) = assemblyAndJarsOrError match {
@@ -191,7 +192,7 @@ object SparkSubmit extends CaseApp[SparkSubmitOptions] {
sparkVersion,
options.noDefaultSubmitDependencies,
options.submitDependencies.flatMap(_.split(",")).filter(_.nonEmpty),
options.artifactOptions.artifactTypes(sources = false, javadoc = false),
options.artifactOptions.artifactTypes(),
options.common
)

@@ -1,7 +1,7 @@
package coursier.cli.options

import caseapp.{ExtraName => Short, HelpMessage => Help, ValueDescription => Value, _}
import coursier.core.{Resolution, Type}
import coursier.core.{Classifier, Resolution, Type}

object ArtifactOptions {
def defaultArtifactTypes = Resolution.defaultTypes
@@ -11,14 +11,29 @@ object ArtifactOptions {
}

final case class ArtifactOptions(
@Help("Fetch source artifacts")
sources: Boolean = false,
@Help("Fetch javadoc artifacts")
javadoc: Boolean = false,
@Help("Fetch default artifacts (default: false if --sources or --javadoc or --classifier are passed, true else)")
default: Option[Boolean] = None,
@Help("Artifact types that should be retained (e.g. jar, src, doc, etc.) - defaults to jar,bundle")
@Value("type1,type2,...")
@Short("A")
artifactType: List[String] = Nil,
@Help("Fetch artifacts even if the resolution is errored")
force: Boolean = false
) {
def artifactTypes(sources: Boolean, javadoc: Boolean): Set[Type] = {

def default0(classifiers: Set[Classifier]): Boolean =
default.getOrElse {
(!sources && !javadoc && classifiers.isEmpty) ||
classifiers(Classifier("_"))
}

def artifactTypes(): Set[Type] =
artifactTypes(Set())
def artifactTypes(classifiers: Set[Classifier]): Set[Type] = {

val types0 = artifactType
.flatMap(_.split(','))
@@ -27,10 +42,10 @@ final case class ArtifactOptions(
.toSet

if (types0.isEmpty) {
if (sources || javadoc)
Some(Type.source).filter(_ => sources).toSet ++ Some(Type.doc).filter(_ => javadoc)
else
ArtifactOptions.defaultArtifactTypes
val sourceTypes = Some(Type.source).filter(_ => sources || classifiers(Classifier.sources)).toSet
val javadocTypes = Some(Type.doc).filter(_ => javadoc || classifiers(Classifier.javadoc)).toSet
val defaultTypes = if (default0(classifiers)) ArtifactOptions.defaultArtifactTypes else Set()
sourceTypes ++ javadocTypes ++ defaultTypes
} else if (types0(Type("*")))
Set(Type("*"))
else
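The new ArtifactOptions logic above answers two questions: whether the default (unclassified) artifacts are kept at all (default0), and which artifact types survive filtering when no explicit artifact type is passed (the types0.isEmpty branch of artifactTypes). A minimal sketch of those two rules follows — plain strings replace coursier.core.Type and Classifier, Set("jar", "bundle") stands in for Resolution.defaultTypes, and the example values at the end are illustrative only.

object ArtifactTypesSketch {

  val defaultArtifactTypes = Set("jar", "bundle") // stand-in for Resolution.defaultTypes

  // Default artifacts stay in unless --sources / --javadoc / --classifier narrow
  // the selection; --default or the "_" classifier asks for them back explicitly.
  def default0(sources: Boolean, javadoc: Boolean, default: Option[Boolean], classifiers: Set[String]): Boolean =
    default.getOrElse {
      (!sources && !javadoc && classifiers.isEmpty) || classifiers("_")
    }

  def artifactTypes(sources: Boolean, javadoc: Boolean, default: Option[Boolean], classifiers: Set[String]): Set[String] = {
    val sourceTypes  = if (sources || classifiers("sources")) Set("src") else Set.empty[String]
    val javadocTypes = if (javadoc || classifiers("javadoc")) Set("doc") else Set.empty[String]
    val defaultTypes = if (default0(sources, javadoc, default, classifiers)) defaultArtifactTypes else Set.empty[String]
    sourceTypes ++ javadocTypes ++ defaultTypes
  }

  def main(args: Array[String]): Unit = {
    println(artifactTypes(sources = false, javadoc = false, default = None, classifiers = Set.empty)) // jar, bundle
    println(artifactTypes(sources = true,  javadoc = false, default = None, classifiers = Set.empty)) // src only
    println(artifactTypes(sources = true,  javadoc = false, default = None, classifiers = Set("_")))  // src, jar, bundle
  }
}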
@@ -3,12 +3,6 @@ package coursier.cli.options
import caseapp.{ HelpMessage => Help, ExtraName => Short, _ }

final case class FetchOptions(
@Help("Fetch source artifacts")
@Short("S")
sources: Boolean = false,
@Help("Fetch javadoc artifacts")
@Short("D")
javadoc: Boolean = false,
@Help("Print java -cp compatible output")
@Short("p")
classpath: Boolean = false,
@@ -82,7 +82,7 @@ object SparkAssembly {

val helper = sparkJarsHelper(scalaVersion, sparkVersion, yarnVersion, default, extraDependencies, options)

helper.fetch(sources = false, javadoc = false, artifactTypes = artifactTypes)
helper.fetch(sources = false, javadoc = false, default = true, artifactTypes = artifactTypes)
}

def spark(
@@ -99,8 +99,8 @@ object SparkAssembly {

val helper = sparkJarsHelper(scalaVersion, sparkVersion, yarnVersion, default, extraDependencies, options)

val artifacts = helper.artifacts(sources = false, javadoc = false, artifactTypes = artifactTypes)
val jars = helper.fetch(sources = false, javadoc = false, artifactTypes = artifactTypes)
val artifacts = helper.artifacts(sources = false, javadoc = false, default = true, artifactTypes = artifactTypes)
val jars = helper.fetch(sources = false, javadoc = false, default = true, artifactTypes = artifactTypes)

val checksums = artifacts.map { a =>
val f = a.checksumUrls.get("SHA-1") match {
@@ -51,6 +51,7 @@ object Submit {
helper.fetch(
sources = false,
javadoc = false,
default = true,
artifactTypes = artifactTypes
) ++ extraCp
}
