* move Environment classes to util/env module

* move TrapExit, SelectMainClass to run module
* rearrange some compilation-related code
* move Jetty-related code to web module
commit d0fa1eb461a616a34aa021b5a7e60849c772cf72 1 parent 6f3c699
@harrah authored
Showing with 534 additions and 2,072 deletions.
  1. +4 −0 .gitignore
  2. +1 −1  LICENSE
  3. +1 −1  NOTICE
  4. 0  {sbt/src/main/scala/sbt → compile}/CompileOptions.scala
  5. +15 −7 compile/JavaCompiler.scala
  6. +4 −2 compile/inc/CompileSetup.scala
  7. +1 −1  compile/interface/API.scala
  8. +1 −1  compile/persist/AnalysisFormats.scala
  9. +1 −38 ivy/IvyInterface.scala
  10. +28 −12 main/AggressiveCompile.scala
  11. +4 −2 main/AggressiveCompiler.scala
  12. +0 −21 project/build/SbtProject.scala
  13. +41 −6 project/build/XSbt.scala
  14. 0  {sbt/src/main/scala/sbt → run}/Fork.scala
  15. +6 −74 {sbt/src/main/scala/sbt → run}/Run.scala
  16. +6 −8 {sbt/src/main/scala/sbt/impl → run}/SelectMainClass.scala
  17. +2 −5 {sbt/src/main/scala/sbt → run}/TrapExit.scala
  18. +0 −3  sbt/.gitignore
  19. +0 −228 sbt/src/main/scala/sbt/Analysis.scala
  20. +0 −101 sbt/src/main/scala/sbt/AnalysisCallback.scala
  21. +0 −96 sbt/src/main/scala/sbt/Compile.scala
  22. +0 −394 sbt/src/main/scala/sbt/Conditional.scala
  23. +2 −6 sbt/src/main/scala/sbt/Credentials.scala
  24. +38 −0 sbt/src/main/scala/sbt/Doc.scala
  25. +0 −43 sbt/src/main/scala/sbt/ExitHook.scala
  26. +0 −126 sbt/src/main/scala/sbt/Logger.scala
  27. +0 −494 sbt/src/main/scala/sbt/ParallelRunner.scala
  28. +79 −0 sbt/src/main/scala/sbt/ProjectConsole.scala
  29. +0 −1  sbt/src/main/scala/sbt/TaskManager.scala
  30. +0 −67 sbt/src/main/scala/sbt/impl/MapUtilities.scala
  31. +0 −123 sbt/src/main/scala/sbt/wrap/Wrappers.scala
  32. +15 −12 util/classfile/Analyze.scala
  33. +0 −25 util/classfile/ClassfileLogger.scala
  34. +1 −1  util/classpath/ClasspathUtilities.scala
  35. +5 −4 {sbt/src/main/scala/sbt → util/collection}/Dag.scala
  36. +3 −3 {sbt/src/test/scala/sbt → util/collection/src/test/scala}/DagSpecification.scala
  37. +32 −0 util/control/ExitHook.scala
  38. +12 −137 sbt/src/main/scala/sbt/Environment.scala → util/env/BasicEnvironment.scala
  39. +68 −0 util/env/Environment.scala
  40. +4 −4 {sbt/src/main/scala/sbt → util/env}/Format.scala
  41. +22 −0 util/env/LazyVar.scala
  42. +43 −0 util/env/MapIO.scala
  43. +46 −0 util/env/PropertyResolution.scala
  44. +1 −1  {sbt/src/main/scala/sbt → util/env}/Version.scala
  45. 0  {sbt/src/test/scala/sbt → util/env/src/test/scala}/EnvironmentSpecification.scala
  46. +7 −0 util/io/IO.scala
  47. +2 −0  util/io/Path.scala
  48. +19 −16 {sbt/src/main/scala/sbt → util/io}/Resources.scala
  49. +3 −0  util/process/NOTICE
  50. +6 −2 {sbt/src/main/scala/sbt/jetty → web}/LazyJettyRun.scala.templ
  51. +3 −0  web/NOTICE
  52. +8 −6 {sbt/src/main/scala/sbt → web}/WebApp.scala
  53. 0  {sbt/src/main/scala/sbt/jetty → web}/jetty6.imports
  54. 0  {sbt/src/main/scala/sbt/jetty → web}/jetty7.imports
4 .gitignore
@@ -0,0 +1,4 @@
+LazyJettyRun6.scala
+LazyJettyRun7.scala
+project/plugins/project/
+interface/src/main/resources/xsbt.version.properties
2  LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2008, 2009, 2010 Mark Harrah, Tony Sloane, Jason Zaugg
+Copyright (c) 2008, 2009, 2010 Steven Blundy, Josh Cough, Mark Harrah, Stuart Roebuck, Tony Sloane, Vesa Vilhonen, Jason Zaugg
All rights reserved.
Redistribution and use in source and binary forms, with or without
2  NOTICE
@@ -1,4 +1,4 @@
-Simple Build Tool (xsbt components other than sbt/)
+Simple Build Tool
Copyright 2008, 2009, 2010 Mark Harrah, Jason Zaugg
Licensed under BSD-style license (see LICENSE)
0  sbt/src/main/scala/sbt/CompileOptions.scala → compile/CompileOptions.scala
File renamed without changes
22 compile/JavaCompiler.scala
@@ -12,6 +12,8 @@ trait JavaCompiler
}
object JavaCompiler
{
+ type Fork = (Seq[String], Logger) => Int
+
def construct(f: (Seq[String], Logger) => Int, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler =
new JavaCompiler {
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) {
@@ -24,20 +26,26 @@ object JavaCompiler
if( code != 0 ) throw new CompileFailed(arguments.toArray, "javac returned nonzero exit code")
}
}
- def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler = construct(directOrForkJavac, cp, scalaInstance)
- def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler = construct(directJavac, cp, scalaInstance)
- def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler = construct(forkJavac, cp, scalaInstance)
+ def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaCompiler =
+ construct(directOrForkJavac, cp, scalaInstance)
+
+ def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaCompiler =
+ construct(directJavac, cp, scalaInstance)
+
+ def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaCompiler =
+ construct(forkJavac, cp, scalaInstance)
- val directOrForkJavac = (arguments: Seq[String], log: Logger) =>
+ def directOrForkJavac(implicit doFork: Fork) = (arguments: Seq[String], log: Logger) =>
try { directJavac(arguments, log) }
catch { case e: ClassNotFoundException =>
log.debug("com.sun.tools.javac.Main not found; forking javac instead")
- forkJavac(arguments, log)
+ forkJavac(doFork)(arguments, log)
}
- val forkJavac = (arguments: Seq[String], log: Logger) =>
+ /** `fork` should be a function that forks javac with the provided arguments and sends output to the given Logger.*/
+ def forkJavac(implicit doFork: Fork) = (arguments: Seq[String], log: Logger) =>
{
- def externalJavac(argFile: File) = Process("javac", ("@" + normalizeSlash(argFile.getAbsolutePath)) :: Nil) ! log
+ def externalJavac(argFile: File) = doFork(("@" + normalizeSlash(argFile.getAbsolutePath)) :: Nil, log)
withArgumentFile(arguments)(externalJavac)
}
val directJavac = (arguments: Seq[String], log: Logger) =>
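The JavaCompiler change above replaces the hard-wired forkJavac with an implicit Fork function, so the caller decides how javac is launched. A minimal sketch of supplying that function, patterned on the call site in main/AggressiveCompiler.scala later in this commit (compiler, log, and sbt's Process API are assumed to be in scope):

    // The Fork function runs javac with the given arguments and sends output to the Logger.
    implicit val doFork: JavaCompiler.Fork =
      (args: Seq[String], log: Logger) => Process("javac", args) ! log

    // directOrFork picks up the implicit: it uses com.sun.tools.javac.Main when
    // available and falls back to the forked process otherwise.
    val javac: JavaCompiler = JavaCompiler.directOrFork(compiler.cp, compiler.scalaInstance)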
6 compile/inc/CompileSetup.scala
@@ -15,7 +15,7 @@ object CompileOrder extends Enumeration
// We cannot require an implicit parameter Equiv[Seq[String]] to construct Equiv[CompileSetup]
// because complexity(Equiv[Seq[String]]) > complexity(Equiv[CompileSetup])
// (6 > 4)
-final class CompileOptions(val options: Seq[String])
+final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String])
final class CompileSetup(val outputDirectory: File, val options: CompileOptions, val compilerVersion: String, val order: CompileOrder.Value)
object CompileSetup
@@ -32,7 +32,9 @@ object CompileSetup
def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile
}
implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] {
- def equiv(a: CompileOptions, b: CompileOptions) = a.options sameElements b.options
+ def equiv(a: CompileOptions, b: CompileOptions) =
+ (a.options sameElements b.options) &&
+ (a.javacOptions sameElements b.javacOptions)
}
implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] {
def equiv(a: String, b: String) = a == b
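With javacOptions now part of CompileOptions, the setup comparison above treats a change in either compiler's flags as grounds for recompilation. A small illustrative check using the names from this diff (the option values are made up):

    // Two setups that differ only in javac flags are no longer considered equivalent.
    val previous = new CompileOptions(Seq("-deprecation"), Seq("-g"))
    val current  = new CompileOptions(Seq("-deprecation"), Seq("-g:none"))
    CompileSetup.equivOpts.equiv(previous, current)   // false: javacOptions differ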
2  compile/interface/API.scala
@@ -300,7 +300,7 @@ final class API(val global: Global, val callback: xsbti.AnalysisCallback) extend
}
def isTopLevel(sym: Symbol): Boolean =
(sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic &&
- !sym.hasFlag(Flags.SYNTHETIC)// && !sym.hasFlag(Flags.JAVA)
+ !sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA)
}
// In 2.8, attributes is renamed to annotations
2  compile/persist/AnalysisFormats.scala
@@ -42,7 +42,7 @@ object AnalysisFormats
implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s))
// can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions]
implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] =
- wrap[CompileOptions, Seq[String]](_.options, os => new CompileOptions(os))(seqFormat[String])
+ wrap[CompileOptions, (Seq[String],Seq[String])](co => (co.options, co.javacOptions), os => new CompileOptions(os._1, os._2))
implicit val orderFormat: Format[CompileOrder.Value] = enumerationFormat(CompileOrder)
implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x)
39 ivy/IvyInterface.scala
@@ -365,41 +365,4 @@ object ModuleConfiguration
{
def apply(org: String, resolver: Resolver): ModuleConfiguration = apply(org, "*", "*", resolver)
def apply(org: String, name: String, resolver: Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver)
-}
-/*
-object Credentials
-{
- /** Add the provided credentials to Ivy's credentials cache.*/
- def add(realm: String, host: String, userName: String, passwd: String): Unit =
- CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
- /** Load credentials from the given file into Ivy's credentials cache.*/
- def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log)
- /** Load credentials from the given file into Ivy's credentials cache.*/
- def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log)
- /** Load credentials from the given file into Ivy's credentials cache.*/
- def apply(path: Path, log: Logger)
- {
- val msg =
- if(path.exists)
- {
- val properties = new scala.collection.mutable.HashMap[String, String]
- def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path)
-
- impl.MapUtilities.read(properties, path, log) orElse
- {
- List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
- {
- case (Nil, List(realm, host, user, pass)) => add(realm, host, user, pass); None
- case (errors, _) => Some(errors.mkString("\n"))
- }
- }
- }
- else
- Some("Credentials file " + path + " does not exist")
- msg.foreach(x => log.warn(x))
- }
- private[this] val RealmKeys = List("realm")
- private[this] val HostKeys = List("host", "hostname")
- private[this] val UserKeys = List("user", "user.name", "username")
- private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd")
-}*/
+}
40 main/AggressiveCompile.scala
@@ -6,36 +6,38 @@ package sbt
import inc._
import java.io.File
- import sbt.compile.{AnalyzingCompiler, CompilerArguments}
+ import compile.{AnalyzingCompiler, CompilerArguments, JavaCompiler}
+ import classpath.ClasspathUtilities
+ import classfile.Analyze
import xsbti.api.Source
import xsbti.AnalysisCallback
import CompileSetup._
import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat }
-final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File], val previousAnalysis: Analysis,
- val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val getAnalysis: File => Option[Analysis],
- val maxErrors: Int, val compiler: AnalyzingCompiler)
+final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File], val javaSrcBases: Seq[File],
+ val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val getAnalysis: File => Option[Analysis],
+ val maxErrors: Int, val compiler: AnalyzingCompiler, val javac: JavaCompiler)
class AggressiveCompile(cacheDirectory: File)
{
- def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], compiler: AnalyzingCompiler, log: Logger): Analysis =
+ def apply(compiler: AnalyzingCompiler, javac: JavaCompiler, sources: Seq[File], classpath: Seq[File], outputDirectory: File, javaSrcBases: Seq[File] = Nil, options: Seq[String] = Nil, javacOptions: Seq[String] = Nil, maxErrors: Int = 100)(implicit log: Logger): Analysis =
{
- val setup = new CompileSetup(outputDirectory, new CompileOptions(options), compiler.scalaInstance.actualVersion, CompileOrder.Mixed)
- compile1(sources, classpath, setup, store, Map.empty, compiler, log)
+ val setup = new CompileSetup(outputDirectory, new CompileOptions(options, javacOptions), compiler.scalaInstance.actualVersion, CompileOrder.Mixed)
+ compile1(sources, classpath, javaSrcBases, setup, store, Map.empty, compiler, javac, maxErrors)
}
def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
args.bootClasspath ++ classpath
- def compile1(sources: Seq[File], classpath: Seq[File], setup: CompileSetup, store: AnalysisStore, analysis: Map[File, Analysis], compiler: AnalyzingCompiler, log: Logger): Analysis =
+ def compile1(sources: Seq[File], classpath: Seq[File], javaSrcBases: Seq[File], setup: CompileSetup, store: AnalysisStore, analysis: Map[File, Analysis], compiler: AnalyzingCompiler, javac: JavaCompiler, maxErrors: Int)(implicit log: Logger): Analysis =
{
val (previousAnalysis, previousSetup) = extract(store.get())
- val config = new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup, analysis.get _, 100, compiler)
- val result = compile2(config, log)
+ val config = new CompileConfiguration(sources, classpath, javaSrcBases, previousAnalysis, previousSetup, setup, analysis.get _, maxErrors, compiler, javac)
+ val result = compile2(config)
store.set(result, setup)
result
}
- def compile2(config: CompileConfiguration, log: Logger)(implicit equiv: Equiv[CompileSetup]): Analysis =
+ def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): Analysis =
{
import config._
import currentSetup._
@@ -46,11 +48,24 @@ class AggressiveCompile(cacheDirectory: File)
val apiOrEmpty = (api: Either[Boolean, Source]) => api.right.toOption.getOrElse( APIs.emptyAPI )
val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
val externalAPI = apiOrEmpty compose Locate.value(withBootclasspath(cArgs, classpath), getAPI)
+
val compile0 = (include: Set[File], callback: AnalysisCallback) => {
IO.createDirectory(outputDirectory)
- val arguments = cArgs(sources.filter(include), classpath, outputDirectory, options.options)
+ val incSrc = sources.filter(include)
+ val arguments = cArgs(incSrc, classpath, outputDirectory, options.options)
compiler.compile(arguments, callback, maxErrors, log)
+ val javaSrcs = incSrc.filter(javaOnly)
+ if(!javaSrcs.isEmpty)
+ {
+ import Path._
+ val loader = ClasspathUtilities.toLoader(classpath, compiler.scalaInstance.loader)
+ // TODO: Analyze needs to generate API from Java class files
+ Analyze(outputDirectory, javaSrcs, javaSrcBases, log)(callback, loader) {
+ javac(javaSrcs, classpath, outputDirectory, options.javacOptions)
+ }
+ }
}
+
val sourcesSet = sources.toSet
val analysis = previousSetup match {
case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
@@ -64,6 +79,7 @@ class AggressiveCompile(cacheDirectory: File)
case Some((an, setup)) => (an, Some(setup))
case None => (Analysis.Empty, None)
}
+ def javaOnly(f: File) = f.getName.endsWith(".java")
import AnalysisFormats._
// The following intermediate definitions are needed because of Scala's implicit parameter rules.
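The reworked entry point above now takes both compilers, the Java source base directories, and separate scalac/javac options; after the Scala pass it runs javac on the .java sources and feeds the class files through Analyze. A sketch of calling it, mirroring main/AggressiveCompiler.scala below (the compilers, file collections, and log are assumed to be prepared by the caller):

    // Assumed in scope: scalac: AnalyzingCompiler, javac: JavaCompiler,
    // sources/classpath/javaBaseDirs: Seq[File], outputDirectory/cacheDirectory: File, log: Logger.
    val agg = new AggressiveCompile(cacheDirectory)
    val analysis: Analysis =
      agg(scalac, javac, sources, classpath, outputDirectory,
          javaSrcBases = javaBaseDirs,
          options = Seq("-deprecation"),     // scalac options
          javacOptions = Seq("-g"))(log)     // maxErrors keeps its default of 100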
6 main/AggressiveCompiler.scala
@@ -3,7 +3,7 @@
*/
package sbt
- import sbt.compile.AnalyzingCompiler
+ import sbt.compile.{AnalyzingCompiler, JavaCompiler}
import java.io.File
import System.{currentTimeMillis => now}
import Path._
@@ -25,6 +25,7 @@ class AggressiveCompiler extends xsbti.AppMain
val launcher = app.scalaProvider.launcher
val sources = cwd ** ("*.scala" | "*.java")
val target = cwd / "target"
+ val javaBaseDirs = cwd :: Nil
val outputDirectory = target / "classes"
val classpath = outputDirectory +++ (cwd * "*.jar") +++(cwd * (-"project")).descendentsExcept( "*.jar", "project" || HiddenFileFilter)
val cacheDirectory = target / "cache"
@@ -32,11 +33,12 @@ class AggressiveCompiler extends xsbti.AppMain
val log = new ConsoleLogger with Logger with sbt.IvyLogger
val componentManager = new ComponentManager(launcher.globalLock, app.components, log)
val compiler = new AnalyzingCompiler(ScalaInstance(args.head, launcher), componentManager, log)
+ val javac = JavaCompiler.directOrFork(compiler.cp, compiler.scalaInstance)( (args: Seq[String], log: Logger) => Process("javac", args) ! log )
val agg = new AggressiveCompile(cacheDirectory)
try
{
- val analysis = agg(sources.get.toSeq, classpath.get.toSeq, outputDirectory, options, compiler, log)
+ val analysis = agg(compiler, javac, sources.get.toSeq, classpath.get.toSeq, outputDirectory, javaBaseDirs, options)(log)
processResult(analysis, command)
true
}
21 project/build/SbtProject.scala
@@ -12,7 +12,6 @@ abstract class SbtProject(info: ProjectInfo) extends DefaultProject(info) with t
def extraResources = descendents(info.projectPath / "licenses", "*") +++ "LICENSE" +++ "NOTICE"
override def mainResources = super.mainResources +++ extraResources
- override def testOptions = ExcludeTests("sbt.ReflectiveSpecification" :: Nil) :: super.testOptions.toList
override def normalizedName = "sbt"
override def managedStyle = ManagedStyle.Ivy
@@ -41,10 +40,6 @@ abstract class SbtProject(info: ProjectInfo) extends DefaultProject(info) with t
val ivy = "org.apache.ivy" % "ivy" % "2.1.0" intransitive()
val jsch = "com.jcraft" % "jsch" % "0.1.31" intransitive()
- val jetty = "org.mortbay.jetty" % "jetty" % "6.1.14" % "optional"
-
- val jetty7server = "org.eclipse.jetty" % "jetty-server" % "7.0.1.v20091125" % "optional"
- val jetty7webapp = "org.eclipse.jetty" % "jetty-webapp" % "7.0.1.v20091125" % "optional"
val testInterface = "org.scala-tools.testing" % "test-interface" % "0.5"
@@ -64,20 +59,4 @@ abstract class SbtProject(info: ProjectInfo) extends DefaultProject(info) with t
override def packageSrcJar= defaultJarPath("-sources.jar")
/*val sourceArtifact = Artifact(artifactID, "src", "jar", "sources")
val docsArtifact = Artifact(artifactID, "doc", "jar", "javadoc")*/
-
- /* For generating JettyRun for Jetty 6 and 7. The only difference is the imports, but the file has to be compiled against each set of imports. */
- override def compileAction = super.compileAction dependsOn (generateJettyRun6, generateJettyRun7)
- def jettySrcDir = mainScalaSourcePath / "sbt" / "jetty"
- def jettyTemplate = jettySrcDir / "LazyJettyRun.scala.templ"
-
- lazy val generateJettyRun6 = generateJettyRun(jettyTemplate, jettySrcDir / "LazyJettyRun6.scala", "6", jettySrcDir / "jetty6.imports")
- lazy val generateJettyRun7 = generateJettyRun(jettyTemplate, jettySrcDir / "LazyJettyRun7.scala", "7", jettySrcDir / "jetty7.imports")
- def generateJettyRun(in: Path, out: Path, version: String, importsPath: Path) =
- task
- {
- (for(template <- FileUtilities.readString(in asFile, log).right; imports <- FileUtilities.readString(importsPath asFile, log).right) yield
- FileUtilities.write(out asFile, processJettyTemplate(template, version, imports), log).toLeft(()) ).left.toOption
- }
- def processJettyTemplate(template: String, version: String, imports: String): String =
- template.replaceAll("""\Q${jetty.version}\E""", version).replaceAll("""\Q${jetty.imports}\E""", imports)
}
47 project/build/XSbt.scala
@@ -17,17 +17,20 @@ class XSbt(info: ProjectInfo) extends ParentProject(info) with NoCrossPaths
val collectionSub = testedBase(utilPath / "collection", "Collections")
val ioSub = testedBase(utilPath / "io", "IO", controlSub)
val classpathSub = baseProject(utilPath / "classpath", "Classpath", launchInterfaceSub, ioSub)
- val classfileSub = testedBase(utilPath / "classfile", "Classfile", ioSub, interfaceSub)
val completeSub = testedBase(utilPath / "complete", "Completion", ioSub)
val logSub = project(utilPath / "log", "Logging", new LogProject(_), interfaceSub)
+ val classfileSub = testedBase(utilPath / "classfile", "Classfile", ioSub, interfaceSub, logSub)
val datatypeSub = baseProject(utilPath /"datatype", "Datatype Generator", ioSub)
- val processSub = project(utilPath /"process", "Process", new Base(_) with TestWithIO, ioSub, logSub)
+ val processSub = project(utilPath / "process", "Process", new Base(_) with TestWithIO, ioSub, logSub)
+ val envSub= baseProject(utilPath / "env", "Properties", ioSub, logSub, classpathSub)
// intermediate-level modules
val ivySub = project("ivy", "Ivy", new IvyProject(_), interfaceSub, launchInterfaceSub, logSub)
val testingSub = project("testing", "Testing", new TestingProject(_), ioSub, classpathSub, logSub)
val taskSub = testedBase(tasksPath, "Tasks", controlSub, collectionSub)
val cacheSub = project(cachePath, "Cache", new CacheProject(_), ioSub, collectionSub)
+ val webappSub = project("web", "Web App", new WebAppProject(_), ioSub, logSub, classpathSub, controlSub)
+ val runSub = baseProject("run", "Run", ioSub, logSub, classpathSub, processSub)
// compilation/discovery related modules
val compileInterfaceSub = project(compilePath / "interface", "Compiler Interface", new CompilerInterfaceProject(_), interfaceSub)
@@ -35,16 +38,19 @@ class XSbt(info: ProjectInfo) extends ParentProject(info) with NoCrossPaths
val discoverySub = testedBase(compilePath / "discover", "Discovery", compileIncrementalSub, apiSub)
val compilePersistSub = project(compilePath / "persist", "Persist", new PersistProject(_), compileIncrementalSub, apiSub)
val compilerSub = project(compilePath, "Compile", new CompileProject(_),
- launchInterfaceSub, interfaceSub, ivySub, ioSub, classpathSub, compileInterfaceSub, logSub, processSub)
+ launchInterfaceSub, interfaceSub, ivySub, ioSub, classpathSub, compileInterfaceSub, logSub)
- val altCompilerSub = baseProject("main", "Alternate Compiler Test", compileIncrementalSub, compilerSub, ioSub, logSub, discoverySub, compilePersistSub)
+ val altCompilerSub = baseProject("main", "Alternate Compiler Test",
+ classfileSub, compileIncrementalSub, compilerSub, ioSub, logSub, discoverySub, compilePersistSub, processSub)
- /** following are not updated for 2.8 or 0.9 */
+ /** following modules are not updated for 2.8 or 0.9 */
val testSub = project("scripted", "Test", new TestProject(_), ioSub)
val trackingSub = baseProject(cachePath / "tracking", "Tracking", cacheSub)
- val sbtSub = project(sbtPath, "Simple Build Tool", new SbtProject(_) {}, compilerSub, launchInterfaceSub, testingSub, cacheSub, taskSub)
+ val sbtSub = project(sbtPath, "Simple Build Tool", new SbtProject(_) {},
+ compilerSub, launchInterfaceSub, testingSub, cacheSub, taskSub)
+
val installerSub = project(sbtPath / "install", "Installer", new InstallerProject(_) {}, sbtSub)
lazy val dist = task { None } dependsOn(launchSub.proguard, sbtSub.publishLocal, installerSub.publishLocal)
@@ -93,6 +99,35 @@ class XSbt(info: ProjectInfo) extends ParentProject(info) with NoCrossPaths
override def deliverProjectDependencies = Nil
}
}
+ class WebAppProject(info: ProjectInfo) extends Base(info)
+ {
+ val jetty = "org.mortbay.jetty" % "jetty" % "6.1.14" % "optional"
+
+ val jetty7server = "org.eclipse.jetty" % "jetty-server" % "7.0.1.v20091125" % "optional"
+ val jetty7webapp = "org.eclipse.jetty" % "jetty-webapp" % "7.0.1.v20091125" % "optional"
+
+ val optional = Configurations.Optional
+
+ /* For generating JettyRun for Jetty 6 and 7. The only difference is the imports, but the file has to be compiled against each set of imports. */
+ override def compileAction = super.compileAction dependsOn (generateJettyRun6, generateJettyRun7)
+ def jettySrcDir = info.projectPath
+ def jettyTemplate = jettySrcDir / "LazyJettyRun.scala.templ"
+
+ lazy val generateJettyRun6 = generateJettyRunN("6")
+ lazy val generateJettyRun7 = generateJettyRunN("7")
+
+ def generateJettyRunN(n: String) =
+ generateJettyRun(jettyTemplate, jettySrcDir / ("LazyJettyRun" + n + ".scala"), n, jettySrcDir / ("jetty" + n + ".imports"))
+
+ def generateJettyRun(in: Path, out: Path, version: String, importsPath: Path) =
+ task
+ {
+ (for(template <- FileUtilities.readString(in asFile, log).right; imports <- FileUtilities.readString(importsPath asFile, log).right) yield
+ FileUtilities.write(out asFile, processJettyTemplate(template, version, imports), log).toLeft(()) ).left.toOption
+ }
+ def processJettyTemplate(template: String, version: String, imports: String): String =
+ template.replaceAll("""\Q${jetty.version}\E""", version).replaceAll("""\Q${jetty.imports}\E""", imports)
+ }
trait TestDependencies extends Project
{
val sc = "org.scala-tools.testing" %% "scalacheck" % "1.7" % "test"
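The WebAppProject above regenerates LazyJettyRun6.scala and LazyJettyRun7.scala from a single template, since only the imports differ between Jetty 6 and 7. A sketch of the substitution that processJettyTemplate performs (the template line and import below are illustrative, not the real contents of LazyJettyRun.scala.templ or the jettyN.imports files):

    val template = "class LazyJettyRun${jetty.version} { ${jetty.imports} }"
    processJettyTemplate(template, "7", "import org.eclipse.jetty.server.Server")
    // => "class LazyJettyRun7 { import org.eclipse.jetty.server.Server }"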
0  sbt/src/main/scala/sbt/Fork.scala → run/Fork.scala
File renamed without changes
80 sbt/src/main/scala/sbt/Run.scala → run/Run.scala
@@ -3,15 +3,11 @@
*/
package sbt
-import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings}
-import scala.tools.nsc.interpreter.InteractiveReader
-import scala.tools.nsc.reporters.Reporter
-import scala.tools.nsc.util.ClassPath
-
import java.io.File
import java.net.{URL, URLClassLoader}
import java.lang.reflect.{Method, Modifier}
import Modifier.{isPublic, isStatic}
+import classpath.ClasspathUtilities
trait ScalaRun
{
@@ -37,7 +33,7 @@ class ForkRun(config: ForkScalaRun) extends ScalaRun
Some("Nonzero exit code returned from " + label + ": " + exitCode)
}
}
-class Run(instance: xsbt.ScalaInstance) extends ScalaRun
+class Run(instance: ScalaInstance) extends ScalaRun
{
/** Runs the class 'mainClass' using the given classpath and options using the scala runner.*/
def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger) =
@@ -59,7 +55,7 @@ class Run(instance: xsbt.ScalaInstance) extends ScalaRun
val main = getMainMethod(mainClassName, loader)
invokeMain(loader, main, options)
}
- finally { xsbt.FileUtilities.delete(tempDir asFile) }
+ finally { IO.delete(tempDir asFile) }
}
private def invokeMain(loader: ClassLoader, main: Method, options: Seq[String])
{
@@ -85,8 +81,9 @@ object Run
{
def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger)(implicit runner: ScalaRun) =
runner.run(mainClass, classpath, options, log)
+
/** Executes the given function, trapping calls to System.exit. */
- private[sbt] def executeTrapExit(f: => Unit, log: Logger): Option[String] =
+ def executeTrapExit(f: => Unit, log: Logger): Option[String] =
{
val exitCode = TrapExit(f, log)
if(exitCode == 0)
@@ -97,69 +94,4 @@ object Run
else
Some("Nonzero exit code: " + exitCode)
}
- /** Create a settings object and execute the provided function if the settings are created ok.*/
- private def createSettings(log: Logger)(f: Settings => Option[String]) =
- {
- val command = new GenericRunnerCommand(Nil, message => log.error(message))
- if(command.ok)
- f(command.settings)
- else
- Some(command.usageMsg)
- }
-
- /** Starts a Scala interpreter session with 'project' bound to the value 'current' in the console
- * and the following two lines executed:
- * import sbt._
- * import current._
- */
- def projectConsole(project: Project): Option[String] =
- {
- import project.log
- createSettings(log) { interpreterSettings =>
- createSettings(log) { compilerSettings =>
- log.info("Starting scala interpreter with project definition " + project.name + " ...")
- log.info("")
- Control.trapUnit("Error during session: ", log)
- {
- JLine.withJLine {
- val loop = new ProjectInterpreterLoop(compilerSettings, project)
- executeTrapExit(loop.main(interpreterSettings), log)
- }
- }
- }}
- }
- /** A custom InterpreterLoop with the purpose of creating an interpreter with Project 'project' bound to the value 'current',
- * and the following three lines interpreted:
- * import sbt._
- * import Process._
- * import current._.
- * To do this,
- * 1) The compiler uses a different settings instance: 'compilerSettings', which will have its classpath set to include
- * the Scala compiler and library jars and the classpath used to compile the project.
- * 2) The parent class loader for the interpreter is the loader that loaded the project, so that the project can be bound to a variable
- * in the interpreter.
- */
- private class ProjectInterpreterLoop(compilerSettings: Settings, project: Project) extends InterpreterLoop
- {
- override def createInterpreter()
- {
- val projectLoader = project.getClass.getClassLoader
- val classpath = Project.getProjectClasspath(project)
- val fullClasspath = classpath.get ++ Path.fromFiles(project.info.app.scalaProvider.jars)
- compilerSettings.classpath.value = Path.makeString(fullClasspath)
- project.log.debug(" console-project classpath:\n\t" + fullClasspath.mkString("\n\t"))
-
- in = InteractiveReader.createDefault()
- interpreter = new Interpreter(settings)
- {
- override protected def parentClassLoader = projectLoader
- override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
- }
- interpreter.setContextClassLoader()
- interpreter.bind("current", project.getClass.getName, project)
- interpreter.interpret("import sbt._")
- interpreter.interpret("import Process._")
- interpreter.interpret("import current._")
- }
- }
-}
+}
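With the interpreter code moved out (see ProjectConsole.scala in this commit), executeTrapExit becomes public here. A sketch of what the helper does for a caller, using a hypothetical MyApp main class (log is assumed to be a Logger in scope):

    // Runs the block with System.exit trapped: a zero exit code (or normal return)
    // yields None, a nonzero exit yields Some(error message).
    val result: Option[String] = Run.executeTrapExit(MyApp.main(Array("--help")), log)
    result.foreach(msg => log.error(msg))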
14 ...main/scala/sbt/impl/SelectMainClass.scala → run/SelectMainClass.scala
@@ -1,29 +1,27 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
-package sbt.impl
-import sbt._
+package sbt
-private[sbt] object SelectMainClass
+object SelectMainClass
{
- def apply(promptIfMultipleChoices: Boolean, mainClasses: List[String]) =
+ // Some(SimpleReader.readLine _)
+ def apply(promptIfMultipleChoices: Option[String => Option[String]], mainClasses: List[String]) =
{
mainClasses match
{
case Nil => None
case head :: Nil => Some(head)
case multiple =>
- if(promptIfMultipleChoices)
+ for(prompt <- promptIfMultipleChoices) yield
{
println("\nMultiple main classes detected, select one to run:\n")
for( (className, index) <- multiple.zipWithIndex )
println(" [" + (index+1) + "] " + className)
- val line = trim(SimpleReader.readLine("\nEnter number: "))
+ val line = trim(prompt("\nEnter number: "))
println("")
toInt(line, multiple.length) map multiple.apply
}
- else
- None
}
}
private def trim(s: Option[String]) = s.getOrElse("")
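SelectMainClass now takes the prompt as an optional reader function instead of a Boolean, so the caller controls how, and whether, the user is asked. A sketch of the two call shapes, following the SimpleReader hint in the diff (mainClasses is assumed to be the detected List[String], and SimpleReader the console reader available to the caller):

    // Interactive: prompt the user when several main classes are detected.
    val interactive = SelectMainClass(Some(SimpleReader.readLine _), mainClasses)
    // Non-interactive: never prompt, so multiple candidates select nothing.
    val batch = SelectMainClass(None, mainClasses)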
7 sbt/src/main/scala/sbt/TrapExit.scala → run/TrapExit.scala
@@ -106,11 +106,8 @@ object TrapExit
* thread (AWT-XAWT, AWT-Windows, ...)*/
private def allThreads: Set[Thread] =
{
- val allThreads = wrap.Wrappers.toList(Thread.getAllStackTraces.keySet)
- val threads = new scala.collection.mutable.HashSet[Thread]
- for(thread <- allThreads if !isSystemThread(thread))
- threads += thread
- threads
+ import collection.JavaConversions._
+ Thread.getAllStackTraces.keySet.filter(thread => !isSystemThread(thread))
}
/** Returns true if the given thread is in the 'system' thread group and is an AWT thread other than
* AWT-EventQueue or AWT-Shutdown.*/
3  sbt/.gitignore
@@ -1,5 +1,2 @@
-LazyJettyRun6.scala
-LazyJettyRun7.scala
install/project/boot/
scripted/project/boot/
-project/plugins/project/
228 sbt/src/main/scala/sbt/Analysis.scala
@@ -1,228 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2008, 2009 Mark Harrah
- */
-package sbt
-
-trait TaskAnalysis[Source, Product, External] extends NotNull
-{
- import scala.collection.Set
- def save(): Option[String]
- def revert(): Option[String]
- def clear(): Unit
-
- def allSources: Set[Source]
- def allProducts: Set[Product]
- def allExternals: Set[External]
-
- def sourceDependencies(source: Source): Option[Set[Source]]
- def products(source: Source): Option[Set[Product]]
- def externalDependencies(external: External): Option[Set[Source]]
-
- def addSource(source: Source): Unit
- def addExternalDependency(dependsOn: External, source: Source): Unit
- def addSourceDependency(dependsOn: Source, source: Source): Unit
- def addProduct(source: Source, product: Product): Unit
-
- def removeSource(source: Source): Unit
- def removeDependent(source: Source): Unit
- def removeDependencies(source: Source): Option[Set[Source]]
- def removeExternalDependency(external: External): Unit
-}
-
-import java.io.File
-import BasicAnalysis._
-import impl.MapUtilities.{add, all, read, mark, readOnlyIterable, write}
-import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set}
-
-class BasicAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends TaskAnalysis[Path, Path, File]
-{
- private val sourceDependencyMap: Map[Path, Set[Path]] = new HashMap
- private val productMap: Map[Path, Set[Path]] = new HashMap
- private val externalDependencyMap: Map[File, Set[Path]] = new HashMap
-
- final type AnyMapToSource = Map[K, Set[Path]] forSome {type K}
- final type AnySourceMap = Map[Path, T] forSome {type T}
- final type AnySourceSetMap = Map[Path, Set[T]] forSome {type T}
- final type AnyMap = Map[K, V] forSome { type K; type V }
-
- protected def mapsToClear = List[AnyMap](sourceDependencyMap, productMap, externalDependencyMap)
- protected def mapsToRemoveSource = List[AnySourceMap](sourceDependencyMap, productMap)
- protected def mapsToRemoveDependent = List[AnyMapToSource](sourceDependencyMap, externalDependencyMap)
- protected def mapsToMark = List[AnySourceSetMap](sourceDependencyMap, productMap)
-
- def clear()
- {
- for(map <- mapsToClear)
- map.clear()
- }
- def removeSource(source: Path)
- {
- for(sourceProducts <- productMap.get(source))
- FileUtilities.clean(sourceProducts, true, log)
- for(map <- mapsToRemoveSource)
- map -= source
- }
- def removeSelfDependency(source: Path)
- {
- for(deps <- sourceDependencyMap.get(source))
- deps -= source
- }
- def removeDependent(source: Path)
- {
- for(map <- mapsToRemoveDependent; deps <- map.values)
- deps -= source
- }
- def removeDependencies(source: Path) = sourceDependencyMap.removeKey(source)
- def removeExternalDependency(dep: File) = externalDependencyMap.removeKey(dep.getAbsoluteFile)
-
- def externalDependencies(external: File) = externalDependencyMap.get(external.getAbsoluteFile)
- def sourceDependencies(source: Path) = sourceDependencyMap.get(source)
- def products(sources: Iterable[Path]): Iterable[Path] =
- {
- val buffer = new ListBuffer[Path]
- for(source <- sources; sourceProducts <- productMap.get(source))
- buffer ++= sourceProducts
- buffer.readOnly
- }
- def products(source: Path) = productMap.get(source)
-
- def allSources = sourceDependencyMap.keySet
- def allProducts: Set[Path] = HashSet(flatten(productMap.values.toList) : _*)
- def allExternals = externalDependencyMap.keySet
-
- def allExternalDependencies = readOnlyIterable(externalDependencyMap)
- def allDependencies = readOnlyIterable(sourceDependencyMap)
-
- def addSourceDependency(on: Path, from: Path) = add(on, from, sourceDependencyMap)
- def addExternalDependency(on: File, from: Path) = add(on.getAbsoluteFile, from, externalDependencyMap)
- def addProductDependency(on: Path, from: Path) =
- {
- for( (source, _) <- productMap.find(_._2.contains(on)) )
- addSourceDependency(source, from)
- }
- def addProduct(source: Path, file: Path) = add(source, file, productMap)
- def addSource(source: Path) =
- {
- for(map <- mapsToMark)
- mark(source, map)
- }
-
- import Format._ // get implicits for data types
- implicit val path: Format[Path] = Format.path(projectPath)
- implicit val pathSet: Format[Set[Path]] = Format.set
-
- protected def backedMaps: Iterable[Backed[_,_]] =
- Backed(sourceDependencyMap, DependenciesLabel, DependenciesFileName) ::
- Backed(productMap, GeneratedLabel, GeneratedFileName) ::
- Backed(externalDependencyMap, ExternalDependenciesLabel, ExternalDependenciesFileName) ::
- Nil
-
- def revert() = load()
- private def loadBacked[Key,Value](b: Backed[Key,Value]) = read(b.map, analysisPath / b.name, log)(b.keyFormat, b.valueFormat)
- private def storeBacked[Key,Value](b: Backed[Key,Value]) = write(b.map, b.label, analysisPath / b.name, log)(b.keyFormat, b.valueFormat)
- final def load(): Option[String] = Control.lazyFold(backedMaps.toList)(backed =>loadBacked(backed))
- final def save(): Option[String] = Control.lazyFold(backedMaps.toList)(backed => storeBacked(backed))
-}
-object BasicAnalysis
-{
- private def flatten(s: Iterable[Set[Path]]): Seq[Path] = s.flatMap(x => x.toSeq).toSeq
-
- val GeneratedFileName = "generated_files"
- val DependenciesFileName = "dependencies"
- val ExternalDependenciesFileName = "external"
-
- val GeneratedLabel = "Generated Classes"
- val DependenciesLabel = "Source Dependencies"
- val ExternalDependenciesLabel = "External Dependencies"
-
- def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, BasicAnalysis] =
- {
- val analysis = new BasicAnalysis(analysisPath, projectPath, log)
- analysis.load().toLeft(analysis)
- }
-}
-object CompileAnalysis
-{
- val HashesFileName = "hashes"
- val TestsFileName = "tests"
- val ApplicationsFileName = "applications"
- val ProjectDefinitionsName = "projects"
-
- val HashesLabel = "Source Hashes"
- val TestsLabel = "Tests"
- val ApplicationsLabel = "Classes with main methods"
- val ProjectDefinitionsLabel = "Project Definitions"
-
- def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, CompileAnalysis] =
- {
- val analysis = new CompileAnalysis(analysisPath, projectPath, log)
- analysis.load().toLeft(analysis)
- }
-}
-import CompileAnalysis._
-import Format._ // get implicits for data types
-class BasicCompileAnalysis protected (analysisPath: Path, projectPath: Path, log: Logger) extends BasicAnalysis(analysisPath, projectPath, log)
-{
- /*private */val hashesMap = new HashMap[Path, Array[Byte]]
- val apiMap = new HashMap[Path, xsbti.api.Source]
-
- override protected def mapsToClear = apiMap :: hashesMap :: super.mapsToClear
- override protected def mapsToRemoveSource = apiMap :: hashesMap :: super.mapsToRemoveSource
-
- def setHash(source: Path, hash: Array[Byte]) { hashesMap(source) = hash }
- def clearHash(source: Path) { hashesMap.removeKey(source) }
- def hash(source: Path) = hashesMap.get(source)
- def clearHashes() { hashesMap.clear() }
-
- def setAPI(source: Path, a: xsbti.api.Source) { apiMap(source) = a }
-
- def getClasses(sources: PathFinder, outputDirectory: Path): PathFinder =
- Path.lazyPathFinder
- {
- val basePath = (outputDirectory ###)
- for(c <- products(sources.get)) yield
- Path.relativize(basePath, c).getOrElse(c)
- }
-
- implicit val stringSet: Format[Set[String]] = Format.set
- override protected def backedMaps = Backed(hashesMap, HashesLabel, HashesFileName) :: super.backedMaps.toList
-}
-private[sbt] final class BuilderCompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log)
-{
- private val projectDefinitionMap = new HashMap[Path, Set[String]]
- override protected def mapsToClear = projectDefinitionMap :: super.mapsToClear
- override protected def mapsToRemoveSource = projectDefinitionMap :: super.mapsToRemoveSource
- def allProjects = all(projectDefinitionMap)
- def addProjectDefinition(source: Path, className: String) = add(source, className, projectDefinitionMap)
-
- override protected def backedMaps =
- Backed(projectDefinitionMap, ProjectDefinitionsLabel, ProjectDefinitionsName) ::
- super.backedMaps
-}
-class CompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log)
-{
- private val testMap = new HashMap[Path, Set[Discovered]]
- private val applicationsMap = new HashMap[Path, Set[String]]
- def allTests = all(testMap)
- def allApplications = all(applicationsMap)
- def addTest(source: Path, test: Discovered) = add(source, test, testMap)
- def addApplication(source: Path, className: String) = add(source, className, applicationsMap)
-
- def testSourceMap: Map[String, Path] =
- {
- val map = new HashMap[String, Path]
- for( (source, tests) <- testMap; test <- tests) map(test.className) = source
- map
- }
-
- override protected def mapsToClear = applicationsMap :: testMap :: super.mapsToClear
- override protected def mapsToRemoveSource = applicationsMap :: testMap :: super.mapsToRemoveSource
-
- implicit val testSet: Format[Set[Discovered]] = Format.set
- override protected def backedMaps =
- Backed(testMap, TestsLabel, TestsFileName) ::
- Backed(applicationsMap, ApplicationsLabel, ApplicationsFileName) ::
- super.backedMaps
-}
-/** A map that is persisted in a properties file named 'name' and with 'label'. 'keyFormat' and 'valueFormat' are used to (de)serialize. */
-final case class Backed[Key, Value](map: Map[Key, Value], label: String, name: String)(implicit val keyFormat: Format[Key], val valueFormat: Format[Value]) extends NotNull
101 sbt/src/main/scala/sbt/AnalysisCallback.scala
@@ -1,101 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2008, 2009 Mark Harrah
- */
-package sbt
-
-import java.io.File
-
-object AnalysisCallback
-{
- private val map = new scala.collection.mutable.HashMap[Int, AnalysisCallback]
- private var nextID: Int = 0
- def register(callback: AnalysisCallback): Int =
- {
- val id = nextID
- nextID += 1
- map(id) = callback
- id
- }
- def apply(id: Int): Option[AnalysisCallback] = map.get(id)
- def unregister(id: Int)
- {
- map -= id
- }
-}
-
-trait AnalysisCallback extends NotNull
-{
- /** The names of classes that the analyzer should find subclasses of.*/
- def superclassNames: Iterable[String]
- /** The names of annotations that the analyzer should look for on classes and methods. */
- def annotationNames: Iterable[String]
- /** The base path for the project.*/
- def basePath: Path
- /** Called when the the given superclass could not be found on the classpath by the compiler.*/
- def superclassNotFound(superclassName: String): Unit
- /** Called before the source at the given location is processed. */
- def beginSource(sourcePath: Path): Unit
- /** Called when the a subclass of one of the classes given in <code>superclassNames</code> is
- * discovered.*/
- def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit
- /** Called when a class or one of its methods has an annotation listed in <code>annotationNames</code>*/
- def foundAnnotated(source: Path, className: String, annotationName: String, isModule: Boolean): Unit
- /** Called to indicate that the source file <code>sourcePath</code> depends on the source file
- * <code>dependsOnPath</code>.*/
- def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit
- /** Called to indicate that the source file <code>sourcePath</code> depends on the jar
- * <code>jarPath</code>.*/
- def jarDependency(jarPath: File, sourcePath: Path): Unit
- /** Called to indicate that the source file <code>sourcePath</code> depends on the class file
- * <code>classFile</code>.*/
- def classDependency(classFile: File, sourcePath: Path): Unit
- /** Called to indicate that the source file <code>sourcePath</code> depends on the class file
- * <code>classFile</code> that is a product of some source. This differs from classDependency
- * because it is really a sourceDependency. The source corresponding to <code>classFile</code>
- * was not incuded in the compilation so the plugin doesn't know what the source is though. It
- * only knows that the class file came from the output directory.*/
- def productDependency(classFile: Path, sourcePath: Path): Unit
- /** Called to indicate that the source file <code>sourcePath</code> produces a class file at
- * <code>modulePath</code>.*/
- def generatedClass(sourcePath: Path, modulePath: Path): Unit
- /** Called after the source at the given location has been processed. */
- def endSource(sourcePath: Path): Unit
- /** Called when a module with a public 'main' method with the right signature is found.*/
- def foundApplication(sourcePath: Path, className: String): Unit
- def api(sourcePath: Path, source: xsbti.api.Source): Unit
-}
-abstract class BasicAnalysisCallback[A <: BasicCompileAnalysis](val basePath: Path, protected val analysis: A) extends AnalysisCallback
-{
- def superclassNames: Iterable[String]
- def superclassNotFound(superclassName: String) {}
-
- def beginSource(sourcePath: Path): Unit =
- analysis.addSource(sourcePath)
-
- def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit =
- analysis.addSourceDependency(dependsOnPath, sourcePath)
-
- def jarDependency(jarFile: File, sourcePath: Path): Unit =
- analysis.addExternalDependency(jarFile, sourcePath)
-
- def classDependency(classFile: File, sourcePath: Path): Unit =
- analysis.addExternalDependency(classFile, sourcePath)
-
- def productDependency(classFile: Path, sourcePath: Path): Unit =
- analysis.addProductDependency(classFile, sourcePath)
-
- def generatedClass(sourcePath: Path, modulePath: Path): Unit =
- analysis.addProduct(sourcePath, modulePath)
-
- def endSource(sourcePath: Path): Unit =
- analysis.removeSelfDependency(sourcePath)
-
- def api(sourcePath: Path, source: xsbti.api.Source): Unit =
- analysis.setAPI(sourcePath, source)
-}
-abstract class BasicCompileAnalysisCallback(basePath: Path, analysis: CompileAnalysis)
- extends BasicAnalysisCallback(basePath, analysis)
-{
- def foundApplication(sourcePath: Path, className: String): Unit =
- analysis.addApplication(sourcePath, className)
-}
96 sbt/src/main/scala/sbt/Compile.scala
@@ -1,96 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2008, 2009 Mark Harrah, Seth Tisue
- */
-package sbt
-
-import java.io.File
-import xsbt.{AnalyzingCompiler, CompileFailed, CompilerArguments, ScalaInstance}
-
-
-sealed abstract class CompilerCore
-{
- final def apply(label: String, sources: Iterable[Path], classpath: Iterable[Path], outputDirectory: Path, scalaOptions: Seq[String], log: Logger): Option[String] =
- apply(label, sources, classpath, outputDirectory, scalaOptions, Nil, CompileOrder.Mixed, log)
- final def apply(label: String, sources: Iterable[Path], classpath: Iterable[Path], outputDirectory: Path, scalaOptions: Seq[String], javaOptions: Seq[String], order: CompileOrder.Value, log: Logger): Option[String] =
- {
- def filteredSources(extension: String) = sources.filter(_.name.endsWith(extension))
- def process(label: String, sources: Iterable[_], act: => Unit) =
- () => if(sources.isEmpty) log.debug("No " + label + " sources.") else act
-
- val javaSources = Path.getFiles(filteredSources(".java"))
- val scalaSources = Path.getFiles( if(order == CompileOrder.Mixed) sources else filteredSources(".scala") )
- val classpathSet = Path.getFiles(classpath)
- val scalaCompile = process("Scala", scalaSources, processScala(scalaSources, classpathSet, outputDirectory.asFile, scalaOptions, log) )
- val javaCompile = process("Java", javaSources, processJava(javaSources, classpathSet, outputDirectory.asFile, javaOptions, log))
- doCompile(label, sources, outputDirectory, order, log)(javaCompile, scalaCompile)
- }
- protected def doCompile(label: String, sources: Iterable[Path], outputDirectory: Path, order: CompileOrder.Value, log: Logger)(javaCompile: () => Unit, scalaCompile: () => Unit) =
- {
- log.info(actionStartMessage(label))
- if(sources.isEmpty)
- {
- log.info(actionNothingToDoMessage)
- None
- }
- else
- {
- FileUtilities.createDirectory(outputDirectory.asFile, log) orElse
- (try
- {
- val (first, second) = if(order == CompileOrder.JavaThenScala) (javaCompile, scalaCompile) else (scalaCompile, javaCompile)
- first()
- second()
- log.info(actionSuccessfulMessage)
- None
- }
- catch { case e: xsbti.CompileFailed => Some(e.toString) })
- }
- }
- def actionStartMessage(label: String): String
- def actionNothingToDoMessage: String
- def actionSuccessfulMessage: String
- protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
- protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
-}
-
-sealed abstract class CompilerBase extends CompilerCore
-{
- def actionStartMessage(label: String) = "Compiling " + label + " sources..."
- val actionNothingToDoMessage = "Nothing to compile."
- val actionSuccessfulMessage = "Compilation successful."
-}
-
-// The following code is based on scala.tools.nsc.Main and scala.tools.nsc.ScalaDoc
-// Copyright 2005-2008 LAMP/EPFL
-// Original author: Martin Odersky
-
-final class Compile(maximumErrors: Int, compiler: AnalyzingCompiler, analysisCallback: AnalysisCallback, baseDirectory: Path) extends CompilerBase with WithArgumentFile
-{
- protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger)
- {
- val callbackInterface = new AnalysisInterface(analysisCallback, baseDirectory, outputDirectory)
- compiler(Set() ++ sources, Set() ++ classpath, outputDirectory, options, callbackInterface, maximumErrors, log)
- }
-}
-final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends CompilerCore
-{
- protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit =
- compiler.doc(sources, classpath, outputDirectory, options, maximumErrors, log)
- protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger) = ()
-
- def actionStartMessage(label: String) = "Generating API documentation for " + label + " sources..."
- val actionNothingToDoMessage = "No sources specified."
- val actionSuccessfulMessage = "API documentation generation successful."
- def actionUnsuccessfulMessage = "API documentation generation unsuccessful."
-}
-final class Console(compiler: AnalyzingCompiler) extends NotNull
-{
- /** Starts an interactive scala interpreter session with the given classpath.*/
- def apply(classpath: Iterable[Path], log: Logger): Option[String] =
- apply(classpath, Nil, "", log)
- def apply(classpath: Iterable[Path], options: Seq[String], initialCommands: String, log: Logger): Option[String] =
- {
- def console0 = compiler.console(Path.getFiles(classpath), options, initialCommands, log)
- JLine.withJLine( Run.executeTrapExit(console0, log) )
- }
-}
394 sbt/src/main/scala/sbt/Conditional.scala
@@ -1,394 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2008, 2009 Mark Harrah
- */
-package sbt
-
-import xsbt.AnalyzingCompiler
-
-trait Conditional[Source, Product, External] extends NotNull
-{
- type AnalysisType <: TaskAnalysis[Source, Product, External]
- val analysis: AnalysisType = loadAnalysis
-
- protected def loadAnalysis: AnalysisType
- protected def log: Logger
-
- protected def productType: String
- protected def productTypePlural: String
-
- protected def sourcesToProcess: Iterable[Source]
-
- protected def sourceExists(source: Source): Boolean
- protected def sourceLastModified(source: Source): Long
-
- protected def productExists(product: Product): Boolean
- protected def productLastModified(product: Product): Long
-
- protected def externalInfo(externals: Iterable[External]): Iterable[(External, ExternalInfo)]
-
- protected def execute(cAnalysis: ConditionalAnalysis): Option[String]
-
- final case class ExternalInfo(available: Boolean, lastModified: Long) extends NotNull
- trait ConditionalAnalysis extends NotNull
- {
- def dirtySources: Iterable[Source]
- def cleanSources: Iterable[Source]
- def directlyModifiedSourcesCount: Int
- def invalidatedSourcesCount: Int
- def removedSourcesCount: Int
- }
-
- final def run =
- {
- val result = execute(analyze)
- processingComplete(result.isEmpty)
- result
- }
- private def analyze =
- {
- import scala.collection.mutable.HashSet
-
- val sourcesSnapshot = sourcesToProcess
- val removedSources = new HashSet[Source]
- removedSources ++= analysis.allSources
- removedSources --= sourcesSnapshot
- val removedCount = removedSources.size
- for(removed <- removedSources)
- {
- log.debug("Source " + removed + " removed.")
- analysis.removeDependent(removed)
- }
-
- val unmodified = new HashSet[Source]
- val modified = new HashSet[Source]
-
- for(source <- sourcesSnapshot)
- {
- if(isSourceModified(source))
- {
- log.debug("Source " + source + " directly modified.")
- modified += source
- }
- else
- {
- log.debug("Source " + source + " unmodified.")
- unmodified += source
- }
- }
- val directlyModifiedCount = modified.size
- for((external, info) <- externalInfo(analysis.allExternals))
- {
- val dependentSources = analysis.externalDependencies(external).getOrElse(Set.empty)
- if(info.available)
- {
- val dependencyLastModified = info.lastModified
- for(dependentSource <- dependentSources; dependentProducts <- analysis.products(dependentSource))
- {
- dependentProducts.find(p => productLastModified(p) < dependencyLastModified) match
- {
- case Some(modifiedProduct) =>
- {
- log.debug(productType + " " + modifiedProduct + " older than external dependency " + external)
- unmodified -= dependentSource
- modified += dependentSource
- }
- case None => ()
- }
- }
- }
- else
- {
- log.debug("External dependency " + external + " not found.")
- unmodified --= dependentSources
- modified ++= dependentSources
- analysis.removeExternalDependency(external)
- }
- }
-
- val handled = new scala.collection.mutable.HashSet[Source]
- val transitive = !java.lang.Boolean.getBoolean("sbt.intransitive")
- def markModified(changed: Iterable[Source]) { for(c <- changed if !handled.contains(c)) markSourceModified(c) }
- def markSourceModified(src: Source)
- {
- unmodified -= src
- modified += src
- handled += src
- if(transitive)
- markDependenciesModified(src)
- }
- def markDependenciesModified(src: Source) { analysis.removeDependencies(src).map(markModified) }
-
- markModified(modified.toList)
- if(transitive)
- removedSources.foreach(markDependenciesModified)
-
- for(changed <- removedSources ++ modified)
- analysis.removeSource(changed)
-
- new ConditionalAnalysis
- {
- def dirtySources = wrap.Wrappers.readOnly(modified)
- def cleanSources = wrap.Wrappers.readOnly(unmodified)
- def directlyModifiedSourcesCount = directlyModifiedCount
- def invalidatedSourcesCount = dirtySources.size - directlyModifiedCount
- def removedSourcesCount = removedCount
- override def toString =
- {
- " Source analysis: " + directlyModifiedSourcesCount + " new/modified, " +
- invalidatedSourcesCount + " indirectly invalidated, " +
- removedSourcesCount + " removed."
- }
- }
- }
-
- protected def checkLastModified = true
- protected def noProductsImpliesModified = true
- protected def isSourceModified(source: Source) =
- {
- analysis.products(source) match
- {
- case None =>
- {
- log.debug("New file " + source)
- true
- }
- case Some(sourceProducts) =>
- {
- val sourceModificationTime = sourceLastModified(source)
- def isOutofdate(p: Product) =
- !productExists(p) || (checkLastModified && productLastModified(p) < sourceModificationTime)
-
- sourceProducts.find(isOutofdate) match
- {
- case Some(modifiedProduct) =>
- log.debug("Outdated " + productType + ": " + modifiedProduct + " for source " + source)
- true
- case None =>
- if(noProductsImpliesModified && sourceProducts.isEmpty)
- {
- // necessary for change detection that depends on last modified
- log.debug("Source " + source + " has no products, marking it modified.")
- true
- }
- else
- false
- }
- }
- }
- }
- protected def processingComplete(success: Boolean)
- {
- if(success)
- {
- analysis.save()
- log.info(" Post-analysis: " + analysis.allProducts.toSeq.length + " " + productTypePlural + ".")
- }
- else
- analysis.revert()
- }
-}
-
-abstract class AbstractCompileConfiguration extends NotNull
-{
- def label: String
- def sourceRoots: PathFinder
- def sources: PathFinder
- def outputDirectory: Path
- def classpath: PathFinder
- def analysisPath: Path
- def projectPath: Path
- def log: Logger
- def options: Seq[String]
- def javaOptions: Seq[String]
- def maxErrors: Int
- def compileOrder: CompileOrder.Value
-}
-abstract class CompileConfiguration extends AbstractCompileConfiguration
-{
- def fingerprints: Fingerprints
-}
-final case class Fingerprints(superclassNames: Iterable[String], annotationNames: Iterable[String]) extends NotNull
-
-import java.io.File
-class CompileConditional(override val config: CompileConfiguration, compiler: AnalyzingCompiler) extends AbstractCompileConditional(config, compiler)
-{
- import config._
- type AnalysisType = CompileAnalysis
- protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger) =
- new CompileAnalysis(analysisPath, projectPath, log)
- protected def analysisCallback = new CompileAnalysisCallback
- protected class CompileAnalysisCallback extends BasicCompileAnalysisCallback(projectPath, analysis)
- {
- private[this] val fingerprints0 = fingerprints
- def superclassNames = fingerprints0.superclassNames
- def annotationNames = fingerprints0.annotationNames
- def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit =
- analysis.addTest(sourcePath, DiscoveredSubclass(isModule, subclassName, superclassName))
- def foundAnnotated(sourcePath: Path, className: String, annotationName: String, isModule: Boolean): Unit =
- analysis.addTest(sourcePath, DiscoveredAnnotated(isModule, className, annotationName))
- }
-}
-abstract class AbstractCompileConditional(val config: AbstractCompileConfiguration, val compiler: AnalyzingCompiler) extends Conditional[Path, Path, File]
-{
- import config._
- type AnalysisType <: BasicCompileAnalysis
- protected def loadAnalysis =
- {
- val a = constructAnalysis(analysisPath, projectPath, log)
- for(errorMessage <- a.load())
- error(errorMessage)
- a
- }
- protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger): AnalysisType
-
- protected def log = config.log
-
- protected def productType = "class"
- protected def productTypePlural = "classes"
- protected def sourcesToProcess = sources.get
-
- protected def sourceExists(source: Path) = source.asFile.exists
- protected def sourceLastModified(source: Path) = source.asFile.lastModified
-
- protected def productExists(product: Path) = product.asFile.exists
- protected def productLastModified(product: Path) = product.asFile.lastModified
-
- private def libraryJar = compiler.scalaInstance.libraryJar
- private def compilerJar = compiler.scalaInstance.compilerJar
- protected def externalInfo(externals: Iterable[File]) =
- {
- val (classpathJars, classpathDirs) = ClasspathUtilities.buildSearchPaths(classpath.get ++ Seq(Path.fromFile(libraryJar), Path.fromFile(compilerJar)))
- for(external <- externals) yield
- {
- val available = external.exists && (external == libraryJar || ClasspathUtilities.onClasspath(classpathJars, classpathDirs, external) )
- if(!available)
- log.debug("External " + external + (if(external.exists) " not on classpath." else " does not exist."))
- (external, ExternalInfo(available, external.lastModified))
- }
- }
-
- import ChangeDetection.{LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist}
- protected def changeDetectionMethod: ChangeDetection.Value = HashAndProductsExist
- override protected def checkLastModified = changeDetectionMethod != HashAndProductsExist
- override protected def noProductsImpliesModified = changeDetectionMethod == LastModifiedOnly
- override protected def isSourceModified(source: Path) =
- changeDetectionMethod match
- {
- case HashAndLastModified | HashAndProductsExist =>
- // behavior will differ because of checkLastModified
- // hash modified must come first so that the latest hash is calculated for every source
- hashModified(source) || super.isSourceModified(source)
- case HashOnly => hashModified(source)
- case LastModifiedOnly => super.isSourceModified(source)
- }
-
- import scala.collection.mutable.{Buffer, ListBuffer}
- private val newHashes: Buffer[(Path, Option[Array[Byte]])] = new ListBuffer
- private def warnHashError(source: Path, message: String)
- {
- log.warn("Error computing hash for source " + source + ": " + message)
- newHashes += ((source, None))
- }
- protected def hashModified(source: Path) =
- {
- source.isDirectory ||
- (analysis.hash(source) match
- {
- case None =>
- log.debug("Source " + source + " had no hash, marking modified.")
- Hash(source, log).fold(err => warnHashError(source, err), newHash => newHashes += ((source, Some(newHash))))
- true
- case Some(oldHash) =>
- {
- Hash(source, log) match
- {
- case Left(err) =>
- warnHashError(source, err)
- log.debug("Assuming source is modified because of error.")
- true
- case Right(newHash) =>
- newHashes += ((source, Some(newHash)))
- val different = !(oldHash deepEquals newHash)
- if(different)
- log.debug("Hash for source " + source + " changed (was " + Hash.toHex(oldHash) +
- ", is now " + Hash.toHex(newHash) + "), marking modified.")
- different
- }
- }
- })
- }
- private def scalaJars: Iterable[Path] =
- {
- val instance = compiler.scalaInstance
- Seq(instance.libraryJar, instance.compilerJar).map(Path.fromFile)
- }
- protected def execute(executeAnalysis: ConditionalAnalysis) =
- {
- log.info(executeAnalysis.toString)
- finishHashes()
- import executeAnalysis.dirtySources
-
- // the output directory won't show up in the classpath unless it exists, so do this before classpath.get
- val outputDir = outputDirectory.asFile
- FileUtilities.createDirectory(outputDir, log)
-
- val cp = classpath.get
- if(!dirtySources.isEmpty)
- checkClasspath(cp)
- def run =
- {
- val compile = new Compile(config.maxErrors, compiler, analysisCallback, projectPath)
- compile(label, dirtySources, cp, outputDirectory, options, javaOptions, compileOrder, log)
- }
- val loader = ClasspathUtilities.toLoader(cp ++ scalaJars)
- val r = classfile.Analyze(projectPath, outputDirectory, dirtySources, sourceRoots.get, log)(analysis.allProducts, analysisCallback, loader)(run)
- if(log.atLevel(Level.Debug))
- {
- /** This checks that the plugin accounted for all classes in the output directory.*/
- val classes = scala.collection.mutable.HashSet(analysis.allProducts.toSeq: _*)
- val actualClasses = (outputDirectory ** GlobFilter("*.class")).get
- val missedClasses = actualClasses.toList.remove(classes.contains)
- missedClasses.foreach(c => log.debug("Missed class: " + c))
- log.debug("Total missed classes: " + missedClasses.length)
- }
- r
- }
- private def finishHashes()
- {
- if(changeDetectionMethod == LastModifiedOnly)
- analysis.clearHashes()
- else
- {
- for((path, hash) <- newHashes)
- {
- hash match
- {
- case None => analysis.clearHash(path)
- case Some(hash) => analysis.setHash(path, hash)
- }
- }
- }
- newHashes.clear()
- }
- private def checkClasspath(cp: Iterable[Path])
- {
- import scala.collection.mutable.{HashMap, HashSet, Set}
- val collisions = new HashMap[String, Set[Path]]
- for(jar <- cp if ClasspathUtilities.isArchive(jar))
- collisions.getOrElseUpdate(jar.asFile.getName, new HashSet[Path]) += jar
- for((name, jars) <- collisions)
- {
- if(jars.size > 1)
- {
- log.debug("Possible duplicate classpath locations for jar " + name + ": ")
- for(jar <- jars) log.debug("\t" + jar.absolutePath)
- }
- }
- }
-
- protected def analysisCallback: AnalysisCallback
-}
-object ChangeDetection extends Enumeration
-{
- val LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist = Value
-}
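
For illustration of the code removed above: checkLastModified and noProductsImpliesModified are both derived from a single changeDetectionMethod. A minimal sketch of choosing a different mode by overriding that method in a CompileConditional subclass (the class name and wiring here are hypothetical; config and compiler are assumed to come from the surrounding project code):

    // Sketch only: timestamp-based change detection instead of the default
    // HashAndProductsExist. With LastModifiedOnly, hashes are cleared and a
    // source is recompiled when a product is missing or older than the source.
    class TimestampCompileConditional(config: CompileConfiguration, compiler: xsbt.AnalyzingCompiler)
        extends CompileConditional(config, compiler)
    {
        override protected def changeDetectionMethod = ChangeDetection.LastModifiedOnly
    }
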
View
8 sbt/src/main/scala/sbt/Credentials.scala
@@ -12,11 +12,7 @@ object Credentials
def add(realm: String, host: String, userName: String, passwd: String): Unit =
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
/** Load credentials from the given file into Ivy's credentials cache.*/
- def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log)
- /** Load credentials from the given file into Ivy's credentials cache.*/
- def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log)
- /** Load credentials from the given file into Ivy's credentials cache.*/
- def apply(path: Path, log: Logger)
+ def apply(path: File, log: Logger)
{
val msg =
if(path.exists)
@@ -24,7 +20,7 @@ object Credentials
val properties = new scala.collection.mutable.HashMap[String, String]
def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path)
- impl.MapUtilities.read(properties, path, log) orElse
+ MapIO.read(properties, path, log) orElse
{
List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
{
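
With the overloads collapsed, credentials are now loaded from a single java.io.File. A hedged usage sketch; the property names shown are assumptions for illustration only (the real keys come from RealmKeys, HostKeys, UserKeys and PasswordKeys, which are outside this hunk), and `log` is an sbt Logger assumed to be in scope:

    // Assumed file layout (illustrative key names only):
    //   realm=Some Repository Realm
    //   host=repo.example.org
    //   user=deploy
    //   password=********
    import java.io.File
    val credentialsFile = new File(System.getProperty("user.home"), ".sbt-credentials")
    Credentials(credentialsFile, log)
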
View
38 sbt/src/main/scala/sbt/Doc.scala
@@ -0,0 +1,38 @@
+/* sbt -- Simple Build Tool
+ * Copyright 2008, 2009 Mark Harrah
+ */
+package sbt
+
+import java.io.File
+import xsbt.{AnalyzingCompiler, CompileFailed}
+
+final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler)
+{
+ final def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger)
+ {
+ log.info(actionStartMessage(label))
+ if(sources.isEmpty)
+ log.info(actionNothingToDoMessage)
+ else
+ {
+ IO.createDirectory(outputDirectory)
+ compiler.doc(sources, classpath, outputDirectory, options, maximumErrors, log)
+ log.info(actionSuccessfulMessage)
+ }
+ }
+ def actionStartMessage(label: String) = "Generating API documentation for " + label + " sources..."
+ val actionNothingToDoMessage = "No sources specified."
+ val actionSuccessfulMessage = "API documentation generation successful."
+ def actionUnsuccessfulMessage = "API documentation generation unsuccessful."
+}
+final class Console(compiler: AnalyzingCompiler)
+{
+ /** Starts an interactive scala interpreter session with the given classpath.*/
+ def apply(classpath: Seq[File])(implicit log: Logger): Option[String] =
+ apply(classpath, Nil, "", log)
+ def apply(classpath: Iterable[File], options: Seq[String], initialCommands: String)(implicit log: Logger): Option[String] =
+ {
+ def console0 = compiler.console(Path.getFiles(classpath), options, initialCommands, log)
+ JLine.withJLine( Run.executeTrapExit(console0, log) )
+ }
+}
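
A short usage sketch of the two wrappers added above; the compiler instance, source and classpath values, and the implicit Logger are assumed to be supplied by the surrounding build code:

    import java.io.File
    import xsbt.AnalyzingCompiler

    def docAndConsole(compiler: AnalyzingCompiler, sources: Seq[File], cp: Seq[File], out: File)
        (implicit log: Logger): Option[String] =
    {
        // Scaladoc logs its own start / nothing-to-do / success messages.
        val scaladoc = new Scaladoc(100, compiler)
        scaladoc("main", sources, cp, out, Nil)
        // Console returns None on success or Some(errorMessage) on failure.
        val console = new Console(compiler)
        console(cp)
    }
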
View
43 sbt/src/main/scala/sbt/ExitHook.scala
@@ -1,43 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2009 Mark Harrah
- */
-package sbt
-
-/** Defines a function to call as sbt exits.*/
-trait ExitHook extends NotNull
-{
- /** The name of this hook, used when reporting feedback to the user. */
- def name: String
- /** Subclasses should implement this method, which is called when this hook is executed. */
- def runBeforeExiting(): Unit
-}
-
-object ExitHooks
-{
- /** This is a list of hooks to call when sbt is finished executing.*/
- private val exitHooks = new scala.collection.mutable.HashSet[ExitHook]
- /** Adds a hook to call before sbt exits. */
- private[sbt] def register(hook: ExitHook) { exitHooks += hook }
- /** Removes a hook. */
- private[sbt] def unregister(hook: ExitHook) { exitHooks -= hook }
- /** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */
- private[sbt] def runExitHooks(log: Logger)
- {
- for(hook <- exitHooks.toList)
- {
- try
- {
- log.debug("Running exit hook '" + hook.name + "'...")
- hook.runBeforeExiting()
- }
- catch
- {
- case e =>
- {
- log.trace(e);
- log.error("Error running exit hook '" + hook.name + "': " + e.toString)
- }
- }
- }
- }
-}
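
The removed trait needs only a name and a cleanup body; a minimal sketch (the hook below and its scratch directory are hypothetical, and registration stays private[sbt]):

    object CleanScratchDirectory extends ExitHook
    {
        def name = "clean-scratch-directory"
        def runBeforeExiting()
        {
            // Delete a hypothetical scratch directory before the JVM exits.
            val dir = new java.io.File(System.getProperty("java.io.tmpdir"), "sbt-scratch")
            val files = dir.listFiles
            if(files != null) files.foreach(_.delete())
            dir.delete()
        }
    }
    // sbt-internal code would then call: ExitHooks.register(CleanScratchDirectory)
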
View
126 sbt/src/main/scala/sbt/Logger.scala
@@ -1,126 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2008, 2009, 2010 Mark Harrah
- */
-package sbt
-
-import scala.collection.mutable.{Buffer, HashMap, ListBuffer}
-
-/** A logger that can buffer the logging done on it by the currently executing Thread and
-* then can flush the buffer to the delegate logger provided in the constructor. Use
-* 'startRecording' to start buffering and then 'play' to flush the buffer for the
-* current Thread to the backing logger. The logging level set at the
-* time a message is originally logged is used, not the level at the time 'play' is
-* called.
-*
-* This class assumes that it is the only client of the delegate logger.
-*
-* This logger is thread-safe.
-* */
-final class BufferedLogger(delegate: Logger) extends AbstractLogger
-{
- override lazy val ansiCodesSupported = delegate.ansiCodesSupported
- private[this] val buffers = wrap.Wrappers.weakMap[Thread, Buffer[LogEvent]]
- private[this] var recordingAll = false
-
- private[this] def getOrCreateBuffer = buffers.getOrElseUpdate(key, createBuffer)
- private[this] def buffer = if(recordingAll) Some(getOrCreateBuffer) else buffers.get(key)
- private[this] def createBuffer = new ListBuffer[LogEvent]
- private[this] def key = Thread.currentThread
-
- @deprecated def startRecording() = recordAll()
- /** Enables buffering for logging coming from the current Thread. */
- def record(): Unit = synchronized { buffers(key) = createBuffer }
- /** Enables buffering for logging coming from all Threads. */
- def recordAll(): Unit = synchronized{ recordingAll = true }
- def buffer[T](f: => T): T =
- {
- record()
- try { f }
- finally { Control.trap(stop()) }
- }
- def bufferAll[T](f: => T): T =
- {
- recordAll()
- try { f }
- finally { Control.trap(stopAll()) }
- }
-
- /** Flushes the buffer to the delegate logger for the current thread. This method calls logAll on the delegate
- * so that the messages are written consecutively. The buffer is not cleared; use 'clear()' or 'stop()' for that. */
- def play(): Unit =
- synchronized
- {
- for(buffer <- buffers.get(key))
- delegate.logAll(wrap.Wrappers.readOnly(buffer))
- }
- def playAll(): Unit =
- synchronized
- {
- for(buffer <- buffers.values)
- delegate.logAll(wrap.Wrappers.readOnly(buffer))
- }
- /** Clears buffered events for the current thread and disables buffering. */
- def clear(): Unit = synchronized { buffers -= key }
- /** Clears buffered events for all threads and disables all buffering. */
- def clearAll(): Unit = synchronized { buffers.clear(); recordingAll = false }
- /** Plays buffered events for the current thread and disables buffering. */
- def stop(): Unit =
- synchronized
- {
- play()
- clear()
- }
- def stopAll(): Unit =
- synchronized
- {
- playAll()
- clearAll()
- }
-
- def setLevel(newLevel: Level.Value): Unit =
- synchronized
- {
- buffer.foreach{_ += new SetLevel(newLevel) }
- delegate.setLevel(newLevel)
- }
- def getLevel = synchronized { delegate.getLevel }
- def getTrace = synchronized { delegate.getTrace }
- def setTrace(level: Int): Unit =
- synchronized
- {
- buffer.foreach{_ += new SetTrace(level) }
- delegate.setTrace(level)
- }
-
- def trace(t: => Throwable): Unit =
- doBufferableIf(traceEnabled, new Trace(t), _.trace(t))
- def success(message: => String): Unit =
- doBufferable(Level.Info, new Success(message), _.success(message))
- def log(level: Level.Value, message: => String): Unit =
- doBufferable(level, new Log(level, message), _.log(level, message))
- def logAll(events: Seq[LogEvent]): Unit =
- synchronized
- {
- buffer match
- {
- case Some(b) => b ++= events
- case None => delegate.logAll(events)
- }
- }
- def control(event: ControlEvent.Value, message: => String): Unit =
- doBufferable(Level.Info, new ControlEvent(event, message), _.control(event, message))
- private def doBufferable(level: Level.Value, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit =
- doBufferableIf(atLevel(level), appendIfBuffered, doUnbuffered)
- private def doBufferableIf(condition: => Boolean, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit =
- synchronized
- {
- if(condition)
- {
- buffer match
- {
- case Some(b) => b += appendIfBuffered
- case None => doUnbuffered(delegate)
- }
- }
- }
-}
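
For reference, the buffering removed here is driven through record/play/stop or the bufferAll convenience; a minimal sketch, assuming an existing delegate Logger is passed in:

    // Events logged to `buffered` inside `work` are held per thread and then
    // replayed consecutively to `delegate` when bufferAll finishes (stopAll).
    def runBuffered[T](delegate: Logger)(work: Logger => T): T =
    {
        val buffered = new BufferedLogger(delegate)
        buffered.bufferAll { work(buffered) }
    }
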
View
494 sbt/src/main/scala/sbt/ParallelRunner.scala
@@ -1,494 +0,0 @@
-/* sbt -- Simple Build Tool
- * Copyright 2009 Mark Harrah
- */
-package sbt
-
-/** This file provides the parallel execution engine of sbt. It is a fairly general module, with pluggable Schedulers and Strategies.
-*
-* There are three main components to the engine: Distributors, Schedulers, and Strategies.
-*
-* A Scheduler provides work that is ready to execute. The main type of Scheduler in sbt is a scheduler
-* of nodes in a directed, acyclic graph. This type of scheduler provides work when its
-* dependencies have finished executing successfully. Another type of scheduler is a MultiScheduler, which draws work
-* from sub-schedulers.
-*
-* A Strategy is used by a Scheduler to select the work to process from the work that is ready. It is notified as work
-* becomes ready and is then asked to choose which of that ready work to process. The main Strategy in sbt is the
-* OrderedStrategy, which prioritizes work according to some ordering defined by its constructor. The primary ordering
-* used in sbt is based on the longest length of the processing path that includes the node being ordered.
-*
-* A Distributor uses a Scheduler to obtain work, up to the maximum number of units allowed to run at once. It runs each
-* unit of work in its own Thread.
-**/
-
-import java.util.concurrent.LinkedBlockingQueue
-import scala.collection.{immutable, mutable}
-import immutable.TreeSet
-
-/** Interface to the Distributor/Scheduler system for running tasks with dependencies described by a directed acyclic graph.*/
-object ParallelRunner
-{
- /** Executes work for nodes in an acyclic directed graph with root node `node`. The name of a node is provided
- * by the `name` function, the work to perform for a node by `action`, and the logger to use for a node by `log`.
- * The maximum number of tasks to execute simultaneously is `maximumTasks`. */
- def run[D <: Dag[D]](node: D, name: D => String, action: D => Option[String], maximumTasks: Int, log: D => Logger): List[WorkFailure[D]] =
- {
- val info = DagInfo(node)
- // Create a strategy that gives each node a uniform self cost and uses the maximum cost to execute it and the nodes that depend on it
- // to determine which node to run. The self cost could be modified to include more information about a node, such as the size of input files
- val strategy = defaultStrategy(info)
- val jobScheduler = CompoundScheduler(new DagScheduler(info, strategy), strategy)
- val distributor = new Distributor(jobScheduler, action, maximumTasks, log)
- val result = distributor.run().toList
- for( WorkFailure(work, message) <- result ) yield WorkFailure(work, "Error running " + name(work) + ": " + message)
- }
- def dagScheduler[D <: Dag[D]](node: D) =
- {
- val info = DagInfo(node)
- new DagScheduler(info, defaultStrategy(info))
- }
- private def defaultStrategy[D <: Dag[D]](info: DagInfo[D]) = MaxPathStrategy((d: D) => 1, info)
- def emptyScheduler[D]: Scheduler[D] =
- new Scheduler[D]
- {
- /** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
- * is encapsulated in this object.*/
- def run: Run = new Run
- {
- def complete(d: D, result: Option[String]) {}
- def hasPending = false
- /**Returns true if this scheduler has no more work to be done, ever.*/
- def isComplete = true
- def next(max: Int) = Nil
- def failures = Nil
- }
- }
-}
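
The entry point above only needs a Dag node plus name, action and log functions; a minimal sketch (the Task class below is hypothetical and assumes Dag exposes a `dependencies` collection, as used by DagInfo further down):

    // Hypothetical task type: None from `action` means success, Some(msg) a failure.
    final class Task(val taskName: String, val dependencies: List[Task], val action: () => Option[String]) extends Dag[Task]

    def runAll(root: Task, maxWorkers: Int, log: Logger): List[WorkFailure[Task]] =
        ParallelRunner.run(root, (t: Task) => t.taskName, (t: Task) => t.action(), maxWorkers, (_: Task) => log)
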
-/** Requests work from `scheduler` and processes it using `doWork`. This class limits the amount of work processing at any given time
-* to `workers`.*/
-final class Distributor[D](scheduler: Scheduler[D], doWork: D => Option[String], workers: Int, log: D => Logger) extends NotNull
-{
- require(workers > 0)
- final def run(): Iterable[WorkFailure[D]] = (new Run).run()
-
- private final class Run extends NotNull
- {
- private[this] val schedule = scheduler.run
- /** The number of threads currently running. */
- private[this] var running = 0
- /** Pending notifications of completed work. */
- private[this] val complete = new java.util.concurrent.LinkedBlockingQueue[Done]
-
- private[Distributor] def run(): Iterable[WorkFailure[D]] =
- {
- next()
- if(isIdle && !schedule.hasPending) // test if all work is complete
- schedule.failures
- else
- {
- waitForCompletedWork() // wait for some work to complete
- run() // continue
- }
- }
- // true if the maximum number of worker threads are currently running
- private def atMaximum = running == workers
- private def availableWorkers = workers - running
- // true if no worker threads are currently running
- private def isIdle = running == 0
- // process more work
- private def next()
- {
- // if the maximum threads are being used, do nothing
- // if all work is complete or the scheduler is waiting for current work to complete, do nothing
- if(!atMaximum && schedule.hasPending)
- {
- val nextWork = schedule.next(availableWorkers)
- val nextSize = nextWork.size
- assume(nextSize <= availableWorkers, "Scheduler provided more work (" + nextSize + ") than allowed (" + availableWorkers + ")")
- assume(nextSize > 0 || !isIdle, "Distributor idle and the scheduler indicated work pending, but provided no work.")
- nextWork.foreach(process)
- }
- }
- // wait on the blocking queue `complete` until some work finishes and notify the scheduler
- private def waitForCompletedWork()
- {
- require(running > 0)
- val done = complete.take()
- running -= 1
- schedule.complete(done.data, done.result)
- }
- private def process(data: D)
- {
- require(running + 1 <= workers)
- running += 1
- new Worker(data).start()
- }
- private class Worker(data: D) extends Thread with NotNull
- {
- override def interrupt() {}
- override def run()
- {
- val result = Control.trapUnit("", log(data))(doWork(data))
- complete.put( new Done(result, data) )
- }
- }
- }
- private final class Done(val result: Option[String], val data: D) extends NotNull
-}
-final case class WorkFailure[D](work: D, message: String) extends NotNull
-{
- override def toString = message
-}
-/** Schedules work of type D. A Scheduler determines what work is ready to be processed.
-* A Scheduler is itself immutable. It creates a mutable object for each scheduler run.*/
-trait Scheduler[D] extends NotNull
-{
- /** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
- * is encapsulated in this object.*/
- def run: Run
- trait Run extends NotNull
- {
- /** Notifies this scheduler that work has completed with the given result (Some with the error message or None if the work succeeded).*/
- def complete(d: D, result: Option[String]): Unit
- /** Returns true if there is any more work to be done, although remaining work can be blocked
- * waiting for currently running work to complete.*/
- def hasPending: Boolean
- /**Returns true if this scheduler has no more work to be done, ever.*/
- def isComplete: Boolean
- /** Returns up to 'max' units of work. `max` is always positive. The returned sequence cannot be empty if there is
- * no work currently being processed.*/
- def next(max: Int): Seq[D]
- /** A list of failures that occurred to this point, as reported to the `complete` method. */
- def failures: Iterable[WorkFailure[D]]
- }
-}
-/** A Strategy selects the work to process from work that is ready to be processed.*/
-private trait ScheduleStrategy[D] extends NotNull
-{
- /** Starts a new run. The returned object is a new Run, representing a single strategy run. All state for the run
- * is handled through this object and is encapsulated in this object.*/
- def run: Run
- trait Run extends NotNull
- {
- /** Adds the given work to the list of work that is ready to run.*/
- def workReady(dep: D): Unit
- /** Returns true if there is work ready to be run. */
- def hasReady: Boolean
- /** Provides up to `max` units of work. `max` is always positive and this method is not called
- * if hasReady is false. The returned list cannot be empty if there is work ready to be run.*/
- def next(max: Int): List[D]
- /** If this strategy returns different work from `next` than is provided to `workReady`,
- * this method must map back to the original work.*/
- def reverseMap(dep: D): Iterable[D]
- }
-}
-
-/** A scheduler for nodes of a directed-acyclic graph. It requires the root of the graph
-* and a strategy to select which available nodes to run on limited resources.*/
-private[sbt] final class DagScheduler[D <: Dag[D]](info: DagInfo[D], strategy: ScheduleStrategy[D]) extends Scheduler[D]
-{
- def run: Run = new Run
- {
- val infoRun = info.run
- val strategyRun = strategy.run
-
- // find nodes that are ready to be run (no dependencies)
- {
- val startReady = for( (key, value) <- infoRun.remainingDepsRun if(value.isEmpty)) yield key
- infoRun.remainingDepsRun --= startReady
- startReady.foreach(strategyRun.workReady)
- }
-
- val failures = new mutable.ListBuffer[WorkFailure[D]]
- def next(max: Int) = strategyRun.next(max)
- def complete(work: D, result: Option[String])
- {
- for(originalWork <- strategyRun.reverseMap(work))
- {
- result match
- {
- case None => infoRun.complete(originalWork, strategyRun.workReady)
- case Some(errorMessage) =>
- infoRun.clear(originalWork)
- failures += WorkFailure(originalWork, errorMessage)
- }
- }
- }
- def isComplete = !strategyRun.hasReady && infoRun.reverseDepsRun.isEmpty
- // the strategy might not have any work ready if the remaining work needs currently executing work to finish first
- def hasPending = strategyRun.hasReady || !infoRun.remainingDepsRun.isEmpty
- }
-}
-private object MaxPathStrategy
-{
- def apply[D <: Dag[D]](selfCost: D => Int, info: DagInfo[D]): ScheduleStrategy[D] =
- {
- val cost = // compute the cost of the longest execution path ending at each node
- {
- val cost = new mutable.HashMap[D, Int]
- def computeCost(work: D): Int = info.reverseDeps.getOrElse(work, immutable.Set.empty[D]).foldLeft(0)(_ max getCost(_)) + selfCost(work)
- def getCost(work: D): Int = cost.getOrElseUpdate(work, computeCost(work))
- info.remainingDeps.keys.foreach(getCost)
- wrap.Wrappers.readOnly(cost)
- }
- // create a function to compare units of work. This is not as simple as cost(a) compare cost(b) because it cannot return 0 for
- // unequal nodes (at least for the Ordered comparison)
-
- // 2.8.0 uses Ordering
- implicit val compareOrdering: Ordering[D] =
- new Ordering[D]
- {
- def compare(a: D, b: D) =
- {
- val base = cost(a) compare cost(b)
- if(base == 0)
- a.hashCode compare b.hashCode // this is required because TreeSet interprets 0 as equal
- else
- base
- }
- }
- // 2.7.x uses an implicit view to Ordered
- implicit val compare =
- (a: D) => new Ordered[D] {
- def compare(b: D) = compareOrdering.compare(a, b)
- }
- new OrderedStrategy(new TreeSet())
- }
-}
-/** A strategy that adds work to a tree and selects the last key as the next work to be done. */
-private class OrderedStrategy[D](ready: TreeSet[D]) extends ScheduleStrategy[D]
-{
- def run = new Run
- {
- private[this] var readyRun = ready
- def next(max: Int): List[D] = nextImpl(max, Nil)
- private[this] def nextImpl(remaining: Int, accumulated: List[D]): List[D] =
- {
- if(remaining <= 0 || readyRun.isEmpty)
- accumulated
- else
- {
- val next = readyRun.lastKey
- readyRun -= next
- nextImpl(remaining - 1, next :: accumulated)
- }
- }
- def workReady(dep: D) { readyRun += dep }
- def hasReady = !readyRun.isEmpty
- def reverseMap(dep: D) = dep :: Nil
- }
-}
-/** A class that represents state for a DagScheduler and that MaxPathStrategy uses to initialize an OrderedStrategy. */
-private final class DagInfo[D <: Dag[D]](val remainingDeps: immutable.Map[D, immutable.Set[D]],
- val reverseDeps: immutable.Map[D, immutable.Set[D]]) extends NotNull
-{
- def run = new Run
- final class Run extends NotNull
- {
- val remainingDepsRun = DagInfo.mutableMap(remainingDeps)
- val reverseDepsRun = DagInfo.mutableMap(reverseDeps)
- /** Called when work does not complete successfully and so all work that (transitively) depends on the work
- * must be removed from the maps. */
- def clear(work: D)
- {
- remainingDepsRun -= work
- foreachReverseDep(work)(clear)
- }
- /** Called when work completes properly. `ready` is invoked for each unit of work that is now
- * ready to go (because it was only waiting for `work` to complete).*/
- def complete(work: D, ready: D => Unit)
- {
- def completed(dependsOnCompleted: D)
- {
- for(remainingDependencies <- remainingDepsRun.get(dependsOnCompleted))
- {
- remainingDependencies -= work
- if(remainingDependencies.isEmpty)
- {
- remainingDepsRun -= dependsOnCompleted
- ready(dependsOnCompleted)
- }
- }
- }
- foreachReverseDep(work)(completed)
- }
- private def foreachReverseDep(work: D)(f: D => Unit) { reverseDepsRun.removeKey(work).foreach(_.foreach(f)) }
- }
-}
-/** Constructs forward and reverse dependency map for the given Dag root node. */
-private object DagInfo
-{
- /** Constructs the reverse dependency map from the given Dag and
- * puts the forward dependencies into a map */
- def apply[D <: Dag[D]](root: D): DagInfo[D] =
- {
- val remainingDeps = new mutable.HashMap[D, immutable.Set[D]]
- val reverseDeps = new mutable.HashMap[D, mutable.Set[D]]
- def visitIfUnvisited(node: D): Unit = remainingDeps.getOrElseUpdate(node, processDependencies(node))
- def processDependencies(node: D): Set[D] =
- {
- val workDependencies = node.dependencies
- workDependencies.foreach(visitIfUnvisited)
- for(dep <- workDependencies)
- reverseDeps.getOrElseUpdate(dep, new mutable.HashSet[D]) += node
- immutable.HashSet(workDependencies.toSeq: _*)
- }
- visitIfUnvisited(root)
- new DagInfo(immutable.HashMap(remainingDeps.toSeq : _*), immute(reverseDeps) )
- }
- /** Convert a mutable Map with mutable Sets for values to an immutable Map with immutable Sets for values. */
- private def immute[D](map: mutable.Map[D, mutable.Set[D]]): immutable.Map[D, immutable.Set[D]] =
- {
- val immutedSets = map.map { case (key, value) =>(key, immutable.HashSet(value.toSeq : _*)) }
- immutable.HashMap(immutedSets.toSeq :_*)
- }
- /** Convert an immutable Map with immutable Sets for values to a mutable Map with mutable Sets for values. */
- private def mutableMap[D](map: immutable.Map[D, immutable.Set[D]]): mutable.Map[D, mutable.Set[D]] =
- {
- val mutableSets = map.map { case (key, value) =>(key, mutable.HashSet(value.toSeq : _*)) }
- mutable.HashMap(mutableSets.toSeq :_*)
- }