diff --git a/internal/compiler-bridge/src/main/scala/xsbt/API.scala b/internal/compiler-bridge/src/main/scala/xsbt/API.scala index eb2c76e544..a3fb4ecfcc 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/API.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/API.scala @@ -34,6 +34,10 @@ final class API(val global: CallbackGlobal) extends Compat with GlobalHelpers wi val start = System.currentTimeMillis super.run() + // We're running right after pickling, so store pickles now. + val pickleData = Compat.picklePaths(currentRun) + callback.pickleData(pickleData.toArray) + // After processing all units, register generated classes registerGeneratedClasses(nonLocalClassSymbolsInCurrentUnits.iterator) nonLocalClassSymbolsInCurrentUnits.clear() diff --git a/internal/compiler-bridge/src/main/scala_2.10/xsbt/Compat.scala b/internal/compiler-bridge/src/main/scala_2.10/xsbt/Compat.scala index b0a2945e06..c6cbf101f8 100644 --- a/internal/compiler-bridge/src/main/scala_2.10/xsbt/Compat.scala +++ b/internal/compiler-bridge/src/main/scala_2.10/xsbt/Compat.scala @@ -13,6 +13,7 @@ package xsbt import java.io.PrintWriter import java.nio.file.Path +import xsbti.PickleData import xsbti.compile.Output import scala.reflect.{ internal => sri } import scala.reflect.internal.{ util => sriu } @@ -190,6 +191,9 @@ object Compat { } def plainNioFile(path: Path): AbstractFile = new PlainNioFile(path) + + // No pipeline pickling in 2.10 + def picklePaths(run: Global#Run) = Iterable.empty[PickleData] } private trait CachedCompilerCompat { self: CachedCompiler0 => diff --git a/internal/compiler-bridge/src/main/scala_2.11-12/xsbt/Compat.scala b/internal/compiler-bridge/src/main/scala_2.11-12/xsbt/Compat.scala index 20914ad2d6..c6cdb8984a 100644 --- a/internal/compiler-bridge/src/main/scala_2.11-12/xsbt/Compat.scala +++ b/internal/compiler-bridge/src/main/scala_2.11-12/xsbt/Compat.scala @@ -12,10 +12,11 @@ package xsbt import java.io.PrintWriter -import java.nio.file.Path +import 
java.nio.file.{ Path, Paths } +import xsbti.PickleData import xsbti.compile.Output - -import scala.tools.nsc.Settings +import scala.collection.mutable +import scala.tools.nsc.{ Global, Settings } import scala.reflect.io.AbstractFile abstract class Compat @@ -29,6 +30,37 @@ object Compat { def replReporter(settings: Settings, writer: PrintWriter) = writer def plainNioFile(path: Path): AbstractFile = new PlainNioFile(path) + + // Prepare pickle data for eventual storage, computing path within jar file from symbol ownership + // and storing data in a class that does not rely on a shared scala library. + // This is almost verbatim copied from scala.tools.nsc.PipelineMain, except that actually writing to the jar file + // is deferred to AnalysisCallback, after the final incremental compilation cycle. + def picklePaths[G <: Global](run: G#Run): Iterable[PickleData] = { + val rootPath = Paths.get("__ROOT__") + val dirs = mutable.Map[G#Symbol, Path]() + def packageDir(packSymbol: G#Symbol): Path = { + if (packSymbol.isEmptyPackageClass) rootPath + else if (dirs.contains(packSymbol)) dirs(packSymbol) + else if (packSymbol.owner.isRoot) { + val subDir = rootPath.resolve(packSymbol.encodedName) + dirs.put(packSymbol, subDir) + subDir + } else { + val base = packageDir(packSymbol.owner) + val subDir = base.resolve(packSymbol.encodedName) + dirs.put(packSymbol, subDir) + subDir + } + } + + for { (s, p) <- run.symData } yield { + val base = packageDir(s.owner) + val path = base.resolve(s.encodedName + ".sig") + // val path = symToPath(s,true) + val fqcn = s.fullNameString + PickleData.of(p, fqcn, p.bytes, p.writeIndex, path) + } + } } /** Defines compatibility utils for [[ZincCompiler]]. 
*/ diff --git a/internal/compiler-bridge/src/main/scala_2.13/xsbt/Compat.scala b/internal/compiler-bridge/src/main/scala_2.13/xsbt/Compat.scala index d0e638a47d..3b14638238 100644 --- a/internal/compiler-bridge/src/main/scala_2.13/xsbt/Compat.scala +++ b/internal/compiler-bridge/src/main/scala_2.13/xsbt/Compat.scala @@ -12,11 +12,13 @@ package xsbt import java.io.PrintWriter -import java.nio.file.Path +import java.nio.file.{ Path, Paths } +import xsbti.PickleData import xsbti.compile.Output -import scala.tools.nsc.Settings +import scala.tools.nsc.{ Global, Settings } import scala.tools.nsc.interpreter.shell.ReplReporterImpl import scala.reflect.io.AbstractFile +import scala.collection.mutable abstract class Compat object Compat { @@ -30,6 +32,37 @@ object Compat { new ReplReporterImpl(settings, writer) def plainNioFile(path: Path): AbstractFile = new PlainNioFile(path) + + // Prepare pickle data for eventual storage, computing path within jar file from symbol ownership + // and storing data in a class that does not rely on a shared scala library. + // This is almost verbatim copied from scala.tools.nsc.PipelineMain, except that actually writing to the jar file + // is deferred to AnalysisCallback, after the final incremental compilation cycle. 
+ def picklePaths[G <: Global](run: G#Run): Iterable[PickleData] = { + val rootPath = Paths.get("__ROOT__") + val dirs = mutable.Map[G#Symbol, Path]() + def packageDir(packSymbol: G#Symbol): Path = { + if (packSymbol.isEmptyPackageClass) rootPath + else if (dirs.contains(packSymbol)) dirs(packSymbol) + else if (packSymbol.owner.isRoot) { + val subDir = rootPath.resolve(packSymbol.encodedName) + dirs.put(packSymbol, subDir) + subDir + } else { + val base = packageDir(packSymbol.owner) + val subDir = base.resolve(packSymbol.encodedName) + dirs.put(packSymbol, subDir) + subDir + } + } + + for { (s, p) <- run.symData } yield { + val base = packageDir(s.owner) + val path = base.resolve(s.encodedName + ".sig") + // val path = symToPath(s,true) + val fqcn = s.fullNameString + PickleData.of(p, fqcn, p.bytes, p.writeIndex, path) + } + } } /** Defines compatibility utils for [[ZincCompiler]]. */ diff --git a/internal/compiler-interface/src/main/contraband-java/xsbti/PickleData.java b/internal/compiler-interface/src/main/contraband-java/xsbti/PickleData.java new file mode 100644 index 0000000000..0b49d44825 --- /dev/null +++ b/internal/compiler-interface/src/main/contraband-java/xsbti/PickleData.java @@ -0,0 +1,76 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package xsbti; +/** A wrapper around PickleBuffer https://github.com/scala/scala/blob/v2.13.1/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala */ +public final class PickleData implements java.io.Serializable { + + public static PickleData create(Object _underlying, String _fqcn, byte[] _data, int _writeIndex, java.nio.file.Path _path) { + return new PickleData(_underlying, _fqcn, _data, _writeIndex, _path); + } + public static PickleData of(Object _underlying, String _fqcn, byte[] _data, int _writeIndex, java.nio.file.Path _path) { + return new PickleData(_underlying, _fqcn, _data, _writeIndex, _path); + } + private Object underlying; + private String fqcn; + private byte[] data; + private int writeIndex; + private java.nio.file.Path path; + protected PickleData(Object _underlying, String _fqcn, byte[] _data, int _writeIndex, java.nio.file.Path _path) { + super(); + underlying = _underlying; + fqcn = _fqcn; + data = _data; + writeIndex = _writeIndex; + path = _path; + } + + public Object underlying() { + return this.underlying; + } + public String fqcn() { + return this.fqcn; + } + public byte[] data() { + return this.data; + } + public int writeIndex() { + return this.writeIndex; + } + public java.nio.file.Path path() { + return this.path; + } + public PickleData withUnderlying(Object underlying) { + return new PickleData(underlying, fqcn, data, writeIndex, path); + } + public PickleData withFqcn(String fqcn) { + return new PickleData(underlying, fqcn, data, writeIndex, path); + } + public PickleData withData(byte[] data) { + return new PickleData(underlying, fqcn, data, writeIndex, path); + } + public PickleData withWriteIndex(int writeIndex) { + return new PickleData(underlying, fqcn, data, writeIndex, path); + } + public PickleData withPath(java.nio.file.Path path) { + return new PickleData(underlying, fqcn, data, writeIndex, path); + } + public boolean equals(Object obj) { + if (this == obj) { + return true; + } 
else if (!(obj instanceof PickleData)) { + return false; + } else { + PickleData o = (PickleData)obj; + return this.underlying().equals(o.underlying()) && this.fqcn().equals(o.fqcn()) && java.util.Arrays.equals(this.data(), o.data()) && (this.writeIndex() == o.writeIndex()) && this.path().equals(o.path()); + } + } + public int hashCode() { + return 37 * (37 * (37 * (37 * (37 * (37 * (17 + "xsbti.PickleData".hashCode()) + underlying().hashCode()) + fqcn().hashCode()) + java.util.Arrays.hashCode(data())) + Integer.valueOf(writeIndex()).hashCode()) + path().hashCode()); + } + public String toString() { + return "PickleData(" + "underlying: " + underlying() + ", " + "fqcn: " + fqcn() + ", " + "data: " + data() + ", " + "writeIndex: " + writeIndex() + ", " + "path: " + path() + ")"; + } +} diff --git a/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java b/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java index e7987987ce..35ce9fb2e7 100644 --- a/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java +++ b/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java @@ -31,17 +31,17 @@ public static CompileOptions create(xsbti.VirtualFile[] _classpath, xsbti.Virtua public static CompileOptions of(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory) { return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory); } - public static CompileOptions create(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, 
int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional _temporaryClassesDirectory, java.util.Optional _converter, java.util.Optional _stamper) { - return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper); + public static CompileOptions create(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional _temporaryClassesDirectory, java.util.Optional _converter, java.util.Optional _stamper, java.util.Optional _earlyOutput) { + return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper, _earlyOutput); } - public static CompileOptions of(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional _temporaryClassesDirectory, java.util.Optional _converter, java.util.Optional _stamper) { - return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper); + public static CompileOptions of(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional _temporaryClassesDirectory, java.util.Optional _converter, 
java.util.Optional _stamper, java.util.Optional _earlyOutput) { + return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper, _earlyOutput); } - public static CompileOptions create(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory, xsbti.FileConverter _converter, xsbti.compile.analysis.ReadStamps _stamper) { - return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper); + public static CompileOptions create(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory, xsbti.FileConverter _converter, xsbti.compile.analysis.ReadStamps _stamper, xsbti.compile.Output _earlyOutput) { + return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper, _earlyOutput); } - public static CompileOptions of(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory, xsbti.FileConverter _converter, xsbti.compile.analysis.ReadStamps _stamper) { - return new CompileOptions(_classpath, 
_sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper); + public static CompileOptions of(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory, xsbti.FileConverter _converter, xsbti.compile.analysis.ReadStamps _stamper, xsbti.compile.Output _earlyOutput) { + return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory, _converter, _stamper, _earlyOutput); } private xsbti.VirtualFile[] classpath; private xsbti.VirtualFile[] sources; @@ -54,6 +54,7 @@ public static CompileOptions of(xsbti.VirtualFile[] _classpath, xsbti.VirtualFil private java.util.Optional temporaryClassesDirectory; private java.util.Optional converter; private java.util.Optional stamper; + private java.util.Optional earlyOutput; protected CompileOptions() { super(); classpath = new xsbti.VirtualFile[0]; @@ -67,6 +68,7 @@ protected CompileOptions() { temporaryClassesDirectory = java.util.Optional.empty(); converter = java.util.Optional.empty(); stamper = java.util.Optional.empty(); + earlyOutput = java.util.Optional.empty(); } protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order) { super(); @@ -81,6 +83,7 @@ protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _so temporaryClassesDirectory = java.util.Optional.empty(); converter = java.util.Optional.empty(); stamper = java.util.Optional.empty(); + earlyOutput = 
java.util.Optional.empty(); } protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional _temporaryClassesDirectory) { super(); @@ -95,6 +98,7 @@ protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _so temporaryClassesDirectory = _temporaryClassesDirectory; converter = java.util.Optional.empty(); stamper = java.util.Optional.empty(); + earlyOutput = java.util.Optional.empty(); } protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory) { super(); @@ -109,8 +113,9 @@ protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _so temporaryClassesDirectory = java.util.Optional.ofNullable(_temporaryClassesDirectory); converter = java.util.Optional.empty(); stamper = java.util.Optional.empty(); + earlyOutput = java.util.Optional.empty(); } - protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional _temporaryClassesDirectory, java.util.Optional _converter, java.util.Optional _stamper) { + protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional 
_temporaryClassesDirectory, java.util.Optional _converter, java.util.Optional _stamper, java.util.Optional _earlyOutput) { super(); classpath = _classpath; sources = _sources; @@ -123,8 +128,9 @@ protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _so temporaryClassesDirectory = _temporaryClassesDirectory; converter = _converter; stamper = _stamper; + earlyOutput = _earlyOutput; } - protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory, xsbti.FileConverter _converter, xsbti.compile.analysis.ReadStamps _stamper) { + protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _sources, java.nio.file.Path _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.nio.file.Path _temporaryClassesDirectory, xsbti.FileConverter _converter, xsbti.compile.analysis.ReadStamps _stamper, xsbti.compile.Output _earlyOutput) { super(); classpath = _classpath; sources = _sources; @@ -137,6 +143,7 @@ protected CompileOptions(xsbti.VirtualFile[] _classpath, xsbti.VirtualFile[] _so temporaryClassesDirectory = java.util.Optional.ofNullable(_temporaryClassesDirectory); converter = java.util.Optional.ofNullable(_converter); stamper = java.util.Optional.ofNullable(_stamper); + earlyOutput = java.util.Optional.ofNullable(_earlyOutput); } /** * The classpath to use for compilation. 
@@ -189,47 +196,57 @@ public java.util.Optional converter() { public java.util.Optional stamper() { return this.stamper; } + /** Output for pickle JAR used for build pipelining */ + public java.util.Optional earlyOutput() { + return this.earlyOutput; + } public CompileOptions withClasspath(xsbti.VirtualFile[] classpath) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withSources(xsbti.VirtualFile[] sources) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withClassesDirectory(java.nio.file.Path classesDirectory) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withScalacOptions(String[] scalacOptions) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, 
temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withJavacOptions(String[] javacOptions) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withMaxErrors(int maxErrors) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withSourcePositionMapper(java.util.function.Function sourcePositionMapper) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withOrder(xsbti.compile.CompileOrder order) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withTemporaryClassesDirectory(java.util.Optional temporaryClassesDirectory) { - return new 
CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withTemporaryClassesDirectory(java.nio.file.Path temporaryClassesDirectory) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, java.util.Optional.ofNullable(temporaryClassesDirectory), converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, java.util.Optional.ofNullable(temporaryClassesDirectory), converter, stamper, earlyOutput); } public CompileOptions withConverter(java.util.Optional converter) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withConverter(xsbti.FileConverter converter) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, java.util.Optional.ofNullable(converter), stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, java.util.Optional.ofNullable(converter), stamper, earlyOutput); } public CompileOptions withStamper(java.util.Optional stamper) { - return new CompileOptions(classpath, sources, 
classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); } public CompileOptions withStamper(xsbti.compile.analysis.ReadStamps stamper) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, java.util.Optional.ofNullable(stamper)); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, java.util.Optional.ofNullable(stamper), earlyOutput); + } + public CompileOptions withEarlyOutput(java.util.Optional earlyOutput) { + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, earlyOutput); + } + public CompileOptions withEarlyOutput(xsbti.compile.Output earlyOutput) { + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory, converter, stamper, java.util.Optional.ofNullable(earlyOutput)); } public boolean equals(Object obj) { if (this == obj) { @@ -238,13 +255,13 @@ public boolean equals(Object obj) { return false; } else { CompileOptions o = (CompileOptions)obj; - return java.util.Arrays.deepEquals(this.classpath(), o.classpath()) && java.util.Arrays.deepEquals(this.sources(), o.sources()) && this.classesDirectory().equals(o.classesDirectory()) && java.util.Arrays.deepEquals(this.scalacOptions(), o.scalacOptions()) && java.util.Arrays.deepEquals(this.javacOptions(), o.javacOptions()) && (this.maxErrors() == o.maxErrors()) && 
this.sourcePositionMapper().equals(o.sourcePositionMapper()) && this.order().equals(o.order()) && this.temporaryClassesDirectory().equals(o.temporaryClassesDirectory()) && this.converter().equals(o.converter()) && this.stamper().equals(o.stamper()); + return java.util.Arrays.deepEquals(this.classpath(), o.classpath()) && java.util.Arrays.deepEquals(this.sources(), o.sources()) && this.classesDirectory().equals(o.classesDirectory()) && java.util.Arrays.deepEquals(this.scalacOptions(), o.scalacOptions()) && java.util.Arrays.deepEquals(this.javacOptions(), o.javacOptions()) && (this.maxErrors() == o.maxErrors()) && this.sourcePositionMapper().equals(o.sourcePositionMapper()) && this.order().equals(o.order()) && this.temporaryClassesDirectory().equals(o.temporaryClassesDirectory()) && this.converter().equals(o.converter()) && this.stamper().equals(o.stamper()) && this.earlyOutput().equals(o.earlyOutput()); } } public int hashCode() { - return 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "xsbti.compile.CompileOptions".hashCode()) + java.util.Arrays.deepHashCode(classpath())) + java.util.Arrays.deepHashCode(sources())) + classesDirectory().hashCode()) + java.util.Arrays.deepHashCode(scalacOptions())) + java.util.Arrays.deepHashCode(javacOptions())) + Integer.valueOf(maxErrors()).hashCode()) + sourcePositionMapper().hashCode()) + order().hashCode()) + temporaryClassesDirectory().hashCode()) + converter().hashCode()) + stamper().hashCode()); + return 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "xsbti.compile.CompileOptions".hashCode()) + java.util.Arrays.deepHashCode(classpath())) + java.util.Arrays.deepHashCode(sources())) + classesDirectory().hashCode()) + java.util.Arrays.deepHashCode(scalacOptions())) + java.util.Arrays.deepHashCode(javacOptions())) + Integer.valueOf(maxErrors()).hashCode()) + sourcePositionMapper().hashCode()) + order().hashCode()) + temporaryClassesDirectory().hashCode()) + 
converter().hashCode()) + stamper().hashCode()) + earlyOutput().hashCode()); } public String toString() { - return "CompileOptions(" + "classpath: " + classpath() + ", " + "sources: " + sources() + ", " + "classesDirectory: " + classesDirectory() + ", " + "scalacOptions: " + scalacOptions() + ", " + "javacOptions: " + javacOptions() + ", " + "maxErrors: " + maxErrors() + ", " + "sourcePositionMapper: " + sourcePositionMapper() + ", " + "order: " + order() + ", " + "temporaryClassesDirectory: " + temporaryClassesDirectory() + ", " + "converter: " + converter() + ", " + "stamper: " + stamper() + ")"; + return "CompileOptions(" + "classpath: " + classpath() + ", " + "sources: " + sources() + ", " + "classesDirectory: " + classesDirectory() + ", " + "scalacOptions: " + scalacOptions() + ", " + "javacOptions: " + javacOptions() + ", " + "maxErrors: " + maxErrors() + ", " + "sourcePositionMapper: " + sourcePositionMapper() + ", " + "order: " + order() + ", " + "temporaryClassesDirectory: " + temporaryClassesDirectory() + ", " + "converter: " + converter() + ", " + "stamper: " + stamper() + ", " + "earlyOutput: " + earlyOutput() + ")"; } } diff --git a/internal/compiler-interface/src/main/contraband/incremental.contra b/internal/compiler-interface/src/main/contraband/incremental.contra index d00ae92a14..85661d09dd 100644 --- a/internal/compiler-interface/src/main/contraband/incremental.contra +++ b/internal/compiler-interface/src/main/contraband/incremental.contra @@ -211,6 +211,10 @@ type CompileOptions { ## ReadStamps to calculate timestamp or hash. stamper: xsbti.compile.analysis.ReadStamps @since("1.4.0") + + ## Output for pickle JAR used for build pipelining + earlyOutput: xsbti.compile.Output + @since("1.4.0") } ## This is used as part of CompileResult. 
diff --git a/internal/compiler-interface/src/main/contraband/pickle.contra b/internal/compiler-interface/src/main/contraband/pickle.contra new file mode 100644 index 0000000000..a8da11b23f --- /dev/null +++ b/internal/compiler-interface/src/main/contraband/pickle.contra @@ -0,0 +1,11 @@ +package xsbti +@target(Java) + +## A wrapper around PickleBuffer https://github.com/scala/scala/blob/v2.13.1/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +type PickleData { + underlying: raw"Object"! + fqcn: String! + data: [Byte] + writeIndex: Int! + path: java.nio.file.Path! +} diff --git a/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java b/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java index 4318576487..74537f0098 100644 --- a/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java +++ b/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java @@ -204,4 +204,8 @@ void problem(String what, */ java.util.Set classesInOutputJar(); + /** + * Pass new pickle data as of this point. + */ + void pickleData(PickleData[] data); } diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ExternalHooks.java b/internal/compiler-interface/src/main/java/xsbti/compile/ExternalHooks.java index d628ceba72..5b29f4d376 100644 --- a/internal/compiler-interface/src/main/java/xsbti/compile/ExternalHooks.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/ExternalHooks.java @@ -59,6 +59,12 @@ interface Lookup { boolean shouldDoIncrementalCompilation(Set changedClasses, CompileAnalysis previousAnalysis); Optional hashClasspath(VirtualFile[] classpath); + + /** + * For build pipelining support, this provides a hook for the build tool + * to store Analysis information mid-compilation. 
+ */ + void storeEarlyAnalysis(CompileAnalysis earlyAnalysis, MiniSetup setup); } interface GetProvenance { diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java b/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java index 52e4065de1..954a674020 100644 --- a/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java @@ -103,6 +103,7 @@ CompileResult compile(ScalaCompiler scalaCompiler, VirtualFile[] sources, VirtualFile[] classpath, Output output, + Optional earlyOutput, GlobalsCache globalsCache, String[] scalacOptions, String[] javacOptions, @@ -161,6 +162,7 @@ CompileResult compile(ScalaCompiler scalaCompiler, Path[] sources, Path[] classpath, Output output, + Optional earlyOutput, GlobalsCache globalsCache, String[] scalacOptions, String[] javacOptions, diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala index 8ce1cc6887..6453946d97 100644 --- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala @@ -18,6 +18,7 @@ import java.io.File import java.util.UUID import sbt.io.syntax.URL +import xsbti.VirtualFileRef import xsbti.compile.{ Output, SingleOutput } import java.nio.file.{ Files, Path, Paths } @@ -112,6 +113,9 @@ object JarUtils { def fromFile(f: File): ClassInJar = new ClassInJar(f.toString) def fromPath(p: Path): ClassInJar = new ClassInJar(p.toString) + + def fromVirtualFileRef(vf: VirtualFileRef): ClassInJar = + new ClassInJar(vf.id) } /** diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala index 58d66dd46b..4994313ed3 100755 --- 
a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala +++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala @@ -15,6 +15,7 @@ package inc import xsbti.compile.{ Output, OutputGroup } import java.nio.file.Path +import java.util.Optional /** * Define helpers to create [[CompileOutput]] to pass to the incremental @@ -46,9 +47,17 @@ object CompileOutput { def apply(groups: Array[OutputGroup]): Output = new ConcreteMultipleOutput(groups) + lazy val empty: Output = new EmptyOutput() + def outputGroup(source: Path, output: Path): OutputGroup = new ConcreteOutputGroup(source, output) + private final class EmptyOutput extends xsbti.compile.Output { + override def getSingleOutput(): Optional[Path] = Optional.empty() + override def getMultipleOutput(): Optional[Array[OutputGroup]] = Optional.empty() + override def toString: String = "EmptyOutput()" + } + private final class ConcreteSingleOutput(val getOutputDirectory: Path) extends xsbti.compile.SingleOutput { override def toString: String = s"SingleOutput($getOutputDirectory)" diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala index d8deab6180..23d83cc226 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala @@ -19,13 +19,24 @@ import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct } import sbt.util.{ InterfaceUtil, Level, Logger } import sbt.util.InterfaceUtil.jo2o import scala.collection.JavaConverters._ -import xsbti.{ FileConverter, Position, Problem, Severity, UseScope, VirtualFile, VirtualFileRef } +import scala.collection.mutable +import xsbti.{ + FileConverter, + PickleData, + Position, + Problem, + Severity, + UseScope, + VirtualFile, + VirtualFileRef +} import xsbt.api.{ APIUtil, HashAPI, NameHashing } import xsbti.api._ import xsbti.compile.{ 
CompileAnalysis, DependencyChanges, IncOptions, + MiniSetup, Output, ClassFileManager => XClassFileManager } @@ -46,6 +57,58 @@ object Incremental { } } + /** + * This is a callback from AnalysisCallback back up to Zinc code to + * perform mid-compilation. + * + * @param classFileManager + */ + abstract class IncrementalCallback(classFileManager: XClassFileManager) { + + /** + * Merge latest analysis as of pickling into pruned previous analysis, compute invalidations + * and decide whether we need another cycle. + */ + def mergeAndInvalidate(partialAnalysis: Analysis, completingCycle: Boolean): CompileCycleResult + + /** + * Merge latest analysis as of analyzer into pruned previous analysis and inform file manager. + */ + def completeCycle( + prev: Option[CompileCycleResult], + partialAnalysis: Analysis + ): CompileCycleResult + + // def previousAnalysisPruned: Analysis + + /** + * @return true when the compilation cycle is compiling all the sources; false, otherwise. + */ + def isFullCompilation: Boolean + } + + sealed trait CompileCycle { + def run( + sources: Set[VirtualFile], + changes: DependencyChanges, + incHandler: IncrementalCallback + ): CompileCycleResult + } + case class CompileCycleResult( + continue: Boolean, + nextInvalidations: Set[String], + analysis: Analysis + ) + object CompileCycleResult { + def apply( + continue: Boolean, + nextInvalidations: Set[String], + analysis: Analysis + ): CompileCycleResult = + new CompileCycleResult(continue, nextInvalidations, analysis) + def empty = CompileCycleResult(false, Set.empty, Analysis.empty) + } + /** * Runs the incremental compilation algorithm. 
* @@ -65,18 +128,21 @@ object Incremental { sources: Set[VirtualFile], converter: FileConverter, lookup: Lookup, + previous0: CompileAnalysis, + options: IncOptions, + currentSetup: MiniSetup, + stamper: ReadStamps, + output: Output, + outputJarContent: JarUtils.OutputJarContent, + earlyOutput: Option[Output], + log: Logger + )( compile: ( Set[VirtualFile], DependencyChanges, xsbti.AnalysisCallback, XClassFileManager - ) => Unit, - previous0: CompileAnalysis, - output: Output, - log: Logger, - options: IncOptions, - outputJarContent: JarUtils.OutputJarContent, - stamper: ReadStamps + ) => Unit ): (Boolean, Analysis) = { log.debug(s"[zinc] IncrementalCompile -----------") val previous = previous0 match { case a: Analysis => a } @@ -86,6 +152,9 @@ object Incremental { val internalSourceToClassNamesMap: VirtualFile => Set[String] = (f: VirtualFile) => previous.relations.classNames(f) val externalAPI = getExternalAPI(lookup) + val profiler = InvalidationProfiler.empty + val runProfiler = profiler.profileRun + val incremental: IncrementalCommon = new IncrementalNameHashing(log, options, runProfiler) try { incrementalCompile( sources, @@ -99,16 +168,20 @@ object Incremental { internalSourceToClassNamesMap, externalAPI, currentStamper, - output, options, - outputJarContent, + currentSetup, converter, + lookup, + output, + outputJarContent, + earlyOutput, log ), - log, + incremental, options, output, - outputJarContent + outputJarContent, + log ) } catch { case _: xsbti.CompileCancelled => @@ -155,16 +228,15 @@ object Incremental { XClassFileManager ) => Unit, callbackBuilder: AnalysisCallback.Builder, - log: sbt.util.Logger, + incremental: IncrementalCommon, options: IncOptions, output: Output, outputJarContent: JarUtils.OutputJarContent, - profiler: InvalidationProfiler = InvalidationProfiler.empty + // earlyOutput: Option[Output], + log: sbt.util.Logger )(implicit equivS: Equiv[XStamp]): (Boolean, Analysis) = { log.debug("IncrementalCompile.incrementalCompile") val 
previous = previous0 match { case a: Analysis => a } - val runProfiler = profiler.profileRun - val incremental: IncrementalCommon = new IncrementalNameHashing(log, options, runProfiler) val initialChanges = incremental.detectInitialChanges(sources, previous, current, lookup, converter, output) log.debug(s"> initialChanges = $initialChanges") @@ -214,13 +286,19 @@ object Incremental { ) => Unit, callbackBuilder: AnalysisCallback.Builder, classFileManager: XClassFileManager - )(srcs: Set[VirtualFile], changes: DependencyChanges): Analysis = { - // Note `ClassFileManager` is shared among multiple cycles in the same incremental compile run, - // in order to rollback entirely if transaction fails. `AnalysisCallback` is used by each cycle - // to report its own analysis individually. - val callback = callbackBuilder.build() - compile(srcs, changes, callback, classFileManager) - callback.getOnce + ): CompileCycle = new CompileCycle { + override def run( + srcs: Set[VirtualFile], + changes: DependencyChanges, + incHandler: IncrementalCallback + ): CompileCycleResult = { + // Note `ClassFileManager` is shared among multiple cycles in the same incremental compile run, + // in order to rollback entirely if transaction fails. `AnalysisCallback` is used by each cycle + // to report its own analysis individually. 
+ val callback = callbackBuilder.build(incHandler) + compile(srcs, changes, callback, classFileManager) + callback.getOnce + } } // the name of system property that was meant to enable debugging mode of incremental compiler but @@ -277,22 +355,29 @@ private object AnalysisCallback { internalSourceToClassNamesMap: VirtualFile => Set[String], externalAPI: (VirtualFileRef, String) => Option[AnalyzedClass], stampReader: ReadStamps, - output: Output, options: IncOptions, - outputJarContent: JarUtils.OutputJarContent, + currentSetup: MiniSetup, converter: FileConverter, + lookup: Lookup, + output: Output, + outputJarContent: JarUtils.OutputJarContent, + earlyOutput: Option[Output], log: Logger ) { - def build(): AnalysisCallback = { + def build(incHandler: Incremental.IncrementalCallback): AnalysisCallback = { new AnalysisCallback( internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, stampReader, - output, options, + currentSetup, outputJarContent, converter, + lookup, + output, + earlyOutput, + incHandler, log ) } @@ -304,12 +389,18 @@ private final class AnalysisCallback( internalSourceToClassNamesMap: VirtualFile => Set[String], externalAPI: (VirtualFileRef, String) => Option[AnalyzedClass], stampReader: ReadStamps, - output: Output, options: IncOptions, + currentSetup: MiniSetup, outputJarContent: JarUtils.OutputJarContent, converter: FileConverter, + lookup: Lookup, + output: Output, + earlyOutput: Option[Output], + incHandler: Incremental.IncrementalCallback, log: Logger ) extends xsbti.AnalysisCallback { + import Incremental.CompileCycleResult + // This must have a unique value per AnalysisCallback private[this] val compileStartTime: Long = System.currentTimeMillis() private[this] val compilation: Compilation = Compilation(compileStartTime, output) @@ -361,6 +452,11 @@ private final class AnalysisCallback( // source files containing a macro def. 
private[this] val macroClasses = ConcurrentHashMap.newKeySet[String]() + // Results of invalidation calculations (including whether to continue cycles) - the analysis at this point is + not useful and so isn't included. + private[this] var invalidationResults: Option[CompileCycleResult] = None + private[this] val pklData: mutable.ArrayBuffer[PickleData] = new mutable.ArrayBuffer() + private def add[A, B](map: TrieMap[A, ConcurrentSet[B]], a: A, b: B): Unit = { map.getOrElseUpdate(a, ConcurrentHashMap.newKeySet[B]()).add(b) () } @@ -528,13 +624,17 @@ private final class AnalysisCallback( override def enabled(): Boolean = options.enabled private[this] var gotten: Boolean = false - def getOnce: Analysis = { + def getOnce: CompileCycleResult = { assert(!gotten, "can't call AnalysisCallback#getOnce more than once") gotten = true outputJarContent.scalacRunCompleted() + val a = getAnalysis + incHandler.completeCycle(invalidationResults, a) + } + + private def getAnalysis: Analysis = { val analysis0 = addProductsAndDeps(Analysis.empty) - val analysis = addUsedNames(addCompilation(analysis0)) - analysis + addUsedNames(addCompilation(analysis0)) } def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten @@ -652,14 +752,61 @@ private final class AnalysisCallback( } } + override def apiPhaseCompleted(): Unit = { + // If we know we're done with cycles (presumably because all sources were invalidated) we can store early analysis + and pickle data now. Otherwise, we need to wait for dependency information to decide if there are more cycles. 
+ if (options.pipelining() && incHandler.isFullCompilation) { + val a = getAnalysis + val CompileCycleResult(continue, invalidations, merged) = + incHandler.mergeAndInvalidate(a, false) + assert( + !continue && invalidations.isEmpty, + "everything was supposed to be invalidated already" + ) + invalidationResults = Some(CompileCycleResult.empty) + writeEarlyArtifacts(merged) + } + } + override def dependencyPhaseCompleted(): Unit = { + if (invalidationResults.isEmpty) { + val a = getAnalysis + val CompileCycleResult(continue, invalidations, merged) = + incHandler.mergeAndInvalidate(a, false) + // Store invalidations and continuation decision; the analysis will be computed again after Analyze phase. + invalidationResults = Some(CompileCycleResult(continue, invalidations, Analysis.empty)) + // If there will be no more compilation cycles, store the early analysis file and update the pickle jar + if (options.pipelining && !continue) { + writeEarlyArtifacts(merged) + } + } outputJarContent.dependencyPhaseCompleted() } - override def apiPhaseCompleted(): Unit = {} - override def classesInOutputJar(): java.util.Set[String] = { outputJarContent.get().asJava } + override def pickleData(data: Array[PickleData]): Unit = { + if (options.pipelining && data.nonEmpty) { + pklData ++= data + } + } + + private def writeEarlyArtifacts(merged: Analysis): Unit = { + lookup.storeEarlyAnalysis(merged, currentSetup) + for { + earlyO <- earlyOutput + pickleJarPath <- jo2o(earlyO.getSingleOutput()) + } { + // List classes defined in the files that were compiled in this run. 
+ val knownClasses = merged.relations.allSources + .flatMap(merged.relations.products) + .flatMap(merged.relations.classNames) + // .map(JarUtils.ClassInJar.fromVirtualFileRef(_).toClassFilePath) + PickleJar.write(pickleJarPath, pklData, knownClasses, log) + // hooks.picklesComplete() + } + () + } } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala index e11a69a7c1..bd1c4a3886 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala @@ -26,7 +26,13 @@ import xsbti.compile.{ } import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp } import scala.collection.Iterator -import Incremental.{ PrefixingLogger, apiDebug } +import Incremental.{ + CompileCycle, + CompileCycleResult, + IncrementalCallback, + PrefixingLogger, + apiDebug +} /** * Defines the core logic to compile incrementally and apply the class invalidation after @@ -70,7 +76,7 @@ private[inc] abstract class IncrementalCommon( binaryChanges: DependencyChanges, lookup: ExternalLookup, previous: Analysis, - doCompile: (Set[VirtualFile], DependencyChanges) => Analysis, + doCompile: CompileCycle, classfileManager: XClassFileManager, output: Output, cycleNum: Int @@ -88,16 +94,46 @@ private[inc] abstract class IncrementalCommon( val invalidatedRefs: Set[VirtualFileRef] = mapInvalidationsToSources(classesToRecompile, initialChangedSources, vs, previous) val invalidatedSources: Set[VirtualFile] = invalidatedRefs map { converter.toVirtualFile } - val current = - recompileClasses( + val pruned = + IncrementalCommon.pruneClassFilesOfInvalidations( invalidatedSources, - converter, - binaryChanges, previous, - doCompile, - classfileManager + classfileManager, + converter + ) + debug("********* Pruned: \n" + pruned.relations + "\n*********") + val handler = + new IncrementalCallbackImpl( + 
invalidatedSources, + classfileManager, + pruned, + classesToRecompile, { + ( + recompiledClasses: Set[String], + newApiChanges: APIChanges, + nextInvalidations: Set[String], + continuePerLookup: Boolean + ) => + profiler.registerCycle( + invalidatedClasses, + invalidatedByPackageObjects, + initialChangedSources, + invalidatedSources, + recompiledClasses, + newApiChanges, + nextInvalidations, + continuePerLookup + ) + } ) + // Actual compilation takes place here + log.debug(s"compilation cycle $cycleNum") + val result = doCompile.run(invalidatedSources, binaryChanges, handler) + val continue = result.continue + val nextInvalidations = result.nextInvalidations + val current = result.analysis + // Return immediate analysis as all sources have been recompiled if (invalidatedSources == allSources) CycleState( @@ -114,37 +150,6 @@ private[inc] abstract class IncrementalCommon( cycleNum + 1 ) else { - val recompiledClasses: Set[String] = { - // Represents classes detected as changed externally and internally (by a previous cycle) - classesToRecompile ++ - // Maps the changed sources by the user to class names we can count as invalidated - initialChangedSources.flatMap(previous.relations.classNames) ++ - initialChangedSources.flatMap(current.relations.classNames) - } - - val newApiChanges = - detectAPIChanges(recompiledClasses, previous.apis.internalAPI, current.apis.internalAPI) - debug("\nChanges:\n" + newApiChanges) - val nextInvalidations = invalidateAfterInternalCompilation( - current.relations, - newApiChanges, - recompiledClasses, - cycleNum >= options.transitiveStep, - IncrementalCommon.comesFromScalaSource(previous.relations, Some(current.relations)) - ) - - val continue = lookup.shouldDoIncrementalCompilation(nextInvalidations, current) - - profiler.registerCycle( - invalidatedClasses, - invalidatedByPackageObjects, - initialChangedSources, - invalidatedSources, - recompiledClasses, - newApiChanges, - nextInvalidations, - continue - ) CycleState( if (continue) 
nextInvalidations else Set.empty, Set.empty, @@ -160,6 +165,78 @@ private[inc] abstract class IncrementalCommon( ) } } + + /** + * IncrementalCallbackImpl is a callback handler that the custom + phases injected by Zinc call back to perform certain operations mid-compilation. + In particular, for pipelining, we need to know whether the current + incremental cycle is going to be the last cycle or not. + */ + class IncrementalCallbackImpl( + invalidatedSources: Set[VirtualFile], + classFileManager: XClassFileManager, + pruned: Analysis, + classesToRecompile: Set[String], + registerCycle: (Set[String], APIChanges, Set[String], Boolean) => Unit + ) extends IncrementalCallback(classFileManager) { + override val isFullCompilation: Boolean = + allSources.subsetOf(invalidatedSources) + + override def mergeAndInvalidate( + partialAnalysis: Analysis, + completingCycle: Boolean + ): CompileCycleResult = { + val analysis = + if (isFullCompilation) partialAnalysis + else pruned ++ partialAnalysis + val recompiledClasses: Set[String] = { + // Represents classes detected as changed externally and internally (by a previous cycle) + classesToRecompile ++ + // Maps the changed sources by the user to class names we can count as invalidated + initialChangedSources.flatMap(previous.relations.classNames) ++ + initialChangedSources.flatMap(analysis.relations.classNames) + } + val newApiChanges = + detectAPIChanges(recompiledClasses, previous.apis.internalAPI, analysis.apis.internalAPI) + debug("\nChanges:\n" + newApiChanges) + val nextInvalidations = + if (isFullCompilation) Set.empty[String] + else + invalidateAfterInternalCompilation( + analysis.relations, + newApiChanges, + recompiledClasses, + cycleNum >= options.transitiveStep, + IncrementalCommon + .comesFromScalaSource(previous.relations, Some(analysis.relations)) _ + ) + // No matter what shouldDoIncrementalCompilation returns, we are not in fact going to + continue if there are no invalidations. 
Assume the result is somehow interesting for + // profiling... or a bug. + val continuePerLookup = + if (isFullCompilation) false + else lookup.shouldDoIncrementalCompilation(nextInvalidations, analysis) + val continue = continuePerLookup && nextInvalidations.nonEmpty + + // If we're completing the cycle, then mergeAndInvalidate has already been called + if (!completingCycle) { + registerCycle(recompiledClasses, newApiChanges, nextInvalidations, continuePerLookup) + } + CompileCycleResult(continue, nextInvalidations, analysis) + } + + override def completeCycle( + prev: Option[CompileCycleResult], + partialAnalysis: Analysis + ): CompileCycleResult = { + val a1 = pruned ++ partialAnalysis + val results = prev.fold(mergeAndInvalidate(a1, true))(_.copy(analysis = a1)) + val products = a1.relations.allProducts + .map(converter.toVirtualFile(_)) + classFileManager.generated(products.toArray) + results + } + } } /** @@ -192,7 +269,7 @@ private[inc] abstract class IncrementalCommon( binaryChanges: DependencyChanges, lookup: ExternalLookup, previous: Analysis, - doCompile: (Set[VirtualFile], DependencyChanges) => Analysis, + doCompile: CompileCycle, classfileManager: XClassFileManager, output: Output, cycleNum: Int @@ -237,35 +314,6 @@ private[inc] abstract class IncrementalCommon( expand(invalidatedClasses.flatMap(previous.relations.definesClass) ++ aggregateSources) } - def recompileClasses( - sources: Set[VirtualFile], - converter: FileConverter, - binaryChanges: DependencyChanges, - previous: Analysis, - doCompile: (Set[VirtualFile], DependencyChanges) => Analysis, - classfileManager: XClassFileManager - ): Analysis = { - val pruned = - IncrementalCommon.pruneClassFilesOfInvalidations( - sources, - previous, - classfileManager, - converter - ) - debug("********* Pruned: \n" + pruned.relations + "\n*********") - val fresh = doCompile(sources, binaryChanges) - debug("********* Fresh: \n" + fresh.relations + "\n*********") - - val products = 
fresh.relations.allProducts.toList - /* This is required for both scala compilation and forked java compilation, despite - * being redundant for the most common Java compilation (using the local compiler). */ - classfileManager.generated(products.map(converter.toVirtualFile(_)).toArray) - - val merged = pruned ++ fresh - debug("********* Merged: \n" + merged.relations + "\n*********") - merged - } - /** * Detects the API changes of `recompiledClasses`. * diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala index 97b49438a8..04713bb771 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala @@ -16,7 +16,7 @@ import java.util.Optional import xsbti.api.AnalyzedClass import xsbti.{ VirtualFileRef, VirtualFile } -import xsbti.compile.{ Changes, CompileAnalysis, ExternalHooks, FileHash } +import xsbti.compile.{ Changes, CompileAnalysis, ExternalHooks, FileHash, MiniSetup } /** * A trait that encapsulates looking up elements on a classpath and looking up @@ -142,4 +142,5 @@ trait NoopExternalLookup extends ExternalLookup { ): Boolean = true override def hashClasspath(classpath: Array[VirtualFile]): Optional[Array[FileHash]] = Optional.empty() + override def storeEarlyAnalysis(earlyAnalysis: CompileAnalysis, setup: MiniSetup): Unit = () } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala new file mode 100644 index 0000000000..08de7fc891 --- /dev/null +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala @@ -0,0 +1,133 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package sbt +package internal +package inc + +import java.io.{ Closeable, OutputStream } +import java.nio.file.{ + Files, + FileSystems, + FileVisitResult, + Path, + StandardOpenOption, + SimpleFileVisitor +} +import java.nio.file.attribute.{ BasicFileAttributes, FileTime } +import java.time.Instant + +import sbt.util.Logger +import xsbti.PickleData +import scala.collection.JavaConverters._ +import scala.collection.mutable + +object PickleJar { + def write( + pickleOut: Path, + data: Iterable[PickleData], + knownClasses: collection.Set[String], + log: Logger + ): Path = { + + // def trace(msg: String) = log.trace(() => new Exception(msg)) + + var pj: RootJarPath = null + try { + pj = RootJarPath(pickleOut) + Files.createDirectories(pj.root) + val knownPaths = knownClasses + + val writtenPickles = new java.util.IdentityHashMap[AnyRef, String]() + val writtenSyms = new mutable.HashSet[String] + for { pickle <- data } { + val fqcn = pickle.fqcn() + // Reference to original scalac PickleBuffer (as AnyRef to avoid version dependence). + // For some reason, these might be duplicated. 
+ val orig = pickle.underlying() + if (!writtenPickles.containsKey(orig)) { + if (writtenSyms.contains(fqcn)) + log.warn(s"Found duplicate fqcn $fqcn while writing pickles!") + val elems: Iterable[String] = pickle.path.asScala.map(_.toString) + assert(elems.head == "__ROOT__") + val primary = elems.tail.foldLeft(pj.root)(_.resolve(_)) + Files.createDirectories(primary.getParent) + var out: OutputStream = null + try { + out = Files.newOutputStream( + primary, + StandardOpenOption.CREATE, + StandardOpenOption.TRUNCATE_EXISTING + ) + out.write(pickle.data, 0, pickle.writeIndex) + writtenSyms += fqcn + // trace(s"Added $fqcn to pickle jar") + } finally { + if (out ne null) + out.close() + } + writtenPickles.put(pickle.underlying, fqcn) + } + } + + Files.walkFileTree( + pj.root, + new SimpleFileVisitor[Path] { + override def visitFile(path: Path, attrs: BasicFileAttributes): FileVisitResult = { + val ps = path.toString + if (ps.endsWith(".sig")) { + // "/foo/bar/wiz.sig" -> "foo/bar/wiz.class" + val i0 = if (ps.startsWith("/")) 1 else 0 + val cp = ps.substring(i0, ps.length - 3) + "class" + if (!knownPaths.contains(cp)) { + // log.info(s"Deleting sig for removed class $path") + // trace(s"Removed $cp from picklejar") + Files.delete(path) + } + + } + FileVisitResult.CONTINUE + } + } + ) + + } finally { + if (pj ne null) + pj.close() + } + Files.setLastModifiedTime(pickleOut, FileTime.from(Instant.now())) + } +} + +// RootJarPath is identical to s.reflect.io.RootPath, except that it doesn't delete pre-existing jars. 
+abstract class RootJarPath extends Closeable { + def root: Path +} + +object RootJarPath { + def apply(path: Path): RootJarPath = { + assert(path.getFileName.toString.endsWith(".jar")) + import java.net.URI + val zipFile = URI.create("jar:file:" + path.toUri.getPath) + val env = new java.util.HashMap[String, String]() + if (!Files.exists(path.getParent)) + Files.createDirectories(path.getParent) + if (!Files.exists(path)) + env.put("create", "true") + val zipfs = FileSystems.newFileSystem(zipFile, env) + new RootJarPath { + def root = zipfs.getRootDirectories.iterator().next() + def close(): Unit = { + zipfs.close() + } + } + } +} diff --git a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala index 150f62a706..e431b30c02 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala @@ -596,7 +596,7 @@ case class ProjectStructure( cp.toArray, vs.toArray, output, - // Some(earlyOutput), + Some(earlyOutput), scalacOptions, javacOptions = Array(), maxErrors, diff --git a/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala b/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala index 7b81100610..f679a5b48c 100644 --- a/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala +++ b/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala @@ -110,6 +110,8 @@ class TestCallback extends AnalysisCallback { override def apiPhaseCompleted(): Unit = {} override def classesInOutputJar(): util.Set[String] = java.util.Collections.emptySet() + + override def pickleData(data: Array[PickleData]): Unit = () } object TestCallback { diff --git a/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala b/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala index d687546ce6..1198c22407 100644 --- 
a/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala +++ b/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala @@ -46,7 +46,6 @@ final class CompileConfiguration( val sources: Seq[VirtualFile], val converter: FileConverter, val classpath: Seq[VirtualFile], - val output: Output, val previousAnalysis: CompileAnalysis, val previousSetup: Option[MiniSetup], val currentSetup: MiniSetup, @@ -57,6 +56,8 @@ final class CompileConfiguration( val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, val incOptions: IncOptions, + val output: Output, val outputJarContent: JarUtils.OutputJarContent, + val earlyOutput: Option[Output], val stampReader: ReadStamps, ) diff --git a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala index 86f52a004f..643f35b0ad 100644 --- a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala +++ b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala @@ -61,6 +61,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { sources, classpath, CompileOutput(classesDirectory), + earlyOutput.toOption, cache, progress().toOption, scalacOptions, @@ -75,7 +76,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { temporaryClassesDirectory.toOption, extraOptions, conv, - stamper.toOption.getOrElse(defaultStampReader) + stamper.toOption.getOrElse(defaultStampReader), )(logger) } @@ -122,6 +123,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { sources: Array[VirtualFile], classpath: Array[VirtualFile], output: xsbti.compile.Output, + earlyOutput: Optional[xsbti.compile.Output], cache: xsbti.compile.GlobalsCache, scalaOptions: Array[String], javaOptions: Array[String], @@ -146,6 +148,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { sources.toVector, classpath.toSeq, output, + earlyOutput.toOption, cache, progress.toOption, scalaOptions.toSeq, @@ -207,6 +210,7 @@ class 
IncrementalCompilerImpl extends IncrementalCompiler { sources: Array[Path], classpath: Array[Path], output: xsbti.compile.Output, + earlyOutput: Optional[xsbti.compile.Output], cache: xsbti.compile.GlobalsCache, scalaOptions: Array[String], javaOptions: Array[String], @@ -233,6 +237,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { vs, cp, output, + earlyOutput.toOption, cache, progress.toOption, scalaOptions.toSeq, @@ -328,6 +333,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { sources: Seq[VirtualFile], classpath: Seq[VirtualFile], output: Output, + earlyOutput: Option[Output], cache: GlobalsCache, progress: Option[CompileProgress] = None, scalaOptions: Seq[String] = Nil, @@ -373,7 +379,6 @@ class IncrementalCompilerImpl extends IncrementalCompiler { sources, converter, classpath, - output, cache, progress, scalaOptions ++ extraScalacOptions, @@ -385,7 +390,9 @@ class IncrementalCompilerImpl extends IncrementalCompiler { compileOrder, skip, incrementalOptions, + output, outputJarContent, + earlyOutput, stampReader, extra ) @@ -443,14 +450,15 @@ class IncrementalCompilerImpl extends IncrementalCompiler { srcsSet, converter, lookup, - mixedCompiler.compile, analysis, - output, - log, incOptions, - outputJarContent, + currentSetup, stampReader, - ) + output, + outputJarContent, + earlyOutput, + log + )(mixedCompiler.compile) compile.swap } @@ -495,6 +503,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { classpath: Array[VirtualFile], sources: Array[VirtualFile], classesDirectory: Path, + earlyJarPath: Option[Path], scalacOptions: Array[String], javacOptions: Array[String], maxErrors: Int, @@ -520,6 +529,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { temporaryClassesDirectory, Option(converter).toOptional, Option(stampReader).toOptional, + (earlyJarPath map { CompileOutput(_) }).toOptional, ) } inputs(compileOptions, compilers, setup, pr) diff --git a/zinc/src/main/scala/sbt/internal/inc/LookupImpl.scala 
b/zinc/src/main/scala/sbt/internal/inc/LookupImpl.scala index 9a100bdccb..9feb489eb1 100644 --- a/zinc/src/main/scala/sbt/internal/inc/LookupImpl.scala +++ b/zinc/src/main/scala/sbt/internal/inc/LookupImpl.scala @@ -87,4 +87,9 @@ class LookupImpl(compileConfiguration: CompileConfiguration, previousSetup: Opti override def hashClasspath(classpath: Array[VirtualFile]): Optional[Array[FileHash]] = externalLookup.map(_.hashClasspath(classpath)).getOrElse(Optional.empty()) + + override def storeEarlyAnalysis(earlyAnalysis: CompileAnalysis, setup: MiniSetup): Unit = { + externalLookup.map(_.storeEarlyAnalysis(earlyAnalysis, setup)) + () + } } diff --git a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala index 57afd1926d..2d7215dfdf 100644 --- a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala +++ b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala @@ -234,7 +234,6 @@ object MixedAnalyzingCompiler { sources: Seq[VirtualFile], converter: FileConverter, // this is needed to thaw ref back to path for stamping classpath: Seq[VirtualFile], - output: Output, cache: GlobalsCache, progress: Option[CompileProgress] = None, options: Seq[String] = Nil, @@ -246,7 +245,9 @@ object MixedAnalyzingCompiler { compileOrder: CompileOrder = Mixed, skip: Boolean = false, incrementalCompilerOptions: IncOptions, + output: Output, outputJarContent: JarUtils.OutputJarContent, + earlyOutput: Option[Output], stamper: ReadStamps, extra: List[(String, String)] ): CompileConfiguration = { @@ -277,7 +278,6 @@ object MixedAnalyzingCompiler { sources, converter, classpath, - output, compileSetup, progress, previousAnalysis, @@ -289,7 +289,9 @@ object MixedAnalyzingCompiler { skip, cache, incrementalCompilerOptions, + output, outputJarContent, + earlyOutput, stamper ) } @@ -298,7 +300,6 @@ object MixedAnalyzingCompiler { sources: Seq[VirtualFile], converter: FileConverter, classpath: 
Seq[VirtualFile], - output: Output, setup: MiniSetup, progress: Option[CompileProgress], previousAnalysis: CompileAnalysis, @@ -310,14 +311,15 @@ object MixedAnalyzingCompiler { skip: Boolean, cache: GlobalsCache, incrementalCompilerOptions: IncOptions, + output: Output, outputJarContent: JarUtils.OutputJarContent, + earlyOutput: Option[Output], stamper: ReadStamps, ): CompileConfiguration = { new CompileConfiguration( sources, converter, classpath, - output, previousAnalysis, previousSetup, setup, @@ -328,7 +330,9 @@ object MixedAnalyzingCompiler { javac, cache, incrementalCompilerOptions, + output, outputJarContent, + earlyOutput, stamper ) } diff --git a/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala b/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala index c818a1741b..e4250c87e8 100644 --- a/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala +++ b/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala @@ -42,6 +42,7 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { Files.createDirectories(sub2Directory) Files.createDirectories(sub2Directory / "src") val targetDir2 = sub2Directory / "target" + val earlyOutput2 = targetDir2 / "early-output.jar" val cacheFile2 = targetDir2 / "inc_compile.zip" val fileStore2 = AnalysisStore.getCachedStore(FileAnalysisStore.getDefault(cacheFile2.toFile)) @@ -51,6 +52,7 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { Files.createDirectories(sub1Directory / "src") Files.createDirectories(sub1Directory / "lib") val targetDir = sub1Directory / "target" + val earlyOutput = targetDir / "early-output.jar" val cacheFile = targetDir / "inc_compile.zip" val fileStore = AnalysisStore.getCachedStore(FileAnalysisStore.getDefault(cacheFile.toFile)) val dependerFile = sub1Directory / "src" / "Depender.scala" @@ -99,6 +101,7 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { cp.toArray, vs, targetDir, + Some(earlyOutput), 
Array(), Array(), maxErrors, @@ -126,6 +129,7 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { cp.toArray, vs1, targetDir, + Some(earlyOutput), Array(), Array(), maxErrors, @@ -173,6 +177,7 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { cp2.toArray, vs2, targetDir2, + Some(earlyOutput2), Array(), Array(), maxErrors, @@ -222,6 +227,7 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { cp.toArray, vs3, targetDir, + Some(earlyOutput), Array(), Array(), maxErrors, diff --git a/zinc/src/test/scala/sbt/inc/TestProjectSetup.scala b/zinc/src/test/scala/sbt/inc/TestProjectSetup.scala index 02d33dfa45..a376ede7fb 100644 --- a/zinc/src/test/scala/sbt/inc/TestProjectSetup.scala +++ b/zinc/src/test/scala/sbt/inc/TestProjectSetup.scala @@ -91,6 +91,7 @@ case class TestProjectSetup( val output: Path = if (outputToJar) baseLocation.resolve("target").resolve("output.jar") else defaultClassesDir + val earlyOutput: Path = baseLocation.resolve("target").resolve("early-output.jar") def defaultStoreLocation: Path = baseLocation.resolve("inc_data.zip") @@ -105,6 +106,7 @@ case class TestProjectSetup( si, compilerBridge, output, + earlyOutput, baseLocation, allSources.toVector map converter.toVirtualFile, allClasspath, @@ -160,6 +162,7 @@ object TestProjectSetup { si: xsbti.compile.ScalaInstance, compilerBridge: Path, output: Path, + earlyOutput: Path, tempDir: Path, sources: Seq[VirtualFile], classpath: Seq[VirtualFile], @@ -220,6 +223,7 @@ object TestProjectSetup { cp.toArray, sources.toArray, output, + Some(earlyOutput), scalacOptions.toArray, Array(), maxErrors, diff --git a/zinc/src/test/scala/sbt/inc/VirtualFileIncrementalSpec.scala b/zinc/src/test/scala/sbt/inc/VirtualFileIncrementalSpec.scala index 0248ac86b6..50a49512f3 100644 --- a/zinc/src/test/scala/sbt/inc/VirtualFileIncrementalSpec.scala +++ b/zinc/src/test/scala/sbt/inc/VirtualFileIncrementalSpec.scala @@ -39,6 +39,7 @@ class 
VirtualFileIncrementalSpec extends BridgeProviderSpecification { Files.createDirectories(sub1Directory) Files.createDirectories(sub1Directory / "lib") val targetDir = sub1Directory / "target" + val earlyOutput = targetDir / "early-output.jar" val cacheFile = targetDir / "inc_compile.zip" val fileStore = AnalysisStore.getCachedStore(FileAnalysisStore.getDefault(cacheFile.toFile)) val dependerFile: VirtualFile = @@ -100,6 +101,7 @@ object Depender2 { cp.toArray, sources, classesDirectory = targetDir, + Some(earlyOutput), Array(), Array(), maxErrors, @@ -120,6 +122,7 @@ object Depender2 { println((targetDir.toFile ** "*").get.toList.toString) val expectedOut = targetDir.resolve("test").resolve("pkg").resolve("Depender$.class") assert(Files.exists(expectedOut), s"$expectedOut does not exist") + assert(Files.exists(earlyOutput), s"$earlyOutput does not exist") val prev1 = fileStore.get.toOption match { case Some(contents) => @@ -131,6 +134,7 @@ object Depender2 { cp.toArray, sources1, targetDir, + Some(earlyOutput), Array(), Array(), maxErrors, @@ -149,6 +153,7 @@ object Depender2 { fileStore.set(AnalysisContents.create(result1.analysis(), result1.setup())) val expectedOut1 = targetDir.resolve("test").resolve("pkg").resolve("Depender2$.class") assert(Files.exists(expectedOut), s"$expectedOut1 does not exist") + assert(Files.exists(earlyOutput), s"$earlyOutput does not exist") } } diff --git a/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala b/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala index 2658a35701..1a15d8b7e7 100644 --- a/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala +++ b/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala @@ -65,7 +65,6 @@ class CachedHashingSpec extends BaseCompilerSpec { options.sources, options.converter.toOption.get, giganticClasspath, - output, setup.cache, setup.progress.toOption, options.scalacOptions, @@ -77,7 +76,9 @@ class CachedHashingSpec extends BaseCompilerSpec { options.order, 
setup.skip, setup.incrementalCompilerOptions, + output, JarUtils.createOutputJarContent(output), + options.earlyOutput.toOption, options.stamper.toOption.get, setup.extra.toList.map(_.toScalaTuple) )