diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
index c121fbaf7c00..a906d52ccd4e 100644
--- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -73,11 +73,20 @@ class CompilationUnit protected (val source: SourceFile) {
   /** List of all comments present in this compilation unit */
   var comments: List[Comment] = Nil
 
+  /** This is used to record dependencies to invalidate during incremental
+   *  compilation, but only if `ctx.runZincPhases` is true.
+   */
+  val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder()
+
   /** Suspends the compilation unit by throwing a SuspendException
    *  and recording the suspended compilation unit
    */
   def suspend()(using Context): Nothing =
     assert(isSuspendable)
+    // Clear references to symbols that may become stale. No need to call
+    // `depRecorder.sendToZinc()` since all compilation phases will be rerun
+    // when this unit is unsuspended.
+    depRecorder.clear()
     if !suspended then
       if (ctx.settings.XprintSuspension.value)
         report.echo(i"suspended: $this")
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
index 5ecf17be32a9..6398fe613b12 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -676,11 +676,16 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
 
     // In the Scala2 ExtractAPI phase we only extract annotations that extend
    // StaticAnnotation, but in Dotty we currently pickle all annotations so we
-    // extract everything (except annotations missing from the classpath which
-    // we simply skip over, and inline body annotations which are handled above).
+    // extract everything, except:
+    // - annotations missing from the classpath which we simply skip over
+    // - inline body annotations which are handled above
+    // - the Child annotation since we already extract children via
+    //   `api.ClassLike#childrenOfSealedClass` and adding this annotation would
+    //   lead to overcompilation when using zinc's
+    //   `IncOptions#useOptimizedSealed`.
     s.annotations.foreach { annot =>
       val sym = annot.symbol
-      if sym.exists && sym != defn.BodyAnnot then
+      if sym.exists && sym != defn.BodyAnnot && sym != defn.ChildAnnot then
         annots += apiAnnotation(annot)
     }
 
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
index 32ff7aab0206..ef9c939c2ea7 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
@@ -69,12 +69,13 @@ class ExtractDependencies extends Phase {
 
   override def run(using Context): Unit = {
     val unit = ctx.compilationUnit
-    val collector = new ExtractDependenciesCollector
+    val rec = unit.depRecorder
+    val collector = ExtractDependenciesCollector(rec)
     collector.traverse(unit.tpdTree)
 
     if (ctx.settings.YdumpSbtInc.value) {
-      val deps = collector.dependencies.map(_.toString).toArray[Object]
-      val names = collector.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object]
+      val deps = rec.classDependencies.map(_.toString).toArray[Object]
+      val names = rec.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object]
       Arrays.sort(deps)
       Arrays.sort(names)
 
@@ -91,61 +92,7 @@ class ExtractDependencies extends Phase {
       } finally pw.close()
     }
 
-    ctx.withIncCallback: cb =>
-      collector.usedNames.foreach {
-        case (clazz, usedNames) =>
-          val className = classNameAsString(clazz)
-          usedNames.names.foreach {
-            case (usedName, scopes) =>
-              cb.usedName(className, usedName.toString, scopes)
-          }
-      }
-      collector.dependencies.foreach(recordDependency)
-  }
-
-  /*
-   * Handles dependency on given symbol by trying to figure out if represents a term
-   * that is coming from either source code (not necessarily compiled in this compilation
-   * run) or from class file and calls respective callback method.
-   */
-  def recordDependency(dep: ClassDependency)(using Context): Unit = {
-    val fromClassName = classNameAsString(dep.from)
-    val sourceFile = ctx.compilationUnit.source
-
-    def binaryDependency(file: Path, binaryClassName: String) =
-      ctx.withIncCallback(_.binaryDependency(file, binaryClassName, fromClassName, sourceFile, dep.context))
-
-    def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = {
-      depFile match {
-        case ze: ZipArchive#Entry => // The dependency comes from a JAR
-          ze.underlyingSource match
-            case Some(zip) if zip.jpath != null =>
-              binaryDependency(zip.jpath, binaryClassName)
-            case _ =>
-        case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem
-          binaryDependency(pf.jpath, binaryClassName)
-        case _ =>
-          internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.from.srcPos)
-      }
-    }
-
-    val depFile = dep.to.associatedFile
-    if (depFile != null) {
-      // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417)
-      def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance
-      val depClassFile =
-        if depFile.isClass then depFile
-        else depFile.resolveSibling(dep.to.binaryClassName + ".class")
-      if (depClassFile != null) {
-        // Dependency is external -- source is undefined
-        processExternalDependency(depClassFile, dep.to.binaryClassName)
-      } else if (allowLocal || depFile != sourceFile.file) {
-        // We cannot ignore dependencies coming from the same source file because
-        // the dependency info needs to propagate. See source-dependencies/trait-trait-211.
-        val toClassName = classNameAsString(dep.to)
-        ctx.withIncCallback(_.classDependency(toClassName, fromClassName, dep.context))
-      }
-    }
+    rec.sendToZinc()
   }
 }
 
@@ -161,31 +108,6 @@ object ExtractDependencies {
     report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos)
 }
 
-private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext)
-
-/** An object that maintain the set of used names from within a class */
-private final class UsedNamesInClass {
-  private val _names = new mutable.HashMap[Name, EnumSet[UseScope]]
-  def names: collection.Map[Name, EnumSet[UseScope]] = _names
-
-  def update(name: Name, scope: UseScope): Unit = {
-    val scopes = _names.getOrElseUpdate(name, EnumSet.noneOf(classOf[UseScope]))
-    scopes.add(scope)
-  }
-
-  override def toString(): String = {
-    val builder = new StringBuilder
-    names.foreach { case (name, scopes) =>
-      builder.append(name.mangledString)
-      builder.append(" in [")
-      scopes.forEach(scope => builder.append(scope.toString))
-      builder.append("]")
-      builder.append(", ")
-    }
-    builder.toString()
-  }
-}
-
 /** Extract the dependency information of a compilation unit.
  *
  *  To understand why we track the used names see the section "Name hashing
@@ -194,110 +116,18 @@ private final class UsedNamesInClass {
  *  algorithm" in the "Name hashing algorithm" section. Names of objects and classes are tracked
  *  specially, see the subsection "Dependencies introduced by member reference and
  *  inheritance" in the "Name hashing algorithm" section.
  */
-private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeTraverser =>
+private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd.TreeTraverser { thisTreeTraverser =>
   import tpd._
 
-  private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass]
-  private val _dependencies = new mutable.HashSet[ClassDependency]
-
-  /** The names used in this class, this does not include names which are only
-   *  defined and not referenced.
-   */
-  def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames
-
-  /** The set of class dependencies from this compilation unit.
-   */
-  def dependencies: Set[ClassDependency] = _dependencies
-
-  /** Top level import dependencies are registered as coming from a first top level
-   *  class/trait/object declared in the compilation unit. If none exists, issue warning.
-   */
-  private var _responsibleForImports: Symbol = _
-  private def responsibleForImports(using Context) = {
-    def firstClassOrModule(tree: Tree) = {
-      val acc = new TreeAccumulator[Symbol] {
-        def apply(x: Symbol, t: Tree)(using Context) =
-          t match {
-            case typeDef: TypeDef =>
-              typeDef.symbol
-            case other =>
-              foldOver(x, other)
-          }
-      }
-      acc(NoSymbol, tree)
-    }
-
-    if (_responsibleForImports == null) {
-      val tree = ctx.compilationUnit.tpdTree
-      _responsibleForImports = firstClassOrModule(tree)
-      if (!_responsibleForImports.exists)
-        report.warning("""|No class, trait or object is defined in the compilation unit.
-                          |The incremental compiler cannot record the dependency information in such case.
-                          |Some errors like unused import referring to a non-existent class might not be reported.
-                          |""".stripMargin, tree.sourcePos)
-    }
-    _responsibleForImports
-  }
-
-  private var lastOwner: Symbol = _
-  private var lastDepSource: Symbol = _
-
-  /**
-   * Resolves dependency source (that is, the closest non-local enclosing
-   * class from a given `ctx.owner`
-   */
-  private def resolveDependencySource(using Context): Symbol = {
-    def nonLocalEnclosingClass = {
-      var clazz = ctx.owner.enclosingClass
-      var owner = clazz
-
-      while (!owner.is(PackageClass)) {
-        if (owner.isTerm) {
-          clazz = owner.enclosingClass
-          owner = clazz
-        } else {
-          owner = owner.owner
-        }
-      }
-      clazz
-    }
-
-    if (lastOwner != ctx.owner) {
-      lastOwner = ctx.owner
-      val source = nonLocalEnclosingClass
-      lastDepSource = if (source.is(PackageClass)) responsibleForImports else source
-    }
-
-    lastDepSource
-  }
-
-  private def addUsedName(fromClass: Symbol, name: Name, scope: UseScope): Unit = {
-    val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass)
-    usedName.update(name, scope)
-  }
-
-  private def addUsedName(name: Name, scope: UseScope)(using Context): Unit = {
-    val fromClass = resolveDependencySource
-    if (fromClass.exists) { // can happen when visiting imports
-      assert(fromClass.isClass)
-      addUsedName(fromClass, name, scope)
-    }
-  }
-
   private def addMemberRefDependency(sym: Symbol)(using Context): Unit =
     if (!ignoreDependency(sym)) {
-      val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass
-      assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym")
-
-      val fromClass = resolveDependencySource
-      if (fromClass.exists) { // can happen when visiting imports
-        assert(fromClass.isClass)
+      rec.addUsedName(sym)
+      // packages have class symbol. Only record them as used names but not dependency
+      if (!sym.is(Package)) {
+        val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass
+        assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym")
 
-        addUsedName(fromClass, sym.zincMangledName, UseScope.Default)
-        // packages have class symbol. Only record them as used names but not dependency
-        if (!sym.is(Package)) {
-          _dependencies += ClassDependency(fromClass, enclOrModuleClass, DependencyByMemberRef)
-        }
+        rec.addClassDependency(enclOrModuleClass, DependencyByMemberRef)
       }
     }
@@ -305,15 +135,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
     // If the tpt is empty, this is a non-SAM lambda, so no need to register
     // an inheritance relationship.
     if !tree.tpt.isEmpty then
-      val from = resolveDependencySource
-      _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance)
+      rec.addClassDependency(tree.tpt.tpe.classSymbol, LocalDependencyByInheritance)
 
   private def addInheritanceDependencies(tree: Template)(using Context): Unit =
     if (tree.parents.nonEmpty) {
       val depContext = depContextOf(tree.symbol.owner)
-      val from = resolveDependencySource
       for parent <- tree.parents do
-        _dependencies += ClassDependency(from, parent.tpe.classSymbol, depContext)
+        rec.addClassDependency(parent.tpe.classSymbol, depContext)
     }
 
   private def depContextOf(cls: Symbol)(using Context): DependencyContext =
@@ -351,7 +179,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
         for sel <- selectors if !sel.isWildcard do
           addImported(sel.name)
           if sel.rename != sel.name then
-            addUsedName(sel.rename, UseScope.Default)
+            rec.addUsedRawName(sel.rename)
       case exp @ Export(expr, selectors) =>
         val dep = expr.tpe.classSymbol
         if dep.exists && selectors.exists(_.isWildcard) then
@@ -364,8 +192,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
           // inheritance dependency in the presence of wildcard exports
          // to ensure all new members of `dep` are forwarded to.
           val depContext = depContextOf(ctx.owner.lexicallyEnclosingClass)
-          val from = resolveDependencySource
-          _dependencies += ClassDependency(from, dep, depContext)
+          rec.addClassDependency(dep, depContext)
       case t: TypeTree =>
         addTypeDependency(t.tpe)
       case ref: RefTree =>
@@ -472,10 +299,246 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
     val traverser = new TypeDependencyTraverser {
       def addDependency(symbol: Symbol) =
         if (!ignoreDependency(symbol) && symbol.is(Sealed)) {
-          val usedName = symbol.zincMangledName
-          addUsedName(usedName, UseScope.PatMatTarget)
+          rec.addUsedName(symbol, includeSealedChildren = true)
         }
     }
     traverser.traverse(tpe)
   }
 }
+
+case class ClassDependency(fromClass: Symbol, toClass: Symbol, context: DependencyContext)
+
+/** Record dependencies using `addUsedName`/`addClassDependency` and inform Zinc using `sendToZinc()`.
+ *
+ *  Note: As an alternative design choice, we could directly call the appropriate
+ *  callback as we record each dependency; this way we wouldn't need to record
+ *  them locally and we could get rid of `sendToZinc()`, but this may be less
+ *  efficient since it would mean calling `classNameAsString` on each call
+ *  to `addUsedName` rather than once per class.
+ */
+class DependencyRecorder {
+  import ExtractDependencies.*
+
+  /** A map from a non-local class to the names it uses; this does not include
+   *  names that are only defined and not referenced.
+   */
+  def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames
+
+  /** Record a reference to the name of `sym` from the current non-local
+   *  enclosing class.
+   *
+   *  @param includeSealedChildren See documentation of `addUsedRawName`.
+   */
+  def addUsedName(sym: Symbol, includeSealedChildren: Boolean = false)(using Context): Unit =
+    addUsedRawName(sym.zincMangledName, includeSealedChildren)
+
+  /** Record a reference to `name` from the current non-local enclosing class (aka, the "from class").
+   *
+   *  Most of the time, prefer to use `addUsedName`, which takes
+   *  care of name mangling.
+   *
+   *  Zinc will use this information to invalidate the current non-local
+   *  enclosing class if something changes in the set of definitions named
+   *  `name` among the possible dependencies of the from class.
+   *
+   *  @param includeSealedChildren If true, the addition or removal of children
+   *                               to a sealed class called `name` will also
+   *                               invalidate the from class.
+   *                               Note that this only has an effect if zinc's
+   *                               `IncOptions.useOptimizedSealed` is enabled,
+   *                               otherwise the addition or removal of children
+   *                               always leads to invalidation.
+   *
+   *  TODO: If the compiler reported to zinc all usages of
+   *  `SymDenotation#{children,sealedDescendants}` (including from macro code),
+   *  we should be able to turn `IncOptions.useOptimizedSealed` on by default
+   *  safely.
+   */
+  def addUsedRawName(name: Name, includeSealedChildren: Boolean = false)(using Context): Unit = {
+    val fromClass = resolveDependencySource
+    if (fromClass.exists) {
+      val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass)
+      usedName.update(name, includeSealedChildren)
+    }
+  }
+
+  // The two possible values of `UseScope`. To avoid unnecessary allocations,
+  // we use vals here, but that means we must be careful to never mutate these sets.
+  private val DefaultScopes = EnumSet.of(UseScope.Default)
+  private val PatMatScopes = EnumSet.of(UseScope.Default, UseScope.PatMatTarget)
+
+  /** An object that maintains the set of used names from within a class */
+  final class UsedNamesInClass {
+    /** Each key corresponds to a name used in the class. To understand the meaning
+     *  of the associated value, see the documentation of parameter `includeSealedChildren`
+     *  of `addUsedRawName`.
+     */
+    private val _names = new mutable.HashMap[Name, DefaultScopes.type | PatMatScopes.type]
+
+    def names: collection.Map[Name, EnumSet[UseScope]] = _names
+
+    private[DependencyRecorder] def update(name: Name, includeSealedChildren: Boolean): Unit = {
+      if (includeSealedChildren)
+        _names(name) = PatMatScopes
+      else
+        _names.getOrElseUpdate(name, DefaultScopes)
+    }
+
+    override def toString(): String = {
+      val builder = new StringBuilder
+      names.foreach { case (name, scopes) =>
+        builder.append(name.mangledString)
+        builder.append(" in [")
+        scopes.forEach(scope => builder.append(scope.toString))
+        builder.append("]")
+        builder.append(", ")
+      }
+      builder.toString()
+    }
+  }
+
+
+  private val _classDependencies = new mutable.HashSet[ClassDependency]
+
+  def classDependencies: Set[ClassDependency] = _classDependencies
+
+  /** Record a dependency on the class `toClass` in the given `context`
+   *  from the current non-local enclosing class.
+   */
+  def addClassDependency(toClass: Symbol, context: DependencyContext)(using Context): Unit =
+    val fromClass = resolveDependencySource
+    if (fromClass.exists)
+      _classDependencies += ClassDependency(fromClass, toClass, context)
+
+  private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass]
+
+  /** Send the collected dependency information to Zinc and clear the local caches. */
+  def sendToZinc()(using Context): Unit =
+    ctx.withIncCallback: cb =>
+      usedNames.foreach:
+        case (clazz, usedNames) =>
+          val className = classNameAsString(clazz)
+          usedNames.names.foreach:
+            case (usedName, scopes) =>
+              cb.usedName(className, usedName.toString, scopes)
+      classDependencies.foreach(recordClassDependency(cb, _))
+    clear()
+
+  /** Clear all state. */
+  def clear(): Unit =
+    _usedNames.clear()
+    _classDependencies.clear()
+    lastOwner = NoSymbol
+    lastDepSource = NoSymbol
+    _responsibleForImports = NoSymbol
+
+  /** Handles a dependency on a given symbol by trying to figure out whether it represents
+   *  a term coming from source code (not necessarily compiled in this compilation run) or
+   *  from a class file, and calls the respective callback method.
+   */
+  private def recordClassDependency(cb: interfaces.IncrementalCallback, dep: ClassDependency)(using Context): Unit = {
+    val fromClassName = classNameAsString(dep.fromClass)
+    val sourceFile = ctx.compilationUnit.source
+
+    def binaryDependency(file: Path, binaryClassName: String) =
+      cb.binaryDependency(file, binaryClassName, fromClassName, sourceFile, dep.context)
+
+    def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = {
+      depFile match {
+        case ze: ZipArchive#Entry => // The dependency comes from a JAR
+          ze.underlyingSource match
+            case Some(zip) if zip.jpath != null =>
+              binaryDependency(zip.jpath, binaryClassName)
+            case _ =>
+        case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem
+          binaryDependency(pf.jpath, binaryClassName)
+        case _ =>
+          internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.fromClass.srcPos)
+      }
+    }
+
+    val depFile = dep.toClass.associatedFile
+    if (depFile != null) {
+      // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417)
+      def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance
+      val depClassFile =
+        if depFile.isClass then depFile
+        else depFile.resolveSibling(dep.toClass.binaryClassName + ".class")
+      if (depClassFile != null) {
+        // Dependency is external -- source is undefined
+        processExternalDependency(depClassFile, dep.toClass.binaryClassName)
+      } else if (allowLocal || depFile != sourceFile.file) {
+        // We cannot ignore dependencies coming from the same source file because
+        // the dependency info needs to propagate. See source-dependencies/trait-trait-211.
+        val toClassName = classNameAsString(dep.toClass)
+        cb.classDependency(toClassName, fromClassName, dep.context)
+      }
+    }
+  }
+
+  private var lastOwner: Symbol = _
+  private var lastDepSource: Symbol = _
+
+  /** The source of the dependency according to `nonLocalEnclosingClass`
+   *  if it exists, otherwise fall back to `responsibleForImports`.
+   *
+   *  This is backed by a cache which is invalidated when `ctx.owner` changes.
+   */
+  private def resolveDependencySource(using Context): Symbol = {
+    if (lastOwner != ctx.owner) {
+      lastOwner = ctx.owner
+      val source = nonLocalEnclosingClass
+      lastDepSource = if (source.is(PackageClass)) responsibleForImports else source
+    }
+
+    lastDepSource
+  }
+
+  /** The closest non-local enclosing class from `ctx.owner`. */
+  private def nonLocalEnclosingClass(using Context): Symbol = {
+    var clazz = ctx.owner.enclosingClass
+    var owner = clazz
+
+    while (!owner.is(PackageClass)) {
+      if (owner.isTerm) {
+        clazz = owner.enclosingClass
+        owner = clazz
+      } else {
+        owner = owner.owner
+      }
+    }
+    clazz
+  }
+
+  private var _responsibleForImports: Symbol = _
+
+  /** Top level import dependencies are registered as coming from the first top level
+   *  class/trait/object declared in the compilation unit. If none exists, issue a warning and return NoSymbol.
+   */
+  private def responsibleForImports(using Context) = {
+    import tpd.*
+    def firstClassOrModule(tree: Tree) = {
+      val acc = new TreeAccumulator[Symbol] {
+        def apply(x: Symbol, t: Tree)(using Context) =
+          t match {
+            case typeDef: TypeDef =>
+              typeDef.symbol
+            case other =>
+              foldOver(x, other)
+          }
+      }
+      acc(NoSymbol, tree)
+    }
+
+    if (_responsibleForImports == null) {
+      val tree = ctx.compilationUnit.tpdTree
+      _responsibleForImports = firstClassOrModule(tree)
+      if (!_responsibleForImports.exists)
+        report.warning("""|No class, trait or object is defined in the compilation unit.
+                          |The incremental compiler cannot record the dependency information in such case.
+                          |Some errors like unused import referring to a non-existent class might not be reported.
+                          |""".stripMargin, tree.sourcePos)
+    }
+    _responsibleForImports
+  }
+}
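
For orientation, the recorder is reachable from any phase through the current compilation unit, and the Synthesizer change below uses exactly this pattern. A minimal sketch of recording a dependency from compiler code, assuming a `Context` is in scope, the same imports the Synthesizer change adds (`sbt.ExtractDependencies.*`, `xsbti.api.DependencyContext._`), and a hypothetical `someClass` symbol:

    if ctx.runZincPhases then
      val rec = ctx.compilationUnit.depRecorder
      rec.addClassDependency(someClass, DependencyByMemberRef)
      rec.addUsedName(someClass)
    // No call to `rec.sendToZinc()` here: ExtractDependencies.run sends everything
    // once per compilation unit, so callers only record.
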
diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
index 3504133cc421..cbb13a841946 100644
--- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
@@ -19,6 +19,9 @@ import ast.Trees.genericEmptyTree
 import annotation.{tailrec, constructorOnly}
 import ast.tpd._
 import Synthesizer._
+import sbt.ExtractDependencies.*
+import sbt.ClassDependency
+import xsbti.api.DependencyContext._
 
 /** Synthesize terms for special classes */
 class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
@@ -457,7 +460,14 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
           val reason = s"it reduces to a tuple with arity $arity, expected arity <= $maxArity"
           withErrors(i"${defn.PairClass} is not a generic product because $reason")
       case MirrorSource.ClassSymbol(pre, cls) =>
-        if cls.isGenericProduct then makeProductMirror(pre, cls, None)
+        if cls.isGenericProduct then
+          if ctx.runZincPhases then
+            // The mirror should be resynthesized if the constructor of the
+            // case class `cls` changes. See `sbt-test/source-dependencies/mirror-product`.
+            val rec = ctx.compilationUnit.depRecorder
+            rec.addClassDependency(cls, DependencyByMemberRef)
+            rec.addUsedName(cls.primaryConstructor)
+          makeProductMirror(pre, cls, None)
         else withErrors(i"$cls is not a generic product because ${cls.whyNotGenericProduct}")
       case Left(msg) =>
         withErrors(i"type `$mirroredType` is not a generic product because $msg")
@@ -477,6 +487,13 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
 
     val clsIsGenericSum = cls.isGenericSum(pre)
     if acceptableMsg.isEmpty && clsIsGenericSum then
+      if ctx.runZincPhases then
+        // The mirror should be resynthesized if any child of the sealed class
+        // `cls` changes. See `sbt-test/source-dependencies/mirror-sum`.
+        val rec = ctx.compilationUnit.depRecorder
+        rec.addClassDependency(cls, DependencyByMemberRef)
+        rec.addUsedName(cls, includeSealedChildren = true)
+
       val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString)))
 
     def internalError(msg: => String)(using Context): Unit =
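
The scripted tests that follow exercise these two code paths. For reference, the new dependencies are recorded where the mirror is synthesized, i.e. at call sites such as the following sketch (using the `MyProduct` and `Sum` definitions from the test sources below; the summons go through the product and sum mirror paths shown above):

    val pm = summon[scala.deriving.Mirror.ProductOf[MyProduct]] // records MyProduct and its primary constructor
    val sm = summon[scala.deriving.Mirror.SumOf[Sum]]           // records Sum with includeSealedChildren = true
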
diff --git a/sbt-test/source-dependencies/mirror-product/MyProduct.scala b/sbt-test/source-dependencies/mirror-product/MyProduct.scala
new file mode 100644
index 000000000000..acad1358f62b
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-product/MyProduct.scala
@@ -0,0 +1 @@
+case class MyProduct(x: Int)
diff --git a/sbt-test/source-dependencies/mirror-product/Test.scala b/sbt-test/source-dependencies/mirror-product/Test.scala
new file mode 100644
index 000000000000..e53d7b999517
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-product/Test.scala
@@ -0,0 +1,10 @@
+import scala.deriving.Mirror
+import scala.compiletime.erasedValue
+
+transparent inline def foo[T](using m: Mirror.Of[T]): Int =
+  inline erasedValue[m.MirroredElemTypes] match
+    case _: (Int *: EmptyTuple) => 1
+    case _: (Int *: String *: EmptyTuple) => 2
+
+@main def Test =
+  assert(foo[MyProduct] == 2)
diff --git a/sbt-test/source-dependencies/mirror-product/build.sbt b/sbt-test/source-dependencies/mirror-product/build.sbt
new file mode 100644
index 000000000000..63e314982c41
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-product/build.sbt
@@ -0,0 +1 @@
+scalaVersion := sys.props("plugin.scalaVersion")
diff --git a/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala b/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala
new file mode 100644
index 000000000000..87e5af62bd7e
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala
@@ -0,0 +1 @@
+case class MyProduct(x: Int, y: String)
diff --git a/sbt-test/source-dependencies/mirror-product/test b/sbt-test/source-dependencies/mirror-product/test
new file mode 100644
index 000000000000..fbcd15fa4153
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-product/test
@@ -0,0 +1,7 @@
+> compile
+
+# change the case class constructor
+$ copy-file changes/MyProduct.scala MyProduct.scala
+
+# Both MyProduct.scala and Test.scala should be recompiled, otherwise the assertion will fail
+> run
diff --git a/sbt-test/source-dependencies/mirror-sum/Sum.scala b/sbt-test/source-dependencies/mirror-sum/Sum.scala
new file mode 100644
index 000000000000..782f5a89d913
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-sum/Sum.scala
@@ -0,0 +1,2 @@
+sealed trait Sum
+case class Child1() extends Sum
diff --git a/sbt-test/source-dependencies/mirror-sum/Test.scala b/sbt-test/source-dependencies/mirror-sum/Test.scala
new file mode 100644
index 000000000000..9cb6e2c78d64
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-sum/Test.scala
@@ -0,0 +1,12 @@
+import scala.deriving.Mirror
+import scala.compiletime.erasedValue
+
+object Test:
+  transparent inline def foo[T](using m: Mirror.Of[T]): Int =
+    inline erasedValue[m.MirroredElemLabels] match
+      case _: ("Child1" *: EmptyTuple) => 1
+      case _: ("Child1" *: "Child2" *: EmptyTuple) => 2
+
+  def main(args: Array[String]): Unit =
+    assert(foo[Sum] == 2)
+
diff --git a/sbt-test/source-dependencies/mirror-sum/build.sbt b/sbt-test/source-dependencies/mirror-sum/build.sbt
new file mode 100644
index 000000000000..de89f34feb3b
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-sum/build.sbt
@@ -0,0 +1,4 @@
+scalaVersion := sys.props("plugin.scalaVersion")
+// Use more precise invalidation, otherwise the reference to `Sum` in
+// Test.scala is enough to invalidate it when a child is added.
+ThisBuild / incOptions ~= { _.withUseOptimizedSealed(true) }
diff --git a/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala b/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala
new file mode 100644
index 000000000000..13ec68223ed4
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala
@@ -0,0 +1,3 @@
+sealed trait Sum
+case class Child1() extends Sum
+case class Child2() extends Sum
diff --git a/sbt-test/source-dependencies/mirror-sum/test b/sbt-test/source-dependencies/mirror-sum/test
new file mode 100644
index 000000000000..baf5f17f9905
--- /dev/null
+++ b/sbt-test/source-dependencies/mirror-sum/test
@@ -0,0 +1,7 @@
+> compile
+
+# Add a child
+$ copy-file changes/Sum.scala Sum.scala
+
+# Both Sum.scala and Test.scala should be recompiled, otherwise the assertion will fail
+> run
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala b/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala
new file mode 100644
index 000000000000..06c191cba9ed
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala
@@ -0,0 +1,2 @@
+sealed trait Sealed
+class Child1 extends Sealed
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/Test.scala b/sbt-test/source-dependencies/useOptimizedSealed/Test.scala
new file mode 100644
index 000000000000..086e359babc4
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/Test.scala
@@ -0,0 +1,3 @@
+class Test {
+  val s: Sealed = new Child1
+}
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/build.sbt b/sbt-test/source-dependencies/useOptimizedSealed/build.sbt
new file mode 100644
index 000000000000..1c4c78828a55
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/build.sbt
@@ -0,0 +1,29 @@
+scalaVersion := sys.props("plugin.scalaVersion")
+
+ThisBuild / incOptions ~= { _.withUseOptimizedSealed(true) }
+
+import sbt.internal.inc.Analysis
+import complete.DefaultParsers._
+
+// Reset compiler iterations, necessary because tests run in batch mode
+val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
+recordPreviousIterations := {
+  val log = streams.value.log
+  CompileState.previousIterations = {
+    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
+    previousAnalysis match {
+      case None =>
+        log.info("No previous analysis detected")
+        0
+      case Some(a: Analysis) => a.compilations.allCompilations.size
+    }
+  }
+}
+
+val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.")
+
+checkIterations := {
+  val expected: Int = (Space ~> NatBasic).parsed
+  val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations
+  assert(expected == actual, s"Expected $expected compilations, got $actual")
+}
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala
new file mode 100644
index 000000000000..7ce9b1119871
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala
@@ -0,0 +1,3 @@
+sealed trait Sealed
+class Child1 extends Sealed
+class Child2 extends Sealed
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala
new file mode 100644
index 000000000000..ccf27d31d219
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala
@@ -0,0 +1,4 @@
+sealed trait Sealed
+class Child1 extends Sealed
+class Child2 extends Sealed
+class Child3 extends Sealed
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala
new file mode 100644
index 000000000000..559fef4d481a
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala
@@ -0,0 +1,7 @@
+class Test {
+  def foo(x: Sealed): Int = x match
+    case _: Child1 => 1
+    case _: Child2 => 1
+
+  val s: Sealed = new Child1
+}
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala b/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala
new file mode 100644
index 000000000000..078db9c7bf56
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala
@@ -0,0 +1,4 @@
+// This is necessary because tests are run in batch mode
+object CompileState {
+  @volatile var previousIterations: Int = -1
+}
diff --git a/sbt-test/source-dependencies/useOptimizedSealed/test b/sbt-test/source-dependencies/useOptimizedSealed/test
new file mode 100644
index 000000000000..6680e9aab923
--- /dev/null
+++ b/sbt-test/source-dependencies/useOptimizedSealed/test
@@ -0,0 +1,24 @@
+# Compile Sealed.scala and Test.scala
+> compile
+> recordPreviousIterations
+
+# Add an extra child to Sealed
+$ copy-file changes/Sealed1.scala Sealed.scala
+
+# Only Sealed.scala needs to be recompiled because Test.scala does not
+# match on a value of type `Sealed`.
+> compile
+> checkIterations 1
+
+> clean
+$ copy-file changes/Test1.scala Test.scala
+> compile
+> recordPreviousIterations
+
+# Add an extra child to Sealed again
+$ copy-file changes/Sealed2.scala Sealed.scala
+
+# Test.scala will be recompiled in a second round because it matches
+# on a value of type `Sealed`.
+> compile
+> checkIterations 2
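
To tie this scripted test back to the recorder, here is roughly the used-name information sent to Zinc for the two versions of Test.scala, expressed as a sketch of the resulting callback invocations (the exact class-name strings produced by classNameAsString are approximate):

    // Original Test.scala only refers to `Sealed`:
    //   cb.usedName("Test", "Sealed", EnumSet.of(UseScope.Default))
    // changes/Test1.scala also pattern matches on a `Sealed` scrutinee, which the
    // collector records with includeSealedChildren = true:
    //   cb.usedName("Test", "Sealed", EnumSet.of(UseScope.Default, UseScope.PatMatTarget))
    // With IncOptions.useOptimizedSealed enabled, only the PatMatTarget registration makes
    // Test.scala sensitive to added children, hence 1 compilation round vs 2 above.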