diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1801a7fada7c..b1b771bc7512 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -10,7 +10,7 @@ import Annotations.Annotation import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, SrcPos, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.{Feature, Config} import config.SourceVersion.* import collection.mutable import reporting.* @@ -46,6 +46,11 @@ object desugar { */ val UntupledParam: Property.Key[Unit] = Property.StickyKey() + /** An attachment key to indicate that a ValDef is an evidence parameter + * for a context bound. + */ + val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? */ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -195,17 +200,6 @@ object desugar { else vdef1 end valDef - def makeImplicitParameters( - tpts: List[Tree], implicitFlag: FlagSet, - mkParamName: Int => TermName, - forPrimaryConstructor: Boolean = false - )(using Context): List[ValDef] = - for (tpt, i) <- tpts.zipWithIndex yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = mkParamName(i) - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - def mapParamss(paramss: List[ParamClause]) (mapTypeParam: TypeDef => TypeDef) (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = @@ -232,34 +226,84 @@ object desugar { private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + /** Drop context bounds in given TypeDef, replacing them with evidence ValDefs that + * get added to a buffer. + * @param tdef The given TypeDef + * @param evidenceBuf The buffer to which evidence gets added. This buffer + * is shared between desugarings of different type parameters + * of the same method. + * @param evidenceFlags The flags to use for evidence definitions + * @param freshName A function to generate fresh names for evidence definitions + * @param allParamss If `tdef` is a type paramter, all parameters of the owning method, + * otherwise the empty list. + */ + private def desugarContextBounds( + tdef: TypeDef, + evidenceBuf: mutable.ListBuffer[ValDef], + evidenceFlags: FlagSet, + freshName: untpd.Tree => TermName, + allParamss: List[ParamClause])(using Context): TypeDef = + + val evidenceNames = mutable.ListBuffer[TermName]() + + def desugarRHS(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, ctxbounds) => + val isMember = evidenceFlags.isAllOf(DeferredGivenFlags) + for bound <- ctxbounds do + val evidenceName = bound match + case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => + ownName // if there is an explicitly given name, use it. 
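[Editor's sketch] The `ContextBoundTypeTree` case above handles context bounds that carry an explicit witness name. A minimal user-level illustration of the surface syntax this supports, assuming the `as` form of the experimental modularity feature (import and syntax taken from the surrounding patch, not verified here):

import scala.language.experimental.modularity

// `Ordering as ord` names the generated evidence parameter explicitly,
// so the body can refer to it as `ord` instead of a fresh evidence$N name.
def smallest[T: Ordering as ord](xs: List[T]): T =
  xs.min(using ord)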
+ case _ => + if Config.nameSingleContextBounds + && !isMember + && ctxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) + then tdef.name.toTermName + else freshName(bound) + evidenceNames += evidenceName + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(evidenceFlags) + evidenceParam.pushAttachment(ContextBoundParam, ()) + evidenceBuf += evidenceParam + tbounds + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, desugarRHS(body)) + case _ => + rhs + + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRHS(tdef.rhs)) + // Under x.modularity, if there was a context bound, and `tdef`s name as a term name is + // neither a name of an existing parameter nor a name of generated evidence for + // the same method, add a WitnessAnnotation with all generated evidence names to `tdef`. + // This means a context bound proxy will be created later. + if Feature.enabled(Feature.modularity) + && evidenceNames.nonEmpty + && !evidenceBuf.exists(_.name == tdef.name.toTermName) + && !allParamss.nestedExists(_.name == tdef.name.toTermName) + then + tdef1.withAddedAnnotation: + WitnessNamesAnnot(evidenceNames.toList).withSpan(tdef.span) + else + tdef1 + end desugarContextBounds + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = val DefDef(_, paramss, tpt, rhs) = meth val evidenceParamBuf = mutable.ListBuffer[ValDef]() var seenContextBounds: Int = 0 - def desugarContextBounds(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, - // Just like with `makeSyntheticParameter` on nameless parameters of - // using clauses, we only need names that are unique among the - // parameters of the method since shadowing does not affect - // implicit resolution in Scala 3. - mkParamName = i => - val index = seenContextBounds + 1 // Start at 1 like FreshNameCreator. - val ret = ContextBoundParamName(EmptyTermName, index) - seenContextBounds += 1 - ret, - forPrimaryConstructor = isPrimaryConstructor) - tbounds - case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) - case _ => - rhs + def freshName(unused: Tree) = + seenContextBounds += 1 // Start at 1 like FreshNameCreator. + ContextBoundParamName(EmptyTermName, seenContextBounds) + // Just like with `makeSyntheticParameter` on nameless parameters of + // using clauses, we only need names that are unique among the + // parameters of the method since shadowing does not affect + // implicit resolution in Scala 3. 
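[Editor's sketch] For reference, a hand-written equivalent of what `elimContextBounds` is expected to produce for an ordinary context bound. The `evidence$1` name is only illustrative of the `ContextBoundParamName` scheme used above, and the clause is a `using` clause only from source version `future` onwards (older versions get an implicit clause):

// surface form
def maxOf[T: Ordering](xs: List[T]): T = xs.max

// roughly what the desugaring produces: the bound becomes a using parameter
def maxOfDesugared[T](xs: List[T])(using evidence$1: Ordering[T]): T = xs.max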
+ val paramssNoContextBounds = + val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) rhs match @@ -305,9 +349,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + tparam => dropContextBounds(toMethParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + vparam => toMethParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -399,54 +443,98 @@ object desugar { (Nil, tree) /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. + * The position of the added parameters is determined as follows: + * + * - If there is an existing parameter list that refers to one of the added + * parameters or their future context bound proxies in one of its parameter + * types, add the new parameters in front of the first such parameter list. + * - Otherwise, if the last parameter list consists of implicit or using parameters, + * join the new parameters in front of this parameter list, creating one + * parameter list (this is equivalent to Scala 2's scheme). + * - Otherwise, add the new parameter list at the end as a separate parameter clause. 
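[Editor's sketch] The placement rules listed above, shown as hand-desugared forms (parameter names are illustrative; the first rule, which moves evidence in front of a clause that refers to it, only applies with the new named-bound scheme and is not shown):

import scala.concurrent.ExecutionContext

// def f[A: Ordering](x: A)(using ctx: ExecutionContext): A
// the last clause is a using clause, so the evidence is joined in front of it:
def f[A](x: A)(using evidence$1: Ordering[A], ctx: ExecutionContext): A = x

// def g[A: Ordering](x: A)(y: Int): A
// no trailing using clause, so a separate clause is appended at the end:
def g[A](x: A)(y: Int)(using evidence$1: Ordering[A]): A = x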
*/ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match + if params.isEmpty then return meth + + var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names + for mparams <- meth.paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => + + def referencesBoundName(vdef: ValDef): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => boundNames.contains(name) + case _ => false + + def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => + params :: mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + (params ++ mparams) :: Nil + case mparams :: mparamss1 => + mparams :: recur(mparamss1) case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) + params :: Nil + + cpy.DefDef(meth)(paramss = recur(meth.paramss)) + end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { - case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(ContextBoundParamName)) - case _ => - Nil - } + for + case ValDefs(vparams @ (vparam :: _)) <- meth.paramss + if vparam.mods.isOneOf(GivenOrImplicit) + param <- vparams.takeWhile(_.hasAttachment(ContextBoundParam)) + yield + param @sharable private val synthetic = Modifiers(Synthetic) - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & (EmptyFlags | Sealed) | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + /** Which annotations to keep in derived parameters */ + private enum KeepAnnotations: + case None, All, WitnessOnly + + /** Filter annotations in `mods` according to `keep` */ + private def filterAnnots(mods: Modifiers, keep: KeepAnnotations)(using Context) = keep match + case KeepAnnotations.None => mods.withAnnotations(Nil) + case KeepAnnotations.All => mods + case KeepAnnotations.WitnessOnly => + mods.withAnnotations: + mods.annotations.filter: + case WitnessNamesAnnot(_) => true + case _ => false + + /** Map type parameter accessor to corresponding method (i.e. constructor) parameter */ + private def toMethParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keep) + tparam.withMods(mods & EmptyFlags | Param) + + /** Map term parameter accessor to corresponding method (i.e. 
constructor) parameter */ + private def toMethParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + // Need to ensure that tree is duplicated since term parameters can be watched + // and cloning a term parameter will copy its watchers to the clone, which means + // we'd get cross-talk between the original parameter and the clone. + ValDef(vparam.name, vparam.tpt, vparam.rhs) + .withSpan(vparam.span) + .withAttachmentsFrom(vparam) + .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } - def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = - paramss.foldLeft(fn) { (fn, params) => params match - case TypeDefs(params) => - TypeApply(fn, params.map(refOfDef)) - case (vparam: ValDef) :: _ if vparam.mods.is(Given) => - Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) - case _ => - Apply(fn, params.map(refOfDef)) - } + /** Desugar type def (not param): Under x.moduliity this can expand + * context bounds, which are expanded to evidence ValDefs. These will + * ultimately map to deferred givens. + */ + def typeDef(tdef: TypeDef)(using Context): Tree = + val evidenceBuf = new mutable.ListBuffer[ValDef] + val result = desugarContextBounds( + tdef, evidenceBuf, + (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, + inventGivenName, Nil) + if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) /** The expansion of a class definition. See inline comments for what is involved */ def classDef(cdef: TypeDef)(using Context): Tree = { @@ -520,7 +608,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. The reverse is true for // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrTparams = impliedTparams.map(toMethParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -531,7 +619,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + else originalVparamss.nestedMap(toMethParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -553,7 +641,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + evidenceParams(constr1).map(toMethParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } @@ -609,6 +697,11 @@ object desugar { case _ => false } + /** Is this a repeated argument x* (using a spread operator)? 
*/ + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { val targ = refOfDef(tparam) @@ -625,11 +718,6 @@ object desugar { appliedTypeTree(tycon, targs) } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor val classTypeRef = appliedRef(classTycon) @@ -667,7 +755,7 @@ object desugar { } ensureApplied(nu) - val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + val copiedAccessFlags = if Feature.migrateTo3 then EmptyFlags else AccessFlags // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) // def _1: T1 = this.p1 @@ -850,19 +938,17 @@ object desugar { Nil } else { - val defParamss = constrVparamss match { + val defParamss = constrVparamss match case Nil :: paramss => paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - } val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) + className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr + ) .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) .withSpan(cdef.span) :: Nil } @@ -890,7 +976,9 @@ object desugar { } if mods.isAllOf(Given | Inline | Transparent) then report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + var classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + if vparamAccessors.exists(_.mods.is(Tracked)) then + classMods |= Dependent cpy.TypeDef(cdef: TypeDef)( name = className, rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, @@ -1071,7 +1159,7 @@ object desugar { */ def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { var name = mdef.name - if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) + if (name.isEmpty) name = name.likeSpaced(inventGivenName(impl)) def errPos = mdef.source.atSpan(mdef.nameSpan) if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { val kind = if (name.isTypeName) "class" else "object" @@ -1118,7 +1206,7 @@ object desugar { end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. */ - def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + def inventGivenName(impl: Tree)(using Context): SimpleName = val str = impl match case impl: Template => if impl.parents.isEmpty then @@ -1130,6 +1218,10 @@ object desugar { "given_" ++ inventTypeName(impl) str.toTermName.asSimpleName + /** Extract a synthesized given name from a type tree. This is used for + * both anonymous givens and (under x.modularity) deferred givens. 
+ * @param followArgs if true include argument types in the name + */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { private def extractArgs(args: List[Tree])(using Context): String = args.map(argNameExtractor.apply("", _)).mkString("_") @@ -1143,6 +1235,8 @@ object desugar { case tree: TypeDef => tree.name.toString case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case ContextBoundTypeTree(tycon, paramName, _) => + s"${apply(x, tycon)}_$paramName" case InfixOp(left, op, right) => if followArgs then s"${op.name}_${extractArgs(List(left, right))}" else op.name.toString @@ -1380,7 +1474,7 @@ object desugar { case tree: TypeDef => if (tree.isClassDef) classDef(tree) else if (ctx.mode.isQuotedPattern) quotedPatternTypeDef(tree) - else tree + else typeDef(tree) case tree: DefDef => if (tree.name.isConstructorName) tree // was already handled by enclosing classDef else defDef(tree) @@ -1678,14 +1772,13 @@ object desugar { .collect: case vd: ValDef => vd - def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = { - val mods = Given - val params = makeImplicitParameters(formals, mods, - mkParamName = i => - if paramNamesOrNil.isEmpty then ContextFunctionParamName.fresh() - else paramNamesOrNil(i)) - FunctionWithMods(params, body, Modifiers(mods), erasedParams) - } + def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = + val paramNames = + if paramNamesOrNil.nonEmpty then paramNamesOrNil + else formals.map(_ => ContextFunctionParamName.fresh()) + val params = for (tpt, pname) <- formals.zip(paramNames) yield + ValDef(pname, tpt, EmptyTree).withFlags(Given | Param) + FunctionWithMods(params, body, Modifiers(Given), erasedParams) private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { val vdef = ValDef(named.name.asTermName, tpt, rhs) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 941e7b8f1219..97de434ba9d5 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -5,6 +5,8 @@ package ast import core.* import Flags.*, Trees.*, Types.*, Contexts.* import Names.*, StdNames.*, NameOps.*, Symbols.* +import Annotations.Annotation +import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace @@ -380,6 +382,29 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } + + /** Under x.modularity: Extractor for `annotation.internal.WitnessNames(name_1, ..., name_n)` + * represented as an untyped or typed tree. 
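[Editor's sketch] The name extraction in this hunk synthesizes names for anonymous givens (and, with the new `ContextBoundTypeTree` case, contributes `tycon_paramName` for deferred context-bound givens). A small example of the assumed naming scheme:

// An anonymous given instance receives a synthesized name derived from its type;
// this one is expected to be named given_Ordering_Int by inventGivenName.
given Ordering[Int] = Ordering.Int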
+ */ + object WitnessNamesAnnot: + def apply(names: List[TermName])(using Context): untpd.Tree = + untpd.TypedSplice(tpd.New( + defn.WitnessNamesAnnot.typeRef, + tpd.SeqLiteral(names.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + )) + + def unapply(tree: Tree)(using Context): Option[List[TermName]] = + unsplice(tree) match + case Apply(Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), SeqLiteral(elems, _) :: Nil) => + tpt.tpe match + case tp: TypeRef if tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => None + case _ => None + end WitnessNamesAnnot } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 0dfe52c421d9..64f9fb4df95e 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -118,6 +118,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree + // `paramName: tycon as ownName`, ownName != EmptyTermName only under x.modularity case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -230,6 +232,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -675,6 +679,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ContextBoundTypeTree(tree: Tree)(tycon: Tree, paramName: TypeName, ownName: TermName)(using Context): Tree = tree match + case tree: ContextBoundTypeTree if (tycon eq tree.tycon) && paramName == tree.paramName && ownName == tree.ownName => tree + case _ => finalize(tree, untpd.ContextBoundTypeTree(tycon, paramName, ownName)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -740,6 +747,8 @@ object untpd extends 
Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ContextBoundTypeTree(tycon, paramName, ownName) => + cpy.ContextBoundTypeTree(tree)(transform(tycon), paramName, ownName) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -795,6 +804,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case ContextBoundTypeTree(tycon, paramName, ownName) => + this(x, tycon) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 2746476261e5..ee8ed4b215d7 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -235,4 +235,12 @@ object Config { */ inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true + + /** Under x.modularity: + * If a type parameter `X` has a single context bound `X: C`, should the + * witness parameter be named `X`? This would prevent the creation of a + * context bound companion. + */ + inline val nameSingleContextBounds = false } + diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 1fe9cae936c9..d2bfdcb550dc 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -34,6 +34,7 @@ object Feature: val captureChecking = experimental("captureChecking") val into = experimental("into") val namedTuples = experimental("namedTuples") + val modularity = experimental("modularity") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 109929f0c6f5..06711ec97abf 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -647,9 +647,9 @@ trait ConstraintHandling { * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, * as those could leak the annotation to users (see run/inferred-repeated-result). 
*/ - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = def widenOr(tp: Type) = - if widenUnions then + if widen == Widen.Unions then val tpw = tp.widenUnion if tpw ne tp then if tpw.isTransparent() then @@ -667,14 +667,10 @@ trait ConstraintHandling { val tpw = tp.widenSingletons(skipSoftUnions) if (tpw ne tp) && (tpw <:< bound) then tpw else tp - def isSingleton(tp: Type): Boolean = tp match - case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) - case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) - val wideInst = - if isSingleton(bound) then inst + if widen == Widen.None || bound.isSingletonBounded(frozen = true) then inst else - val widenedFromSingle = widenSingle(inst, skipSoftUnions = widenUnions) + val widenedFromSingle = widenSingle(inst, skipSoftUnions = widen == Widen.Unions) val widenedFromUnion = widenOr(widenedFromSingle) val widened = dropTransparentTraits(widenedFromUnion, bound) widenIrreducible(widened) @@ -713,10 +709,10 @@ trait ConstraintHandling { * The instance type is not allowed to contain references to types nested deeper * than `maxLevel`. */ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int)(using Context): Type = { val approx = approximation(param, fromBelow, maxLevel).simplified if fromBelow then - val widened = widenInferred(approx, param, widenUnions) + val widened = widenInferred(approx, param, widen) // Widening can add extra constraints, in particular the widened type might // be a type variable which is now instantiated to `param`, and therefore // cannot be used as an instantiation of `param` without creating a loop. @@ -724,7 +720,7 @@ trait ConstraintHandling { // (we do not check for non-toplevel occurrences: those should never occur // since `addOneBound` disallows recursive lower bounds). if constraint.occursAtToplevel(param, widened) then - instanceType(param, fromBelow, widenUnions, maxLevel) + instanceType(param, fromBelow, widen, maxLevel) else widened else diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index d0c30a665289..a5b0e2dba254 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -12,6 +12,7 @@ import Symbols.* import Scopes.* import Uniques.* import ast.Trees.* +import Flags.ParamAccessor import ast.untpd import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} @@ -399,7 +400,8 @@ object Contexts { * * - as owner: The primary constructor of the class * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context + * - as scope: type parameters, the parameter accessors, and + * the context bound companions in the class context, * * The reasons for this peculiar choice of attributes are as follows: * @@ -413,10 +415,11 @@ object Contexts { * context see the constructor parameters instead, but then we'd need a final substitution step * from constructor parameters to class parameter accessors. 
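[Editor's sketch] The scope change above matters because superclass arguments may need the generated evidence (and, under x.modularity, its context bound companion) to be visible. A small example that already works today and motivates keeping evidence in the super-call context:

class Base[T](ord: Ordering[T])

// The summon in the parent argument resolves against the evidence generated
// for the context bound, so it must be in scope for the super call.
class Derived[T: Ordering](xs: List[T]) extends Base[T](summon[Ordering[T]])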
*/ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*) - superOrThisCallContext(owner.primaryConstructor, locals) - } + def superCallContext: Context = + val locals = owner.typeParams + ++ owner.asClass.unforcedDecls.filter: sym => + sym.is(ParamAccessor) || sym.isContextBoundCompanion + superOrThisCallContext(owner.primaryConstructor, newScopeWith(locals*)) /** The context for the arguments of a this(...) constructor call. * The context is computed from the local auxiliary constructor context. diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 15880207b3c8..11a4a8473e79 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -59,10 +59,10 @@ class Definitions { private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered - private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) - private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = @@ -240,6 +240,7 @@ class Definitions { @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_deferred : Symbol = CompiletimePackageClass.requiredMethod("deferred") @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") @@ -458,6 +459,13 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + @tu lazy val CBCompanion: TypeSymbol = // type ``[-Refs] + enterPermanentSymbol(tpnme.CBCompanion, + TypeBounds(NothingType, + HKTypeLambda(tpnme.syntheticTypeParamName(0) :: Nil, Contravariant :: Nil)( + tl => TypeBounds.empty :: Nil, + tl => AnyType))).asType + /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) @@ -527,12 +535,16 @@ class Definitions { def ConsType: TypeRef = ConsClass.typeRef @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + @tu lazy val PreciseClass: ClassSymbol = requiredClass("scala.Precise") + @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles // but does not define it as 
an explicit class. - enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, - List(AnyType), EmptyScope) + val cls = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final | Erased, + List(AnyType)) + enterTypeField(cls, tpnme.Self, Deferred, cls.info.decls.openForMutations) + cls @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef @tu lazy val MaybeCapabilityAnnot: ClassSymbol = @@ -1061,6 +1073,7 @@ class Definitions { @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") @tu lazy val RetainsArgAnnot: ClassSymbol = requiredClass("scala.annotation.retainsArg") @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") + @tu lazy val WitnessNamesAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WitnessNames") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") @@ -2157,6 +2170,7 @@ class Definitions { NullClass, NothingClass, SingletonClass, + CBCompanion, MaybeCapabilityAnnot) @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 8110bc769d4f..b1bf7a266c91 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -377,6 +377,9 @@ object Flags { /** Symbol cannot be found as a member during typer */ val (Invisible @ _, _, _) = newFlags(45, "") + /** Tracked modifier for class parameter / a class with some tracked parameters */ + val (Tracked @ _, _, Dependent @ _) = newFlags(46, "tracked") + // ------------ Flags following this one are not pickled ---------------------------------- /** Symbol is not a member of its owner */ @@ -452,7 +455,7 @@ object Flags { CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open val TermSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy + CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked /** Flags representing modifiers that can appear in trees */ val ModifierFlags: FlagSet = @@ -466,7 +469,7 @@ object Flags { val FromStartFlags: FlagSet = commonFlags( Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, - OuterOrCovariant, LabelOrContravariant, CaseAccessor, + OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, SuperParamAliasOrScala2x, Inline, Macro, ConstructorProxy, Invisible) @@ -477,7 +480,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent, Tracked) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -543,8 +546,6 @@ object Flags { /** Flags retained in type export forwarders */ val RetainedExportTypeFlags = Infix - val MandatoryExportTypeFlags = Exported | Final - /** Flags that apply only to classes */ val ClassOnlyFlags = Sealed | Open | Abstract.toTypeFlags @@ -572,6 +573,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val 
DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides + val DeferredGivenFlags: FlagSet = Deferred | Given | HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 5dab5631c62a..14d7827974c0 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -104,8 +104,8 @@ object Mode { val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use previous Scheme for implicit resolution. Currently significant - * in 3.0-migration where we use Scala-2's scheme instead and in 3.5-migration - * where we use the previous scheme up to 3.4 instead. + * in 3.0-migration where we use Scala-2's scheme instead and in 3.5 and 3.6-migration + * where we use the previous scheme up to 3.4 for comparison with the new scheme. */ val OldImplicitResolution: Mode = newMode(15, "OldImplicitResolution") diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index d4f009cbbbd5..74d440562824 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -182,13 +182,13 @@ object NameKinds { case DerivedName(underlying, info: this.NumberedInfo) => Some((underlying, info.num)) case _ => None } - protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = { + protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = var i = name.length - while (i > 0 && name(i - 1).isDigit) i -= 1 - if (i > separator.length && i < name.length && - name.slice(i - separator.length, i).toString == separator) i + while i > 0 && name(i - 1).isDigit do i -= 1 + if i >= separator.length && i < name.length + && name.slice(i - separator.length, i).toString == separator + then i else -1 - } numberedNameKinds(tag) = this: @unchecked } @@ -240,6 +240,16 @@ object NameKinds { } } + /** Unique names that can be unmangled */ + class UniqueNameKindWithUnmangle(separator: String) extends UniqueNameKind(separator): + override def unmangle(name: SimpleName): TermName = + val i = skipSeparatorAndNum(name, separator) + if i > 0 then + val index = name.drop(i).toString.toInt + val original = name.take(i - separator.length).asTermName + apply(original, index) + else name + /** Names of the form `prefix . name` */ val QualifiedName: QualifiedNameKind = new QualifiedNameKind(QUALIFIED, ".") @@ -288,7 +298,7 @@ object NameKinds { * * The "evidence$" prefix is a convention copied from Scala 2. */ - val ContextBoundParamName: UniqueNameKind = new UniqueNameKind("evidence$") + val ContextBoundParamName: UniqueNameKind = new UniqueNameKindWithUnmangle("evidence$") /** The name of an inferred contextual function parameter: * @@ -323,20 +333,7 @@ object NameKinds { val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") - /** A kind of unique extension methods; Unlike other unique names, these can be - * unmangled. 
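[Editor's sketch] A standalone sketch of the unmangling scheme shared by `UniqueNameKindWithUnmangle` above (a separator followed by a numeric suffix). It mirrors `skipSeparatorAndNum` but is not the compiler API itself:

def splitMangled(name: String, separator: String): Option[(String, Int)] =
  var i = name.length
  while i > 0 && name(i - 1).isDigit do i -= 1
  if i >= separator.length && i < name.length
     && name.slice(i - separator.length, i) == separator
  then Some((name.take(i - separator.length), name.drop(i).toInt))
  else None

splitMangled("evidence$1", "evidence$")    // Some(("", 1)): ContextBoundParamName with index 1
splitMangled("x$extension3", "$extension") // Some(("x", 3)): UniqueExtMethName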
- */ - val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension") { - override def unmangle(name: SimpleName): TermName = { - val i = skipSeparatorAndNum(name, separator) - if (i > 0) { - val index = name.drop(i).toString.toInt - val original = name.take(i - separator.length).asTermName - apply(original, index) - } - else name - } - } + val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") /** Kinds of unique names generated by the pattern matcher */ val PatMatStdBinderName: UniqueNameKind = new UniqueNameKind("x") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 75a135826785..07cb9292baa4 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -4,7 +4,10 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme +import ContextOps.enter import TypeApplications.EtaExpansion +import collection.mutable +import config.Printers.typr /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -15,8 +18,41 @@ object NamerOps: */ def effectiveResultType(ctor: Symbol, paramss: List[List[Symbol]])(using Context): Type = paramss match - case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) - case _ => ctor.owner.typeRef + case TypeSymbols(tparams) :: rest => + addParamRefinements(ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)), rest) + case _ => + addParamRefinements(ctor.owner.typeRef, paramss) + + /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the TermParamRef + * of the parameter and pi is its name. This matters only under experimental.modularity, + * since without it there are no tracked parameters. Parameter refinements are added for + * constructors and given companion methods. + */ + def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = + paramss.flatten.foldLeft(resType): (rt, param) => + if param.is(Tracked) then RefinedType(rt, param.name, param.termRef) + else rt + + /** Split dependent class refinements off parent type. Add them to `refinements`, + * unless it is null. + */ + extension (tp: Type) + def separateRefinements(cls: ClassSymbol, refinements: mutable.LinkedHashMap[Name, Type] | Null)(using Context): Type = + tp match + case RefinedType(tp1, rname, rinfo) => + try tp1.separateRefinements(cls, refinements) + finally + if refinements != null then + refinements(rname) = refinements.get(rname) match + case Some(tp) => tp & rinfo + case None => rinfo + case tp @ AnnotatedType(tp1, ann) => + tp.derivedAnnotatedType(tp1.separateRefinements(cls, refinements), ann) + case tp: RecType => + tp.parent.substRecThis(tp, cls.thisType).separateRefinements(cls, refinements) + case tp => + tp /** If isConstructor, make sure it has at least one non-implicit parameter list * This is done by adding a () in front of a leading old style implicit parameter, @@ -222,4 +258,55 @@ object NamerOps: rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } + /** Create a context-bound companion for type symbol `tsym`, which has a context + * bound that defines a set of witnesses with names `witnessNames`. 
+ * + * @param params If `tsym` is a type parameter, a list of parameter symbols + * that includes all witnesses, otherwise the empty list. + * + * The context-bound companion has as name the name of `tsym` translated to + * a term name. We create a synthetic val of the form + * + * val A: ``[witnessRef1 | ... | witnessRefN] + * + * where + * + * is the CBCompanion type created in Definitions + * withnessRefK is a refence to the K'th witness. + * + * The companion has the same access flags as the original type. + */ + def addContextBoundCompanionFor(tsym: Symbol, witnessNames: List[TermName], params: List[Symbol])(using Context): Unit = + val prefix = ctx.owner.thisType + val companionName = tsym.name.toTermName + val witnessRefs = + if params.nonEmpty then + witnessNames.map: witnessName => + prefix.select(params.find(_.name == witnessName).get) + else + witnessNames.map(TermRef(prefix, _)) + val cbtype = defn.CBCompanion.typeRef.appliedTo: + witnessRefs.reduce[Type](OrType(_, _, soft = false)) + val cbc = newSymbol( + ctx.owner, companionName, + (tsym.flagsUNSAFE & (AccessFlags)).toTermFlags | Synthetic, + cbtype) + typr.println(s"context bound companion created $cbc for $witnessNames in ${ctx.owner}") + ctx.enter(cbc) + end addContextBoundCompanionFor + + /** Add context bound companions to all context-bound types declared in + * this class. This assumes that these types already have their + * WitnessNames annotation set even before they are completed. This is + * the case for unpickling but currently not for Namer. So the method + * is only called during unpickling. + */ + def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = + for sym <- cls.info.decls do + if sym.isType && !sym.isClass then + for ann <- sym.annotationsUNSAFE do + if ann.symbol == defn.WitnessNamesAnnot then + ann.tree match + case ast.tpd.WitnessNamesAnnot(witnessNames) => + addContextBoundCompanionFor(sym, witnessNames, Nil) end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 6d6a47cf6a1e..9baf0c40a80b 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -88,11 +88,6 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def stripRefinement(tp: Type): Type = tp match { - case tp: RefinedOrRecType => stripRefinement(tp.parent) - case tp => tp - } - def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { val patCls = pat.classSymbol val scrCls = scrut.classSymbol @@ -182,14 +177,14 @@ trait PatternTypeConstrainer { self: TypeComparer => case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => - constrainPatternType(pat, stripRefinement(scrut)) + constrainPatternType(pat, scrut.stripRefinement) case scrut => dealiasDropNonmoduleRefs(pat) match { case OrType(pat1, pat2) => either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) case AndType(pat1, pat2) => constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) case pat: RefinedOrRecType => - constrainPatternType(stripRefinement(pat), scrut) + constrainPatternType(pat.stripRefinement, scrut) case pat => tryConstrainSimplePatternType(pat, scrut) || classesMayBeCompatible && constrainUpcasted(scrut) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 
62d7afa22ed2..b935488695e0 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -288,6 +288,7 @@ object StdNames { // Compiler-internal val CAPTURE_ROOT: N = "cap" + val CBCompanion: N = "" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -387,6 +388,7 @@ object StdNames { val RootPackage: N = "RootPackage" val RootClass: N = "RootClass" val Select: N = "Select" + val Self: N = "Self" val Shape: N = "Shape" val StringContext: N = "StringContext" val This: N = "This" @@ -396,6 +398,7 @@ object StdNames { val TypeApply: N = "TypeApply" val TypeRef: N = "TypeRef" val UNIT : N = "UNIT" + val WitnessNames: N = "WitnessNames" val acc: N = "acc" val adhocExtensions: N = "adhocExtensions" val andThen: N = "andThen" @@ -455,6 +458,7 @@ object StdNames { val create: N = "create" val currentMirror: N = "currentMirror" val curried: N = "curried" + val deferred: N = "deferred" val definitions: N = "definitions" val delayedInit: N = "delayedInit" val delayedInitArg: N = "delayedInit$body" @@ -629,6 +633,7 @@ object StdNames { val toString_ : N = "toString" val toTypeConstructor: N = "toTypeConstructor" val tpe : N = "tpe" + val tracked: N = "tracked" val transparent : N = "transparent" val tree : N = "tree" val true_ : N = "true" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 09d45dbdf06b..3904228756a0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1187,21 +1187,25 @@ object SymDenotations { final def isExtensibleClass(using Context): Boolean = isClass && !isOneOf(FinalOrModuleClass) && !isAnonymousClass - /** A symbol is effectively final if it cannot be overridden in a subclass */ + /** A symbol is effectively final if it cannot be overridden */ final def isEffectivelyFinal(using Context): Boolean = isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) || isConstructor - || !owner.isExtensibleClass + || !owner.isExtensibleClass && !is(Deferred) + // Deferred symbols can arise through parent refinements under x.modularity. + // For them, the overriding relationship reverses anyway, so + // being in a final class does not mean the symbol cannot be + // implemented concretely in a superclass. /** A class is effectively sealed if has the `final` or `sealed` modifier, or it * is defined in Scala 3 and is neither abstract nor open. */ final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) - || isClass && (!isOneOf(EffectivelyOpenFlags) - || isLocalToCompilationUnit) + || isClass + && (!isOneOf(EffectivelyOpenFlags) || isLocalToCompilationUnit) final def isLocalToCompilationUnit(using Context): Boolean = is(Private) diff --git a/compiler/src/dotty/tools/dotc/core/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala index 65634241b790..3a97a0053dbd 100644 --- a/compiler/src/dotty/tools/dotc/core/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -87,6 +87,9 @@ class SymUtils: !d.isPrimitiveValueClass } + def isContextBoundCompanion(using Context): Boolean = + self.is(Synthetic) && self.infoOrCompleter.typeSymbol == defn.CBCompanion + /** Is this a case class for which a product mirror is generated? * Excluded are value classes, abstract classes and case classes with more than one * parameter section. 
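[Editor's sketch] To connect the new `tracked` name with the `Tracked`/`Dependent` flags and `addParamRefinements` earlier in this patch: a tracked class parameter is meant to be remembered as a refinement of the constructor's result type. The surface behaviour below is an assumption based on that machinery, not verified against the released implementation:

import scala.language.experimental.modularity

class Box(tracked val v: Int)

// The constructor result is assumed to be refined with the parameter's precise
// type, roughly Box { val v: 42 } for the call below.
val b = Box(42)
val x: 42 = b.v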
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 0020efa5018d..da0ecac47b7d 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -312,7 +312,6 @@ object Symbols extends SymUtils { * With the given setup, all such calls will give implicit-not found errors */ final def symbol(implicit ev: DontUseSymbolOnSymbol): Nothing = unsupported("symbol") - type DontUseSymbolOnSymbol final def source(using Context): SourceFile = { def valid(src: SourceFile): SourceFile = @@ -402,13 +401,12 @@ object Symbols extends SymUtils { flags: FlagSet = this.flags, info: Type = this.info, privateWithin: Symbol = this.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.associatedFile` once we bootstrap + coord: Coord = NoCoord, // Can be `= owner.coord` once we have new default args + compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.compilationUnitInfo` once we have new default args ): Symbol = { val coord1 = if (coord == NoCoord) owner.coord else coord val compilationUnitInfo1 = if (compilationUnitInfo == null) owner.compilationUnitInfo else compilationUnitInfo - if isClass then newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, compilationUnitInfo1) else @@ -936,6 +934,8 @@ object Symbols extends SymUtils { case (x: Symbol) :: _ if x.isType => Some(xs.asInstanceOf[List[TypeSymbol]]) case _ => None + type DontUseSymbolOnSymbol + // ----- Locating predefined symbols ---------------------------------------- def requiredPackage(path: PreName)(using Context): TermSymbol = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cee1ec7fffa8..a849d28c81d6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3257,8 +3257,8 @@ object TypeComparer { def subtypeCheckInProgress(using Context): Boolean = comparing(_.subtypeCheckInProgress) - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = - comparing(_.instanceType(param, fromBelow, widenUnions, maxLevel)) + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int = Int.MaxValue)(using Context): Type = + comparing(_.instanceType(param, fromBelow, widen: Widen, maxLevel)) def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = comparing(_.approximation(param, fromBelow, maxLevel)) @@ -3278,8 +3278,8 @@ object TypeComparer { def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = comparing(_.addToConstraint(tl, tvars)) - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = - comparing(_.widenInferred(inst, bound, widenUnions)) + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = + comparing(_.widenInferred(inst, bound, widen: Widen)) def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = comparing(_.dropTransparentTraits(tp, bound)) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 8461c0f091fe..1282b77f013e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ 
b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -545,7 +545,7 @@ object TypeOps: val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound, - widenUnions = tp.widenUnions)(using mapCtx) + tp.widenPolicy)(using mapCtx) val lo1 = apply(lo) if (lo1 ne lo) lo1 else tp case _ => diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index d4be03e9aae4..afc2cc39f9cf 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -7,12 +7,14 @@ import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* import Names.{Name, TermName} import Constants.Constant -class TypeUtils { +import Names.Name +import config.Feature +class TypeUtils: /** A decorator that provides methods on types * that are needed in the transformer pipeline. */ - extension (self: Type) { + extension (self: Type) def isErasedValueType(using Context): Boolean = self.isInstanceOf[ErasedValueType] @@ -21,7 +23,11 @@ class TypeUtils { self.classSymbol.isPrimitiveValueClass def isErasedClass(using Context): Boolean = - self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased) + val cls = self.underlyingClassRef(refinementOK = true).typeSymbol + cls.is(Flags.Erased) + && (cls != defn.SingletonClass || Feature.enabled(Feature.modularity)) + // Singleton counts as an erased class only under x.modularity + /** Is this type a checked exception? This is the case if the type * derives from Exception but not from RuntimeException. According to @@ -178,5 +184,11 @@ class TypeUtils { def isThisTypeOf(cls: Symbol)(using Context) = self match case self: Types.ThisType => self.cls == cls case _ => false - } -} + + /** Strip all outer refinements off this type */ + def stripRefinement: Type = self match + case self: RefinedOrRecType => self.parent.stripRefinement + case seld => self + +end TypeUtils + diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a6136a20cf32..eeffc41d4159 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -44,8 +44,6 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe - - object Types extends TypeUtils { @sharable private var nextId = 0 @@ -330,6 +328,21 @@ object Types extends TypeUtils { /** Is this type a (possibly aliased) singleton type? */ def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] + /** Is this upper-bounded by a (possibly aliased) singleton type? + * Overridden in TypeVar + */ + def isSingletonBounded(frozen: Boolean)(using Context): Boolean = this.dealias.normalized match + case tp: SingletonType => tp.isStable + case tp: TypeRef => + tp.name == tpnme.Singleton && tp.symbol == defn.SingletonClass + || tp.superType.isSingletonBounded(frozen) + case tp: TypeVar if !tp.isInstantiated => + if frozen then tp frozen_<:< defn.SingletonType else tp <:< defn.SingletonType + case tp: HKTypeLambda => false + case tp: TypeProxy => tp.superType.isSingletonBounded(frozen) + case AndType(tpL, tpR) => tpL.isSingletonBounded(frozen) || tpR.isSingletonBounded(frozen) + case _ => false + /** Is this type of kind `AnyKind`? 
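[Editor's sketch] `isSingletonBounded` generalizes the singleton check previously done locally in `widenInferred`. The behaviour it supports is the usual `Singleton`-bound rule that suppresses widening, for example:

def pick[T <: Singleton](x: T): T = x

// With the Singleton upper bound the type argument is not widened,
// so T is inferred as the literal type 42 here.
val a = pick(42)
val b: 42 = a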
*/ def hasAnyKind(using Context): Boolean = { @tailrec def loop(tp: Type): Boolean = tp match { @@ -1642,17 +1655,19 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Does not perform the reduction if the resulting type would contain - * a reference to the "this" of the current refined type, except in the following situation + * to just U. Analogously, `P { val x: S} # x` is reduced to `S` if `S` + * is a singleton type. * - * (1) The "this" reference can be avoided by following an alias. Example: + * Does not perform the reduction if the resulting type would contain + * a reference to the "this" of the current refined type, except if the "this" + * reference can be avoided by following an alias. Example: * * P { type T = String, type R = P{...}.T } # R --> String * * (*) normalizes means: follow instantiated typevars and aliases. */ - def lookupRefined(name: Name)(using Context): Type = { - @tailrec def loop(pre: Type): Type = pre.stripTypeVar match { + def lookupRefined(name: Name)(using Context): Type = + @tailrec def loop(pre: Type): Type = pre match case pre: RefinedType => pre.refinedInfo match { case tp: AliasingBounds => @@ -1675,12 +1690,13 @@ object Types extends TypeUtils { case TypeAlias(alias) => loop(alias) case _ => NoType } + case pre: (TypeVar | AnnotatedType) => + loop(pre.underlying) case _ => NoType - } loop(this) - } + end lookupRefined /** The type , reduced if possible */ def select(name: Name)(using Context): Type = @@ -2820,35 +2836,30 @@ object Types extends TypeUtils { def derivedSelect(prefix: Type)(using Context): Type = if prefix eq this.prefix then this else if prefix.isExactlyNothing then prefix - else { - val res = - if (isType && currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + else + val reduced = + if isType && currentValidSymbol.isAllOf(ClassTypeParam) then argForParam(prefix) else prefix.lookupRefined(name) - if (res.exists) return res - if (isType) { - if (Config.splitProjections) - prefix match { - case prefix: AndType => - def isMissing(tp: Type) = tp match { - case tp: TypeRef => !tp.info.exists - case _ => false - } - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return ( - if (isMissing(derived1)) derived2 - else if (isMissing(derived2)) derived1 - else prefix.derivedAndType(derived1, derived2)) - case prefix: OrType => - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return prefix.derivedOrType(derived1, derived2) - case _ => - } - } - if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) + if reduced.exists then return reduced + if Config.splitProjections && isType then + prefix match + case prefix: AndType => + def isMissing(tp: Type) = tp match + case tp: TypeRef => !tp.info.exists + case _ => false + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return + if isMissing(derived1) then derived2 + else if isMissing(derived2) then derived1 + else prefix.derivedAndType(derived1, derived2) + case prefix: OrType => + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return prefix.derivedOrType(derived1, derived2) + case _ => + if prefix.isInstanceOf[WildcardType] then WildcardType.sameKindAs(this) else withPrefix(prefix) - } /** A reference like this one, but with the given symbol, if it exists */ private def withSym(sym: Symbol)(using Context): ThisType = @@ -4925,8 +4936,13 @@ object Types extends TypeUtils { * @param 
origin the parameter that's tracked by the type variable. * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) + * @param precise whether we should use instantiation without widening for this TypeVar. */ - final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int) extends CachedProxyType with ValueType { + final class TypeVar private( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + val initNestingLevel: Int, + val precise: Boolean) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -5014,7 +5030,7 @@ object Types extends TypeUtils { } def typeToInstantiateWith(fromBelow: Boolean)(using Context): Type = - TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) + TypeComparer.instanceType(origin, fromBelow, widenPolicy, nestingLevel) /** Instantiate variable from the constraints over its `origin`. * If `fromBelow` is true, the variable is instantiated to the lub @@ -5030,8 +5046,26 @@ object Types extends TypeUtils { else instantiateWith(tp) - /** Widen unions when instantiating this variable in the current context? */ - def widenUnions(using Context): Boolean = !ctx.typerState.constraint.isHard(this) + /** Should we suppress widening? True if this TypeVar is precise + * or if it has as an upper bound a precise TypeVar. + */ + def isPrecise(using Context) = + precise || hasPreciseUpperBound + + private def hasPreciseUpperBound(using Context) = + val constr = ctx.typerState.constraint + constr.upper(origin).exists: tparam => + constr.typeVarOfParam(tparam) match + case tvar: TypeVar => tvar.precise + case _ => false + + /** The policy used for widening singletons or unions when instantiating + * this variable in the current context. 
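The `precise` flag and `widenPolicy` above replace the old `widenUnions: Boolean`. A minimal standalone sketch of the three-way policy, using toy stand-ins for the compiler's type variables (the names here are illustrative, not the real API):

  enum Widen:
    case None        // no widening
    case Singletons  // widen singletons but not unions
    case Unions      // widen singletons and unions

  // Toy stand-in for a type variable: `precise` suppresses widening entirely,
  // a hard constraint entry keeps unions but still widens singletons.
  final case class TyVar(precise: Boolean, hard: Boolean):
    def widenPolicy: Widen =
      if precise then Widen.None
      else if hard then Widen.Singletons
      else Widen.Unions

  @main def demoWiden(): Unit =
    assert(TyVar(precise = true, hard = false).widenPolicy == Widen.None)
    assert(TyVar(precise = false, hard = true).widenPolicy == Widen.Singletons)
    assert(TyVar(precise = false, hard = false).widenPolicy == Widen.Unions)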
+ */ + def widenPolicy(using Context): Widen = + if isPrecise then Widen.None + else if ctx.typerState.constraint.isHard(this) then Widen.Singletons + else Widen.Unions /** For uninstantiated type variables: the entry in the constraint (either bounds or * provisional instance value) @@ -5072,8 +5106,18 @@ object Types extends TypeUtils { } } object TypeVar: - def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState | Null, nestingLevel: Int = ctx.nestingLevel) = - new TypeVar(initOrigin, creatorState, nestingLevel) + def apply(using Context)( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + nestingLevel: Int = ctx.nestingLevel, + precise: Boolean = false) = + new TypeVar(initOrigin, creatorState, nestingLevel, precise) + + /** The three possible widening policies */ + enum Widen: + case None // no widening + case Singletons // widen singletons but not unions + case Unions // widen singletons and unions type TypeVars = SimpleIdentitySet[TypeVar] diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 186e039c4d74..8d1eca8fb5f0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -867,6 +867,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(Exported)) writeModTag(EXPORTED) if (flags.is(Given)) writeModTag(GIVEN) if (flags.is(Implicit)) writeModTag(IMPLICIT) + if (flags.is(Tracked)) writeModTag(TRACKED) if (isTerm) { if (flags.is(Lazy, butNot = Module)) writeModTag(LAZY) if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 04d19f2f8821..91a5899146cc 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -31,7 +31,8 @@ import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} import Trees.* import Decorators.* -import dotty.tools.dotc.quoted.QuotePatterns +import config.Feature +import quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.* @@ -755,6 +756,7 @@ class TreeUnpickler(reader: TastyReader, case INVISIBLE => addFlag(Invisible) case TRANSPARENT => addFlag(Transparent) case INFIX => addFlag(Infix) + case TRACKED => addFlag(Tracked) case PRIVATEqualified => readByte() privateWithin = readWithin @@ -922,6 +924,8 @@ class TreeUnpickler(reader: TastyReader, val resType = if name == nme.CONSTRUCTOR then effectiveResultType(sym, paramss) + else if sym.isAllOf(Given | Method) && Feature.enabled(Feature.modularity) then + addParamRefinements(tpt.tpe, paramss) else tpt.tpe sym.info = methodType(paramss, resType) @@ -1074,7 +1078,7 @@ class TreeUnpickler(reader: TastyReader, } val parentReader = fork val parents = readParents(withArgs = false)(using parentCtx) - val parentTypes = parents.map(_.tpe.dealias) + val parentTypes = parents.map(_.tpe.dealiasKeepAnnots.separateRefinements(cls, null)) if cls.is(JavaDefined) && parentTypes.exists(_.derivesFrom(defn.JavaAnnotationClass)) then cls.setFlag(JavaAnnotation) val self = @@ -1134,6 +1138,7 @@ class TreeUnpickler(reader: TastyReader, }) defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) + NamerOps.addContextBoundCompanions(cls) setSpan(start, untpd.Template(constr, mappedParents, self, lazyStats) 
.withType(localDummy.termRef)) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 60b2a2b1d3cf..e28ba5fd669e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -62,7 +62,7 @@ object Parsers { case ExtensionFollow // extension clause, following extension parameter def isClass = // owner is a class - this == Class || this == CaseClass + this == Class || this == CaseClass || this == Given def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = @@ -976,12 +976,14 @@ object Parsers { * i.e. an identifier followed by type and value parameters, followed by `:`? * @pre The current token is an identifier */ - def followingIsGivenSig() = + def followingIsOldStyleGivenSig() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() + var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then + paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -989,6 +991,16 @@ object Parsers { skipParams() skipParams() lookahead.isColon + && { + !in.featureEnabled(Feature.modularity) + || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + // Example: + // given C: + // def f = ... + lookahead.nextToken() + !lookahead.isAfterLineEnd + } + } def followingIsExtension() = val next = in.lookahead.token @@ -1806,9 +1818,11 @@ object Parsers { */ def infixType(): Tree = infixTypeRest(refinedType()) - def infixTypeRest(t: Tree): Tree = - infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, - isOperator = !followingIsVararg() && !isPureArrow + def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, + isOperator = !followingIsVararg() + && !isPureArrow + && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -1872,6 +1886,10 @@ object Parsers { */ def annotType(): Tree = annotTypeRest(simpleType()) + /** AnnotType1 ::= SimpleType1 {Annotation} + */ + def annotType1(): Tree = annotTypeRest(simpleType1()) + def annotTypeRest(t: Tree): Tree = if (in.token == AT) annotTypeRest(atSpan(startOffset(t)) { @@ -2178,20 +2196,33 @@ object Parsers { if (in.token == tok) { in.nextToken(); toplevelTyp() } else EmptyTree - /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} + /** TypeAndCtxBounds ::= TypeBounds [`:` ContextBounds] */ - def typeParamBounds(pname: TypeName): Tree = { + def typeAndCtxBounds(pname: TypeName): Tree = { val t = typeBounds() val cbs = contextBounds(pname) if (cbs.isEmpty) t else atSpan((t.span union cbs.head.span).start) { ContextBounds(t, cbs) } } + /** ContextBound ::= Type [`as` id] */ + def contextBound(pname: TypeName): Tree = + val t = toplevelTyp() + val ownName = + if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + in.nextToken() + ident() + else EmptyTermName + ContextBoundTypeTree(t, pname, ownName) + + /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then - atSpan(in.skipToken()) { - AppliedTypeTree(toplevelTyp(), 
Ident(pname)) - } :: contextBounds(pname) + in.nextToken() + if in.token == LBRACE && in.featureEnabled(Feature.modularity) + then inBraces(commaSeparated(() => contextBound(pname))) + else contextBound(pname) :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", @@ -3189,6 +3220,7 @@ object Parsers { case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() + case nme.tracked => Mod.Tracked() } } @@ -3255,7 +3287,8 @@ object Parsers { * | AccessModifier * | override * | opaque - * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | inline | transparent | infix | erased + * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | + * inline | transparent | infix */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3372,7 +3405,7 @@ object Parsers { val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param - if paramOwner == ParamOwner.Class || paramOwner == ParamOwner.CaseClass then + if paramOwner.isClass then mods |= PrivateLocal if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant @@ -3386,7 +3419,7 @@ object Parsers { } else ident().toTypeName val hkparams = typeParamClauseOpt(ParamOwner.Type) - val bounds = if (isAbstractOwner) typeBounds() else typeParamBounds(name) + val bounds = if (isAbstractOwner) typeBounds() else typeAndCtxBounds(name) TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } @@ -3408,8 +3441,8 @@ object Parsers { /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} - * ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param - * + * ClsParam ::= {Annotation} + * [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param * TypelessClause ::= DefTermParamClause * | UsingParamClause * @@ -3445,6 +3478,8 @@ object Parsers { if isErasedKw then mods = addModifier(mods) if paramOwner.isClass then + if isIdent(nme.tracked) && in.featureEnabled(Feature.modularity) && !in.lookahead.isColon then + mods = addModifier(mods) mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3507,22 +3542,27 @@ object Parsers { paramMods() if paramOwner.takesOnlyUsingClauses && !impliedMods.is(Given) then syntaxError(em"`using` expected") - val (firstParamMod, isParams) = + val (firstParamMod, paramsAreNamed) = var mods = EmptyModifiers if in.lookahead.isColon then (mods, true) else if isErased then mods = addModifier(mods) - val isParams = + val paramsAreNamed = !impliedMods.is(Given) || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) - (mods, isParams) - (if isParams then commaSeparated(() => param()) - else contextTypes(paramOwner, numLeadParams, impliedMods)) match { + || isIdent + && (in.name == nme.inline // inline starts a name binding + || in.name == nme.tracked // tracked starts a name binding under x.modularity + && in.featureEnabled(Feature.modularity) + || in.lookahead.isColon) // a following `:` starts a name binding + (mods, paramsAreNamed) + val params = + if paramsAreNamed then commaSeparated(() => param()) + else contextTypes(paramOwner, numLeadParams, impliedMods) + params match case Nil 
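The `ContextBound ::= Type ['as' id]` and brace-enclosed `ContextBounds` productions above correspond to surface syntax along the following lines (a sketch under the modularity import; `Monoid` and `Show` are made-up type classes used only for illustration):

  import scala.language.experimental.modularity

  trait Monoid[A]:
    def unit: A
    def combine(x: A, y: A): A

  trait Show[A]:
    def asString(a: A): String

  // A single context bound whose evidence gets an explicit name:
  def reduce[A: Monoid as m](xs: List[A]): A =
    xs.foldLeft(m.unit)(m.combine)

  // Several context bounds grouped in braces, each optionally named:
  def describeMax[A: {Ordering as ord, Show as show}](xs: List[A]): String =
    show.asString(xs.max(using ord))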
=> Nil case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t - } checkVarArgsRules(clause) clause } @@ -3894,14 +3934,16 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() + val tname = nameIdent.name.asTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) val vparamss = funParamClauses() + def makeTypeDef(rhs: Tree): Tree = { val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) @@ -3909,36 +3951,37 @@ object Parsers { tdef.pushAttachment(Backquoted, ()) finalizeDef(tdef, mods, start) } + in.token match { case EQUALS => in.nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => - val bounds = typeBounds() - if (in.token == EQUALS) { - val eqOffset = in.skipToken() - var rhs = toplevelTyp() - rhs match { - case mtt: MatchTypeTree => - bounds match { - case TypeBoundsTree(EmptyTree, upper, _) => - rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) - case _ => - syntaxError(em"cannot combine lower bound and match type alias", eqOffset) - } - case _ => - if mods.is(Opaque) then - rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) - else - syntaxError(em"cannot combine bound and alias", eqOffset) - } - makeTypeDef(rhs) - } - else makeTypeDef(bounds) + typeAndCtxBounds(tname) match + case bounds: TypeBoundsTree if in.token == EQUALS => + val eqOffset = in.skipToken() + var rhs = toplevelTyp() + rhs match { + case mtt: MatchTypeTree => + bounds match { + case TypeBoundsTree(EmptyTree, upper, _) => + rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) + case _ => + syntaxError(em"cannot combine lower bound and match type alias", eqOffset) + } + case _ => + if mods.is(Opaque) then + rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) + else + syntaxError(em"cannot combine bound and alias", eqOffset) + } + makeTypeDef(rhs) + case bounds => makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => - makeTypeDef(typeBounds()) - case _ if (staged & StageKind.QuotedPattern) != 0 => - makeTypeDef(typeBounds()) + makeTypeDef(typeAndCtxBounds(tname)) + case _ if (staged & StageKind.QuotedPattern) != 0 + || in.featureEnabled(Feature.modularity) && in.isColon => + makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree @@ -4092,13 +4135,41 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + /** GivenDef ::= OldGivenDef | NewGivenDef + * OldGivenDef ::= [OldGivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + * + * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig + * GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} + * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + * | ConstrApps ['as' id] TemplateBody + * + * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: 
Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName + var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName + var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + + def moreConstrApps() = + if newSyntaxAllowed && in.token == COMMA then + in.nextToken() + constrApps() + else // need to be careful with last `with` + withConstrApps() + + // Adjust parameter modifiers so that they are now parameters of a method + // (originally, we created class parameters) + // TODO: syntax.md should be adjusted to reflect the difference that + // parameters of an alias given cannot be vals. + def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = + paramss.nestedMap: param => + if !param.mods.isAllOf(PrivateLocal) then + syntaxError(em"method parameter ${param.name} may not be a `val`", param.span) + param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Mutable) | Param) + .asInstanceOf[List[ParamClause]] val gdef = val tparams = typeParamClauseOpt(ParamOwner.Given) @@ -4109,31 +4180,55 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then acceptColon() + val hasParamsOrId = !name.isEmpty || !noParams + if hasParamsOrId then + if in.isColon then + newSyntaxAllowed = false + in.nextToken() + else if newSyntaxAllowed then accept(ARROW) + else acceptColon() val parents = - if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else refinedTypeRest(constrApp()) :: withConstrApps() + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent && newSyntaxAllowed => + infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + if newSyntaxAllowed && in.isIdent(nme.as) then + in.nextToken() + name = ident() + val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then + // given alias accept(EQUALS) mods1 |= Final if noParams && !mods.is(Inline) then mods1 |= Lazy ValDef(name, parents.head, subExpr()) else - DefDef(name, joinParams(tparams, vparamss), parents.head, subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType then + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) + else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then + // old-style abstract given if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") - DefDef(name, joinParams(tparams, vparamss), parents.head, EmptyTree) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else - val tparams1 = tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)) - val vparamss1 = vparamss.map(_.map(vparam => - vparam.withMods(vparam.mods &~ Param | ParamAccessor | Protected))) - val constr = makeConstructor(tparams1, vparamss1) + // structural instance + val vparamss1 = vparamss.nestedMap: vparam => + if vparam.mods.is(Private) + then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) + else vparam + val constr = makeConstructor(tparams, vparamss1) val templ = - if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else withTemplate(constr, parents) + if isStatSep || isStatSeqEnd then + Template(constr, parents, Nil, 
EmptyValDef, Nil) + else if !newSyntaxAllowed || in.token == WITH then + withTemplate(constr, parents) + else + possibleTemplateStart() + templateBodyOpt(constr, parents, Nil) if noParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef @@ -4205,10 +4300,10 @@ object Parsers { /* -------- TEMPLATES ------------------------------------------- */ - /** ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} + /** ConstrApp ::= AnnotType1 {ParArgumentExprs} */ val constrApp: () => Tree = () => - val t = rejectWildcardType(annotTypeRest(simpleType1()), + val t = rejectWildcardType(annotType1(), fallbackTree = Ident(tpnme.ERROR)) // Using Ident(tpnme.ERROR) to avoid causing cascade errors on non-user-written code if in.token == LPAREN then parArgumentExprss(wrapNew(t)) else t diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 87f7c88e0407..c06b43cafe17 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -113,7 +113,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Text = + def toTextRefinement(rt: RefinedType): Text = val keyword = rt.refinedInfo match { case _: ExprType | _: MethodOrPoly => "def " case _: TypeBounds => "type " @@ -434,11 +434,11 @@ class PlainPrinter(_ctx: Context) extends Printer { sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName /** String representation of a definition's type following its name, - * if symbol is completed, "?" otherwise. + * if symbol is completed, ": ?" otherwise. */ protected def toTextRHS(optType: Option[Type]): Text = optType match { case Some(tp) => toTextRHS(tp) - case None => "?" + case None => ": ?" 
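Taken together, the `givenDef` changes accept the `NewGivenDef` forms from the grammar comment above. A sketch of the surface syntax they appear to parse under the modularity import (read off that grammar; the `Ord` type class is an assumption for illustration):

  import scala.language.experimental.modularity

  trait Ord[T]:
    def compare(x: T, y: T): Int

  // NewGivenSig: GivenType ['as' id] followed by a template body
  given Ord[Int] as intOrd:
    def compare(x: Int, y: Int) = Integer.compare(x, y)

  // [GivenConditional '=>'] NewGivenSig: a conditional given whose condition
  // is a type parameter clause with a named context bound
  given [T: Ord as elemOrd] => Ord[List[T]] as listOrd:
    def compare(xs: List[T], ys: List[T]) =
      xs.zip(ys).map((a, b) => elemOrd.compare(a, b))
        .find(_ != 0)
        .getOrElse(Integer.compare(xs.length, ys.length))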
} protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) = diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 8687925ed5fb..297dc31ea94a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -4,7 +4,7 @@ package printing import core.* import Texts.*, ast.Trees.* -import Types.{Type, SingletonType, LambdaParam, NamedType}, +import Types.{Type, SingletonType, LambdaParam, NamedType, RefinedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context import typer.Implicits.* @@ -104,6 +104,9 @@ abstract class Printer { /** Textual representation of a prefix of some reference, ending in `.` or `#` */ def toTextPrefixOf(tp: NamedType): Text + /** textual representation of a refinement, with no enclosing {...} */ + def toTextRefinement(rt: RefinedType): Text + /** Textual representation of a reference in a capture set */ def toTextCaptureRef(tp: Type): Text diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 0329f0639d87..1ff4c8cae339 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -386,7 +386,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { keywordStr("for ") ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) } def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD - case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt) + case ContextBoundTypeTree(tpt, _, _) => " : " ~ toText(tpt) case untpd.Function(_, tpt) => " <% " ~ toText(tpt) } @@ -658,7 +658,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) + try changePrec(GlobalPrec)(toTextLocal(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner.isRetains && Feature.ccEnabled && !printDebug @@ -747,9 +747,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - cxBounds.foldLeft(toText(bounds)) {(t, cxb) => - t ~ cxBoundToText(cxb) - } + if Feature.enabled(Feature.modularity) then + def boundsText(bounds: Tree) = bounds match + case ContextBoundTypeTree(tpt, _, ownName) => + toText(tpt) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) + case bounds => toText(bounds) + cxBounds match + case bound :: Nil => ": " ~ boundsText(bound) + case _ => ": {" ~ Text(cxBounds.map(boundsText), ", ") ~ "}" + else + cxBounds.foldLeft(toText(bounds)) {(t, cxb) => + t ~ cxBoundToText(cxb) + } case PatDef(mods, pats, tpt, rhs) => modText(mods, NoSymbol, keywordStr("val"), isType = false) ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" = " ~ _) @@ -794,6 +803,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix case CapturesAndResult(refs, parent) => changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) + case ContextBoundTypeTree(tycon, pname, ownName) => + 
toText(pname) ~ " : " ~ toText(tycon) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) case _ => tree.fallbackToText(this) } diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index e51f0a8b77ac..04380a7b8e4a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -208,7 +208,9 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 case ExtensionNullifiedByMemberID // errorNumber: 194 - case InlinedAnonClassWarningID // errorNumber: 195 + case ConstructorProxyNotValueID // errorNumber: 195 + case ContextBoundCompanionNotValueID // errorNumber: 196 + case InlinedAnonClassWarningID // errorNumber: 197 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 51556a5c93ac..ceb8ecbc8e03 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3203,3 +3203,39 @@ class VolatileOnVal()(using Context) extends SyntaxMsg(VolatileOnValID): protected def msg(using Context): String = "values cannot be volatile" protected def explain(using Context): String = "" + +class ConstructorProxyNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"constructor proxy $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A constructor proxy is a symbol made up by the compiler to represent a non-existent + |factory method of a class. For instance, in + | + | class C(x: Int) + | + |C does not have an apply method since it is not a case class. Yet one can + |still create instances with applications like `C(3)` which expand to `new C(3)`. + |The `C` in this call is a constructor proxy. It can only be used as applications + |but not as a stand-alone value.""" + +class ContextBoundCompanionNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"context bound companion $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A context bound companion is a symbol made up by the compiler to represent the + |witness or witnesses generated for the context bound(s) of a type parameter or type. + |For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + |there is just a type `A` declared but not a value `A`. Nevertheless, one can write + |the selection `A.unit`, which works because the compiler created a context bound + |companion value with the (term-)name `A`. 
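A sketch of the feature the message above describes, following its `SemiGroup`/`Monoid` example; the `reduce` method is an added illustration of selecting through a context bound companion:

  import scala.language.experimental.modularity

  trait SemiGroup:
    type Self
    extension (x: Self) def combine(y: Self): Self

  trait Monoid extends SemiGroup:
    def unit: Self

  def reduce[A: Monoid](xs: List[A]): A =
    xs.foldLeft(A.unit)(_.combine(_))   // `A.unit` resolves via the context bound companion of A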
However, these context bound companions + |are not values themselves, they can only be referred to in selections.""" + diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 8bfbb90a0700..a25a2fcb5c6d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -567,7 +567,13 @@ object Erasure { case Some(annot) => val message = annot.argumentConstant(0) match case Some(c) => - c.stringValue.toMessage + val addendum = tree match + case tree: RefTree + if tree.symbol == defn.Compiletime_deferred && tree.name != nme.deferred => + i".\nNote that `deferred` can only be used under its own name when implementing a given in a trait; `${tree.name}` is not accepted." + case _ => + "" + (c.stringValue ++ addendum).toMessage case _ => em"""Reference to ${tree.symbol.showLocated} should not have survived, |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index d107de31829f..c6ad1bb860e8 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -279,9 +279,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkNoConstructorProxy(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Tree = + def unusable(msg: Symbol => Message) = + errorTree(tree, msg(tree.symbol)) if tree.symbol.is(ConstructorProxy) then - report.error(em"constructor proxy ${tree.symbol} cannot be used as a value", tree.srcPos) + unusable(ConstructorProxyNotValue(_)) + else if tree.symbol.isContextBoundCompanion then + unusable(ContextBoundCompanionNotValue(_)) + else + tree def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -326,11 +332,11 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkNoConstructorProxy(tree) registerNeedsInlining(tree) - tree.tpe match { + val tree1 = checkUsableAsValue(tree) + tree1.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) - case _ => tree + case _ => tree1 } case tree @ Select(qual, name) => registerNeedsInlining(tree) @@ -338,8 +344,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkNoConstructorProxy(tree) - transformSelect(tree, Nil) + checkUsableAsValue(tree) match + case tree1: Select => transformSelect(tree1, Nil) + case tree1 => tree1 case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = @@ -369,11 +376,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => case Select(nu: New, nme.CONSTRUCTOR) if isCheckable(nu) => // need to check instantiability here, because the type of the New itself // might be a type constructor. 
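The addendum above concerns `scala.compiletime.deferred`, which the Namer changes later in this patch turn into an abstract given with a default. A sketch of the intended use (the `Ord`/`Sorted` definitions are assumptions for illustration):

  import scala.language.experimental.modularity
  import scala.compiletime.deferred

  trait Ord[T]:
    def compare(x: T, y: T): Int

  trait Sorted:
    type Element
    given Ord[Element] = deferred    // becomes an abstract given; only allowed in a trait

  given Ord[Int]:
    def compare(x: Int, y: Int) = Integer.compare(x, y)

  class SortedInts extends Sorted:
    type Element = Int               // the deferred given is implemented from the Ord[Int] in scope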
- ctx.typer.checkClassType(tree.tpe, tree.srcPos, traitReq = false, stablePrefixReq = true) + def checkClassType(tpe: Type, stablePrefixReq: Boolean) = + ctx.typer.checkClassType(tpe, tree.srcPos, + traitReq = false, stablePrefixReq = stablePrefixReq, + refinementOK = Feature.enabled(Feature.modularity)) + checkClassType(tree.tpe, true) if !nu.tpe.isLambdaSub then // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` - ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) + checkClassType(nu.tpe, false) Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => @@ -448,8 +459,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => // Constructor parameters are in scope when typing a parent. // While they can safely appear in a parent tree, to preserve // soundness we need to ensure they don't appear in a parent - // type (#16270). - val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + // type (#16270). We can strip any refinement of a parent type since + // these refinements are split off from the parent type constructor + // application `parent` in Namer and don't show up as parent types + // of the class. + val illegalRefs = parent.tpe.dealias.stripRefinement.namedPartsWith: + p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym) if illegalRefs.nonEmpty then report.error( em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) @@ -463,6 +478,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => else if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then Checking.checkGoodBounds(tree.symbol) + // Delete all context bound companions of this TypeDef + if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then + val decls = sym.owner.info.decls + for cbCompanion <- decls.lookupAll(sym.name.toTermName) do + if cbCompanion.isContextBoundCompanion then + decls.openForMutations.unlink(cbCompanion) (tree.rhs, sym.info) match case (rhs: LambdaTypeTree, bounds: TypeBounds) => VarianceChecker.checkLambda(rhs, bounds) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 2ebe33a9a14f..c4e1c7892e8d 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -311,9 +311,11 @@ object TreeChecker { def assertDefined(tree: untpd.Tree)(using Context): Unit = if (tree.symbol.maybeOwner.isTerm) { val sym = tree.symbol + def isAllowed = // constructor proxies and context bound companions are flagged at PostTyper + isSymWithoutDef(sym) && ctx.phase.id < postTyperPhase.id assert( - nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym), - i"undefined symbol ${sym} at line " + tree.srcPos.line + nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym) || isAllowed, + i"undefined symbol ${sym} in ${sym.owner} at line " + tree.srcPos.line ) if (!ctx.phase.patternTranslated) @@ -384,6 +386,9 @@ object TreeChecker { case _ => } + def isSymWithoutDef(sym: Symbol)(using Context): Boolean = + sym.is(ConstructorProxy) || sym.isContextBoundCompanion + /** Exclude from double definition checks any erased symbols that were * made `private` in phase `UnlinkErasedDecls`. 
These symbols will be removed * completely in phase `Erasure` if they are defined in a currently compiled unit. @@ -614,14 +619,12 @@ object TreeChecker { val decls = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMember) val defined = impl.body.map(_.symbol) - def isAllowed(sym: Symbol): Boolean = sym.is(ConstructorProxy) - - val symbolsNotDefined = (decls -- defined - constr.symbol).filterNot(isAllowed) + val symbolsMissingDefs = (decls -- defined - constr.symbol).filterNot(isSymWithoutDef) - assert(symbolsNotDefined.isEmpty, - i" $cls tree does not define members: ${symbolsNotDefined.toList}%, %\n" + - i"expected: ${decls.toList}%, %\n" + - i"defined: ${defined}%, %") + assert(symbolsMissingDefs.isEmpty, + i"""$cls tree does not define members: ${symbolsMissingDefs.toList}%, % + |expected: ${decls.toList}%, % + |defined: ${defined}%, %""") super.typedClassDef(cdef, cls) } diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 756fd1a0a8e7..e11d0e1e21a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -20,6 +20,7 @@ object Util: def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match case tref: TypeRef => tref + case RefinedType(parent, _, _) => typeRefOf(parent) case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 76d057f15408..c3369ac58e31 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1791,7 +1791,7 @@ trait Applications extends Compatibility { * a. always as good as a method or a polymorphic method. * b. as good as a member of any other type `tp2` if `asGoodValueType(tp1, tp2) = true` */ - def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsGood $tp1 $tp2", overload) { tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] @@ -1886,7 +1886,7 @@ trait Applications extends Compatibility { then // Intermediate rules: better means specialize, but map all type arguments downwards // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, - // and in 3.5 amd 3.6-migration when we compare with previous rules. + // and in 3.5 and 3.6-migration when we compare with previous rules. 
val flip = new TypeMap: def apply(t: Type) = t match case t @ AppliedType(tycon, args) => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 7745c620312c..073055ba5b58 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -33,8 +33,7 @@ import Applications.UnapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import transform.ValueClasses.underlyingOfValueClass -import config.Feature -import config.Feature.sourceVersion +import config.Feature, Feature.{sourceVersion, modularity} import config.SourceVersion.* import config.MigrationVersion import printing.Formatting.hlAsKeyword @@ -198,7 +197,7 @@ object Checking { * and that the instance conforms to the self type of the created class. */ def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = - tp.underlyingClassRef(refinementOK = false) match + tp.underlyingClassRef(refinementOK = Feature.enabled(modularity)) match case tref: TypeRef => val cls = tref.symbol if (cls.isOneOf(AbstractOrTrait)) { @@ -601,6 +600,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) + checkNoConflict(Tracked, Mutable, em"mutable variables may not be `tracked`") checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } @@ -1067,8 +1067,8 @@ trait Checking { * check that class prefix is stable. * @return `tp` itself if it is a class or trait ref, ObjectType if not. */ - def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = - tp.underlyingClassRef(refinementOK = false) match { + def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean = false)(using Context): Type = + tp.underlyingClassRef(refinementOK) match case tref: TypeRef => if (traitReq && !tref.symbol.is(Trait)) report.error(TraitIsExpected(tref.symbol), pos) if (stablePrefixReq && ctx.phase <= refchecksPhase) checkStable(tref.prefix, pos, "class prefix") @@ -1076,7 +1076,6 @@ trait Checking { case _ => report.error(NotClassType(tp), pos) defn.ObjectType - } /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled. 
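The new `checkNoConflict(Tracked, Mutable, ...)` check above corresponds to a user-level restriction; a minimal sketch of the accepted and rejected shapes (error text taken from the check):

  import scala.language.experimental.modularity

  class Counter(tracked val start: Int)   // ok: tracked immutable parameter
  class Broken(tracked var count: Int)    // error: mutable variables may not be `tracked`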
* @pre sym.is(GivenOrImplicit) @@ -1332,20 +1331,20 @@ trait Checking { } /** Check that user-defined (result) type is fully applied */ - def checkFullyAppliedType(tree: Tree)(using Context): Unit = tree match + def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = tree match case TypeBoundsTree(lo, hi, alias) => - checkFullyAppliedType(lo) - checkFullyAppliedType(hi) - checkFullyAppliedType(alias) + checkFullyAppliedType(lo, prefix) + checkFullyAppliedType(hi, prefix) + checkFullyAppliedType(alias, prefix) case Annotated(arg, annot) => - checkFullyAppliedType(arg) + checkFullyAppliedType(arg, prefix) case LambdaTypeTree(_, body) => - checkFullyAppliedType(body) + checkFullyAppliedType(body, prefix) case _: TypeTree => case _ => if tree.tpe.typeParams.nonEmpty then val what = if tree.symbol.exists then tree.symbol.show else i"type $tree" - report.error(em"$what takes type parameters", tree.srcPos) + report.error(em"$prefix$what takes type parameters", tree.srcPos) /** Check that we are in an inline context (inside an inline method or in inline code) */ def checkInInlineContext(what: String, pos: SrcPos)(using Context): Unit = @@ -1610,7 +1609,7 @@ trait ReChecking extends Checking { override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () - override def checkFullyAppliedType(tree: Tree)(using Context): Unit = () + override def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = () override def checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = () override def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = true override def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = () @@ -1626,7 +1625,7 @@ trait NoChecking extends ReChecking { override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () - override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp + override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index bc19e97b85d8..fd22f0ec5529 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -531,7 +531,7 @@ object Implicits: |must be more specific than $target""" :: Nil override def msg(using Context) = - super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") + super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") override def 
toString = s"TooUnspecific" end TooUnspecific @@ -924,10 +924,10 @@ trait Implicits: /** Search an implicit argument and report error if not found */ - def implicitArgTree(formal: Type, span: Span)(using Context): Tree = { + def implicitArgTree(formal: Type, span: Span, where: => String = "")(using Context): Tree = { val arg = inferImplicitArg(formal, span) if (arg.tpe.isInstanceOf[SearchFailureType]) - report.error(missingArgMsg(arg, formal, ""), ctx.source.atSpan(span)) + report.error(missingArgMsg(arg, formal, where), ctx.source.atSpan(span)) arg } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 72ca6a35bf4b..83964417a6f1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -26,7 +26,7 @@ import Nullables.* import transform.ValueClasses.* import TypeErasure.erasure import reporting.* -import config.Feature.sourceVersion +import config.Feature.{sourceVersion, modularity} import config.SourceVersion.* import scala.compiletime.uninitialized @@ -55,11 +55,12 @@ class Namer { typer: Typer => import untpd.* - val TypedAhead : Property.Key[tpd.Tree] = new Property.Key - val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key - val ExportForwarders: Property.Key[List[tpd.MemberDef]] = new Property.Key - val SymOfTree : Property.Key[Symbol] = new Property.Key - val AttachedDeriver : Property.Key[Deriver] = new Property.Key + val TypedAhead : Property.Key[tpd.Tree] = new Property.Key + val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key + val ExportForwarders : Property.Key[List[tpd.MemberDef]] = new Property.Key + val ParentRefinements: Property.Key[List[Symbol]] = new Property.Key + val SymOfTree : Property.Key[Symbol] = new Property.Key + val AttachedDeriver : Property.Key[Deriver] = new Property.Key // was `val Deriver`, but that gave shadowing problems with constructor proxies /** A partial map from unexpanded member and pattern defs and to their expansions. @@ -121,7 +122,8 @@ class Namer { typer: Typer => /** Record `sym` as the symbol defined by `tree` */ def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = { - for (refs <- tree.removeAttachment(References); ref <- refs) ref.watching(sym) + for refs <- tree.removeAttachment(References); ref <- refs do + ref.watching(sym) tree.pushAttachment(SymOfTree, sym) sym } @@ -294,12 +296,15 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSymbol(ctx.owner, Completer(tree)(ctx), tree.span), tree) + recordSym(importSymbol(tree), tree) case _ => NoSymbol } } + private def importSymbol(imp: Import)(using Context): Symbol = + newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) + /** If `sym` exists, enter it in effective scope. Check that * package members are not entered twice in the same run. 
*/ @@ -401,6 +406,11 @@ class Namer { typer: Typer => enterSymbol(sym) setDocstring(sym, origStat) addEnumConstants(mdef, sym) + mdef match + case tdef: TypeDef if ctx.owner.isClass => + for case WitnessNamesAnnot(witnessNames) <- tdef.mods.annotations do + addContextBoundCompanionFor(symbolOfTree(tdef), witnessNames, Nil) + case _ => ctx case stats: Thicket => stats.toList.foreach(recur) @@ -524,11 +534,9 @@ class Namer { typer: Typer => } /** Transfer all references to `from` to `to` */ - def transferReferences(from: ValDef, to: ValDef): Unit = { - val fromRefs = from.removeAttachment(References).getOrElse(Nil) - val toRefs = to.removeAttachment(References).getOrElse(Nil) - to.putAttachment(References, fromRefs ++ toRefs) - } + def transferReferences(from: ValDef, to: ValDef): Unit = + for ref <- from.removeAttachment(References).getOrElse(Nil) do + ref.watching(to) /** Merge the module class `modCls` in the expanded tree of `mdef` with the * body and derived clause of the synthetic module class `fromCls`. @@ -706,7 +714,18 @@ class Namer { typer: Typer => enterSymbol(companion) end addAbsentCompanions - stats.foreach(expand) + /** Expand each statement, keeping track of language imports in the context. This is + * necessary since desugaring might depend on language imports. + */ + def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match + case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => + expandTopLevel(stats1)(using ctx.importContext(imp, importSymbol(imp))) + case stat :: stats1 => + expand(stat) + expandTopLevel(stats1) + case Nil => + + expandTopLevel(stats) mergeCompanionDefs() val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(using ctx)) inContext(ctxWithStats) { @@ -1203,7 +1222,9 @@ class Namer { typer: Typer => target = target.etaExpand newSymbol( cls, forwarderName, - MandatoryExportTypeFlags | (sym.flags & RetainedExportTypeFlags), + Exported + | (sym.flags & RetainedExportTypeFlags) + | (if Feature.enabled(modularity) then EmptyFlags else Final), TypeAlias(target), coord = span) // Note: This will always create unparameterzied aliases. So even if the original type is @@ -1513,6 +1534,7 @@ class Namer { typer: Typer => /** The type signature of a ClassDef with given symbol */ override def completeInCreationContext(denot: SymDenotation): Unit = { val parents = impl.parents + val parentRefinements = new mutable.LinkedHashMap[Name, Type] /* The type of a parent constructor. Types constructor arguments * only if parent type contains uninstantiated type parameters. 
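`expandTopLevel` above keeps language imports in effect while the following top-level statements are expanded, since their desugaring can depend on them. A sketch of a source file that relies on this (the `largest` method is an illustration only):

  // File: Utils.scala
  import scala.language.experimental.modularity

  // Expanding the named context bound below depends on the language import
  // above being visible for this top-level definition.
  def largest[A: Ordering as ord](xs: List[A]): A =
    xs.max(using ord)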
@@ -1526,8 +1548,9 @@ class Namer { typer: Typer => core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) - val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes - if (ptype.typeParams.isEmpty) ptype + val ptype = typedAheadType(tpt).tpe.appliedTo(targs1.tpes) + if ptype.typeParams.isEmpty && !ptype.dealias.typeSymbol.is(Dependent) then + ptype else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) @@ -1567,8 +1590,13 @@ class Namer { typer: Typer => val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype else { - val pt = checkClassType(ptype, parent.srcPos, - traitReq = parent ne parents.head, stablePrefixReq = !isJava) + val pt = checkClassType( + if Feature.enabled(modularity) + then ptype.separateRefinements(cls, parentRefinements) + else ptype, + parent.srcPos, + traitReq = parent ne parents.head, + stablePrefixReq = !isJava) if (pt.derivesFrom(cls)) { val addendum = parent match { case Select(qual: Super, _) if Feature.migrateTo3 => @@ -1595,6 +1623,23 @@ class Namer { typer: Typer => } } + /** Enter all parent refinements as public class members, unless a definition + * with the same name already exists in the class. Remember the refining symbols + * as an attachment on the ClassDef tree. + */ + def enterParentRefinementSyms(refinements: List[(Name, Type)]) = + val refinedSyms = mutable.ListBuffer[Symbol]() + for (name, tp) <- refinements do + if decls.lookupEntry(name) == null then + val flags = tp match + case tp: MethodOrPoly => Method | Synthetic | Deferred | Tracked + case _ if name.isTermName => Synthetic | Deferred | Tracked + case _ => Synthetic | Deferred + refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered + if refinedSyms.nonEmpty then + typr.println(i"parent refinement symbols: ${refinedSyms.toList}") + original.pushAttachment(ParentRefinements, refinedSyms.toList) + /** If `parents` contains references to traits that have supertraits with implicit parameters * add those supertraits in linearization order unless they are already covered by other * parent types. For instance, in @@ -1636,11 +1681,9 @@ class Namer { typer: Typer => val parentTypes = defn.adjustForTuple(cls, cls.typeParams, defn.adjustForBoxedUnit(cls, - addUsingTraits( - locally: - val isJava = ctx.isJava - ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) - ) + addUsingTraits: + val isJava = ctx.isJava + ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) ) ) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") @@ -1665,6 +1708,7 @@ class Namer { typer: Typer => cls.invalidateMemberCaches() // we might have checked for a member when parents were not known yet. cls.setNoInitsFlags(parentsKind(parents), untpd.bodyKind(rest)) cls.setStableConstructor() + enterParentRefinementSyms(parentRefinements.toList) processExports(using localCtx) defn.patchStdLibClass(cls) addConstructorProxies(cls) @@ -1711,12 +1755,6 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym - /** Enter and typecheck parameter list */ - def completeParams(params: List[MemberDef])(using Context): Unit = { - index(params) - for (param <- params) typedAheadExpr(param) - } - /** The signature of a module valdef. 
* This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. This is necessary @@ -1792,6 +1830,18 @@ class Namer { typer: Typer => case _ => WildcardType } + + // translate `given T = deferred` to an abstract given with HasDefault flag + if sym.is(Given) then + mdef.rhs match + case rhs: RefTree + if rhs.name == nme.deferred + && typedAheadExpr(rhs).symbol == defn.Compiletime_deferred + && sym.maybeOwner.is(Trait) => + sym.resetFlag(Final) + sym.setFlag(Deferred | HasDefault) + case _ => + val mbrTpe = paramFn(checkSimpleKinded(typedAheadType(mdef.tpt, tptProto)).tpe) if (ctx.explicitNulls && mdef.mods.is(JavaDefined)) JavaNullInterop.nullifyMember(sym, mbrTpe, mdef.mods.isAllOf(JavaEnumValue)) @@ -1799,10 +1849,35 @@ class Namer { typer: Typer => } /** The type signature of a DefDef with given symbol */ - def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = { + def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR + // A map from context-bounded type parameters to associated evidence parameter names + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` + * that have been typed ahead (i.e. that carry the TypedAhead attachment)? + */ + def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = + (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + + /** Enter and typecheck parameter list. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. + */ + def completeParams(params: List[MemberDef])(using Context): Unit = + index(params) + for param <- params do + typedAheadExpr(param) + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, params) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: @@ -1835,16 +1910,54 @@ class Namer { typer: Typer => ddef.trailingParamss.foreach(completeParams) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) + + /** Under x.modularity, we add `tracked` to context bound witnesses + * that have abstract type members + */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. Reset private and local flags + * so that the parameter becomes a `val`. 
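`needsTracked`/`setTracked` above apply the `tracked` modifier automatically to suitable context bound evidence parameters; the modifier can also be written explicitly on a class parameter. A sketch of the intended effect (the refined type in the comment is an expectation based on this patch, not verified output):

  import scala.language.experimental.modularity

  trait Vec:
    type Element

  object IntVec extends Vec:
    type Element = Int

  // Because `v` is tracked, an application like VecOps(IntVec) is expected to
  // get the refined type VecOps { val v: IntVec.type }, so v.Element is known.
  class VecOps(tracked val v: Vec):
    def first(xs: List[v.Element]): v.Element = xs.head

  val ops = VecOps(IntVec)
  val n: Int = ops.first(List(1, 2, 3))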
+ */ + def setTracked(param: ValDef): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: TempClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) + + def wrapRefinedMethType(restpe: Type): Type = + wrapMethType(addParamRefinements(restpe, paramSymss)) + if isConstructor then + if sym.isPrimaryConstructor && Feature.enabled(modularity) then + ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) + else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + // set every context bound evidence parameter of a given companion method + // to be tracked, provided it has a type that has an abstract type member. + // Add refinements for all tracked parameters to the result type. + for params <- ddef.termParamss; param <- params do + val psym = symbolOfTree(param) + if needsTracked(psym, param) then psym.setFlag(Tracked) + valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) - } + end defDefSig def inferredResultType( mdef: ValOrDefDef, @@ -1978,7 +2091,7 @@ class Namer { typer: Typer => if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) match case ctp: ConstantType if sym.isInlineVal => ctp - case tp => TypeComparer.widenInferred(tp, pt, widenUnions = true) + case tp => TypeComparer.widenInferred(tp, pt, Widen.Unions) // Replace aliases to Unit by Unit itself. If we leave the alias in // it would be erased to BoxedUnit. diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 46c12b244fbb..ecf1da30cac1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -11,6 +11,7 @@ import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} import Decorators.* import Uniques.* +import Flags.Method import inlines.Inlines import config.Printers.typr import Inferencing.* @@ -26,7 +27,7 @@ object ProtoTypes { import tpd.* /** A trait defining an `isCompatible` method. */ - trait Compatibility { + trait Compatibility: /** Is there an implicit conversion from `tp` to `pt`? */ def viewExists(tp: Type, pt: Type)(using Context): Boolean @@ -106,19 +107,34 @@ object ProtoTypes { if !res then ctx.typerState.constraint = savedConstraint res - /** Constrain result with special case if `meth` is an inlineable method in an inlineable context. - * In that case, we should always succeed and not constrain type parameters in the expected type, - * because the actual return type can be a subtype of the currently known return type. - * However, we should constrain parameters of the declared return type. This distinction is - * achieved by replacing expected type parameters with wildcards. + /** Constrain result with two special cases: + * 1. 
If `meth` is an inlineable method in an inlineable context, + * we should always succeed and not constrain type parameters in the expected type, + * because the actual return type can be a subtype of the currently known return type. + * However, we should constrain parameters of the declared return type. This distinction is + * achieved by replacing expected type parameters with wildcards. + * 2. When constraining the result of a primitive value operation against + * a precise typevar, don't lower-bound the typevar with a non-singleton type. */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = - if (Inlines.isInlineable(meth)) { + + def constFoldException(pt: Type): Boolean = pt.dealias match + case tvar: TypeVar => + tvar.isPrecise + && meth.is(Method) && meth.owner.isPrimitiveValueClass + && mt.resultType.isPrimitiveValueType && !mt.resultType.isSingleton + case tparam: TypeParamRef => + constFoldException(ctx.typerState.constraint.typeVarOfParam(tparam)) + case _ => + false + + if Inlines.isInlineable(meth) then constrainResult(mt, wildApprox(pt)) true - } - else constrainResult(mt, pt) - } + else + constFoldException(pt) || constrainResult(mt, pt) + end constrainResult + end Compatibility object NoViewsAllowed extends Compatibility { override def viewExists(tp: Type, pt: Type)(using Context): Boolean = false @@ -701,6 +717,20 @@ object ProtoTypes { case FunProto((arg: untpd.TypedSplice) :: Nil, _) => arg.isExtensionReceiver case _ => false + /** An extractor for Singleton and Precise witness types. + * + * Singleton { type Self = T } returns Some(T, true) + * Precise { type Self = T } returns Some(T, false) + */ + object PreciseConstrained: + def unapply(tp: Type)(using Context): Option[(Type, Boolean)] = tp.dealias match + case RefinedType(parent, tpnme.Self, TypeAlias(tp)) => + val tsym = parent.typeSymbol + if tsym == defn.SingletonClass then Some((tp, true)) + else if tsym == defn.PreciseClass then Some((tp, false)) + else None + case _ => None + /** Add all parameters of given type lambda `tl` to the constraint's domain. * If the constraint contains already some of these parameters in its domain, * make a copy of the type lambda and add the copy's type parameters instead. 
@@ -713,26 +743,43 @@ object ProtoTypes { tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean, nestingLevel: Int = ctx.nestingLevel - ): (TypeLambda, List[TypeVar]) = { + ): (TypeLambda, List[TypeVar]) = val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty if (tl.isInstanceOf[PolyType]) assert(!ctx.typerState.isCommittable || addTypeVars, s"inconsistent: no typevars were added to committable constraint ${state.constraint}") // hk type lambdas can be added to constraints without typevars during match reduction + val added = state.constraint.ensureFresh(tl) - def newTypeVars(tl: TypeLambda): List[TypeVar] = - for paramRef <- tl.paramRefs - yield - val tvar = TypeVar(paramRef, state, nestingLevel) + def preciseConstrainedRefs(tp: Type, singletonOnly: Boolean): Set[TypeParamRef] = tp match + case tp: MethodType if tp.isContextualMethod => + val ownBounds = + for + case PreciseConstrained(ref: TypeParamRef, singleton) <- tp.paramInfos + if !singletonOnly || singleton + yield ref + ownBounds.toSet ++ preciseConstrainedRefs(tp.resType, singletonOnly) + case tp: LambdaType => + preciseConstrainedRefs(tp.resType, singletonOnly) + case _ => + Set.empty + + def newTypeVars: List[TypeVar] = + val preciseRefs = preciseConstrainedRefs(added, singletonOnly = false) + for paramRef <- added.paramRefs yield + val tvar = TypeVar(paramRef, state, nestingLevel, precise = preciseRefs.contains(paramRef)) state.ownedVars += tvar tvar - val added = state.constraint.ensureFresh(tl) - val tvars = if addTypeVars then newTypeVars(added) else Nil + val tvars = if addTypeVars then newTypeVars else Nil TypeComparer.addToConstraint(added, tvars) + val singletonRefs = preciseConstrainedRefs(added, singletonOnly = true) + for paramRef <- added.paramRefs do + // Constrain all type parameters [T: Singleton] to T <: Singleton + if singletonRefs.contains(paramRef) then paramRef <:< defn.SingletonType (added, tvars) - } + end constrained def constrained(tl: TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeVar]) = constrained(tl, owningTree, diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 9741a366da89..7a5c838848ac 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -182,4 +182,5 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(using Context): Unit = () override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call) + override protected def migrate[T](migration: => T, disabled: => T = ()): T = disabled } diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 2bf4b959ebca..cb1aea27c444 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -552,7 +552,11 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if !other.is(Deferred) + else if (!other.is(Deferred) + || other.isAllOf(Given | HasDefault) + // deferred givens have flags Given, 
HasDefault and Deferred set. These + // need to be checked for overriding as if they were concrete members + ) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride @@ -610,8 +614,13 @@ object RefChecks { overrideError("is not inline, cannot implement an inline method") else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - else if (!compatTypes(memberTp(self), otherTp(self)) && - !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) + else if !compatTypes(memberTp(self), otherTp(self)) + && !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf)) + && !member.is(Tracked) + // Tracked members need to be excluded since they are abstract type members with + // singleton types. Concrete overrides usually have a wider type. + // TODO: Should we exclude all refinements inherited from parents? + then overrideError("has incompatible type", compareTypes = true) else if (member.targetName != other.targetName) if (other.targetName != other.name) @@ -620,7 +629,9 @@ object RefChecks { overrideError("cannot have a @targetName annotation since external names would be different") else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) - else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) + && !member.is(Tracked) // see remark on tracked members above + then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", member.srcPos, @@ -670,6 +681,10 @@ object RefChecks { mbr.isType || mbr.isSuperAccessor // not yet synthesized || mbr.is(JavaDefined) && hasJavaErasedOverriding(mbr) + || mbr.is(Tracked) + // Tracked members correspond to existing val parameters, so they don't + // count as deferred. The val parameter could not implement the tracked + // refinement since it usually has a wider type. def isImplemented(mbr: Symbol) = val mbrDenot = mbr.asSeenFrom(clazz.thisType) diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 21d1151bcfd3..6b18540b6551 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -237,6 +237,23 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): EmptyTreeNoError end synthesizedValueOf + val synthesizedSingleton: SpecialHandler = (formal, span) => formal match + case PreciseConstrained(tp, true) => + if tp.isSingletonBounded(frozen = false) then + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + else + withErrors(i"$tp is not a singleton") + case _ => + EmptyTreeNoError + + val synthesizedPrecise: SpecialHandler = (formal, span) => formal match + case PreciseConstrained(tp, false) => + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + case _ => + EmptyTreeNoError + /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. 
*/ @@ -536,7 +553,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val tparams = poly.paramRefs val variances = childClass.typeParams.map(_.paramVarianceSign) val instanceTypes = tparams.lazyZip(variances).map((tparam, variance) => - TypeComparer.instanceType(tparam, fromBelow = variance < 0, widenUnions = true) + TypeComparer.instanceType(tparam, fromBelow = variance < 0, Widen.Unions) ) val instanceType = resType.substParams(poly, instanceTypes) // this is broken in tests/run/i13332intersection.scala, @@ -738,6 +755,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): defn.MirrorClass -> synthesizedMirror, defn.ManifestClass -> synthesizedManifest, defn.OptManifestClass -> synthesizedOptManifest, + defn.SingletonClass -> synthesizedSingleton, + defn.PreciseClass -> synthesizedPrecise, ) def tryAll(formal: Type, span: Span)(using Context): TreeWithErrors = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 46982cf1406d..2a69c948baae 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,8 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -184,6 +183,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) + /** Apply given migration. Overridden to use `disabled` instead in ReTypers. */ + protected def migrate[T](migration: => T, disabled: => T = ()): T = migration + /** Find the type of an identifier with given `name` in given context `ctx`. * @param name the name of the identifier * @param pt the expected type @@ -838,6 +840,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return dynSelected.ensureConforms(fieldType) case _ => + // Otherwise, if the qualifier is a context bound companion, handle + // by selecting a witness in typedCBSelect + if qual.tpe.typeSymbol == defn.CBCompanion then + val witnessSelection = typedCBSelect(tree0, pt, qual) + if !witnessSelection.isEmpty then return witnessSelection + // Otherwise, report an error assignType(tree, rawType match @@ -847,6 +855,80 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer notAMemberErrorType(tree, qual, pt)) end typedSelect + /** Expand a selection A.m on a context bound companion A with type + * `[ref_1 | ... | ref_N]` as described by + * Step 3 of the doc comment of annotation.internal.WitnessNames. + * @return the best alternative if it exists, + * or EmptyTree if no witness admits selecting with the given name, + * or EmptyTree and report an ambiguity error of there are several + * possible witnesses and no selection is better than the other + * according to the critera given in Step 3. + */ + def typedCBSelect(tree: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + + type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] + + /** Compare two alternative selections `alt1` and `alt2` from witness types + * `wit1`, `wit2` according to the 3 criteria in Step 3 of the doc comment + * of annotation.internal.WitnessNames. I.e. 
+ * + * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 + * + * @return 1 if 1st alternative is preferred over 2nd + * -1 if 2nd alternative is preferred over 1st + * 0 if neither alternative is preferred over the other + */ + def compareAlts(alt1: Tree, alt2: Tree, wit1: TermRef, wit2: TermRef): Int = + val cmpPrefix = compare(wit1, wit2, preferGeneral = true) + typr.println(i"compare witnesses $wit1: ${wit1.info}, $wit2: ${wit2.info} = $cmpPrefix") + if cmpPrefix != 0 then cmpPrefix + else (alt1.tpe, alt2.tpe) match + case (tp1: TypeRef, tp2: TypeRef) => + if tp1.dealias == tp2.dealias then 1 else 0 + case (tp1: TermRef, tp2: TermRef) => + if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 + else compare(tp1, tp2, preferGeneral = false) + case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over others + case (_, tp2: TermRef) => -1 + case _ => 0 + + /** Find the set of maximally preferred alternatives among `prevs` and + * alternatives referred to by `witnesses`. + * @param prevs a list of (ref tree, typer state, term ref) tripls that + * represents previously identified alternatives + * @param witnesses a type of the form ref_1 | ... | ref_n containing references + * still to be considered. + */ + def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match + case OrType(wit1, wit2) => + tryAlts(tryAlts(prevs, wit1), wit2) + case witness: TermRef => + val altQual = tpd.ref(witness).withSpan(qual.span) + val altCtx = ctx.fresh.setNewTyperState() + val alt = typedSelect(tree, pt, altQual)(using altCtx) + def current = (alt, altCtx.typerState, witness) + if altCtx.reporter.hasErrors then prevs + else + val comparisons = prevs.map: (prevTree, prevState, prevWitness) => + compareAlts(prevTree, alt, prevWitness, witness) + if comparisons.exists(_ == 1) then prevs + else current :: prevs.zip(comparisons).collect{ case (prev, cmp) if cmp != -1 => prev } + + qual.tpe.widen match + case AppliedType(_, arg :: Nil) => + tryAlts(Nil, arg) match + case Nil => EmptyTree + case (best @ (bestTree, bestState, _)) :: Nil => + bestState.commit() + bestTree + case multiAlts => + report.error( + em"""Ambiguous witness reference. 
None of the following alternatives is more specific than the other: + |${multiAlts.map((alt, _, witness) => i"\n $witness.${tree.name}: ${alt.tpe.widen}")}""", + tree.srcPos) + EmptyTree + end typedCBSelect + def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { record("typedSelect") @@ -1004,10 +1086,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tp.exists && !tp.typeSymbol.is(Final) && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated - if (templ1.parents.isEmpty && - isFullyDefined(pt, ForceDegree.flipBottom) && - isSkolemFree(pt) && - isEligible(pt.underlyingClassRef(refinementOK = false))) + if templ1.parents.isEmpty + && isFullyDefined(pt, ForceDegree.flipBottom) + && isSkolemFree(pt) + && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(modularity))) + then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) @@ -2281,6 +2364,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.tpFun(tsyms, vsyms) completeTypeTree(InferredTypeTree(), tp, tree) + def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = + val tycon = typedType(tree.tycon) + val tyconSplice = untpd.TypedSplice(tycon) + val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) + if tycon.tpe.typeParams.nonEmpty then + typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then + val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) + typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) + else + def selfNote = + if Feature.enabled(modularity) then + " and\ndoes not have an abstract type member named `Self` either" + else "" + errorTree(tree, + em"Illegal context bound: ${tycon.tpe} does not take type parameters$selfNote.") + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") @@ -2288,7 +2388,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = { - val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) + val tpt1 = if tree.tpt == EmptyTree then TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withSpan(tree.span) val refineCls = createSymbol(refineClsDef).asClass val TypeDef(_, impl: Template) = typed(refineClsDef): @unchecked @@ -2512,7 +2612,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var name = tree.name if (name == nme.WILDCARD && tree.mods.is(Given)) { val Typed(_, tpt) = tree.body: @unchecked - name = desugar.inventGivenOrExtensionName(tpt) + name = desugar.inventGivenName(tpt) } if (name == nme.WILDCARD) body1 else { @@ -2632,17 +2732,32 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if filters == List(MessageFilter.None) then sup.markUsed() ctx.run.nn.suppressions.addSuppression(sup) + /** Run `typed` on `rhs` except if `rhs` is the right hand side of a deferred given, + * in which case the empty tree is returned. 
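+   *  For example, the right-hand side of `given Ord[Element] = deferred` in a trait is not
+   *  typechecked here; a concrete definition is supplied later, either written explicitly in a
+   *  subclass or synthesized by `implementDeferredGivens` when a class inherits the trait.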
+ */ + private inline def excludeDeferredGiven( + rhs: untpd.Tree, sym: Symbol)( + inline typed: untpd.Tree => Tree)(using Context): Tree = + rhs match + case rhs: RefTree + if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case _ => + typed(rhs) + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) - if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) + if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) - val rhs1 = vdef.rhs match { - case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe - case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) - } + val rhs1 = vdef.rhs match + case rhs @ Ident(nme.WILDCARD) => + rhs.withType(tpt1.tpe) + case rhs => + excludeDeferredGiven(rhs, sym): + typedExpr(_, tpt1.tpe.widenExpr) val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(vdef1, sym) vdef1.setDefTree @@ -2702,9 +2817,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = excludeDeferredGiven(ddef.rhs, sym): rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(rhs)(using rhsCtx) + else typedExpr(rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then @@ -2777,8 +2893,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typeIndexedLambdaTypeTree(rhs, tparams, body) case rhs => typedType(rhs) - checkFullyAppliedType(rhs1) - if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) + if sym.isOpaqueAlias then + checkFullyAppliedType(rhs1, "Opaque type alias must be fully applied, but ") + checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } @@ -2871,6 +2988,72 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Add all parent refinement symbols as declarations to this class */ + def addParentRefinements(body: List[Tree])(using Context): List[Tree] = + cdef.getAttachment(ParentRefinements) match + case Some(refinedSyms) => + val refinements = refinedSyms.map: sym => + ( if sym.isType then TypeDef(sym.asType) + else if sym.is(Method) then DefDef(sym.asTerm) + else ValDef(sym.asTerm) + ).withSpan(impl.span.startPos) + body ++ refinements + case None => + body + + /** Implement givens that were declared with a `deferred` rhs. + * The a given value matching the declared type is searched in a + * context directly enclosing the current class, in which all given + * parameters of the current class are also defined. + */ + def implementDeferredGivens(body: List[Tree]): List[Tree] = + if cls.is(Trait) || ctx.isAfterTyper then body + else + def isGivenValue(mbr: TermRef) = + val dcl = mbr.symbol + if dcl.is(Method) then + report.error( + em"""Cannnot infer the implementation of the deferred ${dcl.showLocated} + |since that given is parameterized. 
An implementing given needs to be written explicitly.""", + cdef.srcPos) + false + else true + + def givenImpl(mbr: TermRef): ValDef = + val dcl = mbr.symbol + val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) + val constr = cls.primaryConstructor + val usingParamAccessors = cls.paramAccessors.filter(_.is(Given)) + val paramScope = newScopeWith(usingParamAccessors*) + val searchCtx = ctx.outer.fresh.setScope(paramScope) + val rhs = implicitArgTree(target, cdef.span, + where = i"inferring the implementation of the deferred ${dcl.showLocated}" + )(using searchCtx) + + val impl = dcl.copy(cls, + flags = dcl.flags &~ (HasDefault | Deferred) | Final | Override, + info = target, + coord = rhs.span).entered.asTerm + + def anchorParams = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match + case id: Ident if usingParamAccessors.contains(id.symbol) => + cpy.Select(id)(This(cls), id.name) + case _ => + super.transform(tree) + ValDef(impl, anchorParams.transform(rhs)).withSpan(impl.span.endPos) + end givenImpl + + val givenImpls = + cls.thisType.implicitMembers + //.showing(i"impl def givens for $cls/$result") + .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + //.showing(i"impl def filtered givens for $cls/$result") + .filter(isGivenValue) + .map(givenImpl) + body ++ givenImpls + end implementDeferredGivens + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2891,7 +3074,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef.withType(UnspecifiedErrorType) else { val dummy = localDummy(cls, impl) - val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1) + val body1 = + implementDeferredGivens( + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1))) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) @@ -3148,8 +3335,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if ctx.mode.is(Mode.Pattern) then app1 else - val elemTpes = elems.lazyZip(pts).map((elem, pt) => - TypeComparer.widenInferred(elem.tpe, pt, widenUnions = true)) + val elemTpes = elems.lazyZip(pts).map: (elem, pt) => + TypeComparer.widenInferred(elem.tpe, pt, Widen.Unions) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) @@ -3249,6 +3436,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) case tree: untpd.InLambdaTypeTree => typedInLambdaTypeTree(tree, pt) + case tree: untpd.ContextBoundTypeTree => typedContextBoundTypeTree(tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) @@ -4400,7 +4588,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = false) + val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(modularity)) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git 
a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index a856a5b84d92..d6f962176ecc 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -103,7 +103,7 @@ i13842.scala # Position change under captureChecking boxmap-paper.scala -# Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled +# Function types print different after unpickling since test mispredicts Feature.preFundsEnabled caps-universal.scala # GADT cast applied to singleton type difference @@ -124,3 +124,14 @@ i19955a.scala i19955b.scala i20053b.scala +# alias types at different levels of dereferencing +parsercombinators-givens.scala +parsercombinators-givens-2.scala +parsercombinators-ctx-bounds.scala +parsercombinators-this.scala +parsercombinators-arrow.scala +parsercombinators-new-syntax.scala +hylolib-deferred-given +hylolib-cb +hylolib + diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index e4c3a2557e7d..f719752be353 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -122,11 +122,11 @@ class TabcompleteTests extends ReplTest { } @Test def moduleCompletion = initially { - assertEquals(List("Predef"), tabComplete("object Foo { type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { type T = Pred")) } @Test def i6415 = initially { - assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pred")) } @Test def i6361 = initially { diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 8cc070d5dbc5..dd4a3af403ab 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -191,6 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) +AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds @@ -220,7 +221,9 @@ IntoTargetType ::= Type TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) -TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} NamesAndTypes ::= NameAndType {‘,’ NameAndType} NameAndType ::= id ':' Type @@ -358,7 +361,7 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ```ebnf ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) - id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) + id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -372,7 +375,7 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, 
expr) -- point of mods on val/var - [{Modifier} (‘val’ | ‘var’)] Param + [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent DefParamClause ::= DefTypeParamClause @@ -383,7 +386,7 @@ TypelessClause ::= DefTermParamClause | UsingParamClause DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -454,7 +457,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef @@ -466,9 +469,13 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType1 {id [nl] AnnotType1} + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md new file mode 100644 index 000000000000..a989b71770af --- /dev/null +++ b/docs/_docs/reference/experimental/modularity.md @@ -0,0 +1,189 @@ +--- +layout: doc-page +title: "Modularity Improvements" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/modularity.html +--- + +# Modularity Improvements + +Martin Odersky, 7.1.2024 + +Scala is a language in the SML tradition, in the sense that it has +abstract and alias types as members of modules (which in Scala take the form of objects and classes). This leads to a simple dependently +typed system, where dependencies in types are on paths instead of full terms. + +So far, some key ingredients were lacking which meant that module composition with functors is harder in Scala than in SML. In particular, one often needs to resort the infamous `Aux` pattern that lifts type members into type parameters so that they can be tracked across class instantiations. This makes modular, dependently typed programs +much harder to write and read, and makes such programming only accessible to experts. 
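+
+For readers who have not seen it, here is a minimal sketch of the `Aux` pattern (the names are
+illustrative and anticipate the `SetFunctor` example further below); it is exactly this kind of
+boilerplate that the proposal aims to make unnecessary:
+
+```scala
+trait Ordering:
+  type T
+  def compare(t1: T, t2: T): Int
+
+object Ordering:
+  // lift the type member `T` into a type parameter so it can be tracked across instantiations
+  type Aux[A] = Ordering { type T = A }
+
+// the functor must carry a type parameter whose only purpose is to remember `ord.T`
+class SetFunctor[A](val ord: Ordering.Aux[A]):
+  type Set = List[A]
+```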
+ +In this note I propose some small changes to Scala's dependent typing that makes +modular programming much more straightforward. + +The suggested improvements have been implemented and are available +in source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Tracked Parameters + +Scala is dependently typed for functions, but unfortunately not for classes. +For instance, consider the following definitions: + +```scala + class C: + type T + ... + + def f(x: C): x.T = ... + + val y: C { type T = Int } +``` +Then `f(y)` would have type `Int`, since the compiler will substitute the +concrete parameter reference `y` for the formal parameter `x` in the result +type of `f`, and `y.T = Int` + +However, if we use a class `F` instead of a method `f`, things go wrong. + +```scala + class F(val x: C): + val result: x.T = ... +``` +Now `F(y).result` would not have type `Int` but instead the rather less useful type `?1.T` where `?1` is a so-called skolem constant of type `C` (a skolem represents an unknown value). + +This shortcoming means that classes cannot really be used for advanced +modularity constructs that rely on dependent typing. + +**Proposal:** Introduce a `tracked` modifier that can be added to +a `val` parameter of a class or trait. For every tracked class parameter of a class `C`, add a refinement in the constructor type of `C` that the class member is the same as the parameter. + +**Example:** In the setting above, assume `F` is instead declared like this: +```scala + class F(tracked val x: C): + val result: x.T = ... +``` +Then the constructor `F` would get roughly the following type: +```scala + F(x1: C): F { val x: x1.type } +``` +_Aside:_ More precisely, both parameter and refinement would apply to the same name `x` but the refinement still refers to the parameter. We unfortunately can't express that in source, however, so we chose the new name `x1` for the parameter in the explanation. + +With the new constructor type, the expression `F(y).result` would now have the type `Int`, as hoped for. The reasoning to get there is as follows: + + - The result of the constructor `F(y)` has type `F { val x: y.type }` by + the standard typing for dependent functions. + - The type of `result` inside `F` is `x.T`. + - Hence, the type of `result` as a member of `F { val x: y.type }` is `y.T`, which is equal to `Int`. + +The addition of tracked parameters makes classes suitable as a fundamental modularity construct supporting dependent typing. Here is an example, taken from issue #3920: + +```scala +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) +``` +This works as it should now. 
Without the addition of `tracked` to the +parameter of `SetFunctor` typechecking would immediately lose track of +the element type `T` after an `add`, and would therefore fail. + +**Syntax Change** + +``` +ClsParam ::= {Annotation} [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param +``` + +The (soft) `tracked` modifier is only allowed for `val` parameters of classes. + +**Discussion** + +Since `tracked` is so useful, why not assume it by default? First, `tracked` makes sense only for `val` parameters. If a class parameter is not also a field declared using `val` then there's nothing to refine in the constructor result type. One could think of at least making all `val` parameters tracked by default, but that would be a backwards incompatible change. For instance, the following code would break: + +```scala +case class Foo(x: Int) +var foo = Foo(1) +if someCondition then foo = Foo(2) +``` +If we assume `tracked` for parameter `x` (which is implicitly a `val`), +then `foo` would get inferred type `Foo { val x: 1 }`, so it could not +be reassigned to a value of type `Foo { val x: 2 }` on the next line. + +Another approach might be to assume `tracked` for a `val` parameter `x` +only if the class refers to a type member of `x`. But it turns out that this +scheme is unimplementable since it would quickly lead to cyclic references +when typechecking recursive class graphs. So an explicit `tracked` looks like the best available option. + +## Allow Class Parents to be Refined Types + +Since `tracked` parameters create refinements in constructor types, +it is now possible that a class has a parent that is a refined type. +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing to +admit such types. + +**Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. + +**Example** + +```scala +class C: + type T + def m(): T + +type R = C: + type T = Int + def m(): 22 + +class D extends R: + def next(): D +``` +This code now compiles. The definition of `D` is expanded as follows: + +```scala +class D extends C: + def next(): D + /*synthetic*/ type T = Int + /*synthetic*/ def m(): 22 +``` +Note how class refinements are moved from the parent constructor of `D` into the body of class `D` itself. + +This change does not entail a syntax change. Syntactically, parent types cannot be refined types themselves. So the following would be illegal: +```scala +class D extends C { type T = Int; def m(): 22 }: // error + def next(): D +``` +If a refined type should be used directly as a parent type of a class, it needs to come in parentheses: +```scala +class D extends (C { type T = Int; def m(): 22 }) // ok + def next(): D +``` + +## A Small Relaxation To Export Rules + +The rules for export forwarders are changed as follows. + +Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. Type aliases are left aside. + +This makes it possible to export the same type member into several traits and then mix these traits in the same class. The test file `tests/pos/typeclass-aggregates.scala` shows why this is essential if we want to combine multiple givens with type members in a new given that aggregates all these givens in an intersection type. + +The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. 
\ No newline at end of file diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md new file mode 100644 index 000000000000..a78e764bbe7d --- /dev/null +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -0,0 +1,790 @@ +--- +layout: doc-page +title: "Better Support for Type Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html +--- + +Martin Odersky, 8.1.2024, edited 5.4.2024 + +A type class in Scala is a pattern where we define + + - a trait with one type parameter (the _type class_) + - given instances at specific instantiations of that trait, + - using clauses or context bounds abstracting over that trait. + +Type classes as a pattern work overall OK, but if we compare them to native implementations in Haskell, or protocols in Swift, or traits in Rust, then there are some idiosyncrasies and rough corners which in the end make them +a bit cumbersome and limiting for standard generic programming patterns. Much has improved since Scala 2's implicits, but there is still some gap to bridge to get to parity with these languages. + +This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. + +The bulk of the suggested improvements has been implemented and is available +under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. The order of exposition described in this note is different from the planned proposals of SIPs. This doc is not a guide on how to sequence details, but instead wants to present a vision of what is possible. For instance, we start here with a feature (Self types and `is` syntax) that has turned out to be controversial and that will probably be proposed only late in the sequence of SIPs. + +## Generalizing Context Bounds + + The only place in Scala's syntax where the type class pattern is relevant is + in context bounds. A context bound such as + +```scala + def min[A: Ordering](x: List[A]): A +``` +requires that `Ordering` is a trait or class with a single type parameter (which makes it a type class) and expands to a `using` clause that instantiates that parameter. Here is the expansion of `min`: +```scala + def min[A](x: List[A])(using Ordering[A]): A +``` + +**Proposal** Allow type classes to define an abstract type member named `Self` instead of a type parameter. 
+ +**Example** + +```scala + trait Ord: + type Self + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + object Monoid: + def unit[M](using m: Monoid { type Self = M}): M + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + def reduce[A: Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) + + trait ParserCombinator: + type Self + type Input + type Result + extension (self: Self) + def parse(input: Input): Option[Result] = ... + + def combine[A: ParserCombinator, B: ParserCombinator { type Input = A.Input }] = ... +``` + +**Advantages** + + - Avoids repetitive type parameters, concentrates on what's essential, namely the type class hierarchy. + - Gives a clear indication of traits intended as type classes. A trait is a type class + if it has type `Self` as a member + - Allows to create aggregate type classes that combine givens via intersection types. + - Allows to use refinements in context bounds (the `combine` example above would be very awkward to express using the old way of context bounds expanding to type constructors). + +`Self`-based context bounds are a better fit for a dependently typed language like Scala than parameter-based ones. The main reason is that we are dealing with proper types, not type constructors. Proper types can be parameterized, intersected, or refined. This makes `Self`-based designs inherently more compositional than parameterized ones. + + + +**Details** + +When a trait has both a type parameter and an abstract `Self` type, we + resolve a context bound to the `Self` type. This allows type classes + that carry type parameters, as in + +```scala +trait Sequential[E]: + type Self +``` + +Here, +```scala +[S: Sequential[Int]] +``` +should resolve to: +```scala +[S](using Sequential[Int] { type Self = S }) +``` +and not to: +```scala +[S](using Sequential[S]) +``` + +**Discussion** + + Why not use `This` for the self type? The name `This` suggests that it is the type of `this`. But this is not true for type class traits. `Self` is the name of the type implementing a distinguished _member type_ of the trait in a `given` definition. `Self` is an established term in both Rust and Swift with the meaning used here. + + One possible objection to the `Self` based design is that it does not cover "multi-parameter" type classes. But neither do context bounds! "Multi-parameter" type classes in Scala are simply givens that can be synthesized with the standard mechanisms. Type classes in the strict sense abstract only over a single type, namely the implementation type of a trait. + + +## Auxiliary Type Alias `is` + +We introduce a standard type alias `is` in the Scala package or in `Predef`, defined like this: + +```scala + infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } +``` + +This makes writing instance definitions and using clauses quite pleasant. Examples: + +```scala + given Int is Ord ... + given Int is Monoid ... + + type Reader = [X] =>> Env => X + given Reader is Monad ... + + object Monoid: + def unit[M](using m: M is Monoid): M +``` + +(more examples will follow below) + + + +## Naming Context Bounds + +Context bounds are a convenient and legible abbreviation. 
A problem so far is that they are always anonymous;
+one cannot name the using parameter to which a context bound expands.
+
+For instance, consider a `reduce` method over `Monoid`s defined like this:
+
+```scala
+def reduce[A : Monoid](xs: List[A]): A = ???
+```
+Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`:
+```scala
+def reduce[A : Monoid](xs: List[A]): A =
+  xs.foldLeft(summon[Monoid[A]].unit)(_ `combine` _)
+```
+That's generally considered too painful to write and read; hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses:
+```scala
+def reduce[A](xs: List[A])(using m: Monoid[A]): A =
+  xs.foldLeft(m.unit)(_ `combine` _)
+```
+Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object:
+```scala
+  trait Monoid[A] extends SemiGroup[A]:
+    def unit: A
+  object Monoid:
+    def unit[A](using m: Monoid[A]): A = m.unit
+  ...
+  def reduce[A : Monoid](xs: List[A]): A =
+    xs.foldLeft(Monoid.unit)(_ `combine` _)
+```
+This is all accidental complexity which can be avoided by the following proposal.
+
+**Proposal:** Allow naming a context bound, like this:
+```scala
+  def reduce[A : Monoid as m](xs: List[A]): A =
+    xs.foldLeft(m.unit)(_ `combine` _)
+```
+
+We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before.
+
+**Benefits:** The new syntax is simple and clear.
+It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can.
+
+### New Syntax for Aggregate Context Bounds
+
+Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`.
+
+**Proposal:** Allow combining several context bounds inside `{...}`, analogous
+to import clauses. Example:
+
+```scala
+  trait A:
+    def showMax[X : {Ordering, Show}](x: X, y: X): String
+  class B extends A:
+    def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String =
+      show.asString(ordering.max(x, y))
+```
+
+The old syntax with multiple `:` should be phased out over time.
+
+**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers who don't know context bounds well.
+
+### Better Default Names for Context Bounds
+
+So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows:
+```scala
+  def reduce[A : Monoid](xs: List[A]) =
+    xs.foldLeft(A.unit)(_ `combine` _)
+```
+
+In Scala we are already familiar with using one name for two related things where one version names a type and the other an associated value. For instance, we use that convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before.
+
+**Proposed Rules**
+
+ 1. The generated evidence parameter for a context bound `A : C as a` has name `a`.
+ 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`.
+ 3.
If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. + +TODO: Present context bound proxy concept. + +The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: + + - They are needed to give names to multiple context bounds. + - They give an explanation what a single unnamed context bound expands to. + + +### Expansion of Context Bounds + +Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: +```scala +trait Parser[P]: + type Input + type Result +``` +Here is a method `run` that runs a parser on an input of the required type: + +```scala +def run[P : Parser](in: P.Input): P.Result +``` +Or, making clearer what happens by using an explicit name for the context bound: +```scala +def run[P : Parser as p](in: p.Input): p.Result +``` +With the current translation this does not work since it would be expanded to: +```scala + def run[P](x: p.Input)(using p: Parser[P]): p.Result +``` +Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. + +This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: + + 1. It would be a binary-incompatible change. + 2. Putting using clauses earlier can impair type inference. A type in + a using clause can be constrained by term arguments coming before that + clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. + +But there is an alternative which is feasible: + +**Proposal:** Map the context bounds of a method or class as follows: + + 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. + +**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. + +Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows +references to these parameters to be precise, so that information about dependent type members is preserved. 
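+
+To illustrate rule (1), here is a sketch (not normative compiler output) of how the earlier
+`run` method would be expanded under the proposed mapping:
+
+```scala
+// as written:
+def run[P : Parser as p](in: p.Input): p.Result
+
+// roughly expands to: the using clause generated from the context bound is placed
+// immediately before the first parameter clause that refers to `p`
+def run[P](using p: Parser[P])(in: p.Input): p.Result
+```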
+ + +## Context Bounds for Type Members + +It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. + +**Proposal**: Allow context bounds for type members. Example: + +```scala + class Collection: + type Element : Ord +``` + +The question is how these bounds are expanded. Context bounds on type parameters +are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. + +**Proposal:** Introduce a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation: +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using A: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = A // i.e. the A defined by the using clause +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +**Benefits:** + + - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. + - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. + +**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. +It is a concrete definition where the compiler will provide the correct implementation. + +## New Given Syntax + +A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. + +When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. 
There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: +```scala +given [A](using Ord[A]): Ord[List[A]] with + def compare(x: List[A], y: List[A]) = ... +``` +The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need for `with` sticks out like a sore thumb. + +We arrived at that syntax not because of a flight of fancy but because, even after trying for about a year to find other solutions, it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. + +Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. The current syntax is unnatural and esoteric: it is hard to discover, and it remains foreign even once discovered. This makes givens much harder to learn and apply than they need be. + +Things become much simpler if we instead introduce the optional name with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +```scala +given String is Ord: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` +Here, the second given can be read as: if `A` is an `Ord`, then `List[A]` is also an `Ord`. Or: for all `A: Ord`, `List[A]` is `Ord`. The arrow can be seen as an implication; note also the analogy to pattern matching syntax. + +If explicit names are desired, we add them with `as` clauses: +```scala +given String is Ord as stringOrd: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord as listOrd: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid as intMonoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` + +The underlying principles are: + + - A `given` clause consists of the following elements: + + - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, + - the implemented _type_, + - an optional name binding using `as`, + - an implementation which consists of either an `=` and an expression, + or a template body. + + - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before. + +This will be a fairly significant change to the given syntax. I believe it is still possible to do this: not much code has migrated to new-style givens yet, and code that has can be changed fairly easily.
Specifically, there are about 900K definitions of `implicit def`s +in Scala code on GitHub and about 10K definitions of `given ... with`. So about 1% of these definitions use the Scala 3 syntax, and only that code would have to be changed again. + +Changing something introduced just recently in Scala 3 is not fun, +but I believe these adjustments are preferable to letting bad syntax +sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code +starts migrating. + +Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. + + +### Abolish Abstract Givens + +Another simplification is possible. So far we have special syntax for abstract givens: +```scala +given x: T +``` +The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. For instance, +consider a given that constructs a type tag: +```scala +class Tag[T] +``` +Then this works: +```scala +given Tag[String]() +given Tag[String] with {} +``` +But the following more natural syntax fails: +```scala +given Tag[String] +``` +The last line gives a rather cryptic error: +``` +1 |given Tag[String] + | ^ + | anonymous given cannot be abstract +``` +The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding an empty +`()` argument list to a class cause such a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. + +Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people worldwide have used abstract givens in anger so far. + +**Proposal:** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. + +This is less of a disruption than it might appear at first (see the sketch after the benefits list below): + + - `given T` was illegal before since abstract givens could not be anonymous. + It now means a concrete given of class `T` with no member definitions. + - `given x: T` is legacy syntax for an abstract given. + - `given T as x = deferred` is the analogous new syntax, which is more powerful since + it allows for automatic instantiation. + - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. + +**Benefits:** + + - Simplification of the language, since a feature is dropped. + - Elimination of non-obvious and misleading syntax.
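+
+To make the correspondence above concrete, here is a small sketch contrasting the two forms, reusing the `Tag` class from before (the trait names are made up for illustration, and the second form assumes the experimental `modularity` language import together with the new given syntax):
+```scala
+trait Tagged:
+  given tag: Tag[String]                           // legacy: named abstract given
+
+trait TaggedNew:
+  // new form: a deferred given; implementing classes either override it
+  // explicitly or get a definition synthesized by a given search in their scope
+  given Tag[String] as tag = compiletime.deferred
+```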
+ + +### Bonus: Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). + +### Bonus: Precise Typing + +This approach also presents a solution to the problem how to express precise type variables. We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +Like a `Singleton` bound, a `Precise` bound disables automatic widening of singleton types or union types in inferred instances of type variable `X`. But there is no requirement that the type argument _must_ be a singleton. + + +## Summary of Syntax Changes + +Here is the complete context-free syntax for all proposed features. +Overall the syntax for givens becomes a lot simpler than what it was before. + +``` +TmplDef ::= 'given' GivenDef +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType {id [nl] AnnotType} + +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] +``` + + + +## Examples + + +### Example 1 + +Here are some standard type classes, which were mostly already introduced at the start of this note, now with associated instance givens and some test code: + +```scala + // Type classes + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] // Here, Self is a type constructor with parameter A + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + // Instances + + given Int is Ord: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = + (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad: + extension 
[A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad: + extension [A](r: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + // Usages + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) +``` +The `Reader` type is a bit hairy. It is a type class (written in the parameterized syntax) where we fix a context `Ctx` and then let `Reader` be the polymorphic function type over `X` that takes a context `Ctx` and returns an `X`. Type classes like this are commonly used in monadic effect systems. + + +### Example 2 + +The following contributed code by @LPTK (issue #10929) did _not_ work at first since +references were not tracked correctly. The version below adds explicit tracked parameters which makes the code compile. +```scala +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: tup.map(x.tail)(f) +``` + +Note the quite convoluted syntax, which makes the code hard to understand. Here is the same example in the new type class syntax, which also compiles correctly: +```scala +//> using options -language:experimental.modularity -source future + +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: Rest.map(x.tail)(f) +``` +Note in particular the following points: + + - In the original code, it was not clear that `TupleOf` is a type class, + since it contained two type parameters, one of which played the role + of the instance type `Self`. The new version is much clearer: `TupleOf` is + a type class over `Self` with one additional parameter, the common type of all tuple elements. + - The two given definitions are obfuscated in the old code. Their version + in the new code makes it clear what kind of instances they define: + + - `EmptyTuple` is a tuple of `Nothing`. + - if `Rest` is a tuple of `A`, then `A *: Rest` is also a tuple of `A`. + + - There's no need to introduce names for parameter instances in using clauses; the default naming scheme for context bound evidences works fine, and is more concise. + - There's no need to manually declare implicit parameters as `tracked`, + context bounds provide that automatically. 
+ + - Everything in the new code feels like idiomatic Scala 3, whereas the original code exhibits the awkward corner case that requires a trailing `with` +in given definitions. + +### Example 3 + +Dimi Racordon tried to [define parser combinators](https://users.scala-lang.org/t/create-an-instance-of-a-type-class-with-methods-depending-on-type-members/9613) in Scala that use dependent type members for inputs and results. It was intended as a basic example of type class constraints, but it did not work in current Scala. + +Here is the problem solved with the new syntax. Note how much clearer that syntax is compared to Dimi's original version, which did not work out in the end. + +```scala +/** A parser combinator */ +trait Combinator: + type Self + + type Input + type Result + + extension (self: Self) + /** Parses and returns an element from input `in` */ + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](a: A, b: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for + x <- self.a.parse(in) + y <- self.b.parse(in) + yield (x, y) +``` +The example is now expressed as straightforwardly as it should be: + + - `Combinator` is a type class with two associated types, `Input` and `Result`, and a `parse` method. + - `Apply` and `Combine` are two data constructors representing parser combinators. They are declared to be `Combinator`s in the two subsequent `given` declarations. + - `Apply`'s `parse` method applies the `action` function to the input. + - `Combine[A, B]` is a parser combinator provided `A` and `B` are parser combinators + that process the same type of `Input`, which is also the input type of + `Combine[A, B]`. Its `Result` type is a pair of the `Result` types of `A` and `B`. + Results are produced by a simple for-expression. + +Compared to the original example, which required serious contortions, this is now all completely straightforward. + +_Note 1:_ One could also explore improvements, for instance making this purely functional. But that's not the point of the demonstration here, where I wanted +to take the original example and show how it can be made to work with the new constructs, and be expressed more clearly as well. + +_Note 2:_ One could improve the notation even further by adding equality constraints in the style of Swift, which in turn resemble the _sharing constraints_ of SML. A hypothetical syntax applied to the second given would be: +```scala +given [A: Combinator, B: Combinator with A.Input == B.Input] + => Combine[A, B] is Combinator: +``` +This variant is aesthetically pleasing since it makes the equality constraint symmetric. The original version had to use an asymmetric refinement on the second type parameter bound instead. For now, such constraints are neither implemented nor proposed. This is left as a possibility for future work. Note also the analogy with +the work of @mbovel and @Sporarum on refinement types, where similar `with` clauses can appear for term parameters. If that work goes ahead, we could possibly revisit the issue of `with` clauses also for type parameters.
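+
+Returning to the combinators themselves, here is a small usage sketch, assuming the definitions above are in scope (the parsers `digit` and `pair` are made up for illustration):
+```scala
+val digit = Apply[String, Char](s => s.headOption.filter(_.isDigit))
+val pair  = Combine(digit, digit)
+
+val r1: Option[Char]         = digit.parse("42")  // Some('4'), via the Apply instance
+val r2: Option[(Char, Char)] = pair.parse("42")   // Some(('4', '4')), via the Combine instance
+```
+Note that with these toy definitions both sub-parsers of a `Combine` run on the same input; the example is about expressing the constraints, not about realistic parsing.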
+ +### Example 4 + +Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga/scala-hylolib) of the type-class-based [Hylo standard library](https://github.com/hylo-lang/hylo/tree/main/StandardLibrary/Sources) to Scala. It worked to some degree, but there were some things that could not be expressed, and more things that could be expressed only awkwardly. + +With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details. + +## Suggested Improvement unrelated to Type Classes + +The following improvement would make sense alongside the suggested changes to type classes. But it does not form part of this proposal and is not yet implemented. + + +### Using `as` also in Patterns + +Since we now have more precedents of `as` as a postfix binder, I want to come back to the proposal to use it in patterns as well, in place of `@`, which should be deprecated. + +Examples: + +```scala + xs match + case (Person(name, age) as p) :: rest => ... + + tp match + case Param(tl, _) :: _ as tparams => ... + + val x :: xs1 as xs = ys.checkedCast +``` + +These would replace the previous syntax using `@`: + +```scala + xs match + case p @ Person(name, age) :: rest => ... + + tp match + case tparams @ (Param(tl, _) :: _) => ... + + val xs @ (x :: xs1) = ys.checkedCast +``` +**Advantages:** No unpronounceable and non-standard symbol like `@`. More regularity. + +Generally, we want to use `as name` to attach a name to some entity that could also have been used stand-alone. + +**Proposed Syntax Change** + +``` +Pattern2 ::= InfixPattern ['as' id] +``` + +## Summary + +I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type-class-based generic programming. The proposed changes are: + + 1. Allow context bounds over classes that define a `Self` member type. + 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. + 1. Add a new `{...}` syntax for multiple context bounds. + 1. Make context bounds also available for type members, which expand into a new form of deferred given. Phase out the previous abstract givens in favor of the new form. + 1. Add a predefined type alias `is`. + 1. Introduce a new, cleaner syntax for given definitions. + +It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds, +alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging. + +## Conclusion + +Generic programming can be expressed in a number of languages: for instance, with +type classes in Haskell, with traits in Rust, with protocols in Swift, or with concepts in C++. Each of these is constructed from a fairly heavyweight set of new constructs, different from expressions and types. By contrast, equivalent solutions in Scala rely on regular types. Type classes are simply traits that define a `Self` type member. + +The proposed scheme has similar expressiveness to protocols in Swift or traits in Rust.
Both of these were largely influenced by Jeremy Siek's PhD thesis "[A language for generic programming](https://scholarworks.iu.edu/dspace/handle/2022/7067)", which was first proposed as a way to implement concepts in C++. C++ did not follow Siek's approach, but Swift and Rust did. + +In Siek's thesis and in the formal treatments of Rust and Swift, + type class concepts are explained by mapping them to a lower-level language of explicit dictionaries with representations for terms and types. Crucially, that lower level is not expressible without loss of granularity in the source language itself, since type representations are mapped to term dictionaries. By contrast, the current proposal expands type class concepts into other well-typed Scala constructs, which ultimately map into well-typed DOT programs. Type classes are simply a convenient notation for something that can already be expressed in Scala. In that sense, we stay true to the philosophy of a _scalable language_, where a small core can support a large range of advanced use cases. + diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md index 98e9a7d3d711..e21d369b6b5e 100644 --- a/docs/_docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -37,7 +37,12 @@ final def print(bits: BitMap): Unit = printUnit.print(bits) final type PrinterType = printUnit.PrinterType ``` -They can be accessed inside `Copier` as well as from outside: +With the experimental `modularity` language import, only exported methods and values are final, whereas the generated `PrinterType` would be a simple type alias: +```scala + type PrinterType = printUnit.PrinterType +``` + +These aliases can be accessed inside `Copier` as well as from outside: ```scala val copier = new Copier @@ -90,12 +95,17 @@ export O.* ``` Export aliases copy the type and value parameters of the members they refer to. -Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: +Export aliases of term members are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: - - Export aliases cannot be overridden, since they are final. + - Export aliases of methods or fields cannot be overridden, since they are final. - Export aliases cannot override concrete members in base classes, since they are not marked `override`. - However, export aliases can implement deferred members of base classes. + - Export type aliases are normally also final, except when the experimental + language import `modularity` is present. The general + rules for type aliases ensure in any case that if there are several type aliases in a class, + they must agree on their right hand sides, or the class could not be instantiated. + So dropping the `final` for export type aliases is safe.
Export aliases for public value definitions that are accessed without referring to private values in the qualifier path diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index ae541b65d8c4..66cf5a18fac9 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -200,8 +200,8 @@ SimpleType ::= SimpleLiteral | Singleton ‘.’ ‘type’ | ‘(’ [Types] ‘)’ | Refinement - | SimpleType1 TypeArgs - | SimpleType1 ‘#’ id + | SimpleType TypeArgs + | SimpleType ‘#’ id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id @@ -392,7 +392,7 @@ LocalModifier ::= ‘abstract’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ -Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} +Annotation ::= ‘@’ SimpleType {ParArgumentExprs} Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} @@ -444,6 +444,7 @@ ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= AnnotType {id [nl] AnnotType} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods @@ -453,7 +454,7 @@ ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef Template ::= InheritClauses [TemplateBody] InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) -ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrApp ::= SimpleType {Annotation} {ParArgumentExprs} ConstrExpr ::= SelfInvocation | <<< SelfInvocation {semi BlockStat} >>> SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} diff --git a/docs/sidebar.yml b/docs/sidebar.yml index b38e057f06b1..efdab80595a6 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -155,6 +155,8 @@ subsection: - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md - page: reference/experimental/named-tuples.md + - page: reference/experimental/modularity.md + - page: reference/experimental/typeclasses.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/library/src/scala/Precise.scala b/library/src/scala/Precise.scala new file mode 100644 index 000000000000..aad42ca8950f --- /dev/null +++ b/library/src/scala/Precise.scala @@ -0,0 +1,11 @@ +package scala +import annotation.experimental +import language.experimental.erasedDefinitions + +/** A type class-like trait intended as a context bound for type variables. + * If we have `[X: Precise]`, instances of the type variable `X` are inferred + * in precise mode. This means that singleton types and union types are not + * widened. + */ +@experimental erased trait Precise: + type Self diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala new file mode 100644 index 000000000000..3921c2083617 --- /dev/null +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -0,0 +1,54 @@ +package scala.annotation +package internal + +/** An annotation that is used for marking type definitions that should get + * context bound companions. The scheme is as follows: + * + * 1. 
When desugaring a context-bounded type A, add a @WitnessNames(n_1, ... , n_k) + * annotation to the type declaration node, where n_1, ..., n_k are the names of + * all the witnesses generated for the context bounds of A. This annotation will + * be pickled as usual. + * + * 2. During Namer or Unpickling, when encountering a type declaration A with + * a WitnessNames(n_1, ... , n_k) annotation, create a CB companion `val A` with + * type ``[ref_1 | ... | ref_k] where ref_i is a TermRef + * with the same prefix as A and name n_i. Except, don't do this if the type in + * question is a type parameter and there is already a term parameter with name A + * defined for the same method. + * + * ContextBoundCompanion is defined as an internal abstract type like this: + * + * type ``[-Refs] + * + * The context bound companion's variance is negative, so that unions in the + * arguments are joined when encountering multiple definfitions and forming a glb. + * + * 3. Add a special case for typing a selection A.m on a value A of type + * ContextBoundCompanion[ref_1, ..., ref_k]. Namely, try to typecheck all + * selections ref_1.m, ..., ref_k.m with the expected type. There must be + * a unique selection ref_i.m that typechecks and such that for all other + * selections ref_j.m that also typecheck one of the following three criteria + * applies: + * + * 1. ref_i.m and ref_j.m are the same. This means: If they are types then + * ref_i.m is an alias of ref_j.m. If they are terms then they are both + * singleton types and ref_i.m =:= ref_j.m. + * 2. The underlying type (under widen) of ref_i is a true supertype of the + * underlying type of ref_j. + * 3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype + * of the underlying type of ref_i, and the underlying type ref_i.m is a + * strict subtype of the underlying type of ref_j.m. + * + * If there is such a selection, map A.m to ref_i.m, otherwise report an error. + * + * (2) might surprise. It is the analogue of given disambiguation, where we also + * pick the most general candidate that matches the expected type. E.g. we have + * context bounds for Functor, Monad, and Applicable. In this case we want to + * select the `map` method of `Functor`. + * + * 4. At PostTyper, issue an error when encountering any reference to a CB companion. + */ +@experimental +class WitnessNames(names: String*) extends StaticAnnotation + + diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index 3eca997554a0..a3896a1eeb06 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -1,7 +1,7 @@ package scala package compiletime -import annotation.compileTimeOnly +import annotation.{compileTimeOnly, experimental} /** Use this method when you have a type, do not have a value for it but want to * pattern match on it. For example, given a type `Tup <: Tuple`, one can @@ -42,6 +42,20 @@ def erasedValue[T]: T = erasedValue[T] @compileTimeOnly("`uninitialized` can only be used as the right hand side of a mutable field definition") def uninitialized: Nothing = ??? +/** Used as the right hand side of a given in a trait, like this + * + * ``` + * given T = deferred + * ``` + * + * This signifies that the given will get a synthesized definition in all classes + * that implement the enclosing trait and that do not contain an explicit overriding + * definition of that given. 
+ */ +@experimental +@compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") +def deferred: Nothing = ??? + /** The error method is used to produce user-defined compile errors during inline expansion. * If an inline expansion results in a call error(msgStr) the compiler produces an error message containing the given msgStr. * diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 7abd92e408f8..77b014b80466 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -66,4 +66,18 @@ object Predef: extension (opt: Option.type) @experimental inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf[Option[T]] + + /** A type supporting Self-based type classes. + * + * A is TC + * + * expands to + * + * TC { type Self = A } + * + * which is what is needed for a context bound `[A: TC]`. + */ + @experimental + infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } + end Predef diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index b2bd4b791423..02c4a99bbbcf 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -98,6 +98,18 @@ object language: @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") object namedTuples + /** Experimental support for new features for better modularity, including + * - better tracking of dependencies through classes + * - better usability of context bounds + * - better syntax and conventions for type classes + * - ability to merge exported types in intersections + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/typeclasses]] + */ + @compileTimeOnly("`modularity` can only be used at compile time in import statements") + object modularity + /** Was needed to add support for relaxed imports of extension methods. * The language import is no longer needed as this is now a standard feature since SIP was accepted. 
* @see [[http://dotty.epfl.ch/docs/reference/contextual/extension-methods]] diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index e470f492657c..8ce7cdce4382 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -898,7 +898,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { | import quotes.reflect.* | Type.of[T] match | case '[f] => - | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:21)>>)*/ + | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:23)>>)*/ |""".stripMargin ) diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 40a3918b5943..18d2e985f844 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -18,6 +18,10 @@ object MiMaFilters { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), ), // Additions since last LTS @@ -94,7 +98,8 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core Build.mimaPreviousDottyVersion -> Seq( - ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype"), + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ), // Additions since last LTS diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 164243d3b469..c29ea99bcd8d 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -228,6 +228,7 @@ Standard-Section: "ASTs" TopLevelStat* EXPORTED -- An export forwarder OPEN -- an open class INVISIBLE -- invisible during typechecking + TRACKED -- a tracked class parameter / a dependent class Annotation Variance = STABLE -- invariant @@ -509,6 +510,7 @@ object TastyFormat { final val INVISIBLE = 44 final val EMPTYCLAUSE = 45 final val SPLITCLAUSE = 46 + final val TRACKED = 47 // Tree Cat. 
2: tag Nat final val firstNatTreeTag = SHAREDterm @@ -700,7 +702,8 @@ object TastyFormat { | INVISIBLE | ANNOTATION | PRIVATEqualified - | PROTECTEDqualified => true + | PROTECTEDqualified + | TRACKED => true case _ => false } diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check new file mode 100644 index 000000000000..560561e0e261 --- /dev/null +++ b/tests/neg/cb-companion-leaks.check @@ -0,0 +1,66 @@ +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- +9 | def foo[A: {C, D}] = A // error + | ^ + | context bound companion value A cannot be used as a value + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + --------------------------------------------------------------------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- +13 | val x = A // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- +15 | val y: A.type = ??? 
// error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala new file mode 100644 index 000000000000..07155edb05dc --- /dev/null +++ b/tests/neg/cb-companion-leaks.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future -explain + +class C[Self] + +class D[Self] + +trait Test: + + def foo[A: {C, D}] = A // error + + type A: C + + val x = A // error + + val y: A.type = ??? // error + diff --git a/tests/neg/deferred-givens-2.check b/tests/neg/deferred-givens-2.check new file mode 100644 index 000000000000..4a29141cc48b --- /dev/null +++ b/tests/neg/deferred-givens-2.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:17:6 ----------------------------------------------------------- +17 |class SortedIntWrong1 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong1.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +18 | type Element = Int +19 | override given (Element is Ord)() +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:21:6 ----------------------------------------------------------- +21 |class SortedIntWrong2 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong2.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +22 | type Element = Int +23 | override given (Int is Ord)() diff --git a/tests/neg/deferred-givens-2.scala b/tests/neg/deferred-givens-2.scala new file mode 100644 index 000000000000..4e75ceb08728 --- /dev/null +++ b/tests/neg/deferred-givens-2.scala @@ -0,0 +1,23 @@ +//> using options -language:experimental.modularity -source future +trait Ord: + type Self + +trait Sorted: + type Element: Ord + +object Scoped: + given (Int is Ord)() + class SortedIntCorrect extends Sorted: + type Element = Int + +class SortedIntCorrect2 extends Sorted: + type Element = Int + override given (Int is Ord)() as given_Ord_Element + +class SortedIntWrong1 extends Sorted: // error + type Element = Int + override given (Element is Ord)() + +class SortedIntWrong2 extends Sorted: // error + type Element = Int + override given (Int is Ord)() \ No newline at end of file diff --git a/tests/neg/deferred-givens.check b/tests/neg/deferred-givens.check new file mode 100644 index 000000000000..cc15901d087f --- /dev/null +++ 
b/tests/neg/deferred-givens.check @@ -0,0 +1,13 @@ +-- [E172] Type Error: tests/neg/deferred-givens.scala:11:6 ------------------------------------------------------------- +11 |class B extends A // error + |^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- [E172] Type Error: tests/neg/deferred-givens.scala:13:15 ------------------------------------------------------------ +13 |abstract class C extends A // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- Error: tests/neg/deferred-givens.scala:26:8 ------------------------------------------------------------------------- +26 | class E extends A2 // error, can't summon polymorphic given + | ^^^^^^^^^^^^^^^^^^ + | Cannnot infer the implementation of the deferred given instance given_Ctx3_T in trait A2 + | since that given is parameterized. An implementing given needs to be written explicitly. diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala new file mode 100644 index 000000000000..7ff67d784714 --- /dev/null +++ b/tests/neg/deferred-givens.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +class Ctx +class Ctx2 + +trait A: + given Ctx as ctx = deferred + given Ctx2 = deferred + +class B extends A // error + +abstract class C extends A // error + +class D extends A: + given Ctx as ctx = Ctx() // ok, was implemented + given Ctx2 = Ctx2() // ok + +class Ctx3[T] + +trait A2: + given [T] => Ctx3[T] = deferred + +object O: + given [T] => Ctx3[T] = Ctx3[T]() + class E extends A2 // error, can't summon polymorphic given + +class E extends A2: + given [T] => Ctx3[T] = Ctx3[T]() // ok + diff --git a/tests/neg/deferredSummon.check b/tests/neg/deferredSummon.check new file mode 100644 index 000000000000..bd76ad73467e --- /dev/null +++ b/tests/neg/deferredSummon.check @@ -0,0 +1,17 @@ +-- Error: tests/neg/deferredSummon.scala:4:26 -------------------------------------------------------------------------- +4 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:7:26 -------------------------------------------------------------------------- +7 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:12:16 ------------------------------------------------------------------------- +12 | given Int = deferred // error + | ^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:16:14 ------------------------------------------------------------------------- +16 | given Int = defered // error + | ^^^^^^^ + |`deferred` can only be used as the right hand side of a given definition in a trait. + |Note that `deferred` can only be used under its own name when implementing a given in a trait; `defered` is not accepted. 
diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala new file mode 100644 index 000000000000..cddde82535fb --- /dev/null +++ b/tests/neg/deferredSummon.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity + +object Test: + given Int = compiletime.deferred // error + +abstract class C: + given Int = compiletime.deferred // error + +trait A: + import compiletime.deferred + locally: + given Int = deferred // error + +trait B: + import compiletime.deferred as defered + given Int = defered // error + + + diff --git a/tests/neg/i0248-inherit-refined.check b/tests/neg/i0248-inherit-refined.check new file mode 100644 index 000000000000..4e14c3c6f14b --- /dev/null +++ b/tests/neg/i0248-inherit-refined.check @@ -0,0 +1,12 @@ +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:8:18 ------------------------------------------------------- +8 | class C extends Y // error + | ^ + | test.A & test.B is not a class type + | + | longer explanation available when compiling with `-explain` +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:10:18 ------------------------------------------------------ +10 | class D extends Z // error + | ^ + | test.A | test.B is not a class type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i0248-inherit-refined.scala b/tests/neg/i0248-inherit-refined.scala index 97b6f5cdab73..f7cd6375afc9 100644 --- a/tests/neg/i0248-inherit-refined.scala +++ b/tests/neg/i0248-inherit-refined.scala @@ -1,10 +1,12 @@ +//> using options -source future -language:experimental.modularity + object test { class A { type T } type X = A { type T = Int } - class B extends X // error + class B extends X // was error, now OK type Y = A & B class C extends Y // error type Z = A | B class D extends Z // error - abstract class E extends ({ val x: Int }) // error + abstract class E extends ({ val x: Int }) // was error, now OK } diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index ccc2b9f7ed00..55806fa5ca1b 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -2,7 +2,3 @@ 2 | given inline x: Int = 0 // error | ^ | 'with' expected, but identifier found --- [E040] Syntax Error: tests/neg/i12348.scala:3:10 -------------------------------------------------------------------- -3 |} // error - | ^ - | '}' expected, but eof found diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 69fc77fb532e..bd8bf63994e6 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,3 +1,2 @@ object A { given inline x: Int = 0 // error -} // error \ No newline at end of file diff --git a/tests/neg/i12456.scala b/tests/neg/i12456.scala index b9fb0283dcd7..c1a3ada5a420 100644 --- a/tests/neg/i12456.scala +++ b/tests/neg/i12456.scala @@ -1 +1 @@ -object F { type T[G[X] <: X, F <: G[F]] } // error // error +object F { type T[G[X] <: X, F <: G[F]] } // error diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index a80e8b2b289b..998c54292b15 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -8,7 +8,7 @@ object Test: type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType4[X <: Option] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ 
=> Int type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded diff --git a/tests/neg/i3964.scala b/tests/neg/i3964.scala new file mode 100644 index 000000000000..eaf3953bc230 --- /dev/null +++ b/tests/neg/i3964.scala @@ -0,0 +1,12 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test1: + + abstract class Bar { val x: Animal } + val bar: Bar { val x: Cat } = new Bar { val x = new Cat } // error, but should work + + trait Foo { val x: Animal } + val foo: Foo { val x: Cat } = new Foo { val x = new Cat } // error, but should work diff --git a/tests/neg/i7045.scala b/tests/neg/i7045.scala new file mode 100644 index 000000000000..b4c6d60cd35a --- /dev/null +++ b/tests/neg/i7045.scala @@ -0,0 +1,7 @@ +trait Bar { type Y } +trait Foo { type X } + +class Test: + given a1(using b: Bar): Foo = new Foo { type X = b.Y } // ok + given a2(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } // ok + given a3(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } // error \ No newline at end of file diff --git a/tests/neg/i9328.scala b/tests/neg/i9328.scala index dabde498e1dc..c13d33e103b9 100644 --- a/tests/neg/i9328.scala +++ b/tests/neg/i9328.scala @@ -3,7 +3,7 @@ type Id[T] = T match { case _ => T } -class Foo2[T <: Id[T]] // error // error +class Foo2[T <: Id[T]] // error object Foo { // error object Foo { } diff --git a/tests/neg/i9330.scala b/tests/neg/i9330.scala index ca25582ef7e8..6ba57c033473 100644 --- a/tests/neg/i9330.scala +++ b/tests/neg/i9330.scala @@ -1,4 +1,4 @@ val x = { - () == "" // error + () == "" implicit def foo[A: A] // error // error // error } diff --git a/tests/neg/parent-refinement-access.check b/tests/neg/parent-refinement-access.check new file mode 100644 index 000000000000..5cde9d51558f --- /dev/null +++ b/tests/neg/parent-refinement-access.check @@ -0,0 +1,7 @@ +-- [E164] Declaration Error: tests/neg/parent-refinement-access.scala:6:6 ---------------------------------------------- +6 |trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error + | ^ + | error overriding value x in trait Year2 of type Int; + | value x in trait Gen of type Any has weaker access privileges; it should be public + | (Note that value x in trait Year2 of type Int is abstract, + | and is therefore overridden by concrete value x in trait Gen of type Any) diff --git a/tests/neg/parent-refinement-access.scala b/tests/neg/parent-refinement-access.scala new file mode 100644 index 000000000000..57d45f4fb201 --- /dev/null +++ b/tests/neg/parent-refinement-access.scala @@ -0,0 +1,6 @@ +//> using options -source future -language:experimental.modularity + +trait Gen: + private[Gen] val x: Any = () + +trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error diff --git a/tests/neg/parent-refinement.check b/tests/neg/parent-refinement.check index 550430bd35a7..cf9a57bc7821 100644 --- a/tests/neg/parent-refinement.check +++ b/tests/neg/parent-refinement.check @@ -1,4 +1,25 @@ --- Error: tests/neg/parent-refinement.scala:5:2 ------------------------------------------------------------------------ -5 | with Ordered[Year] { // error - | ^^^^ - | end of toplevel definition expected but 'with' found +-- Error: tests/neg/parent-refinement.scala:11:6 ----------------------------------------------------------------------- +11 |class Bar extends IdOf[Int], (X { type Value = String }) // error + | ^^^ + |class 
Bar cannot be instantiated since it has a member Value with possibly conflicting bounds Int | String <: ... <: Int & String +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:15:17 ------------------------------------------------- +15 | val x: Value = 0 // error + | ^ + | Found: (0 : Int) + | Required: Baz.this.Value + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:21:6 -------------------------------------------------- +21 | foo(2) // error + | ^ + | Found: (2 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:17:22 ------------------------------------------------- +17 |val x: IdOf[Int] = Baz() // error + | ^^^^^ + | Found: Baz + | Required: IdOf[Int] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/parent-refinement.scala b/tests/neg/parent-refinement.scala index ca2b88a75fd8..868747faba57 100644 --- a/tests/neg/parent-refinement.scala +++ b/tests/neg/parent-refinement.scala @@ -1,7 +1,21 @@ +//> using options -source future -language:experimental.modularity trait Id { type Value } +trait X { type Value } +type IdOf[T] = Id { type Value = T } + case class Year(value: Int) extends AnyVal - with Id { type Value = Int } - with Ordered[Year] { // error + with (Id { type Value = Int }) + with Ordered[Year] + +class Bar extends IdOf[Int], (X { type Value = String }) // error + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = 0 // error + +val x: IdOf[Int] = Baz() // error -} \ No newline at end of file +object Clash extends ({ def foo(x: Int): Int }): + def foo(x: Boolean): Int = 1 + foo(2) // error diff --git a/tests/neg/parser-stability-12.scala b/tests/neg/parser-stability-12.scala index 78ff178d010c..17a611d70e34 100644 --- a/tests/neg/parser-stability-12.scala +++ b/tests/neg/parser-stability-12.scala @@ -1,4 +1,4 @@ trait x0[]: // error - trait x1[x1 <:x0] // error: type x0 takes type parameters + trait x1[x1 <:x0] extends x1[ // error // error \ No newline at end of file diff --git a/tests/neg/singleton-ctx-bound.check b/tests/neg/singleton-ctx-bound.check new file mode 100644 index 000000000000..785123c0e680 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.check @@ -0,0 +1,34 @@ +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:7:5 ------------------------------------------------- +7 | f1(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:12:5 ------------------------------------------------ +12 | f2(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:13:26 -------------------------------------------------------- +13 | f2(if ??? then 1 else 2) // error + | ^ + |No given instance of type (1 : Int) | (2 : Int) is Singleton was found for parameter x$2 of method f2 in object Test. 
Failed to synthesize an instance of type (1 : Int) | (2 : Int) is Singleton: (1 : Int) | (2 : Int) is not a singleton +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:17:5 ------------------------------------------------ +17 | f3(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:18:26 -------------------------------------------------------- +18 | f3(if ??? then 1 else 2) // error + | ^ + |No given instance of type Singleton{type Self = (1 : Int) | (2 : Int)} was found for a context parameter of method f3 in object Test. Failed to synthesize an instance of type Singleton{type Self = (1 : Int) | (2 : Int)}: (1 : Int) | (2 : Int) is not a singleton +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:33:6 --------------------------------------------------------- +33 |class D extends A: // error + |^ + |No given instance of type Singleton{type Self = D.this.Elem} was found for inferring the implementation of the deferred given instance given_Singleton_Elem in trait A. Failed to synthesize an instance of type Singleton{type Self = D.this.Elem}: D.this.Elem is not a singleton +34 | type Elem = Int diff --git a/tests/neg/singleton-ctx-bound.scala b/tests/neg/singleton-ctx-bound.scala new file mode 100644 index 000000000000..e061ec54bb16 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.scala @@ -0,0 +1,35 @@ +//> using options -language:experimental.modularity -source future +object Test: + + def someInt = 1 + + def f1[T <: Singleton](x: T): T = x + f1(someInt) // error + f1(if ??? then 1 else 2) // OK, but should be error + f1(3 * 2) // OK + + def f2[T](x: T)(using T is Singleton): T = x + f2(someInt) // error + f2(if ??? then 1 else 2) // error + f2(3 * 2) // OK + + def f3[T: Singleton](x: T): T = x + f3(someInt) // error + f3(if ??? 
then 1 else 2) // error + f3(3 * 2) // OK + f3(6) // OK + +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 // OK + +class C[X: Singleton] extends A: + type Elem = X // OK + +class D extends A: // error + type Elem = Int + diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check new file mode 100644 index 000000000000..ae734e7aa0b4 --- /dev/null +++ b/tests/neg/tracked.check @@ -0,0 +1,50 @@ +-- Error: tests/neg/tracked.scala:2:16 --------------------------------------------------------------------------------- +2 |class C(tracked x: Int) // error + | ^ + | `val` or `var` expected +-- [E040] Syntax Error: tests/neg/tracked.scala:7:18 ------------------------------------------------------------------- +7 | def foo(tracked a: Int) = // error + | ^ + | ':' expected, but identifier found +-- Error: tests/neg/tracked.scala:8:12 --------------------------------------------------------------------------------- +8 | tracked val b: Int = 2 // error + | ^^^ + | end of statement expected but 'val' found +-- Error: tests/neg/tracked.scala:11:10 -------------------------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^ + | end of statement expected but 'object' found +-- Error: tests/neg/tracked.scala:14:10 -------------------------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^ + | end of statement expected but 'class' found +-- Error: tests/neg/tracked.scala:17:10 -------------------------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^ + | end of statement expected but 'type' found +-- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- +20 | given g2(using tracked val x: Int): C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- +4 |class C2(tracked var x: Int) // error + | ^ + | mutable variables may not be `tracked` +-- [E006] Not Found Error: tests/neg/tracked.scala:11:2 ---------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:14:2 ---------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:17:2 ---------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/tracked.scala b/tests/neg/tracked.scala new file mode 100644 index 000000000000..8d315a7b89ac --- /dev/null +++ b/tests/neg/tracked.scala @@ -0,0 +1,20 @@ +//> using options -source future -language:experimental.modularity +class C(tracked x: Int) // error + +class C2(tracked var x: Int) // error + +object A: + def foo(tracked a: Int) = // error + tracked val b: Int = 2 // error + +object B: + tracked object Foo // error // error + +object C: + tracked class D // error // error + +object D: + tracked type T = Int // error // error + 
+object E: + given g2(using tracked val x: Int): C = C(x) // error diff --git a/tests/neg/tracked2.scala b/tests/neg/tracked2.scala new file mode 100644 index 000000000000..2e6fa8cf6045 --- /dev/null +++ b/tests/neg/tracked2.scala @@ -0,0 +1 @@ +class C(tracked val x: Int) // error diff --git a/tests/neg/unapplied-types.scala b/tests/neg/unapplied-types.scala deleted file mode 100644 index 2f2339baa026..000000000000 --- a/tests/neg/unapplied-types.scala +++ /dev/null @@ -1,7 +0,0 @@ -trait T { - type L[X] = List[X] - type T1 <: L // error: takes type parameters - type T2 = L // error: takes type parameters - type T3 = List // error: takes type parameters - type T4 <: List // error: takes type parameters -} diff --git a/tests/new/tracked-mixin-traits.scala b/tests/new/tracked-mixin-traits.scala new file mode 100644 index 000000000000..21d890d44f42 --- /dev/null +++ b/tests/new/tracked-mixin-traits.scala @@ -0,0 +1,16 @@ +trait A: + type T +object a extends A: + type T = Int + +trait B(tracked val b: A): + type T = b.T + +trait C(tracked val c: A): + type T = c.T + +class D extends B(a), C(a): + val x: T = 2 + + + diff --git a/tests/pending/pos/cbproxy-default.scala b/tests/pending/pos/cbproxy-default.scala new file mode 100644 index 000000000000..e8f12ceeae75 --- /dev/null +++ b/tests/pending/pos/cbproxy-default.scala @@ -0,0 +1,4 @@ +def f[S: Monad]( + initial: S.Self = S.unit // error +) = + S.unit // works \ No newline at end of file diff --git a/tests/pending/pos/singleton-infer.scala b/tests/pending/pos/singleton-infer.scala new file mode 100644 index 000000000000..72e00baf3aab --- /dev/null +++ b/tests/pending/pos/singleton-infer.scala @@ -0,0 +1,8 @@ +//> using options -Xprint:typer -language:experimental.modularity -source future + +def f1[S, T <: S : Singleton](x: S) = () +def f2[S, T >: S : Singleton](x: S) = () + +def Test = + f1(42) // f1[Int, Singleton & Int] // should infer (42 : Int) or throw an error? 
+ f2(42) // f2[(42 : Int), (42 : Int)] \ No newline at end of file diff --git a/tests/pos-macros/i8325/Macro_1.scala b/tests/pos-macros/i8325/Macro_1.scala index 18466e17b3df..92a54d21b00a 100644 --- a/tests/pos-macros/i8325/Macro_1.scala +++ b/tests/pos-macros/i8325/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -15,7 +15,7 @@ object A: import quotes.reflect.* expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) - case Apply(fun,args) => '{ A.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } + case Apply(fun,args) => '{ O.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325/Test_2.scala b/tests/pos-macros/i8325/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325/Test_2.scala +++ b/tests/pos-macros/i8325/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos-macros/i8325b/Macro_1.scala b/tests/pos-macros/i8325b/Macro_1.scala index 181efa260f9b..139abed94078 100644 --- a/tests/pos-macros/i8325b/Macro_1.scala +++ b/tests/pos-macros/i8325b/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -16,7 +16,7 @@ object A: expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) case r@Apply(fun,args) => '{ - A.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } + O.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325b/Test_2.scala b/tests/pos-macros/i8325b/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325b/Test_2.scala +++ b/tests/pos-macros/i8325b/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos/FromString-cb-companion.scala b/tests/pos/FromString-cb-companion.scala new file mode 100644 index 000000000000..d086420761ee --- /dev/null +++ b/tests/pos/FromString-cb-companion.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[Self]: + def fromString(s: String): Self + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/FromString-named.scala b/tests/pos/FromString-named.scala new file mode 100644 index 000000000000..efa0882ae347 --- /dev/null +++ b/tests/pos/FromString-named.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString as N, Numeric as num}](a: String, b: String): N = + num.plus(N.fromString(a), N.fromString(b)) diff --git a/tests/pos/FromString-typeparam.scala b/tests/pos/FromString-typeparam.scala new file mode 100644 index 000000000000..893bcfd3decc --- /dev/null +++ b/tests/pos/FromString-typeparam.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given 
FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric}](a: String, b: String): N = + val num = summon[Numeric[N]] + val N = summon[FromString[N]] + num.plus(N.fromString(a), N.fromString(b)) diff --git a/tests/pos/FromString.scala b/tests/pos/FromString.scala new file mode 100644 index 000000000000..333a4c002989 --- /dev/null +++ b/tests/pos/FromString.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future + +trait FromString: + type Self + def fromString(s: String): Self + +given Int is FromString = _.toInt + +given Double is FromString = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/cb-companion-joins.scala b/tests/pos/cb-companion-joins.scala new file mode 100644 index 000000000000..97e0a8a7e4ac --- /dev/null +++ b/tests/pos/cb-companion-joins.scala @@ -0,0 +1,21 @@ +import language.experimental.modularity +import language.future + +trait M[Self]: + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num[Self]: + def zero: Self + +trait A extends M[A] +trait B extends M[A] + +trait AA: + type X: M +trait BB: + type X: Num +class CC[X1: {M, Num}] extends AA, BB: + type X = X1 + X.zero + X.unit diff --git a/tests/pos/cbproxy-expansion.scala b/tests/pos/cbproxy-expansion.scala new file mode 100644 index 000000000000..ee145b62d4ed --- /dev/null +++ b/tests/pos/cbproxy-expansion.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +trait TC[T]: + type Self + +def f1[S, T: TC[S] as tc](x: S, y: tc.Self) = () +def f2[S, T: TC[S]](x: S, y: T.Self) = () +def f3[S, T: TC[S]](x: S, y: Int) = () + +given TC[String] with + type Self = Int + def unit = 42 + +def main = + f1("hello", 23) + f2("hello", 23) + f3("hello", 23) diff --git a/tests/pos/deferred-givens-singletons.scala b/tests/pos/deferred-givens-singletons.scala new file mode 100644 index 000000000000..60a881340b75 --- /dev/null +++ b/tests/pos/deferred-givens-singletons.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 + +class C[X: Singleton] extends A: + type Elem = X + + diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala new file mode 100644 index 000000000000..b9018c97e151 --- /dev/null +++ b/tests/pos/deferred-givens.scala @@ -0,0 +1,36 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +class Ord[Elem] +given Ord[Double] + +trait A: + type Elem : Ord + def foo = summon[Ord[Elem]] + +class AC extends A: + type Elem = Double + override given Ord[Elem] = ??? + +class AD extends A: + type Elem = Double + +trait B: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +class C extends B: + type Elem = String + override given Ord[Elem] = ??? 
+ +def bar(using Ord[String]) = 1 + +class D(using Ord[String]) extends B: + type Elem = String + +class E(using x: Ord[String]) extends B: + type Elem = String + override given Ord[Elem] = x + +class F[X: Ord] extends B: + type Elem = X diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala new file mode 100644 index 000000000000..f8252576d81a --- /dev/null +++ b/tests/pos/deferredSummon.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +trait Ord: + type Self + def less(x: Self, y: Self): Boolean + +trait A: + type Elem + given Elem is Ord = deferred + def foo = summon[Elem is Ord] + +trait B: + type Elem: Ord + def foo = summon[Elem is Ord] + +object Inst: + given Int is Ord: + def less(x: Int, y: Int) = x < y + +object Test1: + import Inst.given + class C extends A: + type Elem = Int + object E extends A: + type Elem = Int + given A: + type Elem = Int + +class D1[T: Ord] extends B: + type Elem = T + +object Test2: + import Inst.given + class C extends B: + type Elem = Int + object E extends B: + type Elem = Int + given B: + type Elem = Int + +class D2[T: Ord] extends B: + type Elem = T + + + + + diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala new file mode 100644 index 000000000000..c724d92e9809 --- /dev/null +++ b/tests/pos/dep-context-bounds.scala @@ -0,0 +1,17 @@ +//> using options -language:experimental.modularity -source future +trait A: + type Self + +object Test1: + def foo[X: A](x: X.Self) = ??? + + def bar[X: A](a: Int) = ??? + + def baz[X: A](a: Int)(using String) = ??? + +object Test2: + def foo[X: A as x](a: x.Self) = ??? + + def bar[X: A as x](a: Int) = ??? + + def baz[X: A as x](a: Int)(using String) = ??? diff --git a/tests/pos/depclass-1.scala b/tests/pos/depclass-1.scala new file mode 100644 index 000000000000..38daef85ae98 --- /dev/null +++ b/tests/pos/depclass-1.scala @@ -0,0 +1,19 @@ +//> using options -source future -language:experimental.modularity +class A(tracked val source: String) + +class B(x: Int, tracked val source1: String) extends A(source1) + +class C(tracked val source2: String) extends B(1, source2) + +//class D(source1: String) extends C(source1) +val x = C("hello") +val _: A{ val source: "hello" } = x + +class Vec[Elem](tracked val size: Int) +class Vec8 extends Vec[Float](8) + +val v = Vec[Float](10) +val v2 = Vec8() +val xx: 10 = v.size +val x2: 8 = v2.size + diff --git a/tests/pos/hylolib-cb-extract.scala b/tests/pos/hylolib-cb-extract.scala new file mode 100644 index 000000000000..b80a88485a2b --- /dev/null +++ b/tests/pos/hylolib-cb-extract.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element: Value + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala new file mode 100644 index 000000000000..50f4313e46ce --- /dev/null +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -0,0 +1,66 @@ +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. 
+ */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base: Collection as b](base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. + given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T: Value]: Collection[AnyCollection[T]] with { + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/AnyValue.scala b/tests/pos/hylolib-cb/AnyValue.scala new file mode 100644 index 000000000000..b9d39869c09a --- /dev/null +++ b/tests/pos/hylolib-cb/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. 
*/ + def apply[T](using Value[T])(wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-cb/BitArray.scala b/tests/pos/hylolib-cb/BitArray.scala new file mode 100644 index 000000000000..3a0b4658f747 --- /dev/null +++ b/tests/pos/hylolib-cb/BitArray.scala @@ -0,0 +1,372 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. 
*/
+  def allTrue: Boolean =
+    if (isEmpty) {
+      true
+    } else {
+      val k = (count - 1) >> 5
+      def loop(i: Int): Boolean =
+        if (i == k) {
+          val m = (1 << (count & 31)) - 1
+          (_bits.at(k) & m) == m
+        } else if (_bits.at(i) != ~0) {
+          false
+        } else {
+          loop(i + 1)
+        }
+      loop(0)
+    }
+
+  /** Returns the bitwise OR of `this` and `other`. */
+  def | (other: BitArray): BitArray =
+    val result = copy()
+    result.applyBitwise(other, _ | _, assumeUniqueness = true)
+
+  /** Returns the bitwise AND of `this` and `other`. */
+  def & (other: BitArray): BitArray =
+    val result = copy()
+    result.applyBitwise(other, _ & _, assumeUniqueness = true)
+
+  /** Returns the bitwise XOR of `this` and `other`. */
+  def ^ (other: BitArray): BitArray =
+    val result = copy()
+    result.applyBitwise(other, _ ^ _, assumeUniqueness = true)
+
+  /** Assigns each bit in `this` to the result of `operation` applied on that bit and its
+    * corresponding bit in `other`.
+    *
+    * @requires
+    *   `self.count == other.count`.
+    */
+  private def applyBitwise(
+      other: BitArray,
+      operation: (Int, Int) => Int,
+      assumeUniqueness: Boolean = false
+  ): BitArray =
+    require(this.count == other.count)
+    if (isEmpty) {
+      this
+    } else {
+      val result = if assumeUniqueness then this else copy()
+      var u = assumeUniqueness
+      val k = (count - 1) >> 5
+
+      for (i <- 0 until k) {
+        result._bits = result._bits.modifyAt(
+          i, (n) => operation(n, other._bits.at(n)),
+          assumeUniqueness = u
+        )
+        u = true
+      }
+      val m = (1 << (count & 31)) - 1
+      result._bits = result._bits.modifyAt(
+        k, (n) => operation(n & m, other._bits.at(k) & m),
+        assumeUniqueness = u
+      )
+
+      result
+    }
+
+  /** Returns the position of `this`'s first element, or `endPosition` if `this` is empty.
+    *
+    * @complexity
+    *   O(1).
+    */
+  def startPosition: BitArray.Position =
+    BitArray.Position(0)
+
+  /** Returns the "past the end" position in `this`, that is, the position immediately after the
+    * last element in `this`.
+    *
+    * @complexity
+    *   O(1).
+    */
+  def endPosition: BitArray.Position =
+    BitArray.Position(count)
+
+  /** Returns the position immediately after `p`.
+    *
+    * @requires
+    *   `p` is a valid position in `self` different from `endPosition`.
+    * @complexity
+    *   O(1).
+    */
+  def positionAfter(p: BitArray.Position): BitArray.Position =
+    if (p.offsetInBucket == 63) {
+      BitArray.Position(p.bucket + 1, 0)
+    } else {
+      BitArray.Position(p.bucket, p.offsetInBucket + 1)
+    }
+
+  /** Accesses the element at `p`.
+    *
+    * @requires
+    *   `p` is a valid position in `self` different from `endPosition`.
+    * @complexity
+    *   O(1).
+    */
+  def at(p: BitArray.Position): Boolean =
+    val m = 1 << p.offsetInBucket
+    val b: Int = _bits.at(p.bucket)
+    (b & m) == m
+
+  /** Accesses the `i`-th element of `this`.
+    *
+    * @requires
+    *   `i` is greater than or equal to 0, and less than `count`.
+    * @complexity
+    *   O(1).
+    */
+  def atIndex(i: Int): Boolean =
+    at(BitArray.Position(i))
+
+  /** Calls `transform` on the element at `p` to update its value.
+    *
+    * @requires
+    *   `p` is a valid position in `self` different from `endPosition`.
+    * @complexity
+    *   O(1).
+    */
+  def modifyAt(
+      p: BitArray.Position,
+      transform: (Boolean) => Boolean,
+      assumeUniqueness: Boolean = false
+  ): BitArray =
+    val result = if assumeUniqueness then this else copy()
+    result.setValue(transform(result.at(p)), p)
+    result
+
+  /** Calls `transform` on the `i`-th element of `this` to update its value.
+    *
+    * @requires
+    *   `i` is greater than or equal to 0, and less than `count`.
+    * @complexity
+    *   O(1).
+ */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-cb/Collection.scala b/tests/pos/hylolib-cb/Collection.scala new file mode 100644 index 000000000000..2fc04f02b9ac --- /dev/null +++ b/tests/pos/hylolib-cb/Collection.scala @@ -0,0 +1,278 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value as positionIsValue + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self: Collection as s](self: Self) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+    */
+  def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] =
+    val e = self.endPosition
+    def loop(p: s.Position): Option[s.Position] =
+      if (p.eq(e)) {
+        None
+      } else if (predicate(self.at(p))) {
+        Some(p)
+      } else {
+        loop(self.positionAfter(p))
+      }
+    loop(self.startPosition)
+
+  /** Returns the minimum element in `self`, using `isLessThan` to compare elements.
+    *
+    * @complexity
+    *   O(n) where n is the number of elements in `self`.
+    */
+  def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] =
+    self.leastElement(isLessThan)
+
+  // NOTE: I can't find a reasonable way to call this method.
+  /** Returns the minimum element in `self`.
+    *
+    * @complexity
+    *   O(n) where n is the number of elements in `self`.
+    */
+  def minElement()(using Comparable[s.Element]): Option[s.Element] =
+    self.minElement(isLessThan = _ `lt` _)
+
+  /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements.
+    *
+    * @complexity
+    *   O(n) where n is the number of elements in `self`.
+    */
+  def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] =
+    self.leastElement(isGreaterThan)
+
+  /** Returns the maximum element in `self`.
+    *
+    * @complexity
+    *   O(n) where n is the number of elements in `self`.
+    */
+  def maxElement()(using Comparable[s.Element]): Option[s.Element] =
+    self.maxElement(isGreaterThan = _ `gt` _)
+
+  /** Returns the least element in `self`, using `isOrderedBefore` to compare elements.
+    *
+    * @complexity
+    *   O(n) where n is the number of elements in `self`.
+    */
+  def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] =
+    if (self.isEmpty) {
+      None
+    } else {
+      val e = self.endPosition
+      def _least(p: s.Position, least: s.Element): s.Element =
+        if (p.eq(e)) {
+          least
+        } else {
+          val x = self.at(p)
+          val y = if isOrderedBefore(x, least) then x else least
+          _least(self.positionAfter(p), y)
+        }
+
+      val b = self.startPosition
+      Some(_least(self.positionAfter(b), self.at(b)))
+    }
+
+}
+
+extension [Self: Collection as s](self: Self)(using
+    Value[s.Element]
+) {
+
+  /** Returns `true` if `self` contains the same elements as `other`, in the same order. */
+  def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean =
+    def loop(i: s.Position, j: o.Position): Boolean =
+      if (i `eq` self.endPosition) {
+        j `eq` other.endPosition
+      } else if (j `eq` other.endPosition) {
+        false
+      } else if (self.at(i) `neq` other.at(j)) {
+        false
+      } else {
+        loop(self.positionAfter(i), other.positionAfter(j))
+      }
+    loop(self.startPosition, other.startPosition)
+
+}
diff --git a/tests/pos/hylolib-cb/CoreTraits.scala b/tests/pos/hylolib-cb/CoreTraits.scala
new file mode 100644
index 000000000000..01b2c5242af9
--- /dev/null
+++ b/tests/pos/hylolib-cb/CoreTraits.scala
@@ -0,0 +1,57 @@
+package hylo
+
+/** A type whose instances can be treated as independent values.
+  *
+  * The data structures and algorithms of Hylo's standard library operate on "notional values"
+  * rather than arbitrary references. This trait defines the basic operations of all values.
+  */
+trait Value[Self] {
+
+  extension (self: Self) {
+
+    /** Returns a copy of `self`. */
+    def copy(): Self
+
+    /** Returns `true` iff `self` and `other` have an equivalent value. */
+    def eq(other: Self): Boolean
+
+    /** Hashes the salient parts of `self` into `hasher`. */
+    def hashInto(hasher: Hasher): Hasher
+
+  }
+
+}
+
+extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other)
+
+// ----------------------------------------------------------------------------
+// Comparable
+// ----------------------------------------------------------------------------
+
+trait Comparable[Self] extends Value[Self] {
+
+  extension (self: Self) {
+
+    /** Returns `true` iff `self` is ordered before `other`. */
+    def lt(other: Self): Boolean
+
+    /** Returns `true` iff `self` is ordered after `other`. */
+    def gt(other: Self): Boolean = other.lt(self)
+
+    /** Returns `true` iff `self` is equal to or ordered before `other`. */
+    def le(other: Self): Boolean = !other.lt(self)
+
+    /** Returns `true` iff `self` is equal to or ordered after `other`. */
+    def ge(other: Self): Boolean = !self.lt(other)
+
+  }
+
+}
+
+/** Returns the lesser of `x` and `y`. */
+def min[T: Comparable](x: T, y: T): T =
+  if y.lt(x) then y else x
+
+/** Returns the greater of `x` and `y`. */
+def max[T: Comparable](x: T, y: T): T =
+  if x.lt(y) then y else x
diff --git a/tests/pos/hylolib-cb/Hasher.scala b/tests/pos/hylolib-cb/Hasher.scala
new file mode 100644
index 000000000000..ef6813df6b60
--- /dev/null
+++ b/tests/pos/hylolib-cb/Hasher.scala
@@ -0,0 +1,38 @@
+package hylo
+
+import scala.util.Random
+
+/** A universal hash function. */
+final class Hasher private (private val hash: Int = Hasher.offsetBasis) {
+
+  /** Returns the computed hash value. */
+  def finalizeHash(): Int =
+    hash
+
+  /** Adds `n` to the computed hash value. */
+  def combine(n: Int): Hasher =
+    var h = hash
+    h = h ^ n
+    h = h * Hasher.prime
+    new Hasher(h)
+}
+
+object Hasher {
+
+  private val offsetBasis = 0x811c9dc5
+  private val prime = 0x01000193
+
+  /** A random seed ensuring different hashes across multiple runs. */
+  private lazy val seed = scala.util.Random.nextInt()
+
+  /** Creates an instance with the given `seed`. */
+  def apply(): Hasher =
+    val h = new Hasher()
+    h.combine(seed)
+    h
+
+  /** Returns the hash of `v`. */
+  def hash[T: Value](v: T): Int =
+    v.hashInto(Hasher()).finalizeHash()
+
+}
diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala
new file mode 100644
index 000000000000..0fff45e744ec
--- /dev/null
+++ b/tests/pos/hylolib-cb/HyArray.scala
@@ -0,0 +1,220 @@
+//> using options -language:experimental.modularity -source future
+package hylo
+
+import java.util.Arrays
+import scala.collection.mutable
+
+/** An ordered, random-access collection. */
+final class HyArray[Element: Value as elementIsCValue](
+    private var _storage: scala.Array[AnyRef | Null] | Null,
+    private var _count: Int // NOTE: where do I document private fields
+) {
+
+  // NOTE: The fact that we need Array[AnyRef] is disappointing and difficult to discover
+  // The compiler error sent me on a wild goose chase with ClassTag.
+
+  /** Returns `true` iff `this` is empty. */
+  def isEmpty: Boolean =
+    _count == 0
+
+  /** Returns the number of elements in `this`. */
+  def count: Int =
+    _count
+
+  /** Returns the number of elements that `this` can contain before allocating new storage. */
+  def capacity: Int =
+    if _storage == null then 0 else _storage.length
+
+  /** Reserves enough storage to store `n` elements in `this`.
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+      reserveCapacity(minimumCapacity)
+    } else {
+      val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count))
+      var i = 0
+      while (i < count) {
+        clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef]
+        i += 1
+      }
+      clone._count = count
+      clone
+    }
+
+}
+
+object HyArray {
+
+  /** Creates an array with the given `elements`. */
+  def apply[T: Value](elements: T*): HyArray[T] =
+    var a = new HyArray[T](null, 0)
+    for (e <- elements) a = a.append(e, assumeUniqueness = true)
+    a
+
+}
+
+given [T: Value] => Value[HyArray[T]] with {
+
+  extension (self: HyArray[T]) {
+
+    def copy(): HyArray[T] =
+      self.copy()
+
+    def eq(other: HyArray[T]): Boolean =
+      self.elementsEqual(other)
+
+    def hashInto(hasher: Hasher): Hasher =
+      self.reduce(hasher, (h, e) => e.hashInto(h))
+
+  }
+
+}
+
+given [T: Value] => Collection[HyArray[T]] with {
+
+  type Element = T
+  type Position = Int
+
+  extension (self: HyArray[T]) {
+
+    // NOTE: Having to explicitly override means that primary declaration can't automatically
+    // specialize trait requirements.
+    override def isEmpty: Boolean = self.isEmpty
+
+    override def count: Int = self.count
+
+    def startPosition = 0
+
+    def endPosition = self.count
+
+    def positionAfter(p: Int) = p + 1
+
+    def at(p: Int) = self.at(p)
+
+  }
+
+}
+
+// NOTE: This should work.
+// given hyArrayIsStringConvertible[T](using
+//     tIsValue: Value[T],
+//     tIsStringConvertible: StringConvertible[T]
+// ): StringConvertible[HyArray[T]] with {
+//
+//   given Collection[HyArray[T]] = hyArrayIsCollection[T]
+//
+//   extension (self: HyArray[T])
+//     override def description: String =
+//       var contents = mutable.StringBuilder()
+//       self.forEach((e) => { contents ++= e.description; true })
+//       s"[${contents.mkString(", ")}]"
+//
+// }
diff --git a/tests/pos/hylolib-cb/Integers.scala b/tests/pos/hylolib-cb/Integers.scala
new file mode 100644
index 000000000000..b9bc203a88ea
--- /dev/null
+++ b/tests/pos/hylolib-cb/Integers.scala
@@ -0,0 +1,58 @@
+package hylo
+
+given booleanIsValue: Value[Boolean] with {
+
+  extension (self: Boolean) {
+
+    def copy(): Boolean =
+      // Note: Scala's `Boolean` has value semantics already.
+      self
+
+    def eq(other: Boolean): Boolean =
+      self == other
+
+    def hashInto(hasher: Hasher): Hasher =
+      hasher.combine(if self then 1 else 0)
+
+  }
+
+}
+
+given intIsValue: Value[Int] with {
+
+  extension (self: Int) {
+
+    def copy(): Int =
+      // Note: Scala's `Int` has value semantics already.
+      self
+
+    def eq(other: Int): Boolean =
+      self == other
+
+    def hashInto(hasher: Hasher): Hasher =
+      hasher.combine(self)
+
+  }
+
+}
+
+given intIsComparable: Comparable[Int] with {
+
+  extension (self: Int) {
+
+    def copy(): Int =
+      self
+
+    def eq(other: Int): Boolean =
+      self == other
+
+    def hashInto(hasher: Hasher): Hasher =
+      hasher.combine(self)
+
+    def lt(other: Int): Boolean = self < other
+
+  }
+
+}
+
+given intIsStringConvertible: StringConvertible[Int] with {}
diff --git a/tests/pos/hylolib-cb/Range.scala b/tests/pos/hylolib-cb/Range.scala
new file mode 100644
index 000000000000..1f597652ead1
--- /dev/null
+++ b/tests/pos/hylolib-cb/Range.scala
@@ -0,0 +1,37 @@
+package hylo
+
+/** A half-open interval from a lower bound up to, but not including, an upper bound. */
+final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) {
+
+  /** Returns a textual description of `this`.
*/ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala new file mode 100644 index 000000000000..b577ceeb3739 --- /dev/null +++ b/tests/pos/hylolib-cb/Slice.scala @@ -0,0 +1,44 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection as b]( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T: Collection as c]: Collection[Slice[T]] with { + + type Element = c.Element + type Position = c.Position + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/StringConvertible.scala b/tests/pos/hylolib-cb/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-cb/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} diff --git a/tests/pos/hylolib-deferred-given-extract.scala b/tests/pos/hylolib-deferred-given-extract.scala new file mode 100644 index 000000000000..02d889dc9aac --- /dev/null +++ b/tests/pos/hylolib-deferred-given-extract.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-deferred-given/AnyCollection.scala b/tests/pos/hylolib-deferred-given/AnyCollection.scala new file mode 100644 index 000000000000..55e453d6dc87 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyCollection.scala @@ -0,0 +1,69 @@ +package hylo + +/** A type-erased collection. 
+ * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. + given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { + + type Element = T + //given elementIsValue: Value[Element] = tIsValue + + type Position = AnyValue + given positionIsValue: Value[Position] = anyValueIsValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala new file mode 100644 index 000000000000..21f2965e102e --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. 
*/ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/BitArray.scala b/tests/pos/hylolib-deferred-given/BitArray.scala new file mode 100644 index 000000000000..485f30472847 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/BitArray.scala @@ -0,0 +1,375 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. 
*/
+  def allTrue: Boolean =
+    if (isEmpty) {
+      true
+    } else {
+      val k = (count - 1) >> 5
+      def loop(i: Int): Boolean =
+        if (i == k) {
+          val m = (1 << (count & 31)) - 1
+          (_bits.at(k) & m) == m
+        } else if (_bits.at(i) != ~0) {
+          false
+        } else {
+          loop(i + 1)
+        }
+      loop(0)
+    }
+
+  /** Returns the bitwise OR of `this` and `other`. */
+  def | (other: BitArray): BitArray =
+    val result = copy()
+    result.applyBitwise(other, _ | _, assumeUniqueness = true)
+
+  /** Returns the bitwise AND of `this` and `other`. */
+  def & (other: BitArray): BitArray =
+    val result = copy()
+    result.applyBitwise(other, _ & _, assumeUniqueness = true)
+
+  /** Returns the bitwise XOR of `this` and `other`. */
+  def ^ (other: BitArray): BitArray =
+    val result = copy()
+    result.applyBitwise(other, _ ^ _, assumeUniqueness = true)
+
+  /** Assigns each bit in `this` to the result of `operation` applied on those bits and their
+    * corresponding bits in `other`.
+    *
+    * @requires
+    *   `self.count == other.count`.
+    */
+  private def applyBitwise(
+      other: BitArray,
+      operation: (Int, Int) => Int,
+      assumeUniqueness: Boolean = false
+  ): BitArray =
+    require(this.count == other.count)
+    if (isEmpty) {
+      this
+    } else {
+      val result = if assumeUniqueness then this else copy()
+      var u = assumeUniqueness
+      val k = (count - 1) >> 5
+
+      for (i <- 0 until k) {
+        result._bits = result._bits.modifyAt(
+          i, (n) => operation(n, other._bits.at(i)),
+          assumeUniqueness = u
+        )
+        u = true
+      }
+      val m = (1 << (count & 31)) - 1
+      result._bits = result._bits.modifyAt(
+        k, (n) => operation(n & m, other._bits.at(k) & m),
+        assumeUniqueness = u
+      )
+
+      result
+    }
+
+  /** Returns the position of `this`'s first element, or `endPosition` if `this` is empty.
+    *
+    * @complexity
+    *   O(1).
+    */
+  def startPosition: BitArray.Position =
+    BitArray.Position(0)
+
+  /** Returns the "past the end" position in `this`, that is, the position immediately after the
+    * last element in `this`.
+    *
+    * @complexity
+    *   O(1).
+    */
+  def endPosition: BitArray.Position =
+    BitArray.Position(count)
+
+  /** Returns the position immediately after `p`.
+    *
+    * @requires
+    *   `p` is a valid position in `self` different from `endPosition`.
+    * @complexity
+    *   O(1).
+    */
+  def positionAfter(p: BitArray.Position): BitArray.Position =
+    if (p.offsetInBucket == 31) { // last offset in a 32-bit bucket
+      BitArray.Position(p.bucket + 1, 0)
+    } else {
+      BitArray.Position(p.bucket, p.offsetInBucket + 1)
+    }
+
+  /** Accesses the element at `p`.
+    *
+    * @requires
+    *   `p` is a valid position in `self` different from `endPosition`.
+    * @complexity
+    *   O(1).
+    */
+  def at(p: BitArray.Position): Boolean =
+    val m = 1 << p.offsetInBucket
+    val b: Int = _bits.at(p.bucket)
+    (b & m) == m
+
+  /** Accesses the `i`-th element of `this`.
+    *
+    * @requires
+    *   `i` is greater than or equal to 0, and less than `count`.
+    * @complexity
+    *   O(1).
+    */
+  def atIndex(i: Int): Boolean =
+    at(BitArray.Position(i))
+
+  /** Calls `transform` on the element at `p` to update its value.
+    *
+    * @requires
+    *   `p` is a valid position in `self` different from `endPosition`.
+    * @complexity
+    *   O(1).
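+    *
+    * For example, a sketch of flipping the first bit:
+    * {{{
+    *   val bits    = BitArray(false, true)
+    *   val flipped = bits.modifyAt(bits.startPosition, b => !b)
+    *   flipped.atIndex(0)    // true
+    * }}}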
+ */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
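+    *
+    * For example:
+    * {{{
+    *   val bits = BitArray(true, false, true)
+    *   bits.count         // 3
+    *   bits.atIndex(1)    // false
+    * }}}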
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + //given elementIsValue: Value[Boolean] = booleanIsValue + + type Position = BitArray.Position + given positionIsValue: Value[BitArray.Position] = bitArrayPositionIsValue + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-deferred-given/Collection.scala b/tests/pos/hylolib-deferred-given/Collection.scala new file mode 100644 index 000000000000..6b5e7a762dc8 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Collection.scala @@ -0,0 +1,281 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + + /** The type of a position in the collection. */ + type Position + given positionIsValue: Value[Position] + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
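+      *
+      * For example, a sketch using the `Collection` instance for `HyArray` defined in
+      * `HyArray.scala`:
+      * {{{
+      *   val a  = HyArray(1, 2)
+      *   val p0 = a.startPosition
+      *   a.isBefore(p0, a.endPosition)    // true
+      * }}}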
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self](self: Self)(using s: Collection[Self]) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
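+    *
+    * For example, a sketch with `HyArray`, whose positions are indices:
+    * {{{
+    *   HyArray(1, 2, 3).firstPositionWhere(_ > 1)    // Some(1), the position of element 2
+    * }}}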
+ */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self](self: Self)(using + s: Collection[Self], + e: Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-deferred-given/CoreTraits.scala b/tests/pos/hylolib-deferred-given/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. 
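+      *
+      * `Hasher` is immutable, so implementations thread the hasher through each salient
+      * component and return the result, e.g. for a hypothetical pair of values `first` and
+      * `second` (a sketch):
+      * {{{
+      *   def hashInto(hasher: Hasher): Hasher =
+      *     second.hashInto(first.hashInto(hasher))
+      * }}}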
*/ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-deferred-given/Hasher.scala b/tests/pos/hylolib-deferred-given/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-deferred-given/HyArray.scala b/tests/pos/hylolib-deferred-given/HyArray.scala new file mode 100644 index 000000000000..98632dcb65bc --- /dev/null +++ b/tests/pos/hylolib-deferred-given/HyArray.scala @@ -0,0 +1,224 @@ +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element] private (using + elementIsValue: Value[Element] +)( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
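+    *
+    * For example (a sketch; storage grows by doubling, so the resulting capacity may exceed `n`):
+    * {{{
+    *   val a = HyArray(1, 2).reserveCapacity(10)
+    *   a.capacity >= 10    // true
+    *   a.count             // still 2
+    * }}}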
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+ reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { + + type Element = T + //given elementIsValue: Value[T] = tIsValue + + type Position = Int + given positionIsValue: Value[Int] = intIsValue + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. +// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-deferred-given/Integers.scala b/tests/pos/hylolib-deferred-given/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-deferred-given/Range.scala b/tests/pos/hylolib-deferred-given/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. 
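+  *
+  * For example, a sketch using the `Comparable[Int]` instance from `Integers.scala`:
+  * {{{
+  *   val r = Range(1, 3)    // the interval [1, 3)
+  *   r.lowerBound           // 1
+  *   r.upperBound           // 3
+  * }}}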
*/ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-deferred-given/Slice.scala b/tests/pos/hylolib-deferred-given/Slice.scala new file mode 100644 index 000000000000..57cdb38f6e53 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Slice.scala @@ -0,0 +1,49 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base](using + val b: Collection[Base] +)( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { + + type Element = c.Element + //given elementIsValue: Value[Element] = c.elementIsValue + + type Position = c.Position + given positionIsValue: Value[Position] = c.positionIsValue + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/StringConvertible.scala b/tests/pos/hylolib-deferred-given/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} diff --git a/tests/pos/hylolib-extract.scala b/tests/pos/hylolib-extract.scala new file mode 100644 index 000000000000..846e52f30df6 --- /dev/null +++ b/tests/pos/hylolib-extract.scala @@ -0,0 +1,29 @@ +//> using options -language:experimental.modularity -source future +package hylotest + +trait Value: + type Self + extension (self: Self) def eq(other: Self): Boolean + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. 
*/ + type Element: Value + +class BitArray + +given Boolean is Value: + extension (self: Self) def eq(other: Self): Boolean = + self == other + +given BitArray is Collection: + type Element = Boolean + +extension [Self: Value](self: Self) + def neq(other: Self): Boolean = !self.eq(other) + +extension [Self: Collection](self: Self) + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + ??? diff --git a/tests/pos/hylolib/AnyCollection.scala b/tests/pos/hylolib/AnyCollection.scala new file mode 100644 index 000000000000..6c2b835852e6 --- /dev/null +++ b/tests/pos/hylolib/AnyCollection.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base: Collection](base: Base): AnyCollection[Base.Element] = + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[Base.Position])) + + def at(p: AnyValue): Base.Element = + base.at(p.unsafelyUnwrappedAs[Base.Position]) + + new AnyCollection[Base.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given [T: Value] => AnyCollection[T] is Collection: + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) + def startPosition = self._start() + def endPosition = self._end() + def positionAfter(p: Position) = self._after(p) + def at(p: Position) = self._at(p) + diff --git a/tests/pos/hylolib/AnyValue.scala b/tests/pos/hylolib/AnyValue.scala new file mode 100644 index 000000000000..6844135b646b --- /dev/null +++ b/tests/pos/hylolib/AnyValue.scala @@ -0,0 +1,67 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. 
*/ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given AnyValue is Value: + + extension (self: AnyValue) + def copy(): AnyValue = self.copy() + def eq(other: AnyValue): Boolean = self `eq` other + def hashInto(hasher: Hasher): Hasher = self.hashInto(hasher) + diff --git a/tests/pos/hylolib/AnyValueTests.scala b/tests/pos/hylolib/AnyValueTests.scala new file mode 100644 index 000000000000..96d3563f4f53 --- /dev/null +++ b/tests/pos/hylolib/AnyValueTests.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class AnyValueTests extends munit.FunSuite: + + test("eq"): + val a = AnyValue(1) + assert(a `eq` a) + assert(!(a `neq` a)) + + val b = AnyValue(2) + assert(!(a `eq` b)) + assert(a `neq` b) + diff --git a/tests/pos/hylolib/BitArray.scala b/tests/pos/hylolib/BitArray.scala new file mode 100644 index 000000000000..6ef406e5ad83 --- /dev/null +++ b/tests/pos/hylolib/BitArray.scala @@ -0,0 +1,362 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. 
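+    *
+    * For instance (a sketch):
+    * {{{
+    *   BitArray(false, false).allFalse    // true
+    *   BitArray(false, true).allFalse     // false
+    *   BitArray().allFalse                // true: vacuously satisfied
+    * }}}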
*/ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given BitArray.Position is Value: + + extension (self: BitArray.Position) + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + +given BitArray is Collection: + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + +given BitArray is StringConvertible: + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + diff --git a/tests/pos/hylolib/Collection.scala b/tests/pos/hylolib/Collection.scala new file mode 100644 index 000000000000..bef86a967e6e --- /dev/null +++ b/tests/pos/hylolib/Collection.scala @@ -0,0 +1,267 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value + + extension (self: Self) + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def loop(p: Position, n: Int): Int = + if p `eq` e then n else loop(self.positionAfter(p), n + 1) + loop(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if i `eq` e then false + else if j `eq` e then true + else + def recur(n: Position): Boolean = + if n `eq` j then true + else if n `eq` e then false + else recur(self.positionAfter(n)) + recur(self.positionAfter(i)) + + class Slice2(val base: Self, val bounds: Range[Position]): + + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Position = + bounds.lowerBound + + def endPosition: Position = + bounds.upperBound + + def at(p: Position): Element = + base.at(p) + end Slice2 + +end Collection + +extension [Self: Collection](self: Self) + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(Self.Element, Slice[Self])] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + def headAndTail2: Option[(Self.Element, Self.Slice2)] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Self.Slice2(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T)(combine: (T, Self.Element) => T): T = + val e = self.endPosition + def loop(p: Self.Position, r: T): T = + if p `eq` e then r + else loop(self.positionAfter(p), combine(r, self.at(p))) + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: Self.Element => Boolean): Boolean = + val e = self.endPosition + def loop(p: Self.Position): Boolean = + if p `eq` e then true + else if !action(self.at(p)) then false + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: Self.Element => T): HyArray[T] = + self.reduce(HyArray[T]()): (r, e) => + r.append(transform(e), assumeUniqueness = true) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: Self.Element => Boolean): HyArray[Self.Element] = + self.reduce(HyArray[Self.Element]()): (r, e) => + if isIncluded(e) then r.append(e, assumeUniqueness = true) else r + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def allSatisfy(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: Self.Element => Boolean): Option[Self.Position] = + val e = self.endPosition + def loop(p: Self.Position): Option[Self.Position] = + if p `eq` e then None + else if predicate(self.at(p)) then Some(p) + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Self.Element is Comparable): Option[Self.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Self.Element is Comparable): Option[Self.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + if self.isEmpty then + None + else + val e = self.endPosition + def loop(p: Self.Position, least: Self.Element): Self.Element = + if p `eq` e then + least + else + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + loop(self.positionAfter(p), y) + val b = self.startPosition + Some(loop(self.positionAfter(b), self.at(b))) + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. 
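+    *
+    * For example (a sketch; both collections must share the same `Element` type):
+    * {{{
+    *   HyArray(1, 2).elementsEqual(HyArray(1, 2))    // true
+    *   HyArray(1, 2).elementsEqual(HyArray(2, 1))    // false
+    * }}}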
*/
+  def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean =
+    def loop(i: Self.Position, j: T.Position): Boolean =
+      if i `eq` self.endPosition then
+        j `eq` other.endPosition
+      else if j `eq` other.endPosition then
+        false
+      else if self.at(i) `neq` other.at(j) then
+        false
+      else
+        loop(self.positionAfter(i), other.positionAfter(j))
+    loop(self.startPosition, other.startPosition)
+end extension
diff --git a/tests/pos/hylolib/CollectionTests.scala b/tests/pos/hylolib/CollectionTests.scala
new file mode 100644
index 000000000000..d884790f64d7
--- /dev/null
+++ b/tests/pos/hylolib/CollectionTests.scala
@@ -0,0 +1,67 @@
+//> using options -language:experimental.modularity -source future
+import hylo.*
+import hylo.given
+
+class CollectionTests extends munit.FunSuite:
+
+  test("isEmpty"):
+    val empty = AnyCollection(HyArray[Int]())
+    assert(empty.isEmpty)
+
+    val nonEmpty = AnyCollection(HyArray[Int](1, 2))
+    assert(!nonEmpty.isEmpty)
+
+  test("count"):
+    val a = AnyCollection(HyArray[Int](1, 2))
+    assertEquals(a.count, 2)
+
+  test("isBefore"):
+    val empty = AnyCollection(HyArray[Int]())
+    assert(!empty.isBefore(empty.startPosition, empty.endPosition))
+
+    val nonEmpty = AnyCollection(HyArray[Int](1, 2))
+    val p0 = nonEmpty.startPosition
+    val p1 = nonEmpty.positionAfter(p0)
+    val p2 = nonEmpty.positionAfter(p1)
+    assert(nonEmpty.isBefore(p0, nonEmpty.endPosition))
+    assert(nonEmpty.isBefore(p1, nonEmpty.endPosition))
+    assert(!nonEmpty.isBefore(p2, nonEmpty.endPosition))
+
+  test("headAndTail"):
+    val empty = AnyCollection(HyArray[Int]())
+    assertEquals(empty.headAndTail, None)
+
+    val one = AnyCollection(HyArray[Int](1))
+    val Some((h0, t0)) = one.headAndTail: @unchecked
+    assert(h0 eq 1)
+    assert(t0.isEmpty)
+
+    val two = AnyCollection(HyArray[Int](1, 2))
+    val Some((h1, t1)) = two.headAndTail: @unchecked
+    assertEquals(h1, 1)
+    assertEquals(t1.count, 1)
+
+  test("reduce"):
+    val empty = AnyCollection(HyArray[Int]())
+    assertEquals(empty.reduce(0)((s, x) => s + x), 0)
+
+    val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3))
+    assertEquals(nonEmpty.reduce(0)((s, x) => s + x), 6)
+
+  test("forEach"):
+    val empty = AnyCollection(HyArray[Int]())
+    assert(empty.forEach((e) => false))
+
+    val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3))
+    var s = 0
+    assert(nonEmpty.forEach((e) => { s += e; true }))
+    assertEquals(s, 6)
+
+    s = 0
+    assert(!nonEmpty.forEach((e) => { s += e; false }))
+    assertEquals(s, 1)
+
+  test("elementsEqual"):
+    val a = HyArray(1, 2)
+    assert(a.elementsEqual(a))
+end CollectionTests
diff --git a/tests/pos/hylolib/CoreTraits.scala b/tests/pos/hylolib/CoreTraits.scala
new file mode 100644
index 000000000000..f4b3699b430e
--- /dev/null
+++ b/tests/pos/hylolib/CoreTraits.scala
@@ -0,0 +1,56 @@
+package hylo
+
+/** A type whose instances can be treated as independent values.
+  *
+  * The data structures and algorithms of Hylo's standard library operate on "notional values"
+  * rather than arbitrary references. This trait defines the basic operations of all values.
+  */
+trait Value:
+  type Self
+
+  extension (self: Self) {
+
+    /** Returns a copy of `self`. */
+    def copy(): Self
+
+    /** Returns `true` iff `self` and `other` have an equivalent value. */
+    def eq(other: Self): Boolean
+
+    def neq(other: Self): Boolean = !self.eq(other)
+
+    /** Hashes the salient parts of `self` into `hasher`.
*/ + def hashInto(hasher: Hasher): Hasher + + } + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable extends Value { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib/Hasher.scala b/tests/pos/hylolib/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib/HyArray.scala b/tests/pos/hylolib/HyArray.scala new file mode 100644 index 000000000000..de5e83d3b1a3 --- /dev/null +++ b/tests/pos/hylolib/HyArray.scala @@ -0,0 +1,202 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element: Value as elementIsCValue]( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + /** Adds the contents of `source` at the end of the array. */ + def appendContents[C: Collection { type Element = HyArray.this.Element }]( + source: C, assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + source.reduce(result): (r, e) => + r.append(e, assumeUniqueness = true) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+ reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T: Value](elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given [T: Value] => HyArray[T] is Value: + + extension (self: HyArray[T]) + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher)((h, e) => e.hashInto(h)) + +given [T: Value] => HyArray[T] is Collection: + + type Element = T + type Position = Int + + extension (self: HyArray[T]) + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + +given [T: {Value, StringConvertible}] => HyArray[T] is StringConvertible: + extension (self: HyArray[T]) + override def description: String = + val contents = mutable.StringBuilder() + self.forEach: e => + contents ++= e.description + true + s"[${contents.mkString(", ")}]" diff --git a/tests/pos/hylolib/HyArrayTests.scala b/tests/pos/hylolib/HyArrayTests.scala new file mode 100644 index 000000000000..0de65603d0c7 --- /dev/null +++ b/tests/pos/hylolib/HyArrayTests.scala @@ -0,0 +1,17 @@ +import hylo.* +import hylo.given + +class HyArrayTests extends munit.FunSuite: + + test("reserveCapacity"): + var a = HyArray[Int]() + a = a.append(1) + a = a.append(2) + + a = a.reserveCapacity(10) + assert(a.capacity >= 10) + assertEquals(a.count, 2) + assertEquals(a.at(0), 1) + assertEquals(a.at(1), 2) + +end HyArrayTests diff --git a/tests/pos/hylolib/Integers.scala b/tests/pos/hylolib/Integers.scala new file mode 100644 index 000000000000..f7334ae40786 --- /dev/null +++ b/tests/pos/hylolib/Integers.scala @@ -0,0 +1,46 @@ +package hylo + +given Boolean is Value: + + extension (self: Boolean) + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + +given Int is Value: + + extension (self: Int) + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. 
+ self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + +given Int is Comparable: + + extension (self: Int) + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + +given Int is StringConvertible diff --git a/tests/pos/hylolib/IntegersTests.scala b/tests/pos/hylolib/IntegersTests.scala new file mode 100644 index 000000000000..74dedf30d83e --- /dev/null +++ b/tests/pos/hylolib/IntegersTests.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class IntegersTests extends munit.FunSuite: + + test("Int.hashInto"): + val x = Hasher.hash(42) + val y = Hasher.hash(42) + assertEquals(x, y) + + val z = Hasher.hash(1337) + assertNotEquals(x, z) + diff --git a/tests/pos/hylolib/Range.scala b/tests/pos/hylolib/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an upper bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is less than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is less than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib/Slice.scala b/tests/pos/hylolib/Slice.scala new file mode 100644 index 000000000000..d54f855b1041 --- /dev/null +++ b/tests/pos/hylolib/Slice.scala @@ -0,0 +1,63 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection]( + val base: Base, + val bounds: Range[Base.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Base.Position = + bounds.lowerBound + + def endPosition: Base.Position = + bounds.upperBound + + def positionAfter(p: Base.Position): Base.Position = + base.positionAfter(p) + + def at(p: Base.Position): Base.Element = + base.at(p) + +} + +given [C: Collection] => Slice[C] is Collection: + + type Element = C.Element + type Position = C.Position + + extension (self: Slice[C]) + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] + // This is actually unsafe. We have: + // self.bounds: Range(Slice[C].Base.Position) + // But the _value_ of Slice[C].Base is not necessarily this given, even + // though it is true that `type Slice[C].Base = C`. There might be multiple + // implementations of `Slice[C] is Collection` that define different `Position` + // types. So we cannot conclude that `Slice[C].Base.Position = this.Position`. 
+ // To make this safe, we'd need some form of coherence, where we ensure that + // there is only one way to implement `Slice is Collection`. + // + // As an alternative, we can make Slice dependent on the original Collection + // _instance_ instead of the original Collection _type_. This design is + // realized by the Slice2 definitions. It works without casts. + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + +given [C: Collection] => C.Slice2 is Collection: + type Element = C.Element + type Position = C.Position + + extension (self: C.Slice2) + + def startPosition = self.bounds.lowerBound + def endPosition = self.bounds.upperBound + def positionAfter(p: Position) = self.base.positionAfter(p) + def at(p: Position) = self.base.at(p) diff --git a/tests/pos/hylolib/StringConvertible.scala b/tests/pos/hylolib/StringConvertible.scala new file mode 100644 index 000000000000..cf901d9a3313 --- /dev/null +++ b/tests/pos/hylolib/StringConvertible.scala @@ -0,0 +1,9 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible: + type Self + + /** Returns a textual description of `self`. */ + extension (self: Self) + def description: String = self.toString diff --git a/tests/pos/hylolib/Test.scala b/tests/pos/hylolib/Test.scala new file mode 100644 index 000000000000..9e8d6181affd --- /dev/null +++ b/tests/pos/hylolib/Test.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +object munit: + open class FunSuite: + def test(name: String)(op: => Unit): Unit = op + def assertEquals[T](x: T, y: T) = assert(x == y) + def assertNotEquals[T](x: T, y: T) = assert(x != y) + +@main def Test = + CollectionTests() + AnyValueTests() + HyArrayTests() + IntegersTests() + println("done") diff --git a/tests/pos/i10929-new-syntax.scala b/tests/pos/i10929-new-syntax.scala new file mode 100644 index 000000000000..11c5e9313d4c --- /dev/null +++ b/tests/pos/i10929-new-syntax.scala @@ -0,0 +1,22 @@ +//> using options -language:experimental.modularity -source future +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: Rest.map(x.tail)(f)) + +def foo[T: TupleOf[Int]](xs: T): T.Mapped[Int] = T.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok diff --git a/tests/pos/i10929.scala b/tests/pos/i10929.scala new file mode 100644 index 000000000000..e916e4547e59 --- /dev/null +++ b/tests/pos/i10929.scala @@ -0,0 +1,21 @@ +//> using options -language:experimental.modularity -source future +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + 
(f(x.head) *: tup.map(x.tail)(f)) + +def foo[T](xs: T)(using tup: T TupleOf Int): tup.Mapped[Int] = tup.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok \ No newline at end of file diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala new file mode 100644 index 000000000000..c3c491a19dbe --- /dev/null +++ b/tests/pos/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] diff --git a/tests/pos/i3920.scala b/tests/pos/i3920.scala new file mode 100644 index 000000000000..6cd74187098f --- /dev/null +++ b/tests/pos/i3920.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Ordering { + type T + def compare(t1:T, t2: T): Int +} + +class SetFunctor(tracked val ord: Ordering) { + type Set = List[ord.T] + def empty: Set = Nil + + implicit class helper(s: Set) { + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def member(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + } +} + +object Test { + val orderInt = new Ordering { + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + } + + val IntSet = new SetFunctor(orderInt) + import IntSet.* + + def main(args: Array[String]) = { + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.member(7)) + assert(set.member(8)) + } +} \ No newline at end of file diff --git a/tests/pos/i3964.scala b/tests/pos/i3964.scala new file mode 100644 index 000000000000..42412b910899 --- /dev/null +++ b/tests/pos/i3964.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +object Test3: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) + class Lst8 extends Lst(8) + + val v8a: Vec { val size: 8 } = new Vec8 + val v8b: Vec { val size: 8 } = new Vec(8) {} + + val l8a: Lst { val size: 8 } = new Lst8 + val l8b: Lst { val size: 8 } = new Lst(8) {} + + class VecN(tracked val n: Int) extends Vec(n) + class Vec9 extends VecN(9) + val v9a = VecN(9) + val _: Vec { val size: 9 } = v9a + val v9b = Vec9() + val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i3964a/Defs_1.scala b/tests/pos/i3964a/Defs_1.scala new file mode 100644 index 000000000000..7dcc89f7003e --- /dev/null +++ b/tests/pos/i3964a/Defs_1.scala @@ -0,0 +1,18 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +package coll: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) \ No newline at end of file diff --git a/tests/pos/i3964a/Uses_2.scala b/tests/pos/i3964a/Uses_2.scala new 
file mode 100644 index 000000000000..9d1b6ebaa58b --- /dev/null +++ b/tests/pos/i3964a/Uses_2.scala @@ -0,0 +1,16 @@ +//> using options -source future -language:experimental.modularity +import coll.* +class Lst8 extends Lst(8) + +val v8a: Vec { val size: 8 } = new Vec8 +val v8b: Vec { val size: 8 } = new Vec(8) {} + +val l8a: Lst { val size: 8 } = new Lst8 +val l8b: Lst { val size: 8 } = new Lst(8) {} + +class VecN(tracked val n: Int) extends Vec(n) +class Vec9 extends VecN(9) +val v9a = VecN(9) +val _: Vec { val size: 9 } = v9a +val v9b = Vec9() +val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i7045.scala b/tests/pos/i7045.scala deleted file mode 100644 index e683654dd5c3..000000000000 --- a/tests/pos/i7045.scala +++ /dev/null @@ -1,9 +0,0 @@ -trait Bar { type Y } -trait Foo { type X } - -class Test: - given a1(using b: Bar): Foo = new Foo { type X = b.Y } - - given a2(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } - - given a3(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } diff --git a/tests/pos/ord-over-tracked.scala b/tests/pos/ord-over-tracked.scala new file mode 100644 index 000000000000..a9b4aba556e1 --- /dev/null +++ b/tests/pos/ord-over-tracked.scala @@ -0,0 +1,15 @@ +import language.experimental.modularity + +trait Ord[T]: + def lt(x: T, y: T): Boolean + +given Ord[Int] = ??? + +case class D(tracked val x: Int) +given [T <: D]: Ord[T] = (a, b) => a.x < b.x + +def mySort[T: Ord](x: Array[T]): Array[T] = ??? + +def test = + val arr = Array(D(1)) + val arr1 = mySort(arr) // error: no given instance of type Ord[D{val x: (1 : Int)}] \ No newline at end of file diff --git a/tests/pos/parent-refinement.scala b/tests/pos/parent-refinement.scala new file mode 100644 index 000000000000..eaa74228c5d6 --- /dev/null +++ b/tests/pos/parent-refinement.scala @@ -0,0 +1,48 @@ +//> using options -source future -language:experimental.modularity + +class A +class B extends A +class C extends B + +trait Id { type Value } +type IdOf[T] = Id { type Value = T } +trait X { type Value } + +case class Year(value: Int) extends IdOf[Int]: + val x: Value = 2 + +type Between[Lo, Hi] = X { type Value >: Lo <: Hi } + +class Foo() extends IdOf[B], Between[C, A]: + val x: Value = B() + +trait Bar extends IdOf[Int], (X { type Value = String }) + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = "" + +trait Gen: + type T + val x: T + +type IntInst = Gen: + type T = Int + val x: 0 + +trait IntInstTrait extends IntInst + +abstract class IntInstClass extends IntInstTrait, IntInst + +object obj1 extends IntInstTrait: + val x = 0 + +object obj2 extends IntInstClass: + val x = 0 + +def main = + val x: obj1.T = 2 - obj2.x + val y: obj2.T = 2 - obj1.x + + + diff --git a/tests/pos/parsercombinators-arrow.scala b/tests/pos/parsercombinators-arrow.scala new file mode 100644 index 000000000000..f8bec02067e5 --- /dev/null +++ b/tests/pos/parsercombinators-arrow.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given [C, E] => Apply[C, E] is Combinator: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + +given [A: Combinator, B: Combinator { type Context = A.Context }] + => Combine[A, B] is Combinator: + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked diff --git a/tests/pos/parsercombinators-ctx-bounds.scala b/tests/pos/parsercombinators-ctx-bounds.scala new file mode 100644 index 000000000000..d77abea5e539 --- /dev/null +++ b/tests/pos/parsercombinators-ctx-bounds.scala @@ -0,0 +1,49 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: [X] =>> Combinator[X] { type Context = A.Context }] + : Combinator[Combine[A, B]] with + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-expanded.scala b/tests/pos/parsercombinators-expanded.scala new file mode 100644 index 000000000000..cf8137bfe8eb --- /dev/null +++ b/tests/pos/parsercombinators-expanded.scala @@ -0,0 +1,64 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +object test: + + class apply[C, E] extends Combinator[Apply[C, E]]: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + + def apply[C, E]: apply[C, E] = new apply[C, E] + + class combine[A, B]( + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context} + ) extends Combinator[Combine[A, B]]: + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + + def combine[A, B]( + _f: Combinator[A], + _s: Combinator[B] { type Context = _f.Context} + ) = new combine[A, B](_f, _s) + // cast is needed since the type of new combine[A, B](_f, _s) + // drops the required refinement. + + extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + + @main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val c = combine( + apply[mutable.ListBuffer[Int], Int], + apply[mutable.ListBuffer[Int], Int] + ) + val r = c.parse(m)(stream) // was type mismatch, now OK + val rc: Option[(Int, Int)] = r + } diff --git a/tests/pos/parsercombinators-givens-2.scala b/tests/pos/parsercombinators-givens-2.scala new file mode 100644 index 000000000000..8349d69a30af --- /dev/null +++ b/tests/pos/parsercombinators-givens-2.scala @@ -0,0 +1,52 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B, C](using + f: Combinator[A] { type Context = C }, + s: Combinator[B] { type Context = C } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? + } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // works, but Element type is not resolved correctly +} diff --git a/tests/pos/parsercombinators-givens.scala b/tests/pos/parsercombinators-givens.scala new file mode 100644 index 000000000000..5b5588c93840 --- /dev/null +++ b/tests/pos/parsercombinators-givens.scala @@ -0,0 +1,54 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. 
+trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B](using + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? + } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-new-syntax.scala b/tests/pos/parsercombinators-new-syntax.scala new file mode 100644 index 000000000000..f984972b915d --- /dev/null +++ b/tests/pos/parsercombinators-new-syntax.scala @@ -0,0 +1,45 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + type Self + type Input + type Result + + extension (self: Self) + /// Parses and returns an element from input `in`. + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](first: A, second: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for x <- self.first.parse(in); y <- self.second.parse(in) yield (x, y) + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // was error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Input` + val rc: Option[(Int, Int)] = r + diff --git a/tests/pos/parsercombinators-this.scala b/tests/pos/parsercombinators-this.scala new file mode 100644 index 000000000000..70b423985400 --- /dev/null +++ b/tests/pos/parsercombinators-this.scala @@ -0,0 +1,53 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. 
+ type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator with { + type Self = Apply[C, E] + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: Combinator { type Context = A.Context }] + : Combinator with + type Self = Combine[A, B] + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/precise-ctx-bound.scala b/tests/pos/precise-ctx-bound.scala new file mode 100644 index 000000000000..3f17a5b4a54e --- /dev/null +++ b/tests/pos/precise-ctx-bound.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T: Precise](x: T): Wrap[T] = Wrap(x) + def l = "hello".length + val x1 = Wrap(l) + val _: Wrap[Int] = x1 + + def f2[T](x: T)(using Precise { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Precise](x: T): Wrap[T] = Wrap(x) + val x3 = f3(identity(1)) + val _: Wrap[1] = x3 + val x3a = f3(1 + 2) + val _: Wrap[3] = x3a + + def f4[T](x: T)(using T is Precise): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + val x4a = f4(1 + 2) + val _: Wrap[3] = x4a + val y4 = f4(if ??? then 1 else 2) + val _: Wrap[1 | 2] = y4 + val z4 = f4(if ??? then B() else C()) + val _: Wrap[B | C] = z4 + trait A + class B extends A + class C extends A + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C2[T](x: T)(using T is Precise): + def fld: T = x + val y2 = C2(identity("hi")) + val _: "hi" = y2.fld + + class C3[T: Precise](x: T): + def fld: T = x + val y3 = C3("hi") + val _: "hi" = y3.fld diff --git a/tests/pos/precise-indexof.scala b/tests/pos/precise-indexof.scala new file mode 100644 index 000000000000..af1e6c5b504b --- /dev/null +++ b/tests/pos/precise-indexof.scala @@ -0,0 +1,46 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +import compiletime.ops.int.* + +/** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` does not occur in `X` + */ +type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case x *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + +extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first element in the type `X` of `x` + * that matches type `Y`. 
*/ + inline def indexOfType[Y] = constValue[IndexOf[X, Y]] + + inline def indexOf[Y: Precise](y: Y) = constValue[IndexOf[X, Y]] + +// Note: without the Precise, the index calculation would go wrong. For instance, +// (1, 2, "hello", true).indexOf(2) would be 0, the same as (1, 2, "hello", true).indexOfType[Int] +// (1, 2, "hello", true).indexOf("foo") would be 2, the same as (1, 2, "hello", true).indexOfType[String] +// But we could alternatively pick Singleton + +@main def Test = + val t: (1, 2, "hello", true) = (1, 2, "hello", true) + val x1: 0 = t.indexOfType[1] + val x2: 1 = t.indexOfType[2] + val x3: 2 = t.indexOfType["hello"] + val x4: 3 = t.indexOfType[true] + val x5: 4 = t.indexOfType[77] + val x6: 0 = t.indexOfType[Int] + val x7: 2 = t.indexOfType[String] + val x8: 4 = t.indexOfType[Double] + + val y1: 0 = t.indexOf(1) + val y2: 1 = t.indexOf(2) + val y3: 2 = t.indexOf("hello") + val y4: 3 = t.indexOf(true) + val y5: 4 = t.indexOf(identity(77)) + val y6: 0 = t.indexOf(identity(1)) + val y7: 4 = t.indexOf("foo") + + diff --git a/tests/pos/sets-tc.scala b/tests/pos/sets-tc.scala new file mode 100644 index 000000000000..86349bf6a405 --- /dev/null +++ b/tests/pos/sets-tc.scala @@ -0,0 +1,46 @@ +import language.experimental.modularity + +// First version: higher-kinded self type +object v1: + trait Set: + type Self[A] + def empty[A]: Self[A] + def union[A](self: Self[A], other: Self[A]): Self[A] + + case class ListSet[A](elems: List[A]) + + given ListSet is Set: + def empty[A]: ListSet[A] = ListSet(Nil) + + def union[A](self: ListSet[A], other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S[_]: Set](xs: List[S[A]]): S[A] = + xs.foldLeft(S.empty)(S.union) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + + // Second version: parameterized type class +object v2: + trait Set[A]: + type Self + def empty: Self + extension (s: Self) def union (other: Self): Self + + case class ListSet[A](elems: List[A]) + + given [A] => ListSet[A] is Set[A]: + def empty: ListSet[A] = ListSet(Nil) + + extension (self: ListSet[A]) def union(other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S: Set[A]](xs: List[S]): S = + xs.foldLeft(S.empty)(_ `union` _) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + diff --git a/tests/pos/singleton-ctx-bound.scala b/tests/pos/singleton-ctx-bound.scala new file mode 100644 index 000000000000..c6b0d2fb823c --- /dev/null +++ b/tests/pos/singleton-ctx-bound.scala @@ -0,0 +1,47 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T <: Singleton](x: T): Wrap[T] = Wrap(x) + val x1 = f1(1) + val _: Wrap[1] = x1 + + def f2[T](x: T)(using Singleton { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Singleton](x: T): Wrap[T] = Wrap(x) + val x3 = f3(1) + val _: Wrap[1] = x3 + + def f4[T](x: T)(using T is Singleton): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C1[T <: Singleton](x: T): + def fld: T = x + val y1 = C1("hi") + val _: "hi" = y1.fld + + class C2[T](x: T)(using T is Singleton): + def fld: T = x + val y2 = C2("hi") + val _: "hi" = y2.fld + + class C3[T: Singleton](x: T): + def fld: T = x + val y3 = 
C3("hi") + val _: "hi" = y3.fld + + + diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala new file mode 100644 index 000000000000..5e4551b226b7 --- /dev/null +++ b/tests/pos/typeclass-aggregates.scala @@ -0,0 +1,47 @@ +//> using options -source future -language:experimental.modularity +trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + + trait OrdProxy extends Ord: + export Ord.this.* + +trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait SemiGroupProxy extends SemiGroup: + export SemiGroup.this.* + +trait Monoid extends SemiGroup: + def unit: Self + + trait MonoidProxy extends Monoid: + export Monoid.this.* + +def ordWithMonoid(ord: Ord, monoid: Monoid{ type Self = ord.Self }): Ord & Monoid = + new ord.OrdProxy with monoid.MonoidProxy {} + +trait OrdWithMonoid extends Ord, Monoid + +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type Self = ord.Self }) = //: OrdWithMonoid { type Self = ord.Self} = + new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} + +given intOrd: (Ord { type Self = Int }) = ??? +given intMonoid: (Monoid { type Self = Int }) = ??? + +//given (using ord: Ord, monoid: Monoid{ type Self = ord.Self }): (Ord & Monoid { type Self = ord.Self}) = +// ordWithMonoid2(ord, monoid) + +val x = summon[Ord & Monoid { type Self = Int}] +val y: Int = ??? : x.Self + +// given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = +// new ord.OrdProxy with monoid.MonoidProxy {} + +given [A](using ord: Ord { type Self = A }, monoid: Monoid { type Self = A}): ((Ord & Monoid) { type Self = A}) = + new ord.OrdProxy with monoid.MonoidProxy {} + diff --git a/tests/pos/typeclasses-arrow.scala b/tests/pos/typeclasses-arrow.scala new file mode 100644 index 000000000000..379365ffa1c5 --- /dev/null +++ b/tests/pos/typeclasses-arrow.scala @@ -0,0 +1,140 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Int is Ord as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + 
given [Ctx] => Reader[Ctx] is Monad as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal: + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). 
+ - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-arrow0.scala b/tests/pos/typeclasses-arrow0.scala new file mode 100644 index 000000000000..22d84fe6478d --- /dev/null +++ b/tests/pos/typeclasses-arrow0.scala @@ -0,0 +1,136 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord[A]: + extension (x: A) + def compareTo(y: A): Int + def < (y: A): Boolean = compareTo(y) < 0 + def > (y: A): Boolean = compareTo(y) > 0 + def <= (y: A): Boolean = compareTo(y) <= 0 + def >= (y: A): Boolean = compareTo(y) >= 0 + def max(y: A): A = if x < y then y else x + + trait Show[A]: + extension (x: A) def show: String + + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A + + trait Functor[F[_]]: + extension [A](x: F[A]) def map[B](f: A => B): F[B] + + trait Monad[F[_]] extends Functor[F]: + def pure[A](x: A): F[A] + extension [A](x: F[A]) + def flatMap[B](f: A => F[B]): F[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Ord[Int] as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => Ord[List[T]]: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given Monad[List] as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Monad[Reader[Ctx]] as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => Ord[T] as descending: + extension (x: T) def compareTo(y: T) = summon[Ord[T]].compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal[Self]: + + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. 
+ extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Animal[Sheep]: + def apply(name: String) = Sheep(name) + extension (self: Sheep) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala new file mode 100644 index 000000000000..33ccb8d9d653 --- /dev/null +++ b/tests/pos/typeclasses-this.scala @@ -0,0 +1,141 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given intOrd: (Int is Ord) with + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + +// given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with + given [T: Ord]: (List[T] is Ord) with + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given listMonad: (List is Monad) with + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given descending[T: Ord]: (T is Ord) with + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using 
descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal with + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 07fe5a31ce5d..d0315a318310 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -1,66 +1,45 @@ -class Common: +//> using options -source future -language:experimental.modularity - // this should go in Predef - infix type at [A <: { type This}, B] = A { type This = B } +class Common: trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait Functor: - type This[A] - extension [A](x: This[A]) def map[B](f: A => B): This[B] + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] trait Monad extends Functor: - def pure[A](x: A): This[A] - extension [A](x: This[A]) - def flatMap[B](f: A => This[B]): This[B] + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) + end Common object Instances extends Common: -/* - instance Int: Ord as intOrd with + given intOrd: (Int is Ord) with + type Self = Int extension (x: Int) def compareTo(y: Int) = if x < y then -1 else if x > y then +1 else 0 -*/ - given intOrd: Ord with - type This = Int - extension (x: Int) - def compareTo(y: Int) = - if x < y then -1 - else if x > y then +1 - else 0 -/* - instance List[T: Ord]: Ord as listOrd with - extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs1, y :: ys1) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs1.compareTo(ys1) -*/ - // Proposed short syntax: - // given listOrd[T: Ord as ord]: Ord at T with - given listOrd[T](using ord: Ord { type This = T}): Ord with - type This = List[T] + given listOrd[T](using ord: T is Ord): (List[T] is Ord) with extension (xs: 
List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -70,32 +49,18 @@ object Instances extends Common: if (fst != 0) fst else xs1.compareTo(ys1) end listOrd -/* - instance List: Monad as listMonad with + given listMonad: (List is Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) -*/ - given listMonad: Monad with - type This[A] = List[A] - extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = - xs.flatMap(f) - def pure[A](x: A): List[A] = - List(x) -/* - type Reader[Ctx] = X =>> Ctx => X - instance Reader[Ctx: _]: Monad as readerMonad with - extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = - ctx => f(r(ctx))(ctx) - def pure[A](x: A): Ctx => A = - ctx => x -*/ + type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Monad with - type This[X] = Ctx => X + //given [Ctx] => Reader[Ctx] is Monad as readerMonad: + + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -110,29 +75,17 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - //Proposed short syntax: - //extension [M: Monad as m, A](xss: M[M[A]]) - // def flatten: M[A] = - // xs.flatMap(identity) - - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) - def flatten: m.This[A] = + extension [M, A](using m: Monad)(xss: m.Self[m.Self[A]]) + def flatten: m.Self[A] = xss.flatMap(identity) - // Proposed short syntax: - //def maximum[T: Ord](xs: List[T]: T = - def maximum[T](xs: List[T])(using Ord at T): T = + def maximum[T](xs: List[T])(using T is Ord): T = xs.reduceLeft((x, y) => if (x < y) y else x) - // Proposed short syntax: - // def descending[T: Ord as asc]: Ord at T = new Ord: - def descending[T](using asc: Ord at T): Ord at T = new Ord: - type This = T + def descending[T](using asc: T is Ord): T is Ord = new: extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x) - // Proposed short syntax: - // def minimum[T: Ord](xs: List[T]) = - def minimum[T](xs: List[T])(using Ord at T) = + def minimum[T](xs: List[T])(using T is Ord) = maximum(xs)(using descending) def test(): Unit = @@ -148,12 +101,12 @@ object Instances extends Common: // wc Scala: 30 115 853 // wc Rust : 57 193 1466 trait Animal: - type This - // Associated function signature; `This` refers to the implementor type. - def apply(name: String): This + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self // Method signatures; these will return a string. - extension (self: This) + extension (self: Self) def name: String def noise: String def talk(): Unit = println(s"$name, $noise") @@ -171,7 +124,7 @@ class Sheep(val name: String): /* instance Sheep: Animal with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = @@ -179,10 +132,9 @@ instance Sheep: Animal with */ // Implement the `Animal` trait for `Sheep`. -given Animal with - type This = Sheep +given (Sheep is Animal) with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" 
     override def talk(): Unit =
diff --git a/tests/pos/unapplied-types.scala b/tests/pos/unapplied-types.scala
new file mode 100644
index 000000000000..604e63deb8ad
--- /dev/null
+++ b/tests/pos/unapplied-types.scala
@@ -0,0 +1,7 @@
+trait T {
+  type L[X] = List[X]
+  type T1 <: L // was error: takes type parameters
+  type T2 = L // was error: takes type parameters
+  type T3 = List // was error: takes type parameters
+  type T4 <: List // was error: takes type parameters
+}
diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala
index 48ff5407ac87..7079c7320ba0 100644
--- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala
+++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala
@@ -79,6 +79,12 @@ val experimentalDefinitionInLibrary = Set(
   "scala.NamedTuple$",
   "scala.NamedTupleDecomposition",
   "scala.NamedTupleDecomposition$",
+
+  // New feature: modularity
+  "scala.Precise",
+  "scala.annotation.internal.WitnessNames",
+  "scala.compiletime.package$package$.deferred",
+  "scala.runtime.stdLibPatches.Predef$.is",
 )
diff --git a/tests/run/for-desugar-strawman.scala b/tests/run/for-desugar-strawman.scala
new file mode 100644
index 000000000000..a92b19b9150a
--- /dev/null
+++ b/tests/run/for-desugar-strawman.scala
@@ -0,0 +1,96 @@
+
+@main def Test =
+  println:
+    for
+      x <- List(1, 2, 3)
+      y = x + x
+      if x >= 2
+      i <- List.range(0, y)
+      z = i * i
+      if z % 2 == 0
+    yield
+      i * x
+
+  println:
+    val xs = List(1, 2, 3)
+    xs.flatMapDefined: x =>
+      val y = x + x
+      xs.applyFilter(x >= 2):
+        val is = List.range(0, y)
+        is.mapDefined: i =>
+          val z = i * i
+          is.applyFilter(z % 2 == 0):
+            i * x
+
+extension [A](as: List[A])
+
+  def applyFilter[B](p: => Boolean)(b: => B) =
+    if p then Some(b) else None
+
+  def flatMapDefined[B](f: A => Option[IterableOnce[B]]): List[B] =
+    as.flatMap: x =>
+      f(x).getOrElse(Nil)
+
+  def mapDefined[B](f: A => Option[B]): List[B] =
+    as.flatMap(f)
+
+object UNDEFINED
+
+extension [A](as: Vector[A])
+
+  def applyFilter[B](p: => Boolean)(b: => B) =
+    if p then b else UNDEFINED
+
+  def flatMapDefined[B](f: A => IterableOnce[B] | UNDEFINED.type): Vector[B] =
+    as.flatMap: x =>
+      f(x) match
+        case UNDEFINED => Nil
+        case y: IterableOnce[B] => y
+
+  def mapDefined[B](f: A => B | UNDEFINED.type): Vector[B] =
+    as.flatMap: x =>
+      f(x) match
+        case UNDEFINED => Nil
+        case y: B => y :: Nil
+
+/*
+F ::= val x = E; F
+      x <- E; G
+G ::= []
+      val x = E; G
+      if E; G
+      x <- E; G
+
+Translation scheme:
+
+{ for F yield E }c  where c = undefined
+{ for G yield E }c  where c is a reference to the generator preceding the G sequence
+
+{ for [] yield E }c = E
+{ for p = Ep; G yield E }c = val p = Ep; { for G yield E }c
+{ for if Ep; G yield E}c = c.applyFilter(Ep)({ for G yield E }c)
+{ for p <- Ep; G yield E }c = val c1 = Ep; c1.BIND{ case p => { for G yield E }c1 } (c1 fresh)
+
+  where BIND = flatMapDefined if isGen(G), isFilter(G)
+             = mapDefined     if !isGen(G), isFilter(G)
+             = flatMap        if isGen(G), !isFilter(G)
+             = map            if !isGen(G), !isFilter(G)
+
+{ for case p <- Ep; G yield E }c = { for $x <- Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E }c
+{ for case p = Ep; G yield E }c = { for $x = Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E}c
+
+isFilter(if E; S)
+isFilter(val x = E; S) if isFilter(S)
+
+isGen(x <- E; S)
+isGen(val x = E; S) if isGen(S)
+isGen(if E; S) if isGen(S)
+
+*/
+
+val foo = 1
+
+def main2 =
+  foo
+  ???
+  ??? match { case _ => 0 }
\ No newline at end of file
diff --git a/tests/run/given-disambiguation.scala b/tests/run/given-disambiguation.scala
new file mode 100644
index 000000000000..637c02a5621f
--- /dev/null
+++ b/tests/run/given-disambiguation.scala
@@ -0,0 +1,58 @@
+import language.experimental.modularity
+import language.future
+
+trait M:
+  type Self
+  extension (x: Self) def combine (y: Self): String
+  def unit: Self
+
+trait Num:
+  type Self
+  def zero: Self
+
+trait A extends M
+trait B extends M
+
+def f[X: {M, A, B}](x: X) =
+  summon[X is M]
+  x.combine(x)
+
+trait AA:
+  type XX: {M, A, B}
+  val x = XX.unit
+  val A: String = "hello"
+
+trait AAA:
+  type X: M
+trait BBB:
+  type X: Num
+class CCC[X1: {M, Num}] extends AAA, BBB:
+  type X = X1
+  X.zero
+  X.unit
+
+@main def Test =
+  class C
+
+  given C is M:
+    extension (x: Self) def combine (y: Self) = "M"
+    def unit = C()
+
+  given C is A:
+    extension (x: Self) def combine (y: Self) = "A"
+    def unit = C()
+
+  given C is B:
+    extension (x: Self) def combine (y: Self) = "B"
+    def unit = C()
+
+  assert(f(C()) == "M")
+
+  class CC extends AA:
+    type XX = C
+    assert(A.length == 5)
+    assert(A.toString == "hello")
+
+  CC()
+
+
diff --git a/tests/run/i15840.scala b/tests/run/i15840.scala
new file mode 100644
index 000000000000..0f238e2e7148
--- /dev/null
+++ b/tests/run/i15840.scala
@@ -0,0 +1,27 @@
+//> using options -language:experimental.modularity -source future
+
+trait Nat:
+  type N <: Nat
+
+class _0 extends Nat:
+  type N = _0
+
+class NatOps[N <: Nat](tracked val n: N):
+  def toInt(using toIntN: ToInt[n.N]): Int = toIntN()
+
+// works
+def toInt[N <: Nat](n: N)(using toIntN: ToInt[n.N]) = toIntN()
+
+sealed abstract class ToInt[N <: Nat]:
+  def apply(): Int
+
+object ToInt:
+  given ToInt[_0] {
+    def apply() = 0
+  }
+
+@main def Test() =
+  assert(toInt(new _0) == 0)
+  assert(NatOps[_0](new _0).toInt == 0)
+  assert:
+    NatOps(new _0).toInt == 0 // did not work
diff --git a/tests/run/i3920.scala b/tests/run/i3920.scala
new file mode 100644
index 000000000000..c66fd8908976
--- /dev/null
+++ b/tests/run/i3920.scala
@@ -0,0 +1,26 @@
+//> using options -source future -language:experimental.modularity
+trait Ordering:
+  type T
+  def compare(t1:T, t2: T): Int
+
+class SetFunctor(tracked val ord: Ordering):
+  type Set = List[ord.T]
+
+  def empty: Set = Nil
+
+  extension (s: Set)
+    def add(x: ord.T): Set = x :: remove(x)
+    def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0)
+    def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0)
+
+object intOrdering extends Ordering:
+  type T = Int
+  def compare(t1: T, t2: T): Int = t1 - t2
+
+val IntSet = new SetFunctor(intOrdering)
+
+@main def Test =
+  import IntSet.*
+  val set = IntSet.empty.add(6).add(8).add(23)
+  assert(!set.contains(7))
+  assert(set.contains(8))
\ No newline at end of file
diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala
index f34c657b2f6d..4ec723ad584e 100644
--- a/tests/semanticdb/expect/Methods.expect.scala
+++ b/tests/semanticdb/expect/Methods.expect.scala
@@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] {
   def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/
   def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/
   def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/
-  def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/: Ordering/*->scala::math::Ordering#*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/
+  def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/
   def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/
   class `m9()./*<-example::Methods#`m9().`#*/`
   def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/
diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala
index a4419aa8bd82..4d797ce2b856 100644
--- a/tests/semanticdb/expect/Synthetic.expect.scala
+++ b/tests/semanticdb/expect/Synthetic.expect.scala
@@ -30,7 +30,7 @@ class Synthetic/*<-example::Synthetic#*/ {
     null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/](2)
   }

-  class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*//*<-example::Synthetic#J#evidence$1.*/: Manifest/*->scala::Predef.Manifest#*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] }
+  class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*/: /*<-example::Synthetic#J#evidence$1.*/Manifest/*->scala::Predef.Manifest#*//*->example::Synthetic#J#[T]*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] }

   class F/*<-example::Synthetic#F#*/
   implicit val ordering/*<-example::Synthetic#ordering.*/: Ordering/*->scala::package.Ordering#*/[F/*->example::Synthetic#F#*/] = ???/*->scala::Predef.`???`().*/
diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect
index 2120cc633da8..84c3e7c6a110 100644
--- a/tests/semanticdb/metac.expect
+++ b/tests/semanticdb/metac.expect
@@ -2732,8 +2732,8 @@ Occurrences:
 [16:29..16:32): ??? -> scala/Predef.`???`().
 [17:6..17:8): m7 <- example/Methods#m7().
 [17:9..17:10): U <- example/Methods#m7().[U]
-[17:10..17:10): <- example/Methods#m7().(evidence$1)
-[17:12..17:20): Ordering -> scala/math/Ordering#
+[17:12..17:20): Ordering -> example/Methods#m7().[U]
+[17:12..17:12): <- example/Methods#m7().(evidence$1)
 [17:22..17:23): c <- example/Methods#m7().(c)
 [17:25..17:32): Methods -> example/Methods#
 [17:33..17:34): T -> example/Methods#[T]
@@ -3533,7 +3533,7 @@ Uri => Synthetic.scala
 Text => empty
 Language => Scala
 Symbols => 52 entries
-Occurrences => 136 entries
+Occurrences => 137 entries
 Synthetics => 39 entries

 Symbols:
@@ -3659,8 +3659,9 @@ Occurrences:
 [32:8..32:9): J <- example/Synthetic#J#
 [32:9..32:9): <- example/Synthetic#J#``().
 [32:10..32:11): T <- example/Synthetic#J#[T]
-[32:11..32:11): <- example/Synthetic#J#evidence$1.
+[32:13..32:13): <- example/Synthetic#J#evidence$1.
 [32:13..32:21): Manifest -> scala/Predef.Manifest#
+[32:13..32:21): Manifest -> example/Synthetic#J#[T]
 [32:29..32:32): arr <- example/Synthetic#J#arr.
 [32:35..32:40): Array -> scala/Array.
 [32:41..32:46): empty -> scala/Array.empty().
diff --git a/tests/warn/context-bounds-migration.scala b/tests/warn/context-bounds-migration.scala
new file mode 100644
index 000000000000..cdd3eca62b5c
--- /dev/null
+++ b/tests/warn/context-bounds-migration.scala
@@ -0,0 +1,9 @@
+
+class C[T]
+def foo[X: C] = ()
+
+given [T]: C[T] = C[T]()
+
+def Test =
+  foo(C[Int]()) // warning
+  foo(using C[Int]()) // ok
diff --git a/tests/warn/i16723.check b/tests/warn/i16723.check
index ed8e55502a80..6d55fa0a89d2 100644
--- a/tests/warn/i16723.check
+++ b/tests/warn/i16723.check
@@ -1,4 +1,4 @@
--- [E195] Potential Issue Warning: tests/warn/i16723.scala:3:2 ---------------------------------------------------------
+-- [E197] Potential Issue Warning: tests/warn/i16723.scala:3:2 ---------------------------------------------------------
 3 |  new Object {} // warn
   |  ^
   |  New anonymous class definition will be duplicated at each inline site
diff --git a/tests/warn/i16723a.check b/tests/warn/i16723a.check
index ba4794fac23e..ace11c5af1f9 100644
--- a/tests/warn/i16723a.check
+++ b/tests/warn/i16723a.check
@@ -1,4 +1,4 @@
--- [E195] Potential Issue Warning: tests/warn/i16723a.scala:5:38 -------------------------------------------------------
+-- [E197] Potential Issue Warning: tests/warn/i16723a.scala:5:38 -------------------------------------------------------
 5 |inline given Converter[Int, String] = new Converter { // warn
   |                                      ^
   |                                      New anonymous class definition will be duplicated at each inline site
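
// Illustrative sketch (not part of the patch): the `Self`-member type-class
// shape that tests such as tests/run/given-disambiguation.scala exercise under
// language.experimental.modularity. The trait `Show`, its `show` method, and
// `DescribeDemo` are hypothetical names chosen only for this example.
import language.experimental.modularity
import language.future

trait Show:
  type Self                                   // the implementing type
  extension (x: Self) def show: String

given Int is Show:                            // read as "Int is (an instance of) Show"
  extension (x: Self) def show = s"Int($x)"

// With modularity enabled, the context bound `T: Show` desugars to a
// `T is Show` evidence parameter, so `x.show` resolves through it.
def describe[T: Show](x: T): String = x.show

@main def DescribeDemo = assert(describe(42) == "Int(42)")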