Merge pull request #4938 from dotty-staging/add-tuples
Add support for generic tuples
nicolasstucki committed Sep 6, 2018
2 parents c6687a3 + 42a5a9d commit 00e396c
Showing 40 changed files with 824 additions and 157 deletions.
27 changes: 17 additions & 10 deletions compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -815,6 +815,23 @@ object desugar {
makeOp(right, left, Position(op.pos.start, right.pos.end))
}

/** Translate tuple expressions of arity <= 22
*
* () ==> ()
* (t) ==> t
* (t1, ..., tN) ==> TupleN(t1, ..., tN)
*/
def smallTuple(tree: Tuple)(implicit ctx: Context): Tree = {
val ts = tree.trees
val arity = ts.length
assert(arity <= Definitions.MaxTupleArity)
def tupleTypeRef = defn.TupleType(arity)
if (arity == 1) ts.head
else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
else if (arity == 0) unitLiteral
else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts)
}

/** Make closure corresponding to function.
* params => body
* ==>
@@ -1141,16 +1158,6 @@ object desugar {
case PrefixOp(op, t) =>
val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme
Select(t, nspace.UNARY_PREFIX ++ op.name)
case Tuple(ts) =>
val arity = ts.length
def tupleTypeRef = defn.TupleType(arity)
if (arity > Definitions.MaxTupleArity) {
ctx.error(TupleTooLong(ts), tree.pos)
unitLiteral
} else if (arity == 1) ts.head
else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
else if (arity == 0) unitLiteral
else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts)
case WhileDo(cond, body) =>
// { <label> def while$(): Unit = if (cond) { body; while$() } ; while$() }
val call = Apply(Ident(nme.WHILE_PREFIX), Nil).withPos(tree.pos)
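For orientation, a small illustrative sketch of what the new `smallTuple` desugaring above produces; the right-hand sides are schematic, not literal compiler output, and `Tuple2` stands for whichever `TupleN` matches the arity.

// Schematic desugarings for tuple arities <= 22, following smallTuple above:
//   ()         ==>  ()                    arity 0: the unit literal
//   (e)        ==>  e                     arity 1: just the element itself
//   (e1, e2)   ==>  Tuple2(e1, e2)        term position: apply on the TupleN companion
//   (T1, T2)   ==>  Tuple2[T1, T2]        type position: an applied type tree
val p: (Int, String) = (1, "abc")          // elaborates via Tuple2[Int, String] and Tuple2(1, "abc")
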
13 changes: 13 additions & 0 deletions compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -716,6 +716,19 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
Nil
}

/** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN`
* otherwise the empty list.
*/
def tupleArgs(tree: Tree)(implicit ctx: Context): List[Tree] = tree match {
case Block(Nil, expr) => tupleArgs(expr)
case Inlined(_, Nil, expr) => tupleArgs(expr)
case Apply(fn, args)
if fn.symbol.name == nme.apply &&
fn.symbol.owner.is(Module) &&
defn.isTupleClass(fn.symbol.owner.companionClass) => args
case _ => Nil
}

/** The qualifier part of a Select or Ident.
* For an Ident, this is the `This` of the current class.
*/
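The new `tupleArgs` extractor looks through `Block` and `Inlined` wrappers and recognizes applications of a `TupleN` companion's `apply`. As a rough value-level analogue on plain runtime values rather than compiler trees (hypothetical, for illustration only; `tupleElems` is not part of this diff):

// Hypothetical runtime analogue of tupleArgs: recover a tuple's elements, or Nil otherwise.
def tupleElems(x: Any): List[Any] = x match {
  case p: Product if p.productPrefix.startsWith("Tuple") => p.productIterator.toList
  case _ => Nil
}

tupleElems((1, "a", true))   // List(1, a, true)
tupleElems("not a tuple")    // Nil
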
56 changes: 24 additions & 32 deletions compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -251,14 +251,13 @@ trait ConstraintHandling {
}
}

/** The instance type of `param` in the current constraint (which contains `param`).
* If `fromBelow` is true, the instance type is the lub of the parameter's
* lower bounds; otherwise it is the glb of its upper bounds. However,
* a lower bound instantiation can be a singleton type only if the upper bound
* is also a singleton type.
/** Widen inferred type `tp` with upper bound `bound`, according to the following rules:
* 1. If `tp` is a singleton type, yet `bound` is not a singleton type, nor a subtype
* of `scala.Singleton`, widen `tp`.
* 2. If `tp` is a union type, yet upper bound is not a union type,
* approximate the union type from above by an intersection of all common base types.
*/
def instanceType(param: TypeParamRef, fromBelow: Boolean): Type = {
def upperBound = constraint.fullUpperBound(param)
def widenInferred(tp: Type, bound: Type): Type = {
def isMultiSingleton(tp: Type): Boolean = tp.stripAnnots match {
case tp: SingletonType => true
case AndType(tp1, tp2) => isMultiSingleton(tp1) | isMultiSingleton(tp2)
@@ -268,39 +267,32 @@
case tp: TypeParamRef => isMultiSingleton(bounds(tp).hi)
case _ => false
}
def isFullyDefined(tp: Type): Boolean = tp match {
case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
case tp: TypeProxy => isFullyDefined(tp.underlying)
case tp: AndType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
case tp: OrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
case _ => true
}
def isOrType(tp: Type): Boolean = tp.dealias match {
case tp: OrType => true
case tp: RefinedOrRecType => isOrType(tp.parent)
case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
case _ => false
}
def widenOr(tp: Type) =
if (isOrType(tp) && !isOrType(bound)) tp.widenUnion
else tp
def widenSingle(tp: Type) =
if (isMultiSingleton(tp) && !isMultiSingleton(bound) &&
!isSubTypeWhenFrozen(bound, defn.SingletonType)) tp.widen
else tp
widenOr(widenSingle(tp))
}

// First, solve the constraint.
var inst = approximation(param, fromBelow).simplified

// Then, approximate by (1.) - (3.) and simplify as follows.
// 1. If instance is from below and is a singleton type, yet upper bound is
// not a singleton type or a subtype of `scala.Singleton`, widen the
// instance.
if (fromBelow && isMultiSingleton(inst) && !isMultiSingleton(upperBound)
&& !isSubTypeWhenFrozen(upperBound, defn.SingletonType))
inst = inst.widen

// 2. If instance is from below and is a fully-defined union type, yet upper bound
// is not a union type, approximate the union type from above by an intersection
// of all common base types.
if (fromBelow && isOrType(inst) && !isOrType(upperBound))
inst = inst.widenUnion

inst
/** The instance type of `param` in the current constraint (which contains `param`).
* If `fromBelow` is true, the instance type is the lub of the parameter's
* lower bounds; otherwise it is the glb of its upper bounds. However,
* a lower bound instantiation can be a singleton type only if the upper bound
* is also a singleton type.
*/
def instanceType(param: TypeParamRef, fromBelow: Boolean): Type = {
val inst = approximation(param, fromBelow).simplified
if (fromBelow) widenInferred(inst, constraint.fullUpperBound(param)) else inst
}

/** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have
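The refactoring above mostly moves code: the two approximation steps that `instanceType` used to apply inline are factored into a reusable `widenInferred`. A hedged sketch of the user-visible effect of the two rules (ordinary Scala; the helper name `keep` is made up, and the exact inferred types depend on the surrounding constraint and compiler version):

// Rule 1: a singleton instance from below is widened unless the upper bound is a
// singleton type or a subtype of scala.Singleton.
val a = 42                                    // inferred as Int, not the literal type 42
def keep[T <: Singleton](x: T): T = x
val b = keep(42)                              // bound is Singleton, so T may stay the literal type 42
// Rule 2: a union instance from below is approximated from above by an intersection
// of common base types when the upper bound is not a union.
val c = if (math.random < 0.5) 1 else "one"   // widened towards a common base type (roughly Any here)
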
47 changes: 44 additions & 3 deletions compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -706,6 +706,18 @@ class Definitions {

lazy val XMLTopScopeModuleRef = ctx.requiredModuleRef("scala.xml.TopScope")

lazy val TupleTypeRef = ctx.requiredClassRef("scala.Tuple")
def TupleClass(implicit ctx: Context) = TupleTypeRef.symbol.asClass

lazy val PairType = ctx.requiredClassRef("scala.*:")
def PairClass(implicit ctx: Context) = PairType.symbol.asClass
lazy val TupleXXLType = ctx.requiredClassRef("scala.TupleXXL")
def TupleXXLClass(implicit ctx: Context) = TupleXXLType.symbol.asClass
def TupleXXLModule(implicit ctx: Context) = TupleXXLClass.companionModule

def TupleXXL_apply(implicit ctx: Context) =
TupleXXLModule.info.member(nme.apply).requiredSymbol(_.info.isVarArgsMethod)

// Annotation base classes
lazy val AnnotationType = ctx.requiredClassRef("scala.annotation.Annotation")
def AnnotationClass(implicit ctx: Context) = AnnotationType.symbol.asClass
@@ -880,7 +892,7 @@ class Definitions {
private lazy val ImplementedFunctionType = mkArityArray("scala.Function", MaxImplementedFunctionArity, 0)
def FunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => ImplementedFunctionType.map(_.symbol.asClass))

lazy val TupleType = mkArityArray("scala.Tuple", MaxTupleArity, 2)
lazy val TupleType = mkArityArray("scala.Tuple", MaxTupleArity, 1)

def FunctionClass(n: Int, isImplicit: Boolean = false, isErased: Boolean = false)(implicit ctx: Context) =
if (isImplicit && isErased)
@@ -901,8 +913,6 @@ class Definitions {
if (n <= MaxImplementedFunctionArity && (!isImplicit || ctx.erasedTypes) && !isErased) ImplementedFunctionType(n)
else FunctionClass(n, isImplicit, isErased).typeRef

private lazy val TupleTypes: Set[TypeRef] = TupleType.toSet

/** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */
def scalaClassName(cls: Symbol)(implicit ctx: Context): TypeName =
if (cls.isClass && cls.owner == ScalaPackageClass) cls.asClass.name else EmptyTypeName
@@ -1127,6 +1137,10 @@ class Definitions {
def isErasedFunctionType(tp: Type)(implicit ctx: Context) =
isFunctionType(tp) && tp.dealias.typeSymbol.name.isErasedFunction

/** A whitelist of Scala-2 classes that are known to be pure */
def isAssuredNoInits(sym: Symbol) =
(sym `eq` SomeClass) || isTupleClass(sym)

// ----- primitive value class machinery ------------------------------------------

/** This class would also be obviated by the implicit function type design */
@@ -1199,6 +1213,8 @@ class Definitions {
def isValueSubClass(sym1: Symbol, sym2: Symbol) =
valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0

lazy val erasedToObject = Set[Symbol](AnyClass, AnyValClass, TupleClass, SingletonClass)

// ----- Initialization ---------------------------------------------------

/** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
@@ -1226,6 +1242,27 @@ class Definitions {

private[this] var isInitialized = false

/** Add a `Tuple` as a parent to `Unit`.
* Add the right `*:` instance as a parent to Tuple1..Tuple22
*/
def fixTupleCompleter(cls: ClassSymbol): Unit = cls.infoOrCompleter match {
case completer: LazyType =>
cls.info = new LazyType {
def syntheticParent(tparams: List[TypeSymbol]): Type =
if (tparams.isEmpty) TupleTypeRef
else (tparams :\ (UnitType: Type)) ((tparam, tail) => PairType.appliedTo(tparam.typeRef, tail))
override def complete(denot: SymDenotation)(implicit ctx: Context) = {
completer.complete(denot)
denot.info match {
case info: ClassInfo =>
denot.info = info.derivedClassInfo(
classParents = info.classParents :+ syntheticParent(cls.typeParams))
}
}
}
case _ =>
}

def init()(implicit ctx: Context) = {
this.ctx = ctx
if (!isInitialized) {
@@ -1243,6 +1280,10 @@ def init()(implicit ctx: Context) = {
// force initialization of every symbol that is synthesized or hijacked by the compiler
val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses()

fixTupleCompleter(UnitClass)
for (i <- 1 to MaxTupleArity)
fixTupleCompleter(TupleType(i).symbol.asClass)

isInitialized = true
}
}
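`fixTupleCompleter` patches the completers of `Unit` and of `Tuple1`..`Tuple22` so that they pick up the new generic-tuple parents during initialization. A hedged sketch of the resulting subtyping, written as ordinary user code (illustrative; it relies only on the parents installed above):

// Unit now also extends scala.Tuple, so () can be used as the empty tuple:
val empty: Tuple = ()
// Each TupleN additionally extends the corresponding *: chain ending in Unit,
// e.g. Tuple2[Int, String] <: Int *: String *: Unit, so this should typecheck:
val pair: Int *: String *: Unit = (1, "a")
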
3 changes: 2 additions & 1 deletion compiler/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -596,7 +596,8 @@ object SymDenotations {
* initialization code?
*/
def isNoInitsClass(implicit ctx: Context) =
isClass && asClass.baseClasses.forall(_.is(NoInits))
isClass &&
(asClass.baseClasses.forall(_.is(NoInits)) || defn.isAssuredNoInits(symbol))

/** Is this a "real" method? A real method is a method which is:
* - not an accessor
35 changes: 25 additions & 10 deletions compiler/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -9,6 +9,7 @@ import Uniques.unique
import dotc.transform.ExplicitOuter._
import dotc.transform.ValueClasses._
import util.DotClass
import transform.TypeUtils._
import Definitions.MaxImplementedFunctionArity
import scala.annotation.tailrec

@@ -33,6 +34,9 @@
*/
object TypeErasure {

private def erasureDependsOnArgs(tp: Type)(implicit ctx: Context) =
tp.isRef(defn.ArrayClass) || tp.isRef(defn.PairClass)

/** A predicate that tests whether a type is a legal erased type. Only asInstanceOf and
* isInstanceOf may have types that do not satisfy the predicate.
* ErasedValueType is considered an erased type because it is valid after Erasure (it is
@@ -44,7 +48,8 @@ object TypeErasure {
case tp: TypeRef =>
val sym = tp.symbol
sym.isClass &&
sym != defn.AnyClass && sym != defn.ArrayClass &&
!erasureDependsOnArgs(tp) &&
!defn.erasedToObject.contains(sym) &&
!defn.isSyntheticFunctionClass(sym)
case _: TermRef =>
true
@@ -280,10 +285,8 @@

// Pick the last minimum to prioritise classes over traits
minimums.lastOption match {
case Some(lub) if lub != defn.AnyClass && lub != defn.AnyValClass =>
lub.typeRef
case _ => // Any/AnyVal only exist before erasure
defn.ObjectType
case Some(lub) => valueErasure(lub.typeRef)
case _ => defn.ObjectType
}
}
}
@@ -354,7 +357,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
* - otherwise, if T is a type parameter coming from Java, []Object
* - otherwise, Object
* - For a term ref p.x, the type <noprefix> # x.
* - For a typeref scala.Any, scala.AnyVal or scala.Singleton: |java.lang.Object|
* - For a typeref scala.Any, scala.AnyVal, scala.Singleton, scala.Tuple, or scala.*: : |java.lang.Object|
* - For a typeref scala.Unit, |scala.runtime.BoxedUnit|.
* - For a typeref scala.FunctionN, where N > MaxImplementedFunctionArity, scala.FunctionXXL
* - For a typeref scala.ImplicitFunctionN, | scala.FunctionN |
@@ -390,6 +393,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
else eraseNormalClassRef(tp)
case tp: AppliedType =>
if (tp.tycon.isRef(defn.ArrayClass)) eraseArray(tp)
else if (tp.tycon.isRef(defn.PairClass)) erasePair(tp)
else if (tp.isRepeatedParam) apply(tp.underlyingIfRepeated(isJava))
else apply(tp.superType)
case _: TermRef | _: ThisType =>
@@ -420,9 +424,13 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
case tp @ ClassInfo(pre, cls, parents, decls, _) =>
if (cls is Package) tp
else {
def eraseParent(tp: Type) = tp.dealias match {
case tp: AppliedType if tp.tycon.isRef(defn.PairClass) => defn.ObjectType
case _ => apply(tp)
}
val erasedParents: List[Type] =
if ((cls eq defn.ObjectClass) || cls.isPrimitiveValueClass) Nil
else parents.mapConserve(apply) match {
else parents.mapConserve(eraseParent) match {
case tr :: trs1 =>
assert(!tr.classSymbol.is(Trait), cls)
val tr1 = if (cls is Trait) defn.ObjectType else tr
@@ -450,6 +458,13 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
else JavaArrayType(arrayErasure(elemtp))
}

private def erasePair(tp: Type)(implicit ctx: Context): Type = {
val arity = tp.tupleArity
if (arity < 0) defn.ObjectType
else if (arity <= Definitions.MaxTupleArity) defn.TupleType(arity)
else defn.TupleXXLType
}

/** The erasure of a symbol's info. This is different from `apply` in the way `ExprType`s and
* `PolyType`s are treated. `eraseInfo` maps them to method types, whereas `apply` maps them
* to the underlying type.
@@ -492,15 +507,15 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
// constructor method should not be semi-erased.
else if (isConstructor && isDerivedValueClass(sym)) eraseNormalClassRef(tp)
else this(tp)
case AppliedType(tycon, _) if !(tycon isRef defn.ArrayClass) =>
case AppliedType(tycon, _) if !erasureDependsOnArgs(tycon) =>
eraseResult(tycon)
case _ =>
this(tp)
}

private def normalizeClass(cls: ClassSymbol)(implicit ctx: Context): ClassSymbol = {
if (cls.owner == defn.ScalaPackageClass) {
if (cls == defn.AnyClass || cls == defn.AnyValClass || cls == defn.SingletonClass)
if (defn.erasedToObject.contains(cls))
return defn.ObjectClass
if (cls == defn.UnitClass)
return defn.BoxedUnitClass
@@ -534,7 +549,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
normalizeClass(sym.asClass).fullName.asTypeName
case tp: AppliedType =>
sigName(
if (tp.tycon.isRef(defn.ArrayClass)) this(tp)
if (erasureDependsOnArgs(tp.tycon)) this(tp)
else if (tp.tycon.typeSymbol.isClass) tp.underlying
else tp.superType)
case ErasedValueType(_, underlying) =>
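A hedged summary of how the new `erasePair` and the extended `erasedToObject` set behave, based only on the rules visible in this diff (the arrows are schematic; `sig` is an illustrative name):

// Erasure of *: chains, as implemented by erasePair above:
//   Int *: String *: Unit                ~>  Tuple2      (known arity <= MaxTupleArity)
//   a chain of more than 22 elements     ~>  TupleXXL
//   Int *: Rest, with Rest abstract      ~>  Object      (tupleArity < 0)
// The abstract classes scala.Tuple and scala.*: themselves erase to Object,
// just like Any, AnyVal and Singleton (see erasedToObject).
def sig(t: Int *: String *: Unit): Int = 1   // the erased signature takes a Tuple2 parameter
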
3 changes: 3 additions & 0 deletions compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -211,6 +211,7 @@ Standard-Section: "ASTs" TopLevelStat*
TYPEDSPLICE Length splice_Term
FUNCTION Length body_Term arg_Term*
INFIXOP Length op_NameRef left_Term right_Term
TUPLE Length elem_Term*
PATDEF Length type_Term rhs_Term pattern_Term* Modifier*
EMPTYTYPETREE
@@ -437,6 +438,7 @@ object TastyFormat {
final val FUNCTION = 201
final val INFIXOP = 202
final val PATDEF = 203
final val TUPLE = 204

def methodType(isImplicit: Boolean = false, isErased: Boolean = false) = {
val implicitOffset = if (isImplicit) 1 else 0
@@ -656,6 +658,7 @@ object TastyFormat {
case TYPEDSPLICE => "TYPEDSPLICE"
case FUNCTION => "FUNCTION"
case INFIXOP => "INFIXOP"
case TUPLE => "TUPLE"
case PATDEF => "PATDEF"
}

3 changes: 3 additions & 0 deletions compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -853,6 +853,9 @@ class TreePickler(pickler: TastyPickler) {
case untpd.InfixOp(l, op, r) =>
writeByte(INFIXOP)
withLength { pickleUntyped(l); pickleUntyped(op); pickleUntyped(r) }
case untpd.Tuple(elems) =>
writeByte(TUPLE)
withLength { elems.foreach(pickleUntyped) }
case untpd.PatDef(mods, pats, tpt, rhs) =>
writeByte(PATDEF)
withLength {
