From 8a3db5d0574df218c601de7baa267ba8ca4165ee Mon Sep 17 00:00:00 2001
From: wojciechmazur
Date: Tue, 29 Sep 2020 16:46:13 +0200
Subject: [PATCH 01/75] Initial 2.13 cross compile compat:

- fixed eta-expands
- removing deprecated implicit ()
- removed early initializers
- scala.collections compat
---
 build.sbt                                     |  24 ++-
 .../src/main/scala/java/lang/Class.scala      |   4 +-
 .../src/main/scala/java/lang/Object.scala     |   6 +-
 .../scala/scalanative/regex/CharClass.scala   |   4 +-
 .../scala/scalanative/regex/Parser.scala      |   6 +-
 .../scala/scala/scalanative/regex/RE2.scala   |  10 +-
 .../scala/scala/scalanative/nir/Show.scala    |   1 +
 .../scala/scala/scalanative/nir/Types.scala   |   2 +-
 .../scala/scala/scalanative/nir/package.scala |   8 +
 .../serialization/BinaryDeserializer.scala    | 156 +++++++++---------
 .../nscplugin/NirDefinitions.scala            |  14 +-
 .../scalanative/nscplugin/NirGenExpr.scala    |  14 +-
 .../scalanative/nscplugin/NirGenPhase.scala   |   1 +
 .../scalanative/nscplugin/NirGenType.scala    |   2 +-
 .../scalanative/nscplugin/NirPlugin.scala     |  12 +-
 .../nscplugin/PrepNativeInterop.scala         |  28 ++--
 project/build.sbt                             |   2 +
 .../scalanative/CrossCompileCompat.scala      |  15 ++
 .../scalanative/io/VirtualDirectory.scala     |   1 +
 .../scala/scala/scalanative/util/Stats.scala  |   2 +-
 20 files changed, 179 insertions(+), 133 deletions(-)
 create mode 100644 nir/src/main/scala/scala/scalanative/nir/package.scala
 create mode 100644 util/src/main/scala/scala/scalanative/CrossCompileCompat.scala

diff --git a/build.sbt b/build.sbt
index fae7c63f49..326d3a5fcf 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,7 +1,6 @@
 import java.io.File.pathSeparator
 import scala.collection.mutable
 import scala.util.Try
-
 import build.ScalaVersions._
 // Convert "SomeName" to "some-name".
 def convertCamelKebab(name: String): String = {
@@ -14,6 +13,14 @@ def projectName(project: sbt.ResolvedProject): String = {
   convertCamelKebab(project.id)
 }
 
+def parallelCollectionsDependencies(scalaVersion: String): Seq[ModuleID] = {
+  CrossVersion.partialVersion(scalaVersion) match {
+    case Some((2, n)) if n >= 13 =>
+      Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0")
+    case _ => Nil
+  }
+}
+
 // Provide consistent project name pattern.
lazy val nameSettings: Seq[Setting[_]] = Seq( name := projectName(thisProject.value) // Maven @@ -42,7 +49,7 @@ inThisBuild( Def.settings( organization := "org.scala-native", // Maven version := nativeVersion, // Maven - scalaVersion := scala212, + scalaVersion := libScalaVersion, crossScalaVersions := libCrossScalaVersions, scalacOptions ++= Seq( "-deprecation", @@ -199,10 +206,17 @@ lazy val toolSettings: Seq[Setting[_]] = Def.settings( sbtVersion := sbt10Version, crossSbtVersions := List(sbt10Version), + scalaVersion := sbt10ScalaVersion, crossScalaVersions := Seq(sbt10ScalaVersion), javacOptions ++= Seq("-encoding", "utf8") ) +lazy val crossCompileCompatSettings = Def.settings( + libraryDependencies ++= Seq( + "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0" + ) ++ parallelCollectionsDependencies(scalaVersion.value) +) + lazy val buildInfoSettings: Seq[Setting[_]] = Def.settings( buildInfoPackage := "scala.scalanative.buildinfo", @@ -238,8 +252,8 @@ lazy val nirparser = .settings(noPublishSettings) .settings( libraryDependencies ++= Seq( - "com.lihaoyi" %% "fastparse" % "1.0.0", - "com.lihaoyi" %% "scalaparse" % "1.0.0", + "com.lihaoyi" %% "fastparse" % "2.3.0", + "com.lihaoyi" %% "scalaparse" % "2.3.0", scalacheckDep, scalatestDep ) @@ -296,6 +310,7 @@ lazy val nscplugin = ), exportJars := true ) + .settings(crossCompileCompatSettings) .settings(scalacOptions += "-Xno-patmat-analysis") lazy val sbtPluginSettings: Seq[Setting[_]] = @@ -319,7 +334,6 @@ lazy val sbtScalaNative = .settings(sbtPluginSettings) .settings( crossScalaVersions := Seq(sbt10ScalaVersion), - addSbtPlugin("org.portable-scala" % "sbt-platform-deps" % "1.0.0"), sbtTestDirectory := (ThisBuild / baseDirectory).value / "scripted-tests", // publish the other projects before running scripted tests. 
scriptedDependencies := { diff --git a/nativelib/src/main/scala/java/lang/Class.scala b/nativelib/src/main/scala/java/lang/Class.scala index 1f7648924a..13f9a06ee8 100644 --- a/nativelib/src/main/scala/java/lang/Class.scala +++ b/nativelib/src/main/scala/java/lang/Class.scala @@ -42,7 +42,7 @@ final class _Class[A](val rawty: RawPtr) { ty.name def getSimpleName(): String = - getName.split('.').last.split('$').last + getName().split('.').last.split('$').last def isArray(): scala.Boolean = (rawty == toRawType(classOf[BooleanArray]) || @@ -108,7 +108,7 @@ final class _Class[A](val rawty: RawPtr) { Intrinsics.castRawPtrToLong(rawty).## override def toString = { - val name = getName + val name = getName() val prefix = if (ty.isClass) "class " else "interface " prefix + name } diff --git a/nativelib/src/main/scala/java/lang/Object.scala b/nativelib/src/main/scala/java/lang/Object.scala index 8a722ad22b..14a9dd26fc 100644 --- a/nativelib/src/main/scala/java/lang/Object.scala +++ b/nativelib/src/main/scala/java/lang/Object.scala @@ -20,13 +20,13 @@ class _Object { new _Class(getRawType(this)) @inline def __notify(): Unit = - getMonitor(this)._notify + getMonitor(this)._notify() @inline def __notifyAll(): Unit = - getMonitor(this)._notifyAll + getMonitor(this)._notifyAll() @inline def __wait(): Unit = - getMonitor(this)._wait + getMonitor(this)._wait() @inline def __wait(timeout: scala.Long): Unit = getMonitor(this)._wait(timeout) diff --git a/nativelib/src/main/scala/scala/scalanative/regex/CharClass.scala b/nativelib/src/main/scala/scala/scalanative/regex/CharClass.scala index 5dcadc3aab..c58e491ae7 100644 --- a/nativelib/src/main/scala/scala/scalanative/regex/CharClass.scala +++ b/nativelib/src/main/scala/scala/scalanative/regex/CharClass.scala @@ -25,7 +25,7 @@ class CharClass private (unit: Unit) { // Constructs a CharClass with initial ranges |r|. // The right to mutate |r| is passed to the callee. - def this(r: Array[Int]) { + def this(r: Array[Int]) = { this(()) this.r = r this.len = r.length @@ -36,7 +36,7 @@ class CharClass private (unit: Unit) { // 16 bytes is a best guess. See commit mesage for details on its derivation. // Constructs an empty CharClass. - def this() { + def this() = { this(()) val initialCapacity = 16 this.r = new Array[Int](initialCapacity) diff --git a/nativelib/src/main/scala/scala/scalanative/regex/Parser.scala b/nativelib/src/main/scala/scala/scalanative/regex/Parser.scala index 84a0b40edb..fb60f85d0d 100644 --- a/nativelib/src/main/scala/scala/scalanative/regex/Parser.scala +++ b/nativelib/src/main/scala/scala/scalanative/regex/Parser.scala @@ -746,7 +746,7 @@ class Parser(wholeRegexp: String, _flags: Int) { case '*' | '+' | '?' => repeatPos = t.pos() val op = - (t.pop: @scala.annotation.switch) match { + (t.pop(): @scala.annotation.switch) match { case '*' => ROP.STAR case '+' => ROP.PLUS case '?' => ROP.QUEST @@ -953,8 +953,8 @@ class Parser(wholeRegexp: String, _flags: Int) { if (!parseCompleted) { throw new PatternSyntaxException(ERR_INVALID_PERL_OP, - t.from(startPos) + t.rest, - t.pos - 1) + t.from(startPos) + t.rest(), + t.pos() - 1) } } } diff --git a/nativelib/src/main/scala/scala/scalanative/regex/RE2.scala b/nativelib/src/main/scala/scala/scalanative/regex/RE2.scala index 79e5319b64..189bb5aede 100644 --- a/nativelib/src/main/scala/scala/scalanative/regex/RE2.scala +++ b/nativelib/src/main/scala/scala/scalanative/regex/RE2.scala @@ -56,7 +56,7 @@ class RE2 private { private val machine = new ArrayList[Machine]() // This is visible for testing. 
- def this(expr: String) { + def this(expr: String) = { this() val re2 = RE2.compile(expr) // Copy everything. @@ -76,7 +76,7 @@ class RE2 private { prog: Prog, numSubexp: Int, longest: Boolean, - namedCaps: Map[String, Int]) { + namedCaps: Map[String, Int]) = { this() this.expr = expr this.prog = prog @@ -759,16 +759,16 @@ object RE2 { def compileImpl(expr: String, mode: Int, longest: Boolean): RE2 = { var re = Parser.parse(expr, mode) val maxCap = re.maxCap() // (may shrink during simplify) - val namedCaps = re.namedCaps + val namedCaps = re.namedCaps() re = Simplify.simplify(re) val prog = Compiler.compileRegexp(re) val re2 = new RE2(expr, prog, maxCap, longest, namedCaps) val prefixBuilder = new java.lang.StringBuilder() re2.prefixComplete = prog.prefix(prefixBuilder) - re2.prefix = prefixBuilder.toString() + re2.prefix = prefixBuilder.toString re2.prefixUTF8 = re2.prefix.getBytes("UTF-8") - if (!re2.prefix.isEmpty()) { + if (!re2.prefix.isEmpty) { re2.prefixRune = re2.prefix.codePointAt(0) } re2 diff --git a/nir/src/main/scala/scala/scalanative/nir/Show.scala b/nir/src/main/scala/scala/scalanative/nir/Show.scala index 4c25d12b24..2534bbcd3a 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Show.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Show.scala @@ -5,6 +5,7 @@ import java.nio.charset.StandardCharsets import scala.collection.mutable import scala.scalanative.util.ShowBuilder.InMemoryShowBuilder import scalanative.util.{ShowBuilder, unreachable} +import scalanative.CrossCompileCompat.Converters._ object Show { def newBuilder: NirShowBuilder = new NirShowBuilder(new InMemoryShowBuilder) diff --git a/nir/src/main/scala/scala/scalanative/nir/Types.scala b/nir/src/main/scala/scala/scalanative/nir/Types.scala index 55710e6c1e..24a2d91180 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Types.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Types.scala @@ -142,7 +142,7 @@ object Type { def toArrayClass(ty: Type): Global = ty match { case _ if typeToArray.contains(ty) => typeToArray(ty) - case Type.Ref(name, _, _) if name == Rt.BoxedUnit => + case Type.Ref(name, _, _) if name == Rt.BoxedUnit.name => typeToArray(Rt.BoxedUnit) case _ => typeToArray(Rt.Object) diff --git a/nir/src/main/scala/scala/scalanative/nir/package.scala b/nir/src/main/scala/scala/scalanative/nir/package.scala new file mode 100644 index 0000000000..81c9b031a0 --- /dev/null +++ b/nir/src/main/scala/scala/scalanative/nir/package.scala @@ -0,0 +1,8 @@ +package scala.scalanative +import scala.collection.mutable +import scala.language.implicitConversions + +package object nir { + implicit def bufferToSeq[T](buf: mutable.UnrolledBuffer[T]): Seq[T] = + buf.toSeq +} diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala index 12dff7f527..6f46248167 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala @@ -23,7 +23,7 @@ final class BinaryDeserializer(buffer: ByteBuffer) { val files = Array.fill(getInt())(new URI(getUTF8String())) - val pairs = getSeq((getGlobal, getInt)) + val pairs = getSeq((getGlobal(), getInt())) (prelude, pairs, files) } @@ -32,7 +32,7 @@ final class BinaryDeserializer(buffer: ByteBuffer) { header.foreach { case (g, offset) => buffer.position(offset) - allDefns += getDefn + allDefns += getDefn() } allDefns } @@ -57,7 +57,7 @@ final class 
BinaryDeserializer(buffer: ByteBuffer) { private def getBool(): Boolean = get != 0 - private def getAttrs(): Attrs = Attrs.fromSeq(getSeq(getAttr)) + private def getAttrs(): Attrs = Attrs.fromSeq(getSeq(getAttr())) private def getAttr(): Attr = getInt match { case T.MayInlineAttr => Attr.MayInline case T.InlineHintAttr => Attr.InlineHint @@ -70,12 +70,12 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.UnOptAttr => Attr.UnOpt case T.NoOptAttr => Attr.NoOpt case T.DidOptAttr => Attr.DidOpt - case T.BailOptAttr => Attr.BailOpt(getUTF8String) + case T.BailOptAttr => Attr.BailOpt(getUTF8String()) case T.DynAttr => Attr.Dyn case T.StubAttr => Attr.Stub case T.ExternAttr => Attr.Extern - case T.LinkAttr => Attr.Link(getUTF8String) + case T.LinkAttr => Attr.Link(getUTF8String()) case T.AbstractAttr => Attr.Abstract } @@ -100,19 +100,19 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.XorBin => Bin.Xor } - private def getInsts(): Seq[Inst] = getSeq(getInst) + private def getInsts(): Seq[Inst] = getSeq(getInst()) private def getInst(): Inst = { implicit val pos: nir.Position = getPosition() getInt() match { - case T.LabelInst => Inst.Label(getLocal, getParams) - case T.LetInst => Inst.Let(getLocal, getOp, Next.None) - case T.LetUnwindInst => Inst.Let(getLocal, getOp, getNext) - case T.RetInst => Inst.Ret(getVal) - case T.JumpInst => Inst.Jump(getNext) - case T.IfInst => Inst.If(getVal, getNext, getNext) - case T.SwitchInst => Inst.Switch(getVal, getNext, getNexts) - case T.ThrowInst => Inst.Throw(getVal, getNext) - case T.UnreachableInst => Inst.Unreachable(getNext) + case T.LabelInst => Inst.Label(getLocal(), getParams()) + case T.LetInst => Inst.Let(getLocal(), getOp(), Next.None) + case T.LetUnwindInst => Inst.Let(getLocal(), getOp(), getNext()) + case T.RetInst => Inst.Ret(getVal()) + case T.JumpInst => Inst.Jump(getNext()) + case T.IfInst => Inst.If(getVal(), getNext(), getNext()) + case T.SwitchInst => Inst.Switch(getVal(), getNext(), getNexts()) + case T.ThrowInst => Inst.Throw(getVal(), getNext()) + case T.UnreachableInst => Inst.Unreachable(getNext()) } } @@ -151,95 +151,97 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.BitcastConv => Conv.Bitcast } - private def getDefns(): Seq[Defn] = getSeq(getDefn) + private def getDefns(): Seq[Defn] = getSeq(getDefn()) private def getDefn(): Defn = { implicit val pos: nir.Position = getPosition() getInt() match { case T.VarDefn => - Defn.Var(getAttrs, getGlobal, getType, getVal) + Defn.Var(getAttrs(), getGlobal(), getType(), getVal()) case T.ConstDefn => - Defn.Const(getAttrs, getGlobal, getType, getVal) + Defn.Const(getAttrs(), getGlobal(), getType(), getVal()) case T.DeclareDefn => - Defn.Declare(getAttrs, getGlobal, getType) + Defn.Declare(getAttrs(), getGlobal(), getType()) case T.DefineDefn => Defn.Define(getAttrs(), getGlobal(), getType(), getInsts()) case T.TraitDefn => - Defn.Trait(getAttrs, getGlobal, getGlobals) + Defn.Trait(getAttrs(), getGlobal(), getGlobals()) case T.ClassDefn => - Defn.Class(getAttrs, getGlobal, getGlobalOpt, getGlobals) + Defn.Class(getAttrs(), getGlobal(), getGlobalOpt(), getGlobals()) case T.ModuleDefn => - Defn.Module(getAttrs, getGlobal, getGlobalOpt, getGlobals) + Defn.Module(getAttrs(), getGlobal(), getGlobalOpt(), getGlobals()) } } - private def getGlobals(): Seq[Global] = getSeq(getGlobal) - private def getGlobalOpt(): Option[Global] = getOpt(getGlobal) + private def getGlobals(): Seq[Global] = getSeq(getGlobal()) + private def getGlobalOpt(): Option[Global] = 
getOpt(getGlobal()) private def getGlobal(): Global = getInt match { case T.NoneGlobal => Global.None case T.TopGlobal => - Global.Top(getUTF8String) + Global.Top(getUTF8String()) case T.MemberGlobal => - Global.Member(Global.Top(getUTF8String), getSig) + Global.Member(Global.Top(getUTF8String()), getSig) } private def getSig(): Sig = - new Sig(getUTF8String) + new Sig(getUTF8String()) private def getLocal(): Local = Local(getLong) - private def getNexts(): Seq[Next] = getSeq(getNext) + private def getNexts(): Seq[Next] = getSeq(getNext()) private def getNext(): Next = getInt match { case T.NoneNext => Next.None - case T.UnwindNext => Next.Unwind(getParam, getNext) - case T.CaseNext => Next.Case(getVal, getNext) - case T.LabelNext => Next.Label(getLocal, getVals) + case T.UnwindNext => Next.Unwind(getParam(), getNext()) + case T.CaseNext => Next.Case(getVal(), getNext()) + case T.LabelNext => Next.Label(getLocal(), getVals()) } private def getOp(): Op = getInt match { - case T.CallOp => Op.Call(getType, getVal, getVals) - case T.LoadOp => Op.Load(getType, getVal) - case T.StoreOp => Op.Store(getType, getVal, getVal) - case T.ElemOp => Op.Elem(getType, getVal, getVals) - case T.ExtractOp => Op.Extract(getVal, getInts) - case T.InsertOp => Op.Insert(getVal, getVal, getInts) - case T.StackallocOp => Op.Stackalloc(getType, getVal) - case T.BinOp => Op.Bin(getBin, getType, getVal, getVal) - case T.CompOp => Op.Comp(getComp, getType, getVal, getVal) - case T.ConvOp => Op.Conv(getConv, getType, getVal) - - case T.ClassallocOp => Op.Classalloc(getGlobal) - case T.FieldloadOp => Op.Fieldload(getType, getVal, getGlobal) - case T.FieldstoreOp => Op.Fieldstore(getType, getVal, getGlobal, getVal) - case T.MethodOp => Op.Method(getVal, getSig) - case T.DynmethodOp => Op.Dynmethod(getVal, getSig) - case T.ModuleOp => Op.Module(getGlobal) - case T.AsOp => Op.As(getType, getVal) - case T.IsOp => Op.Is(getType, getVal) - case T.CopyOp => Op.Copy(getVal) - case T.SizeofOp => Op.Sizeof(getType) - case T.BoxOp => Op.Box(getType, getVal) - case T.UnboxOp => Op.Unbox(getType, getVal) - case T.VarOp => Op.Var(getType) - case T.VarloadOp => Op.Varload(getVal) - case T.VarstoreOp => Op.Varstore(getVal, getVal) - case T.ArrayallocOp => Op.Arrayalloc(getType, getVal) - case T.ArrayloadOp => Op.Arrayload(getType, getVal, getVal) - case T.ArraystoreOp => Op.Arraystore(getType, getVal, getVal, getVal) - case T.ArraylengthOp => Op.Arraylength(getVal) + case T.CallOp => Op.Call(getType(), getVal(), getVals()) + case T.LoadOp => Op.Load(getType(), getVal()) + case T.StoreOp => Op.Store(getType(), getVal(), getVal()) + case T.ElemOp => Op.Elem(getType(), getVal(), getVals()) + case T.ExtractOp => Op.Extract(getVal(), getInts()) + case T.InsertOp => Op.Insert(getVal(), getVal(), getInts()) + case T.StackallocOp => Op.Stackalloc(getType(), getVal()) + case T.BinOp => Op.Bin(getBin(), getType(), getVal(), getVal()) + case T.CompOp => Op.Comp(getComp(), getType(), getVal(), getVal()) + case T.ConvOp => Op.Conv(getConv(), getType(), getVal()) + + case T.ClassallocOp => Op.Classalloc(getGlobal()) + case T.FieldloadOp => Op.Fieldload(getType(), getVal(), getGlobal()) + case T.FieldstoreOp => + Op.Fieldstore(getType(), getVal(), getGlobal(), getVal()) + case T.MethodOp => Op.Method(getVal(), getSig()) + case T.DynmethodOp => Op.Dynmethod(getVal(), getSig()) + case T.ModuleOp => Op.Module(getGlobal()) + case T.AsOp => Op.As(getType(), getVal()) + case T.IsOp => Op.Is(getType(), getVal()) + case T.CopyOp => Op.Copy(getVal()) + case 
T.SizeofOp => Op.Sizeof(getType()) + case T.BoxOp => Op.Box(getType(), getVal()) + case T.UnboxOp => Op.Unbox(getType(), getVal()) + case T.VarOp => Op.Var(getType()) + case T.VarloadOp => Op.Varload(getVal()) + case T.VarstoreOp => Op.Varstore(getVal(), getVal()) + case T.ArrayallocOp => Op.Arrayalloc(getType(), getVal()) + case T.ArrayloadOp => Op.Arrayload(getType(), getVal(), getVal()) + case T.ArraystoreOp => + Op.Arraystore(getType(), getVal(), getVal(), getVal()) + case T.ArraylengthOp => Op.Arraylength(getVal()) } - private def getParams(): Seq[Val.Local] = getSeq(getParam) - private def getParam(): Val.Local = Val.Local(getLocal, getType) + private def getParams(): Seq[Val.Local] = getSeq(getParam()) + private def getParam(): Val.Local = Val.Local(getLocal(), getType()) - private def getTypes(): Seq[Type] = getSeq(getType) + private def getTypes(): Seq[Type] = getSeq(getType()) private def getType(): Type = getInt match { case T.VarargType => Type.Vararg case T.PtrType => Type.Ptr @@ -251,25 +253,25 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.LongType => Type.Long case T.FloatType => Type.Float case T.DoubleType => Type.Double - case T.ArrayValueType => Type.ArrayValue(getType, getInt) - case T.StructValueType => Type.StructValue(getTypes) - case T.FunctionType => Type.Function(getTypes, getType) + case T.ArrayValueType => Type.ArrayValue(getType(), getInt) + case T.StructValueType => Type.StructValue(getTypes()) + case T.FunctionType => Type.Function(getTypes(), getType()) case T.NullType => Type.Null case T.NothingType => Type.Nothing case T.VirtualType => Type.Virtual - case T.VarType => Type.Var(getType) + case T.VarType => Type.Var(getType()) case T.UnitType => Type.Unit - case T.ArrayType => Type.Array(getType, getBool) - case T.RefType => Type.Ref(getGlobal, getBool, getBool) + case T.ArrayType => Type.Array(getType(), getBool()) + case T.RefType => Type.Ref(getGlobal(), getBool(), getBool()) } - private def getVals(): Seq[Val] = getSeq(getVal) + private def getVals(): Seq[Val] = getSeq(getVal()) private def getVal(): Val = getInt match { case T.TrueVal => Val.True case T.FalseVal => Val.False case T.NullVal => Val.Null - case T.ZeroVal => Val.Zero(getType) + case T.ZeroVal => Val.Zero(getType()) case T.CharVal => Val.Char(getShort.toChar) case T.ByteVal => Val.Byte(get) case T.ShortVal => Val.Short(getShort) @@ -277,14 +279,14 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.LongVal => Val.Long(getLong) case T.FloatVal => Val.Float(getFloat) case T.DoubleVal => Val.Double(getDouble) - case T.StructValueVal => Val.StructValue(getVals) - case T.ArrayValueVal => Val.ArrayValue(getType, getVals) + case T.StructValueVal => Val.StructValue(getVals()) + case T.ArrayValueVal => Val.ArrayValue(getType(), getVals()) case T.CharsVal => Val.Chars(getBytes()) - case T.LocalVal => Val.Local(getLocal, getType) - case T.GlobalVal => Val.Global(getGlobal, getType) + case T.LocalVal => Val.Local(getLocal(), getType) + case T.GlobalVal => Val.Global(getGlobal(), getType()) case T.UnitVal => Val.Unit - case T.ConstVal => Val.Const(getVal) + case T.ConstVal => Val.Const(getVal()) case T.StringVal => Val.String { val chars = Array.fill(getInt)(getChar) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala index 46695f1958..e50d83d574 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala +++ 
b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala @@ -226,23 +226,25 @@ trait NirDefinitions { self: NirGlobalAddons => 'O' -> getRequiredClass("scala.scalanative.runtime.ObjectArray") ) + private def mapValue[K, V1, V2](fn: V1 => V2)(in: (K, V1)): (K, V2) = + (in._1, fn(in._2)) lazy val RuntimeArrayModule: Map[Char, Symbol] = - RuntimeArrayClass.mapValues(_.companion) + RuntimeArrayClass.map(mapValue(_.companion)) lazy val RuntimeArrayAllocMethod: Map[Char, Symbol] = - RuntimeArrayModule.mapValues(getMember(_, TermName("alloc"))) + RuntimeArrayModule.map(mapValue(getMember(_, TermName("alloc")))) lazy val RuntimeArrayApplyMethod: Map[Char, Symbol] = - RuntimeArrayClass.mapValues(getMember(_, TermName("apply"))) + RuntimeArrayClass.map(mapValue(getMember(_, TermName("apply")))) lazy val RuntimeArrayUpdateMethod: Map[Char, Symbol] = - RuntimeArrayClass.mapValues(getMember(_, TermName("update"))) + RuntimeArrayClass.map(mapValue(getMember(_, TermName("update")))) lazy val RuntimeArrayLengthMethod: Map[Char, Symbol] = - RuntimeArrayClass.mapValues(getMember(_, TermName("length"))) + RuntimeArrayClass.map(mapValue(getMember(_, TermName("length")))) lazy val RuntimeArrayCloneMethod: Map[Char, Symbol] = - RuntimeArrayClass.mapValues(getMember(_, TermName("clone"))) + RuntimeArrayClass.map(mapValue(getMember(_, TermName("clone")))) lazy val RuntimeBoxesModule = getRequiredModule( "scala.scalanative.runtime.Boxes") diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index cc91e2624d..744a704756 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -2,7 +2,7 @@ package scala.scalanative package nscplugin import scala.collection.mutable -import scalanative.nir.{Position, _} +import scalanative.nir._ import scalanative.util.{StringUtils, unsupported} import scalanative.util.ScopedVar.scoped import scalanative.nscplugin.NirPrimitives._ @@ -131,7 +131,7 @@ trait NirGenExpr { self: NirGenPhase => } def genLabelDef(label: LabelDef): Val = { - assert(label.params.length == 0) + assert(label.params.isEmpty, "empty LabelDef params") buf.jump(Next(curMethodEnv.enterLabel(label)))(label.pos) genLabel(label) } @@ -215,7 +215,7 @@ trait NirGenExpr { self: NirGenPhase => case CaseDef(Ident(nme.WILDCARD), _, _) => Seq() case CaseDef(pat, guard, body) => - assert(guard.isEmpty) + assert(guard.isEmpty, "CaseDef guard was not empty") val vals: Seq[Val] = pat match { case lit: Literal => List(genLiteralValue(lit)) @@ -1528,7 +1528,7 @@ trait NirGenExpr { self: NirGenPhase => _))))))), _), _) => - val chars = Val.Chars(StringUtils.processEscapes(str)) + val chars = Val.Chars(StringUtils.processEscapes(str).toIndexedSeq) val const = Val.Const(chars) buf.box(nir.Rt.BoxedPtr, const, unwind)(app.pos) @@ -1739,7 +1739,7 @@ trait NirGenExpr { self: NirGenPhase => } case Object_synchronized => - assert(argsp.size == 1) + assert(argsp.size == 1, "synchronized with wrong number of args") genSynchronized(ValTree(boxed), argsp.head) } } @@ -1826,7 +1826,7 @@ trait NirGenExpr { self: NirGenPhase => def genLoadExtern(ty: nir.Type, externTy: nir.Type, sym: Symbol)( implicit pos: nir.Position): Val = { - assert(sym.owner.isExternModule) + assert(sym.owner.isExternModule, "loadExtern was not extern") val name = Val.Global(genName(sym), Type.Ptr) @@ -1835,7 +1835,7 @@ trait NirGenExpr { self: 
NirGenPhase => def genStoreExtern(externTy: nir.Type, sym: Symbol, value: Val)( implicit pos: nir.Position): Val = { - assert(sym.owner.isExternModule) + assert(sym.owner.isExternModule, "storeExtern was not extern") val name = Val.Global(genName(sym), Type.Ptr) val externValue = toExtern(externTy, value) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala index eadf81522a..e98f7f74de 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala @@ -4,6 +4,7 @@ package nscplugin import java.nio.file.Path import scala.collection.mutable import scala.language.implicitConversions +import scala.scalanative.CrossCompileCompat.Converters._ import scala.scalanative.nir._ import scala.scalanative.util.ScopedVar.scoped import scala.tools.nsc.plugins._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala index ae4983bb79..144be7bc0b 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala @@ -157,7 +157,7 @@ trait NirGenType { self: NirGenPhase => private def genMethodSigImpl(sym: Symbol, isExtern: Boolean): nir.Type.Function = { - require(sym.isMethod || sym.isStaticMember) + require(sym.isMethod || sym.isStaticMember, "symbol is not a method") val tpe = sym.tpe val owner = sym.owner diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala index 0277931c04..b35989c902 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala @@ -9,21 +9,21 @@ class NirPlugin(val global: Global) extends Plugin { self => val description = "Compile to Scala Native IR (NIR)" val components = List[PluginComponent](prepNativeInterop, nirGen) - object nirAddons extends { + object nirAddons extends NirGlobalAddons { val global: NirPlugin.this.global.type = NirPlugin.this.global - } with NirGlobalAddons + } - object prepNativeInterop extends { + object prepNativeInterop extends PrepNativeInterop { val global: self.global.type = self.global val nirAddons: NirPlugin.this.nirAddons.type = NirPlugin.this.nirAddons override val runsAfter = List("typer") override val runsBefore = List("pickler") - } with PrepNativeInterop + } - object nirGen extends { + object nirGen extends NirGenPhase { val global: self.global.type = self.global val nirAddons: NirPlugin.this.nirAddons.type = NirPlugin.this.nirAddons override val runsAfter = List("mixin") override val runsBefore = List("delambdafy", "cleanup", "terminal") - } with NirGenPhase + } } diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala index 9e1f50ca08..bd7b9da786 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala @@ -203,14 +203,12 @@ abstract class PrepNativeInterop private def isScalaEnum(implDef: ImplDef) = implDef.symbol.tpe.typeSymbol isSubClass EnumerationClass - private trait ScalaEnumFctExtractors { - protected val methSym: Symbol - + private 
abstract class ScalaEnumFctExtractors(val methSym: Symbol) { protected def resolve(ptpes: Symbol*) = { val res = methSym suchThat { _.tpe.params.map(_.tpe.typeSymbol) == ptpes.toList } - assert(res != NoSymbol) + assert(res != NoSymbol, "tried to resolve NoSymbol") res } @@ -250,16 +248,18 @@ abstract class PrepNativeInterop } - private object ScalaEnumValue extends { - protected val methSym = getMemberMethod(EnumerationClass, nativenme.Value) - } with ScalaEnumFctExtractors - - private object ScalaEnumVal extends { - protected val methSym = { - val valSym = getMemberClass(EnumerationClass, nativenme.Val) - valSym.tpe.member(nme.CONSTRUCTOR) - } - } with ScalaEnumFctExtractors + private object ScalaEnumValue + extends ScalaEnumFctExtractors( + methSym = getMemberMethod(EnumerationClass, nativenme.Value) + ) + + private object ScalaEnumVal + extends ScalaEnumFctExtractors( + methSym = { + val valSym = getMemberClass(EnumerationClass, nativenme.Val) + valSym.tpe.member(nme.CONSTRUCTOR) + } + ) /** * Construct a call to Enumeration.Value diff --git a/project/build.sbt b/project/build.sbt index 63edd48246..eb617ae792 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -11,6 +11,8 @@ Compile / unmanagedSourceDirectories ++= { ).map(dir => root / s"$dir/src/main/scala") } +libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0" + addSbtPlugin("org.portable-scala" % "sbt-platform-deps" % "1.0.0") addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.4") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") diff --git a/util/src/main/scala/scala/scalanative/CrossCompileCompat.scala b/util/src/main/scala/scala/scalanative/CrossCompileCompat.scala new file mode 100644 index 0000000000..6298c92f37 --- /dev/null +++ b/util/src/main/scala/scala/scalanative/CrossCompileCompat.scala @@ -0,0 +1,15 @@ +package scala.scalanative + +private[scalanative] object CrossCompileCompat { + val Converters = { + import Compat._ + { + import scala.collection.parallel._ + CollectionConverters + } + } + + object Compat { + object CollectionConverters + } +} diff --git a/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala b/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala index 1d99381db7..b5acef2418 100644 --- a/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala +++ b/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala @@ -1,6 +1,7 @@ package scala.scalanative package io +import scala.jdk.CollectionConverters._ import java.io.Writer import java.net.URI import java.nio.ByteBuffer diff --git a/util/src/main/scala/scala/scalanative/util/Stats.scala b/util/src/main/scala/scala/scalanative/util/Stats.scala index 7bd4d39c46..1607051d40 100644 --- a/util/src/main/scala/scala/scalanative/util/Stats.scala +++ b/util/src/main/scala/scala/scalanative/util/Stats.scala @@ -72,7 +72,7 @@ object Stats { counts.clear() } private def threadKey(key: String): String = - java.lang.Thread.currentThread.getId + ":" + key + "" + java.lang.Thread.currentThread.getId + ":" + key def in[T](f: => T): T = { clear() val res = f From 47733cf580d77e60d95f42c2255aa40484d490dd Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 29 Sep 2020 17:03:35 +0200 Subject: [PATCH 02/75] Updated ScalaVersions --- project/ScalaVersions.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/ScalaVersions.scala b/project/ScalaVersions.scala index 87afd5d2ff..b5d98ec7fb 100644 --- a/project/ScalaVersions.scala +++ b/project/ScalaVersions.scala @@ 
-3,9 +3,9 @@ package build object ScalaVersions { val scala211: String = "2.11.12" val scala212: String = "2.12.12" - //val scala213: String = "2.13.3" + val scala213: String = "2.13.3" val sbt10Version: String = "1.1.6" // minimum version val sbt10ScalaVersion: String = scala212 - val libCrossScalaVersions: Seq[String] = Seq(scala211, scala212) + val libCrossScalaVersions: Seq[String] = Seq(scala211, scala212, scala213) } From b713318ed8b1775f6110f75ed14264cdd1eb108f Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 30 Sep 2020 14:26:12 +0200 Subject: [PATCH 03/75] Fix early initializers in NirPlugin --- .../scalanative/nscplugin/NirCompat.scala | 5 +---- .../scalanative/nscplugin/NirDefinitions.scala | 3 ++- .../scalanative/nscplugin/NirGenExpr.scala | 2 +- .../scalanative/nscplugin/NirGenFile.scala | 2 +- .../scalanative/nscplugin/NirGenName.scala | 4 +--- .../scalanative/nscplugin/NirGenPhase.scala | 18 ++++++++++-------- .../scalanative/nscplugin/NirGenStat.scala | 2 +- .../scalanative/nscplugin/NirGenType.scala | 2 +- .../scalanative/nscplugin/NirGenUtil.scala | 2 +- .../scalanative/nscplugin/NirPlugin.scala | 18 ++++++++++-------- .../nscplugin/PrepNativeInterop.scala | 3 ++- .../scala/scalanative/nscplugin/package.scala | 5 +++++ 12 files changed, 36 insertions(+), 30 deletions(-) create mode 100644 nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala index 60dbb0846b..d7f70dcbab 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala @@ -4,11 +4,8 @@ package nscplugin import scala.reflect.internal.Flags import scala.tools.nsc._ -trait NirCompat { self: NirGenPhase => +trait NirCompat[G <: NscGlobal] { self: NirGenPhase[G] => import NirCompat.{infiniteLoop, noImplClasses} - - val global: Global - import global._ // SAMFunction was introduced in 2.12 for LMF-capable SAM type diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala index e50d83d574..a31fce1572 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala @@ -3,7 +3,8 @@ package nscplugin import scala.tools.nsc._ -trait NirDefinitions { self: NirGlobalAddons => +trait NirDefinitions { + val global: Global import global._ import definitions._ import rootMirror._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index 744a704756..5cc0c19b81 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -7,7 +7,7 @@ import scalanative.util.{StringUtils, unsupported} import scalanative.util.ScopedVar.scoped import scalanative.nscplugin.NirPrimitives._ -trait NirGenExpr { self: NirGenPhase => +trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => import global._ import definitions._ import treeInfo.hasSynthCaseSymbol diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala index 367547ef3e..374de920b6 100644 --- 
a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala @@ -6,7 +6,7 @@ import java.nio.file.{Path, Paths} import scala.scalanative.nir.serialization.serializeBinary import scala.tools.nsc.io.AbstractFile -trait NirGenFile { self: NirGenPhase => +trait NirGenFile[G <: NscGlobal] { self: NirGenPhase[G] => import global._ def genPathFor(cunit: CompilationUnit, ownerName: nir.Global): Path = { diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala index 63f324e9fb..323e2927b0 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala @@ -1,11 +1,9 @@ package scala.scalanative package nscplugin -import scala.tools.nsc._ -import scala.reflect.internal.Flags._ import scalanative.util.unreachable -trait NirGenName { self: NirGenPhase => +trait NirGenName[G <: NscGlobal] { self: NirGenPhase[G] => import global.{Name => _, _}, definitions._ import nirAddons.nirDefinitions._ import SimpleType.{fromSymbol, fromType} diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala index e98f7f74de..dcc46e1a91 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala @@ -10,15 +10,17 @@ import scala.scalanative.util.ScopedVar.scoped import scala.tools.nsc.plugins._ import scala.tools.nsc.{util => _, _} -abstract class NirGenPhase +abstract class NirGenPhase[G <: NscGlobal](val global: G) extends PluginComponent - with NirGenStat - with NirGenExpr - with NirGenUtil - with NirGenFile - with NirGenType - with NirGenName - with NirCompat { + with NirGenStat[G] + with NirGenExpr[G] + with NirGenUtil[G] + with NirGenFile[G] + with NirGenType[G] + with NirGenName[G] + with NirCompat[G] { + + /** Not for use in the constructor body: only initialized afterwards. 
*/ val nirAddons: NirGlobalAddons { val global: NirGenPhase.this.global.type } diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala index 1e009ec0d2..1114891354 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala @@ -8,7 +8,7 @@ import scala.scalanative.util.unsupported import scala.scalanative.util.ScopedVar.scoped import scalanative.nir.ControlFlow.removeDeadBlocks -trait NirGenStat { self: NirGenPhase => +trait NirGenStat[G <: NscGlobal] { self: NirGenPhase[G] => import global._ import definitions._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala index 144be7bc0b..9843146fec 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala @@ -1,7 +1,7 @@ package scala.scalanative package nscplugin -trait NirGenType { self: NirGenPhase => +trait NirGenType[G <: NscGlobal] { self: NirGenPhase[G] => import SimpleType.{fromSymbol, fromType} import global._ import definitions._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala index 156e05b960..b6588d30d1 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala @@ -3,7 +3,7 @@ package nscplugin import scalanative.util.unsupported -trait NirGenUtil { self: NirGenPhase => +trait NirGenUtil[G <: NscGlobal] { self: NirGenPhase[G] => import global._ import definitions._ import nirAddons._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala index b35989c902..469340df09 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala @@ -4,24 +4,26 @@ package nscplugin import scala.tools.nsc._ import scala.tools.nsc.plugins._ -class NirPlugin(val global: Global) extends Plugin { self => +class NirPlugin(val global: Global) extends Plugin { val name = "nir" val description = "Compile to Scala Native IR (NIR)" val components = List[PluginComponent](prepNativeInterop, nirGen) - object nirAddons extends NirGlobalAddons { - val global: NirPlugin.this.global.type = NirPlugin.this.global - } + /** A trick to avoid early initializers while still enforcing that `global` + * is initialized early. 
+ */ + abstract class NirGlobalAddonsEarlyInit[G <: NscGlobal](val global: G) + extends NirGlobalAddons + + object nirAddons extends NirGlobalAddonsEarlyInit[global.type](global) - object prepNativeInterop extends PrepNativeInterop { - val global: self.global.type = self.global + object prepNativeInterop extends PrepNativeInterop[global.type](global) { val nirAddons: NirPlugin.this.nirAddons.type = NirPlugin.this.nirAddons override val runsAfter = List("typer") override val runsBefore = List("pickler") } - object nirGen extends NirGenPhase { - val global: self.global.type = self.global + object nirGen extends NirGenPhase[global.type](global) { val nirAddons: NirPlugin.this.nirAddons.type = NirPlugin.this.nirAddons override val runsAfter = List("mixin") override val runsBefore = List("delambdafy", "cleanup", "terminal") diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala index bd7b9da786..b3ae4d94a6 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala @@ -11,11 +11,12 @@ import scala.tools.nsc._ * - Rewrite the body `scala.util.PropertiesTrait.scalaProps` to * be statically determined at compile-time. */ -abstract class PrepNativeInterop +abstract class PrepNativeInterop[G <: NscGlobal](val global: G) extends plugins.PluginComponent with transform.Transform { import PrepNativeInterop._ + /** Not for use in the constructor body: only initialized afterwards. */ val nirAddons: NirGlobalAddons { val global: PrepNativeInterop.this.global.type } diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala new file mode 100644 index 0000000000..bcd839c0d8 --- /dev/null +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala @@ -0,0 +1,5 @@ +package scala.scalanative + +package object nscplugin { + type NscGlobal = scala.tools.nsc.Global with Singleton +} From aeb5bf5878b5df033aa7fb503d87d32c27923faa Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 30 Sep 2020 18:06:53 +0200 Subject: [PATCH 04/75] Use java parallel processing instead of scala parallel collections (since they were removed from main distribution in 2.13) --- build.sbt | 3 ++ .../scala/scala/scalanative/nir/Show.scala | 51 ++++++++++++------- .../scala/scala/scalanative/nir/package.scala | 8 --- .../scalanative/nscplugin/NirGenPhase.scala | 22 +++++--- .../scala/scala/scalanative/package.scala | 12 +++++ .../scala/scalanative/util/package.scala | 6 ++- 6 files changed, 68 insertions(+), 34 deletions(-) delete mode 100644 nir/src/main/scala/scala/scalanative/nir/package.scala create mode 100644 util/src/main/scala/scala/scalanative/package.scala diff --git a/build.sbt b/build.sbt index 326d3a5fcf..d497f18b83 100644 --- a/build.sbt +++ b/build.sbt @@ -291,6 +291,7 @@ lazy val tools = Test / parallelExecution := false, mimaSettings ) + .settings(crossCompileCompatSettings) .dependsOn(nir, util, testingCompilerInterface % Test) lazy val nscplugin = @@ -333,7 +334,9 @@ lazy val sbtScalaNative = .enablePlugins(SbtPlugin) .settings(sbtPluginSettings) .settings( + scalaVersion := sbt10ScalaVersion, crossScalaVersions := Seq(sbt10ScalaVersion), + addSbtPlugin("org.portable-scala" % "sbt-platform-deps" % "1.0.0"), sbtTestDirectory := (ThisBuild / baseDirectory).value / "scripted-tests", // publish the 
other projects before running scripted tests. scriptedDependencies := { diff --git a/nir/src/main/scala/scala/scalanative/nir/Show.scala b/nir/src/main/scala/scala/scalanative/nir/Show.scala index 2534bbcd3a..76500ec81e 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Show.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Show.scala @@ -5,7 +5,9 @@ import java.nio.charset.StandardCharsets import scala.collection.mutable import scala.scalanative.util.ShowBuilder.InMemoryShowBuilder import scalanative.util.{ShowBuilder, unreachable} -import scalanative.CrossCompileCompat.Converters._ +import scala.jdk.CollectionConverters._ +import java.util.stream.{Stream => JStream} +import java.util.function.{Function => JFunction, Consumer => JConsumer} object Show { def newBuilder: NirShowBuilder = new NirShowBuilder(new InMemoryShowBuilder) @@ -34,28 +36,39 @@ object Show { def apply(v: Type): String = { val b = newBuilder; b.type_(v); b.toString } def apply(v: Val): String = { val b = newBuilder; b.val_(v); b.toString } + type DefnString = (Global, String) + def dump(defns: Seq[Defn], fileName: String): Unit = { + + val collectDefs = new JFunction[(Int, Seq[Defn]), JStream[DefnString]] { + override def apply(t: (Int, Seq[Defn])): JStream[DefnString] = { + val (_, defns) = t + defns + .collect { + case defn if defn != null => + (defn.name, defn.show) + } + .asJavaCollection + .stream() + } + } + val pw = new java.io.PrintWriter(fileName) + val writeToFile = new JConsumer[DefnString] { + override def accept(t: (Global, String)): Unit = { + val (_, shown) = t + pw.write(shown) + pw.write("\n") + } + } + try { + val groupedDefns = util.partitionBy(defns.filter(_ != null))(_.name) util - .partitionBy(defns.filter(_ != null))(_.name) - .par - .map { - case (_, defns) => - defns.collect { - case defn if defn != null => - (defn.name, defn.show) - } - } - .seq - .flatten - .toSeq - .sortBy(_._1) - .foreach { - case (_, shown) => - pw.write(shown) - pw.write("\n") - } + .parallelStream(groupedDefns) + .flatMap(collectDefs) + .sorted(Ordering.by(_._1)) + .forEach(writeToFile) } finally { pw.close() } diff --git a/nir/src/main/scala/scala/scalanative/nir/package.scala b/nir/src/main/scala/scala/scalanative/nir/package.scala deleted file mode 100644 index 81c9b031a0..0000000000 --- a/nir/src/main/scala/scala/scalanative/nir/package.scala +++ /dev/null @@ -1,8 +0,0 @@ -package scala.scalanative -import scala.collection.mutable -import scala.language.implicitConversions - -package object nir { - implicit def bufferToSeq[T](buf: mutable.UnrolledBuffer[T]): Seq[T] = - buf.toSeq -} diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala index dcc46e1a91..83d35cc4af 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala @@ -1,14 +1,15 @@ package scala.scalanative package nscplugin -import java.nio.file.Path +import java.nio.file.{Path => JPath} +import java.util.function.{Consumer => JConsumer} import scala.collection.mutable import scala.language.implicitConversions -import scala.scalanative.CrossCompileCompat.Converters._ import scala.scalanative.nir._ import scala.scalanative.util.ScopedVar.scoped import scala.tools.nsc.plugins._ import scala.tools.nsc.{util => _, _} +import scala.jdk.CollectionConverters._ abstract class NirGenPhase[G <: NscGlobal](val global: G) extends PluginComponent @@ -97,10 +98,19 
@@ abstract class NirGenPhase[G <: NscGlobal](val global: G) (path, reflectiveInstBuf.toSeq) }.toMap - (files ++ reflectiveInstFiles).par.foreach { - case (path, stats) => - genIRFile(path, stats) - } + val allFiles = files ++ reflectiveInstFiles + + val generateIRFile: JConsumer[(JPath, Seq[Defn])] = + new JConsumer[(JPath, Seq[Defn])] { + override def accept(t: (JPath, Seq[Defn])): Unit = { + val (path, stats) = t + genIRFile(path, stats) + } + } + + util + .parallelStream(allFiles) + .forEach(generateIRFile) } } diff --git a/util/src/main/scala/scala/scalanative/package.scala b/util/src/main/scala/scala/scalanative/package.scala new file mode 100644 index 0000000000..3427252817 --- /dev/null +++ b/util/src/main/scala/scala/scalanative/package.scala @@ -0,0 +1,12 @@ +package scala +import scala.collection.mutable +import scala.language.implicitConversions + +package object scalanative { + implicit def bufferToSeq[T]( + buf: mutable.UnrolledBuffer[T]): collection.Seq[T] = + buf.toSeq + + implicit def seqToImmutableSeq[T]( + seq: collection.Seq[T]): collection.immutable.Seq[T] = seq.toIndexedSeq +} diff --git a/util/src/main/scala/scala/scalanative/util/package.scala b/util/src/main/scala/scala/scalanative/util/package.scala index 824cccc093..9fe9b84e73 100644 --- a/util/src/main/scala/scala/scalanative/util/package.scala +++ b/util/src/main/scala/scala/scalanative/util/package.scala @@ -1,6 +1,6 @@ package scala.scalanative -import java.nio.ByteBuffer +import java.util.stream.{Stream => JStream} package object util { @@ -54,4 +54,8 @@ package object util { def partitionBy[T](elems: Seq[T], batches: Int)( f: T => Any): Map[Int, Seq[T]] = elems.groupBy { elem => Math.abs(f(elem).##) % batches } + + def parallelStream[T](elems: Iterable[T]): JStream[T] = { + JStream.of(elems.toSeq: _*).parallel() + } } From d4f7691459496930d21857ee4161f55c162c1594 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 1 Oct 2020 09:31:46 +0200 Subject: [PATCH 05/75] Removed posix.time import causing wrong reference in scope --- .../src/main/scala/scala/scalanative/posix/sys/select.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/posixlib/src/main/scala/scala/scalanative/posix/sys/select.scala b/posixlib/src/main/scala/scala/scalanative/posix/sys/select.scala index 5e9bd0f489..0d821e7bea 100644 --- a/posixlib/src/main/scala/scala/scalanative/posix/sys/select.scala +++ b/posixlib/src/main/scala/scala/scalanative/posix/sys/select.scala @@ -2,8 +2,6 @@ package scala.scalanative.posix.sys import scalanative.unsafe._ import scalanative.unsafe.Nat._ -import scalanative.posix.inttypes._ -import scalanative.posix.time._ @extern object select { From bea1f0180c8c0dca4dd43647038430316ed4e202 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 1 Oct 2020 17:57:48 +0200 Subject: [PATCH 06/75] Make javalib 2.13 cross-compilable --- build.sbt | 1 + .../scala/java/io/BufferedOutputStream.scala | 2 +- .../main/scala/java/io/BufferedReader.scala | 5 +- .../main/scala/java/io/BufferedWriter.scala | 4 +- javalib/src/main/scala/java/io/File.scala | 43 +++-- .../main/scala/java/io/FileDescriptor.scala | 4 +- .../main/scala/java/io/FileOutputStream.scala | 2 +- .../scala/java/io/InputStreamReader.scala | 40 ++-- .../scala/java/io/OutputStreamWriter.scala | 28 +-- .../src/main/scala/java/io/PrintStream.scala | 2 +- .../main/scala/java/io/RandomAccessFile.scala | 14 +- javalib/src/main/scala/java/io/Reader.scala | 12 +- .../java/lang/AbstractStringBuilder.scala | 14 +- javalib/src/main/scala/java/lang/Byte.scala | 4 +- 
.../src/main/scala/java/lang/Character.scala | 2 +- javalib/src/main/scala/java/lang/Double.scala | 2 +- javalib/src/main/scala/java/lang/Enum.scala | 2 +- javalib/src/main/scala/java/lang/Float.scala | 2 +- .../src/main/scala/java/lang/Integer.scala | 2 +- javalib/src/main/scala/java/lang/Long.scala | 2 +- javalib/src/main/scala/java/lang/Math.scala | 12 +- javalib/src/main/scala/java/lang/Number.scala | 4 +- javalib/src/main/scala/java/lang/PipeIO.scala | 6 +- .../main/scala/java/lang/ProcessBuilder.scala | 48 ++--- javalib/src/main/scala/java/lang/Short.scala | 4 +- .../scala/java/lang/StackTraceElement.scala | 4 +- javalib/src/main/scala/java/lang/String.scala | 40 ++-- javalib/src/main/scala/java/lang/System.scala | 4 +- javalib/src/main/scala/java/lang/Thread.scala | 4 +- .../main/scala/java/lang/ThreadLocal.scala | 2 +- .../src/main/scala/java/lang/Throwables.scala | 4 +- .../main/scala/java/lang/UnixProcess.scala | 48 ++--- .../lang/annotation/RetentionPolicy.scala | 2 +- .../main/scala/java/lang/reflect/Array.scala | 12 +- .../src/main/scala/java/math/BigDecimal.scala | 179 +++++++++--------- .../src/main/scala/java/math/BigInteger.scala | 16 +- .../src/main/scala/java/math/BitLevel.scala | 6 +- .../src/main/scala/java/math/Conversion.scala | 30 +-- .../src/main/scala/java/math/Division.scala | 26 +-- .../src/main/scala/java/math/Logical.scala | 47 ++--- .../main/scala/java/math/Multiplication.scala | 2 +- .../src/main/scala/java/math/Primality.scala | 8 +- .../main/scala/java/math/RoundingMode.scala | 2 +- .../main/scala/java/net/Inet4Address.scala | 2 +- .../src/main/scala/java/net/InetAddress.scala | 9 +- .../scala/java/net/InetSocketAddress.scala | 6 +- .../main/scala/java/net/PlainSocketImpl.scala | 17 +- .../main/scala/java/net/ServerSocket.scala | 6 +- javalib/src/main/scala/java/net/Socket.scala | 2 +- .../scala/java/net/SocketInputStream.scala | 2 +- javalib/src/main/scala/java/net/URI.scala | 66 ++++--- javalib/src/main/scala/java/nio/Buffer.scala | 24 +-- .../src/main/scala/java/nio/ByteOrder.scala | 2 +- .../src/main/scala/java/nio/CharBuffer.scala | 12 +- .../src/main/scala/java/nio/GenBuffer.scala | 22 +-- .../main/scala/java/nio/GenHeapBuffer.scala | 21 +- .../scala/java/nio/GenHeapBufferView.scala | 30 +-- .../java/nio/HeapByteBufferCharView.scala | 6 +- .../main/scala/java/nio/HeapCharBuffer.scala | 6 +- .../src/main/scala/java/nio/IntBuffer.scala | 2 +- .../scala/java/nio/MappedByteBuffer.scala | 2 +- .../scala/java/nio/StringCharBuffer.scala | 10 +- .../java/nio/channels/FileChannelImpl.scala | 14 +- .../scala/java/nio/channels/FileLock.scala | 2 +- .../main/scala/java/nio/charset/Charset.scala | 11 +- .../java/nio/charset/CharsetDecoder.scala | 38 ++-- .../java/nio/charset/CharsetEncoder.scala | 46 ++--- .../scala/java/nio/charset/CoderResult.scala | 2 +- .../java/nio/file/DirectoryStreamImpl.scala | 4 +- .../scala/java/nio/file/FileSystems.scala | 6 +- .../scala/java/nio/file/FileVisitOption.scala | 2 +- .../scala/java/nio/file/FileVisitResult.scala | 2 +- .../src/main/scala/java/nio/file/Files.scala | 109 +++++------ .../java/nio/file/attribute/FileTime.scala | 6 +- .../file/attribute/PosixFilePermissions.scala | 4 +- .../java/security/MessageDigestSpi.scala | 2 +- .../main/scala/java/security/Timestamp.scala | 8 +- .../scala/java/security/cert/CertPath.scala | 4 +- .../java/security/cert/Certificate.scala | 2 +- .../main/scala/java/text/DecimalFormat.scala | 20 +- .../main/scala/java/text/NumberFormat.scala | 4 +- .../src/main/scala/java/time/Duration.scala 
| 6 +- .../scala/java/util/AbstractCollection.scala | 22 +-- .../main/scala/java/util/AbstractList.scala | 32 ++-- .../main/scala/java/util/AbstractMap.scala | 41 ++-- .../main/scala/java/util/AbstractQueue.scala | 2 +- .../java/util/AbstractSequentialList.scala | 8 +- .../main/scala/java/util/AbstractSet.scala | 9 +- .../src/main/scala/java/util/ArrayDeque.scala | 32 ++-- .../src/main/scala/java/util/ArrayList.scala | 2 +- javalib/src/main/scala/java/util/Arrays.scala | 2 +- javalib/src/main/scala/java/util/Base64.scala | 2 +- .../main/scala/java/util/Collections.scala | 124 ++++++------ javalib/src/main/scala/java/util/Date.scala | 2 +- .../src/main/scala/java/util/Formatter.scala | 4 +- .../src/main/scala/java/util/HashMap.scala | 4 +- .../src/main/scala/java/util/HashSet.scala | 12 +- .../src/main/scala/java/util/Hashtable.scala | 8 +- .../main/scala/java/util/LinkedHashMap.scala | 2 +- .../src/main/scala/java/util/LinkedList.scala | 20 +- javalib/src/main/scala/java/util/Locale.scala | 2 +- .../main/scala/java/util/NavigableView.scala | 2 +- .../main/scala/java/util/PriorityQueue.scala | 2 +- .../src/main/scala/java/util/Properties.scala | 8 +- .../src/main/scala/java/util/TreeSet.scala | 12 +- javalib/src/main/scala/java/util/UUID.scala | 4 +- .../main/scala/java/util/WeakHashMap.scala | 4 +- .../scala/java/util/concurrent/TimeUnit.scala | 2 +- .../main/scala/java/util/jar/JarEntry.scala | 4 +- .../scala/java/util/jar/JarVerifier.scala | 10 +- .../main/scala/java/util/jar/Manifest.scala | 4 +- .../main/scala/java/util/regex/Matcher.scala | 2 +- .../main/scala/java/util/regex/Pattern.scala | 5 +- .../util/regex/PatternSyntaxException.scala | 2 +- .../main/scala/java/util/stream/Stream.scala | 5 +- .../java/util/stream/WrappedScalaStream.scala | 16 +- .../main/scala/java/util/zip/Checksum.scala | 2 +- .../scala/java/util/zip/GZIPInputStream.scala | 4 +- .../scala/java/util/zip/ZipOutputStream.scala | 12 +- .../security/auth/x500/X500Principal.scala | 2 +- .../ISO_8859_1_And_US_ASCII_Common.scala | 34 ++-- .../main/scala/niocharset/UTF_16_Common.scala | 18 +- javalib/src/main/scala/niocharset/UTF_8.scala | 42 ++-- .../scalanative/nio/fs/FileHelpers.scala | 4 +- .../nio/fs/NativePosixFileAttributeView.scala | 38 ++-- .../scalanative/nio/fs/UnixFileSystem.scala | 2 +- .../nio/fs/UnixFileSystemProvider.scala | 18 +- .../scala/scalanative/nio/fs/UnixPath.scala | 48 ++--- 128 files changed, 943 insertions(+), 929 deletions(-) diff --git a/build.sbt b/build.sbt index d497f18b83..8101e0047d 100644 --- a/build.sbt +++ b/build.sbt @@ -396,6 +396,7 @@ lazy val javalib = .in(file("javalib")) .enablePlugins(MyScalaNativePlugin) .settings(mavenPublishSettings) + .settings(crossCompileCompatSettings) .settings( Compile / doc / sources := Nil, // doc generation currently broken // This is required to have incremental compilation to work in javalib. 
diff --git a/javalib/src/main/scala/java/io/BufferedOutputStream.scala b/javalib/src/main/scala/java/io/BufferedOutputStream.scala index 397e40625d..ad466db49b 100644 --- a/javalib/src/main/scala/java/io/BufferedOutputStream.scala +++ b/javalib/src/main/scala/java/io/BufferedOutputStream.scala @@ -73,7 +73,7 @@ class BufferedOutputStream(out: OutputStream, size: Int) count += 1 } - private def flushInternal() { + private def flushInternal() = { if (count > 0) { out.write(buf, 0, count) count = 0 diff --git a/javalib/src/main/scala/java/io/BufferedReader.scala b/javalib/src/main/scala/java/io/BufferedReader.scala index a020b88c65..b2007eb067 100644 --- a/javalib/src/main/scala/java/io/BufferedReader.scala +++ b/javalib/src/main/scala/java/io/BufferedReader.scala @@ -1,6 +1,6 @@ package java.io -import scala.collection.immutable.{Stream => SStream} +import scala.collection.compat.immutable.{LazyList => SStream} import java.util.stream.{Stream, WrappedScalaStream} class BufferedReader(in: Reader, sz: Int) extends Reader { @@ -162,9 +162,6 @@ class BufferedReader(in: Reader, sz: Int) extends Reader { } } - private def toScalaStream(): SStream[String] = - toScalaStream(false) - private[this] def toScalaStream(closeAtEnd: Boolean): SStream[String] = { Option(readLine()) match { case None => diff --git a/javalib/src/main/scala/java/io/BufferedWriter.scala b/javalib/src/main/scala/java/io/BufferedWriter.scala index 44184826a6..0c1da6d59e 100644 --- a/javalib/src/main/scala/java/io/BufferedWriter.scala +++ b/javalib/src/main/scala/java/io/BufferedWriter.scala @@ -1,7 +1,5 @@ package java.io -import scala.compat.Platform.EOL - class BufferedWriter(out: Writer, sz: Int) extends Writer { if (sz <= 0) throw new IllegalArgumentException("Buffer size <= 0") @@ -26,7 +24,7 @@ class BufferedWriter(out: Writer, sz: Int) extends Writer { } def newLine(): Unit = - write(EOL, 0, EOL.length) + write(System.lineSeparator(), 0, System.lineSeparator().length) override def write(c: Int): Unit = write(Array(c.toChar), 0, 1) diff --git a/javalib/src/main/scala/java/io/File.scala b/javalib/src/main/scala/java/io/File.scala index 2c0d86d5e0..e273c06824 100644 --- a/javalib/src/main/scala/java/io/File.scala +++ b/javalib/src/main/scala/java/io/File.scala @@ -31,7 +31,7 @@ class File(_path: String) extends Serializable with Comparable[File] { this(Option(parent).map(_.path).orNull, child) def this(uri: URI) = { - this(uri.getPath) + this(uri.getPath()) checkURI(uri) } @@ -122,7 +122,7 @@ class File(_path: String) extends Serializable with Comparable[File] { def getCanonicalPath(): String = Zone { implicit z => - if (exists) { + if (exists()) { fromCString(simplifyExistingPath(toCString(properPath))) } else { simplifyNonExistingPath(fromCString(resolve(toCString(properPath)))) @@ -153,7 +153,7 @@ class File(_path: String) extends Serializable with Comparable[File] { case (acc, seg) => seg :: acc } .reverse - .filterNot(_.isEmpty) + .filterNot(_.isEmpty()) .mkString(separator, separator, "") @throws(classOf[IOException]) @@ -166,11 +166,12 @@ class File(_path: String) extends Serializable with Comparable[File] { } def getParent(): String = - path.split(separatorChar).filterNot(_.isEmpty) match { - case Array() if !isAbsolute => null - case Array(_) if !isAbsolute => null - case parts if !isAbsolute => parts.init.mkString(separator) - case parts if isAbsolute => parts.init.mkString(separator, separator, "") + path.split(separatorChar).filterNot(_.isEmpty()) match { + case Array() if !isAbsolute() => null + case Array(_) if 
!isAbsolute() => null + case parts if !isAbsolute() => parts.init.mkString(separator) + case parts if isAbsolute() => + parts.init.mkString(separator, separator, "") } def getParentFile(): File = { @@ -310,7 +311,7 @@ class File(_path: String) extends Serializable with Comparable[File] { override def toString(): String = path - def deleteOnExit(): Unit = DeleteOnExit.addFile(this.getAbsolutePath) + def deleteOnExit(): Unit = DeleteOnExit.addFile(this.getAbsolutePath()) @stub def toURL(): java.net.URL = ??? @@ -419,7 +420,7 @@ object File { throw new IOException( "getcwd() error in trying to get user directory.")) - if (path.isEmpty) userdir + if (path.isEmpty()) userdir else if (userdir.endsWith(separator)) userdir + path else userdir + separator + path } @@ -521,16 +522,16 @@ object File { } } - val pathSeparatorChar: Char = if (Platform.isWindows) ';' else ':' + val pathSeparatorChar: Char = if (Platform.isWindows()) ';' else ':' val pathSeparator: String = pathSeparatorChar.toString - val separatorChar: Char = if (Platform.isWindows) '\\' else '/' + val separatorChar: Char = if (Platform.isWindows()) '\\' else '/' val separator: String = separatorChar.toString private var counter: Int = 0 private var counterBase: Int = 0 - private val caseSensitive: Boolean = !Platform.isWindows + private val caseSensitive: Boolean = !Platform.isWindows() def listRoots(): Array[File] = - if (Platform.isWindows) ??? + if (Platform.isWindows()) ??? else { var array = new Array[File](1) array(0) = new File("/") @@ -556,19 +557,19 @@ object File { def compMsg(comp: String): String = s"Found $comp component in URI" - if (!uri.isAbsolute) { + if (!uri.isAbsolute()) { throwExc("URI is not absolute") - } else if (!uri.getRawSchemeSpecificPart.startsWith("/")) { + } else if (!uri.getRawSchemeSpecificPart().startsWith("/")) { throwExc("URI is not hierarchical") - } else if (uri.getScheme == null || !(uri.getScheme == "file")) { + } else if (uri.getScheme() == null || !(uri.getScheme() == "file")) { throwExc("Expected file scheme in URI") - } else if (uri.getRawPath == null || uri.getRawPath.length == 0) { + } else if (uri.getRawPath() == null || uri.getRawPath().length() == 0) { throwExc("Expected non-empty path in URI") - } else if (uri.getRawAuthority != null) { + } else if (uri.getRawAuthority() != null) { throwExc(compMsg("authority")) - } else if (uri.getRawQuery != null) { + } else if (uri.getRawQuery() != null) { throwExc(compMsg("query")) - } else if (uri.getRawFragment != null) { + } else if (uri.getRawFragment() != null) { throwExc(compMsg("fragment")) } // else URI is ok diff --git a/javalib/src/main/scala/java/io/FileDescriptor.scala b/javalib/src/main/scala/java/io/FileDescriptor.scala index d845ac30f7..1bd76c06ee 100644 --- a/javalib/src/main/scala/java/io/FileDescriptor.scala +++ b/javalib/src/main/scala/java/io/FileDescriptor.scala @@ -34,9 +34,9 @@ object FileDescriptor { private[io] def openReadOnly(file: File): FileDescriptor = Zone { implicit z => - val fd = fcntl.open(toCString(file.getPath), fcntl.O_RDONLY, 0.toUInt) + val fd = fcntl.open(toCString(file.getPath()), fcntl.O_RDONLY, 0.toUInt) if (fd == -1) { - throw new FileNotFoundException("No such file " + file.getPath) + throw new FileNotFoundException("No such file " + file.getPath()) } new FileDescriptor(fd, true) } diff --git a/javalib/src/main/scala/java/io/FileOutputStream.scala b/javalib/src/main/scala/java/io/FileOutputStream.scala index b8c47d1f6b..e4671a45af 100644 --- a/javalib/src/main/scala/java/io/FileOutputStream.scala +++ 
b/javalib/src/main/scala/java/io/FileOutputStream.scala @@ -67,7 +67,7 @@ object FileOutputStream { import stat._ val flags = O_CREAT | O_WRONLY | (if (append) O_APPEND else O_TRUNC) val mode = S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH - val fd = open(toCString(file.getPath), flags, mode) + val fd = open(toCString(file.getPath()), flags, mode) if (fd == -1) throw new FileNotFoundException( s"$file (${fromCString(string.strerror(errno.errno))})") diff --git a/javalib/src/main/scala/java/io/InputStreamReader.scala b/javalib/src/main/scala/java/io/InputStreamReader.scala index 0ee4e4cd42..9f5193397b 100644 --- a/javalib/src/main/scala/java/io/InputStreamReader.scala +++ b/javalib/src/main/scala/java/io/InputStreamReader.scala @@ -42,12 +42,12 @@ class InputStreamReader(private[this] var in: InputStream, this(in, Objects .requireNonNull(charset) - .newDecoder + .newDecoder() .onMalformedInput(CodingErrorAction.REPLACE) .onUnmappableCharacter(CodingErrorAction.REPLACE)) def this(in: InputStream) = - this(in, Charset.defaultCharset) + this(in, Charset.defaultCharset()) def this(in: InputStream, charsetName: String) = this( @@ -70,12 +70,12 @@ class InputStreamReader(private[this] var in: InputStream, } def getEncoding(): String = - if (closed) null else decoder.charset.name + if (closed) null else decoder.charset().name() override def read(): Int = { ensureOpen() - if (outBuf.hasRemaining) outBuf.get() + if (outBuf.hasRemaining()) outBuf.get() else super.read() } @@ -87,9 +87,9 @@ class InputStreamReader(private[this] var in: InputStream, if (len == 0) { 0 - } else if (outBuf.hasRemaining) { + } else if (outBuf.hasRemaining()) { // Reuse chars decoded last time - val available = Math.min(outBuf.remaining, len) + val available = Math.min(outBuf.remaining(), len) outBuf.get(cbuf, off, available) available } else if (!endOfInput) { @@ -117,12 +117,12 @@ class InputStreamReader(private[this] var in: InputStream, off: Int, len: Int): Int = { // Return outBuf to its full capacity - outBuf.limit(outBuf.capacity) + outBuf.limit(outBuf.capacity()) outBuf.position(0) @tailrec // but not inline, this is not a common path def loopWithOutBuf(desiredOutBufSize: Int): Int = { - if (outBuf.capacity < desiredOutBufSize) + if (outBuf.capacity() < desiredOutBufSize) outBuf = CharBuffer.allocate(desiredOutBufSize) val charsRead = readImpl(outBuf) if (charsRead == InputStreamReader.Overflow) @@ -158,16 +158,16 @@ class InputStreamReader(private[this] var in: InputStream, * at all), which will cause one of the following cases to be handled. */ out.position() - initPos - } else if (result.isUnderflow) { + } else if (result.isUnderflow()) { if (endOfInput) { assert( - !inBuf.hasRemaining, + !inBuf.hasRemaining(), "CharsetDecoder.decode() should not have returned UNDERFLOW when " + - "both endOfInput and inBuf.hasRemaining are true. It should have " + + "both endOfInput and inBuf.hasRemaining() are true. It should have " + "returned a MalformedInput error instead." 
) // Flush - if (decoder.flush(out).isOverflow) { + if (decoder.flush(out).isOverflow()) { InputStreamReader.Overflow } else { // Done @@ -176,13 +176,13 @@ class InputStreamReader(private[this] var in: InputStream, } } else { // We need to read more from the underlying input stream - if (inBuf.limit() == inBuf.capacity) { + if (inBuf.limit() == inBuf.capacity()) { inBuf.compact() - if (!inBuf.hasRemaining) { + if (!inBuf.hasRemaining()) { throw new AssertionError( "Scala.js implementation restriction: " + - inBuf.capacity + " bytes do not seem to be enough for " + - getEncoding + " to decode a single code point. " + + inBuf.capacity() + " bytes do not seem to be enough for " + + getEncoding() + " to decode a single code point. " + "Please report this as a bug.") } inBuf.limit(inBuf.position()) @@ -194,7 +194,9 @@ class InputStreamReader(private[this] var in: InputStream, * according to the specification of InputStreamReader. */ val bytesRead = - in.read(inBuf.array, inBuf.limit(), inBuf.capacity - inBuf.limit()) + in.read(inBuf.array(), + inBuf.limit(), + inBuf.capacity() - inBuf.limit()) if (bytesRead == -1) endOfInput = true @@ -203,7 +205,7 @@ class InputStreamReader(private[this] var in: InputStream, readImpl(out) } - } else if (result.isOverflow) { + } else if (result.isOverflow()) { InputStreamReader.Overflow } else { result.throwException() @@ -217,7 +219,7 @@ class InputStreamReader(private[this] var in: InputStream, * is the expected behavior. */ override def ready(): Boolean = - outBuf.hasRemaining || in.available() > 0 + outBuf.hasRemaining() || in.available() > 0 private def ensureOpen(): Unit = { if (closed) diff --git a/javalib/src/main/scala/java/io/OutputStreamWriter.scala b/javalib/src/main/scala/java/io/OutputStreamWriter.scala index a14b2d920d..7485c0b687 100644 --- a/javalib/src/main/scala/java/io/OutputStreamWriter.scala +++ b/javalib/src/main/scala/java/io/OutputStreamWriter.scala @@ -31,13 +31,13 @@ class OutputStreamWriter(private[this] var out: OutputStream, this(out, Objects .requireNonNull(cs) - .newEncoder + .newEncoder() .onMalformedInput(CodingErrorAction.REPLACE) .onUnmappableCharacter(CodingErrorAction.REPLACE)) } def this(out: OutputStream) = - this(out, Charset.defaultCharset) + this(out, Charset.defaultCharset()) def this(out: OutputStream, charsetName: String) = this( @@ -51,7 +51,7 @@ class OutputStreamWriter(private[this] var out: OutputStream, ) def getEncoding(): String = - if (closed) null else enc.charset.name + if (closed) null else enc.charset().name() override def write(c: Int): Unit = write(c.toChar.toString, 0, 1) @@ -75,8 +75,8 @@ class OutputStreamWriter(private[this] var out: OutputStream, @tailrec def loopEncode(): Unit = { val result = enc.encode(cbuf1, outBuf, false) - if (result.isUnderflow) () - else if (result.isOverflow) { + if (result.isUnderflow()) () + else if (result.isOverflow()) { makeRoomInOutBuf() loopEncode() } else { @@ -86,7 +86,7 @@ class OutputStreamWriter(private[this] var out: OutputStream, } loopEncode() - if (cbuf1.hasRemaining) + if (cbuf1.hasRemaining()) inBuf = cbuf1.toString } @@ -103,14 +103,14 @@ class OutputStreamWriter(private[this] var out: OutputStream, def loopEncode(): Unit = { val cbuf = CharBuffer.wrap(inBuf) val result = enc.encode(cbuf, outBuf, true) - if (result.isUnderflow) { + if (result.isUnderflow()) { assert( - !cbuf.hasRemaining, + !cbuf.hasRemaining(), "CharsetEncoder.encode() should not have returned UNDERFLOW when " + - "both endOfInput and inBuf.hasRemaining are true. 
It should have " + + "both endOfInput and inBuf.hasRemaining() are true. It should have " + "returned a MalformedInput error instead." ) - } else if (result.isOverflow) { + } else if (result.isOverflow()) { makeRoomInOutBuf() loopEncode() } else { @@ -122,7 +122,7 @@ class OutputStreamWriter(private[this] var out: OutputStream, @inline @tailrec def loopFlush(): Unit = { - if (enc.flush(outBuf).isOverflow) { + if (enc.flush(outBuf).isOverflow()) { makeRoomInOutBuf() loopFlush() } @@ -154,9 +154,9 @@ class OutputStreamWriter(private[this] var out: OutputStream, if (outBuf.position() != 0) { flushBuffer() } else { - // Very unlikely (outBuf.capacity is not enough to encode a single code point) + // Very unlikely (outBuf.capacity() is not enough to encode a single code point) outBuf.flip() - val newBuf = ByteBuffer.allocate(outBuf.capacity * 2) + val newBuf = ByteBuffer.allocate(outBuf.capacity() * 2) newBuf.put(outBuf) outBuf = newBuf } @@ -170,7 +170,7 @@ class OutputStreamWriter(private[this] var out: OutputStream, // Don't use outBuf.flip() first, in case out.write() throws // Hence, use 0 instead of position, and position instead of limit - out.write(outBuf.array, outBuf.arrayOffset, outBuf.position) + out.write(outBuf.array(), outBuf.arrayOffset(), outBuf.position()) outBuf.clear() } } diff --git a/javalib/src/main/scala/java/io/PrintStream.scala b/javalib/src/main/scala/java/io/PrintStream.scala index f68959267d..7f501dff5a 100644 --- a/javalib/src/main/scala/java/io/PrintStream.scala +++ b/javalib/src/main/scala/java/io/PrintStream.scala @@ -73,7 +73,7 @@ class PrintStream private (_out: OutputStream, private lazy val encoder = { val c = - if (charset == null) Charset.defaultCharset + if (charset == null) Charset.defaultCharset() else charset /* We pass `this` as the output stream for the encoding writer so that * we can apply auto-flushing. Note that this will flush() more often diff --git a/javalib/src/main/scala/java/io/RandomAccessFile.scala b/javalib/src/main/scala/java/io/RandomAccessFile.scala index c60cabfa56..d63dcc7b42 100644 --- a/javalib/src/main/scala/java/io/RandomAccessFile.scala +++ b/javalib/src/main/scala/java/io/RandomAccessFile.scala @@ -76,7 +76,7 @@ class RandomAccessFile private (file: File, in.readInt() override final def readLine(): String = { - if (getFilePointer == length) null + if (getFilePointer() == length()) null else { val builder = new StringBuilder var c = '0' @@ -87,7 +87,7 @@ class RandomAccessFile private (file: File, // If there's a newline after carriage-return, we must eat it too. 
if (c == '\r' && readChar() != '\n') { - seek(getFilePointer - 1) + seek(getFilePointer() - 1) } builder.toString.init } @@ -125,7 +125,7 @@ class RandomAccessFile private (file: File, override def skipBytes(n: Int): Int = if (n <= 0) 0 else { - val currentPosition = getFilePointer + val currentPosition = getFilePointer() val fileLength = length() val toSkip = if (currentPosition + n > fileLength) fileLength - currentPosition @@ -201,7 +201,7 @@ class RandomAccessFile private (file: File, override final def writeUTF(str: String): Unit = { out.writeUTF(str) - maybeFlush + maybeFlush() } private def maybeFlush(): Unit = @@ -213,8 +213,8 @@ private object RandomAccessFile { Zone { implicit z => import fcntl._ import stat._ - if (_flags == "r" && !file.exists) - throw new FileNotFoundException(file.getName) + if (_flags == "r" && !file.exists()) + throw new FileNotFoundException(file.getName()) val flags = _flags match { case "r" => O_RDONLY case "rw" | "rws" | "rwd" => O_RDWR | O_CREAT @@ -223,7 +223,7 @@ private object RandomAccessFile { s"""Illegal mode "${_flags}" must be one of "r", "rw", "rws" or "rwd"""") } val mode = S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH - val fd = open(toCString(file.getPath), flags, mode) + val fd = open(toCString(file.getPath()), flags, mode) new FileDescriptor(fd) } diff --git a/javalib/src/main/scala/java/io/Reader.scala b/javalib/src/main/scala/java/io/Reader.scala index a7da18e4f5..431c2b1189 100644 --- a/javalib/src/main/scala/java/io/Reader.scala +++ b/javalib/src/main/scala/java/io/Reader.scala @@ -12,16 +12,16 @@ abstract class Reader private[this] (_lock: Option[Object]) protected def this() = this(None) def read(target: CharBuffer): Int = { - if (!target.hasRemaining) 0 - else if (target.hasArray) { - val charsRead = read(target.array, - target.position() + target.arrayOffset, - target.remaining) + if (!target.hasRemaining()) 0 + else if (target.hasArray()) { + val charsRead = read(target.array(), + target.position() + target.arrayOffset(), + target.remaining()) if (charsRead != -1) target.position(target.position() + charsRead) charsRead } else { - val buf = new Array[Char](target.remaining) + val buf = new Array[Char](target.remaining()) val charsRead = read(buf) if (charsRead != -1) target.put(buf, 0, charsRead) diff --git a/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala b/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala index 1ce002bf37..b9dc0165de 100644 --- a/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala +++ b/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala @@ -202,7 +202,7 @@ abstract class AbstractStringBuilder private (unit: Unit) { System.arraycopy(value, start, dest, destStart, end - start) } - final def insert0(index: scala.Int, chars: Array[Char]) { + final def insert0(index: scala.Int, chars: Array[Char]) = { if (0 > index || index > count) { throw new StringIndexOutOfBoundsException(index) } @@ -510,7 +510,7 @@ abstract class AbstractStringBuilder private (unit: Unit) { while (!found && i < count) { if (value(i) == firstChar) { found = true - break + break() } i += 1 } @@ -523,9 +523,9 @@ abstract class AbstractStringBuilder private (unit: Unit) { breakable { while (true) { o2 += 1 - if (!(o2 < subCount)) break + if (!(o2 < subCount)) break() o1 += 1 - if (!(value(o1) == subString.charAt(o2))) break + if (!(value(o1) == subString.charAt(o2))) break() } } if (o2 == subCount) { @@ -556,7 +556,7 @@ abstract class AbstractStringBuilder private (unit: Unit) { while (!found 
&& i >= 0) { if (value(i) == firstChar) { found = true - break + break() } i -= 1 } @@ -569,9 +569,9 @@ abstract class AbstractStringBuilder private (unit: Unit) { breakable { while (true) { o2 += 1 - if (!(o2 < subCount)) break + if (!(o2 < subCount)) break() o1 += 1 - if (!(value(o1) == subString.charAt(o2))) break + if (!(value(o1) == subString.charAt(o2))) break() } } if (o2 == subCount) { diff --git a/javalib/src/main/scala/java/lang/Byte.scala b/javalib/src/main/scala/java/lang/Byte.scala index b0ea1dc1cf..5785caa04e 100644 --- a/javalib/src/main/scala/java/lang/Byte.scala +++ b/javalib/src/main/scala/java/lang/Byte.scala @@ -74,7 +74,7 @@ final class Byte(val _value: scala.Byte) extends Number with Comparable[Byte] { protected def unary_+ : scala.Int = _value.toInt protected def unary_- : scala.Int = -_value.toInt - protected def +(x: String): String = _value + x + protected def +(x: String): String = "" + _value + x protected def <<(x: scala.Int): scala.Int = _value << x protected def <<(x: scala.Long): scala.Int = _value << x.toInt @@ -194,7 +194,7 @@ object Byte { x - y @inline def decode(nm: String): Byte = { - val i = Integer.decode(nm).intValue + val i = Integer.decode(nm).intValue() val b = i.toByte if (b == i) valueOf(b) diff --git a/javalib/src/main/scala/java/lang/Character.scala b/javalib/src/main/scala/java/lang/Character.scala index bc7f0ed359..f169f231c4 100644 --- a/javalib/src/main/scala/java/lang/Character.scala +++ b/javalib/src/main/scala/java/lang/Character.scala @@ -62,7 +62,7 @@ class Character(val _value: scala.Char) protected def unary_- : scala.Int = -_value.toInt // scalastyle:on disallow.space.before.token - protected def +(x: String): String = _value + x + protected def +(x: String): String = "" + _value + x protected def <<(x: scala.Int): scala.Int = _value << x protected def <<(x: scala.Long): scala.Int = _value << x.toInt diff --git a/javalib/src/main/scala/java/lang/Double.scala b/javalib/src/main/scala/java/lang/Double.scala index 803806d35d..d55644bd24 100644 --- a/javalib/src/main/scala/java/lang/Double.scala +++ b/javalib/src/main/scala/java/lang/Double.scala @@ -107,7 +107,7 @@ final class Double(val _value: scala.Double) protected def unary_+ : scala.Double = _value protected def unary_- : scala.Double = -_value - protected def +(x: String): String = _value + x + protected def +(x: String): String = "" + _value + x protected def <(x: scala.Byte): scala.Boolean = _value < x protected def <(x: scala.Short): scala.Boolean = _value < x diff --git a/javalib/src/main/scala/java/lang/Enum.scala b/javalib/src/main/scala/java/lang/Enum.scala index 1f1a46e294..3f0b4b99a1 100644 --- a/javalib/src/main/scala/java/lang/Enum.scala +++ b/javalib/src/main/scala/java/lang/Enum.scala @@ -6,5 +6,5 @@ abstract class Enum[E <: Enum[E]] protected (_name: String, _ordinal: Int) def name(): String = _name def ordinal(): Int = _ordinal override def toString(): String = _name - final def compareTo(o: E): Int = _ordinal.compareTo(o.ordinal) + final def compareTo(o: E): Int = _ordinal.compareTo(o.ordinal()) } diff --git a/javalib/src/main/scala/java/lang/Float.scala b/javalib/src/main/scala/java/lang/Float.scala index 87f98e880a..20160449d1 100644 --- a/javalib/src/main/scala/java/lang/Float.scala +++ b/javalib/src/main/scala/java/lang/Float.scala @@ -103,7 +103,7 @@ final class Float(val _value: scala.Float) protected def unary_+ : scala.Float = _value protected def unary_- : scala.Float = -_value - protected def +(x: String): String = _value + x + protected def +(x: 
String): String = "" + _value + x protected def <(x: scala.Byte): scala.Boolean = _value < x protected def <(x: scala.Short): scala.Boolean = _value < x diff --git a/javalib/src/main/scala/java/lang/Integer.scala b/javalib/src/main/scala/java/lang/Integer.scala index 31e112b4f4..a1490ce9b5 100644 --- a/javalib/src/main/scala/java/lang/Integer.scala +++ b/javalib/src/main/scala/java/lang/Integer.scala @@ -77,7 +77,7 @@ final class Integer(val _value: scala.Int) protected def unary_+ : scala.Int = _value protected def unary_- : scala.Int = -_value - protected def +(x: String): String = _value + x + protected def +(x: String): String = "" + _value + x protected def <<(x: scala.Int): scala.Int = _value << x protected def <<(x: scala.Long): scala.Int = _value << x.toInt diff --git a/javalib/src/main/scala/java/lang/Long.scala b/javalib/src/main/scala/java/lang/Long.scala index 6b00487107..4f85745f9c 100644 --- a/javalib/src/main/scala/java/lang/Long.scala +++ b/javalib/src/main/scala/java/lang/Long.scala @@ -82,7 +82,7 @@ final class Long(val _value: scala.Long) extends Number with Comparable[Long] { protected def unary_+ : scala.Long = _value protected def unary_- : scala.Long = -_value - protected def +(x: String): String = _value + x + protected def +(x: String): String = "" + _value + x protected def <<(x: scala.Int): scala.Long = _value << x protected def <<(x: scala.Long): scala.Long = _value << x diff --git a/javalib/src/main/scala/java/lang/Math.scala b/javalib/src/main/scala/java/lang/Math.scala index 91dee85c6b..5ca4a02593 100644 --- a/javalib/src/main/scala/java/lang/Math.scala +++ b/javalib/src/main/scala/java/lang/Math.scala @@ -135,10 +135,10 @@ object Math { cmath.log1p(a) @alwaysinline def max(a: scala.Double, b: scala.Double): scala.Double = - if (a.isNaN || b.isNaN) Double.NaN else `llvm.maxnum.f64`(a, b) + if (a.isNaN() || b.isNaN()) Double.NaN else `llvm.maxnum.f64`(a, b) @alwaysinline def max(a: scala.Float, b: scala.Float): scala.Float = - if (a.isNaN || b.isNaN) Float.NaN else `llvm.maxnum.f32`(a, b) + if (a.isNaN() || b.isNaN()) Float.NaN else `llvm.maxnum.f32`(a, b) @alwaysinline def max(a: scala.Int, b: scala.Int): scala.Int = if (a > b) a else b @@ -147,10 +147,10 @@ object Math { if (a > b) a else b @alwaysinline def min(a: scala.Double, b: scala.Double): scala.Double = - if (a.isNaN || b.isNaN) Double.NaN else `llvm.minnum.f64`(a, b) + if (a.isNaN() || b.isNaN()) Double.NaN else `llvm.minnum.f64`(a, b) @alwaysinline def min(a: scala.Float, b: scala.Float): scala.Float = - if (a.isNaN || b.isNaN) Float.NaN else `llvm.minnum.f32`(a, b) + if (a.isNaN() || b.isNaN()) Float.NaN else `llvm.minnum.f32`(a, b) @alwaysinline def min(a: scala.Int, b: scala.Int): scala.Int = if (a < b) a else b @@ -238,7 +238,7 @@ object Math { `llvm.rint.f64`(a) @inline def round(a: scala.Float): scala.Int = { - if (a.isNaN) { + if (a.isNaN()) { 0 } else if (a >= scala.Int.MaxValue.toFloat - 0.5f) { scala.Int.MaxValue @@ -253,7 +253,7 @@ object Math { } @inline def round(a: scala.Double): scala.Long = { - if (a.isNaN) { + if (a.isNaN()) { 0L } else if (a >= scala.Long.MaxValue.toDouble - 0.5d) { scala.Long.MaxValue diff --git a/javalib/src/main/scala/java/lang/Number.scala b/javalib/src/main/scala/java/lang/Number.scala index 308b0ff806..013136f976 100644 --- a/javalib/src/main/scala/java/lang/Number.scala +++ b/javalib/src/main/scala/java/lang/Number.scala @@ -3,8 +3,8 @@ package java.lang import scala.math.ScalaNumber abstract class Number extends java.lang._Object with java.io.Serializable 
{ - def byteValue(): scala.Byte = intValue.toByte - def shortValue(): scala.Short = intValue.toShort + def byteValue(): scala.Byte = intValue().toByte + def shortValue(): scala.Short = intValue().toShort def intValue(): scala.Int def longValue(): scala.Long def floatValue(): scala.Float diff --git a/javalib/src/main/scala/java/lang/PipeIO.scala b/javalib/src/main/scala/java/lang/PipeIO.scala index 5e20f1512d..196c1c7660 100644 --- a/javalib/src/main/scala/java/lang/PipeIO.scala +++ b/javalib/src/main/scala/java/lang/PipeIO.scala @@ -20,7 +20,7 @@ private[lang] object PipeIO { childFd: Int, redirect: ProcessBuilder.Redirect )(implicit ioStream: PipeIO[T]): T = { - redirect.`type` match { + redirect.`type`() match { case ProcessBuilder.Redirect.Type.PIPE => ioStream.fdStream(process, new FileDescriptor(childFd)) case _ => @@ -61,7 +61,7 @@ private[lang] object PipeIO { var toRead = 0 var readBuf: Array[scala.Byte] = Array() while ({ - toRead = availableFD + toRead = availableFD() toRead > 0 }) { val size = if (readBuf == null) 0 else readBuf.size @@ -80,7 +80,7 @@ private[lang] object PipeIO { private[this] var drained = false private def availableFD() = { val res = stackalloc[CInt] - ioctl(is.getFD.fd, FIONREAD, res.asInstanceOf[Ptr[scala.Byte]]) match { + ioctl(is.getFD().fd, FIONREAD, res.asInstanceOf[Ptr[scala.Byte]]) match { case -1 => 0 case _ => !res } diff --git a/javalib/src/main/scala/java/lang/ProcessBuilder.scala b/javalib/src/main/scala/java/lang/ProcessBuilder.scala index dcdf524f4d..9ca14598ac 100644 --- a/javalib/src/main/scala/java/lang/ProcessBuilder.scala +++ b/javalib/src/main/scala/java/lang/ProcessBuilder.scala @@ -11,26 +11,30 @@ import scala.scalanative.runtime.Platform import ProcessBuilder.Redirect final class ProcessBuilder(private var _command: List[String]) { - def this(command: Array[String]) { + def this(command: Array[String]) = { this(Arrays.asList(command)) } def command(): List[String] = _command def command(command: Array[String]): ProcessBuilder = - set(_command = Arrays.asList(command)) + set { _command = Arrays.asList(command); () } - def command(command: List[String]): ProcessBuilder = set(_command = command) + def command(command: List[String]): ProcessBuilder = set { + _command = command; () + } def environment(): Map[String, String] = _environment def directory(): File = _directory def directory(dir: File): ProcessBuilder = - set(_directory = dir match { - case null => defaultDirectory - case _ => dir - }) + set { + _directory = dir match { + case null => defaultDirectory + case _ => dir + }; () + } def inheritIO(): ProcessBuilder = { redirectInput(Redirect.INHERIT) @@ -39,38 +43,38 @@ final class ProcessBuilder(private var _command: List[String]) { } def redirectError(destination: Redirect): ProcessBuilder = destination match { - case null => set(_redirectOutput = Redirect.PIPE) + case null => set { _redirectOutput = Redirect.PIPE; () } case d => - d.`type` match { + d.`type`() match { case Redirect.Type.READ => throw new IllegalArgumentException( s"Redirect.READ cannot be used for error.") case _ => - set(_redirectError = destination) + set { _redirectError = destination; () } } } def redirectInput(source: Redirect): ProcessBuilder = source match { - case null => set(_redirectInput = Redirect.PIPE) + case null => set { _redirectInput = Redirect.PIPE; () } case s => - s.`type` match { + s.`type`() match { case Redirect.Type.WRITE | Redirect.Type.APPEND => throw new IllegalArgumentException(s"$s cannot be used for input.") case _ => - 
set(_redirectInput = source) + set { _redirectInput = source; () } } } def redirectOutput(destination: Redirect): ProcessBuilder = destination match { - case null => set(_redirectOutput = Redirect.PIPE) + case null => set { _redirectOutput = Redirect.PIPE; () } case s => - s.`type` match { + s.`type`() match { case Redirect.Type.READ => throw new IllegalArgumentException( s"Redirect.READ cannot be used for output.") case _ => - set(_redirectOutput = destination) + set { _redirectOutput = destination; () } } } @@ -95,12 +99,12 @@ final class ProcessBuilder(private var _command: List[String]) { def redirectErrorStream(): scala.Boolean = _redirectErrorStream def redirectErrorStream(redirectErrorStream: scala.Boolean): ProcessBuilder = - set(_redirectErrorStream = redirectErrorStream) + set { _redirectErrorStream = redirectErrorStream; () } def start(): Process = { if (_command.isEmpty()) throw new IndexOutOfBoundsException() if (_command.contains(null)) throw new NullPointerException() - if (Platform.isWindows) { + if (Platform.isWindows()) { val msg = "No windows implementation of java.lang.Process" throw new UnsupportedOperationException(msg) } else { @@ -134,14 +138,14 @@ object ProcessBuilder { def `type`(): Redirect.Type override def equals(other: Any): scala.Boolean = other match { - case that: Redirect => file == that.file && `type` == that.`type` + case that: Redirect => file() == that.file() && `type`() == that.`type`() case _ => false } override def hashCode(): Int = { var hash = 1 - hash = hash * 31 + file.hashCode() - hash = hash * 31 + `type`.hashCode() + hash = hash * 31 + file().hashCode() + hash = hash * 31 + `type`().hashCode() hash } } @@ -188,7 +192,7 @@ object ProcessBuilder { def valueOf(name: String): Type = { if (name == null) throw new NullPointerException() - _values.toSeq.find(_.name == name) match { + _values.toSeq.find(_.name() == name) match { case Some(t) => t case None => throw new IllegalArgumentException( diff --git a/javalib/src/main/scala/java/lang/Short.scala b/javalib/src/main/scala/java/lang/Short.scala index 81c321cb92..a794d9c927 100644 --- a/javalib/src/main/scala/java/lang/Short.scala +++ b/javalib/src/main/scala/java/lang/Short.scala @@ -75,7 +75,7 @@ final class Short(val _value: scala.Short) protected def unary_+ : scala.Int = _value.toInt protected def unary_- : scala.Int = -_value.toInt - protected def +(x: String): String = _value + x + protected def +(x: String): String = "" + _value + x protected def <<(x: scala.Int): scala.Int = _value << x protected def <<(x: scala.Long): scala.Int = _value << x.toInt @@ -196,7 +196,7 @@ object Short { x - y @inline def decode(nm: String): Short = { - val i = Integer.decode(nm).intValue + val i = Integer.decode(nm).intValue() val r = i.toShort if (r == i) valueOf(r) diff --git a/javalib/src/main/scala/java/lang/StackTraceElement.scala b/javalib/src/main/scala/java/lang/StackTraceElement.scala index 889682ba55..abe8c2c191 100644 --- a/javalib/src/main/scala/java/lang/StackTraceElement.scala +++ b/javalib/src/main/scala/java/lang/StackTraceElement.scala @@ -50,9 +50,9 @@ private[lang] object StackTraceElement { var methodName = "" def readSymbol(): Boolean = { - if (read != '_') { + if (read() != '_') { false - } else if (read != 'S') { + } else if (read() != 'S') { false } else { readGlobal() diff --git a/javalib/src/main/scala/java/lang/String.scala b/javalib/src/main/scala/java/lang/String.scala index 2ea0880923..7994eae44c 100644 --- a/javalib/src/main/scala/java/lang/String.scala +++ 
b/javalib/src/main/scala/java/lang/String.scala @@ -44,8 +44,8 @@ final class _String() this() offset = 0 val charBuffer = encoding.decode(ByteBuffer.wrap(data, start, length)) - value = charBuffer.array - count = charBuffer.length + value = charBuffer.array() + count = charBuffer.length() } def this(data: Array[scala.Byte], @@ -109,14 +109,14 @@ final class _String() count = string.length() } - def this(sb: StringBuffer) { + def this(sb: StringBuffer) = { this() offset = 0 - value = sb.getValue - count = sb.length + value = sb.getValue() + count = sb.length() } - def this(codePoints: Array[Int], offset: Int, count: Int) { + def this(codePoints: Array[Int], offset: Int, count: Int) = { this() if (offset < 0 || count < 0 || offset > codePoints.length - count) { throw new StringIndexOutOfBoundsException() @@ -135,10 +135,10 @@ final class _String() } } - def this(sb: java.lang.StringBuilder) { + def this(sb: java.lang.StringBuilder) = { this() offset = 0 - count = sb.length + count = sb.length() value = new Array[Char](count) sb.getChars(0, count, value, 0) } @@ -600,9 +600,9 @@ final class _String() val rs = replacement.toString - if (ts.isEmpty) { + if (ts.isEmpty()) { val buffer = - new java.lang.StringBuilder(count + (rs.length * (count + 1))) + new java.lang.StringBuilder(count + (rs.length() * (count + 1))) buffer.append(rs) var i = 0 @@ -616,7 +616,7 @@ final class _String() } val buffer = new java.lang.StringBuilder(count + rs.length) - val tl = target.length + val tl = target.length() var tail = 0 do { buffer.append(value, offset + tail, index - tail) @@ -669,14 +669,14 @@ final class _String() def toLowerCase(locale: Locale): _String = toCase(locale, Character.toLowerCase) - def toLowerCase(): _String = toLowerCase(Locale.getDefault) + def toLowerCase(): _String = toLowerCase(Locale.getDefault()) override def toString(): String = this def toUpperCase(locale: Locale): _String = toCase(locale, Character.toUpperCase) - def toUpperCase(): _String = toUpperCase(Locale.getDefault) + def toUpperCase(): _String = toUpperCase(Locale.getDefault()) private[this] def toCase(locale: Locale, convert: Int => Int): _String = { if (count == 0) return this @@ -730,16 +730,16 @@ final class _String() } def contentEquals(sb: StringBuffer): scala.Boolean = { - val size = sb.length + val size = sb.length() if (count != size) { false } else { - regionMatches(0, new _String(0, size, sb.getValue), 0, size) + regionMatches(0, new _String(0, size, sb.getValue()), 0, size) } } def contentEquals(cs: CharSequence): scala.Boolean = { - val len = cs.length + val len = cs.length() if (len != count) { false } else if (len == 0 && count == 0) { @@ -800,10 +800,10 @@ final class _String() split(expr, 0) def split(expr: _String, max: Int): Array[String] = - if (isEmpty) { + if (isEmpty()) { Array("") } else { - expr.length match { + expr.length() match { case 1 if !isRegexMeta(expr.charAt(0)) => fastSplit(expr.charAt(0), max) case 2 if expr.charAt(0) == '\\' && isRegexMeta(expr.charAt(1)) => fastSplit(expr.charAt(1), max) @@ -909,8 +909,8 @@ object _String { throw new NullPointerException("null format argument") } else { val bufferSize = - if (args == null) fmt.length + 0 - else fmt.length + args.length * 10 + if (args == null) fmt.length() + 0 + else fmt.length() + args.length * 10 val f = new Formatter(new java.lang.StringBuilder(bufferSize), loc) f.format(fmt, args).toString } diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index 5c9cccdbaf..b6f2fb0f69 
100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -41,7 +41,7 @@ object System { "Java Platform API Specification") sysProps.setProperty("line.separator", lineSeparator()) - if (Platform.isWindows) { + if (Platform.isWindows()) { sysProps.setProperty("file.separator", "\\") sysProps.setProperty("path.separator", ";") val userLang = fromCString(Platform.windowsGetUserLang()) @@ -98,7 +98,7 @@ object System { }) def lineSeparator(): String = { - if (Platform.isWindows) "\r\n" + if (Platform.isWindows()) "\r\n" else "\n" } diff --git a/javalib/src/main/scala/java/lang/Thread.scala b/javalib/src/main/scala/java/lang/Thread.scala index 203eb8ce36..a303953737 100644 --- a/javalib/src/main/scala/java/lang/Thread.scala +++ b/javalib/src/main/scala/java/lang/Thread.scala @@ -67,8 +67,8 @@ object Thread { def currentThread(): Thread = MainThread def interrupted(): scala.Boolean = { - val ret = currentThread.isInterrupted - currentThread.interruptedState = false + val ret = currentThread().isInterrupted() + currentThread().interruptedState = false ret } diff --git a/javalib/src/main/scala/java/lang/ThreadLocal.scala b/javalib/src/main/scala/java/lang/ThreadLocal.scala index ae9e0330f0..b5853f2fd0 100644 --- a/javalib/src/main/scala/java/lang/ThreadLocal.scala +++ b/javalib/src/main/scala/java/lang/ThreadLocal.scala @@ -8,7 +8,7 @@ class ThreadLocal[T] { def get(): T = { if (!hasValue) - set(initialValue) + set(initialValue()) v } diff --git a/javalib/src/main/scala/java/lang/Throwables.scala b/javalib/src/main/scala/java/lang/Throwables.scala index 41bab872d8..2bcb6c0a29 100644 --- a/javalib/src/main/scala/java/lang/Throwables.scala +++ b/javalib/src/main/scala/java/lang/Throwables.scala @@ -199,7 +199,7 @@ class Throwable protected (s: String, while (throwable != null) { println("Caused by: " + throwable) - val currentStack = throwable.getStackTrace + val currentStack = throwable.getStackTrace() if (currentStack.nonEmpty) { val duplicates = countDuplicates(currentStack, parentStack) var i = 0 @@ -215,7 +215,7 @@ class Throwable protected (s: String, } parentStack = currentStack - throwable = throwable.getCause + throwable = throwable.getCause() } } diff --git a/javalib/src/main/scala/java/lang/UnixProcess.scala b/javalib/src/main/scala/java/lang/UnixProcess.scala index 0eaaf62916..21c88a36fd 100644 --- a/javalib/src/main/scala/java/lang/UnixProcess.scala +++ b/javalib/src/main/scala/java/lang/UnixProcess.scala @@ -4,20 +4,19 @@ package lang import java.io.{File, IOException, InputStream, OutputStream} import java.util.concurrent.TimeUnit import java.util.ScalaOps._ - import scala.scalanative.unsigned._ import scala.scalanative.unsafe._ import scala.scalanative.libc.{errno => err, signal => sig, _} import sig._ import err.errno -import scala.scalanative.posix.{fcntl, pthread, sys, unistd, errno => e, time} +import scala.scalanative.posix.{fcntl, pthread, sys, time, unistd, errno => e} import time._ import sys.time._ import e.ETIMEDOUT import UnixProcess._ import java.lang.ProcessBuilder.Redirect - import pthread._ +import scala.collection.mutable.ArraySeq import scala.scalanative.posix.sys.types.{pthread_cond_t, pthread_mutex_t} private[lang] class UnixProcess private ( @@ -82,11 +81,11 @@ private[lang] class UnixProcess private ( } private[this] val _inputStream = - PipeIO[PipeIO.Stream](this, !outfds, builder.redirectOutput) + PipeIO[PipeIO.Stream](this, !outfds, builder.redirectOutput()) private[this] val _errorStream = - 
PipeIO[PipeIO.Stream](this, !errfds, builder.redirectError) + PipeIO[PipeIO.Stream](this, !errfds, builder.redirectError()) private[this] val _outputStream = - PipeIO[OutputStream](this, !(infds + 1), builder.redirectInput) + PipeIO[OutputStream](this, !(infds + 1), builder.redirectInput()) private[this] var _exitValue = -1 private[lang] def checkResult(): CInt = { @@ -130,11 +129,11 @@ object UnixProcess { val infds = stackalloc[CInt](2) val outfds = stackalloc[CInt](2) val errfds = - if (builder.redirectErrorStream) outfds else stackalloc[CInt](2) + if (builder.redirectErrorStream()) outfds else stackalloc[CInt](2) throwOnError(unistd.pipe(infds), s"Couldn't create pipe.") throwOnError(unistd.pipe(outfds), s"Couldn't create pipe.") - if (!builder.redirectErrorStream) + if (!builder.redirectErrorStream()) throwOnError(unistd.pipe(errfds), s"Couldn't create pipe.") val cmd = builder.command().scalaOps.toSeq val binaries = binaryPaths(builder.environment(), cmd.head) @@ -171,13 +170,13 @@ object UnixProcess { */ def invokeChildProcess(): Process = { if (dir != null) unistd.chdir(toCString(dir.toString)) - setupChildFDS(!infds, builder.redirectInput, unistd.STDIN_FILENO) + setupChildFDS(!infds, builder.redirectInput(), unistd.STDIN_FILENO) setupChildFDS(!(outfds + 1), - builder.redirectOutput, + builder.redirectOutput(), unistd.STDOUT_FILENO) setupChildFDS(!(errfds + 1), - if (builder.redirectErrorStream) Redirect.PIPE - else builder.redirectError, + if (builder.redirectErrorStream()) Redirect.PIPE + else builder.redirectError(), unistd.STDERR_FILENO) unistd.close(!infds) unistd.close(!(infds + 1)) @@ -213,8 +212,9 @@ object UnixProcess { } } - @inline private def nullTerminate(seq: Seq[String])(implicit z: Zone) = { - val res = alloc[CString](seq.length + 1) + @inline private def nullTerminate(seq: collection.Iterable[String])( + implicit z: Zone) = { + val res = alloc[CString](seq.size + 1) seq.zipWithIndex foreach { case (s, i) => !(res + i) = toCString(s) } res } @@ -223,7 +223,7 @@ object UnixProcess { redirect: ProcessBuilder.Redirect, procFd: CInt): Unit = { import fcntl.{open => _, _} - redirect.`type` match { + redirect.`type`() match { case ProcessBuilder.Redirect.Type.INHERIT => case ProcessBuilder.Redirect.Type.PIPE => if (unistd.dup2(childFd, procFd) == -1) { @@ -231,19 +231,19 @@ object UnixProcess { s"Couldn't duplicate pipe file descriptor $errno") } case r @ ProcessBuilder.Redirect.Type.READ => - val fd = open(redirect.file, O_RDONLY) + val fd = open(redirect.file(), O_RDONLY) if (unistd.dup2(fd, procFd) == -1) { throw new IOException( s"Couldn't duplicate read file descriptor $errno") } case r @ ProcessBuilder.Redirect.Type.WRITE => - val fd = open(redirect.file, O_CREAT | O_WRONLY | O_TRUNC) + val fd = open(redirect.file(), O_CREAT | O_WRONLY | O_TRUNC) if (unistd.dup2(fd, procFd) == -1) { throw new IOException( s"Couldn't duplicate write file descriptor $errno") } case r @ ProcessBuilder.Redirect.Type.APPEND => - val fd = open(redirect.file, O_CREAT | O_WRONLY | O_APPEND) + val fd = open(redirect.file(), O_CREAT | O_WRONLY | O_APPEND) if (unistd.dup2(fd, procFd) == -1) { throw new IOException( s"Couldn't duplicate append file descriptor $errno") @@ -252,7 +252,7 @@ object UnixProcess { } @inline def open(f: File, flags: CInt) = Zone { implicit z => - fcntl.open(toCString(f.getAbsolutePath), flags, 0.toUInt) match { + fcntl.open(toCString(f.getAbsolutePath()), flags, 0.toUInt) match { case -1 => throw new IOException(s"Unable to open file $f ($errno)") case fd => fd } @@ 
-268,10 +268,12 @@ object UnixProcess { case null => "/bin:/usr/bin:/usr/local/bin" case p => p } - path split ":" map { absPath => - new File(s"$absPath/$bin") - } collect { - case f if f.canExecute => f.toString + ArraySeq + .unsafeWrapArray(path.split(":")) + .map { absPath => + new File(s"$absPath/$bin") + } collect { + case f if f.canExecute() => f.toString } } } diff --git a/javalib/src/main/scala/java/lang/annotation/RetentionPolicy.scala b/javalib/src/main/scala/java/lang/annotation/RetentionPolicy.scala index 970df36bda..13d626cdc8 100644 --- a/javalib/src/main/scala/java/lang/annotation/RetentionPolicy.scala +++ b/javalib/src/main/scala/java/lang/annotation/RetentionPolicy.scala @@ -9,7 +9,7 @@ object RetentionPolicy { final val RUNTIME = new RetentionPolicy("RUNTIME", 2) def valueOf(name: String): RetentionPolicy = - values.find(_.name() == name).getOrElse { + values().find(_.name() == name).getOrElse { throw new IllegalArgumentException( s"No enum constant java.lang.annotation.RetentionPolicy.$name") } diff --git a/javalib/src/main/scala/java/lang/reflect/Array.scala b/javalib/src/main/scala/java/lang/reflect/Array.scala index ea12aca176..a0a0f5faaf 100644 --- a/javalib/src/main/scala/java/lang/reflect/Array.scala +++ b/javalib/src/main/scala/java/lang/reflect/Array.scala @@ -106,8 +106,8 @@ object Array { case array: Array[Char] => array(index) case array: Array[Byte] => array(index) case array: Array[Short] => array(index) - case array: Array[Int] => array(index) - case array: Array[Long] => array(index) + case array: Array[Int] => array(index).toFloat + case array: Array[Long] => array(index).toFloat case _ => throw new IllegalArgumentException("argument type mismatch") } @@ -118,7 +118,7 @@ object Array { case array: Array[Byte] => array(index) case array: Array[Short] => array(index) case array: Array[Int] => array(index) - case array: Array[Long] => array(index) + case array: Array[Long] => array(index).toDouble case array: Array[Float] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") @@ -182,7 +182,7 @@ object Array { def setInt(array: AnyRef, index: Int, value: Int): Unit = array match { case array: Array[Int] => array(index) = value case array: Array[Long] => array(index) = value - case array: Array[Float] => array(index) = value + case array: Array[Float] => array(index) = value.toFloat case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") @@ -190,8 +190,8 @@ object Array { def setLong(array: AnyRef, index: Int, value: Long): Unit = array match { case array: Array[Long] => array(index) = value - case array: Array[Float] => array(index) = value - case array: Array[Double] => array(index) = value + case array: Array[Float] => array(index) = value.toFloat + case array: Array[Double] => array(index) = value.toDouble case _ => throw new IllegalArgumentException("argument type mismatch") } diff --git a/javalib/src/main/scala/java/math/BigDecimal.scala b/javalib/src/main/scala/java/math/BigDecimal.scala index a374f02008..648d2139c1 100644 --- a/javalib/src/main/scala/java/math/BigDecimal.scala +++ b/javalib/src/main/scala/java/math/BigDecimal.scala @@ -104,7 +104,7 @@ object BigDecimal { } def valueOf(d: Double): BigDecimal = { - if (d.isInfinite || d.isNaN) + if (d.isInfinite() || d.isNaN()) throw new NumberFormatException("Infinity or NaN: " + d) new BigDecimal(d.toString) @@ -124,7 +124,7 @@ object BigDecimal { } else { val bi = 
Multiplication.multiplyByTenPow(augend.getUnscaledValue(), diffScale) - new BigDecimal(thisValue.getUnscaledValue().add(bi), thisValue.scale) + new BigDecimal(thisValue.getUnscaledValue().add(bi), thisValue.scale()) } } @@ -202,8 +202,9 @@ object BigDecimal { else 0 } - private[math] def newArrayOfPows(len: Int, pow: Int): Array[Long] = - new Array[Long](len - 1).scanLeft[Long, Array[Long]](1)((z, e) => z * pow) + private[math] def newArrayOfPows(len: Int, pow: Int): Array[Long] = { + new Array[Long](len - 1).scanLeft(1L)((z, e) => z * pow) + } /** Return an increment that can be -1,0 or 1, depending on {@code roundingMode}. * @@ -471,7 +472,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { def this(dVal: Double) = { this() - if (dVal.isInfinite || dVal.isNaN) + if (dVal.isInfinite() || dVal.isNaN()) throw new NumberFormatException("Infinity or NaN: " + dVal) val bits = java.lang.Double.doubleToLongBits(dVal) @@ -571,15 +572,15 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { def add(augend: BigDecimal): BigDecimal = { val diffScale = this._scale - augend._scale // Fast return when some operand is zero - if (this.isZero && diffScale <= 0) { + if (this.isZero() && diffScale <= 0) { augend - } else if (augend.isZero && (this.isZero || diffScale >= 0)) { + } else if (augend.isZero() && (this.isZero() || diffScale >= 0)) { this } else if (diffScale == 0) { if (Math.max(this._bitLength, augend._bitLength) + 1 < 64) valueOf(this._smallValue + augend._smallValue, this._scale) else - new BigDecimal(this.getUnscaledValue.add(augend.getUnscaledValue), + new BigDecimal(this.getUnscaledValue().add(augend.getUnscaledValue()), this._scale) } else if (diffScale > 0) { addAndMult10(this, augend, diffScale) @@ -590,7 +591,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { def add(augend: BigDecimal, mc: MathContext): BigDecimal = { // scalastyle:off return - if (augend.isZero || this.isZero || mc.precision == 0) { + if (augend.isZero() || this.isZero() || mc.precision == 0) { add(augend).round(mc) } else { val diffScale = this._scale.toLong - augend._scale @@ -608,9 +609,9 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val tempBI: BigInteger = { val biLarger = BigInteger.valueOf(largerSignum) if (largerSignum == smaller.signum()) { - multiplyByPosInt(larger.getUnscaledValue, 10).add(biLarger) + multiplyByPosInt(larger.getUnscaledValue(), 10).add(biLarger) } else { - val tempBI2 = larger.getUnscaledValue.subtract(biLarger) + val tempBI2 = larger.getUnscaledValue().subtract(biLarger) multiplyByPosInt(tempBI2, 10).add( BigInteger.valueOf(largerSignum * 9)) } @@ -626,16 +627,17 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val diffScale = _scale - subtrahend._scale // Fast return when some operand is zero - if (this.isZero && diffScale <= 0) { + if (this.isZero() && diffScale <= 0) { subtrahend.negate() - } else if (subtrahend.isZero && (this.isZero || diffScale >= 0)) { + } else if (subtrahend.isZero() && (this.isZero() || diffScale >= 0)) { this } else if (diffScale == 0) { if (Math.max(this._bitLength, subtrahend._bitLength) + 1 < 64) valueOf(this._smallValue - subtrahend._smallValue, this._scale) else - new BigDecimal(getUnscaledValue.subtract(subtrahend.getUnscaledValue), - _scale) + new BigDecimal( + getUnscaledValue().subtract(subtrahend.getUnscaledValue()), + _scale) } else if (diffScale > 0) { def powTenLen = LongTenPowsBitLength(diffScale) def maxLen = @@ -645,8 +647,8 @@ class BigDecimal() extends Number 
with Comparable[BigDecimal] { val powTen = LongTenPows(diffScale) valueOf(this._smallValue - subtrahend._smallValue * powTen, this._scale) } else { - val mult = multiplyByTenPow(subtrahend.getUnscaledValue, diffScale) - new BigDecimal(getUnscaledValue.subtract(mult), this._scale) + val mult = multiplyByTenPow(subtrahend.getUnscaledValue(), diffScale) + new BigDecimal(getUnscaledValue().subtract(mult), this._scale) } } else { val negDiffScale = -diffScale @@ -659,8 +661,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { valueOf(_smallValue * powTen - subtrahend._smallValue, subtrahend._scale) } else { - val mult = multiplyByTenPow(this.getUnscaledValue, negDiffScale) - val multSub = mult.subtract(subtrahend.getUnscaledValue) + val mult = multiplyByTenPow(this.getUnscaledValue(), negDiffScale) + val multSub = mult.subtract(subtrahend.getUnscaledValue()) new BigDecimal(multSub, subtrahend._scale) } } @@ -670,7 +672,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val diffScale = subtrahend._scale - this._scale.toLong val precLessDiff = subtrahend.approxPrecision() < diffScale - 1 // Some operand is zero or the precision is infinity - if (subtrahend.isZero || this.isZero || mc.precision == 0) { + if (subtrahend.isZero() || this.isZero() || mc.precision == 0) { subtract(subtrahend).round(mc) } else if (precLessDiff && (mc.precision < this.approxPrecision())) { // Cases where it is unnecessary to subtract two numbers with very different scales @@ -678,9 +680,9 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val biSignum = BigInteger.valueOf(thisSignum) val tempBI: BigInteger = { if (thisSignum != subtrahend.signum()) { - multiplyByPosInt(getUnscaledValue, 10).add(biSignum) + multiplyByPosInt(getUnscaledValue(), 10).add(biSignum) } else { - val bi = this.getUnscaledValue.subtract(biSignum) + val bi = this.getUnscaledValue().subtract(biSignum) multiplyByPosInt(bi, 10).add(BigInteger.valueOf(thisSignum * 9)) } } @@ -693,14 +695,14 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { def multiply(multiplicand: BigDecimal): BigDecimal = { val newScale = this._scale.toLong + multiplicand._scale - if (this.isZero || multiplicand.isZero) { + if (this.isZero() || multiplicand.isZero()) { zeroScaledBy(newScale) } else if (this._bitLength + multiplicand._bitLength < 64) { valueOf(this._smallValue * multiplicand._smallValue, safeLongToInt(newScale)) } else { val unscaled = - this.getUnscaledValue.multiply(multiplicand.getUnscaledValue) + this.getUnscaledValue().multiply(multiplicand.getUnscaledValue()) new BigDecimal(unscaled, safeLongToInt(newScale)) } } @@ -719,7 +721,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { roundingMode: RoundingMode): BigDecimal = { if (roundingMode == null) throw new NullPointerException("roundingMode == null") - else if (divisor.isZero) + else if (divisor.isZero()) throw new ArithmeticException("Division by zero") val diffScale: Long = (this._scale.toLong - divisor._scale) - scale @@ -732,8 +734,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { @inline def default(): BigDecimal = { - val scaledDividend0 = this.getUnscaledValue - val scaledDivisor0 = divisor.getUnscaledValue + val scaledDividend0 = this.getUnscaledValue() + val scaledDivisor0 = divisor.getUnscaledValue() val (scaledDividend, scaledDivisor) = if (diffScale > 0) @@ -781,21 +783,21 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { divide(divisor, _scale, roundingMode) def divide(divisor: 
BigDecimal): BigDecimal = { - val thisUnscaled = this.getUnscaledValue + val thisUnscaled = this.getUnscaledValue() val diffScale: Long = _scale.toLong - divisor._scale - if (divisor.isZero) { + if (divisor.isZero()) { throw new ArithmeticException("Division by zero") } else if (thisUnscaled.signum() == 0) { zeroScaledBy(diffScale) } else { - val divisorUnscaled = divisor.getUnscaledValue + val divisorUnscaled = divisor.getUnscaledValue() val lastPow = BigFivePows.length - 1 val gcd = thisUnscaled.gcd(divisorUnscaled) // To divide both by the GCD val p = thisUnscaled.divide(gcd) val q1 = divisorUnscaled.divide(gcd) // To simplify all "2" factors of q, dividing by 2^k - val k = q1.getLowestSetBit // number of factors "2" in 'q' + val k = q1.getLowestSetBit() // number of factors "2" in 'q' @inline @tailrec @@ -836,7 +838,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { // to obtain a quotient with at least 'mc.precision()' digits // In special cases it reduces the problem to call the dual method - if (mc.precision == 0 || this.isZero || divisor.isZero) + if (mc.precision == 0 || this.isZero() || divisor.isZero()) return this.divide(divisor) // scalastyle:ignore val diffScale: Long = _scale.toLong - divisor._scale @@ -846,20 +848,20 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val (quot, newScale0) = { if (trailingZeros > 0) { // To append trailing zeros at end of dividend - val q = getUnscaledValue.multiply(powerOf10(trailingZeros)) + val q = getUnscaledValue().multiply(powerOf10(trailingZeros)) (q, diffScale + trailingZeros) } else { - (getUnscaledValue, diffScale) + (getUnscaledValue(), diffScale) } } - val qr = quot.divideAndRemainderImpl(divisor.getUnscaledValue) + val qr = quot.divideAndRemainderImpl(divisor.getUnscaledValue()) val (integerQuot, newScale) = { // Calculating the exact quotient with at least 'mc.precision()' digits if (qr.rem.signum() != 0) { // Checking if: 2 * remainder >= divisor ? 
         val compRem =
-          qr.rem.shiftLeftOneBit().compareTo(divisor.getUnscaledValue)
+          qr.rem.shiftLeftOneBit().compareTo(divisor.getUnscaledValue())
         val bi = BigInteger.valueOf(qr.quot.signum() * (5 + compRem))
         (qr.quot.multiply(BigInteger.TEN).add(bi), newScale0 + 1)
       } else {
@@ -890,30 +892,31 @@ class BigDecimal() extends Number with Comparable[BigDecimal] {
   }
   def divideToIntegralValue(divisor: BigDecimal): BigDecimal = {
-    if (divisor.isZero)
+    if (divisor.isZero())
       throw new ArithmeticException("Division by zero")
     val newScale: Long = this._scale.toLong - divisor._scale
     val lastPow = BigTenPows.length - 1
     val (integralValue, varScale) = {
       if ((divisor.approxPrecision() + newScale > this.approxPrecision() +
-            1L) || this.isZero) {
+            1L) || this.isZero()) {
         // If the divisor's integer part is greater than this's integer part,
         // the result must be zero with the appropriate scale
         (BigInteger.ZERO, 0L)
       } else if (newScale == 0) {
-        (getUnscaledValue.divide(divisor.getUnscaledValue), 0L)
+        (getUnscaledValue().divide(divisor.getUnscaledValue()), 0L)
       } else if (newScale > 0) {
         val powerOfTen = powerOf10(newScale)
         val iv =
-          getUnscaledValue.divide(divisor.getUnscaledValue.multiply(powerOfTen))
+          getUnscaledValue().divide(
+            divisor.getUnscaledValue().multiply(powerOfTen))
         (iv.multiply(powerOfTen), newScale)
       } else { // (newScale < 0)
         val powerOfTen = powerOf10(-newScale)
-        val integralValue0 = getUnscaledValue
+        val integralValue0 = getUnscaledValue()
           .multiply(powerOfTen)
-          .divide(divisor.getUnscaledValue)
+          .divide(divisor.getUnscaledValue())
         // To strip trailing zeros approximating to the preferred scale
         @inline
@@ -951,17 +954,17 @@ class BigDecimal() extends Number with Comparable[BigDecimal] {
     val quotPrecision = diffPrecision - diffScale + 1
     // In special cases it call the dual method
-    if (mcPrecision == 0 || this.isZero || divisor.isZero)
+    if (mcPrecision == 0 || this.isZero() || divisor.isZero())
       return this.divideToIntegralValue(divisor)
     val (quot, newScale) = {
       if (quotPrecision <= 0) {
         (BigInteger.ZERO, diffScale)
       } else if (diffScale == 0) {
-        (this.getUnscaledValue.divide(divisor.getUnscaledValue), diffScale)
+        (this.getUnscaledValue().divide(divisor.getUnscaledValue()), diffScale)
       } else if (diffScale > 0) {
-        val div = divisor.getUnscaledValue.multiply(powerOf10(diffScale))
-        val q = this.getUnscaledValue.divide(div)
+        val div = divisor.getUnscaledValue().multiply(powerOf10(diffScale))
+        val q = this.getUnscaledValue().divide(div)
         // To chose 10^newScale to get a quotient with at least 'mc.precision()' digits
         val ns = Math.min(diffScale, Math.max(mcPrecision - quotPrecision + 1, 0))
@@ -971,8 +974,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] {
        * (u1 * 10^exp) / u2 has at least 'mc.precision()' digits.
        */
         val exp = Math.min(-diffScale, Math.max(mcPrecision.toLong - diffPrecision, 0))
-        val mult = this.getUnscaledValue.multiply(powerOf10(exp))
-        val qr = mult.divideAndRemainderImpl(divisor.getUnscaledValue)
+        val mult = this.getUnscaledValue().multiply(powerOf10(exp))
+        val qr = mult.divideAndRemainderImpl(divisor.getUnscaledValue())
         val ns = diffScale + exp // To fix the scale
         val exp2 = -ns // The remaining power of ten
         // If after division there is a remainder...
@@ -982,7 +985,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val compRemDiv = { if (compRemDiv0 == 0) { val bi = qr.rem.multiply(powerOf10(exp2)) - val rem = bi.divide(divisor.getUnscaledValue) + val rem = bi.divide(divisor.getUnscaledValue()) Math.abs(rem.signum()) } else { compRemDiv0 @@ -1057,8 +1060,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { throw new ArithmeticException("Invalid operation") } else { val newScale = _scale * n.toLong - if (isZero) zeroScaledBy(newScale) - else new BigDecimal(getUnscaledValue.pow(n), safeLongToInt(newScale)) + if (isZero()) zeroScaledBy(newScale) + else new BigDecimal(getUnscaledValue().pow(n), safeLongToInt(newScale)) } } @@ -1069,7 +1072,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val mcError = mcPrec > 0 && elength > mcPrec // In particular cases, it reduces the problem to call the other 'pow()' - if (n == 0 || (isZero && n > 0)) { + if (n == 0 || (isZero() && n > 0)) { pow(n) } else if (m > 999999999 || (mcPrec == 0 && n < 0) || mcError) { throw new ArithmeticException("Invalid operation") @@ -1105,7 +1108,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { def abs(mc: MathContext): BigDecimal = { val result = if (signum() < 0) negate() - else new BigDecimal(getUnscaledValue, _scale) + else new BigDecimal(getUnscaledValue(), _scale) result.inplaceRound(mc) result } @@ -1114,7 +1117,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { if (_bitLength < 63 || (_bitLength == 63 && _smallValue != Long.MinValue)) valueOf(-_smallValue, _scale) else - new BigDecimal(getUnscaledValue.negate(), _scale) + new BigDecimal(getUnscaledValue().negate(), _scale) } def negate(mc: MathContext): BigDecimal = { @@ -1147,7 +1150,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } else { val decimalDigits = 1 + ((_bitLength - 1) * Log2).toInt // If after division the number isn't zero, there exists an additional digit - if (getUnscaledValue.divide(powerOf10(decimalDigits)).signum() != 0) + if (getUnscaledValue().divide(powerOf10(decimalDigits)).signum() != 0) decimalDigits + 1 else decimalDigits @@ -1157,10 +1160,10 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { _precision } - def unscaledValue(): BigInteger = getUnscaledValue + def unscaledValue(): BigInteger = getUnscaledValue() def round(mc: MathContext): BigDecimal = { - val thisBD = new BigDecimal(getUnscaledValue, _scale) + val thisBD = new BigDecimal(getUnscaledValue(), _scale) thisBD.inplaceRound(mc) thisBD } @@ -1177,7 +1180,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { if (diffScale < LongTenPows.length && cmp < 64) { valueOf(this._smallValue * LongTenPows(diffScale.toInt), newScale) } else { - new BigDecimal(multiplyByTenPow(getUnscaledValue, diffScale.toInt), + new BigDecimal(multiplyByTenPow(getUnscaledValue(), diffScale.toInt), newScale) } } else if (this._bitLength < 64 && -diffScale < LongTenPows.length) { @@ -1185,7 +1188,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { dividePrimitiveLongs(this._smallValue, lpt, newScale, roundingMode) } else { val powTen = powerOf10(-diffScale) - divideBigIntegers(this.getUnscaledValue, powTen, newScale, roundingMode) + divideBigIntegers(this.getUnscaledValue(), powTen, newScale, roundingMode) } } @@ -1208,12 +1211,12 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { if (_smallValue == 0) zeroScaledBy(newScale) else valueOf(_smallValue, safeLongToInt(newScale)) } else 
{ - new BigDecimal(getUnscaledValue, safeLongToInt(newScale)) + new BigDecimal(getUnscaledValue(), safeLongToInt(newScale)) } } def stripTrailingZeros(): BigDecimal = { - if (isZero) { + if (isZero()) { // Preserve RI compatibility, so BigDecimal.equals (which checks // value *and* scale) continues to work. this @@ -1241,7 +1244,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } } - val (strippedBI, newScale) = loop(1, getUnscaledValue, _scale) + val (strippedBI, newScale) = loop(1, getUnscaledValue(), _scale) new BigDecimal(strippedBI, safeLongToInt(newScale)) } } @@ -1265,8 +1268,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } else { // thisSign equals val.signum() and diffPrecision is approx. diffScale val (thisUnscaled, valUnscaled) = { - val t = this.getUnscaledValue - val v = bi.getUnscaledValue + val t = this.getUnscaledValue() + val v = bi.getUnscaledValue() if (diffScale < 0) (t.multiply(powerOf10(-diffScale)), v) else if (diffScale > 0) @@ -1321,11 +1324,11 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { _toStringImage = Conversion.toDecimalScaledString(_smallValue, _scale) _toStringImage } else { - val intString: String = getUnscaledValue.toString + val intString: String = getUnscaledValue().toString if (_scale == 0) { intString } else { - val begin = if (getUnscaledValue.signum() < 0) 2 else 1 + val begin = if (getUnscaledValue().signum() < 0) 2 else 1 val end = intString.length val exponent: Long = -_scale.toLong + end - begin val result = if (_scale > 0 && exponent >= -6) { @@ -1351,11 +1354,11 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } def toEngineeringString(): String = { - val intString = getUnscaledValue.toString + val intString = getUnscaledValue().toString if (_scale == 0) { intString } else { - val begin = if (getUnscaledValue.signum() < 0) 2 else 1 + val begin = if (getUnscaledValue().signum() < 0) 2 else 1 var end = intString.length val exponent0: Long = -_scale.toLong + end - begin @@ -1375,7 +1378,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val (e, b) = { if (rem != 0) { val (rem1, exp, beg) = { - if (getUnscaledValue.signum() == 0) { + if (getUnscaledValue().signum() == 0) { val r = if (rem < 0) -rem else 3 - rem (r, exponent0 + r, begin) } else { @@ -1410,8 +1413,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } def toPlainString(): String = { - val intStr = getUnscaledValue.toString - if (_scale == 0 || (isZero && _scale < 0)) { + val intStr = getUnscaledValue().toString + if (_scale == 0 || (isZero() && _scale < 0)) { intStr } else { val begin = if (signum() < 0) 1 else 0 @@ -1450,28 +1453,28 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } def toBigInteger(): BigInteger = { - if (_scale == 0 || isZero) - getUnscaledValue + if (_scale == 0 || isZero()) + getUnscaledValue() else if (_scale < 0) - getUnscaledValue.multiply(powerOf10(-_scale.toLong)) + getUnscaledValue().multiply(powerOf10(-_scale.toLong)) else - getUnscaledValue.divide(powerOf10(_scale)) + getUnscaledValue().divide(powerOf10(_scale)) } def toBigIntegerExact(): BigInteger = { - if (_scale == 0 || isZero) { - getUnscaledValue + if (_scale == 0 || isZero()) { + getUnscaledValue() } else if (_scale < 0) { - getUnscaledValue.multiply(powerOf10(-_scale.toLong)) + getUnscaledValue().multiply(powerOf10(-_scale.toLong)) } else { // (scale > 0) // An optimization before do a heavy division if (_scale > approxPrecision() || - _scale > 
getUnscaledValue.getLowestSetBit) + _scale > getUnscaledValue().getLowestSetBit()) throw new ArithmeticException("Rounding necessary") val integerAndFraction = - getUnscaledValue.divideAndRemainder(powerOf10(_scale)) + getUnscaledValue().divideAndRemainder(powerOf10(_scale)) if (integerAndFraction(1).signum() != 0) { // It exists a non-zero fractional part throw new ArithmeticException("Rounding necessary") @@ -1512,7 +1515,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { /* A similar code like in doubleValue() could be repeated here, * but this simple implementation is quite efficient. */ val powerOfTwo = this._bitLength - (_scale / Log2).toLong - val floatResult0: Float = signum() + val floatResult0: Float = signum().toFloat val floatResult: Float = { if (powerOfTwo < -149 || floatResult0 == 0.0f) // 'this' is very small floatResult0 * 0.0f @@ -1535,7 +1538,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { // Cases which 'this' is very large sign * Double.PositiveInfinity } else { - val mantissa0 = getUnscaledValue.abs() + val mantissa0 = getUnscaledValue().abs() var exponent = 1076 // bias + 53 val mantissa = { @@ -1566,7 +1569,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { } } - val lowestSetBit = mantissa.getLowestSetBit + val lowestSetBit = mantissa.getLowestSetBit() val discardedSize = mantissa.bitLength() - 54 var bits: Long = 0L // IEEE-754 Standard var tempBits: Long = 0L // for temporal calculations @@ -1673,7 +1676,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { // Getting the integer part and the discarded fraction val sizeOfFraction: BigInteger = powerOf10(discardedPrecision) val integerAndFraction = - getUnscaledValue.divideAndRemainder(sizeOfFraction) + getUnscaledValue().divideAndRemainder(sizeOfFraction) val newScale0 = _scale.toLong - discardedPrecision // If the discarded fraction is non-zero, perform rounding val newScale = { @@ -1713,18 +1716,18 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { private def movePoint(newScale: Long): BigDecimal = { def lptbLen = LongTenPowsBitLength(-newScale.toInt) - if (isZero) { + if (isZero()) { zeroScaledBy(Math.max(newScale, 0)) } else if (newScale >= 0) { // When: 'n'== Integer.MIN_VALUE isn't possible to call to movePointRight(-n) // since -Integer.MIN_VALUE == Integer.MIN_VALUE if (_bitLength < 64) valueOf(_smallValue, safeLongToInt(newScale)) - else new BigDecimal(getUnscaledValue, safeLongToInt(newScale)) + else new BigDecimal(getUnscaledValue(), safeLongToInt(newScale)) } else if (-newScale < LongTenPows.length && _bitLength + lptbLen < 64) { valueOf(_smallValue * LongTenPows(-newScale.toInt), 0) } else { new BigDecimal( - multiplyByTenPow(getUnscaledValue, safeLongToInt(-newScale)), + multiplyByTenPow(getUnscaledValue(), safeLongToInt(-newScale)), 0) } } @@ -1755,7 +1758,7 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { val intPart1 = intPart0 + roundingBehavior(intPart0.toInt & 1, frac, mc.roundingMode) // If after to add the increment the precision changed, we normalize the size - if (Math.log10(Math.abs(intPart1)) >= mc.precision) + if (Math.log10(Math.abs(intPart1.toDouble)) >= mc.precision.toDouble) (newScale0 - 1, intPart1 / 10) else (newScale0, intPart1) diff --git a/javalib/src/main/scala/java/math/BigInteger.scala b/javalib/src/main/scala/java/math/BigInteger.scala index 5b7d05fff9..9d1c18b45b 100644 --- a/javalib/src/main/scala/java/math/BigInteger.scala +++ 
b/javalib/src/main/scala/java/math/BigInteger.scala
@@ -240,7 +240,7 @@ class BigInteger extends Number with Comparable[BigInteger] {
     if ((radix < java.lang.Character.MIN_RADIX) ||
         (radix > java.lang.Character.MAX_RADIX))
       throw new NumberFormatException("Radix out of range")
-    if (s.isEmpty)
+    if (s.isEmpty())
       throw new NumberFormatException("Zero length BigInteger")
     this.setFromString(s, radix)
@@ -359,7 +359,7 @@ class BigInteger extends Number with Comparable[BigInteger] {
       throw new ArithmeticException("BigInteger divide by zero")
     val divisorSign = divisor.sign
-    if (divisor.isOne) {
+    if (divisor.isOne()) {
       if (divisor.sign > 0) this
       else this.negate()
     } else {
@@ -502,7 +502,7 @@ class BigInteger extends Number with Comparable[BigInteger] {
       -1
     } else {
       // (sign != 0) implies that exists some non zero digit
-      val i = getFirstNonzeroDigit
+      val i = getFirstNonzeroDigit()
       (i << 5) + java.lang.Integer.numberOfTrailingZeros(digits(i))
     }
   }
@@ -571,7 +571,7 @@ class BigInteger extends Number with Comparable[BigInteger] {
     } else if (!(testBit(0) || m.testBit(0))) {
       // If both are even, no inverse exists
       throw new ArithmeticException("BigInteger not invertible.")
-    } else if (m.isOne) {
+    } else if (m.isOne()) {
       ZERO
     } else {
       // From now on: (m > 1)
@@ -590,7 +590,7 @@ class BigInteger extends Number with Comparable[BigInteger] {
       throw new ArithmeticException("BigInteger: modulus not positive")
     var base = this
-    if (m.isOne || (_exponent.sign > 0 && base.sign == 0)) {
+    if (m.isOne() || (_exponent.sign > 0 && base.sign == 0)) {
       BigInteger.ZERO
     } else if (base.sign == 0 && _exponent.sign == 0) {
       BigInteger.ONE
@@ -720,12 +720,12 @@ class BigInteger extends Number with Comparable[BigInteger] {
       throw new ArithmeticException("Negative bit address")
     } else if (intCount >= numberLength) {
       sign < 0
-    } else if (sign < 0 && intCount < getFirstNonzeroDigit) {
+    } else if (sign < 0 && intCount < getFirstNonzeroDigit()) {
      false
     } else {
       var digit = digits(intCount)
       if (sign < 0)
-        digit = if (getFirstNonzeroDigit == intCount) -digit else ~digit
+        digit = if (getFirstNonzeroDigit() == intCount) -digit else ~digit
       val i = 1 << (n & 31)
       (digit & i) != 0
     }
@@ -737,7 +737,7 @@ class BigInteger extends Number with Comparable[BigInteger] {
     val temp: BigInteger = this
     val bitLen = bitLength()
-    val firstNonZeroDigit = getFirstNonzeroDigit
+    val firstNonZeroDigit = getFirstNonzeroDigit()
     var bytesLen = (bitLen >> 3) + 1
     /*
      * Puts the little-endian int array representing the magnitude of this
diff --git a/javalib/src/main/scala/java/math/BitLevel.scala b/javalib/src/main/scala/java/math/BitLevel.scala
index a412966dba..bc196a608b 100644
--- a/javalib/src/main/scala/java/math/BitLevel.scala
+++ b/javalib/src/main/scala/java/math/BitLevel.scala
@@ -61,7 +61,7 @@ private[math] object BitLevel {
     if (bi.sign == 0) {
       0
     } else {
-      var i = bi.getFirstNonzeroDigit
+      var i = bi.getFirstNonzeroDigit()
       if (bi.sign > 0) {
         while (i < bi.numberLength) {
           bCount += java.lang.Integer.bitCount(bi.digits(i))
@@ -94,7 +94,7 @@ private[math] object BitLevel {
     var bLength = bi.numberLength << 5
     var highDigit = bi.digits(bi.numberLength - 1)
     if (bi.sign < 0) {
-      val i = bi.getFirstNonzeroDigit
+      val i = bi.getFirstNonzeroDigit()
       // We reduce the problem to the positive case.
if (i == bi.numberLength - 1) highDigit -= 1 @@ -125,7 +125,7 @@ private[math] object BitLevel { if (intCount >= bi.numberLength) { resDigits(intCount) = bitNumber } else { - val firstNonZeroDigit = bi.getFirstNonzeroDigit + val firstNonZeroDigit = bi.getFirstNonzeroDigit() if (intCount > firstNonZeroDigit) { resDigits(intCount) ^= bitNumber } else if (intCount < firstNonZeroDigit) { diff --git a/javalib/src/main/scala/java/math/Conversion.scala b/javalib/src/main/scala/java/math/Conversion.scala index aaccccd715..03bd13ed4b 100644 --- a/javalib/src/main/scala/java/math/Conversion.scala +++ b/javalib/src/main/scala/java/math/Conversion.scala @@ -100,7 +100,8 @@ private[math] object Conversion { @tailrec def innerLoop(): Unit = { currentChar -= 1 - result = Character.forDigit(resDigit % radix, radix) + result + result = + Character.forDigit(resDigit % radix, radix).toString + result resDigit /= radix if (resDigit != 0 && currentChar != 0) innerLoop() @@ -111,7 +112,7 @@ private[math] object Conversion { var i: Int = 0 while (i < delta && currentChar > 0) { currentChar -= 1 - result = '0' + result + result = "0" + result i += 1 } i = tempLen - 1 @@ -130,14 +131,15 @@ private[math] object Conversion { while (j < 8 && currentChar > 0) { resDigit = digits(i) >> (j << 2) & 0xf currentChar -= 1 - result = java.lang.Character.forDigit(resDigit, 16) + result + result = + java.lang.Character.forDigit(resDigit, 16).toString + result j += 1 } } } // strip leading zero's result = result.dropWhile(_ == '0') - if (sign == -1) '-' + result + if (sign == -1) "-" + result else result } } @@ -177,7 +179,7 @@ private[math] object Conversion { val prev = v v /= 10 currentChar -= 1 - result = (48 + (prev - v * 10).toInt).toChar + result + result = (prev - v * 10).toString + result } while (v != 0) } else { var v: Int = highDigit @@ -185,7 +187,7 @@ private[math] object Conversion { val prev = v v /= 10 currentChar -= 1 - result = (48 + (prev - v * 10)).toChar + result + result = (prev - v * 10).toString + result } while (v != 0) } } else { @@ -214,7 +216,7 @@ private[math] object Conversion { @tailrec def innerLoop(): Unit = { currentChar -= 1 - result = (48 + (resDigit % 10)).toChar + result + result = (resDigit % 10).toString + result resDigit /= 10 if (resDigit != 0 && currentChar != 0) innerLoop() @@ -226,7 +228,7 @@ private[math] object Conversion { var i = 0 while ((i < delta) && (currentChar > 0)) { currentChar -= 1 - result = '0' + result + result = "0" + result i += 1 } var j = tempLen - 1 @@ -234,13 +236,13 @@ private[math] object Conversion { j -= 1 } tempLen = j + 1 - if (!(j == 0 && (temp(j) == 0))) loop + if (!(j == 0 && (temp(j) == 0))) loop() } loop() result = result.dropWhile(_ == '0') } - if (sign < 0) '-' + result + if (sign < 0) "-" + result else result } } @@ -281,7 +283,7 @@ private[math] object Conversion { val prev = v v /= 10 currentChar -= 1 - result = (48 + (prev - v * 10)).toChar + result + result = (prev - v * 10).toString + result } while (v != 0) val exponent: Long = resLengthInChars - currentChar - scale.toLong - 1 @@ -294,7 +296,7 @@ private[math] object Conversion { } else { // special case 2 for (j <- 0 until -index) { - result = '0' + result + result = "0" + result } result = "0." 
+ result } @@ -310,7 +312,7 @@ private[math] object Conversion { result + exponentStr } - if (negNumber) '-' + result + if (negNumber) "-" + result else result } } @@ -334,7 +336,7 @@ private[math] object Conversion { def bigInteger2Double(bi: BigInteger): Double = { if (bi.numberLength < 2 || ((bi.numberLength == 2) && (bi.digits(1) > 0))) { - bi.longValue() + bi.longValue().toDouble } else if (bi.numberLength > 32) { if (bi.sign > 0) Double.PositiveInfinity else Double.NegativeInfinity diff --git a/javalib/src/main/scala/java/math/Division.scala b/javalib/src/main/scala/java/math/Division.scala index 3ee60b14bb..61e66dcb74 100644 --- a/javalib/src/main/scala/java/math/Division.scala +++ b/javalib/src/main/scala/java/math/Division.scala @@ -141,7 +141,7 @@ private[math] object Division { rem = longR.toInt if ((leftHand ^ Long.MinValue) > (rightHand ^ Long.MinValue)) - loop + loop() } } loop() @@ -340,7 +340,7 @@ private[math] object Division { exponent: BigInteger, modulus: BigInteger): BigInteger = { // STEP 1: Obtain the factorization 'modulus'= q * 2^j. - val j = modulus.getLowestSetBit + val j = modulus.getLowestSetBit() val q = modulus.shiftRight(j) // STEP 2: Compute x1 := base^exponent (mod q). @@ -407,8 +407,8 @@ private[math] object Division { * Divide both number the maximal possible times by 2 without rounding * gcd(2*a, 2*b) = 2 * gcd(a,b) */ - val lsb1 = op1.getLowestSetBit - val lsb2 = op2.getLowestSetBit + val lsb1 = op1.getLowestSetBit() + val lsb2 = op2.getLowestSetBit() val pow2Count = Math.min(lsb1, lsb2) BitLevel.inplaceShiftRight(op1, lsb1) BitLevel.inplaceShiftRight(op2, lsb2) @@ -436,13 +436,13 @@ private[math] object Division { if (op2.numberLength > op1.numberLength * 1.2) { op2 = op2.remainder(op1) if (op2.signum() != 0) { - BitLevel.inplaceShiftRight(op2, op2.getLowestSetBit) + BitLevel.inplaceShiftRight(op2, op2.getLowestSetBit()) } } else { // Use Knuth's algorithm of successive subtract and shifting do { Elementary.inplaceSubtract(op2, op1) - BitLevel.inplaceShiftRight(op2, op2.getLowestSetBit) + BitLevel.inplaceShiftRight(op2, op2.getLowestSetBit()) } while (op2.compareTo(op1) >= BigInteger.EQUALS) } // now op1 >= op2 @@ -450,7 +450,7 @@ private[math] object Division { op2 = op1 op1 = swap if (op1.sign != 0) - loop + loop() } } @@ -618,8 +618,8 @@ private[math] object Division { s.digits(0) = 1 var k = 0 - val lsbu = u.getLowestSetBit - val lsbv = v.getLowestSetBit + val lsbu = u.getLowestSetBit() + val lsbv = v.getLowestSetBit() if (lsbu > lsbv) { BitLevel.inplaceShiftRight(u, lsbu) BitLevel.inplaceShiftRight(v, lsbv) @@ -636,7 +636,7 @@ private[math] object Division { while (v.signum() > 0) { while (u.compareTo(v) > BigInteger.EQUALS) { Elementary.inplaceSubtract(u, v) - val toShift = u.getLowestSetBit + val toShift = u.getLowestSetBit() BitLevel.inplaceShiftRight(u, toShift) Elementary.inplaceAdd(r, s) BitLevel.inplaceShiftLeft(s, toShift) @@ -649,7 +649,7 @@ private[math] object Division { if (u.compareTo(v) <= BigInteger.EQUALS) { Elementary.inplaceSubtract(v, u) if (v.signum() != 0) { - val toShift = v.getLowestSetBit + val toShift = v.getLowestSetBit() BitLevel.inplaceShiftRight(v, toShift) Elementary.inplaceAdd(s, r) BitLevel.inplaceShiftLeft(r, toShift) @@ -661,7 +661,7 @@ private[math] object Division { loop() } - if (!u.isOne) // u is the gcd + if (!u.isOne()) // u is the gcd throw new ArithmeticException("BigInteger not invertible.") if (r.compareTo(p) >= BigInteger.EQUALS) Elementary.inplaceSubtract(r, p) @@ -956,7 +956,7 @@ private[math] object 
Division { while (bi.testBit(i)) { i -= 1 } - n - 1 - Math.max(i, bi.getLowestSetBit) + n - 1 - Math.max(i, bi.getLowestSetBit()) } } diff --git a/javalib/src/main/scala/java/math/Logical.scala b/javalib/src/main/scala/java/math/Logical.scala index 172ddca08a..ec333c0e23 100644 --- a/javalib/src/main/scala/java/math/Logical.scala +++ b/javalib/src/main/scala/java/math/Logical.scala @@ -108,7 +108,7 @@ private[math] object Logical { def andPositive(bi: BigInteger, that: BigInteger): BigInteger = { // PRE: both arguments are positive val resLength = Math.min(bi.numberLength, that.numberLength) - var i = Math.max(bi.getFirstNonzeroDigit, that.getFirstNonzeroDigit) + var i = Math.max(bi.getFirstNonzeroDigit(), that.getFirstNonzeroDigit()) if (i >= resLength) { BigInteger.ZERO @@ -128,8 +128,8 @@ private[math] object Logical { /** @return sign = positive.magnitude & magnitude = -negative.magnitude */ def andDiffSigns(positive: BigInteger, negative: BigInteger): BigInteger = { // PRE: positive is positive and negative is negative - val iPos = positive.getFirstNonzeroDigit - val iNeg = negative.getFirstNonzeroDigit + val iPos = positive.getFirstNonzeroDigit() + val iNeg = negative.getFirstNonzeroDigit() // Look if the trailing zeros of the negative will "blank" all // the positive digits @@ -170,8 +170,8 @@ private[math] object Logical { // scalastyle:off return // PRE: longer and shorter are negative // PRE: longer has at least as many digits as shorter - val iLonger = longer.getFirstNonzeroDigit - val iShorter = shorter.getFirstNonzeroDigit + val iLonger = longer.getFirstNonzeroDigit() + val iShorter = shorter.getFirstNonzeroDigit() // Does shorter matter? if (iLonger >= shorter.numberLength) { @@ -255,7 +255,7 @@ private[math] object Logical { val resDigits = new Array[Int](bi.numberLength) val limit = Math.min(bi.numberLength, that.numberLength) var i: Int = 0 - i = bi.getFirstNonzeroDigit + i = bi.getFirstNonzeroDigit() while (i < limit) { resDigits(i) = bi.digits(i) & ~that.digits(i) i += 1 @@ -274,8 +274,8 @@ private[math] object Logical { def andNotPositiveNegative(positive: BigInteger, negative: BigInteger): BigInteger = { // PRE: positive > 0 && negative < 0 - val iNeg = negative.getFirstNonzeroDigit - val iPos = positive.getFirstNonzeroDigit + val iNeg = negative.getFirstNonzeroDigit() + val iPos = positive.getFirstNonzeroDigit() if (iNeg >= positive.numberLength) { positive } else { @@ -310,8 +310,8 @@ private[math] object Logical { // PRE: negative < 0 && positive > 0 var limit: Int = 0 var digit: Int = 0 - val iNeg = negative.getFirstNonzeroDigit - val iPos = positive.getFirstNonzeroDigit + val iNeg = negative.getFirstNonzeroDigit() + val iPos = positive.getFirstNonzeroDigit() if (iNeg >= positive.numberLength) { negative } else { @@ -397,8 +397,8 @@ private[math] object Logical { /** @return sign = 1, magnitude = -val.magnitude & ~(-that.magnitude) */ def andNotNegative(bi: BigInteger, that: BigInteger): BigInteger = { // PRE: val < 0 && that < 0 - val iVal = bi.getFirstNonzeroDigit - val iThat = that.getFirstNonzeroDigit + val iVal = bi.getFirstNonzeroDigit() + val iThat = that.getFirstNonzeroDigit() if (iVal >= that.numberLength) { BigInteger.ZERO } else { @@ -463,7 +463,7 @@ private[math] object Logical { } } else if (that.sign > 0) { orDiffSigns(that, bi) - } else if (that.getFirstNonzeroDigit > bi.getFirstNonzeroDigit) { + } else if (that.getFirstNonzeroDigit() > bi.getFirstNonzeroDigit()) { orNegative(that, bi) } else { orNegative(bi, that) @@ -492,8 +492,8 @@ private[math] 
object Logical { def orNegative(bi: BigInteger, that: BigInteger): BigInteger = { // PRE: val and that are negative; // PRE: val has at least as many trailing zeros digits as that - val iThat = that.getFirstNonzeroDigit - val iVal = bi.getFirstNonzeroDigit + val iThat = that.getFirstNonzeroDigit() + val iVal = bi.getFirstNonzeroDigit() var i = 0 if (iVal >= that.numberLength) { that @@ -529,8 +529,8 @@ private[math] object Logical { /** @return sign = -1, magnitude = -(positive.magnitude | -negative.magnitude) */ def orDiffSigns(positive: BigInteger, negative: BigInteger): BigInteger = { // Jumping over the least significant zero bits - val iNeg = negative.getFirstNonzeroDigit - val iPos = positive.getFirstNonzeroDigit + val iNeg = negative.getFirstNonzeroDigit() + val iPos = positive.getFirstNonzeroDigit() // Look if the trailing zeros of the positive will "copy" all // the negative digits @@ -608,7 +608,7 @@ private[math] object Logical { } } else if (that.sign > 0) { xorDiffSigns(that, bi) - } else if (that.getFirstNonzeroDigit > bi.getFirstNonzeroDigit) { + } else if (that.getFirstNonzeroDigit() > bi.getFirstNonzeroDigit()) { xorNegative(that, bi) } else { xorNegative(bi, that) @@ -621,7 +621,8 @@ private[math] object Logical { // PRE: longer has at least as many digits as shorter val resLength = longer.numberLength val resDigits = new Array[Int](resLength) - var i = Math.min(longer.getFirstNonzeroDigit, shorter.getFirstNonzeroDigit) + var i = + Math.min(longer.getFirstNonzeroDigit(), shorter.getFirstNonzeroDigit()) while (i < shorter.numberLength) { resDigits(i) = longer.digits(i) ^ shorter.digits(i) i += 1 @@ -641,8 +642,8 @@ private[math] object Logical { // PRE: val has at least as many trailing zero digits as that val resLength = Math.max(bi.numberLength, that.numberLength) val resDigits = new Array[Int](resLength) - val iVal = bi.getFirstNonzeroDigit - val iThat = that.getFirstNonzeroDigit + val iVal = bi.getFirstNonzeroDigit() + val iThat = that.getFirstNonzeroDigit() var i = iThat if (iVal == iThat) { @@ -693,8 +694,8 @@ private[math] object Logical { // scalastyle:off return val resLength = Math.max(negative.numberLength, positive.numberLength) val resDigits: Array[Int] = new Array[Int](resLength) - val iNeg = negative.getFirstNonzeroDigit - val iPos = positive.getFirstNonzeroDigit + val iNeg = negative.getFirstNonzeroDigit() + val iPos = positive.getFirstNonzeroDigit() var i = 0 //The first diff --git a/javalib/src/main/scala/java/math/Multiplication.scala b/javalib/src/main/scala/java/math/Multiplication.scala index 0085715d55..b708d4c499 100644 --- a/javalib/src/main/scala/java/math/Multiplication.scala +++ b/javalib/src/main/scala/java/math/Multiplication.scala @@ -459,5 +459,5 @@ private[math] object Multiplication { } private def newArrayOfPows(len: Int, pow: Int) = - new Array[Int](len - 1).scanLeft[Int, Array[Int]](1)((z, _) => z * pow) + new Array[Int](len - 1).scanLeft(1)((z, _) => z * pow) } diff --git a/javalib/src/main/scala/java/math/Primality.scala b/javalib/src/main/scala/java/math/Primality.scala index f80d55850a..477d0edbf1 100644 --- a/javalib/src/main/scala/java/math/Primality.scala +++ b/javalib/src/main/scala/java/math/Primality.scala @@ -254,7 +254,7 @@ private[math] object Primality { var y: BigInteger = null val nMinus1 = n.subtract(BigInteger.ONE) val bitLength = nMinus1.bitLength() - val k = nMinus1.getLowestSetBit + val k = nMinus1.getLowestSetBit() val q = nMinus1.shiftRight(k) val rnd = new Random() for (i <- 0 until t) { @@ -270,15 +270,15 @@ 
private[math] object Primality { do { x = new BigInteger(bitLength, rnd) } while ((x.compareTo(n) >= BigInteger.EQUALS) || x.sign == 0 || - x.isOne) + x.isOne()) } y = x.modPow(q, n) - if (!(y.isOne || y == nMinus1)) { + if (!(y.isOne() || y == nMinus1)) { for (j <- 1 until k) { if (y != nMinus1) { y = y.multiply(y).mod(n) - if (y.isOne) + if (y.isOne()) return false } } diff --git a/javalib/src/main/scala/java/math/RoundingMode.scala b/javalib/src/main/scala/java/math/RoundingMode.scala index a869ddcb0d..6f11d86f20 100644 --- a/javalib/src/main/scala/java/math/RoundingMode.scala +++ b/javalib/src/main/scala/java/math/RoundingMode.scala @@ -55,7 +55,7 @@ object RoundingMode { def values(): Array[RoundingMode] = _values.clone() def valueOf(name: String): RoundingMode = { - _values.find(_.name == name).getOrElse { + _values.find(_.name() == name).getOrElse { throw new IllegalArgumentException("No enum const RoundingMode." + name) } } diff --git a/javalib/src/main/scala/java/net/Inet4Address.scala b/javalib/src/main/scala/java/net/Inet4Address.scala index be4f67be3f..2174b528f4 100644 --- a/javalib/src/main/scala/java/net/Inet4Address.scala +++ b/javalib/src/main/scala/java/net/Inet4Address.scala @@ -25,7 +25,7 @@ final class Inet4Address private[net] (ipAddress: Array[Byte], host: String) } override def isMCGlobal(): Boolean = { - if (!isMulticastAddress) return false + if (!isMulticastAddress()) return false val address = InetAddress.bytesToInt(ipAddress, 0) diff --git a/javalib/src/main/scala/java/net/InetAddress.scala b/javalib/src/main/scala/java/net/InetAddress.scala index d8b512f5c1..b396143781 100644 --- a/javalib/src/main/scala/java/net/InetAddress.scala +++ b/javalib/src/main/scala/java/net/InetAddress.scala @@ -547,8 +547,11 @@ private[net] trait InetAddressBase { } private def addressToString(value: Int): String = { - return (((value >> 24) & 0xff) + "." + ((value >> 16) & 0xff) + "." - + ((value >> 8) & 0xff) + "." 
+ (value & 0xff)) + val p1 = (value >> 24) & 0xff + val p2 = (value >> 16) & 0xff + val p3 = (value >> 8) & 0xff + val p4 = value & 0xff + s"$p1.$p2.$p3.$p4" } } @@ -581,7 +584,7 @@ class InetAddress private[net] (ipAddress: Array[Byte], if (obj == null || obj.getClass != this.getClass) { false } else { - val objIPAddress = obj.asInstanceOf[InetAddress].getAddress; + val objIPAddress = obj.asInstanceOf[InetAddress].getAddress(); objIPAddress.indices.forall(i => objIPAddress(i) == ipAddress(i)) } } diff --git a/javalib/src/main/scala/java/net/InetSocketAddress.scala b/javalib/src/main/scala/java/net/InetSocketAddress.scala index cc10d5068b..7764eb10dd 100644 --- a/javalib/src/main/scala/java/net/InetSocketAddress.scala +++ b/javalib/src/main/scala/java/net/InetSocketAddress.scala @@ -20,7 +20,7 @@ class InetSocketAddress private[net] (private var addr: InetAddress, if (addr == null) { addr = InetAddress.wildcard } - hostName = addr.getHostAddress + hostName = addr.getHostAddress() } private var gotHostName = false @@ -32,7 +32,7 @@ class InetSocketAddress private[net] (private var addr: InetAddress, private val isResolved = (addr != null) def this(port: Int) = - this(InetAddress.wildcard, port, InetAddress.wildcard.getHostName, false) + this(InetAddress.wildcard, port, InetAddress.wildcard.getHostName(), false) def this(hostname: String, port: Int) = this(Try(InetAddress.getByName(hostname)).getOrElse(null), @@ -49,7 +49,7 @@ class InetSocketAddress private[net] (private var addr: InetAddress, final def getHostName: String = { if (!gotHostName && addr != null) { gotHostName = true - hostName = addr.getHostName + hostName = addr.getHostName() } hostName } diff --git a/javalib/src/main/scala/java/net/PlainSocketImpl.scala b/javalib/src/main/scala/java/net/PlainSocketImpl.scala index 65a546f528..318968acd1 100644 --- a/javalib/src/main/scala/java/net/PlainSocketImpl.scala +++ b/javalib/src/main/scala/java/net/PlainSocketImpl.scala @@ -82,10 +82,10 @@ private[net] class PlainSocketImpl extends SocketImpl { hints.ai_socktype = socket.SOCK_STREAM Zone { implicit z => - val cIP = toCString(addr.getHostAddress) + val cIP = toCString(addr.getHostAddress()) if (getaddrinfo(cIP, toCString(port.toString), hints, ret) != 0) { throw new BindException( - "Couldn't resolve address: " + addr.getHostAddress) + "Couldn't resolve address: " + addr.getHostAddress()) } } @@ -96,13 +96,14 @@ private[net] class PlainSocketImpl extends SocketImpl { if (bindRes < 0) { throw new BindException( - "Couldn't bind to an address: " + addr.getHostAddress + + "Couldn't bind to an address: " + addr.getHostAddress() + " on port: " + port.toString) } this.localport = fetchLocalPort(family).getOrElse { throw new BindException( - "Couldn't bind to address: " + addr.getHostAddress + " on port: " + port) + "Couldn't bind to address: " + addr + .getHostAddress() + " on port: " + port) } } @@ -275,7 +276,7 @@ private[net] class PlainSocketImpl extends SocketImpl { hints.ai_family = socket.AF_UNSPEC hints.ai_flags = AI_NUMERICHOST | AI_NUMERICSERV hints.ai_socktype = socket.SOCK_STREAM - val remoteAddress = inetAddr.getAddress.getHostAddress + val remoteAddress = inetAddr.getAddress.getHostAddress() Zone { implicit z => val cIP = toCString(remoteAddress) @@ -318,7 +319,7 @@ private[net] class PlainSocketImpl extends SocketImpl { } } - override def close(): Unit = { + override def close: Unit = { if (fd.fd != -1) { cClose(fd.fd) fd = new FileDescriptor @@ -345,7 +346,7 @@ private[net] class PlainSocketImpl extends SocketImpl { new 
SocketInputStream(this) } - override def shutdownOutput(): Unit = { + override def shutdownOutput: Unit = { socket.shutdown(fd.fd, 1) match { case 0 => shutOutput = true case _ => @@ -353,7 +354,7 @@ private[net] class PlainSocketImpl extends SocketImpl { } } - override def shutdownInput(): Unit = { + override def shutdownInput: Unit = { socket.shutdown(fd.fd, 0) match { case 0 => shutInput = true case _ => diff --git a/javalib/src/main/scala/java/net/ServerSocket.scala b/javalib/src/main/scala/java/net/ServerSocket.scala index 29c3f0939e..a00eb0f473 100644 --- a/javalib/src/main/scala/java/net/ServerSocket.scala +++ b/javalib/src/main/scala/java/net/ServerSocket.scala @@ -18,7 +18,7 @@ class ServerSocket(private var port: Int, } if (port >= 0) { - startup + startup() } def startup(): Unit = { @@ -79,7 +79,7 @@ class ServerSocket(private var port: Int, val addr = if (endpoint == null || endpoint.asInstanceOf[InetSocketAddress].getAddress == null) - new InetSocketAddress(InetAddress.getLoopbackAddress, 0) + new InetSocketAddress(InetAddress.getLoopbackAddress(), 0) else { endpoint.asInstanceOf[InetSocketAddress] } @@ -134,7 +134,7 @@ class ServerSocket(private var port: Int, impl.setOption(SocketOptions.SO_TIMEOUT, Integer.valueOf(timeout)) } - override def close: Unit = { + override def close(): Unit = { impl.close closed = true } diff --git a/javalib/src/main/scala/java/net/Socket.scala b/javalib/src/main/scala/java/net/Socket.scala index 94ab1f9991..1bfb57fc48 100644 --- a/javalib/src/main/scala/java/net/Socket.scala +++ b/javalib/src/main/scala/java/net/Socket.scala @@ -116,7 +116,7 @@ class Socket protected (private[net] val impl: SocketImpl, val addr = if (bindpoint == null || bindpoint.asInstanceOf[InetSocketAddress].getAddress == null) - new InetSocketAddress(InetAddress.getLoopbackAddress, 0) + new InetSocketAddress(InetAddress.getLoopbackAddress(), 0) else { bindpoint.asInstanceOf[InetSocketAddress] } diff --git a/javalib/src/main/scala/java/net/SocketInputStream.scala b/javalib/src/main/scala/java/net/SocketInputStream.scala index c234ccd990..80fb5fe248 100644 --- a/javalib/src/main/scala/java/net/SocketInputStream.scala +++ b/javalib/src/main/scala/java/net/SocketInputStream.scala @@ -8,7 +8,7 @@ private[net] class SocketInputStream(socket: PlainSocketImpl) override def close(): Unit = socket.close - override def available: Int = socket.available + override def available(): Int = socket.available override def read(): Int = { val buffer = new Array[Byte](1) diff --git a/javalib/src/main/scala/java/net/URI.scala b/javalib/src/main/scala/java/net/URI.scala index 46c6100b83..aa588c6ad2 100644 --- a/javalib/src/main/scala/java/net/URI.scala +++ b/javalib/src/main/scala/java/net/URI.scala @@ -32,7 +32,7 @@ final class URI private () extends Comparable[URI] with Serializable { import URI._ - private val serialVersionUID = -6052424284110960213l + private val serialVersionUID = -6052424284110960213L private var string: String = _ @@ -89,7 +89,8 @@ final class URI private () extends Comparable[URI] with Serializable { earlyStop = true } if (!earlyStop) { - if (scheme != null && path != null && path.length > 0 && path.charAt(0) != '/') { + if (scheme != null && path != null && path + .length() > 0 && path.charAt(0) != '/') { throw new URISyntaxException(path, "Relative path") } val uri: StringBuilder = new StringBuilder() @@ -139,7 +140,8 @@ final class URI private () extends Comparable[URI] with Serializable { query: String, fragment: String) = { this() - if (scheme != null && path != 
null && path.length > 0 && path.charAt(0) != '/') { + if (scheme != null && path != null && path + .length() > 0 && path.charAt(0) != '/') { throw new URISyntaxException(path, "Relative path") } val uri: StringBuilder = new StringBuilder() @@ -188,12 +190,12 @@ final class URI private () extends Comparable[URI] with Serializable { if (index != -1 && (index2 >= index || index2 == -1) && (index3 >= index || index3 == -1)) { absolute = true scheme = temp.substring(0, index) - if (scheme.length == 0) { + if (scheme.length() == 0) { throw new URISyntaxException(uri, "Scheme expected", index) } validateScheme(uri, scheme, 0) schemespecificpart = temp.substring(index + 1) - if (schemespecificpart.length == 0) { + if (schemespecificpart.length() == 0) { throw new URISyntaxException(uri, "Scheme-specific part expected", index + 1) @@ -203,7 +205,8 @@ final class URI private () extends Comparable[URI] with Serializable { schemespecificpart = temp } - if (scheme == null || schemespecificpart.length > 0 && schemespecificpart + if (scheme == null || schemespecificpart + .length() > 0 && schemespecificpart .charAt(0) == '/') { opaque = false @@ -270,7 +273,7 @@ final class URI private () extends Comparable[URI] with Serializable { throw new URISyntaxException(uri, "Illegal character in scheme", index - + e.getIndex) + + e.getIndex()) } } @@ -279,9 +282,10 @@ final class URI private () extends Comparable[URI] with Serializable { URIEncoderDecoder.validate(ssp, allLegal) } catch { case e: URISyntaxException => - throw new URISyntaxException(uri, - e.getReason + " in scheme specific part", - index + e.getIndex) + throw new URISyntaxException( + uri, + e.getReason() + " in scheme specific part", + index + e.getIndex()) } } @@ -291,8 +295,8 @@ final class URI private () extends Comparable[URI] with Serializable { } catch { case e: URISyntaxException => throw new URISyntaxException(uri, - e.getReason + " in authority", - index + e.getIndex) + e.getReason() + " in authority", + index + e.getIndex()) } } @@ -302,8 +306,8 @@ final class URI private () extends Comparable[URI] with Serializable { } catch { case e: URISyntaxException => throw new URISyntaxException(uri, - e.getReason + " in path", - index + e.getIndex) + e.getReason() + " in path", + index + e.getIndex()) } } @@ -313,8 +317,8 @@ final class URI private () extends Comparable[URI] with Serializable { } catch { case e: URISyntaxException => throw new URISyntaxException(uri, - e.getReason + " in query", - index + e.getIndex) + e.getReason() + " in query", + index + e.getIndex()) } } @@ -324,8 +328,8 @@ final class URI private () extends Comparable[URI] with Serializable { } catch { case e: URISyntaxException => throw new URISyntaxException(uri, - e.getReason + " in fragment", - index + e.getIndex) + e.getReason() + " in fragment", + index + e.getIndex()) } } @@ -351,7 +355,7 @@ final class URI private () extends Comparable[URI] with Serializable { val endindex = temp.indexOf(']') if (index != -1 && endindex < index) { tempHost = temp.substring(0, index) - if (index < (temp.length - 1)) { + if (index < (temp.length() - 1)) { try { tempPort = java.lang.Integer.parseInt(temp.substring(index + 1)) if (tempPort < 0) { @@ -394,7 +398,7 @@ final class URI private () extends Comparable[URI] with Serializable { } def validateUserinfo(uri: String, userInfo: String, index: Int): Unit = { - for (i <- 0 until userInfo.length) { + for (i <- 0 until userInfo.length()) { val ch: Char = userInfo.charAt(i) if (ch == ']' || ch == '[') { throw new URISyntaxException(uri, 
@@ -406,7 +410,7 @@ final class URI private () extends Comparable[URI] with Serializable { def isValidHost(forceServer: Boolean, host: String): Boolean = { if (host.charAt(0) == '[') { - if (host.charAt(host.length - 1) != ']') { + if (host.charAt(host.length() - 1) != ']') { throw new URISyntaxException( host, "Expected a closing square bracket for ipv6 address", @@ -421,7 +425,7 @@ final class URI private () extends Comparable[URI] with Serializable { throw new URISyntaxException(host, "Illegal character in host name", 0) } val index: Int = host.lastIndexOf('.') - if (index < 0 || index == host.length - 1 || + if (index < 0 || index == host.length() - 1 || !java.lang.Character.isDigit(host.charAt(index + 1))) { if (isValidDomainName(host)) { return true @@ -450,7 +454,7 @@ final class URI private () extends Comparable[URI] with Serializable { } var label: String = null val st: StringTokenizer = new StringTokenizer(host, ".") - while (st.hasMoreTokens) { + while (st.hasMoreTokens()) { label = st.nextToken() if (label.startsWith("-") || label.endsWith("-")) { return false @@ -497,7 +501,7 @@ final class URI private () extends Comparable[URI] with Serializable { } def isValidIP6Address(ipAddress: String): Boolean = { - val length: Int = ipAddress.length + val length: Int = ipAddress.length() var doubleColon: Boolean = false var numberOfColons: Int = 0 var numberOfPeriods: Int = 0 @@ -563,7 +567,7 @@ final class URI private () extends Comparable[URI] with Serializable { } word = "" case _ => - if (word.length > 3) { + if (word.length() > 3) { return false } if (!isValidHexChar(c)) { @@ -591,10 +595,10 @@ final class URI private () extends Comparable[URI] with Serializable { def isValidIP4Word(word: String): Boolean = { var c: Char = 0 - if (word.length < 1 || word.length > 3) { + if (word.length() < 1 || word.length() > 3) { return false } - for (i <- 0 until word.length) { + for (i <- 0 until word.length()) { c = word.charAt(i) if (!(c >= '0' && c <= '9')) { return false @@ -870,7 +874,7 @@ final class URI private () extends Comparable[URI] with Serializable { override def hashCode(): Int = { if (hash == -1) { - hash = getHashString.hashCode + hash = getHashString().hashCode } hash } @@ -883,7 +887,7 @@ final class URI private () extends Comparable[URI] with Serializable { // count the number of '/'s, to determine number of segments var index = -1 index = path.indexOf('/', index + 1) - val pathlen: Int = path.length + val pathlen: Int = path.length() var size: Int = 0 if (pathlen > 0 && path.charAt(0) != '/') { size += 1 @@ -937,7 +941,7 @@ final class URI private () extends Comparable[URI] with Serializable { } if (!path.endsWith("/") && seglist.length > 0 && include(seglist.length - 1)) { - newpath.deleteCharAt(newpath.length - 1) + newpath.deleteCharAt(newpath.length() - 1) } var result: String = newpath.toString // prepend "./" to normalize @@ -1012,7 +1016,7 @@ final class URI private () extends Comparable[URI] with Serializable { result.fragment = relative.fragment result.query = relative.query // the result URI is the remainder of the relative URI's path - result.path = relativePath.substring(thisPath.length) + result.path = relativePath.substring(thisPath.length()) result.setSchemeSpecificPart() result } diff --git a/javalib/src/main/scala/java/nio/Buffer.scala b/javalib/src/main/scala/java/nio/Buffer.scala index 016191a318..240452fb52 100644 --- a/javalib/src/main/scala/java/nio/Buffer.scala +++ b/javalib/src/main/scala/java/nio/Buffer.scala @@ -11,7 +11,7 @@ abstract class Buffer 
private[nio] (val _capacity: Int) { // Normal implementation of Buffer - private var _limit: Int = capacity + private var _limit: Int = capacity() private var _position: Int = 0 private[nio] var _mark: Int = -1 @@ -57,7 +57,7 @@ abstract class Buffer private[nio] (val _capacity: Int) { def clear(): Buffer = { _mark = -1 _position = 0 - _limit = capacity + _limit = capacity() this } @@ -74,9 +74,9 @@ abstract class Buffer private[nio] (val _capacity: Int) { this } - @inline final def remaining(): Int = limit - position + @inline final def remaining(): Int = limit() - position() - @inline final def hasRemaining(): Boolean = position != limit + @inline final def hasRemaining(): Boolean = position() != limit() def isReadOnly(): Boolean @@ -92,7 +92,7 @@ abstract class Buffer private[nio] (val _capacity: Int) { def isDirect(): Boolean override def toString(): String = - s"${getClass.getName}[pos=$position lim=$limit cap=$capacity]" + s"${getClass.getName}[pos=${position()} lim=${limit()} cap=${capacity()}]" /* Generic access to methods declared in subclasses. * These methods allow to write generic algorithms on any kind of Buffer. @@ -134,7 +134,7 @@ abstract class Buffer private[nio] (val _capacity: Int) { // Helpers @inline private[nio] def ensureNotReadOnly(): Unit = { - if (isReadOnly) + if (isReadOnly()) throw new ReadOnlyBufferException } @@ -147,7 +147,7 @@ abstract class Buffer private[nio] (val _capacity: Int) { @inline private[nio] def getPosAndAdvanceRead(): Int = { val p = _position - if (p == limit) + if (p == limit()) throw new BufferUnderflowException _position = p + 1 p @@ -156,7 +156,7 @@ abstract class Buffer private[nio] (val _capacity: Int) { @inline private[nio] def getPosAndAdvanceRead(length: Int): Int = { val p = _position val newPos = p + length - if (newPos > limit) + if (newPos > limit()) throw new BufferUnderflowException _position = newPos p @@ -164,7 +164,7 @@ abstract class Buffer private[nio] (val _capacity: Int) { @inline private[nio] def getPosAndAdvanceWrite(): Int = { val p = _position - if (p == limit) + if (p == limit()) throw new BufferOverflowException _position = p + 1 p @@ -173,20 +173,20 @@ abstract class Buffer private[nio] (val _capacity: Int) { @inline private[nio] def getPosAndAdvanceWrite(length: Int): Int = { val p = _position val newPos = p + length - if (newPos > limit) + if (newPos > limit()) throw new BufferOverflowException _position = newPos p } @inline private[nio] def validateIndex(index: Int): Int = { - if (index < 0 || index >= limit) + if (index < 0 || index >= limit()) throw new IndexOutOfBoundsException index } @inline private[nio] def validateIndex(index: Int, length: Int): Int = { - if (index < 0 || index + length > limit) + if (index < 0 || index + length > limit()) throw new IndexOutOfBoundsException index } diff --git a/javalib/src/main/scala/java/nio/ByteOrder.scala b/javalib/src/main/scala/java/nio/ByteOrder.scala index 0f7da86392..ae4d939702 100644 --- a/javalib/src/main/scala/java/nio/ByteOrder.scala +++ b/javalib/src/main/scala/java/nio/ByteOrder.scala @@ -13,7 +13,7 @@ object ByteOrder { val LITTLE_ENDIAN: ByteOrder = new ByteOrder("LITTLE_ENDIAN") def nativeOrder(): ByteOrder = { - if (Platform.littleEndian) LITTLE_ENDIAN + if (Platform.littleEndian()) LITTLE_ENDIAN else BIG_ENDIAN } } diff --git a/javalib/src/main/scala/java/nio/CharBuffer.scala b/javalib/src/main/scala/java/nio/CharBuffer.scala index 900686f351..b35fb36955 100644 --- a/javalib/src/main/scala/java/nio/CharBuffer.scala +++ 
b/javalib/src/main/scala/java/nio/CharBuffer.scala @@ -13,10 +13,10 @@ object CharBuffer { wrap(array, 0, array.length) def wrap(csq: CharSequence, start: Int, end: Int): CharBuffer = - StringCharBuffer.wrap(csq, 0, csq.length, start, end - start) + StringCharBuffer.wrap(csq, 0, csq.length(), start, end - start) def wrap(csq: CharSequence): CharBuffer = - wrap(csq, 0, csq.length) + wrap(csq, 0, csq.length()) } abstract class CharBuffer private[nio] (_capacity: Int, @@ -35,7 +35,7 @@ abstract class CharBuffer private[nio] (_capacity: Int, def read(target: CharBuffer): Int = { // Attention: this method must not change this buffer's position - val n = remaining + val n = remaining() if (n == 0) -1 else if (_array != null) { // even if read-only @@ -151,9 +151,9 @@ abstract class CharBuffer private[nio] (_capacity: Int, override def toString(): String = { if (_array != null) { // even if read-only - new String(_array, position() + _arrayOffset, remaining) + new String(_array, position() + _arrayOffset, remaining()) } else { - val chars = new Array[Char](remaining) + val chars = new Array[Char](remaining()) val savedPos = position() get(chars) position(savedPos) @@ -161,7 +161,7 @@ abstract class CharBuffer private[nio] (_capacity: Int, } } - final def length(): Int = remaining + final def length(): Int = remaining() final def charAt(index: Int): Char = get(position() + index) diff --git a/javalib/src/main/scala/java/nio/GenBuffer.scala b/javalib/src/main/scala/java/nio/GenBuffer.scala index 1cdfb342a6..ae30f1d83c 100644 --- a/javalib/src/main/scala/java/nio/GenBuffer.scala +++ b/javalib/src/main/scala/java/nio/GenBuffer.scala @@ -46,8 +46,8 @@ private[nio] final class GenBuffer[B <: Buffer](val self: B) extends AnyVal { if (src eq self) throw new IllegalArgumentException ensureNotReadOnly() - val srcLimit = src.limit - var srcPos = src.position + val srcLimit = src.limit() + var srcPos = src.position() val length = srcLimit - srcPos var selfPos = getPosAndAdvanceWrite(length) src.position(srcLimit) @@ -78,14 +78,14 @@ private[nio] final class GenBuffer[B <: Buffer](val self: B) extends AnyVal { @inline def generic_hasArray(): Boolean = - _array != null && !isReadOnly + _array != null && !isReadOnly() @inline def generic_array(): Array[ElementType] = { val a = _array if (a == null) throw new UnsupportedOperationException - if (isReadOnly) + if (isReadOnly()) throw new ReadOnlyBufferException a } @@ -95,7 +95,7 @@ private[nio] final class GenBuffer[B <: Buffer](val self: B) extends AnyVal { val o = _arrayOffset if (o == -1) throw new UnsupportedOperationException - if (isReadOnly) + if (isReadOnly()) throw new ReadOnlyBufferException o } @@ -103,8 +103,8 @@ private[nio] final class GenBuffer[B <: Buffer](val self: B) extends AnyVal { @inline def generic_hashCode(hashSeed: Int): Int = { import scala.util.hashing.MurmurHash3._ - val start = position - val end = limit + val start = position() + val end = limit() var h = hashSeed var i = start while (i != end) { @@ -121,10 +121,10 @@ private[nio] final class GenBuffer[B <: Buffer](val self: B) extends AnyVal { if (self eq that) { 0 } else { - val thisStart = self.position - val thisRemaining = self.limit - thisStart - val thatStart = that.position - val thatRemaining = that.limit - thatStart + val thisStart = self.position() + val thisRemaining = self.limit() - thisStart + val thatStart = that.position() + val thatRemaining = that.limit() - thatStart val shortestLength = Math.min(thisRemaining, thatRemaining) var i = 0 diff --git 
a/javalib/src/main/scala/java/nio/GenHeapBuffer.scala b/javalib/src/main/scala/java/nio/GenHeapBuffer.scala index 1260e98d9d..6c8b9696ca 100644 --- a/javalib/src/main/scala/java/nio/GenHeapBuffer.scala +++ b/javalib/src/main/scala/java/nio/GenHeapBuffer.scala @@ -50,20 +50,25 @@ private[nio] final class GenHeapBuffer[B <: Buffer](val self: B) @inline def generic_slice()(implicit newHeapBuffer: NewThisHeapBuffer): BufferType = { - val newCapacity = remaining + val newCapacity = remaining() newHeapBuffer(newCapacity, _array, - _arrayOffset + position, + _arrayOffset + position(), 0, newCapacity, - isReadOnly) + isReadOnly()) } @inline def generic_duplicate()( implicit newHeapBuffer: NewThisHeapBuffer): BufferType = { val result = - newHeapBuffer(capacity, _array, _arrayOffset, position, limit, isReadOnly) + newHeapBuffer(capacity(), + _array, + _arrayOffset, + position(), + limit(), + isReadOnly()) result._mark = _mark result } @@ -72,7 +77,7 @@ private[nio] final class GenHeapBuffer[B <: Buffer](val self: B) def generic_asReadOnlyBuffer()( implicit newHeapBuffer: NewThisHeapBuffer): BufferType = { val result = - newHeapBuffer(capacity, _array, _arrayOffset, position, limit, true) + newHeapBuffer(capacity(), _array, _arrayOffset, position(), limit(), true) result._mark = _mark result } @@ -81,11 +86,11 @@ private[nio] final class GenHeapBuffer[B <: Buffer](val self: B) def generic_compact(): BufferType = { ensureNotReadOnly() - val len = remaining + val len = remaining() System - .arraycopy(_array, _arrayOffset + position, _array, _arrayOffset, len) + .arraycopy(_array, _arrayOffset + position(), _array, _arrayOffset, len) _mark = -1 - limit(capacity) + limit(capacity()) position(len) self } diff --git a/javalib/src/main/scala/java/nio/GenHeapBufferView.scala b/javalib/src/main/scala/java/nio/GenHeapBufferView.scala index eea009458b..e5711670ae 100644 --- a/javalib/src/main/scala/java/nio/GenHeapBufferView.scala +++ b/javalib/src/main/scala/java/nio/GenHeapBufferView.scala @@ -29,7 +29,7 @@ private[nio] object GenHeapBufferView { byteBuffer._arrayOffset + byteBufferPos, 0, viewCapacity, - byteBuffer.isReadOnly, + byteBuffer.isReadOnly(), byteBuffer.isBigEndian) } } @@ -43,26 +43,26 @@ private[nio] final class GenHeapBufferView[B <: Buffer](val self: B) @inline def generic_slice()( implicit newHeapBufferView: NewThisHeapBufferView): BufferType = { - val newCapacity = remaining + val newCapacity = remaining() val bytesPerElem = newHeapBufferView.bytesPerElem newHeapBufferView(newCapacity, _byteArray, - _byteArrayOffset + bytesPerElem * position, + _byteArrayOffset + bytesPerElem * position(), 0, newCapacity, - isReadOnly, + isReadOnly(), isBigEndian) } @inline def generic_duplicate()( implicit newHeapBufferView: NewThisHeapBufferView): BufferType = { - val result = newHeapBufferView(capacity, + val result = newHeapBufferView(capacity(), _byteArray, _byteArrayOffset, - position, - limit, - isReadOnly, + position(), + limit(), + isReadOnly(), isBigEndian) result._mark = _mark result @@ -71,11 +71,11 @@ private[nio] final class GenHeapBufferView[B <: Buffer](val self: B) @inline def generic_asReadOnlyBuffer()( implicit newHeapBufferView: NewThisHeapBufferView): BufferType = { - val result = newHeapBufferView(capacity, + val result = newHeapBufferView(capacity(), _byteArray, _byteArrayOffset, - position, - limit, + position(), + limit(), true, isBigEndian) result._mark = _mark @@ -85,18 +85,18 @@ private[nio] final class GenHeapBufferView[B <: Buffer](val self: B) @inline def generic_compact()( 
implicit newHeapBufferView: NewThisHeapBufferView): BufferType = { - if (isReadOnly) + if (isReadOnly()) throw new ReadOnlyBufferException - val len = remaining + val len = remaining() val bytesPerElem = newHeapBufferView.bytesPerElem System.arraycopy(_byteArray, - _byteArrayOffset + bytesPerElem * position, + _byteArrayOffset + bytesPerElem * position(), _byteArray, _byteArrayOffset, bytesPerElem * len) _mark = -1 - limit(capacity) + limit(capacity()) position(len) self } diff --git a/javalib/src/main/scala/java/nio/HeapByteBufferCharView.scala b/javalib/src/main/scala/java/nio/HeapByteBufferCharView.scala index c686762613..4a9699980b 100644 --- a/javalib/src/main/scala/java/nio/HeapByteBufferCharView.scala +++ b/javalib/src/main/scala/java/nio/HeapByteBufferCharView.scala @@ -34,14 +34,14 @@ private[nio] final class HeapByteBufferCharView private ( GenHeapBufferView(this).generic_asReadOnlyBuffer() def subSequence(start: Int, end: Int): CharBuffer = { - if (start < 0 || end < start || end > remaining) + if (start < 0 || end < start || end > remaining()) throw new IndexOutOfBoundsException - new HeapByteBufferCharView(capacity, + new HeapByteBufferCharView(capacity(), _byteArray, _byteArrayOffset, position() + start, position() + end, - isReadOnly, + isReadOnly(), isBigEndian) } diff --git a/javalib/src/main/scala/java/nio/HeapCharBuffer.scala b/javalib/src/main/scala/java/nio/HeapCharBuffer.scala index bbe6e2ba9e..ec8ee95bdf 100644 --- a/javalib/src/main/scala/java/nio/HeapCharBuffer.scala +++ b/javalib/src/main/scala/java/nio/HeapCharBuffer.scala @@ -33,14 +33,14 @@ private[nio] final class HeapCharBuffer private (_capacity: Int, GenHeapBuffer(this).generic_asReadOnlyBuffer() def subSequence(start: Int, end: Int): CharBuffer = { - if (start < 0 || end < start || end > remaining) + if (start < 0 || end < start || end > remaining()) throw new IndexOutOfBoundsException - new HeapCharBuffer(capacity, + new HeapCharBuffer(capacity(), _array, _arrayOffset, position() + start, position() + end, - isReadOnly) + isReadOnly()) } @noinline diff --git a/javalib/src/main/scala/java/nio/IntBuffer.scala b/javalib/src/main/scala/java/nio/IntBuffer.scala index 9fd42c8d87..7c0cc360d4 100644 --- a/javalib/src/main/scala/java/nio/IntBuffer.scala +++ b/javalib/src/main/scala/java/nio/IntBuffer.scala @@ -106,7 +106,7 @@ abstract class IntBuffer private[nio] (_capacity: Int, def isDirect(): Boolean // HERE - def isReadOnly: Boolean + def isReadOnly(): Boolean // toString(): String inherited from Buffer diff --git a/javalib/src/main/scala/java/nio/MappedByteBuffer.scala b/javalib/src/main/scala/java/nio/MappedByteBuffer.scala index 2bf4f06b49..eebe99b825 100644 --- a/javalib/src/main/scala/java/nio/MappedByteBuffer.scala +++ b/javalib/src/main/scala/java/nio/MappedByteBuffer.scala @@ -166,5 +166,5 @@ abstract class MappedByteBuffer(mode: FileChannel.MapMode, underlying.store(index, elem) private def ensureWritable(): Unit = - if (isReadOnly) throw new NonWritableChannelException() + if (isReadOnly()) throw new NonWritableChannelException() } diff --git a/javalib/src/main/scala/java/nio/StringCharBuffer.scala b/javalib/src/main/scala/java/nio/StringCharBuffer.scala index abfb3105c8..4a9f731d4d 100644 --- a/javalib/src/main/scala/java/nio/StringCharBuffer.scala +++ b/javalib/src/main/scala/java/nio/StringCharBuffer.scala @@ -17,13 +17,13 @@ private[nio] final class StringCharBuffer private (_capacity: Int, def isDirect(): Boolean = false def slice(): CharBuffer = { - val cap = remaining + val cap = remaining() 
new StringCharBuffer(cap, _csq, _csqOffset + position(), 0, cap) } def duplicate(): CharBuffer = { val result = - new StringCharBuffer(capacity, _csq, _csqOffset, position(), limit) + new StringCharBuffer(capacity(), _csq, _csqOffset, position(), limit()) result._mark = this._mark result } @@ -31,9 +31,9 @@ private[nio] final class StringCharBuffer private (_capacity: Int, def asReadOnlyBuffer(): CharBuffer = duplicate() def subSequence(start: Int, end: Int): CharBuffer = { - if (start < 0 || end < start || end > remaining) + if (start < 0 || end < start || end > remaining()) throw new IndexOutOfBoundsException - new StringCharBuffer(capacity, + new StringCharBuffer(capacity(), _csq, _csqOffset, position() + start, @@ -102,7 +102,7 @@ private[nio] object StringCharBuffer { capacity: Int, initialPosition: Int, initialLength: Int): CharBuffer = { - if (csqOffset < 0 || capacity < 0 || csqOffset + capacity > csq.length) + if (csqOffset < 0 || capacity < 0 || csqOffset + capacity > csq.length()) throw new IndexOutOfBoundsException val initialLimit = initialPosition + initialLength if (initialPosition < 0 || initialLength < 0 || initialLimit > capacity) diff --git a/javalib/src/main/scala/java/nio/channels/FileChannelImpl.scala b/javalib/src/main/scala/java/nio/channels/FileChannelImpl.scala index c80dc9c7c9..f5e0d328d7 100644 --- a/javalib/src/main/scala/java/nio/channels/FileChannelImpl.scala +++ b/javalib/src/main/scala/java/nio/channels/FileChannelImpl.scala @@ -58,7 +58,7 @@ final class FileChannelImpl(path: Path, number: Int): Long = { ensureOpen() - var bytesRead = 0l + var bytesRead = 0L var i = 0 while (i < number) { @@ -82,8 +82,8 @@ final class FileChannelImpl(path: Path, override def read(buffer: ByteBuffer, pos: Long): Int = { ensureOpen() position(pos) - val bufPosition: Int = buffer.position - raf.read(buffer.array, bufPosition, buffer.limit() - bufPosition) match { + val bufPosition: Int = buffer.position() + raf.read(buffer.array(), bufPosition, buffer.limit() - bufPosition) match { case bytesRead if bytesRead < 0 => bytesRead case bytesRead => buffer.position(bufPosition + bytesRead) @@ -138,10 +138,10 @@ final class FileChannelImpl(path: Path, override def write(buffer: ByteBuffer, pos: Long): Int = { ensureOpen() position(pos) - val srcPos: Int = buffer.position - val srcLim: Int = buffer.limit + val srcPos: Int = buffer.position() + val srcLim: Int = buffer.limit() val lim = math.abs(srcLim - srcPos) - raf.write(buffer.array, 0, lim) + raf.write(buffer.array(), 0, lim) buffer.position(srcPos + lim) lim } @@ -192,7 +192,7 @@ private object FileChannelImpl { mode.append("s") } - val raf = new RandomAccessFile(path.toFile, mode.toString) + val raf = new RandomAccessFile(path.toFile(), mode.toString) if (writing && options.contains(TRUNCATE_EXISTING)) { raf.setLength(0L) diff --git a/javalib/src/main/scala/java/nio/channels/FileLock.scala b/javalib/src/main/scala/java/nio/channels/FileLock.scala index 0dd733a66e..f1f923ff87 100644 --- a/javalib/src/main/scala/java/nio/channels/FileLock.scala +++ b/javalib/src/main/scala/java/nio/channels/FileLock.scala @@ -39,6 +39,6 @@ abstract class FileLock private (channel: Channel, release() override final def toString(): String = - s"FileLock($channel, $position, $size, $shared), isValid = $isValid" + s"FileLock($channel, $position, $size, $shared), isValid = ${isValid()}" } diff --git a/javalib/src/main/scala/java/nio/charset/Charset.scala b/javalib/src/main/scala/java/nio/charset/Charset.scala index e82e64f721..660685c22e 100644 --- 
a/javalib/src/main/scala/java/nio/charset/Charset.scala +++ b/javalib/src/main/scala/java/nio/charset/Charset.scala @@ -9,16 +9,16 @@ abstract class Charset protected (canonicalName: String, aliases: Array[String]) final def name(): String = canonicalName override final def equals(that: Any): Boolean = that match { - case that: Charset => this.name == that.name + case that: Charset => this.name() == that.name() case _ => false } override final def toString(): String = name() - override final def hashCode(): Int = name.## + override final def hashCode(): Int = name().## override final def compareTo(that: Charset): Int = - name.compareToIgnoreCase(that.name) + name().compareToIgnoreCase(that.name()) def contains(cs: Charset): Boolean @@ -50,14 +50,13 @@ abstract class Charset protected (canonicalName: String, aliases: Array[String]) final def encode(str: String): ByteBuffer = encode(CharBuffer.wrap(str)) - def displayName(): String = name + def displayName(): String = name() } object Charset { import StandardCharsets._ - def defaultCharset(): Charset = - UTF_8 + def defaultCharset(): Charset = UTF_8 def forName(charsetName: String): Charset = { val m = CharsetMap diff --git a/javalib/src/main/scala/java/nio/charset/CharsetDecoder.scala b/javalib/src/main/scala/java/nio/charset/CharsetDecoder.scala index cd11b63a41..4f88539d44 100644 --- a/javalib/src/main/scala/java/nio/charset/CharsetDecoder.scala +++ b/javalib/src/main/scala/java/nio/charset/CharsetDecoder.scala @@ -32,7 +32,7 @@ abstract class CharsetDecoder protected (cs: Charset, if (newReplacement == null || newReplacement == "") throw new IllegalArgumentException( "Invalid replacement: " + newReplacement) - if (newReplacement.length > maxCharsPerByte) + if (newReplacement.length > maxCharsPerByte()) throw new IllegalArgumentException( "Replacement string cannot be longer than maxCharsPerByte") _replacement = newReplacement @@ -94,8 +94,8 @@ abstract class CharsetDecoder protected (cs: Charset, throw new CoderMalfunctionError(ex) } - val result2 = if (result1.isUnderflow) { - val remaining = in.remaining + val result2 = if (result1.isUnderflow()) { + val remaining = in.remaining() if (endOfInput && remaining > 0) CoderResult.malformedForLength(remaining) else @@ -104,26 +104,26 @@ abstract class CharsetDecoder protected (cs: Charset, result1 } - if (result2.isUnderflow || result2.isOverflow) { + if (result2.isUnderflow() || result2.isOverflow()) { result2 } else { val action = - if (result2.isUnmappable) unmappableCharacterAction - else malformedInputAction + if (result2.isUnmappable()) unmappableCharacterAction() + else malformedInputAction() action match { case CodingErrorAction.REPLACE => - if (out.remaining < replacement.length) { + if (out.remaining() < replacement().length) { CoderResult.OVERFLOW } else { - out.put(replacement) - in.position(in.position() + result2.length) + out.put(replacement()) + in.position(in.position() + result2.length()) loop() } case CodingErrorAction.REPORT => result2 case CodingErrorAction.IGNORE => - in.position(in.position() + result2.length) + in.position(in.position() + result2.length()) loop() } } @@ -136,7 +136,7 @@ abstract class CharsetDecoder protected (cs: Charset, (status: @switch) match { case END => val result = implFlush(out) - if (result.isUnderflow) + if (result.isUnderflow()) status = FLUSHED result case FLUSHED => @@ -161,10 +161,10 @@ abstract class CharsetDecoder protected (cs: Charset, final def decode(in: ByteBuffer): CharBuffer = { def grow(out: CharBuffer): CharBuffer = { - if 
(out.capacity == 0) { + if (out.capacity() == 0) { CharBuffer.allocate(1) } else { - val result = CharBuffer.allocate(out.capacity * 2) + val result = CharBuffer.allocate(out.capacity() * 2) out.flip() result.put(out) result @@ -175,10 +175,10 @@ abstract class CharsetDecoder protected (cs: Charset, @tailrec def loopDecode(out: CharBuffer): CharBuffer = { val result = decode(in, out, endOfInput = true) - if (result.isUnderflow) { - assert(!in.hasRemaining) + if (result.isUnderflow()) { + assert(!in.hasRemaining()) out - } else if (result.isOverflow) { + } else if (result.isOverflow()) { loopDecode(grow(out)) } else { result.throwException() @@ -190,9 +190,9 @@ abstract class CharsetDecoder protected (cs: Charset, @tailrec def loopFlush(out: CharBuffer): CharBuffer = { val result = flush(out) - if (result.isUnderflow) { + if (result.isUnderflow()) { out - } else if (result.isOverflow) { + } else if (result.isOverflow()) { loopFlush(grow(out)) } else { result.throwException() @@ -201,7 +201,7 @@ abstract class CharsetDecoder protected (cs: Charset, } reset() - val initLength = (in.remaining.toDouble * averageCharsPerByte).toInt + val initLength = (in.remaining().toDouble * averageCharsPerByte()).toInt val out = loopFlush(loopDecode(CharBuffer.allocate(initLength))) out.flip() out diff --git a/javalib/src/main/scala/java/nio/charset/CharsetEncoder.scala b/javalib/src/main/scala/java/nio/charset/CharsetEncoder.scala index ba03c27fd8..1fd2afe262 100644 --- a/javalib/src/main/scala/java/nio/charset/CharsetEncoder.scala +++ b/javalib/src/main/scala/java/nio/charset/CharsetEncoder.scala @@ -36,7 +36,7 @@ abstract class CharsetEncoder protected ( final def replaceWith(newReplacement: Array[Byte]): CharsetEncoder = { if (newReplacement == null || newReplacement.length == 0 || - newReplacement.length > maxBytesPerChar || + newReplacement.length > maxBytesPerChar() || !isLegalReplacement(newReplacement)) throw new IllegalArgumentException @@ -48,7 +48,7 @@ abstract class CharsetEncoder protected ( protected def implReplaceWith(newReplacement: Array[Byte]): Unit = () def isLegalReplacement(repl: Array[Byte]): Boolean = { - val decoder = charset.newDecoder + val decoder = charset().newDecoder() val replBuf = ByteBuffer.wrap(repl) @inline @@ -56,10 +56,10 @@ abstract class CharsetEncoder protected ( def loop(outBufSize: Int): Boolean = { val result = decoder.decode(replBuf, CharBuffer.allocate(outBufSize), true) - if (result.isOverflow) { + if (result.isOverflow()) { loop(outBufSize * 2) } else { - !replBuf.hasRemaining + !replBuf.hasRemaining() } } @@ -118,8 +118,8 @@ abstract class CharsetEncoder protected ( throw new CoderMalfunctionError(ex) } - val result2 = if (result1.isUnderflow) { - val remaining = in.remaining + val result2 = if (result1.isUnderflow()) { + val remaining = in.remaining() if (endOfInput && remaining > 0) CoderResult.malformedForLength(remaining) else @@ -128,26 +128,26 @@ abstract class CharsetEncoder protected ( result1 } - if (result2.isUnderflow || result2.isOverflow) { + if (result2.isUnderflow() || result2.isOverflow()) { result2 } else { val action = - if (result2.isUnmappable) unmappableCharacterAction - else malformedInputAction + if (result2.isUnmappable()) unmappableCharacterAction() + else malformedInputAction() action match { case CodingErrorAction.REPLACE => - if (out.remaining < replacement.length) { + if (out.remaining() < replacement().length) { CoderResult.OVERFLOW } else { - out.put(replacement) - in.position(in.position() + result2.length) + 
out.put(replacement()) + in.position(in.position() + result2.length()) loop() } case CodingErrorAction.REPORT => result2 case CodingErrorAction.IGNORE => - in.position(in.position() + result2.length) + in.position(in.position() + result2.length()) loop() } } @@ -160,7 +160,7 @@ abstract class CharsetEncoder protected ( (status: @switch) match { case END => val result = implFlush(out) - if (result.isUnderflow) + if (result.isUnderflow()) status = FLUSHED result case FLUSHED => @@ -185,27 +185,27 @@ abstract class CharsetEncoder protected ( final def encode(in: CharBuffer): ByteBuffer = { def grow(out: ByteBuffer): ByteBuffer = { - if (out.capacity == 0) { + if (out.capacity() == 0) { ByteBuffer.allocate(1) } else { - val result = ByteBuffer.allocate(out.capacity * 2) + val result = ByteBuffer.allocate(out.capacity() * 2) out.flip() result.put(out) result } } - if (in.remaining == 0) { + if (in.remaining() == 0) { ByteBuffer.allocate(0) } else { @inline @tailrec def loopEncode(out: ByteBuffer): ByteBuffer = { val result = encode(in, out, endOfInput = true) - if (result.isUnderflow) { - assert(!in.hasRemaining) + if (result.isUnderflow()) { + assert(!in.hasRemaining()) out - } else if (result.isOverflow) { + } else if (result.isOverflow()) { loopEncode(grow(out)) } else { result.throwException() @@ -217,9 +217,9 @@ abstract class CharsetEncoder protected ( @tailrec def loopFlush(out: ByteBuffer): ByteBuffer = { val result = flush(out) - if (result.isUnderflow) { + if (result.isUnderflow()) { out - } else if (result.isOverflow) { + } else if (result.isOverflow()) { loopFlush(grow(out)) } else { result.throwException() @@ -228,7 +228,7 @@ abstract class CharsetEncoder protected ( } reset() - val initLength = (in.remaining * averageBytesPerChar).toInt + val initLength = (in.remaining() * averageBytesPerChar()).toInt val out = loopFlush(loopEncode(ByteBuffer.allocate(initLength))) out.flip() out diff --git a/javalib/src/main/scala/java/nio/charset/CoderResult.scala b/javalib/src/main/scala/java/nio/charset/CoderResult.scala index 5185476362..c836dbaa9e 100644 --- a/javalib/src/main/scala/java/nio/charset/CoderResult.scala +++ b/javalib/src/main/scala/java/nio/charset/CoderResult.scala @@ -14,7 +14,7 @@ class CoderResult private (kind: Int, _length: Int) { @inline def isMalformed(): Boolean = kind == Malformed @inline def isUnmappable(): Boolean = kind == Unmappable - @inline def isError(): Boolean = isMalformed || isUnmappable + @inline def isError(): Boolean = isMalformed() || isUnmappable() @inline def length(): Int = { val l = _length diff --git a/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala b/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala index f46988cc33..17fb4f5eaf 100644 --- a/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala +++ b/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala @@ -23,10 +23,10 @@ class DirectoryStreamImpl[T](stream: Stream[T], else { iteratorCalled = true new Iterator[T] { - override def hasNext(): Boolean = !closed && underlying.hasNext + override def hasNext(): Boolean = !closed && underlying.hasNext() override def next(): T = if (!hasNext()) throw new NoSuchElementException() - else underlying.next + else underlying.next() override def remove(): Unit = throw new UnsupportedOperationException() } } diff --git a/javalib/src/main/scala/java/nio/file/FileSystems.scala b/javalib/src/main/scala/java/nio/file/FileSystems.scala index 6d10c97daa..c57a9fef1f 100644 --- 
a/javalib/src/main/scala/java/nio/file/FileSystems.scala +++ b/javalib/src/main/scala/java/nio/file/FileSystems.scala @@ -30,7 +30,7 @@ object FileSystems { val map = new HashMap[String, Object]() var i = 0 var fs: Option[FileSystem] = None - while (i < providers.size && fs.isEmpty) { + while (i < providers.size() && fs.isEmpty) { try { fs = Some(providers.get(i).newFileSystem(path, map)) } catch { @@ -55,8 +55,8 @@ object FileSystems { val providers = FileSystemProvider.installedProviders var provider: Option[FileSystemProvider] = None var i = 0 - while (i < providers.size && provider.isEmpty) { - if (providers.get(i).getScheme.equalsIgnoreCase(uri.getScheme)) { + while (i < providers.size() && provider.isEmpty) { + if (providers.get(i).getScheme().equalsIgnoreCase(uri.getScheme())) { provider = Some(providers.get(i)) } i += 1 diff --git a/javalib/src/main/scala/java/nio/file/FileVisitOption.scala b/javalib/src/main/scala/java/nio/file/FileVisitOption.scala index 2c33a94e82..eb9334bab6 100644 --- a/javalib/src/main/scala/java/nio/file/FileVisitOption.scala +++ b/javalib/src/main/scala/java/nio/file/FileVisitOption.scala @@ -9,7 +9,7 @@ object FileVisitOption { val _values = Array(FOLLOW_LINKS) def values(): Array[FileVisitOption] = _values.clone() def valueOf(name: String): FileVisitOption = { - _values.find(_.name == name).getOrElse { + _values.find(_.name() == name).getOrElse { throw new IllegalArgumentException( "No enum const FileVisitOption." + name) } diff --git a/javalib/src/main/scala/java/nio/file/FileVisitResult.scala b/javalib/src/main/scala/java/nio/file/FileVisitResult.scala index bea732ea18..e2f366af1e 100644 --- a/javalib/src/main/scala/java/nio/file/FileVisitResult.scala +++ b/javalib/src/main/scala/java/nio/file/FileVisitResult.scala @@ -12,7 +12,7 @@ object FileVisitResult { private val _values = Array(CONTINUE, TERMINATE, SKIP_SUBTREE, SKIP_SIBLINGS) def values(): Array[FileVisitResult] = _values.clone() def valueOf(name: String): FileVisitResult = { - _values.find(_.name == name).getOrElse { + _values.find(_.name() == name).getOrElse { throw new IllegalArgumentException( "No enum const FileVisitResult." 
+ name) } diff --git a/javalib/src/main/scala/java/nio/file/Files.scala b/javalib/src/main/scala/java/nio/file/Files.scala index e3b4eb22a9..d354b6fe4a 100644 --- a/javalib/src/main/scala/java/nio/file/Files.scala +++ b/javalib/src/main/scala/java/nio/file/Files.scala @@ -1,47 +1,32 @@ package java.nio.file -import java.lang.Iterable -import java.lang.OutOfMemoryError - import java.io.{ BufferedReader, BufferedWriter, File, FileOutputStream, + IOException, InputStream, InputStreamReader, - IOException, OutputStream, OutputStreamWriter } - import java.nio.file.attribute._ import java.nio.charset.{Charset, StandardCharsets} import java.nio.channels.{FileChannel, SeekableByteChannel} - -import java.util.concurrent.TimeUnit import java.util.function.BiPredicate -import java.util.{ - EnumSet, - HashMap, - HashSet, - Iterator, - LinkedList, - List, - Map, - Set -} +import java.util.{EnumSet, HashMap, HashSet, LinkedList, List, Map, Set} import java.util.stream.{Stream, WrappedScalaStream} - import scalanative.unsigned._ import scalanative.unsafe._ import scalanative.libc._ -import scalanative.posix.{dirent, fcntl, limits, unistd}, dirent._ +import scalanative.posix.{dirent, fcntl, limits, unistd} +import dirent._ +import java.nio.file.StandardCopyOption._ import scalanative.posix.sys.stat import scalanative.nio.fs.{FileHelpers, UnixException} - -import scala.collection.immutable.{Map => SMap, Stream => SStream, Set => SSet} -import StandardCopyOption._ +import scala.collection.compat.immutable.{LazyList => SStream} +import scala.collection.immutable.{Map => SMap, Set => SSet} object Files { @@ -55,16 +40,18 @@ object Files { true else throw new UnsupportedOperationException() - val targetFile = target.toFile + val targetFile = target.toFile() val out = - if (!targetFile.exists || (targetFile.isFile && replaceExisting)) { + if (!targetFile.exists() || (targetFile.isFile() && replaceExisting)) { new FileOutputStream(targetFile, append = false) - } else if (targetFile.isDirectory && targetFile.list.isEmpty && replaceExisting) { + } else if (targetFile.isDirectory() && targetFile + .list() + .isEmpty && replaceExisting) { if (!targetFile.delete()) throw new IOException() new FileOutputStream(targetFile, append = false) } else { - throw new FileAlreadyExistsException(targetFile.getAbsolutePath) + throw new FileAlreadyExistsException(targetFile.getAbsolutePath()) } try copy(in, out) @@ -80,7 +67,7 @@ object Files { val linkOpts = Array(LinkOption.NOFOLLOW_LINKS) val attrs = Files.readAttributes(source, classOf[PosixFileAttributes], linkOpts) - if (attrs.isSymbolicLink) + if (attrs.isSymbolicLink()) throw new IOException( s"Unsupported operation: copy symbolic link $source to $target") val targetExists = exists(target, linkOpts) @@ -96,12 +83,12 @@ object Files { if (options.contains(COPY_ATTRIBUTES)) { val newAttrView = getFileAttributeView(target, classOf[PosixFileAttributeView], linkOpts) - newAttrView.setTimes(attrs.lastModifiedTime, - attrs.lastAccessTime, - attrs.creationTime) - newAttrView.setGroup(attrs.group) - newAttrView.setOwner(attrs.owner) - newAttrView.setPermissions(attrs.permissions) + newAttrView.setTimes(attrs.lastModifiedTime(), + attrs.lastAccessTime(), + attrs.creationTime()) + newAttrView.setGroup(attrs.group()) + newAttrView.setOwner(attrs.owner()) + newAttrView.setPermissions(attrs.permissions()) } target } @@ -110,7 +97,7 @@ object Files { var written: Long = 0L var value: Int = 0 - while ({ value = in.read; value != -1 }) { + while ({ value = in.read(); value != -1 }) { 
out.write(value) written += 1 } @@ -133,7 +120,7 @@ object Files { if (exists(dir, Array.empty)) { if (!isDirectory(dir, Array.empty)) { throw new FileAlreadyExistsException(dir.toString) - } else if (list(dir).iterator.hasNext) { + } else if (list(dir).iterator.hasNext()) { throw new DirectoryNotEmptyException(dir.toString) } dir @@ -198,7 +185,7 @@ object Files { def createTempDirectory(dir: Path, prefix: String, attrs: Array[FileAttribute[_]]): Path = - createTempDirectory(dir.toFile, prefix, attrs) + createTempDirectory(dir.toFile(), prefix, attrs) def createTempDirectory(prefix: String, attrs: Array[FileAttribute[_]]): Path = @@ -241,9 +228,9 @@ object Files { def exists(path: Path, options: Array[LinkOption]): Boolean = if (options.contains(LinkOption.NOFOLLOW_LINKS)) { - path.toFile.exists() || isSymbolicLink(path) + path.toFile().exists() || isSymbolicLink(path) } else { - path.toFile.exists() + path.toFile().exists() } def find(start: Path, @@ -258,7 +245,7 @@ object Files { matcher.test(p, attributes) } - new WrappedScalaStream(stream.toStream, None) + new WrappedScalaStream(stream.to(SStream), None) } def getAttribute(path: Path, @@ -288,7 +275,7 @@ object Files { val attributes = getFileAttributeView(path, classOf[BasicFileAttributeView], options) .readAttributes() - attributes.lastModifiedTime + attributes.lastModifiedTime() } def getOwner(path: Path, options: Array[LinkOption]): UserPrincipal = { @@ -324,9 +311,9 @@ object Files { val buf = alloc[stat.stat] val err = if (options.contains(LinkOption.NOFOLLOW_LINKS)) { - stat.lstat(toCString(path.toFile.getPath()), buf) + stat.lstat(toCString(path.toFile().getPath()), buf) } else { - stat.stat(toCString(path.toFile.getPath()), buf) + stat.stat(toCString(path.toFile().getPath()), buf) } if (err == 0) stat.S_ISREG(buf._13) == 1 else false @@ -338,7 +325,7 @@ object Files { def isSymbolicLink(path: Path): Boolean = Zone { implicit z => val buf = alloc[stat.stat] - if (stat.lstat(toCString(path.toFile.getPath()), buf) == 0) { + if (stat.lstat(toCString(path.toFile().getPath()), buf) == 0) { stat.S_ISLNK(buf._13) == 1 } else { false @@ -356,14 +343,14 @@ object Files { def list(dir: Path): Stream[Path] = new WrappedScalaStream( - FileHelpers.list(dir.toString, (n, _) => dir.resolve(n)).toStream, + FileHelpers.list(dir.toString, (n, _) => dir.resolve(n)).to(SStream), None) def move(source: Path, target: Path, options: Array[CopyOption]): Path = { - if (!exists(source.toAbsolutePath, Array.empty)) { + if (!exists(source.toAbsolutePath(), Array.empty)) { throw new NoSuchFileException(source.toString) - } else if (!exists(target.toAbsolutePath, Array.empty) || options.contains( - REPLACE_EXISTING)) { + } else if (!exists(target.toAbsolutePath(), Array.empty) || options + .contains(REPLACE_EXISTING)) { Zone { implicit z => if (stdio.rename(toCString(source.toAbsolutePath().toString), toCString(target.toAbsolutePath().toString)) != 0) { @@ -466,7 +453,7 @@ object Files { def readAllLines(path: Path, cs: Charset): List[String] = { val list = new LinkedList[String]() val reader = newBufferedReader(path, cs) - val lines = reader.lines.iterator + val lines = reader.lines().iterator while (lines.hasNext()) { list.add(lines.next()) } @@ -578,15 +565,16 @@ object Files { currentDepth: Int, options: Array[FileVisitOption], visited: SSet[Path]): SStream[Path] = { + start #:: { if (!isDirectory(start, linkOptsFromFileVisitOpts(options))) SStream.empty else { FileHelpers .list(start.toString, (n, t) => (n, t)) - .toStream + .to(SStream) .flatMap { - 
case (name, tpe) - if tpe == DT_LNK && options.contains( + case (name: String, tpe) + if tpe == DT_LNK() && options.contains( FileVisitOption.FOLLOW_LINKS) => val path = start.resolve(name) val newVisited = visited + path @@ -594,15 +582,16 @@ object Files { if (newVisited.contains(target)) throw new FileSystemLoopException(path.toString) else walk(path, maxDepth, currentDepth + 1, options, newVisited) - case (name, tpe) if tpe == DT_DIR && currentDepth < maxDepth => + case (name: String, tpe) + if tpe == DT_DIR() && currentDepth < maxDepth => val path = start.resolve(name) val newVisited = if (options.contains(FileVisitOption.FOLLOW_LINKS)) visited + path else visited walk(path, maxDepth, currentDepth + 1, options, newVisited) - case (name, _) => - start.resolve(name) #:: SStream.Empty + case (name: String, _) => + start.resolve(name) #:: SStream.empty } } } @@ -643,7 +632,7 @@ object Files { val dirsToSkip = scala.collection.mutable.Set.empty[Path] val openDirs = scala.collection.mutable.Stack.empty[Path] stream.foreach { p => - val parent = p.getParent + val parent = p.getParent() if (dirsToSkip.contains(parent)) () else { @@ -658,13 +647,13 @@ object Files { } val result = - if (attributes.isRegularFile) { + if (attributes.isRegularFile()) { visitor.visitFile(p, attributes) - } else if (attributes.isDirectory) { + } else if (attributes.isDirectory()) { openDirs.push(p) visitor.preVisitDirectory(p, attributes) match { case FileVisitResult.SKIP_SUBTREE => - openDirs.pop; FileVisitResult.SKIP_SUBTREE + openDirs.pop(); FileVisitResult.SKIP_SUBTREE case other => other } } else { @@ -720,7 +709,7 @@ object Files { else _options val writer = newBufferedWriter(path, cs, options) val it = lines.iterator - while (it.hasNext()) { + while (it.hasNext) { writer.append(it.next()) writer.newLine() } @@ -734,7 +723,7 @@ object Files { write(path, lines, StandardCharsets.UTF_8, options) private def setAttributes(path: Path, attrs: Array[FileAttribute[_]]): Unit = - attrs.map(a => (a.name, a.value)).toMap.foreach { + attrs.map(a => (a.name(), a.value)).toMap.foreach { case (name, value) => setAttribute(path, name, value.asInstanceOf[AnyRef], Array.empty) } diff --git a/javalib/src/main/scala/java/nio/file/attribute/FileTime.scala b/javalib/src/main/scala/java/nio/file/attribute/FileTime.scala index 1a91277872..facb530ff4 100644 --- a/javalib/src/main/scala/java/nio/file/attribute/FileTime.scala +++ b/javalib/src/main/scala/java/nio/file/attribute/FileTime.scala @@ -5,11 +5,11 @@ import java.time.Instant final class FileTime private (instant: Instant) extends Comparable[FileTime] { def compareTo(other: FileTime) = - instant.compareTo(other.toInstant) + instant.compareTo(other.toInstant()) override def equals(obj: Any): Boolean = obj match { - case other: FileTime => instant == other.toInstant + case other: FileTime => instant == other.toInstant() case _ => false } @@ -20,7 +20,7 @@ final class FileTime private (instant: Instant) extends Comparable[FileTime] { unit.convert(toMillis(), TimeUnit.MILLISECONDS) def toMillis(): Long = - instant.toEpochMilli + instant.toEpochMilli() def toInstant(): Instant = instant diff --git a/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala b/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala index d37bab1f5b..0a9eaa9778 100644 --- a/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala +++ b/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala @@ -6,8 +6,8 @@ object 
PosixFilePermissions { def asFileAttribute(perms: Set[PosixFilePermission]) : FileAttribute[Set[PosixFilePermission]] = new FileAttribute[Set[PosixFilePermission]] { - override def name(): String = "posix:permissions" - override def value(): Set[PosixFilePermission] = perms + override def name(): String = "posix:permissions" + override def value: Set[PosixFilePermission] = perms } def fromString(perms: String): Set[PosixFilePermission] = diff --git a/javalib/src/main/scala/java/security/MessageDigestSpi.scala b/javalib/src/main/scala/java/security/MessageDigestSpi.scala index 66e877eeef..7b3566eb8e 100644 --- a/javalib/src/main/scala/java/security/MessageDigestSpi.scala +++ b/javalib/src/main/scala/java/security/MessageDigestSpi.scala @@ -13,7 +13,7 @@ abstract class MessageDigestSpi { protected def engineUpdate(input: Array[Byte], offset: Int, len: Int): Unit protected def engineUpdate(input: ByteBuffer): Unit = { - if (input.hasRemaining) { + if (input.hasRemaining()) { if (input.hasArray()) { val tmp = input.array() val offset = input.arrayOffset() diff --git a/javalib/src/main/scala/java/security/Timestamp.scala b/javalib/src/main/scala/java/security/Timestamp.scala index d5e97130bb..8ef77b6d3b 100644 --- a/javalib/src/main/scala/java/security/Timestamp.scala +++ b/javalib/src/main/scala/java/security/Timestamp.scala @@ -23,7 +23,7 @@ final class Timestamp private (private val signerCertPath: CertPath, @inline def getSignerCertPath: CertPath = signerCertPath - @inline def getTimestamp: Date = new Date(timestamp.getTime) + @inline def getTimestamp: Date = new Date(timestamp.getTime()) override def equals(obj: Any): Boolean = obj match { @@ -45,8 +45,8 @@ final class Timestamp private (private val signerCertPath: CertPath, buf.append("Timestamp [") buf.append(timestamp.toString) buf.append(" certPath=") - val certificates = signerCertPath.getCertificates - if (certificates.isEmpty) { + val certificates = signerCertPath.getCertificates() + if (certificates.isEmpty()) { buf.append(certificates.get(0).toString) } else { buf.append("") @@ -63,6 +63,6 @@ private object TimestampConstructorHelper { if (timestamp eq null) { throw new NullPointerException("Timestamp cannot be null") } else { - new Date(timestamp.getTime) + new Date(timestamp.getTime()) } } diff --git a/javalib/src/main/scala/java/security/cert/CertPath.scala b/javalib/src/main/scala/java/security/cert/CertPath.scala index 5e584fc953..5962e6d6ac 100644 --- a/javalib/src/main/scala/java/security/cert/CertPath.scala +++ b/javalib/src/main/scala/java/security/cert/CertPath.scala @@ -9,8 +9,8 @@ abstract class CertPath protected (`type`: String) { override def equals(other: Any): Boolean = other match { case otherRef: AnyRef if this eq otherRef => true - case otherCp: CertPath if otherCp.getType.equals(`type`) => - getCertificates().equals(otherCp.getCertificates) + case otherCp: CertPath if otherCp.getType().equals(`type`) => + getCertificates().equals(otherCp.getCertificates()) case _ => false } } diff --git a/javalib/src/main/scala/java/security/cert/Certificate.scala b/javalib/src/main/scala/java/security/cert/Certificate.scala index 1d71fc7973..d28edb1678 100644 --- a/javalib/src/main/scala/java/security/cert/Certificate.scala +++ b/javalib/src/main/scala/java/security/cert/Certificate.scala @@ -13,7 +13,7 @@ abstract class Certificate(private val `type`: String) extends Serializable { case otherCertificate: Certificate => try { java.util.Arrays - .equals(this.getEncoded(), otherCertificate.getEncoded) + 
.equals(this.getEncoded(), otherCertificate.getEncoded()) } catch { case e: CertificateEncodingException => throw new RuntimeException(e) diff --git a/javalib/src/main/scala/java/text/DecimalFormat.scala b/javalib/src/main/scala/java/text/DecimalFormat.scala index 2c543ea774..b31cc354d0 100644 --- a/javalib/src/main/scala/java/text/DecimalFormat.scala +++ b/javalib/src/main/scala/java/text/DecimalFormat.scala @@ -217,7 +217,7 @@ class DecimalFormat extends NumberFormat { val (fracDigits, expDigits) = suffix.tail.span(_ != 'E') - if (expDigits.isEmpty) { + if (expDigits.isEmpty()) { DoubleDigits(toUnicode(wdPrefix.toSeq), toUnicode(fracDigits.toSeq)) } else { val exponentB10 = java.lang.Integer.parseInt(expDigits.tail) @@ -261,7 +261,7 @@ class DecimalFormat extends NumberFormat { 0.0 } val toBeRounded = (whole.mkString.toDouble + fracd) * signum - Math.round(toBeRounded) + Math.round(toBeRounded).toDouble } } @@ -292,8 +292,8 @@ class DecimalFormat extends NumberFormat { implicit private object BigIntegerFormatting extends Formatting[BigInteger] { def toDigits(number: BigInteger): Digits = { - val numabs = number.abs - Digits(number.signum < 0, numabs.toString.toSeq, Seq.empty) + val numabs = number.abs() + Digits(number.signum() < 0, numabs.toString.toSeq, Seq.empty) } def roundToInteger(digits: Digits): Digits = { @@ -302,22 +302,22 @@ class DecimalFormat extends NumberFormat { val sign = if (negative) "-" else "" val toBeRounded = new BigDecimal(sign + whole.mkString + "." + frac.mkString) - toBeRounded.setScale(0, getRoundingMode()).toBigInteger + toBeRounded.setScale(0, getRoundingMode()).toBigInteger() } } } implicit private object BigDecimalFormatting extends Formatting[BigDecimal] { def toDigits(number: BigDecimal): Digits = { - val numabs = number.abs - val s = numabs.toPlainString + val numabs = number.abs() + val s = numabs.toPlainString() val (whole, frac) = s.indexOf('.') match { case -1 => (s, "") case dp => val (whole, dotFrac) = s.splitAt(dp) (whole, dotFrac.tail) } - Digits(number.signum < 0, whole.toSeq, frac.toSeq) + Digits(number.signum() < 0, whole.toSeq, frac.toSeq) } def roundToInteger(digits: Digits): Digits = { @@ -340,8 +340,8 @@ class DecimalFormat extends NumberFormat { case bd: BigDecimal => formatImpl(bd, toAppendTo, pos) case num: Number => - val l = num.longValue - val d = num.doubleValue + val l = num.longValue() + val d = num.doubleValue() // type ascriptions are put to make sure the correct overload is called if (num == l) format(l: Long, toAppendTo, pos) diff --git a/javalib/src/main/scala/java/text/NumberFormat.scala b/javalib/src/main/scala/java/text/NumberFormat.scala index aa2523d41c..53f11108e8 100644 --- a/javalib/src/main/scala/java/text/NumberFormat.scala +++ b/javalib/src/main/scala/java/text/NumberFormat.scala @@ -10,8 +10,8 @@ abstract class NumberFormat extends Format { pos: FieldPosition): StringBuffer = obj match { case num: Number => - val l = num.longValue - val d = num.doubleValue + val l = num.longValue() + val d = num.doubleValue() if (l == d) format(l, toAppendTo, pos) else diff --git a/javalib/src/main/scala/java/time/Duration.scala b/javalib/src/main/scala/java/time/Duration.scala index 54185f201e..143a9cb526 100644 --- a/javalib/src/main/scala/java/time/Duration.scala +++ b/javalib/src/main/scala/java/time/Duration.scala @@ -50,14 +50,14 @@ final class Duration private (seconds: Long, nanos: Int) } def compareTo(that: Duration): Int = { - val secCmp = seconds.compareTo(that.getSeconds) - if (secCmp == 0) 
nanos.compareTo(that.getNano) + val secCmp = seconds.compareTo(that.getSeconds()) + if (secCmp == 0) nanos.compareTo(that.getNano()) else secCmp } override def equals(that: Any): Boolean = that match { case that: Duration => - seconds == that.getSeconds && nanos == that.getNano + seconds == that.getSeconds() && nanos == that.getNano() case _ => false } diff --git a/javalib/src/main/scala/java/util/AbstractCollection.scala b/javalib/src/main/scala/java/util/AbstractCollection.scala index add8541cc9..abbb852cde 100644 --- a/javalib/src/main/scala/java/util/AbstractCollection.scala +++ b/javalib/src/main/scala/java/util/AbstractCollection.scala @@ -8,26 +8,26 @@ abstract class AbstractCollection[E] protected () extends Collection[E] { def iterator(): Iterator[E] def size(): Int - def isEmpty(): Boolean = size == 0 + def isEmpty(): Boolean = size() == 0 def contains(o: Any): Boolean = iterator().scalaOps.exists(o === _) def toArray(): Array[AnyRef] = - toArray(new Array[AnyRef](size)) + toArray(new Array[AnyRef](size())) def toArray[T <: AnyRef](a: Array[T]): Array[T] = { val toFill: Array[T] = - if (a.size >= size) a + if (a.length >= size()) a else jlr.Array - .newInstance(a.getClass.getComponentType, size) + .newInstance(a.getClass.getComponentType, size()) .asInstanceOf[Array[T]] - val iter = iterator - for (i <- 0 until size) toFill(i) = iter.next().asInstanceOf[T] - if (toFill.size > size) - toFill(size) = null.asInstanceOf[T] + val iter = iterator() + for (i <- 0 until size()) toFill(i) = iter.next().asInstanceOf[T] + if (toFill.length > size()) + toFill(size()) = null.asInstanceOf[T] toFill } @@ -37,7 +37,7 @@ abstract class AbstractCollection[E] protected () extends Collection[E] { def remove(o: Any): Boolean = { @tailrec def findAndRemove(iter: Iterator[E]): Boolean = { - if (iter.hasNext) { + if (iter.hasNext()) { if (iter.next() === o) { iter.remove() true @@ -67,7 +67,7 @@ abstract class AbstractCollection[E] protected () extends Collection[E] { private def removeWhere(p: Any => Boolean): Boolean = { val iter = iterator() var changed = false - while (iter.hasNext) { + while (iter.hasNext()) { if (p(iter.next())) { iter.remove() changed = true @@ -77,5 +77,5 @@ abstract class AbstractCollection[E] protected () extends Collection[E] { } override def toString(): String = - iterator.scalaOps.mkString("[", ", ", "]") + iterator().scalaOps.mkString("[", ", ", "]") } diff --git a/javalib/src/main/scala/java/util/AbstractList.scala b/javalib/src/main/scala/java/util/AbstractList.scala index dcd96af4ec..b9a4471b19 100644 --- a/javalib/src/main/scala/java/util/AbstractList.scala +++ b/javalib/src/main/scala/java/util/AbstractList.scala @@ -7,7 +7,7 @@ abstract class AbstractList[E] protected () with List[E] { self => override def add(element: E): Boolean = { - add(size, element) + add(size(), element) true } @@ -38,15 +38,15 @@ abstract class AbstractList[E] protected () def lastIndexOf(o: Any): Int = { @tailrec def findIndex(iter: ListIterator[E]): Int = { - if (!iter.hasPrevious) -1 - else if (iter.previous() === o) iter.nextIndex + if (!iter.hasPrevious()) -1 + else if (iter.previous() === o) iter.nextIndex() else findIndex(iter) } - findIndex(listIterator(size)) + findIndex(listIterator(size())) } override def clear(): Unit = - removeRange(0, size) + removeRange(0, size()) def addAll(index: Int, c: Collection[_ <: E]): Boolean = { checkIndexOnBounds(index) @@ -56,7 +56,7 @@ abstract class AbstractList[E] protected () add(i, iter.next()) i += 1 } - !c.isEmpty + !c.isEmpty() } def 
iterator(): Iterator[E] = @@ -70,13 +70,13 @@ abstract class AbstractList[E] protected () // By default we use RandomAccessListIterator because we only have access to // the get(index) operation in the API. Subclasses override this if needs // using their knowledge of the structure instead. - new RandomAccessListIterator(self, index, 0, size) + new RandomAccessListIterator(self, index, 0, size()) } def subList(fromIndex: Int, toIndex: Int): List[E] = { if (fromIndex < 0) throw new IndexOutOfBoundsException(fromIndex.toString) - else if (toIndex > size) + else if (toIndex > size()) throw new IndexOutOfBoundsException(toIndex.toString) else if (fromIndex > toIndex) throw new IllegalArgumentException @@ -119,8 +119,8 @@ abstract class AbstractList[E] protected () } else { o match { case o: List[_] => - val oIter = o.listIterator - val iter = listIterator + val oIter = o.listIterator() + val iter = listIterator() var result = true while (result && iter.hasNext() && oIter.hasNext()) { @@ -146,19 +146,19 @@ abstract class AbstractList[E] protected () protected def removeRange(fromIndex: Int, toIndex: Int): Unit = { var i = 0 val iter = listIterator(fromIndex) - while (iter.hasNext && i <= toIndex) { + while (iter.hasNext() && i <= toIndex) { iter.remove() i += 1 } } protected[this] def checkIndexInBounds(index: Int): Unit = { - if (index < 0 || index >= size) + if (index < 0 || index >= size()) throw new IndexOutOfBoundsException(index.toString) } protected[this] def checkIndexOnBounds(index: Int): Unit = { - if (index < 0 || index > size) + if (index < 0 || index > size()) throw new IndexOutOfBoundsException(index.toString) } } @@ -177,13 +177,13 @@ private abstract class AbstractListView[E](protected val list: List[E], override def addAll(index: Int, c: Collection[_ <: E]): Boolean = { checkIndexOnBounds(index) list.addAll(fromIndex + index, c) - val elementsAdded = c.size + val elementsAdded = c.size() toIndex += elementsAdded elementsAdded != 0 } override def addAll(c: Collection[_ <: E]): Boolean = - addAll(size, c) + addAll(size(), c) def get(index: Int): E = { checkIndexInBounds(index) @@ -251,7 +251,7 @@ private class BackedUpListIterator[E](innerIterator: ListIterator[E], } private def i: Int = - innerIterator.nextIndex - fromIndex + innerIterator.nextIndex() - fromIndex } /* RandomAccessListIterator implementation assumes that the has an efficient diff --git a/javalib/src/main/scala/java/util/AbstractMap.scala b/javalib/src/main/scala/java/util/AbstractMap.scala index 48a32aa65d..139ca7b6e4 100644 --- a/javalib/src/main/scala/java/util/AbstractMap.scala +++ b/javalib/src/main/scala/java/util/AbstractMap.scala @@ -8,18 +8,19 @@ object AbstractMap { private def entryEquals[K, V](entry: Map.Entry[K, V], other: Any): Boolean = { other match { case other: Map.Entry[_, _] => - entry.getKey === other.getKey && entry.getValue === other.getValue + entry.getKey() === other.getKey() && entry.getValue() === other + .getValue() case _ => false } } private def entryHashCode[K, V](entry: Map.Entry[K, V]): Int = { val keyHash = - if (entry.getKey == null) 0 - else entry.getKey.hashCode + if (entry.getKey() == null) 0 + else entry.getKey().hashCode val valueHash = - if (entry.getValue == null) 0 - else entry.getValue.hashCode + if (entry.getValue() == null) 0 + else entry.getValue().hashCode keyHash ^ valueHash } @@ -29,7 +30,7 @@ object AbstractMap { with Serializable { def this(entry: Map.Entry[_ <: K, _ <: V]) = - this(entry.getKey, entry.getValue) + this(entry.getKey(), entry.getValue()) def 
getKey(): K = key @@ -48,7 +49,7 @@ object AbstractMap { entryHashCode(this) override def toString(): String = - getKey + "=" + getValue + getKey().toString + "=" + getValue() } class SimpleImmutableEntry[K, V](key: K, value: V) @@ -56,7 +57,7 @@ object AbstractMap { with Serializable { def this(entry: Map.Entry[_ <: K, _ <: V]) = - this(entry.getKey, entry.getValue) + this(entry.getKey(), entry.getValue()) def getKey(): K = key @@ -72,16 +73,16 @@ object AbstractMap { entryHashCode(this) override def toString(): String = - getKey + "=" + getValue + getKey().toString + "=" + getValue() } } abstract class AbstractMap[K, V] protected () extends java.util.Map[K, V] { self => - def size(): Int = entrySet.size + def size(): Int = entrySet().size() - def isEmpty(): Boolean = size == 0 + def isEmpty(): Boolean = size() == 0 def containsValue(value: Any): Boolean = entrySet().iterator().scalaOps.exists(value === _.getValue()) @@ -105,17 +106,17 @@ abstract class AbstractMap[K, V] protected () extends java.util.Map[K, V] { override def remove(key: Any): V = { @tailrec def findAndRemove(iter: Iterator[Map.Entry[K, V]]): V = { - if (iter.hasNext) { + if (iter.hasNext()) { val item = iter.next() - if (key === item.getKey) { + if (key === item.getKey()) { iter.remove() - item.getValue + item.getValue() } else findAndRemove(iter) } else null.asInstanceOf[V] } - findAndRemove(entrySet.iterator) + findAndRemove(entrySet().iterator()) } def putAll(m: Map[_ <: K, _ <: V]): Unit = @@ -125,15 +126,15 @@ abstract class AbstractMap[K, V] protected () extends java.util.Map[K, V] { .foreach(e => put(e.getKey(), e.getValue())) def clear(): Unit = - entrySet.clear() + entrySet().clear() def keySet(): Set[K] = { new AbstractSet[K] { - override def size(): Int = self.size + override def size(): Int = self.size() def iterator(): Iterator[K] = { new Iterator[K] { - val iter = entrySet.iterator() + val iter = entrySet().iterator() def hasNext(): Boolean = iter.hasNext() @@ -147,11 +148,11 @@ abstract class AbstractMap[K, V] protected () extends java.util.Map[K, V] { def values(): Collection[V] = { new AbstractCollection[V] { - override def size(): Int = self.size + override def size(): Int = self.size() def iterator(): Iterator[V] = { new Iterator[V] { - val iter = entrySet.iterator() + val iter = entrySet().iterator() def hasNext(): Boolean = iter.hasNext() diff --git a/javalib/src/main/scala/java/util/AbstractQueue.scala b/javalib/src/main/scala/java/util/AbstractQueue.scala index f8d8494468..f626ebf73f 100644 --- a/javalib/src/main/scala/java/util/AbstractQueue.scala +++ b/javalib/src/main/scala/java/util/AbstractQueue.scala @@ -21,7 +21,7 @@ abstract class AbstractQueue[E] protected () } override def addAll(c: Collection[_ <: E]): Boolean = { - val iter = c.iterator + val iter = c.iterator() var changed = false while (iter.hasNext()) changed = add(iter.next()) || changed changed diff --git a/javalib/src/main/scala/java/util/AbstractSequentialList.scala b/javalib/src/main/scala/java/util/AbstractSequentialList.scala index 0140340473..26dc208500 100644 --- a/javalib/src/main/scala/java/util/AbstractSequentialList.scala +++ b/javalib/src/main/scala/java/util/AbstractSequentialList.scala @@ -4,13 +4,13 @@ abstract class AbstractSequentialList[E] protected () extends AbstractList[E] { def get(index: Int): E = { val iter = listIterator(index) - if (iter.hasNext) iter.next() + if (iter.hasNext()) iter.next() else throw new IndexOutOfBoundsException(index.toString) } override def set(index: Int, element: E): E = { val iter 
= listIterator(index) - if (!iter.hasNext) + if (!iter.hasNext()) throw new IndexOutOfBoundsException val ret = iter.next() iter.set(element) @@ -22,10 +22,10 @@ abstract class AbstractSequentialList[E] protected () extends AbstractList[E] { override def remove(index: Int): E = { val iter = listIterator(index) - if (!iter.hasNext) + if (!iter.hasNext()) throw new IndexOutOfBoundsException val ret = iter.next() - iter.remove + iter.remove() ret } diff --git a/javalib/src/main/scala/java/util/AbstractSet.scala b/javalib/src/main/scala/java/util/AbstractSet.scala index c209edae86..c5e1179e50 100644 --- a/javalib/src/main/scala/java/util/AbstractSet.scala +++ b/javalib/src/main/scala/java/util/AbstractSet.scala @@ -10,8 +10,9 @@ abstract class AbstractSet[E] protected () if (that.asInstanceOf[AnyRef] eq this) true else { that match { - case that: Collection[_] => that.size == this.size && containsAll(that) - case _ => false + case that: Collection[_] => + that.size() == this.size() && containsAll(that) + case _ => false } } } @@ -25,7 +26,7 @@ abstract class AbstractSet[E] protected () else { @tailrec def removeAll(iter: Iterator[E], modified: Boolean): Boolean = { - if (iter.hasNext) { + if (iter.hasNext()) { if (c.contains(iter.next())) { iter.remove() removeAll(iter, true) @@ -34,7 +35,7 @@ abstract class AbstractSet[E] protected () } else modified } - removeAll(this.iterator, false) + removeAll(this.iterator(), false) } } } diff --git a/javalib/src/main/scala/java/util/ArrayDeque.scala b/javalib/src/main/scala/java/util/ArrayDeque.scala index 8e0a53b9c7..5e16e5ea6a 100644 --- a/javalib/src/main/scala/java/util/ArrayDeque.scala +++ b/javalib/src/main/scala/java/util/ArrayDeque.scala @@ -42,7 +42,7 @@ class ArrayDeque[E] private (private val inner: ArrayList[E]) } def this(c: Collection[_ <: E]) = { - this(c.size) + this(c.size()) addAll(c) } @@ -82,21 +82,21 @@ class ArrayDeque[E] private (private val inner: ArrayList[E]) } def removeFirst(): E = { - if (inner.isEmpty) + if (inner.isEmpty()) throw new NoSuchElementException() else pollFirst() } def removeLast(): E = { - if (inner.isEmpty) + if (inner.isEmpty()) throw new NoSuchElementException() else pollLast() } def pollFirst(): E = { - if (inner.isEmpty) null.asInstanceOf[E] + if (inner.isEmpty()) null.asInstanceOf[E] else { val res = inner.remove(0) status += 1 @@ -105,36 +105,36 @@ class ArrayDeque[E] private (private val inner: ArrayList[E]) } def pollLast(): E = { - if (inner.isEmpty) null.asInstanceOf[E] + if (inner.isEmpty()) null.asInstanceOf[E] else { - val res = inner.remove(inner.size - 1) + val res = inner.remove(inner.size() - 1) status += 1 res } } def getFirst(): E = { - if (inner.isEmpty) + if (inner.isEmpty()) throw new NoSuchElementException() else peekFirst() } def getLast(): E = { - if (inner.isEmpty) + if (inner.isEmpty()) throw new NoSuchElementException() else peekLast() } def peekFirst(): E = { - if (inner.isEmpty) null.asInstanceOf[E] + if (inner.isEmpty()) null.asInstanceOf[E] else inner.get(0) } def peekLast(): E = { - if (inner.isEmpty) null.asInstanceOf[E] - else inner.get(inner.size - 1) + if (inner.isEmpty()) null.asInstanceOf[E] + else inner.get(inner.size() - 1) } def removeFirstOccurrence(o: Any): Boolean = { @@ -171,7 +171,7 @@ class ArrayDeque[E] private (private val inner: ArrayList[E]) def pop(): E = removeFirst() - def size(): Int = inner.size + def size(): Int = inner.size() private def failFastIterator(startIndex: Int, nex: (Int) => Int) = { new Iterator[E] { @@ -187,7 +187,7 @@ class ArrayDeque[E] 
private (private val inner: ArrayList[E]) def hasNext(): Boolean = { checkStatus() val n = nex(index) - (n >= 0) && (n < inner.size) + (n >= 0) && (n < inner.size()) } def next(): E = { @@ -198,7 +198,7 @@ class ArrayDeque[E] private (private val inner: ArrayList[E]) override def remove(): Unit = { checkStatus() - if (index < 0 || index >= inner.size) { + if (index < 0 || index >= inner.size()) { throw new IllegalStateException() } else { inner.remove(index) @@ -211,14 +211,14 @@ class ArrayDeque[E] private (private val inner: ArrayList[E]) failFastIterator(-1, x => (x + 1)) def descendingIterator(): Iterator[E] = - failFastIterator(inner.size, x => (x - 1)) + failFastIterator(inner.size(), x => (x - 1)) override def contains(o: Any): Boolean = inner.contains(o) override def remove(o: Any): Boolean = removeFirstOccurrence(o) override def clear(): Unit = { - if (!inner.isEmpty) status += 1 + if (!inner.isEmpty()) status += 1 inner.clear() } diff --git a/javalib/src/main/scala/java/util/ArrayList.scala b/javalib/src/main/scala/java/util/ArrayList.scala index 5d1f370835..f4aa33c436 100644 --- a/javalib/src/main/scala/java/util/ArrayList.scala +++ b/javalib/src/main/scala/java/util/ArrayList.scala @@ -151,7 +151,7 @@ class ArrayList[E] private (private[this] var inner: Array[Any], // JVM documents fromIndex == toIndex as having 'no effect' if (fromIndex != toIndex) { - if ((fromIndex < 0) || (fromIndex >= _size) || (toIndex > size) + if ((fromIndex < 0) || (fromIndex >= _size) || (toIndex > size()) || (toIndex < fromIndex)) { // N.B.: JVM docs specify IndexOutOfBounds but use de facto. throw new ArrayIndexOutOfBoundsException() } diff --git a/javalib/src/main/scala/java/util/Arrays.scala b/javalib/src/main/scala/java/util/Arrays.scala index 7f3bd51ab1..7be95f504c 100644 --- a/javalib/src/main/scala/java/util/Arrays.scala +++ b/javalib/src/main/scala/java/util/Arrays.scala @@ -787,7 +787,7 @@ object Arrays { start: Int, end: Int): Unit = { if (start > end) - throw new IllegalArgumentException(start + " > " + end) + throw new IllegalArgumentException(s"$start > $end") if (start < 0 || start > len) throw new ArrayIndexOutOfBoundsException } diff --git a/javalib/src/main/scala/java/util/Base64.scala b/javalib/src/main/scala/java/util/Base64.scala index 77f78d2fb5..be14bfa1da 100644 --- a/javalib/src/main/scala/java/util/Base64.scala +++ b/javalib/src/main/scala/java/util/Base64.scala @@ -440,7 +440,7 @@ object Base64 { currentLine += 4 if (lineSeparator.length > 0 && lineLength > 0 && currentLine == lineLength && dst.hasRemaining) { - lineSeparator.foreach(dst.put(_)) + lineSeparator.foreach(dst.put) currentLine = 0 } } diff --git a/javalib/src/main/scala/java/util/Collections.scala b/javalib/src/main/scala/java/util/Collections.scala index 9c73ea75b3..2ddd53d33a 100644 --- a/javalib/src/main/scala/java/util/Collections.scala +++ b/javalib/src/main/scala/java/util/Collections.scala @@ -15,7 +15,7 @@ object Collections { new ImmutableSet(new AbstractSet[Any] with Serializable { override def size(): Int = 0 - override def iterator(): Iterator[Any] = emptyIterator[Any] + override def iterator(): Iterator[Any] = emptyIterator[Any]() }) } @@ -42,7 +42,7 @@ object Collections { private lazy val EMPTY_ENUMERATION: Enumeration[_] = { new Enumeration[Any] { - def hasMoreElements: Boolean = false + override def hasMoreElements(): Boolean = false def nextElement(): Any = throw new NoSuchElementException @@ -62,8 +62,8 @@ object Collections { Arrays.asList(arrayBuf).asInstanceOf[List[T] with RandomAccess] list 
match { - case list: RandomAccess => copyImpl(sortedList.iterator, list) - case _ => copyImpl(sortedList.iterator, list.listIterator) + case list: RandomAccess => copyImpl(sortedList.iterator(), list) + case _ => copyImpl(sortedList.iterator(), list.listIterator()) } } @@ -95,18 +95,18 @@ object Collections { list match { case _: RandomAccess => - binarySearch(0, list.size, list.get(_)) + binarySearch(0, list.size(), list.get(_)) case _ => def getFrom(iter: ListIterator[E])(index: Int): E = { - val shift = index - iter.nextIndex + val shift = index - iter.nextIndex() if (shift > 0) (0 until shift).foreach(_ => iter.next()) else (0 until -shift).foreach(_ => iter.previous()) iter.next() } - binarySearch(0, list.size, getFrom(list.listIterator)) + binarySearch(0, list.size(), getFrom(list.listIterator())) } } @@ -115,7 +115,7 @@ object Collections { @inline def reverseImpl[T](list: List[T]): Unit = { - val size = list.size + val size = list.size() list match { case list: RandomAccess => for (i <- 0 until size / 2) { @@ -184,7 +184,7 @@ object Collections { case _ => val it1 = list.listIterator(i) val it2 = list.listIterator(j) - if (!it1.hasNext || !it2.hasNext) + if (!it1.hasNext() || !it2.hasNext()) throw new IndexOutOfBoundsException val tmp = it1.next() it1.set(it2.next()) @@ -195,11 +195,11 @@ object Collections { def fill[T](list: List[_ >: T], obj: T): Unit = { list match { case list: RandomAccess => - (0 until list.size).foreach(list.set(_, obj)) + (0 until list.size()).foreach(list.set(_, obj)) case _ => - val iter = list.listIterator - while (iter.hasNext) { + val iter = list.listIterator() + while (iter.hasNext()) { iter.next() iter.set(obj) } @@ -209,22 +209,22 @@ object Collections { def copy[T](dest: List[_ >: T], src: List[_ <: T]): Unit = { (dest, src) match { case (dest: RandomAccess, src: RandomAccess) => copyImpl(src, dest) - case (dest: RandomAccess, _) => copyImpl(src.iterator, dest) - case (_, src: RandomAccess) => copyImpl(src, dest.listIterator) - case (_, _) => copyImpl(src.iterator, dest.listIterator) + case (dest: RandomAccess, _) => copyImpl(src.iterator(), dest) + case (_, src: RandomAccess) => copyImpl(src, dest.listIterator()) + case (_, _) => copyImpl(src.iterator(), dest.listIterator()) } } private def copyImpl[T](source: List[_ <: T] with RandomAccess, dest: List[T] with RandomAccess): Unit = { - (0 until source.size).foreach(i => dest.set(i, source.get(i))) + (0 until source.size()).foreach(i => dest.set(i, source.get(i))) } private def copyImpl[T](source: Iterator[_ <: T], dest: List[T] with RandomAccess): Unit = { val destEnd = dest.size() var i = 0 - while (source.hasNext) { + while (source.hasNext()) { if (i < destEnd) dest.set(i, source.next()) else @@ -235,8 +235,8 @@ object Collections { private def copyImpl[T](source: List[_ <: T] with RandomAccess, dest: ListIterator[T]): Unit = { - for (i <- 0 until source.size) { - if (dest.hasNext) { + for (i <- 0 until source.size()) { + if (dest.hasNext()) { dest.next() dest.set(source.get(i)) } else { @@ -247,8 +247,8 @@ object Collections { private def copyImpl[T](source: Iterator[_ <: T], dest: ListIterator[T]): Unit = { - while (source.hasNext) { - if (dest.hasNext) { + while (source.hasNext()) { + if (dest.hasNext()) { dest.next() dest.set(source.next()) } else { @@ -275,7 +275,7 @@ object Collections { rotateImpl(list, distance) private def rotateImpl[T](list: List[T], distance: Int): Unit = { - val listSize = list.size + val listSize = list.size() if (listSize > 1 && distance % listSize != 0) { def 
exchangeRotation(): Unit = { def indexModulo(i: Int): Int = modulo(i, listSize) @@ -324,7 +324,7 @@ object Collections { list match { case _: RandomAccess => var modified = false - for (i <- 0 until list.size) { + for (i <- 0 until list.size()) { if (list.get(i) === oldVal) { list.set(i, newVal) modified = true @@ -335,7 +335,7 @@ object Collections { case _ => @tailrec def replaceAll(iter: ListIterator[T], mod: Boolean): Boolean = { - if (iter.hasNext) { + if (iter.hasNext()) { val isEqual = iter.next() === oldVal if (isEqual) iter.set(newVal) @@ -358,15 +358,15 @@ object Collections { private def indexOfSubListImpl(source: List[_], target: List[_], fromStart: Boolean): Int = { - val targetSize = target.size + val targetSize = target.size() if (targetSize == 0) { if (fromStart) 0 - else source.size + else source.size() } else { - val indices = 0 to source.size - targetSize + val indices = 0 to source.size() - targetSize val indicesInOrder = if (fromStart) indices else indices.reverse indicesInOrder - .find { i => source.subList(i, i + target.size).equals(target) } + .find { i => source.subList(i, i + target.size()).equals(target) } .getOrElse(-1) } } @@ -550,10 +550,10 @@ object Collections { } def enumeration[T](c: Collection[T]): Enumeration[T] = { - val it = c.iterator + val it = c.iterator() new Enumeration[T] { - override def hasMoreElements: Boolean = - it.hasNext + override def hasMoreElements(): Boolean = + it.hasNext() override def nextElement(): T = it.next() @@ -570,7 +570,7 @@ object Collections { c.scalaOps.count(_ === o) def disjoint(c1: Collection[_], c2: Collection[_]): Boolean = { - if (c1.size < c2.size) + if (c1.size() < c2.size()) !c1.scalaOps.exists(elem => c2.contains(elem)) else !c2.scalaOps.exists(elem => c1.contains(elem)) @@ -589,11 +589,11 @@ object Collections { } def newSetFromMap[E](map: Map[E, java.lang.Boolean]): Set[E] = { - if (!map.isEmpty) + if (!map.isEmpty()) throw new IllegalArgumentException new WrappedSet[E, Set[E]] { - override protected val inner: Set[E] = map.keySet + override protected val inner: Set[E] = map.keySet() override def add(e: E): Boolean = map.put(e, true) == null @@ -639,18 +639,18 @@ object Collections { protected def inner: Coll def size(): Int = - inner.size + inner.size() - def isEmpty: Boolean = - inner.isEmpty + def isEmpty(): Boolean = + inner.isEmpty() def contains(o: Any): Boolean = inner.contains(o) def iterator(): Iterator[E] = - inner.iterator + inner.iterator() - def toArray: Array[AnyRef] = + def toArray(): Array[AnyRef] = inner.toArray() def toArray[T <: AnyRef](a: Array[T]): Array[T] = @@ -703,10 +703,10 @@ object Collections { inner.headSet(toElement) def first(): E = - inner.first + inner.first() def last(): E = - inner.last + inner.last() } private trait WrappedList[E] @@ -754,8 +754,8 @@ object Collections { def size(): Int = inner.size() - def isEmpty: Boolean = - inner.isEmpty + def isEmpty(): Boolean = + inner.isEmpty() def containsKey(key: scala.Any): Boolean = inner.containsKey(key) @@ -779,13 +779,13 @@ object Collections { inner.clear() def keySet(): Set[K] = - inner.keySet + inner.keySet() def values(): Collection[V] = - inner.values + inner.values() def entrySet(): Set[Map.Entry[K, V]] = - inner.entrySet.asInstanceOf[Set[Map.Entry[K, V]]] + inner.entrySet().asInstanceOf[Set[Map.Entry[K, V]]] override def toString(): String = inner.toString @@ -795,7 +795,7 @@ object Collections { extends WrappedMap[K, V, SortedMap[K, V]] with SortedMap[K, V] { def comparator(): Comparator[_ >: K] = - inner.comparator + 
inner.comparator() def subMap(fromKey: K, toKey: K): SortedMap[K, V] = inner.subMap(fromKey, toKey) @@ -807,17 +807,17 @@ object Collections { inner.tailMap(fromKey) def firstKey(): K = - inner.firstKey + inner.firstKey() def lastKey(): K = - inner.lastKey + inner.lastKey() } private trait WrappedIterator[E, Iter <: Iterator[E]] extends Iterator[E] { protected def inner: Iter def hasNext(): Boolean = - inner.hasNext + inner.hasNext() def next(): E = inner.next() @@ -830,16 +830,16 @@ object Collections { extends WrappedIterator[E, ListIterator[E]] with ListIterator[E] { def hasPrevious(): Boolean = - inner.hasPrevious + inner.hasPrevious() def previous(): E = inner.previous() def nextIndex(): Int = - inner.nextIndex + inner.nextIndex() def previousIndex(): Int = - inner.previousIndex + inner.previousIndex() def set(e: E): Unit = inner.set(e) @@ -855,12 +855,12 @@ object Collections { protected val eagerThrow: Boolean = true override def clear(): Unit = { - if (eagerThrow || !isEmpty) + if (eagerThrow || !isEmpty()) throw new UnsupportedOperationException } override def iterator(): Iterator[E] = - new UnmodifiableIterator(inner.iterator) + new UnmodifiableIterator(inner.iterator()) override def add(e: E): Boolean = throw new UnsupportedOperationException @@ -870,7 +870,7 @@ object Collections { else false override def addAll(c: Collection[_ <: E]): Boolean = - if (eagerThrow || !c.isEmpty) throw new UnsupportedOperationException + if (eagerThrow || !c.isEmpty()) throw new UnsupportedOperationException else false override def removeAll(c: Collection[_]): Boolean = { @@ -918,7 +918,7 @@ object Collections { with WrappedList[E] { override def addAll(index: Int, c: Collection[_ <: E]): Boolean = - if (eagerThrow || !c.isEmpty) throw new UnsupportedOperationException + if (eagerThrow || !c.isEmpty()) throw new UnsupportedOperationException else false override def set(index: Int, element: E): E = @@ -960,23 +960,23 @@ object Collections { } override def putAll(m: Map[_ <: K, _ <: V]): Unit = { - if (eagerThrow || !m.isEmpty) + if (eagerThrow || !m.isEmpty()) throw new UnsupportedOperationException } override def clear(): Unit = { - if (eagerThrow || !isEmpty) + if (eagerThrow || !isEmpty()) throw new UnsupportedOperationException } override def keySet(): Set[K] = - unmodifiableSet(super.keySet) + unmodifiableSet(super.keySet()) override def values(): Collection[V] = - unmodifiableCollection(super.values) + unmodifiableCollection(super.values()) override def entrySet(): Set[Map.Entry[K, V]] = - unmodifiableSet(super.entrySet) + unmodifiableSet(super.entrySet()) } private class ImmutableMap[K, V](inner: Map[K, V]) diff --git a/javalib/src/main/scala/java/util/Date.scala b/javalib/src/main/scala/java/util/Date.scala index 5492c42146..19cf0cbbe9 100644 --- a/javalib/src/main/scala/java/util/Date.scala +++ b/javalib/src/main/scala/java/util/Date.scala @@ -22,7 +22,7 @@ class Date(var milliseconds: Long) milliseconds.compareTo(anotherDate.getTime()) override def equals(obj: Any): Boolean = obj match { - case d: Date => d.getTime == milliseconds + case d: Date => d.getTime() == milliseconds case _ => false } diff --git a/javalib/src/main/scala/java/util/Formatter.scala b/javalib/src/main/scala/java/util/Formatter.scala index 6111d579c2..883b86cbaa 100644 --- a/javalib/src/main/scala/java/util/Formatter.scala +++ b/javalib/src/main/scala/java/util/Formatter.scala @@ -216,7 +216,7 @@ object Formatter { final val DECIMAL_FLOAT = new BigDecimalLayoutForm("DECIMAL_FLOAT", 1) def valueOf(name: String): 
BigDecimalLayoutForm = - _values.find(_.name == name).getOrElse { + _values.find(_.name() == name).getOrElse { throw new IllegalArgumentException( "No enum constant java.util.Formatter.BigDecimalLayoutForm." + name) } @@ -1050,7 +1050,7 @@ object Formatter { } def transform_g(): Unit = { - var precision = formatToken.getPrecision + var precision = formatToken.getPrecision() precision = if (0 == precision) 1 else precision formatToken.setPrecision(precision) diff --git a/javalib/src/main/scala/java/util/HashMap.scala b/javalib/src/main/scala/java/util/HashMap.scala index b776c8d412..927fb70414 100644 --- a/javalib/src/main/scala/java/util/HashMap.scala +++ b/javalib/src/main/scala/java/util/HashMap.scala @@ -138,12 +138,12 @@ class HashMap[K, V] protected (inner: mutable.Map[AnyRef, V]) protected def getNextForm(key: AnyRef): E - final override def next: E = { + final override def next(): E = { lastKey = Some(innerIterator.next()) getNextForm(lastKey.get) } - final override def hasNext: Boolean = + final override def hasNext(): Boolean = innerIterator.hasNext final override def remove(): Unit = { diff --git a/javalib/src/main/scala/java/util/HashSet.scala b/javalib/src/main/scala/java/util/HashSet.scala index 8e4e552248..15e86c424c 100644 --- a/javalib/src/main/scala/java/util/HashSet.scala +++ b/javalib/src/main/scala/java/util/HashSet.scala @@ -31,17 +31,17 @@ class HashSet[E] c.iterator().scalaOps.forall(contains) override def removeAll(c: Collection[_]): Boolean = { - val iter = c.iterator + val iter = c.iterator() var changed = false - while (iter.hasNext) changed = remove(iter.next()) || changed + while (iter.hasNext()) changed = remove(iter.next()) || changed changed } override def retainAll(c: Collection[_]): Boolean = { - val iter = iterator + val iter = iterator() var changed = false - while (iter.hasNext) { - val value = iter.next + while (iter.hasNext()) { + val value = iter.next() if (!c.contains(value)) changed = remove(value) || changed } @@ -54,7 +54,7 @@ class HashSet[E] override def addAll(c: Collection[_ <: E]): Boolean = { val iter = c.iterator() var changed = false - while (iter.hasNext) changed = add(iter.next()) || changed + while (iter.hasNext()) changed = add(iter.next()) || changed changed } diff --git a/javalib/src/main/scala/java/util/Hashtable.scala b/javalib/src/main/scala/java/util/Hashtable.scala index 5b87361881..46342981bb 100644 --- a/javalib/src/main/scala/java/util/Hashtable.scala +++ b/javalib/src/main/scala/java/util/Hashtable.scala @@ -26,7 +26,7 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) def size(): Int = inner.size - def isEmpty: Boolean = + def isEmpty(): Boolean = inner.isEmpty def keys(): ju.Enumeration[K] = Collections.enumeration(keySet()) @@ -80,7 +80,7 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) override def toString(): String = inner.iterator - .map(kv => kv._1.inner + "=" + kv._2) + .map(kv => kv._1.inner.toString + "=" + kv._2) .mkString("{", ", ", "}") def keySet(): ju.Set[K] = { @@ -93,8 +93,8 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) class UnboxedEntry( private[UnboxedEntry] val boxedEntry: ju.Map.Entry[Box[Any], V]) extends ju.Map.Entry[K, V] { - def getKey(): K = boxedEntry.getKey.inner.asInstanceOf[K] - def getValue(): V = boxedEntry.getValue + def getKey(): K = boxedEntry.getKey().inner.asInstanceOf[K] + def getValue(): V = boxedEntry.getValue() def setValue(value: V): V = boxedEntry.setValue(value) override def equals(o: Any): Boolean = o match { 
case o: UnboxedEntry => boxedEntry.equals(o.boxedEntry) diff --git a/javalib/src/main/scala/java/util/LinkedHashMap.scala b/javalib/src/main/scala/java/util/LinkedHashMap.scala index f3816f6ef0..76b1cd42f5 100644 --- a/javalib/src/main/scala/java/util/LinkedHashMap.scala +++ b/javalib/src/main/scala/java/util/LinkedHashMap.scala @@ -56,7 +56,7 @@ class LinkedHashMap[K, V] private (inner: mutable.LinkedHashMap[AnyRef, V], } } val iter = entrySet().iterator() - if (iter.hasNext && removeEldestEntry(iter.next())) + if (iter.hasNext() && removeEldestEntry(iter.next())) iter.remove() oldValue } diff --git a/javalib/src/main/scala/java/util/LinkedList.scala b/javalib/src/main/scala/java/util/LinkedList.scala index 75fcbc964c..37c7e99a3e 100644 --- a/javalib/src/main/scala/java/util/LinkedList.scala +++ b/javalib/src/main/scala/java/util/LinkedList.scala @@ -110,10 +110,10 @@ class LinkedList[E]() } override def remove(o: Any): Boolean = - _removeOccurrence(listIterator, o) + _removeOccurrence(listIterator(), o) override def addAll(c: Collection[_ <: E]): Boolean = { - val iter = c.iterator + val iter = c.iterator() val changed = iter.hasNext() while (iter.hasNext()) addLast(iter.next()) @@ -128,15 +128,15 @@ class LinkedList[E]() private def getNodeAt(index: Int): Node[E] = { if (index == 0) head - else if (index == size - 1) last + else if (index == size() - 1) last else { var current: Node[E] = null - if (index <= size / 2) { + if (index <= size() / 2) { current = head for (_ <- 0 until index) current = current.next } else { current = last - for (_ <- index until (size - 1)) current = current.prev + for (_ <- index until (size() - 1)) current = current.prev } current } @@ -228,7 +228,7 @@ class LinkedList[E]() else removeFirst() def pollLast(): E = - if (isEmpty) null.asInstanceOf[E] + if (isEmpty()) null.asInstanceOf[E] else removeLast() def push(e: E): Unit = @@ -263,7 +263,7 @@ class LinkedList[E]() private var i: Long = index private var currentNode: Node[E] = - if (index == size) null + if (index == size()) null else getNodeAt(index) @@ -273,10 +273,10 @@ class LinkedList[E]() LinkedList.this.last def hasNext(): Boolean = - i < size + i < size() def next(): E = { - if (i >= size) + if (i >= size()) throw new NoSuchElementException() last = i @@ -292,7 +292,7 @@ class LinkedList[E]() i > 0 def previous(): E = { - if (!hasPrevious) + if (!hasPrevious()) throw new NoSuchElementException() i -= 1 diff --git a/javalib/src/main/scala/java/util/Locale.scala b/javalib/src/main/scala/java/util/Locale.scala index ddd4cfffc9..74c5f7bdbf 100644 --- a/javalib/src/main/scala/java/util/Locale.scala +++ b/javalib/src/main/scala/java/util/Locale.scala @@ -94,7 +94,7 @@ final class Locale(val language: String, buf.append('_') buf.append(country) } - if (variant.length > 0 && buf.length > 0) { + if (variant.length > 0 && buf.nonEmpty) { if (0 == country.length) { buf.append("__") } else { diff --git a/javalib/src/main/scala/java/util/NavigableView.scala b/javalib/src/main/scala/java/util/NavigableView.scala index bb085d9802..9e564142b0 100644 --- a/javalib/src/main/scala/java/util/NavigableView.scala +++ b/javalib/src/main/scala/java/util/NavigableView.scala @@ -68,7 +68,7 @@ private[util] class NavigableView[E](original: NavigableSet[E], override def removeAll(c: Collection[_]): Boolean = { val iter = c.iterator() var changed = false - while (iter.hasNext) changed = remove(iter.next) || changed + while (iter.hasNext()) changed = remove(iter.next()) || changed changed } diff --git 
a/javalib/src/main/scala/java/util/PriorityQueue.scala b/javalib/src/main/scala/java/util/PriorityQueue.scala
index 958c39b906..a1040c87b1 100644
--- a/javalib/src/main/scala/java/util/PriorityQueue.scala
+++ b/javalib/src/main/scala/java/util/PriorityQueue.scala
@@ -73,7 +73,7 @@ class PriorityQueue[E] protected (ordering: Ordering[_ >: E],
       part: mutable.PriorityQueue[Box[E]]): mutable.PriorityQueue[Box[E]] = {
     if (inner.isEmpty) part
     else {
-      val next = inner.dequeue
+      val next = inner.dequeue()
       if (boxed == next) part
       else if (BoxOrdering.compare(boxed, next) > 0) part += next
diff --git a/javalib/src/main/scala/java/util/Properties.scala b/javalib/src/main/scala/java/util/Properties.scala
index 30ff36d9cb..5790fa95a5 100644
--- a/javalib/src/main/scala/java/util/Properties.scala
+++ b/javalib/src/main/scala/java/util/Properties.scala
@@ -109,7 +109,7 @@ class Properties(protected val defaults: Properties)
 
     writer.write('#')
     writer.write(new Date().toString)
-    writer.write(System.lineSeparator)
+    writer.write(System.lineSeparator())
 
     entrySet().scalaOps.foreach { entry =>
       writer.write(
@@ -119,7 +119,7 @@ class Properties(protected val defaults: Properties)
         encodeString(entry.getValue().asInstanceOf[String],
                      isKey = false,
                      toHex))
-      writer.write(System.lineSeparator)
+      writer.write(System.lineSeparator())
     }
     writer.flush()
   }
@@ -286,7 +286,7 @@ class Properties(protected val defaults: Properties)
         if (isCrlf) {
           index += 1
         }
-        writer.write(System.lineSeparator)
+        writer.write(System.lineSeparator())
 
         def noExplicitComment = {
          index + 1 < chars.length &&
@@ -308,7 +308,7 @@ class Properties(protected val defaults: Properties)
       }
       index += 1
     }
-    writer.write(System.lineSeparator)
+    writer.write(System.lineSeparator())
   }
 
   private def encodeString(string: String,
diff --git a/javalib/src/main/scala/java/util/TreeSet.scala b/javalib/src/main/scala/java/util/TreeSet.scala
index 174941fdc4..3c883cf5f2 100644
--- a/javalib/src/main/scala/java/util/TreeSet.scala
+++ b/javalib/src/main/scala/java/util/TreeSet.scala
@@ -106,7 +106,7 @@ class TreeSet[E](_comparator: Comparator[_ >: E])
 
   override def add(e: E): Boolean = {
     val boxed = Box(e)
-    if (isEmpty)
+    if (isEmpty())
       BoxOrdering.compare(boxed, boxed)
 
     inner.add(boxed)
@@ -121,16 +121,16 @@ class TreeSet[E](_comparator: Comparator[_ >: E])
   override def addAll(c: Collection[_ <: E]): Boolean = {
     val iter    = c.iterator()
     var changed = false
-    while (iter.hasNext) changed = add(iter.next()) || changed
+    while (iter.hasNext()) changed = add(iter.next()) || changed
     changed
   }
 
   override def removeAll(c: Collection[_]): Boolean = {
     val iter    = c.iterator()
     var changed = false
-    while (iter.hasNext)
+    while (iter.hasNext())
       changed =
-        inner.remove(Box(iter.next).asInstanceOf[Box[E]]) || changed
+        inner.remove(Box(iter.next()).asInstanceOf[Box[E]]) || changed
     changed
   }
 
@@ -145,10 +145,10 @@ class TreeSet[E](_comparator: Comparator[_ >: E])
     var base = new mutable.TreeSet[Box[E]]
     base ++= inner.range(boxedFrom, boxedTo)
     if (!fromInclusive)
-      base = base - boxedFrom
+      base = base diff Set(boxedFrom)
 
     if (toInclusive && inner.contains(boxedTo))
-      base = base + boxedTo
+      base = base union Set(boxedTo)
 
     base
   }
diff --git a/javalib/src/main/scala/java/util/UUID.scala b/javalib/src/main/scala/java/util/UUID.scala
index 0b576b0111..a13d7a8f9c 100644
--- a/javalib/src/main/scala/java/util/UUID.scala
+++ b/javalib/src/main/scala/java/util/UUID.scala
@@ -40,13 +40,13 @@ final class UUID private (private val i1: Int,
   def getLeastSignificantBits(): Long = {
     if (l2 eq null)
       l2 = 
(i3.toLong << 32) | (i4.toLong & 0xffffffffL) - l2.longValue + l2.longValue() } def getMostSignificantBits(): Long = { if (l1 eq null) l1 = (i1.toLong << 32) | (i2.toLong & 0xffffffffL) - l1.longValue + l1.longValue() } def version(): Int = diff --git a/javalib/src/main/scala/java/util/WeakHashMap.scala b/javalib/src/main/scala/java/util/WeakHashMap.scala index ab30b1f33b..d87997e4cb 100644 --- a/javalib/src/main/scala/java/util/WeakHashMap.scala +++ b/javalib/src/main/scala/java/util/WeakHashMap.scala @@ -128,12 +128,12 @@ class WeakHashMap[K, V] protected (inner: mutable.Map[Box[K], V]) protected def getNextForm(key: Box[K]): E - final override def next: E = { + final override def next(): E = { lastKey = Some(innerIterator.next()) getNextForm(lastKey.get) } - final override def hasNext: Boolean = + final override def hasNext(): Boolean = innerIterator.hasNext final override def remove(): Unit = { diff --git a/javalib/src/main/scala/java/util/concurrent/TimeUnit.scala b/javalib/src/main/scala/java/util/concurrent/TimeUnit.scala index c1706cfa3b..570e20d4ba 100644 --- a/javalib/src/main/scala/java/util/concurrent/TimeUnit.scala +++ b/javalib/src/main/scala/java/util/concurrent/TimeUnit.scala @@ -116,7 +116,7 @@ object TimeUnit { def values(): Array[TimeUnit] = _values.clone() def valueOf(name: String): TimeUnit = { - _values.find(_.name == name).getOrElse { + _values.find(_.name() == name).getOrElse { throw new IllegalArgumentException("No enum const TimeUnit." + name) } } diff --git a/javalib/src/main/scala/java/util/jar/JarEntry.scala b/javalib/src/main/scala/java/util/jar/JarEntry.scala index 2d41859419..db1f729440 100644 --- a/javalib/src/main/scala/java/util/jar/JarEntry.scala +++ b/javalib/src/main/scala/java/util/jar/JarEntry.scala @@ -93,8 +93,8 @@ class JarEntry(private val ze: ZipEntry) extends ZipEntry(ze) { if (asigners.isEmpty()) { null } else { - val tmp = new Array[CodeSigner](asigners.size) - System.arraycopy(asigners, 0, tmp, 0, asigners.size) + val tmp = new Array[CodeSigner](asigners.size()) + System.arraycopy(asigners, 0, tmp, 0, asigners.size()) tmp } } diff --git a/javalib/src/main/scala/java/util/jar/JarVerifier.scala b/javalib/src/main/scala/java/util/jar/JarVerifier.scala index 0ea6e43fd2..928ec06376 100644 --- a/javalib/src/main/scala/java/util/jar/JarVerifier.scala +++ b/javalib/src/main/scala/java/util/jar/JarVerifier.scala @@ -50,7 +50,7 @@ private[jar] class JarVerifier(jarName: String) { } private[jar] def initEntry(name: String): VerifierEntry = - if (man == null || signatures.size == 0) { + if (man == null || signatures.size() == 0) { null } else { val attributes = man.getAttributes(name) @@ -72,7 +72,7 @@ private[jar] class JarVerifier(jarName: String) { } // entry is not signed - if (certs.size == 0) { + if (certs.isEmpty) { null } else { var algorithms = attributes.getValue("Digest-Algorithms") @@ -110,7 +110,7 @@ private[jar] class JarVerifier(jarName: String) { } else { var result = true val it = metaEntries.keySet().iterator() - while (result && it.hasNext) { + while (result && it.hasNext()) { val key = it.next() if (key.endsWith(".DSA") || key.endsWith(".RSA")) { verifyCertificate(key) @@ -202,7 +202,7 @@ private[jar] class JarVerifier(jarName: String) { false, false)) { val it = entries.entrySet().iterator() - while (it.hasNext) { + while (it.hasNext()) { val entry = it.next() val key = entry.getKey() val value = entry.getValue() @@ -233,7 +233,7 @@ private[jar] class JarVerifier(jarName: String) { man = mf private[jar] def isSignedJar(): Boolean = 
- certificates.size > 0 + certificates.size() > 0 private def verify(attributes: Attributes, entry: String, diff --git a/javalib/src/main/scala/java/util/jar/Manifest.scala b/javalib/src/main/scala/java/util/jar/Manifest.scala index 68468eb601..cd8d3a81db 100644 --- a/javalib/src/main/scala/java/util/jar/Manifest.scala +++ b/javalib/src/main/scala/java/util/jar/Manifest.scala @@ -135,8 +135,8 @@ object Manifest { } } out.write(LINE_SEPARATOR) - val i = manifest.getEntries.keySet.iterator - while (i.hasNext) { + val i = manifest.getEntries().keySet().iterator() + while (i.hasNext()) { val key = i.next() writeEntry(out, NAME_ATTRIBUTE, key, encoder, buffer) val attrib = manifest.entries.get(key) diff --git a/javalib/src/main/scala/java/util/regex/Matcher.scala b/javalib/src/main/scala/java/util/regex/Matcher.scala index 186911ca08..6ba5520471 100644 --- a/javalib/src/main/scala/java/util/regex/Matcher.scala +++ b/javalib/src/main/scala/java/util/regex/Matcher.scala @@ -62,7 +62,7 @@ final class Matcher private[regex] (var _pattern: Pattern, def group(name: String): String = underlying.group(name) - def groupCount: Int = underlying.groupCount + def groupCount(): Int = underlying.groupCount() def hasAnchoringBounds(): Boolean = anchoringBoundsInUse diff --git a/javalib/src/main/scala/java/util/regex/Pattern.scala b/javalib/src/main/scala/java/util/regex/Pattern.scala index 33cc0f0435..dd611f50df 100644 --- a/javalib/src/main/scala/java/util/regex/Pattern.scala +++ b/javalib/src/main/scala/java/util/regex/Pattern.scala @@ -6,6 +6,7 @@ import scalanative.{regex => snRegex} import java.util.function.Predicate import java.util.stream.Stream import java.util.stream.WrappedScalaStream +import scala.collection.compat.immutable.LazyList // Inspired & informed by: // https://github.com/google/re2j/blob/master/java/com/google/re2j/Pattern.java @@ -100,7 +101,7 @@ object Pattern { } def matches(regex: String, input: CharSequence): Boolean = - compile(regex).matcher(input).matches + compile(regex).matcher(input).matches() def quote(s: String): String = s"\\Q${s}\\E" } @@ -131,7 +132,7 @@ final class Pattern private[regex] (_regex: String, _flags: Int) { compiled.split(input, limit) def splitAsStream(input: CharSequence): Stream[String] = - new WrappedScalaStream(split(input).toStream, None) + new WrappedScalaStream(split(input).to(LazyList), None) override def toString: String = _regex } diff --git a/javalib/src/main/scala/java/util/regex/PatternSyntaxException.scala b/javalib/src/main/scala/java/util/regex/PatternSyntaxException.scala index 6abf649724..052363b9bd 100644 --- a/javalib/src/main/scala/java/util/regex/PatternSyntaxException.scala +++ b/javalib/src/main/scala/java/util/regex/PatternSyntaxException.scala @@ -6,7 +6,7 @@ class PatternSyntaxException(desc: String, regex: String, index: Int) def getPattern: String = regex def getDescription: String = desc def getIndex: Int = index - override def getMessage: String = { + override def getMessage(): String = { val cursor = (" " * index) + "^" s"""|$desc near index $index diff --git a/javalib/src/main/scala/java/util/stream/Stream.scala b/javalib/src/main/scala/java/util/stream/Stream.scala index d577338728..946faf8acb 100644 --- a/javalib/src/main/scala/java/util/stream/Stream.scala +++ b/javalib/src/main/scala/java/util/stream/Stream.scala @@ -1,8 +1,7 @@ package java.util.stream import java.util.function.{Function, Predicate} - -import scala.collection.immutable.{Stream => SStream} +import scala.collection.compat.immutable.{LazyList => SStream} 
trait Stream[+T] extends BaseStream[T, Stream[T]] { def flatMap[R](mapper: Function[_ >: T, _ <: Stream[_ <: R]]): Stream[R] @@ -22,5 +21,5 @@ object Stream { def builder[T](): Builder[T] = new WrappedScalaStream.Builder[T] def empty[T](): Stream[T] = new WrappedScalaStream(SStream.empty[T], None) def of[T](values: Array[AnyRef]): Stream[T] = - new WrappedScalaStream(values.asInstanceOf[Array[T]].toStream, None) + new WrappedScalaStream(values.asInstanceOf[Array[T]].to(SStream), None) } diff --git a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala index 9786294c66..5f6fc22b6f 100644 --- a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala +++ b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala @@ -1,19 +1,19 @@ package java.util.stream import java.util.Iterator +import scala.collection.compat.immutable.{LazyList => SStream} import java.util.function.{Function, Predicate} -import scala.collection.immutable.{Stream => SStream} class WrappedScalaStream[T](private val underlying: SStream[T], closeHandler: Option[Runnable]) extends Stream[T] { - override def close(): Unit = closeHandler.foreach(_.run) + override def close(): Unit = closeHandler.foreach(_.run()) override def isParallel(): Boolean = false - override def iterator(): Iterator[T] = + override def iterator: Iterator[T] = WrappedScalaStream.scala2javaIterator(underlying.iterator) override def parallel(): Stream[T] = this override def sequential(): Stream[T] = this - override def unordered: Stream[T] = this + override def unordered(): Stream[T] = this override def onClose(closeHandler: Runnable): Stream[T] = new WrappedScalaStream(underlying, Some(closeHandler)) @@ -32,7 +32,7 @@ object WrappedScalaStream { val buffer = new scala.collection.mutable.ListBuffer[T]() override def accept(t: T): Unit = buffer += t override def build(): Stream[T] = - new WrappedScalaStream(buffer.toStream, None) + new WrappedScalaStream(buffer.to(SStream), None) } def scala2javaIterator[T]( @@ -52,13 +52,13 @@ private final class CompositeStream[T](substreams: Seq[Stream[T]], closeHandler.foreach(_.run()) } override def isParallel(): Boolean = false - override def iterator(): Iterator[T] = + override def iterator: Iterator[T] = new Iterator[T] { private val its = substreams.iterator private var currentIt: Iterator[_ <: T] = EmptyIterator override def hasNext(): Boolean = - if (currentIt.hasNext) true + if (currentIt.hasNext()) true else if (its.hasNext) { currentIt = its.next().iterator hasNext() @@ -77,7 +77,7 @@ private final class CompositeStream[T](substreams: Seq[Stream[T]], override def parallel(): Stream[T] = this override def sequential(): Stream[T] = this - override def unordered: Stream[T] = this + override def unordered(): Stream[T] = this override def onClose(closeHandler: Runnable): Stream[T] = new CompositeStream(substreams, Some(closeHandler)) diff --git a/javalib/src/main/scala/java/util/zip/Checksum.scala b/javalib/src/main/scala/java/util/zip/Checksum.scala index 6a9795083a..aaeebd04ed 100644 --- a/javalib/src/main/scala/java/util/zip/Checksum.scala +++ b/javalib/src/main/scala/java/util/zip/Checksum.scala @@ -3,7 +3,7 @@ package java.util.zip trait Checksum { def getValue(): Long def reset(): Unit - def update(v: Int) + def update(v: Int): Unit def update(buf: Array[Byte]): Unit def update(buf: Array[Byte], off: Int, nbytes: Int): Unit } diff --git a/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala 
b/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala index 375082406d..4f970e00aa 100644 --- a/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala +++ b/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala @@ -115,7 +115,7 @@ class GZIPInputStream(in: InputStream, size: Int) } } - private def readFully(buffer: Array[Byte], offset: Int, length: Int) { + private def readFully(buffer: Array[Byte], offset: Int, length: Int) = { var result: Int = 0 var off: Int = offset var l: Int = length @@ -131,7 +131,7 @@ class GZIPInputStream(in: InputStream, size: Int) private def readZeroTerminated(hcrc: Boolean): Unit = { var result: Int = 0 - while ({ result = in.read; result > 0 }) { + while ({ result = in.read(); result > 0 }) { if (hcrc) { crc.update(result) } diff --git a/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala b/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala index ca99edf2a9..f9c123a548 100644 --- a/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala +++ b/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala @@ -36,7 +36,7 @@ class ZipOutputStream(_out: OutputStream, charset: Charset) } } - def closeEntry() { + def closeEntry() = { if (cDir == null) { throw new IOException() } else if (currentEntry == null) { @@ -46,7 +46,7 @@ class ZipOutputStream(_out: OutputStream, charset: Charset) } // Verify values for STORED types - if (currentEntry.getMethod == STORED) { + if (currentEntry.getMethod() == STORED) { if (crc.getValue() != currentEntry.crc) { throw new ZipException("Crc mismatch") } else if (currentEntry.size != crc.tbytes) { @@ -61,11 +61,11 @@ class ZipOutputStream(_out: OutputStream, charset: Charset) writeLong(out, EXTSIG) writeLong(out, { currentEntry.crc = crc.getValue(); currentEntry.crc }) writeLong(out, { - currentEntry.compressedSize = `def`.getTotalOut; + currentEntry.compressedSize = `def`.getTotalOut(); currentEntry.compressedSize }) writeLong(out, { - currentEntry.size = `def`.getTotalIn; currentEntry.size + currentEntry.size = `def`.getTotalIn(); currentEntry.size }) } // Update the CentralDirectory @@ -80,8 +80,8 @@ class ZipOutputStream(_out: OutputStream, charset: Charset) writeShort(cDir, currentEntry.modDate) writeLong(cDir, crc.getValue()) if (currentEntry.getMethod() == DEFLATED) { - curOffset += writeLong(cDir, `def`.getTotalOut).toInt - writeLong(cDir, `def`.getTotalIn) + curOffset += writeLong(cDir, `def`.getTotalOut()).toInt + writeLong(cDir, `def`.getTotalIn()) } else { curOffset += writeLong(cDir, crc.tbytes).toInt writeLong(cDir, crc.tbytes) diff --git a/javalib/src/main/scala/javax/security/auth/x500/X500Principal.scala b/javalib/src/main/scala/javax/security/auth/x500/X500Principal.scala index 7c959d7ee8..f458b4ce35 100644 --- a/javalib/src/main/scala/javax/security/auth/x500/X500Principal.scala +++ b/javalib/src/main/scala/javax/security/auth/x500/X500Principal.scala @@ -3,5 +3,5 @@ package javax.security.auth.x500 import java.security.Principal final class X500Principal(name: String) extends Principal with Serializable { - override def getName: String = name + override def getName(): String = name } diff --git a/javalib/src/main/scala/niocharset/ISO_8859_1_And_US_ASCII_Common.scala b/javalib/src/main/scala/niocharset/ISO_8859_1_And_US_ASCII_Common.scala index cdd2668daf..87f17e0f99 100644 --- a/javalib/src/main/scala/niocharset/ISO_8859_1_And_US_ASCII_Common.scala +++ b/javalib/src/main/scala/niocharset/ISO_8859_1_And_US_ASCII_Common.scala @@ -39,22 +39,22 @@ private[niocharset] abstract class 
ISO_8859_1_And_US_ASCII_Common protected ( def decodeLoop(in: ByteBuffer, out: CharBuffer): CoderResult = { // scalastyle:off return val maxValue = ISO_8859_1_And_US_ASCII_Common.this.maxValue - val inRemaining = in.remaining + val inRemaining = in.remaining() if (inRemaining == 0) { CoderResult.UNDERFLOW } else { - val outRemaining = out.remaining + val outRemaining = out.remaining() val overflow = outRemaining < inRemaining val rem = if (overflow) outRemaining else inRemaining - if (in.hasArray && out.hasArray) { - val inArr = in.array - val inOffset = in.arrayOffset + if (in.hasArray() && out.hasArray()) { + val inArr = in.array() + val inOffset = in.arrayOffset() val inStart = in.position() + inOffset val inEnd = inStart + rem - val outArr = out.array - val outOffset = out.arrayOffset + val outArr = out.array() + val outOffset = out.arrayOffset() val outStart = out.position() + outOffset var inPos = inStart @@ -105,22 +105,22 @@ private[niocharset] abstract class ISO_8859_1_And_US_ASCII_Common protected ( import java.lang.Character.{MIN_SURROGATE, MAX_SURROGATE} val maxValue = ISO_8859_1_And_US_ASCII_Common.this.maxValue - val inRemaining = in.remaining + val inRemaining = in.remaining() if (inRemaining == 0) { CoderResult.UNDERFLOW } else { - if (in.hasArray && out.hasArray) { - val outRemaining = out.remaining + if (in.hasArray() && out.hasArray()) { + val outRemaining = out.remaining() val overflow = outRemaining < inRemaining val rem = if (overflow) outRemaining else inRemaining - val inArr = in.array - val inOffset = in.arrayOffset + val inArr = in.array() + val inOffset = in.arrayOffset() val inStart = in.position() + inOffset val inEnd = inStart + rem - val outArr = out.array - val outOffset = out.arrayOffset + val outArr = out.array() + val outOffset = out.arrayOffset() val outStart = out.position() + outOffset @inline @@ -171,9 +171,9 @@ private[niocharset] abstract class ISO_8859_1_And_US_ASCII_Common protected ( @inline @tailrec def loop(): CoderResult = { - if (!in.hasRemaining) { + if (!in.hasRemaining()) { CoderResult.UNDERFLOW - } else if (!out.hasRemaining) { + } else if (!out.hasRemaining()) { CoderResult.OVERFLOW } else { val c = in.get() @@ -185,7 +185,7 @@ private[niocharset] abstract class ISO_8859_1_And_US_ASCII_Common protected ( in.position(in.position() - 1) CoderResult.malformedForLength(1) } else if (Character.isHighSurrogate(c)) { - if (in.hasRemaining) { + if (in.hasRemaining()) { val c2 = in.get() in.position(in.position() - 2) if (Character.isLowSurrogate(c2)) { diff --git a/javalib/src/main/scala/niocharset/UTF_16_Common.scala b/javalib/src/main/scala/niocharset/UTF_16_Common.scala index d249486b60..cb03d063a8 100644 --- a/javalib/src/main/scala/niocharset/UTF_16_Common.scala +++ b/javalib/src/main/scala/niocharset/UTF_16_Common.scala @@ -41,7 +41,7 @@ private[niocharset] abstract class UTF_16_Common protected ( @inline @tailrec def loop(): CoderResult = { - if (in.remaining < 2) CoderResult.UNDERFLOW + if (in.remaining() < 2) CoderResult.UNDERFLOW else { val b1 = in.get() & 0xff val b2 = in.get() & 0xff @@ -75,7 +75,7 @@ private[niocharset] abstract class UTF_16_Common protected ( in.position(in.position() - 2) CoderResult.malformedForLength(2) } else if (!Character.isHighSurrogate(c1)) { - if (out.remaining == 0) { + if (out.remaining() == 0) { in.position(in.position() - 2) CoderResult.OVERFLOW } else { @@ -83,7 +83,7 @@ private[niocharset] abstract class UTF_16_Common protected ( loop() } } else { - if (in.remaining < 2) { + if (in.remaining() < 2) { 
in.position(in.position() - 2) CoderResult.UNDERFLOW } else { @@ -95,7 +95,7 @@ private[niocharset] abstract class UTF_16_Common protected ( in.position(in.position() - 4) CoderResult.malformedForLength(2) } else { - if (out.remaining < 2) { + if (out.remaining() < 2) { in.position(in.position() - 4) CoderResult.OVERFLOW } else { @@ -131,7 +131,7 @@ private[niocharset] abstract class UTF_16_Common protected ( def encodeLoop(in: CharBuffer, out: ByteBuffer): CoderResult = { if (needToWriteBOM) { - if (out.remaining < 2) { + if (out.remaining() < 2) { return CoderResult.OVERFLOW // scalastyle:ignore } else { // Always encode in big endian @@ -157,7 +157,7 @@ private[niocharset] abstract class UTF_16_Common protected ( @inline @tailrec def loop(): CoderResult = { - if (in.remaining == 0) CoderResult.UNDERFLOW + if (in.remaining() == 0) CoderResult.UNDERFLOW else { val c1 = in.get() @@ -165,7 +165,7 @@ private[niocharset] abstract class UTF_16_Common protected ( in.position(in.position() - 1) CoderResult.malformedForLength(1) } else if (!Character.isHighSurrogate(c1)) { - if (out.remaining < 2) { + if (out.remaining() < 2) { in.position(in.position() - 1) CoderResult.OVERFLOW } else { @@ -173,7 +173,7 @@ private[niocharset] abstract class UTF_16_Common protected ( loop() } } else { - if (in.remaining < 1) { + if (in.remaining() < 1) { in.position(in.position() - 1) CoderResult.UNDERFLOW } else { @@ -183,7 +183,7 @@ private[niocharset] abstract class UTF_16_Common protected ( in.position(in.position() - 2) CoderResult.malformedForLength(1) } else { - if (out.remaining < 4) { + if (out.remaining() < 4) { in.position(in.position() - 2) CoderResult.OVERFLOW } else { diff --git a/javalib/src/main/scala/niocharset/UTF_8.scala b/javalib/src/main/scala/niocharset/UTF_8.scala index 5f61b58295..5e8aa129e3 100644 --- a/javalib/src/main/scala/niocharset/UTF_8.scala +++ b/javalib/src/main/scala/niocharset/UTF_8.scala @@ -67,7 +67,7 @@ private[niocharset] object UTF_8 private class Decoder extends CharsetDecoder(UTF_8, 1.0f, 1.0f) { def decodeLoop(in: ByteBuffer, out: CharBuffer): CoderResult = { - if (in.hasArray && out.hasArray) + if (in.hasArray() && out.hasArray()) decodeLoopArray(in, out) else decodeLoopNoArray(in, out) @@ -75,13 +75,13 @@ private[niocharset] object UTF_8 private def decodeLoopArray(in: ByteBuffer, out: CharBuffer): CoderResult = { - val inArray = in.array - val inOffset = in.arrayOffset + val inArray = in.array() + val inOffset = in.arrayOffset() val inStart = in.position() + inOffset val inEnd = in.limit() + inOffset - val outArray = out.array - val outOffset = out.arrayOffset + val outArray = out.array() + val outOffset = out.arrayOffset() val outStart = out.position() + outOffset val outEnd = out.limit() + outOffset @@ -164,13 +164,13 @@ private[niocharset] object UTF_8 result } - if (!in.hasRemaining) { + if (!in.hasRemaining()) { CoderResult.UNDERFLOW } else { val leading = in.get().toInt if (leading >= 0) { // US-ASCII repertoire - if (!out.hasRemaining) { + if (!out.hasRemaining()) { finalize(1, CoderResult.OVERFLOW) } else { out.put(leading.toChar) @@ -187,7 +187,7 @@ private[niocharset] object UTF_8 val decoded = { @inline def getOr0(): Int = - if (in.hasRemaining) { + if (in.hasRemaining()) { bytesRead += 1; in.get() } else 0 // 0 is not a valid next byte @@ -200,7 +200,7 @@ private[niocharset] object UTF_8 finalize(bytesRead, decoded.failure) } else if (decoded.low == 0) { // not a surrogate pair - if (!out.hasRemaining) + if (!out.hasRemaining()) finalize(bytesRead, 
CoderResult.OVERFLOW) else { out.put(decoded.high) @@ -208,7 +208,7 @@ private[niocharset] object UTF_8 } } else { // a surrogate pair - if (out.remaining < 2) + if (out.remaining() < 2) finalize(bytesRead, CoderResult.OVERFLOW) else { out.put(decoded.high) @@ -293,7 +293,7 @@ private[niocharset] object UTF_8 private class Encoder extends CharsetEncoder(UTF_8, 1.1f, 4.0f) { def encodeLoop(in: CharBuffer, out: ByteBuffer): CoderResult = { - if (in.hasArray && out.hasArray) + if (in.hasArray() && out.hasArray()) encodeLoopArray(in, out) else encodeLoopNoArray(in, out) @@ -301,13 +301,13 @@ private[niocharset] object UTF_8 private def encodeLoopArray(in: CharBuffer, out: ByteBuffer): CoderResult = { - val inArray = in.array - val inOffset = in.arrayOffset + val inArray = in.array() + val inOffset = in.arrayOffset() val inStart = in.position() + inOffset val inEnd = in.limit() + inOffset - val outArray = out.array - val outOffset = out.arrayOffset + val outArray = out.array() + val outOffset = out.arrayOffset() val outStart = out.position() + outOffset val outEnd = out.limit() + outOffset @@ -395,14 +395,14 @@ private[niocharset] object UTF_8 result } - if (!in.hasRemaining) { + if (!in.hasRemaining()) { CoderResult.UNDERFLOW } else { val c1 = in.get() if (c1 < 0x80) { // Encoding in one byte - if (!out.hasRemaining) + if (!out.hasRemaining()) finalize(1, CoderResult.OVERFLOW) else { out.put(c1.toByte) @@ -410,7 +410,7 @@ private[niocharset] object UTF_8 } } else if (c1 < 0x800) { // Encoding in 2 bytes (by construction, not a surrogate) - if (out.remaining < 2) + if (out.remaining() < 2) finalize(1, CoderResult.OVERFLOW) else { out.put(((c1 >> 6) | 0xc0).toByte) @@ -419,7 +419,7 @@ private[niocharset] object UTF_8 } } else if (!isSurrogate(c1)) { // Not a surrogate, encoding in 3 bytes - if (out.remaining < 3) + if (out.remaining() < 3) finalize(1, CoderResult.OVERFLOW) else { out.put(((c1 >> 12) | 0xe0).toByte) @@ -429,7 +429,7 @@ private[niocharset] object UTF_8 } } else if (isHighSurrogate(c1)) { // Should have a low surrogate that follows - if (!in.hasRemaining) + if (!in.hasRemaining()) finalize(1, CoderResult.UNDERFLOW) else { val c2 = in.get() @@ -437,7 +437,7 @@ private[niocharset] object UTF_8 finalize(2, CoderResult.malformedForLength(1)) } else { // Surrogate pair, encoding in 4 bytes - if (out.remaining < 4) + if (out.remaining() < 4) finalize(2, CoderResult.OVERFLOW) else { val cp = toCodePoint(c1, c2) diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala index 1ffe55a9c3..1bc54b5215 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala @@ -45,7 +45,7 @@ object FileHelpers { } def createNewFile(path: String, throwOnError: Boolean = false): Boolean = - if (path.isEmpty) { + if (path.isEmpty()) { throw new IOException("No such file or directory") } else if (exists(path)) { false @@ -68,7 +68,7 @@ object FileHelpers { else if (minLength && prefix.length < 3) throw new IllegalArgumentException("Prefix string too short") else { - val tmpDir = Option(dir).fold(tempDir)(_.toString) + val tmpDir = Option(dir).fold(tempDir())(_.toString) val newSuffix = Option(suffix).getOrElse(".tmp") var result: File = null do { diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/NativePosixFileAttributeView.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/NativePosixFileAttributeView.scala index 
f5b475b65b..776741b4d9 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/NativePosixFileAttributeView.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/NativePosixFileAttributeView.scala @@ -38,7 +38,7 @@ final class NativePosixFileAttributeView(path: Path, options: Array[LinkOption]) override def setOwner(owner: UserPrincipal): Unit = Zone { implicit z => - val passwd = getPasswd(toCString(owner.getName)) + val passwd = getPasswd(toCString(owner.getName())) if (unistd.chown(toCString(path.toString), passwd._2, -1.toUInt) != 0) throwIOException() } @@ -54,11 +54,11 @@ final class NativePosixFileAttributeView(path: Path, options: Array[LinkOption]) } } - override def getOwner(): UserPrincipal = attributes.owner + override def getOwner(): UserPrincipal = attributes.owner() override def setGroup(group: GroupPrincipal): Unit = Zone { implicit z => - val _group = getGroup(toCString(group.getName)) + val _group = getGroup(toCString(group.getName())) val err = unistd.chown(toCString(path.toString), -1.toUInt, _group._2) if (err != 0) { @@ -107,7 +107,7 @@ final class NativePosixFileAttributeView(path: Path, options: Array[LinkOption]) private def fileGroup()(implicit z: Zone) = getGroup(st_gid) - override def fileKey = st_ino.asInstanceOf[Object] + override def fileKey() = st_ino.asInstanceOf[Object] override lazy val isDirectory = stat.S_ISDIR(st_mode) == 1 @@ -121,26 +121,26 @@ final class NativePosixFileAttributeView(path: Path, options: Array[LinkOption]) override lazy val isOther = !isDirectory && !isRegularFile && !isSymbolicLink - override def lastAccessTime = + override def lastAccessTime() = FileTime.from(st_atime, TimeUnit.SECONDS) - override def lastModifiedTime = + override def lastModifiedTime() = FileTime.from(st_mtime, TimeUnit.SECONDS) - override def creationTime = + override def creationTime() = FileTime.from(st_ctime, TimeUnit.SECONDS) - override def group = new GroupPrincipal { + override def group() = new GroupPrincipal { override val getName = Zone { implicit z => fromCString(fileGroup()._1) } } - override def owner = new UserPrincipal { + override def owner() = new UserPrincipal { override val getName = Zone { implicit z => fromCString(filePasswd()._1) } } - override def permissions = { + override def permissions() = { val set = new HashSet[PosixFilePermission] NativePosixFileAttributeView.permMap.foreach { case (flag, value) => @@ -149,24 +149,24 @@ final class NativePosixFileAttributeView(path: Path, options: Array[LinkOption]) set } - override def size = st_size + override def size() = st_size } - override def asMap(): HashMap[String, Object] = { + override def asMap: HashMap[String, Object] = { val attrs = attributes val values = List( - "lastModifiedTime" -> attrs.lastModifiedTime, - "lastAccessTime" -> attrs.lastAccessTime, - "creationTime" -> attrs.creationTime, - "size" -> Long.box(attrs.size), + "lastModifiedTime" -> attrs.lastModifiedTime(), + "lastAccessTime" -> attrs.lastAccessTime(), + "creationTime" -> attrs.creationTime(), + "size" -> Long.box(attrs.size()), "isRegularFile" -> Boolean.box(attrs.isRegularFile), "isDirectory" -> Boolean.box(attrs.isDirectory), "isSymbolicLink" -> Boolean.box(attrs.isSymbolicLink), "isOther" -> Boolean.box(attrs.isOther), - "fileKey" -> attrs.fileKey, - "permissions" -> attrs.permissions, - "group" -> attrs.group + "fileKey" -> attrs.fileKey(), + "permissions" -> attrs.permissions(), + "group" -> attrs.group() ) val map = new HashMap[String, Object]() diff --git 
a/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystem.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystem.scala index 19f5562e6c..0788044fea 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystem.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystem.scala @@ -69,7 +69,7 @@ class UnixFileSystem(override val provider: FileSystemProvider, } } - override def newWatchService: WatchService = + override def newWatchService(): WatchService = throw new UnsupportedOperationException() override def supportedFileAttributeViews(): Set[String] = { diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystemProvider.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystemProvider.scala index 43959ff556..89f392f770 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystemProvider.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/UnixFileSystemProvider.scala @@ -25,34 +25,34 @@ class UnixFileSystemProvider extends FileSystemProvider { "file" override def newFileSystem(uri: URI, env: Map[String, _]): FileSystem = - if (uri.getPath != "/") { + if (uri.getPath() != "/") { throw new IllegalArgumentException("Path component should be '/'") - } else if (uri.getScheme != "file") { + } else if (uri.getScheme() != "file") { throw new IllegalArgumentException("URI does not match this provider.") } else { throw new FileSystemAlreadyExistsException() } override def getFileSystem(uri: URI): FileSystem = - if (uri.getPath != "/") { + if (uri.getPath() != "/") { throw new IllegalArgumentException("Path component should be '/'") - } else if (uri.getScheme != "file") { + } else if (uri.getScheme() != "file") { throw new IllegalArgumentException("URI does not match this provider") } else { fs } override def getPath(uri: URI): Path = - if (uri.getScheme != "file") { + if (uri.getScheme() != "file") { throw new IllegalArgumentException("URI scheme is not \"file\"") - } else if (!uri.getPath.startsWith("/")) { + } else if (!uri.getPath().startsWith("/")) { throw new IllegalArgumentException("URI is not hierarchical") } else { - fs.getPath(uri.getPath, Array.empty) + fs.getPath(uri.getPath(), Array.empty) } override def newFileSystem(path: Path, env: Map[String, _]): FileSystem = - newFileSystem(path.toUri, env) + newFileSystem(path.toUri(), env) override def newFileChannel(path: Path, options: Set[_ <: OpenOption], @@ -99,7 +99,7 @@ class UnixFileSystemProvider extends FileSystemProvider { Files.isHidden(path) override def checkAccess(path: Path, modes: Array[AccessMode]): Unit = { - val file = path.toFile + val file = path.toFile() if (modes.contains(AccessMode.READ) && !file.canRead()) throw new AccessDeniedException(path.toString) if (modes.contains(AccessMode.WRITE) && !file.canWrite()) diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/UnixPath.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/UnixPath.scala index 373cc8505e..4086282a51 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/UnixPath.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/UnixPath.scala @@ -20,7 +20,7 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { private lazy val path: String = removeRedundantSlashes(rawPath) private lazy val offsets = - if (path.isEmpty) Array(-1, 0) + if (path.isEmpty()) Array(-1, 0) else if (path == "/") Array(0) else { var i = 0 @@ -43,23 +43,23 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { 
private lazy val _isAbsolute = rawPath.startsWith("/") - private lazy val root = if (isAbsolute) new UnixPath(fs, "/") else null + private lazy val root = if (isAbsolute()) new UnixPath(fs, "/") else null private lazy val fileName = if (path == "/") null - else if (path.isEmpty) this + else if (path.isEmpty()) this else new UnixPath(fs, path.split("/").last) private lazy val parent = { val nameCount = getNameCount() - if (nameCount == 0 || (nameCount == 1 && !isAbsolute)) null - else if (isAbsolute) + if (nameCount == 0 || (nameCount == 1 && !isAbsolute())) null + else if (isAbsolute()) new UnixPath(fs, "/" + subpath(0, nameCount - 1).toString) else subpath(0, nameCount - 1) } private lazy val nameCount = - if (rawPath.isEmpty) 1 + if (rawPath.isEmpty()) 1 else path.split("/").filter(_.nonEmpty).length private lazy val normalizedPath = new UnixPath(fs, normalized(this)) @@ -69,7 +69,7 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { else new UnixPath(fs, toFile().getAbsolutePath()) private lazy val file = - if (isAbsolute) new File(path) + if (isAbsolute()) new File(path) else new File(s"${fs.defaultDirectory}/$path") private lazy val uri = @@ -94,11 +94,11 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { override def getNameCount(): Int = offsets.size - 1 @inline private def getNameString(index: Int): String = { - val nameCount = getNameCount + val nameCount = getNameCount() if (index < 0 || nameCount == 0 || index >= nameCount) throw new IllegalArgumentException else { - if (path.isEmpty) null + if (path.isEmpty()) null else path.substring(offsets(index) + 1, offsets(index + 1)) } } @@ -112,12 +112,12 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { new UnixPath(fs, (beginIndex until endIndex).map(getName).mkString("/")) override def startsWith(other: Path): Boolean = - if (fs.provider == other.getFileSystem.provider) { + if (fs.provider == other.getFileSystem().provider()) { val otherLength = other.getNameCount() val thisLength = getNameCount() if (otherLength > thisLength) false - else if (isAbsolute ^ other.isAbsolute) false + else if (isAbsolute() ^ other.isAbsolute()) false else { (0 until otherLength).forall(i => getName(i) == other.getName(i)) } @@ -129,14 +129,14 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { startsWith(new UnixPath(fs, other)) override def endsWith(other: Path): Boolean = - if (fs.provider == other.getFileSystem.provider) { + if (fs.provider == other.getFileSystem().provider()) { val otherLength = other.getNameCount() val thisLength = getNameCount() if (otherLength > thisLength) false - else if (!other.isAbsolute) { + else if (!other.isAbsolute()) { (0 until otherLength).forall(i => getName(thisLength - 1 - i) == other.getName(otherLength - 1 - i)) - } else if (isAbsolute) { + } else if (isAbsolute()) { this == other } else { false @@ -151,8 +151,8 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { override def normalize(): Path = normalizedPath override def resolve(other: Path): Path = - if (other.isAbsolute || path.isEmpty) other - else if (other.toString.isEmpty) this + if (other.isAbsolute() || path.isEmpty()) other + else if (other.toString.isEmpty()) this else new UnixPath(fs, path + "/" + other.toString()) override def resolve(other: String): Path = @@ -168,17 +168,17 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { resolveSibling(new UnixPath(fs, other)) 
override def relativize(other: Path): Path = { - if (isAbsolute ^ other.isAbsolute) { + if (isAbsolute() ^ other.isAbsolute()) { throw new IllegalArgumentException("'other' is different type of Path") - } else if (path.isEmpty) { + } else if (path.isEmpty()) { other } else if (other.startsWith(this)) { - other.subpath(getNameCount, other.getNameCount) + other.subpath(getNameCount(), other.getNameCount()) } else if (getParent() == null) { new UnixPath(fs, "../" + other.toString()) } else { val next = getParent().relativize(other).toString() - if (next.isEmpty) new UnixPath(fs, "..") + if (next.isEmpty()) new UnixPath(fs, "..") else new UnixPath(fs, "../" + next) } } @@ -205,7 +205,7 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { override def remove(): Unit = throw new UnsupportedOperationException() override def hasNext(): Boolean = i < getNameCount() override def next(): Path = - if (hasNext) { + if (hasNext()) { val name = getName(i) i += 1 name @@ -215,8 +215,8 @@ class UnixPath(private val fs: UnixFileSystem, rawPath: String) extends Path { } override def compareTo(other: Path): Int = - if (fs.provider == other.getFileSystem.provider) { - this.toString.compareTo(other.toString) + if (fs.provider == other.getFileSystem().provider()) { + this.toString().compareTo(other.toString) } else { throw new ClassCastException() } @@ -268,7 +268,7 @@ private object UnixPath { val buffer: StringBuffer = new StringBuffer(str) var previous = '/' var i = idx + 1 - while (i < buffer.length) { + while (i < buffer.length()) { val current = buffer.charAt(i) if (previous == '/' && current == '/') { buffer.deleteCharAt(i) From f1a49d9db52168c25c487e875b0cb214eddc277b Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 1 Oct 2020 17:58:59 +0200 Subject: [PATCH 07/75] Replace deprecated scala.collection.JavaConverters with scala.jdk.CollectionConverters --- tools/src/main/scala/scala/scalanative/build/Discover.scala | 2 +- tools/src/main/scala/scala/scalanative/build/IO.scala | 2 +- tools/src/main/scala/scala/scalanative/build/LLVM.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/src/main/scala/scala/scalanative/build/Discover.scala b/tools/src/main/scala/scala/scalanative/build/Discover.scala index 0ca8cab1a7..4b985a5ee0 100644 --- a/tools/src/main/scala/scala/scalanative/build/Discover.scala +++ b/tools/src/main/scala/scala/scalanative/build/Discover.scala @@ -2,7 +2,7 @@ package scala.scalanative package build import java.nio.file.{Files, Path, Paths} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try import scala.sys.process._ import scalanative.build.IO.RichPath diff --git a/tools/src/main/scala/scala/scalanative/build/IO.scala b/tools/src/main/scala/scala/scalanative/build/IO.scala index d773d1ad33..3848eac5b9 100644 --- a/tools/src/main/scala/scala/scalanative/build/IO.scala +++ b/tools/src/main/scala/scala/scalanative/build/IO.scala @@ -100,7 +100,7 @@ private[scalanative] object IO { /** Look for a zip entry path string using a matcher function */ def existsInJar(path: Path, matcher: String => Boolean): Boolean = { import java.util.zip.ZipFile - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val zf = new ZipFile(path.toFile) val it = zf.entries().asScala it.exists(e => matcher(e.getName)) diff --git a/tools/src/main/scala/scala/scalanative/build/LLVM.scala b/tools/src/main/scala/scala/scalanative/build/LLVM.scala index fddde67374..1318959d5e 
100644 --- a/tools/src/main/scala/scala/scalanative/build/LLVM.scala +++ b/tools/src/main/scala/scala/scalanative/build/LLVM.scala @@ -3,7 +3,7 @@ package build import java.nio.file.{Files, Path, Paths, StandardCopyOption} import java.util.Arrays -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try import scala.sys.process._ import scalanative.build.IO.RichPath From c94f59ff8a56a2a1649ecd92ed66cf1463beb1ec Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Fri, 2 Oct 2020 12:15:37 +0200 Subject: [PATCH 08/75] Enable implicitConversions in scalalib --- build.sbt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 8101e0047d..a47ba0df90 100644 --- a/build.sbt +++ b/build.sbt @@ -448,7 +448,9 @@ lazy val scalalib = scalacOptions -= "-deprecation", scalacOptions += "-deprecation:false", // The option below is needed since Scala 2.12.12. - scalacOptions += "-language:postfixOps" + scalacOptions += "-language:postfixOps", + // The option below is needed since Scala 2.13.0. + scalacOptions += "-language:implicitConversions" ) .settings(mavenPublishSettings) .settings( From a0db29cbee16136ce6c0d5f3c2e4779292ef2d96 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Fri, 2 Oct 2020 14:28:16 +0200 Subject: [PATCH 09/75] Move common overrides to scalalib/overrides --- scalalib/overrides-2.11/scala/package.scala | 133 ------------------ .../scala/package.scala | 0 2 files changed, 133 deletions(-) delete mode 100644 scalalib/overrides-2.11/scala/package.scala rename scalalib/{overrides-2.12 => overrides}/scala/package.scala (100%) diff --git a/scalalib/overrides-2.11/scala/package.scala b/scalalib/overrides-2.11/scala/package.scala deleted file mode 100644 index 41db14e080..0000000000 --- a/scalalib/overrides-2.11/scala/package.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -/** - * Core Scala types. They are always available without an explicit import. - * @contentDiagram hideNodes "scala.Serializable" - */ -package object scala { - type Throwable = java.lang.Throwable - type Exception = java.lang.Exception - type Error = java.lang.Error - - type RuntimeException = java.lang.RuntimeException - type NullPointerException = java.lang.NullPointerException - type ClassCastException = java.lang.ClassCastException - type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException - type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException - type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException - type UnsupportedOperationException = java.lang.UnsupportedOperationException - type IllegalArgumentException = java.lang.IllegalArgumentException - type NoSuchElementException = java.util.NoSuchElementException - type NumberFormatException = java.lang.NumberFormatException - type AbstractMethodError = java.lang.AbstractMethodError - type InterruptedException = java.lang.InterruptedException - - // A dummy used by the specialization annotation. 
- lazy val AnyRef = new Specializable { - override def toString = "object AnyRef" - } - - type TraversableOnce[+A] = scala.collection.TraversableOnce[A] - - type Traversable[+A] = scala.collection.Traversable[A] - lazy val Traversable = scala.collection.Traversable - - type Iterable[+A] = scala.collection.Iterable[A] - lazy val Iterable = scala.collection.Iterable - - type Seq[+A] = scala.collection.Seq[A] - lazy val Seq = scala.collection.Seq - - type IndexedSeq[+A] = scala.collection.IndexedSeq[A] - lazy val IndexedSeq = scala.collection.IndexedSeq - - type Iterator[+A] = scala.collection.Iterator[A] - lazy val Iterator = scala.collection.Iterator - - type BufferedIterator[+A] = scala.collection.BufferedIterator[A] - - type List[+A] = scala.collection.immutable.List[A] - lazy val List = scala.collection.immutable.List - - lazy val Nil = scala.collection.immutable.Nil - - type ::[A] = scala.collection.immutable.::[A] - lazy val :: = scala.collection.immutable.:: - - lazy val +: = scala.collection.+: - lazy val :+ = scala.collection.:+ - - type Stream[+A] = scala.collection.immutable.Stream[A] - lazy val Stream = scala.collection.immutable.Stream - lazy val #:: = scala.collection.immutable.Stream.#:: - - type Vector[+A] = scala.collection.immutable.Vector[A] - lazy val Vector = scala.collection.immutable.Vector - - type StringBuilder = scala.collection.mutable.StringBuilder - lazy val StringBuilder = scala.collection.mutable.StringBuilder - - type Range = scala.collection.immutable.Range - lazy val Range = scala.collection.immutable.Range - - // Numeric types which were moved into scala.math.* - - type BigDecimal = scala.math.BigDecimal - lazy val BigDecimal = scala.math.BigDecimal - - type BigInt = scala.math.BigInt - lazy val BigInt = scala.math.BigInt - - type Equiv[T] = scala.math.Equiv[T] - lazy val Equiv = scala.math.Equiv - - type Fractional[T] = scala.math.Fractional[T] - lazy val Fractional = scala.math.Fractional - - type Integral[T] = scala.math.Integral[T] - lazy val Integral = scala.math.Integral - - type Numeric[T] = scala.math.Numeric[T] - lazy val Numeric = scala.math.Numeric - - type Ordered[T] = scala.math.Ordered[T] - lazy val Ordered = scala.math.Ordered - - type Ordering[T] = scala.math.Ordering[T] - lazy val Ordering = scala.math.Ordering - - type PartialOrdering[T] = scala.math.PartialOrdering[T] - type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T] - - type Either[+A, +B] = scala.util.Either[A, B] - lazy val Either = scala.util.Either - - type Left[+A, +B] = scala.util.Left[A, B] - lazy val Left = scala.util.Left - - type Right[+A, +B] = scala.util.Right[A, B] - lazy val Right = scala.util.Right - - // Annotations which we might move to annotation.* -/* - type SerialVersionUID = annotation.SerialVersionUID - type deprecated = annotation.deprecated - type deprecatedName = annotation.deprecatedName - type inline = annotation.inline - type native = annotation.native - type noinline = annotation.noinline - type remote = annotation.remote - type specialized = annotation.specialized - type transient = annotation.transient - type throws = annotation.throws - type unchecked = annotation.unchecked.unchecked - type volatile = annotation.volatile - */ -} diff --git a/scalalib/overrides-2.12/scala/package.scala b/scalalib/overrides/scala/package.scala similarity index 100% rename from scalalib/overrides-2.12/scala/package.scala rename to scalalib/overrides/scala/package.scala From 11acf24a8bbe2094a0e09d91f70925a4a4321494 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: 
Fri, 2 Oct 2020 17:46:11 +0200 Subject: [PATCH 10/75] Reimport missing java.Iterable, fix FilesSuite to work with underyling LazyList since 2.13 --- .../src/main/scala/java/nio/file/Files.scala | 27 ++++++++++++++----- .../test/scala/java/nio/file/FilesTest.scala | 6 ++++- 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/javalib/src/main/scala/java/nio/file/Files.scala b/javalib/src/main/scala/java/nio/file/Files.scala index d354b6fe4a..2b05169ecf 100644 --- a/javalib/src/main/scala/java/nio/file/Files.scala +++ b/javalib/src/main/scala/java/nio/file/Files.scala @@ -1,32 +1,45 @@ package java.nio.file +import java.lang.Iterable import java.io.{ BufferedReader, BufferedWriter, File, FileOutputStream, - IOException, InputStream, InputStreamReader, + IOException, OutputStream, OutputStreamWriter } + import java.nio.file.attribute._ import java.nio.charset.{Charset, StandardCharsets} import java.nio.channels.{FileChannel, SeekableByteChannel} + import java.util.function.BiPredicate -import java.util.{EnumSet, HashMap, HashSet, LinkedList, List, Map, Set} +import java.util.{ + EnumSet, + HashMap, + HashSet, + Iterator, + LinkedList, + List, + Map, + Set +} import java.util.stream.{Stream, WrappedScalaStream} + import scalanative.unsigned._ import scalanative.unsafe._ import scalanative.libc._ -import scalanative.posix.{dirent, fcntl, limits, unistd} -import dirent._ -import java.nio.file.StandardCopyOption._ +import scalanative.posix.{dirent, fcntl, limits, unistd}, dirent._ import scalanative.posix.sys.stat import scalanative.nio.fs.{FileHelpers, UnixException} import scala.collection.compat.immutable.{LazyList => SStream} + import scala.collection.immutable.{Map => SMap, Set => SSet} +import StandardCopyOption._ object Files { @@ -708,8 +721,8 @@ object Files { StandardOpenOption.WRITE) else _options val writer = newBufferedWriter(path, cs, options) - val it = lines.iterator - while (it.hasNext) { + val it = lines.iterator() + while (it.hasNext()) { writer.append(it.next()) writer.newLine() } diff --git a/unit-tests/src/test/scala/java/nio/file/FilesTest.scala b/unit-tests/src/test/scala/java/nio/file/FilesTest.scala index f74d53ff57..cbbede50fd 100644 --- a/unit-tests/src/test/scala/java/nio/file/FilesTest.scala +++ b/unit-tests/src/test/scala/java/nio/file/FilesTest.scala @@ -1045,7 +1045,11 @@ class FilesTest { assertThrows( classOf[NoSuchFileException], - Files.find(d1, 10, predicate, FileVisitOption.FOLLOW_LINKS).iterator) + Files + .find(d1, 10, predicate, FileVisitOption.FOLLOW_LINKS) + .iterator + .hasNext //used to materialize underlying LazyList (since 2.13) + ) // Test broken symlink when not following links. 
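Why the extra .hasNext matters: from Scala 2.13 on, the Scala stream wrapped by Files.find is backed by a LazyList (imported as SStream in Files.scala above), which performs no work until an element is demanded, so obtaining the iterator alone no longer surfaces the NoSuchFileException. Below is a minimal sketch of that forcing behaviour, assuming Scala 2.13 (on 2.11/2.12 the compat equivalent is scala.collection.compat.immutable.LazyList); the throwing head is a hypothetical stand-in for the failing directory walk, not the actual Files.find implementation.

  import java.nio.file.NoSuchFileException

  object LazyStreamForcingSketch {
    def main(args: Array[String]): Unit = {
      // Nothing is evaluated while the LazyList is constructed: cons takes its
      // head and tail by name, so the throw below is deferred.
      val failing: LazyList[String] =
        LazyList.cons(
          // Hypothetical stand-in for the lazy directory walk over a missing directory.
          throw new NoSuchFileException("missing-dir"),
          LazyList.empty
        )

      val it = failing.iterator // still lazy: no exception has been thrown yet

      // Demanding an element forces the head, and only then does the exception
      // escape -- which is why the test above appends .hasNext before asserting
      // that NoSuchFileException is thrown.
      try it.hasNext
      catch { case e: NoSuchFileException => println("thrown on first access: " + e) }
    }
  }
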
From 46a8ee68b874a5f7987b4b9e5b8777f32cd33f5c Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 13:43:04 +0200 Subject: [PATCH 11/75] Test interface/compiler cross-build compat fixes --- .../scala/scalanative/junit/JUnitTask.scala | 2 +- .../scala/scalanative/junit/Reporter.scala | 4 +-- .../main/scala/sbt/testing/Selectors.scala | 27 ++++++++++--------- .../src/main/scala/sbt/testing/TaskDef.scala | 10 +++---- .../scalanative/compiler/CompatReporter.scala | 13 +++++++++ .../scalanative/compiler/CompatReporter.scala | 13 +++++++++ .../scalanative/compiler/CompatReporter.scala | 11 ++++++++ .../scalanative/compiler/NIRCompiler.scala | 13 ++++----- 8 files changed, 64 insertions(+), 29 deletions(-) create mode 100644 testing-compiler/src/main/scala-2.11/scala/scalanative/compiler/CompatReporter.scala create mode 100644 testing-compiler/src/main/scala-2.12/scala/scalanative/compiler/CompatReporter.scala create mode 100644 testing-compiler/src/main/scala-2.13/scala/scalanative/compiler/CompatReporter.scala diff --git a/junit-runtime/src/main/scala/scala/scalanative/junit/JUnitTask.scala b/junit-runtime/src/main/scala/scala/scalanative/junit/JUnitTask.scala index d93c433f72..6636500603 100644 --- a/junit-runtime/src/main/scala/scala/scalanative/junit/JUnitTask.scala +++ b/junit-runtime/src/main/scala/scala/scalanative/junit/JUnitTask.scala @@ -115,7 +115,7 @@ private[junit] final class JUnitTask(val taskDef: TaskDef, private def loadBootstrapper(reporter: Reporter): Option[Bootstrapper] = { val bootstrapperName = - taskDef.fullyQualifiedName + "$scalanative$junit$bootstrapper$" + taskDef.fullyQualifiedName() + "$scalanative$junit$bootstrapper$" try { val b = Reflect diff --git a/junit-runtime/src/main/scala/scala/scalanative/junit/Reporter.scala b/junit-runtime/src/main/scala/scala/scalanative/junit/Reporter.scala index 6fe1b0d27e..4aa3910e87 100644 --- a/junit-runtime/src/main/scala/scala/scalanative/junit/Reporter.scala +++ b/junit-runtime/src/main/scala/scala/scalanative/junit/Reporter.scala @@ -232,14 +232,14 @@ private[junit] final class Reporter(eventHandler: EventHandler, private def findTestFileName(trace: Array[StackTraceElement]): String = trace - .find(_.getClassName == taskDef.fullyQualifiedName) + .find(_.getClassName == taskDef.fullyQualifiedName()) .map(_.getFileName) .orNull private def stackTraceElementToString(e: StackTraceElement, testFileName: String): String = { val highlight = settings.color && { - taskDef.fullyQualifiedName == e.getClassName || + taskDef.fullyQualifiedName() == e.getClassName || (testFileName != null && testFileName == e.getFileName) } var r = "" diff --git a/test-interface-sbt-defs/src/main/scala/sbt/testing/Selectors.scala b/test-interface-sbt-defs/src/main/scala/sbt/testing/Selectors.scala index f6ec5f7e6f..9da078a710 100644 --- a/test-interface-sbt-defs/src/main/scala/sbt/testing/Selectors.scala +++ b/test-interface-sbt-defs/src/main/scala/sbt/testing/Selectors.scala @@ -50,12 +50,12 @@ final class TestSelector(_testName: String) extends Selector with Serializable { def testName(): String = _testName override def equals(that: Any): Boolean = that match { - case that: TestSelector => this.testName == that.testName + case that: TestSelector => this.testName() == that.testName() case _ => false } - override def hashCode(): Int = testName.hashCode() - override def toString(): String = s"TestSelector($testName)" + override def hashCode(): Int = testName().hashCode() + override def toString(): String = s"TestSelector(${testName()})" } /** 
Information in addition to a test class name that identifies a nested suite @@ -76,12 +76,12 @@ final class NestedSuiteSelector(_suiteId: String) def suiteId(): String = _suiteId override def equals(that: Any): Boolean = that match { - case that: NestedSuiteSelector => this.suiteId == that.suiteId + case that: NestedSuiteSelector => this.suiteId() == that.suiteId() case _ => false } - override def hashCode(): Int = suiteId.hashCode() - override def toString(): String = s"NestedSuiteSelector($suiteId)" + override def hashCode(): Int = suiteId().hashCode() + override def toString(): String = s"NestedSuiteSelector(${suiteId()})" } /** Information in addition to a test class name that identifies a test in a @@ -112,18 +112,19 @@ final class NestedTestSelector(_suiteId: String, _testName: String) override def equals(that: Any): Boolean = that match { case that: NestedTestSelector => - this.suiteId == that.suiteId && this.testName == that.testName + this.suiteId() == that.suiteId() && this.testName() == that.testName() case _ => false } override def hashCode(): Int = { var retVal = 17 - retVal = 31 * retVal + suiteId.hashCode() - retVal = 31 * retVal + testName.hashCode() + retVal = 31 * retVal + suiteId().hashCode() + retVal = 31 * retVal + testName().hashCode() retVal } - override def toString(): String = s"NestedTestSelector($suiteId, $testName)" + override def toString(): String = + s"NestedTestSelector(${suiteId()}, ${testName()})" } /** Information that identifies zero to many tests directly contained in a test @@ -152,11 +153,11 @@ final class TestWildcardSelector(_testWildcard: String) override def equals(that: Any): Boolean = that match { case that: TestWildcardSelector => - this.testWildcard == that.testWildcard + this.testWildcard() == that.testWildcard() case _ => false } - override def hashCode(): Int = testWildcard.hashCode() + override def hashCode(): Int = testWildcard().hashCode() - override def toString(): String = s"TestWildcardSelector($testWildcard)" + override def toString(): String = s"TestWildcardSelector(${testWildcard()})" } diff --git a/test-interface-sbt-defs/src/main/scala/sbt/testing/TaskDef.scala b/test-interface-sbt-defs/src/main/scala/sbt/testing/TaskDef.scala index 9a1f99191b..d687596bea 100644 --- a/test-interface-sbt-defs/src/main/scala/sbt/testing/TaskDef.scala +++ b/test-interface-sbt-defs/src/main/scala/sbt/testing/TaskDef.scala @@ -101,11 +101,11 @@ final class TaskDef(_fullyQualifiedName: String, override def equals(that: Any): Boolean = that match { case that: TaskDef => - this.fullyQualifiedName == that.fullyQualifiedName && - this.fingerprint == that.fingerprint && - this.explicitlySpecified == that.explicitlySpecified && - Arrays.equals(this.selectors.asInstanceOf[Array[AnyRef]], - that.selectors.asInstanceOf[Array[AnyRef]]) + this.fullyQualifiedName() == that.fullyQualifiedName() && + this.fingerprint() == that.fingerprint() && + this.explicitlySpecified() == that.explicitlySpecified() && + Arrays.equals(this.selectors().asInstanceOf[Array[AnyRef]], + that.selectors().asInstanceOf[Array[AnyRef]]) case _ => false } diff --git a/testing-compiler/src/main/scala-2.11/scala/scalanative/compiler/CompatReporter.scala b/testing-compiler/src/main/scala-2.11/scala/scalanative/compiler/CompatReporter.scala new file mode 100644 index 0000000000..9491c6e4df --- /dev/null +++ b/testing-compiler/src/main/scala-2.11/scala/scalanative/compiler/CompatReporter.scala @@ -0,0 +1,13 @@ +package scala.scalanative.compiler + +import 
scala.tools.nsc.reporters.AbstractReporter +import scala.reflect.internal.util.Position + +private[scalanative] trait CompatReporter extends AbstractReporter { + def add(pos: Position, msg: String, severity: Severity): Unit + + override def display(pos: Position, msg: String, severity: Severity): Unit = + add(pos, msg, severity) + + override def displayPrompt(): Unit = () +} diff --git a/testing-compiler/src/main/scala-2.12/scala/scalanative/compiler/CompatReporter.scala b/testing-compiler/src/main/scala-2.12/scala/scalanative/compiler/CompatReporter.scala new file mode 100644 index 0000000000..9491c6e4df --- /dev/null +++ b/testing-compiler/src/main/scala-2.12/scala/scalanative/compiler/CompatReporter.scala @@ -0,0 +1,13 @@ +package scala.scalanative.compiler + +import scala.tools.nsc.reporters.AbstractReporter +import scala.reflect.internal.util.Position + +private[scalanative] trait CompatReporter extends AbstractReporter { + def add(pos: Position, msg: String, severity: Severity): Unit + + override def display(pos: Position, msg: String, severity: Severity): Unit = + add(pos, msg, severity) + + override def displayPrompt(): Unit = () +} diff --git a/testing-compiler/src/main/scala-2.13/scala/scalanative/compiler/CompatReporter.scala b/testing-compiler/src/main/scala-2.13/scala/scalanative/compiler/CompatReporter.scala new file mode 100644 index 0000000000..98f9619be0 --- /dev/null +++ b/testing-compiler/src/main/scala-2.13/scala/scalanative/compiler/CompatReporter.scala @@ -0,0 +1,11 @@ +package scala.scalanative.compiler + +import scala.tools.nsc.reporters.FilteringReporter +import scala.reflect.internal.util.Position + +private[scalanative] trait CompatReporter extends FilteringReporter { + def add(pos: Position, msg: String, severity: Severity): Unit + + override def doReport(pos: Position, msg: String, severity: Severity): Unit = + add(pos, msg, severity) +} diff --git a/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala b/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala index ed3b00e2e3..3f6c8e40dd 100644 --- a/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala +++ b/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala @@ -2,14 +2,12 @@ package scala.scalanative import scala.reflect.internal.util.{BatchSourceFile, NoFile, SourceFile} import scala.reflect.internal.util.Position - import scala.tools.cmd.CommandLineParser import scala.tools.nsc.{CompilerCommand, Global, Settings} import scala.tools.nsc.io.AbstractFile -import scala.tools.nsc.reporters.AbstractReporter - import java.nio.file.{Files, Path} import java.io.File +import scala.scalanative.compiler.CompatReporter /** * Helper class to compile snippets of code. @@ -57,14 +55,13 @@ class NIRCompiler(outputDir: Path) extends api.NIRCompiler { * on ERRORs. 
*/ private class TestReporter(override val settings: Settings) - extends AbstractReporter { - override def display(pos: Position, msg: String, severity: Severity): Unit = + extends CompatReporter { + override def add(pos: Position, msg: String, severity: Severity): Unit = severity match { - case INFO | WARNING => () case ERROR => reportError(msg) + case INFO | WARNING => () + case _ => () } - - override def displayPrompt(): Unit = () } /** From fed20a6500de7584a67f94cc0d1a0ab11a6c0d23 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 13:43:56 +0200 Subject: [PATCH 12/75] Unit-tests cross-build compat fixes --- unit-tests/src/main/scala/tests/NativeFingerprint.scala | 2 +- unit-tests/src/test/scala/java/util/AbstractMapTest.scala | 2 +- unit-tests/src/test/scala/java/util/HashMapTest.scala | 2 +- .../src/test/scala/scala/scalanative/posix/TimeSuite.scala | 4 ++-- .../scalanative/reflect/ReflectiveInstantiationSuite.scala | 2 +- .../src/test/scala/scala/scalanative/regex/ApiTestUtils.scala | 2 +- .../test/scala/scala/scalanative/regex/CharClassSuite.scala | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/unit-tests/src/main/scala/tests/NativeFingerprint.scala b/unit-tests/src/main/scala/tests/NativeFingerprint.scala index 9d8b50b46e..3f100f5e2e 100644 --- a/unit-tests/src/main/scala/tests/NativeFingerprint.scala +++ b/unit-tests/src/main/scala/tests/NativeFingerprint.scala @@ -3,7 +3,7 @@ package tests import sbt.testing.SubclassFingerprint object NativeFingerprint extends SubclassFingerprint { - override def isModule: Boolean = true + override def isModule(): Boolean = true override def requireNoArgConstructor(): Boolean = false diff --git a/unit-tests/src/test/scala/java/util/AbstractMapTest.scala b/unit-tests/src/test/scala/java/util/AbstractMapTest.scala index 41896de859..9822c1739d 100644 --- a/unit-tests/src/test/scala/java/util/AbstractMapTest.scala +++ b/unit-tests/src/test/scala/java/util/AbstractMapTest.scala @@ -7,7 +7,7 @@ import java.{util => ju} import scala.reflect.ClassTag abstract class AbstractMapTest extends MapTest { - def factory(): AbstractMapFactory + def factory: AbstractMapFactory } abstract class AbstractMapFactory extends MapFactory { diff --git a/unit-tests/src/test/scala/java/util/HashMapTest.scala b/unit-tests/src/test/scala/java/util/HashMapTest.scala index ddb70722c5..7b112619d9 100644 --- a/unit-tests/src/test/scala/java/util/HashMapTest.scala +++ b/unit-tests/src/test/scala/java/util/HashMapTest.scala @@ -7,7 +7,7 @@ import java.{util => ju} import scala.reflect.ClassTag class HashMapTest extends MapTest { - def factory(): HashMapFactory = new HashMapFactory + def factory: HashMapFactory = new HashMapFactory } class HashMapFactory extends AbstractMapFactory { diff --git a/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala b/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala index 63bac23fd4..654680673e 100644 --- a/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala +++ b/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala @@ -7,8 +7,8 @@ import timeOps.tmOps object TimeSuite extends tests.Suite { tzset() - val now_time_t: time_t = time.time(null) - val epoch: time_t = 0 + val now_time_t: time_t = time(null) + val epoch: time_t = 0L test("asctime() with a given known state should match its representation") { Zone { implicit z => diff --git a/unit-tests/src/test/scala/scala/scalanative/reflect/ReflectiveInstantiationSuite.scala 
b/unit-tests/src/test/scala/scala/scalanative/reflect/ReflectiveInstantiationSuite.scala index 89be90a97a..a9dfef1d0d 100644 --- a/unit-tests/src/test/scala/scala/scalanative/reflect/ReflectiveInstantiationSuite.scala +++ b/unit-tests/src/test/scala/scala/scalanative/reflect/ReflectiveInstantiationSuite.scala @@ -285,7 +285,7 @@ object ReflectiveInstantiationSuite extends tests.Suite { { () => @EnableReflectiveInstantiation class Foo - } + }.apply() } } diff --git a/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala b/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala index 16bf1cbbfa..366368d698 100644 --- a/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala +++ b/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala @@ -8,7 +8,7 @@ import ScalaTestCompat.fail object ApiTestUtils extends tests.Suite { def assertArrayEquals[A](arr1: Array[A], arr2: Array[A]) = - assert(arr1.deep == arr2.deep) + assert(arr1 sameElements arr2) /** * Asserts that IllegalArgumentException is thrown from compile with flags. diff --git a/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala b/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala index 9ec91b97c7..8e6559dde8 100644 --- a/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala +++ b/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala @@ -10,7 +10,7 @@ object CharClassSuite extends tests.Suite { private def assertClass(cc: CharClass, expected: Array[Int]): Unit = { val actual = cc.toArray - if (actual.deep != expected.deep) + if (!(actual sameElements expected)) throw new AssertionError( "Incorrect CharClass value:\n" + "Expected: " + expected .mkString(", ") + "\n" + "Actual: " + actual.mkString(", ")) From d9cfd7481d51d3d50f84cf28de1275fcc1a3105e Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 14:00:17 +0200 Subject: [PATCH 13/75] Replace serializable symbol to use java.io.Serializable --- .../main/scala/scala/scalanative/nscplugin/NirGenSymbols.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenSymbols.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenSymbols.scala index 3992650142..48387e55d1 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenSymbols.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenSymbols.scala @@ -4,7 +4,7 @@ import scala.scalanative.nir.Global import scala.scalanative.nir.Type object NirGenSymbols { - val serializable = Global.Top("scala.Serializable") + val serializable = Global.Top("java.io.Serializable") val jlClass = Global.Top("java.lang.Class") val jlClassRef = Type.Ref(jlClass) From 7f304436422dcd0b247c67628bc9636ab0251ebd Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 14:03:06 +0200 Subject: [PATCH 14/75] Add scala-collections-compat sources to scalalib --- .../overrides-2.13/scala/collection/compat/package.scala | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 scalalib/overrides-2.13/scala/collection/compat/package.scala diff --git a/scalalib/overrides-2.13/scala/collection/compat/package.scala b/scalalib/overrides-2.13/scala/collection/compat/package.scala new file mode 100644 index 0000000000..2c42da993e --- /dev/null +++ b/scalalib/overrides-2.13/scala/collection/compat/package.scala @@ -0,0 +1,6 @@ +package scala.collection + +package object compat { + type ScalaStream[+T] = 
scala.collection.immutable.LazyList[T] + val ScalaStream = scala.collection.immutable.LazyList +} \ No newline at end of file From 8bd903285983e54b12b90b7a72796d0071e3bf1d Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 14:03:06 +0200 Subject: [PATCH 15/75] Add scala-collections-compat sources to scalalib --- build.sbt | 1 + .../scala/collection/compat/BuildFrom.scala | 53 + .../scala/collection/compat/CompatImpl.scala | 74 + .../collection/compat/PackageShared.scala | 467 +++++ .../compat/immutable/ArraySeq.scala | 267 +++ .../compat/immutable/LazyList.scala | 1537 +++++++++++++++++ .../scala/collection/compat/package.scala | 66 + .../scala/jdk/CollectionConventers.scala | 5 + .../scala/collection/compat/BuildFrom.scala | 53 + .../scala/collection/compat/CompatImpl.scala | 74 + .../collection/compat/PackageShared.scala | 467 +++++ .../compat/immutable/ArraySeq.scala | 267 +++ .../compat/immutable/LazyList.scala | 1537 +++++++++++++++++ .../scala/collection/compat/package.scala | 74 + .../scala/jdk/CollectionConventers.scala | 5 + .../collection/compat/immutable/package.scala | 21 + .../scala/collection/compat/package.scala | 22 +- .../scalanative/CrossCompileCompat.scala | 15 - 18 files changed, 4988 insertions(+), 17 deletions(-) create mode 100644 scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala create mode 100644 scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala create mode 100644 scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala create mode 100644 scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala create mode 100644 scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala create mode 100644 scalalib/overrides-2.11/scala/collection/compat/package.scala create mode 100644 scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala create mode 100644 scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala create mode 100644 scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala create mode 100644 scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala create mode 100644 scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala create mode 100644 scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala create mode 100644 scalalib/overrides-2.12/scala/collection/compat/package.scala create mode 100644 scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala create mode 100644 scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala delete mode 100644 util/src/main/scala/scala/scalanative/CrossCompileCompat.scala diff --git a/build.sbt b/build.sbt index a47ba0df90..706d981da4 100644 --- a/build.sbt +++ b/build.sbt @@ -234,6 +234,7 @@ lazy val util = .in(file("util")) .settings(toolSettings) .settings(mavenPublishSettings) + .settings(crossCompileCompatSettings) lazy val nir = project diff --git a/scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala b/scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala new file mode 100644 index 0000000000..8cdd8756c3 --- /dev/null +++ b/scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.compat + +import scala.collection.generic.CanBuildFrom +import scala.collection.mutable +import scala.language.higherKinds +import scala.language.implicitConversions + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait BuildFrom[-From, -A, +C] extends Any { + def fromSpecific(from: From)(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder(from: From): mutable.Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): mutable.Builder[A, C] = newBuilder(from) +} + +object BuildFrom { + + // Implicit instance derived from an implicit CanBuildFrom instance + implicit def fromCanBuildFrom[From, A, C]( + implicit cbf: CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = + new BuildFrom[From, A, C] { + def fromSpecific(from: From)(it: IterableOnce[A]): C = (cbf(from) ++= it).result() + def newBuilder(from: From): mutable.Builder[A, C] = cbf(from) + } + + // Implicit conversion derived from an implicit conversion to CanBuildFrom + implicit def fromCanBuildFromConversion[X, From, A, C](x: X)( + implicit toCanBuildFrom: X => CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = + fromCanBuildFrom(toCanBuildFrom(x)) + +} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala b/scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala new file mode 100644 index 0000000000..4ff1f5515e --- /dev/null +++ b/scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala @@ -0,0 +1,74 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat + +import scala.reflect.ClassTag +import scala.collection.generic.CanBuildFrom +import scala.collection.{immutable => i, mutable => m} +import scala.language.higherKinds + +/* builder optimized for a single ++= call, which returns identity on result if possible + * and defers to the underlying builder if not. 
+ */ +private final class IdentityPreservingBuilder[A, CC[X] <: TraversableOnce[X]]( + that: m.Builder[A, CC[A]])(implicit ct: ClassTag[CC[A]]) + extends m.Builder[A, CC[A]] { + + //invariant: ruined => (collection == null) + var collection: CC[A] = null.asInstanceOf[CC[A]] + var ruined = false + + private[this] def ruin(): Unit = { + if (collection != null) that ++= collection + collection = null.asInstanceOf[CC[A]] + ruined = true + } + + override def ++=(elems: TraversableOnce[A]): this.type = + elems match { + case ct(ca) if collection == null && !ruined => { + collection = ca + this + } + case _ => { + ruin() + that ++= elems + this + } + } + + def +=(elem: A): this.type = { + ruin() + that += elem + this + } + + def clear(): Unit = { + collection = null.asInstanceOf[CC[A]] + if (ruined) that.clear() + ruined = false + } + + def result(): CC[A] = if (collection == null) that.result() else collection +} + +private[compat] object CompatImpl { + def simpleCBF[A, C](f: => m.Builder[A, C]): CanBuildFrom[Any, A, C] = + new CanBuildFrom[Any, A, C] { + def apply(from: Any): m.Builder[A, C] = apply() + def apply(): m.Builder[A, C] = f + } + + type ImmutableBitSetCC[X] = ({ type L[_] = i.BitSet })#L[X] + type MutableBitSetCC[X] = ({ type L[_] = m.BitSet })#L[X] +} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala b/scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala new file mode 100644 index 0000000000..b7ab3f8ac7 --- /dev/null +++ b/scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala @@ -0,0 +1,467 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat + +import scala.collection.generic._ +import scala.reflect.ClassTag +import scala.collection.{ + BitSet, + GenTraversable, + IterableLike, + IterableView, + MapLike, + TraversableLike, + immutable => i, + mutable => m +} +import scala.runtime.{Tuple2Zipped, Tuple3Zipped} +import scala.{collection => c} +import scala.language.higherKinds +import scala.language.implicitConversions + +/** The collection compatibility API */ +private[compat] trait PackageShared { + import CompatImpl._ + + /** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ + type Factory[-A, +C] = CanBuildFrom[Nothing, A, C] + + implicit class FactoryOps[-A, +C](private val factory: Factory[A, C]) { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: TraversableOnce[A]): C = (factory() ++= it).result() + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ + def newBuilder: m.Builder[A, C] = factory() + } + + implicit def genericCompanionToCBF[A, CC[X] <: GenTraversable[X]]( + fact: GenericCompanion[CC]): CanBuildFrom[Any, A, CC[A]] = { + /* see https://github.com/scala/scala-collection-compat/issues/337 + `simpleCBF.apply` takes a by-name parameter and relies on + repeated references generating new builders, thus this expression + must be non-strict + */ + def builder: m.Builder[A, CC[A]] = fact match { + case c.Seq | i.Seq => new IdentityPreservingBuilder[A, i.Seq](i.Seq.newBuilder[A]) + case c.LinearSeq | i.LinearSeq => + new IdentityPreservingBuilder[A, i.LinearSeq](i.LinearSeq.newBuilder[A]) + case _ => fact.newBuilder[A] + } + simpleCBF(builder) + } + + implicit def sortedSetCompanionToCBF[A: Ordering, + CC[X] <: c.SortedSet[X] with c.SortedSetLike[X, CC[X]]]( + fact: SortedSetFactory[CC]): CanBuildFrom[Any, A, CC[A]] = + simpleCBF(fact.newBuilder[A]) + + implicit def arrayCompanionToCBF[A: ClassTag](fact: Array.type): CanBuildFrom[Any, A, Array[A]] = + simpleCBF(Array.newBuilder[A]) + + implicit def mapFactoryToCBF[K, V, CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( + fact: MapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = + simpleCBF(fact.newBuilder[K, V]) + + implicit def sortedMapFactoryToCBF[ + K: Ordering, + V, + CC[A, B] <: c.SortedMap[A, B] with c.SortedMapLike[A, B, CC[A, B]]]( + fact: SortedMapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = + simpleCBF(fact.newBuilder[K, V]) + + implicit def bitSetFactoryToCBF(fact: BitSetFactory[BitSet]): CanBuildFrom[Any, Int, BitSet] = + simpleCBF(fact.newBuilder) + + implicit def immutableBitSetFactoryToCBF( + fact: BitSetFactory[i.BitSet]): CanBuildFrom[Any, Int, ImmutableBitSetCC[Int]] = + simpleCBF(fact.newBuilder) + + implicit def mutableBitSetFactoryToCBF( + fact: BitSetFactory[m.BitSet]): CanBuildFrom[Any, Int, MutableBitSetCC[Int]] = + simpleCBF(fact.newBuilder) + + implicit class IterableFactoryExtensionMethods[CC[X] <: GenTraversable[X]]( + private val fact: GenericCompanion[CC]) { + def from[A](source: TraversableOnce[A]): CC[A] = + fact.apply(source.toSeq: _*) + } + + implicit class MapFactoryExtensionMethods[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( + private val fact: MapFactory[CC]) { + def from[K, V](source: TraversableOnce[(K, V)]): CC[K, V] = + fact.apply(source.toSeq: _*) + } + + implicit class BitSetFactoryExtensionMethods[ + C <: scala.collection.BitSet with scala.collection.BitSetLike[C]]( + private val fact: BitSetFactory[C]) { + def fromSpecific(source: TraversableOnce[Int]): C = + fact.apply(source.toSeq: _*) + } + + private[compat] def build[T, CC](builder: m.Builder[T, CC], source: TraversableOnce[T]): CC = { + builder ++= source + builder.result() + } + + implicit def toImmutableSortedMapExtensions( + fact: i.SortedMap.type): ImmutableSortedMapExtensions = + new ImmutableSortedMapExtensions(fact) + + implicit def toImmutableListMapExtensions(fact: i.ListMap.type): ImmutableListMapExtensions = + new ImmutableListMapExtensions(fact) + + implicit def toImmutableHashMapExtensions(fact: i.HashMap.type): ImmutableHashMapExtensions = + new ImmutableHashMapExtensions(fact) + + implicit def toImmutableTreeMapExtensions(fact: i.TreeMap.type): ImmutableTreeMapExtensions = + new ImmutableTreeMapExtensions(fact) + + implicit def toImmutableIntMapExtensions(fact: i.IntMap.type): ImmutableIntMapExtensions = + new ImmutableIntMapExtensions(fact) + + implicit def toImmutableLongMapExtensions(fact: i.LongMap.type): ImmutableLongMapExtensions = + new 
ImmutableLongMapExtensions(fact) + + implicit def toMutableLongMapExtensions(fact: m.LongMap.type): MutableLongMapExtensions = + new MutableLongMapExtensions(fact) + + implicit def toMutableHashMapExtensions(fact: m.HashMap.type): MutableHashMapExtensions = + new MutableHashMapExtensions(fact) + + implicit def toMutableListMapExtensions(fact: m.ListMap.type): MutableListMapExtensions = + new MutableListMapExtensions(fact) + + implicit def toMutableMapExtensions(fact: m.Map.type): MutableMapExtensions = + new MutableMapExtensions(fact) + + implicit def toStreamExtensionMethods[A](stream: Stream[A]): StreamExtensionMethods[A] = + new StreamExtensionMethods[A](stream) + + implicit def toSortedExtensionMethods[K, V <: Sorted[K, V]]( + fact: Sorted[K, V]): SortedExtensionMethods[K, V] = + new SortedExtensionMethods[K, V](fact) + + implicit def toIteratorExtensionMethods[A](self: Iterator[A]): IteratorExtensionMethods[A] = + new IteratorExtensionMethods[A](self) + + implicit def toTraversableExtensionMethods[A]( + self: Traversable[A]): TraversableExtensionMethods[A] = + new TraversableExtensionMethods[A](self) + + implicit def toTraversableOnceExtensionMethods[A]( + self: TraversableOnce[A]): TraversableOnceExtensionMethods[A] = + new TraversableOnceExtensionMethods[A](self) + + // This really belongs into scala.collection but there's already a package object + // in scala-library so we can't add to it + type IterableOnce[+X] = c.TraversableOnce[X] + val IterableOnce = c.TraversableOnce + + implicit def toMapExtensionMethods[K, V]( + self: scala.collection.Map[K, V]): MapExtensionMethods[K, V] = + new MapExtensionMethods[K, V](self) + + implicit def toMapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( + self: IterableView[(K, V), C]): MapViewExtensionMethods[K, V, C] = + new MapViewExtensionMethods[K, V, C](self) +} + +class ImmutableSortedMapExtensions(private val fact: i.SortedMap.type) extends AnyVal { + def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.SortedMap[K, V] = + build(i.SortedMap.newBuilder[K, V], source) +} + +class ImmutableListMapExtensions(private val fact: i.ListMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): i.ListMap[K, V] = + build(i.ListMap.newBuilder[K, V], source) +} + +class ImmutableHashMapExtensions(private val fact: i.HashMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): i.HashMap[K, V] = + build(i.HashMap.newBuilder[K, V], source) +} + +class ImmutableTreeMapExtensions(private val fact: i.TreeMap.type) extends AnyVal { + def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.TreeMap[K, V] = + build(i.TreeMap.newBuilder[K, V], source) +} + +class ImmutableIntMapExtensions(private val fact: i.IntMap.type) extends AnyVal { + def from[V](source: TraversableOnce[(Int, V)]): i.IntMap[V] = + build(i.IntMap.canBuildFrom[Int, V](), source) +} + +class ImmutableLongMapExtensions(private val fact: i.LongMap.type) extends AnyVal { + def from[V](source: TraversableOnce[(Long, V)]): i.LongMap[V] = + build(i.LongMap.canBuildFrom[Long, V](), source) +} + +class MutableLongMapExtensions(private val fact: m.LongMap.type) extends AnyVal { + def from[V](source: TraversableOnce[(Long, V)]): m.LongMap[V] = + build(m.LongMap.canBuildFrom[Long, V](), source) +} + +class MutableHashMapExtensions(private val fact: m.HashMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): m.HashMap[K, V] = + build(m.HashMap.canBuildFrom[K, V](), source) +} + +class 
MutableListMapExtensions(private val fact: m.ListMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): m.ListMap[K, V] = + build(m.ListMap.canBuildFrom[K, V](), source) +} + +class MutableMapExtensions(private val fact: m.Map.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): m.Map[K, V] = + build(m.Map.canBuildFrom[K, V](), source) +} + +class StreamExtensionMethods[A](private val stream: Stream[A]) extends AnyVal { + def lazyAppendedAll(as: => TraversableOnce[A]): Stream[A] = stream.append(as) +} + +class SortedExtensionMethods[K, T <: Sorted[K, T]](private val fact: Sorted[K, T]) { + def rangeFrom(from: K): T = fact.from(from) + def rangeTo(to: K): T = fact.to(to) + def rangeUntil(until: K): T = fact.until(until) +} + +class IteratorExtensionMethods[A](private val self: c.Iterator[A]) extends AnyVal { + def sameElements[B >: A](that: c.TraversableOnce[B]): Boolean = { + self.sameElements(that.iterator) + } + def concat[B >: A](that: c.TraversableOnce[B]): c.TraversableOnce[B] = self ++ that + def tapEach[U](f: A => U): c.Iterator[A] = self.map(a => { f(a); a }) +} + +class TraversableOnceExtensionMethods[A](private val self: c.TraversableOnce[A]) extends AnyVal { + def iterator: Iterator[A] = self.toIterator + + def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.min(ord)) + } + + def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.max(ord)) + } + + def minByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.minBy(f)(cmp)) + } + + def maxByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.maxBy(f)(cmp)) + } +} + +class TraversableExtensionMethods[A](private val self: c.Traversable[A]) extends AnyVal { + def iterableFactory: GenericCompanion[Traversable] = self.companion + + def sizeCompare(otherSize: Int): Int = SizeCompareImpl.sizeCompareInt(self)(otherSize) + def sizeIs: SizeCompareOps = new SizeCompareOps(self) + def sizeCompare(that: c.Traversable[_]): Int = SizeCompareImpl.sizeCompareColl(self)(that) + +} + +class SeqExtensionMethods[A](private val self: c.Seq[A]) extends AnyVal { + def lengthIs: SizeCompareOps = new SizeCompareOps(self) +} + +class SizeCompareOps private[compat] (private val it: c.Traversable[_]) extends AnyVal { + import SizeCompareImpl._ + + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = sizeCompareInt(it)(size) < 0 + + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = sizeCompareInt(it)(size) <= 0 + + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = sizeCompareInt(it)(size) == 0 + + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = sizeCompareInt(it)(size) != 0 + + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = sizeCompareInt(it)(size) >= 0 + + /** Tests if the size of the collection is greater than some value. 
*/ + @inline def >(size: Int): Boolean = sizeCompareInt(it)(size) > 0 +} + +private object SizeCompareImpl { + def sizeCompareInt(self: c.Traversable[_])(otherSize: Int): Int = + self match { + case self: c.SeqLike[_, _] => self.lengthCompare(otherSize) + case _ => + if (otherSize < 0) 1 + else { + var i = 0 + val it = self.toIterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + + // `IndexedSeq` is the only thing that we can safely say has a known size + def sizeCompareColl(self: c.Traversable[_])(that: c.Traversable[_]): Int = + that match { + case that: c.IndexedSeq[_] => sizeCompareInt(self)(that.length) + case _ => + self match { + case self: c.IndexedSeq[_] => + val res = sizeCompareInt(that)(self.length) + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + case _ => + val thisIt = self.toIterator + val thatIt = that.toIterator + while (thisIt.hasNext && thatIt.hasNext) { + thisIt.next() + thatIt.next() + } + java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) + } + } +} + +class TraversableLikeExtensionMethods[A, Repr](private val self: c.GenTraversableLike[A, Repr]) + extends AnyVal { + def tapEach[U](f: A => U)(implicit bf: CanBuildFrom[Repr, A, Repr]): Repr = + self.map(a => { f(a); a }) + + def partitionMap[A1, A2, That, Repr1, Repr2](f: A => Either[A1, A2])( + implicit bf1: CanBuildFrom[Repr, A1, Repr1], + bf2: CanBuildFrom[Repr, A2, Repr2] + ): (Repr1, Repr2) = { + val l = bf1() + val r = bf2() + self.foreach { x => + f(x) match { + case Left(x1) => l += x1 + case Right(x2) => r += x2 + } + } + (l.result(), r.result()) + } + + def groupMap[K, B, That](key: A => K)(f: A => B)( + implicit bf: CanBuildFrom[Repr, B, That]): Map[K, That] = { + val map = m.Map.empty[K, m.Builder[B, That]] + for (elem <- self) { + val k = key(elem) + val bldr = map.getOrElseUpdate(k, bf(self.repr)) + bldr += f(elem) + } + val res = Map.newBuilder[K, That] + for ((k, bldr) <- map) res += ((k, bldr.result())) + res.result() + } + + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): Map[K, B] = { + val map = m.Map.empty[K, B] + for (elem <- self) { + val k = key(elem) + val v = map.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + map.put(k, v) + } + map.toMap + } +} + +class TrulyTraversableLikeExtensionMethods[El1, Repr1]( + private val self: TraversableLike[El1, Repr1]) + extends AnyVal { + + def lazyZip[El2, Repr2, T2](t2: T2)( + implicit w2: T2 => IterableLike[El2, Repr2] + ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((self, t2)) +} + +class Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( + private val self: Tuple2Zipped[El1, Repr1, El2, Repr2]) { + + def lazyZip[El3, Repr3, T3](t3: T3)(implicit w3: T3 => IterableLike[El3, Repr3]) + : Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = + new Tuple3Zipped((self.colls._1, self.colls._2, t3)) +} + +class MapExtensionMethods[K, V](private val self: scala.collection.Map[K, V]) extends AnyVal { + + def foreachEntry[U](f: (K, V) => U): Unit = { + self.foreach { case (k, v) => f(k, v) } + } + +} + +class MapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( + private val self: IterableView[(K, V), C]) + extends AnyVal { + + def mapValues[W, That](f: V => W)( + implicit bf: CanBuildFrom[IterableView[(K, V), C], (K, W), That]): That = + self.map[(K, W), That] { case (k, v) => (k, f(v)) } + + // TODO: Replace the current implementation of `mapValues` 
with this + // after major version bump when bincompat can be broken. + // At the same time, remove `canBuildFromIterableViewMapLike` + /* + def mapValues[W](f: V => W): IterableView[(K, W), C] = + // the implementation of `self.map` also casts the result + self.map({ case (k, v) => (k, f(v)) }).asInstanceOf[IterableView[(K, W), C]] + */ + + def filterKeys(p: K => Boolean): IterableView[(K, V), C] = + self.filter { case (k, _) => p(k) } +} + +class ImmutableQueueExtensionMethods[A](private val self: i.Queue[A]) extends AnyVal { + def enqueueAll[B >: A](iter: c.Iterable[B]): i.Queue[B] = + self.enqueue(iter.to[i.Iterable]) +} + +class MutableQueueExtensionMethods[Element](private val self: m.Queue[Element]) extends AnyVal { + def enqueueAll(iter: c.Iterable[Element]): Unit = + self.enqueue(iter.toIndexedSeq: _*) +} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala b/scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala new file mode 100644 index 0000000000..e0da76ef4a --- /dev/null +++ b/scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala @@ -0,0 +1,267 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat.immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.AbstractSeq +import scala.collection.generic._ +import scala.collection.immutable.IndexedSeq +import scala.collection.mutable.{ArrayBuilder, Builder, WrappedArrayBuilder} +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +abstract class ArraySeq[+T] extends AbstractSeq[T] with IndexedSeq[T] { + + override protected[this] def thisCollection: ArraySeq[T] = this + + /** The tag of the element type */ + protected[this] def elemTag: ClassTag[T] + + /** The length of the array */ + def length: Int + + /** The element at given index */ + def apply(index: Int): T + + /** The underlying array */ + def unsafeArray: Array[T @uncheckedVariance] + + override def stringPrefix = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq unsafeWrapArray unsafeArray.clone() + + /** Creates new builder for this collection ==> move to subclasses + */ + override protected[this] def newBuilder: Builder[T, ArraySeq[T]] = + ArraySeq.newBuilder[T](elemTag) + +} + +/** A companion object used to create instances of `ArraySeq`. + */ +object ArraySeq { + // This is reused for all calls to empty. 
+ private val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T <: AnyRef]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def newBuilder[T](implicit elemTag: ClassTag[T]): Builder[T, ArraySeq[T]] = + new WrappedArrayBuilder[T](elemTag).mapResult(w => unsafeWrapArray(w.array)) + + def apply[T](elems: T*)(implicit elemTag: ClassTag[T]): ArraySeq[T] = { + val b = newBuilder[T] + b ++= elems + b.result() + } + + def unapplySeq[T](seq: ArraySeq[T]): Some[ArraySeq[T]] = Some(seq) + + /** + * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a + * `ClassCastException` at runtime. + */ + def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = + (x.asInstanceOf[Array[_]] match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] = + new CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] { + def apply(from: ArraySeq[_]): Builder[T, ArraySeq[T]] = + ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] + def apply: Builder[T, ArraySeq[T]] = + ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] + } + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] with Serializable { + lazy val elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + def apply(index: Int): T = unsafeArray(index) + def update(index: Int, elem: T) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofRef[_] => + arrayEquals(unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] with Serializable { + def elemTag = ClassTag.Byte + def length: Int = unsafeArray.length + def apply(index: Int): Byte = unsafeArray(index) + def update(index: Int, elem: Byte) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] with Serializable { + def elemTag = ClassTag.Short + def length: Int = unsafeArray.length + def apply(index: Int): Short = unsafeArray(index) + def 
update(index: Int, elem: Short) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] with Serializable { + def elemTag = ClassTag.Char + def length: Int = unsafeArray.length + def apply(index: Int): Char = unsafeArray(index) + def update(index: Int, elem: Char) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] with Serializable { + def elemTag = ClassTag.Int + def length: Int = unsafeArray.length + def apply(index: Int): Int = unsafeArray(index) + def update(index: Int, elem: Int) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] with Serializable { + def elemTag = ClassTag.Long + def length: Int = unsafeArray.length + def apply(index: Int): Long = unsafeArray(index) + def update(index: Int, elem: Long) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] with Serializable { + def elemTag = ClassTag.Float + def length: Int = unsafeArray.length + def apply(index: Int): Float = unsafeArray(index) + def update(index: Int, elem: Float) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] with Serializable { + def elemTag = ClassTag.Double + def length: Int = unsafeArray.length + def apply(index: Int): Double = unsafeArray(index) + def update(index: Int, elem: Double) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) + extends ArraySeq[Boolean] + with Serializable { + def elemTag = ClassTag.Boolean + def length: Int = unsafeArray.length + def apply(index: Int): Boolean = unsafeArray(index) + def update(index: Int, elem: Boolean) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(unsafeArray, 
that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] with Serializable { + def elemTag = ClassTag.Unit + def length: Int = unsafeArray.length + def apply(index: Int): Unit = unsafeArray(index) + def update(index: Int, elem: Unit) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + } + + private[this] def arrayEquals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = { + if (xs eq ys) + return true + if (xs.length != ys.length) + return false + + val len = xs.length + var i = 0 + while (i < len) { + if (xs(i) != ys(i)) + return false + i += 1 + } + true + } +} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala b/scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala new file mode 100644 index 0000000000..4b4dd58bf2 --- /dev/null +++ b/scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala @@ -0,0 +1,1537 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat.immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.annotation.tailrec +import scala.annotation.unchecked.{uncheckedVariance => uV} +import scala.collection.{ + AbstractIterator, + AbstractSeq, + GenIterable, + GenSeq, + GenTraversableOnce, + LinearSeqOptimized, + mutable +} +import scala.collection.generic.{ + CanBuildFrom, + FilterMonadic, + GenericCompanion, + GenericTraversableTemplate, + SeqFactory +} +import scala.collection.immutable.{LinearSeq, NumericRange} +import scala.collection.mutable.{ArrayBuffer, Builder, StringBuilder} +import scala.language.implicitConversions +import scala.language.higherKinds + +/** This class implements an immutable linked list that evaluates elements + * in order and only when needed. Here is an example: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * + * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 } + * + * fibs take 5 foreach println + * } + * + * // prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * }}} + * + * A `LazyList`, like the one in the example above, may be infinite in length. + * Aggregate methods, such as `count`, `sum`, `max` or `min` on such infinite length + * sequences will not terminate. Filtered infinite lazy lists are also effectively + * infinite in length. + * + * Elements of a `LazyList` are memoized; that is, the value of each element + * is computed only once. 
+ * To illustrate, we will alter body of the `fibs` value above and take some + * more values: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * + * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip( + * fibs.tail).map(n => { + * println("Adding %d and %d".format(n._1, n._2)) + * n._1 + n._2 + * }) + * + * fibs take 5 foreach println + * fibs take 6 foreach println + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 + * // 1 + * // Adding 1 and 1 + * // 2 + * // Adding 1 and 2 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 + * // 5 + * }}} + * + * There are a number of subtle points to the above example. + * + * - The definition of `fibs` is a `val` not a method. The memoization of the + * `LazyList` requires us to have somewhere to store the information and a `val` + * allows us to do that. + * + * - While the `LazyList` is actually being modified during access, this does not + * change the notion of its immutability. Once the values are memoized they do + * not change and values that have yet to be memoized still "exist", they + * simply haven't been realized yet. + * + * - One must be cautious of memoization; you can very quickly eat up large + * amounts of memory if you're not careful. The reason for this is that the + * memoization of the `LazyList` creates a structure much like + * [[scala.collection.immutable.List]]. So long as something is holding on to + * the head, the head holds on to the tail, and so it continues recursively. + * If, on the other hand, there is nothing holding on to the head (e.g. we used + * `def` to define the `LazyList`) then once it is no longer being used directly, + * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. These necessarily hold onto the head, since + * they are methods on `LazyList`, and a lazy list holds its own head. For + * computations of this sort where memoization is not desired, use + * `Iterator` when possible. + * + * {{{ + * // For example, let's build the natural numbers and do some silly iteration + * // over them. + * + * // We'll start with a silly iteration + * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { + * // Stop after 200,000 + * if (i < 200001) { + * if (i % 50000 == 0) println(s + i) + * loop(s, iter.next(), iter) + * } + * } + * + * // Our first LazyList definition will be a val definition + * val lazylist1: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * + * // Because lazylist1 is a val, everything that the iterator produces is held + * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * val it1 = lazylist1.toIterator + * loop("Iterator1: ", it1.next(), it1) + * + * // We can redefine this LazyList such that all we have is the Iterator left + * // and allow the LazyList to be garbage collected as required. Using a def + * // to provide the LazyList ensures that no val is holding onto the head as + * // is the case with lazylist1 + * def lazylist2: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * val it2 = lazylist2.toIterator + * loop("Iterator2: ", it2.next(), it2) + * + * // And, of course, we don't actually need a LazyList at all for such a simple + * // problem. 
There's no reason to use a LazyList if you don't actually need + * // one. + * val it3 = new Iterator[Int] { + * var i = -1 + * def hasNext = true + * def next(): Int = { i += 1; i } + * } + * loop("Iterator3: ", it3.next(), it3) + * }}} + * + * - The fact that `tail` works at all is of interest. In the definition of + * `fibs` we have an initial `(0, 1, LazyList(...))` so `tail` is deterministic. + * If we defined `fibs` such that only `0` were concretely known then the act + * of determining `tail` would require the evaluation of `tail` which would + * cause an infinite recursion and stack overflow. If we define a definition + * where the tail is not initially computable then we're going to have an + * infinite recursion: + * {{{ + * // The first time we try to access the tail we're going to need more + * // information which will require us to recurse, which will require us to + * // recurse, which... + * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * }}} + * + * The definition of `fibs` above creates a larger number of objects than + * necessary depending on how you might want to implement it. The following + * implementation provides a more "cost effective" implementation due to the + * fact that it has a more direct route to the numbers themselves: + * + * {{{ + * lazy val fib: LazyList[Int] = { + * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * loop(1, 1) + * } + * }}} + * + * @tparam A the type of the elements contained in this lazy list. + * + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] + * section on `LazyLists` for more information. + * @define Coll `LazyList` + * @define coll lazy list + * @define orderDependent + * @define orderDependentFold + * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, + * `appendedAll`, `lazyAppendedAll`) without forcing any of the + * intermediate resulting lazy lists may overflow the stack when + * the final result is forced. + * @define preservesLaziness This method preserves laziness; elements are only evaluated + * individually as needed. + * @define initiallyLazy This method does not evaluate anything until an operation is performed + * on the result (e.g. calling `head` or `tail`, or checking if it is empty). + * @define evaluatesAllElements This method evaluates all elements of the collection. 
+ */ +@SerialVersionUID(3L) +final class LazyList[+A] private (private[this] var lazyState: () => LazyList.State[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with GenericTraversableTemplate[A, LazyList] + with LinearSeqOptimized[A, LazyList[A]] + with Serializable { + import LazyList._ + + @volatile private[this] var stateEvaluated: Boolean = false + @inline private def stateDefined: Boolean = stateEvaluated + private[this] var midEvaluation = false + + private lazy val state: State[A] = { + // if it's already mid-evaluation, we're stuck in an infinite + // self-referential loop (also it's empty) + if (midEvaluation) { + throw new RuntimeException( + "self-referential LazyList or a derivation thereof has no more elements") + } + midEvaluation = true + val res = try lazyState() + finally midEvaluation = false + // if we set it to `true` before evaluating, we may infinite loop + // if something expects `state` to already be evaluated + stateEvaluated = true + lazyState = null // allow GC + res + } + + /** $preservesLaziness */ + def knownSize: Int = if (knownIsEmpty) 0 else -1 +// override def iterableFactory: SeqFactory[LazyList] = LazyList + + override def isEmpty: Boolean = state eq State.Empty + + override def head: A = state.head + + override def tail: LazyList[A] = state.tail + + @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) + @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) + + // It's an imperfect world, but at least we can bottle up the + // imperfection in a capsule. + @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] + @inline private def isLLBuilder[B, That](bf: CanBuildFrom[LazyList[A], B, That]) = + bf(repr).isInstanceOf[LazyList.LazyBuilder[_]] + + override def companion: GenericCompanion[LazyList] = LazyList + + /** Evaluates all undefined elements of the lazy list. + * + * This method detects cycles in lazy lists, and terminates after all + * elements of the cycle are evaluated. For example: + * + * {{{ + * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * ring.force + * ring.toString + * + * // prints + * // + * // LazyList(1, 2, 3, ...) + * }}} + * + * This method will *not* terminate for non-cyclic infinite-sized collections. + * + * @return this + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: LazyList[A] = this + if (!these.isEmpty) { + these = these.tail + } + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + /** @inheritdoc + * + * The iterator returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def iterator: Iterator[A] = + if (knownIsEmpty) Iterator.empty + else new LazyIterator(this) + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying LazyList as elements + * are consumed. + * @note This function will force the realization of the entire LazyList + * unless the `f` throws an exception. 
+ */ + @tailrec + override def foreach[U](f: A => U): Unit = { + if (!isEmpty) { + f(head) + tail.foreach(f) + } + } + + /** LazyList specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override def foldLeft[B](z: B)(op: (B, A) => B): B = + if (isEmpty) z + else tail.foldLeft(op(z, head))(op) + + // State.Empty doesn't use the SerializationProxy + protected[this] def writeReplace(): AnyRef = + if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + + override def stringPrefix = "LazyList" + + /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. + * + * $preservesLaziness + * + * $appendStackSafety + * + * @param suffix The collection that gets appended to this lazy list + * @return The lazy list containing elements of this lazy list and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => GenTraversableOnce[B]): LazyList[B] = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case coll => stateFromIterator(coll.toIterator) + } else sCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def ++[B >: A, That](suffix: GenTraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList.from(suffix) + else lazyAppendedAll(suffix) + } else super.++(suffix)(bf) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + else lazyAppendedAll(Iterator.single(elem)) + } else super.:+(elem)(bf) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def equals(that: Any): Boolean = + if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B, That](z: B)(op: (B, A) => B)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + else newLL(scanLeftState(z)(op)) + } else super.scanLeft(z)(op)(bf) + + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + sCons( + z, + newLL { + if (isEmpty) State.Empty + else tail.scanLeftState(op(z, head))(op) + } + ) + + /** LazyList specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `f`. 
+ */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: LazyList[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + val (left, right) = mapToLL(f).partition(_.isLeft) + (left.mapToLL(_.asInstanceOf[Left[A1, _]].a), right.mapToLL(_.asInstanceOf[Right[_, A2]].b)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def withFilter(p: A => Boolean): FilterMonadic[A, LazyList[A]] = + new LazyList.WithFilter(this, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + newLL(sCons(elem, this)) + } else super.+:(elem)(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def ++:[B >: A, That](prefix: TraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList.from(prefix) + else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) + } else super.++:(prefix)(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def ++:[B >: A, That](prefix: Traversable[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList.from(prefix) + else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) + } else super.++:(prefix)(bf) + + private def prependedAllToLL[B >: A](prefix: Traversable[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(prefix) + else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat(mapToLL(f): @inline) + else super.map(f)(bf) + + private def mapToLL[B](f: A => B): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else (mapImpl(f): @inline) + + /** @inheritdoc + * + * $preservesLaziness + */ + def tapEach[U](f: A => U): LazyList[A] = mapToLL { a => + f(a); a + } + + private def mapImpl[B](f: A => B): LazyList[B] = + newLL { + if (isEmpty) State.Empty + else sCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B, That](pf: PartialFunction[A, B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + 
if (knownIsEmpty) LazyList.empty + else LazyList.collectImpl(this, pf) + } else super.collect(pf)(bf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq LazyList.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. + */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatMap[B, That](f: A => GenTraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat(flatMapToLL(f): @inline) + else super.flatMap(f)(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A => GenTraversableOnce[B]): LazyList[B] = + flatMapToLL(asIterable) + + private def flatMapToLL[B](f: A => GenTraversableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[A1 >: A, B, That](that: GenIterable[B])( + implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = + if (isLLBuilder(bf)) asThat(zipToLL(that): @inline) + else super.zip(that)(bf) + + private def zipToLL[B](that: GenIterable[B]): LazyList[(A, B)] = + if (this.knownIsEmpty) LazyList.empty + else newLL(zipState(that.toIterator)) + + private def zipState[B](it: Iterator[B]): State[(A, B)] = + if (this.isEmpty || !it.hasNext) State.Empty + else sCons((head, it.next()), newLL { tail zipState it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex[A1 >: A, That]( + implicit bf: CanBuildFrom[LazyList[A], (A1, Int), That]): That = + if (isLLBuilder(bf)) asThat { + this zip LazyList.from(0) + } else super.zipWithIndex(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)( + implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = + if (isLLBuilder(bf)) asThat { + if (this.knownIsEmpty) LazyList.continually(thisElem) zip that + else newLL(zipAllState(that.toIterator, thisElem, thatElem)) + } else super.zipAll(that, thisElem, thatElem)(bf) + + private def zipAllState[A1 >: A, B](it: Iterator[B], + thisElem: A1, + thatElem: B): State[(A1, B)] = { + if (it.hasNext) { + if (this.isEmpty) sCons((thisElem, it.next()), newLL { + LazyList.continually(thisElem) zipState it + }) + else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) + } else { + if (this.isEmpty) State.Empty + else sCons((this.head, thatElem), this.tail zipToLL LazyList.continually(thatElem)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + (mapToLL(asPair(_)._1), mapToLL(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3]( + implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + (mapToLL(asTriple(_)._1), mapToLL(asTriple(_)._2), 
mapToLL(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. + */ + override def drop(n: Int): LazyList[A] = + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else LazyList.dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. + */ + override def dropWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyList[A] = { + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else + newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + dropRightState(scout) + } + } + + private def dropRightState(scout: LazyList[_]): State[A] = + if (scout.isEmpty) State.Empty + else sCons(head, newLL(tail.dropRightState(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeImpl(n): @inline) + + private def takeImpl(n: Int): LazyList[A] = { + if (n <= 0) LazyList.empty + else + newLL { + if (isEmpty) State.Empty + else sCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeWhileImpl(p): @inline) + + private def takeWhileImpl(p: A => Boolean): LazyList[A] = + newLL { + if (isEmpty || !p(head)) State.Empty + else sCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyList[A] = + if (n <= 0 || knownIsEmpty) LazyList.empty + else LazyList.takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. 
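+ *
+ * An illustrative example (not part of the upstream scaladoc):
+ * {{{
+ * // Elements at indices 2, 3 and 4 of an infinite lazy list.
+ * LazyList.from(0).slice(2, 5).toList   // List(2, 3, 4)
+ * }}}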
+ */ + override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + + // need contravariant type B to make the compiler happy - still returns LazyList[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(sCons(head, tl))) + + private def occCounts0[B](sq: collection.Seq[B]): mutable.Map[B, Int] = { + val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 } + for (y <- sq) occ(y) += 1 + occ + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def diff[B >: A](that: GenSeq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else { + val occ = occCounts0(that.seq) + LazyList.from { + iterator.filter { x => + val ox = occ(x) // Avoid multiple map lookups + if (ox == 0) true + else { + occ(x) = ox - 1 + false + } + } + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def intersect[B >: A](that: GenSeq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else { + val occ = occCounts0(that.seq) + LazyList.from { + iterator.filter { x => + val ox = occ(x) // Avoid multiple map lookups + if (ox > 0) { + occ(x) = ox - 1 + true + } else false + } + } + } + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyList[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
+ */ + override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") + slidingImpl(size = size, step = step) + } + + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + if (knownIsEmpty) Iterator.empty + else new SlidingIterator[A](this, size = size, step = step) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def padTo[B >: A, That](len: Int, elem: B)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat(padToLL(len, elem)) + else super.padTo(len, elem)(bf) + + private def padToLL[B >: A](len: Int, elem: B): LazyList[B] = + if (len <= 0) this + else + newLL { + if (isEmpty) LazyList.fill(len)(elem).state + else sCons(head, tail.padToLL(len - 1, elem)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def patch[B >: A, That](from: Int, other: GenSeq[B], replaced: Int)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList from other + else patchImpl(from, other, replaced) + } else super.patch(from, other, replaced) + + private def patchImpl[B >: A](from: Int, other: GenSeq[B], replaced: Int): LazyList[B] = + newLL { + if (from <= 0) + stateFromIteratorConcatSuffix(other.toIterator)(LazyList.dropImpl(this, replaced).state) + else if (isEmpty) stateFromIterator(other.toIterator) + else sCons(head, tail.patchImpl(from - 1, other, replaced)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def updated[B >: A, That](index: Int, elem: B)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (index < 0) throw new IndexOutOfBoundsException(s"$index") + else updatedImpl(index, elem, index) + } else super.updated(index, elem) + + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + newLL { + if (index <= 0) sCons(elem, tail) + else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) + else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) + } + } + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. + * + * $evaluatesAllElements + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. 
+ */ + override def addString(sb: StringBuilder, + start: String, + sep: String, + end: String): StringBuilder = { + force + addStringNoForce(sb, start, sep, end) + sb + } + + private[this] def addStringNoForce(b: StringBuilder, + start: String, + sep: String, + end: String): StringBuilder = { + b.append(start) + if (!stateDefined) b.append("<not computed>") + else if (!isEmpty) { + b.append(head) + var cursor = this + @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + var scout = tail + @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty + if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { + cursor = scout + if (scoutNonEmpty) { + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) { + appendCursorElement() + cursor = cursor.tail + scout = scout.tail + if (scoutNonEmpty) scout = scout.tail + } + } + } + if (!scoutNonEmpty) { // Not a cycle, scout hit an end + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + // if cursor (eq scout) has state defined, it is empty; else unknown state + if (!cursor.stateDefined) b.append(sep).append("<not computed>") + } else { + @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) + // Cycle. + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. + var runner = this + var k = 0 + while (!same(runner, scout)) { + runner = runner.tail + scout = scout.tail + k += 1 + } + // Now runner and scout are at the beginning of the cycle. Advance + // cursor, adding to string, until it hits; then we'll have covered + // everything once. If cursor is already at beginning, we'd better + // advance one first unless runner didn't go anywhere (in which case + // we've already looped once). + if (same(cursor, scout) && (k > 0)) { + appendCursorElement() + cursor = cursor.tail + } + while (!same(cursor, scout)) { + appendCursorElement() + cursor = cursor.tail + } + b.append(sep).append("<cycle>") + } + } + b.append(end) + } + + /** $preservesLaziness + * + * @return a string representation of this collection. An undefined state is + * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * a cycle at the fourth element. + */ + override def toString(): String = + addStringNoForce(new StringBuilder(stringPrefix), "(", ", ", ")").toString + + /** @inheritdoc + * + * $preservesLaziness + */ + override def hasDefiniteSize: Boolean = { + if (!stateDefined) false + else if (isEmpty) true + else { + // Two-iterator trick (2x & 1x speed) for cycle detection. 
+ var those = this + var these = tail + while (those ne these) { + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } + + override def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { + case that: LazyList[B] => this eqLL that + case _ => super.sameElements(that) + } + + @tailrec + private def eqLL[B >: A](that: LazyList[B]): Boolean = + (this eq that) || + (this.state eq that.state) || + (!this.isEmpty && + !that.isEmpty && + (this.head == that.head) && + (this.tail eqLL that.tail)) + + override def splitAt(n: Int): (LazyList[A], LazyList[A]) = (take(n), drop(n)) + + override def span(p: A => Boolean): (LazyList[A], LazyList[A]) = (takeWhile(p), dropWhile(p)) + + override def distinct: LazyList[A] = distinctBy(identity) + + def distinctBy[B](f: A => B): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else + LazyList.from { + val outer = iterator + new AbstractIterator[A] { + private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] var nextElementDefined: Boolean = false + private[this] var nextElement: A = _ + + def hasNext: Boolean = + nextElementDefined || (outer.hasNext && { + val a = outer.next() + if (traversedValues.add(f(a))) { + nextElement = a + nextElementDefined = true + true + } else hasNext + }) + + def next(): A = + if (hasNext) { + nextElementDefined = false + nextElement + } else { + Iterator.empty.next() + } + } + } + + override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = + if (cbf().isInstanceOf[LazyList.LazyBuilder[_]]) asThat(this) + else super.to(cbf) + + override def init: LazyList[A] = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyList` + */ +@SerialVersionUID(3L) +object LazyList extends SeqFactory[LazyList] { + // Eagerly evaluate cached empty instance + private[this] val _empty = newLL(State.Empty).force + + private sealed trait State[+A] extends Serializable { + def head: A + def tail: LazyList[A] + } + + private object State { + @SerialVersionUID(3L) + object Empty extends State[Nothing] { + def head: Nothing = throw new NoSuchElementException("head of empty lazy list") + def tail: LazyList[Nothing] = + throw new UnsupportedOperationException("tail of empty lazy list") + } + + @SerialVersionUID(3L) + final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + } + + /** Creates a new LazyList. */ + @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + + /** Creates a new State.Cons. */ + @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + + private val pfMarker: AnyRef = new AnyRef + private val anyToMarker: Any => Any = _ => pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyList`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * can continue their execution where they left off. 
+ */ + + private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty + } + } + + private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + val marker = pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while ((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) State.Empty + else sCons(res, collectImpl(rest, pf)) + } + } + + private def flatMapImpl[A, B](ll: LazyList[A], f: A => GenTraversableOnce[B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var it: Iterator[B] = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).toIterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) + } else State.Empty + } + } + + private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest.state + } + } + + private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest.state + } + } + + private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // 
scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while (!scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest.state + } + } + + /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + */ + object cons { + + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + } + + implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + + /** Construct a LazyList consisting of a given first element followed by elements + * from another LazyList. + */ + def #::[B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) + + /** Construct a LazyList consisting of the concatenation of the given LazyList and + * another LazyList. + */ + def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: GenTraversableOnce[A]): LazyList[A] = coll match { + case lazyList: LazyList[A] => lazyList + case _ => newLL(stateFromIterator(coll.toIterator)) + } + + override def apply[A](elems: A*): LazyList[A] = from(elems) + + override def empty[A]: LazyList[A] = _empty + + /** Creates a State from an Iterator, with another State appended after the Iterator + * is empty. + */ + private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) + else suffix + + /** Creates a State from an IterableOnce. */ + private def stateFromIterator[A](it: Iterator[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) + else State.Empty + + def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + newLL(concatIterator(xss.toIterator)) + + private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + if (!it.hasNext) State.Empty + else stateFromIteratorConcatSuffix(it.next().toIterator)(concatIterator(it)) + + /** An infinite LazyList that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyList + * @param f the function that's repeatedly applied + * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyList[A] = + newLL { + val head = start + sCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyList starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the LazyList + * @param step the increment value of the LazyList + * @return the LazyList starting at value `start`. 
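+ *
+ * For example (an illustrative sketch):
+ * {{{
+ * LazyList.from(10, 5).take(3).toList   // List(10, 15, 20)
+ * }}}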
+ */ + def from(start: Int, step: Int): LazyList[Int] = + newLL(sCons(start, from(start + step, step))) + + /** + * Create an infinite LazyList starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int): LazyList[Int] = from(start, 1) + + /** + * Create an infinite LazyList containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting LazyList + * @return the LazyList containing an infinite number of elem + */ + def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyList[A] = + if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { + def at(index: Int): LazyList[A] = + if (index < n) newLL(sCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + newLL { + f(init) match { + case Some((elem, state)) => sCons(elem, unfold(state)(f)) + case None => State.Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. + */ + def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) + extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + extends AbstractIterator[LazyList[A]] { + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyList[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyList] (lazyList: LazyList[A], p: A => Boolean) + extends FilterMonadic[A, LazyList[A]] { + private[this] val filtered = lazyList.filter(p) + def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + filtered.map(f) + def flatMap[B, That](f: A => GenTraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): FilterMonadic[A, LazyList[A]] = new WithFilter(filtered, q) + } + + class LazyListCanBuildFrom[A] extends GenericCanBuildFrom[A] + + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LazyList[A]] = new LazyListCanBuildFrom[A] + + private final class LazyBuilder[A] extends Builder[A, LazyList[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyList[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyList[A] = { + next init State.Empty + list + } + + override def +=(elem: A): this.type = { + val deferred = new 
DeferredState[A] + next init sCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + override def ++=(xs: TraversableOnce[A]): this.type = { + val deferred = new DeferredState[A] + next init stateFromIteratorConcatSuffix(xs.toIterator)(deferred.eval()) + next = deferred + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + private[this] var _state: () => State[A] = _ + + def eval(): State[A] = { + val state = _state + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => State[A]): Unit = { + if (_state != null) throw new IllegalStateException("already initialized") + _state = () => state + } + } + } + + private case object SerializeEnd + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. + */ + @SerialVersionUID(3L) + final class SerializationProxy[A](@transient protected var coll: LazyList[A]) + extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new ArrayBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyList[A]] + coll = tail.prependedAllToLL(init) + } + + private[this] def readResolve(): Any = coll + } + + override def iterate[A](start: A, len: Int)(f: A => A): LazyList[A] = + iterate(start)(f).take(len) + + override def range[A: Integral](start: A, end: A): LazyList[A] = + from(NumericRange(start, end, implicitly[Integral[A]].one)) + + override def range[A: Integral](start: A, end: A, step: A): LazyList[A] = + from(NumericRange(start, end, step)) +} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/package.scala b/scalalib/overrides-2.11/scala/collection/compat/package.scala new file mode 100644 index 0000000000..4bf7420f86 --- /dev/null +++ b/scalalib/overrides-2.11/scala/collection/compat/package.scala @@ -0,0 +1,66 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.collection.generic.{CanBuildFrom, GenericOrderedCompanion, IsTraversableLike} + +import scala.runtime.Tuple2Zipped +import scala.collection.{immutable => i, mutable => m} +import scala.{collection => c} +import scala.language.higherKinds + +package object compat extends compat.PackageShared { + implicit def genericOrderedCompanionToCBF[A, CC[X] <: Traversable[X]]( + fact: GenericOrderedCompanion[CC])( + implicit ordering: Ordering[A]): CanBuildFrom[Any, A, CC[A]] = + CompatImpl.simpleCBF(fact.newBuilder[A]) + + // CanBuildFrom instances for `IterableView[(K, V), Map[K, V]]` that preserve + // the strict type of the view to be `Map` instead of `Iterable` + // Instances produced by this method are used to chain `filterKeys` after `mapValues` + implicit def canBuildFromIterableViewMapLike[K, V, L, W, CC[X, Y] <: Map[X, Y]] + : CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] = + new CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] { + // `CanBuildFrom` parameters are used as type constraints, they are not used + // at run-time, hence the dummy builder implementations + def apply(from: IterableView[(K, V), CC[K, V]]) = new TraversableView.NoBuilder + def apply() = new TraversableView.NoBuilder + } + + implicit def toTraversableLikeExtensionMethods[Repr](self: Repr)( + implicit traversable: IsTraversableLike[Repr]) + : TraversableLikeExtensionMethods[traversable.A, Repr] = + new TraversableLikeExtensionMethods[traversable.A, Repr](traversable.conversion(self)) + + implicit def toSeqExtensionMethods[A](self: c.Seq[A]): SeqExtensionMethods[A] = + new SeqExtensionMethods[A](self) + + implicit def toTrulyTraversableLikeExtensionMethods[T1, El1, Repr1](self: T1)( + implicit w1: T1 => TraversableLike[El1, Repr1] + ): TrulyTraversableLikeExtensionMethods[El1, Repr1] = + new TrulyTraversableLikeExtensionMethods[El1, Repr1](w1(self)) + + implicit def toTuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( + self: Tuple2Zipped[El1, Repr1, El2, Repr2] + ): Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2] = + new Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2](self) + + implicit def toImmutableQueueExtensionMethods[A]( + self: i.Queue[A]): ImmutableQueueExtensionMethods[A] = + new ImmutableQueueExtensionMethods[A](self) + + implicit def toMutableQueueExtensionMethods[A]( + self: m.Queue[A]): MutableQueueExtensionMethods[A] = + new MutableQueueExtensionMethods[A](self) + +} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala b/scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala new file mode 100644 index 0000000000..abd9071ae1 --- /dev/null +++ b/scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala @@ -0,0 +1,5 @@ +package scala.jdk + +import scala.collection.convert.{DecorateAsJava, DecorateAsScala} + +object CollectionConverters extends DecorateAsJava with DecorateAsScala \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala b/scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala new file mode 100644 index 0000000000..8cdd8756c3 --- /dev/null +++ b/scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat + +import scala.collection.generic.CanBuildFrom +import scala.collection.mutable +import scala.language.higherKinds +import scala.language.implicitConversions + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait BuildFrom[-From, -A, +C] extends Any { + def fromSpecific(from: From)(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder(from: From): mutable.Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): mutable.Builder[A, C] = newBuilder(from) +} + +object BuildFrom { + + // Implicit instance derived from an implicit CanBuildFrom instance + implicit def fromCanBuildFrom[From, A, C]( + implicit cbf: CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = + new BuildFrom[From, A, C] { + def fromSpecific(from: From)(it: IterableOnce[A]): C = (cbf(from) ++= it).result() + def newBuilder(from: From): mutable.Builder[A, C] = cbf(from) + } + + // Implicit conversion derived from an implicit conversion to CanBuildFrom + implicit def fromCanBuildFromConversion[X, From, A, C](x: X)( + implicit toCanBuildFrom: X => CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = + fromCanBuildFrom(toCanBuildFrom(x)) + +} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala b/scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala new file mode 100644 index 0000000000..4ff1f5515e --- /dev/null +++ b/scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala @@ -0,0 +1,74 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat + +import scala.reflect.ClassTag +import scala.collection.generic.CanBuildFrom +import scala.collection.{immutable => i, mutable => m} +import scala.language.higherKinds + +/* builder optimized for a single ++= call, which returns identity on result if possible + * and defers to the underlying builder if not. 
+ */ +private final class IdentityPreservingBuilder[A, CC[X] <: TraversableOnce[X]]( + that: m.Builder[A, CC[A]])(implicit ct: ClassTag[CC[A]]) + extends m.Builder[A, CC[A]] { + + //invariant: ruined => (collection == null) + var collection: CC[A] = null.asInstanceOf[CC[A]] + var ruined = false + + private[this] def ruin(): Unit = { + if (collection != null) that ++= collection + collection = null.asInstanceOf[CC[A]] + ruined = true + } + + override def ++=(elems: TraversableOnce[A]): this.type = + elems match { + case ct(ca) if collection == null && !ruined => { + collection = ca + this + } + case _ => { + ruin() + that ++= elems + this + } + } + + def +=(elem: A): this.type = { + ruin() + that += elem + this + } + + def clear(): Unit = { + collection = null.asInstanceOf[CC[A]] + if (ruined) that.clear() + ruined = false + } + + def result(): CC[A] = if (collection == null) that.result() else collection +} + +private[compat] object CompatImpl { + def simpleCBF[A, C](f: => m.Builder[A, C]): CanBuildFrom[Any, A, C] = + new CanBuildFrom[Any, A, C] { + def apply(from: Any): m.Builder[A, C] = apply() + def apply(): m.Builder[A, C] = f + } + + type ImmutableBitSetCC[X] = ({ type L[_] = i.BitSet })#L[X] + type MutableBitSetCC[X] = ({ type L[_] = m.BitSet })#L[X] +} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala b/scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala new file mode 100644 index 0000000000..b7ab3f8ac7 --- /dev/null +++ b/scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala @@ -0,0 +1,467 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat + +import scala.collection.generic._ +import scala.reflect.ClassTag +import scala.collection.{ + BitSet, + GenTraversable, + IterableLike, + IterableView, + MapLike, + TraversableLike, + immutable => i, + mutable => m +} +import scala.runtime.{Tuple2Zipped, Tuple3Zipped} +import scala.{collection => c} +import scala.language.higherKinds +import scala.language.implicitConversions + +/** The collection compatibility API */ +private[compat] trait PackageShared { + import CompatImpl._ + + /** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ + type Factory[-A, +C] = CanBuildFrom[Nothing, A, C] + + implicit class FactoryOps[-A, +C](private val factory: Factory[A, C]) { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: TraversableOnce[A]): C = (factory() ++= it).result() + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
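+ *
+ * A usage sketch, assuming the standard library `CanBuildFrom` for `List` resolves as the
+ * `Factory` here (illustrative only, not part of the upstream scaladoc):
+ * {{{
+ * import scala.collection.compat._
+ * val factory = implicitly[Factory[Int, List[Int]]]
+ * factory.fromSpecific(Iterator(1, 2, 3))   // List(1, 2, 3)
+ * (factory.newBuilder += 4).result()        // List(4)
+ * }}}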
*/ + def newBuilder: m.Builder[A, C] = factory() + } + + implicit def genericCompanionToCBF[A, CC[X] <: GenTraversable[X]]( + fact: GenericCompanion[CC]): CanBuildFrom[Any, A, CC[A]] = { + /* see https://github.com/scala/scala-collection-compat/issues/337 + `simpleCBF.apply` takes a by-name parameter and relies on + repeated references generating new builders, thus this expression + must be non-strict + */ + def builder: m.Builder[A, CC[A]] = fact match { + case c.Seq | i.Seq => new IdentityPreservingBuilder[A, i.Seq](i.Seq.newBuilder[A]) + case c.LinearSeq | i.LinearSeq => + new IdentityPreservingBuilder[A, i.LinearSeq](i.LinearSeq.newBuilder[A]) + case _ => fact.newBuilder[A] + } + simpleCBF(builder) + } + + implicit def sortedSetCompanionToCBF[A: Ordering, + CC[X] <: c.SortedSet[X] with c.SortedSetLike[X, CC[X]]]( + fact: SortedSetFactory[CC]): CanBuildFrom[Any, A, CC[A]] = + simpleCBF(fact.newBuilder[A]) + + implicit def arrayCompanionToCBF[A: ClassTag](fact: Array.type): CanBuildFrom[Any, A, Array[A]] = + simpleCBF(Array.newBuilder[A]) + + implicit def mapFactoryToCBF[K, V, CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( + fact: MapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = + simpleCBF(fact.newBuilder[K, V]) + + implicit def sortedMapFactoryToCBF[ + K: Ordering, + V, + CC[A, B] <: c.SortedMap[A, B] with c.SortedMapLike[A, B, CC[A, B]]]( + fact: SortedMapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = + simpleCBF(fact.newBuilder[K, V]) + + implicit def bitSetFactoryToCBF(fact: BitSetFactory[BitSet]): CanBuildFrom[Any, Int, BitSet] = + simpleCBF(fact.newBuilder) + + implicit def immutableBitSetFactoryToCBF( + fact: BitSetFactory[i.BitSet]): CanBuildFrom[Any, Int, ImmutableBitSetCC[Int]] = + simpleCBF(fact.newBuilder) + + implicit def mutableBitSetFactoryToCBF( + fact: BitSetFactory[m.BitSet]): CanBuildFrom[Any, Int, MutableBitSetCC[Int]] = + simpleCBF(fact.newBuilder) + + implicit class IterableFactoryExtensionMethods[CC[X] <: GenTraversable[X]]( + private val fact: GenericCompanion[CC]) { + def from[A](source: TraversableOnce[A]): CC[A] = + fact.apply(source.toSeq: _*) + } + + implicit class MapFactoryExtensionMethods[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( + private val fact: MapFactory[CC]) { + def from[K, V](source: TraversableOnce[(K, V)]): CC[K, V] = + fact.apply(source.toSeq: _*) + } + + implicit class BitSetFactoryExtensionMethods[ + C <: scala.collection.BitSet with scala.collection.BitSetLike[C]]( + private val fact: BitSetFactory[C]) { + def fromSpecific(source: TraversableOnce[Int]): C = + fact.apply(source.toSeq: _*) + } + + private[compat] def build[T, CC](builder: m.Builder[T, CC], source: TraversableOnce[T]): CC = { + builder ++= source + builder.result() + } + + implicit def toImmutableSortedMapExtensions( + fact: i.SortedMap.type): ImmutableSortedMapExtensions = + new ImmutableSortedMapExtensions(fact) + + implicit def toImmutableListMapExtensions(fact: i.ListMap.type): ImmutableListMapExtensions = + new ImmutableListMapExtensions(fact) + + implicit def toImmutableHashMapExtensions(fact: i.HashMap.type): ImmutableHashMapExtensions = + new ImmutableHashMapExtensions(fact) + + implicit def toImmutableTreeMapExtensions(fact: i.TreeMap.type): ImmutableTreeMapExtensions = + new ImmutableTreeMapExtensions(fact) + + implicit def toImmutableIntMapExtensions(fact: i.IntMap.type): ImmutableIntMapExtensions = + new ImmutableIntMapExtensions(fact) + + implicit def toImmutableLongMapExtensions(fact: i.LongMap.type): ImmutableLongMapExtensions = + new 
ImmutableLongMapExtensions(fact) + + implicit def toMutableLongMapExtensions(fact: m.LongMap.type): MutableLongMapExtensions = + new MutableLongMapExtensions(fact) + + implicit def toMutableHashMapExtensions(fact: m.HashMap.type): MutableHashMapExtensions = + new MutableHashMapExtensions(fact) + + implicit def toMutableListMapExtensions(fact: m.ListMap.type): MutableListMapExtensions = + new MutableListMapExtensions(fact) + + implicit def toMutableMapExtensions(fact: m.Map.type): MutableMapExtensions = + new MutableMapExtensions(fact) + + implicit def toStreamExtensionMethods[A](stream: Stream[A]): StreamExtensionMethods[A] = + new StreamExtensionMethods[A](stream) + + implicit def toSortedExtensionMethods[K, V <: Sorted[K, V]]( + fact: Sorted[K, V]): SortedExtensionMethods[K, V] = + new SortedExtensionMethods[K, V](fact) + + implicit def toIteratorExtensionMethods[A](self: Iterator[A]): IteratorExtensionMethods[A] = + new IteratorExtensionMethods[A](self) + + implicit def toTraversableExtensionMethods[A]( + self: Traversable[A]): TraversableExtensionMethods[A] = + new TraversableExtensionMethods[A](self) + + implicit def toTraversableOnceExtensionMethods[A]( + self: TraversableOnce[A]): TraversableOnceExtensionMethods[A] = + new TraversableOnceExtensionMethods[A](self) + + // This really belongs into scala.collection but there's already a package object + // in scala-library so we can't add to it + type IterableOnce[+X] = c.TraversableOnce[X] + val IterableOnce = c.TraversableOnce + + implicit def toMapExtensionMethods[K, V]( + self: scala.collection.Map[K, V]): MapExtensionMethods[K, V] = + new MapExtensionMethods[K, V](self) + + implicit def toMapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( + self: IterableView[(K, V), C]): MapViewExtensionMethods[K, V, C] = + new MapViewExtensionMethods[K, V, C](self) +} + +class ImmutableSortedMapExtensions(private val fact: i.SortedMap.type) extends AnyVal { + def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.SortedMap[K, V] = + build(i.SortedMap.newBuilder[K, V], source) +} + +class ImmutableListMapExtensions(private val fact: i.ListMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): i.ListMap[K, V] = + build(i.ListMap.newBuilder[K, V], source) +} + +class ImmutableHashMapExtensions(private val fact: i.HashMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): i.HashMap[K, V] = + build(i.HashMap.newBuilder[K, V], source) +} + +class ImmutableTreeMapExtensions(private val fact: i.TreeMap.type) extends AnyVal { + def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.TreeMap[K, V] = + build(i.TreeMap.newBuilder[K, V], source) +} + +class ImmutableIntMapExtensions(private val fact: i.IntMap.type) extends AnyVal { + def from[V](source: TraversableOnce[(Int, V)]): i.IntMap[V] = + build(i.IntMap.canBuildFrom[Int, V](), source) +} + +class ImmutableLongMapExtensions(private val fact: i.LongMap.type) extends AnyVal { + def from[V](source: TraversableOnce[(Long, V)]): i.LongMap[V] = + build(i.LongMap.canBuildFrom[Long, V](), source) +} + +class MutableLongMapExtensions(private val fact: m.LongMap.type) extends AnyVal { + def from[V](source: TraversableOnce[(Long, V)]): m.LongMap[V] = + build(m.LongMap.canBuildFrom[Long, V](), source) +} + +class MutableHashMapExtensions(private val fact: m.HashMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): m.HashMap[K, V] = + build(m.HashMap.canBuildFrom[K, V](), source) +} + +class 
MutableListMapExtensions(private val fact: m.ListMap.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): m.ListMap[K, V] = + build(m.ListMap.canBuildFrom[K, V](), source) +} + +class MutableMapExtensions(private val fact: m.Map.type) extends AnyVal { + def from[K, V](source: TraversableOnce[(K, V)]): m.Map[K, V] = + build(m.Map.canBuildFrom[K, V](), source) +} + +class StreamExtensionMethods[A](private val stream: Stream[A]) extends AnyVal { + def lazyAppendedAll(as: => TraversableOnce[A]): Stream[A] = stream.append(as) +} + +class SortedExtensionMethods[K, T <: Sorted[K, T]](private val fact: Sorted[K, T]) { + def rangeFrom(from: K): T = fact.from(from) + def rangeTo(to: K): T = fact.to(to) + def rangeUntil(until: K): T = fact.until(until) +} + +class IteratorExtensionMethods[A](private val self: c.Iterator[A]) extends AnyVal { + def sameElements[B >: A](that: c.TraversableOnce[B]): Boolean = { + self.sameElements(that.iterator) + } + def concat[B >: A](that: c.TraversableOnce[B]): c.TraversableOnce[B] = self ++ that + def tapEach[U](f: A => U): c.Iterator[A] = self.map(a => { f(a); a }) +} + +class TraversableOnceExtensionMethods[A](private val self: c.TraversableOnce[A]) extends AnyVal { + def iterator: Iterator[A] = self.toIterator + + def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.min(ord)) + } + + def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.max(ord)) + } + + def minByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.minBy(f)(cmp)) + } + + def maxByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { + if (self.isEmpty) + None + else + Some(self.maxBy(f)(cmp)) + } +} + +class TraversableExtensionMethods[A](private val self: c.Traversable[A]) extends AnyVal { + def iterableFactory: GenericCompanion[Traversable] = self.companion + + def sizeCompare(otherSize: Int): Int = SizeCompareImpl.sizeCompareInt(self)(otherSize) + def sizeIs: SizeCompareOps = new SizeCompareOps(self) + def sizeCompare(that: c.Traversable[_]): Int = SizeCompareImpl.sizeCompareColl(self)(that) + +} + +class SeqExtensionMethods[A](private val self: c.Seq[A]) extends AnyVal { + def lengthIs: SizeCompareOps = new SizeCompareOps(self) +} + +class SizeCompareOps private[compat] (private val it: c.Traversable[_]) extends AnyVal { + import SizeCompareImpl._ + + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = sizeCompareInt(it)(size) < 0 + + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = sizeCompareInt(it)(size) <= 0 + + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = sizeCompareInt(it)(size) == 0 + + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = sizeCompareInt(it)(size) != 0 + + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = sizeCompareInt(it)(size) >= 0 + + /** Tests if the size of the collection is greater than some value. 
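A small usage sketch of the size-comparison helpers defined above; it assumes the usual `import scala.collection.compat._` wildcard import brings these implicit conversions into scope at the call site, and uses standard 2.12 collections:

    import scala.collection.compat._              // provides sizeIs / sizeCompare / lengthIs

    val xs: Stream[Int] = Stream.from(1)          // effectively infinite
    xs.sizeIs > 2                                 // true; forces only a handful of elements
    List(1, 2, 3).sizeCompare(10) < 0             // true; the list has fewer than 10 elements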
*/ + @inline def >(size: Int): Boolean = sizeCompareInt(it)(size) > 0 +} + +private object SizeCompareImpl { + def sizeCompareInt(self: c.Traversable[_])(otherSize: Int): Int = + self match { + case self: c.SeqLike[_, _] => self.lengthCompare(otherSize) + case _ => + if (otherSize < 0) 1 + else { + var i = 0 + val it = self.toIterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + + // `IndexedSeq` is the only thing that we can safely say has a known size + def sizeCompareColl(self: c.Traversable[_])(that: c.Traversable[_]): Int = + that match { + case that: c.IndexedSeq[_] => sizeCompareInt(self)(that.length) + case _ => + self match { + case self: c.IndexedSeq[_] => + val res = sizeCompareInt(that)(self.length) + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + case _ => + val thisIt = self.toIterator + val thatIt = that.toIterator + while (thisIt.hasNext && thatIt.hasNext) { + thisIt.next() + thatIt.next() + } + java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) + } + } +} + +class TraversableLikeExtensionMethods[A, Repr](private val self: c.GenTraversableLike[A, Repr]) + extends AnyVal { + def tapEach[U](f: A => U)(implicit bf: CanBuildFrom[Repr, A, Repr]): Repr = + self.map(a => { f(a); a }) + + def partitionMap[A1, A2, That, Repr1, Repr2](f: A => Either[A1, A2])( + implicit bf1: CanBuildFrom[Repr, A1, Repr1], + bf2: CanBuildFrom[Repr, A2, Repr2] + ): (Repr1, Repr2) = { + val l = bf1() + val r = bf2() + self.foreach { x => + f(x) match { + case Left(x1) => l += x1 + case Right(x2) => r += x2 + } + } + (l.result(), r.result()) + } + + def groupMap[K, B, That](key: A => K)(f: A => B)( + implicit bf: CanBuildFrom[Repr, B, That]): Map[K, That] = { + val map = m.Map.empty[K, m.Builder[B, That]] + for (elem <- self) { + val k = key(elem) + val bldr = map.getOrElseUpdate(k, bf(self.repr)) + bldr += f(elem) + } + val res = Map.newBuilder[K, That] + for ((k, bldr) <- map) res += ((k, bldr.result())) + res.result() + } + + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): Map[K, B] = { + val map = m.Map.empty[K, B] + for (elem <- self) { + val k = key(elem) + val v = map.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + map.put(k, v) + } + map.toMap + } +} + +class TrulyTraversableLikeExtensionMethods[El1, Repr1]( + private val self: TraversableLike[El1, Repr1]) + extends AnyVal { + + def lazyZip[El2, Repr2, T2](t2: T2)( + implicit w2: T2 => IterableLike[El2, Repr2] + ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((self, t2)) +} + +class Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( + private val self: Tuple2Zipped[El1, Repr1, El2, Repr2]) { + + def lazyZip[El3, Repr3, T3](t3: T3)(implicit w3: T3 => IterableLike[El3, Repr3]) + : Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = + new Tuple3Zipped((self.colls._1, self.colls._2, t3)) +} + +class MapExtensionMethods[K, V](private val self: scala.collection.Map[K, V]) extends AnyVal { + + def foreachEntry[U](f: (K, V) => U): Unit = { + self.foreach { case (k, v) => f(k, v) } + } + +} + +class MapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( + private val self: IterableView[(K, V), C]) + extends AnyVal { + + def mapValues[W, That](f: V => W)( + implicit bf: CanBuildFrom[IterableView[(K, V), C], (K, W), That]): That = + self.map[(K, W), That] { case (k, v) => (k, f(v)) } + + // TODO: Replace the current implementation of `mapValues` 
with this + // after major version bump when bincompat can be broken. + // At the same time, remove `canBuildFromIterableViewMapLike` + /* + def mapValues[W](f: V => W): IterableView[(K, W), C] = + // the implementation of `self.map` also casts the result + self.map({ case (k, v) => (k, f(v)) }).asInstanceOf[IterableView[(K, W), C]] + */ + + def filterKeys(p: K => Boolean): IterableView[(K, V), C] = + self.filter { case (k, _) => p(k) } +} + +class ImmutableQueueExtensionMethods[A](private val self: i.Queue[A]) extends AnyVal { + def enqueueAll[B >: A](iter: c.Iterable[B]): i.Queue[B] = + self.enqueue(iter.to[i.Iterable]) +} + +class MutableQueueExtensionMethods[Element](private val self: m.Queue[Element]) extends AnyVal { + def enqueueAll(iter: c.Iterable[Element]): Unit = + self.enqueue(iter.toIndexedSeq: _*) +} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala b/scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala new file mode 100644 index 0000000000..e0da76ef4a --- /dev/null +++ b/scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala @@ -0,0 +1,267 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat.immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.AbstractSeq +import scala.collection.generic._ +import scala.collection.immutable.IndexedSeq +import scala.collection.mutable.{ArrayBuilder, Builder, WrappedArrayBuilder} +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +abstract class ArraySeq[+T] extends AbstractSeq[T] with IndexedSeq[T] { + + override protected[this] def thisCollection: ArraySeq[T] = this + + /** The tag of the element type */ + protected[this] def elemTag: ClassTag[T] + + /** The length of the array */ + def length: Int + + /** The element at given index */ + def apply(index: Int): T + + /** The underlying array */ + def unsafeArray: Array[T @uncheckedVariance] + + override def stringPrefix = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq unsafeWrapArray unsafeArray.clone() + + /** Creates new builder for this collection ==> move to subclasses + */ + override protected[this] def newBuilder: Builder[T, ArraySeq[T]] = + ArraySeq.newBuilder[T](elemTag) + +} + +/** A companion object used to create instances of `ArraySeq`. + */ +object ArraySeq { + // This is reused for all calls to empty. 
+ private val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T <: AnyRef]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def newBuilder[T](implicit elemTag: ClassTag[T]): Builder[T, ArraySeq[T]] = + new WrappedArrayBuilder[T](elemTag).mapResult(w => unsafeWrapArray(w.array)) + + def apply[T](elems: T*)(implicit elemTag: ClassTag[T]): ArraySeq[T] = { + val b = newBuilder[T] + b ++= elems + b.result() + } + + def unapplySeq[T](seq: ArraySeq[T]): Some[ArraySeq[T]] = Some(seq) + + /** + * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a + * `ClassCastException` at runtime. + */ + def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = + (x.asInstanceOf[Array[_]] match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] = + new CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] { + def apply(from: ArraySeq[_]): Builder[T, ArraySeq[T]] = + ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] + def apply: Builder[T, ArraySeq[T]] = + ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] + } + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] with Serializable { + lazy val elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + def apply(index: Int): T = unsafeArray(index) + def update(index: Int, elem: T) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofRef[_] => + arrayEquals(unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] with Serializable { + def elemTag = ClassTag.Byte + def length: Int = unsafeArray.length + def apply(index: Int): Byte = unsafeArray(index) + def update(index: Int, elem: Byte) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] with Serializable { + def elemTag = ClassTag.Short + def length: Int = unsafeArray.length + def apply(index: Int): Short = unsafeArray(index) + def 
update(index: Int, elem: Short) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] with Serializable { + def elemTag = ClassTag.Char + def length: Int = unsafeArray.length + def apply(index: Int): Char = unsafeArray(index) + def update(index: Int, elem: Char) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] with Serializable { + def elemTag = ClassTag.Int + def length: Int = unsafeArray.length + def apply(index: Int): Int = unsafeArray(index) + def update(index: Int, elem: Int) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] with Serializable { + def elemTag = ClassTag.Long + def length: Int = unsafeArray.length + def apply(index: Int): Long = unsafeArray(index) + def update(index: Int, elem: Long) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] with Serializable { + def elemTag = ClassTag.Float + def length: Int = unsafeArray.length + def apply(index: Int): Float = unsafeArray(index) + def update(index: Int, elem: Float) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] with Serializable { + def elemTag = ClassTag.Double + def length: Int = unsafeArray.length + def apply(index: Int): Double = unsafeArray(index) + def update(index: Int, elem: Double) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) + extends ArraySeq[Boolean] + with Serializable { + def elemTag = ClassTag.Boolean + def length: Int = unsafeArray.length + def apply(index: Int): Boolean = unsafeArray(index) + def update(index: Int, elem: Boolean) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(unsafeArray, 
that.unsafeArray) + case _ => super.equals(that) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] with Serializable { + def elemTag = ClassTag.Unit + def length: Int = unsafeArray.length + def apply(index: Int): Unit = unsafeArray(index) + def update(index: Int, elem: Unit) { unsafeArray(index) = elem } + override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + } + + private[this] def arrayEquals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = { + if (xs eq ys) + return true + if (xs.length != ys.length) + return false + + val len = xs.length + var i = 0 + while (i < len) { + if (xs(i) != ys(i)) + return false + i += 1 + } + true + } +} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala b/scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala new file mode 100644 index 0000000000..4b4dd58bf2 --- /dev/null +++ b/scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala @@ -0,0 +1,1537 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat.immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.annotation.tailrec +import scala.annotation.unchecked.{uncheckedVariance => uV} +import scala.collection.{ + AbstractIterator, + AbstractSeq, + GenIterable, + GenSeq, + GenTraversableOnce, + LinearSeqOptimized, + mutable +} +import scala.collection.generic.{ + CanBuildFrom, + FilterMonadic, + GenericCompanion, + GenericTraversableTemplate, + SeqFactory +} +import scala.collection.immutable.{LinearSeq, NumericRange} +import scala.collection.mutable.{ArrayBuffer, Builder, StringBuilder} +import scala.language.implicitConversions +import scala.language.higherKinds + +/** This class implements an immutable linked list that evaluates elements + * in order and only when needed. Here is an example: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * + * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 } + * + * fibs take 5 foreach println + * } + * + * // prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * }}} + * + * A `LazyList`, like the one in the example above, may be infinite in length. + * Aggregate methods, such as `count`, `sum`, `max` or `min` on such infinite length + * sequences will not terminate. Filtered infinite lazy lists are also effectively + * infinite in length. + * + * Elements of a `LazyList` are memoized; that is, the value of each element + * is computed only once. 
+ * To illustrate, we will alter body of the `fibs` value above and take some + * more values: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * + * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip( + * fibs.tail).map(n => { + * println("Adding %d and %d".format(n._1, n._2)) + * n._1 + n._2 + * }) + * + * fibs take 5 foreach println + * fibs take 6 foreach println + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 + * // 1 + * // Adding 1 and 1 + * // 2 + * // Adding 1 and 2 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 + * // 5 + * }}} + * + * There are a number of subtle points to the above example. + * + * - The definition of `fibs` is a `val` not a method. The memoization of the + * `LazyList` requires us to have somewhere to store the information and a `val` + * allows us to do that. + * + * - While the `LazyList` is actually being modified during access, this does not + * change the notion of its immutability. Once the values are memoized they do + * not change and values that have yet to be memoized still "exist", they + * simply haven't been realized yet. + * + * - One must be cautious of memoization; you can very quickly eat up large + * amounts of memory if you're not careful. The reason for this is that the + * memoization of the `LazyList` creates a structure much like + * [[scala.collection.immutable.List]]. So long as something is holding on to + * the head, the head holds on to the tail, and so it continues recursively. + * If, on the other hand, there is nothing holding on to the head (e.g. we used + * `def` to define the `LazyList`) then once it is no longer being used directly, + * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. These necessarily hold onto the head, since + * they are methods on `LazyList`, and a lazy list holds its own head. For + * computations of this sort where memoization is not desired, use + * `Iterator` when possible. + * + * {{{ + * // For example, let's build the natural numbers and do some silly iteration + * // over them. + * + * // We'll start with a silly iteration + * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { + * // Stop after 200,000 + * if (i < 200001) { + * if (i % 50000 == 0) println(s + i) + * loop(s, iter.next(), iter) + * } + * } + * + * // Our first LazyList definition will be a val definition + * val lazylist1: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * + * // Because lazylist1 is a val, everything that the iterator produces is held + * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * val it1 = lazylist1.toIterator + * loop("Iterator1: ", it1.next(), it1) + * + * // We can redefine this LazyList such that all we have is the Iterator left + * // and allow the LazyList to be garbage collected as required. Using a def + * // to provide the LazyList ensures that no val is holding onto the head as + * // is the case with lazylist1 + * def lazylist2: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * val it2 = lazylist2.toIterator + * loop("Iterator2: ", it2.next(), it2) + * + * // And, of course, we don't actually need a LazyList at all for such a simple + * // problem. 
There's no reason to use a LazyList if you don't actually need + * // one. + * val it3 = new Iterator[Int] { + * var i = -1 + * def hasNext = true + * def next(): Int = { i += 1; i } + * } + * loop("Iterator3: ", it3.next(), it3) + * }}} + * + * - The fact that `tail` works at all is of interest. In the definition of + * `fibs` we have an initial `(0, 1, LazyList(...))` so `tail` is deterministic. + * If we defined `fibs` such that only `0` were concretely known then the act + * of determining `tail` would require the evaluation of `tail` which would + * cause an infinite recursion and stack overflow. If we define a definition + * where the tail is not initially computable then we're going to have an + * infinite recursion: + * {{{ + * // The first time we try to access the tail we're going to need more + * // information which will require us to recurse, which will require us to + * // recurse, which... + * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * }}} + * + * The definition of `fibs` above creates a larger number of objects than + * necessary depending on how you might want to implement it. The following + * implementation provides a more "cost effective" implementation due to the + * fact that it has a more direct route to the numbers themselves: + * + * {{{ + * lazy val fib: LazyList[Int] = { + * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * loop(1, 1) + * } + * }}} + * + * @tparam A the type of the elements contained in this lazy list. + * + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] + * section on `LazyLists` for more information. + * @define Coll `LazyList` + * @define coll lazy list + * @define orderDependent + * @define orderDependentFold + * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, + * `appendedAll`, `lazyAppendedAll`) without forcing any of the + * intermediate resulting lazy lists may overflow the stack when + * the final result is forced. + * @define preservesLaziness This method preserves laziness; elements are only evaluated + * individually as needed. + * @define initiallyLazy This method does not evaluate anything until an operation is performed + * on the result (e.g. calling `head` or `tail`, or checking if it is empty). + * @define evaluatesAllElements This method evaluates all elements of the collection. 
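A minimal usage sketch of the class documented above, assuming only the package declared in this file and the factory methods defined in the companion object further below:

    import scala.collection.compat.immutable.LazyList

    val nats: LazyList[Int] = LazyList.from(0)    // 0, 1, 2, ... evaluated on demand
    nats.take(3).toList                           // List(0, 1, 2); only three elements are forced
    (1 #:: 2 #:: LazyList.empty).toList           // List(1, 2), built via the Deferrer-based #::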
+ */ +@SerialVersionUID(3L) +final class LazyList[+A] private (private[this] var lazyState: () => LazyList.State[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with GenericTraversableTemplate[A, LazyList] + with LinearSeqOptimized[A, LazyList[A]] + with Serializable { + import LazyList._ + + @volatile private[this] var stateEvaluated: Boolean = false + @inline private def stateDefined: Boolean = stateEvaluated + private[this] var midEvaluation = false + + private lazy val state: State[A] = { + // if it's already mid-evaluation, we're stuck in an infinite + // self-referential loop (also it's empty) + if (midEvaluation) { + throw new RuntimeException( + "self-referential LazyList or a derivation thereof has no more elements") + } + midEvaluation = true + val res = try lazyState() + finally midEvaluation = false + // if we set it to `true` before evaluating, we may infinite loop + // if something expects `state` to already be evaluated + stateEvaluated = true + lazyState = null // allow GC + res + } + + /** $preservesLaziness */ + def knownSize: Int = if (knownIsEmpty) 0 else -1 +// override def iterableFactory: SeqFactory[LazyList] = LazyList + + override def isEmpty: Boolean = state eq State.Empty + + override def head: A = state.head + + override def tail: LazyList[A] = state.tail + + @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) + @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) + + // It's an imperfect world, but at least we can bottle up the + // imperfection in a capsule. + @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] + @inline private def isLLBuilder[B, That](bf: CanBuildFrom[LazyList[A], B, That]) = + bf(repr).isInstanceOf[LazyList.LazyBuilder[_]] + + override def companion: GenericCompanion[LazyList] = LazyList + + /** Evaluates all undefined elements of the lazy list. + * + * This method detects cycles in lazy lists, and terminates after all + * elements of the cycle are evaluated. For example: + * + * {{{ + * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * ring.force + * ring.toString + * + * // prints + * // + * // LazyList(1, 2, 3, ...) + * }}} + * + * This method will *not* terminate for non-cyclic infinite-sized collections. + * + * @return this + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: LazyList[A] = this + if (!these.isEmpty) { + these = these.tail + } + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + /** @inheritdoc + * + * The iterator returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def iterator: Iterator[A] = + if (knownIsEmpty) Iterator.empty + else new LazyIterator(this) + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying LazyList as elements + * are consumed. + * @note This function will force the realization of the entire LazyList + * unless the `f` throws an exception. 
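For example (same import as in the sketch above): `foreach` consumes the list tail-recursively, so already-visited cells become collectable while iterating.

    LazyList.from(1).take(3).foreach(println)     // prints 1, 2, 3; evaluates three elements of the source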
+ */ + @tailrec + override def foreach[U](f: A => U): Unit = { + if (!isEmpty) { + f(head) + tail.foreach(f) + } + } + + /** LazyList specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override def foldLeft[B](z: B)(op: (B, A) => B): B = + if (isEmpty) z + else tail.foldLeft(op(z, head))(op) + + // State.Empty doesn't use the SerializationProxy + protected[this] def writeReplace(): AnyRef = + if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + + override def stringPrefix = "LazyList" + + /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. + * + * $preservesLaziness + * + * $appendStackSafety + * + * @param suffix The collection that gets appended to this lazy list + * @return The lazy list containing elements of this lazy list and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => GenTraversableOnce[B]): LazyList[B] = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case coll => stateFromIterator(coll.toIterator) + } else sCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def ++[B >: A, That](suffix: GenTraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList.from(suffix) + else lazyAppendedAll(suffix) + } else super.++(suffix)(bf) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + else lazyAppendedAll(Iterator.single(elem)) + } else super.:+(elem)(bf) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def equals(that: Any): Boolean = + if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B, That](z: B)(op: (B, A) => B)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + else newLL(scanLeftState(z)(op)) + } else super.scanLeft(z)(op)(bf) + + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + sCons( + z, + newLL { + if (isEmpty) State.Empty + else tail.scanLeftState(op(z, head))(op) + } + ) + + /** LazyList specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `f`. 
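For example, using the `apply` factory from the companion object below:

    LazyList(1, 2, 3, 4).foldLeft(0)(_ + _)       // 10, consuming the list incrementally
    LazyList(1, 2, 3, 4).reduceLeft(_ max _)      // 4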
+ */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: LazyList[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + val (left, right) = mapToLL(f).partition(_.isLeft) + (left.mapToLL(_.asInstanceOf[Left[A1, _]].a), right.mapToLL(_.asInstanceOf[Right[_, A2]].b)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def withFilter(p: A => Boolean): FilterMonadic[A, LazyList[A]] = + new LazyList.WithFilter(this, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + newLL(sCons(elem, this)) + } else super.+:(elem)(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def ++:[B >: A, That](prefix: TraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList.from(prefix) + else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) + } else super.++:(prefix)(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def ++:[B >: A, That](prefix: Traversable[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList.from(prefix) + else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) + } else super.++:(prefix)(bf) + + private def prependedAllToLL[B >: A](prefix: Traversable[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(prefix) + else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat(mapToLL(f): @inline) + else super.map(f)(bf) + + private def mapToLL[B](f: A => B): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else (mapImpl(f): @inline) + + /** @inheritdoc + * + * $preservesLaziness + */ + def tapEach[U](f: A => U): LazyList[A] = mapToLL { a => + f(a); a + } + + private def mapImpl[B](f: A => B): LazyList[B] = + newLL { + if (isEmpty) State.Empty + else sCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B, That](pf: PartialFunction[A, B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + 
if (knownIsEmpty) LazyList.empty + else LazyList.collectImpl(this, pf) + } else super.collect(pf)(bf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq LazyList.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. + */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatMap[B, That](f: A => GenTraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat(flatMapToLL(f): @inline) + else super.flatMap(f)(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A => GenTraversableOnce[B]): LazyList[B] = + flatMapToLL(asIterable) + + private def flatMapToLL[B](f: A => GenTraversableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[A1 >: A, B, That](that: GenIterable[B])( + implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = + if (isLLBuilder(bf)) asThat(zipToLL(that): @inline) + else super.zip(that)(bf) + + private def zipToLL[B](that: GenIterable[B]): LazyList[(A, B)] = + if (this.knownIsEmpty) LazyList.empty + else newLL(zipState(that.toIterator)) + + private def zipState[B](it: Iterator[B]): State[(A, B)] = + if (this.isEmpty || !it.hasNext) State.Empty + else sCons((head, it.next()), newLL { tail zipState it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex[A1 >: A, That]( + implicit bf: CanBuildFrom[LazyList[A], (A1, Int), That]): That = + if (isLLBuilder(bf)) asThat { + this zip LazyList.from(0) + } else super.zipWithIndex(bf) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)( + implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = + if (isLLBuilder(bf)) asThat { + if (this.knownIsEmpty) LazyList.continually(thisElem) zip that + else newLL(zipAllState(that.toIterator, thisElem, thatElem)) + } else super.zipAll(that, thisElem, thatElem)(bf) + + private def zipAllState[A1 >: A, B](it: Iterator[B], + thisElem: A1, + thatElem: B): State[(A1, B)] = { + if (it.hasNext) { + if (this.isEmpty) sCons((thisElem, it.next()), newLL { + LazyList.continually(thisElem) zipState it + }) + else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) + } else { + if (this.isEmpty) State.Empty + else sCons((this.head, thatElem), this.tail zipToLL LazyList.continually(thatElem)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + (mapToLL(asPair(_)._1), mapToLL(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3]( + implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + (mapToLL(asTriple(_)._1), mapToLL(asTriple(_)._2), 
mapToLL(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. + */ + override def drop(n: Int): LazyList[A] = + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else LazyList.dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. + */ + override def dropWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyList[A] = { + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else + newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + dropRightState(scout) + } + } + + private def dropRightState(scout: LazyList[_]): State[A] = + if (scout.isEmpty) State.Empty + else sCons(head, newLL(tail.dropRightState(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeImpl(n): @inline) + + private def takeImpl(n: Int): LazyList[A] = { + if (n <= 0) LazyList.empty + else + newLL { + if (isEmpty) State.Empty + else sCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeWhileImpl(p): @inline) + + private def takeWhileImpl(p: A => Boolean): LazyList[A] = + newLL { + if (isEmpty || !p(head)) State.Empty + else sCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyList[A] = + if (n <= 0 || knownIsEmpty) LazyList.empty + else LazyList.takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. 
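A quick sketch of the laziness notes above (same import as earlier):

    val far = LazyList.from(1).drop(1000000)      // returns immediately; nothing is evaluated yet
    LazyList.from(1).slice(2, 5).toList           // List(3, 4, 5); slice is take(until).drop(from)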
+ */ + override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + + // need contravariant type B to make the compiler happy - still returns LazyList[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(sCons(head, tl))) + + private def occCounts0[B](sq: collection.Seq[B]): mutable.Map[B, Int] = { + val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 } + for (y <- sq) occ(y) += 1 + occ + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def diff[B >: A](that: GenSeq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else { + val occ = occCounts0(that.seq) + LazyList.from { + iterator.filter { x => + val ox = occ(x) // Avoid multiple map lookups + if (ox == 0) true + else { + occ(x) = ox - 1 + false + } + } + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def intersect[B >: A](that: GenSeq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else { + val occ = occCounts0(that.seq) + LazyList.from { + iterator.filter { x => + val ox = occ(x) // Avoid multiple map lookups + if (ox > 0) { + occ(x) = ox - 1 + true + } else false + } + } + } + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyList[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
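For example, on an infinite list (same import as earlier):

    LazyList.from(1).grouped(3).next().toList                    // List(1, 2, 3)
    LazyList.from(1).sliding(2, 3).take(2).toList.map(_.toList)  // List(List(1, 2), List(4, 5))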
+ */ + override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") + slidingImpl(size = size, step = step) + } + + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + if (knownIsEmpty) Iterator.empty + else new SlidingIterator[A](this, size = size, step = step) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def padTo[B >: A, That](len: Int, elem: B)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat(padToLL(len, elem)) + else super.padTo(len, elem)(bf) + + private def padToLL[B >: A](len: Int, elem: B): LazyList[B] = + if (len <= 0) this + else + newLL { + if (isEmpty) LazyList.fill(len)(elem).state + else sCons(head, tail.padToLL(len - 1, elem)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def patch[B >: A, That](from: Int, other: GenSeq[B], replaced: Int)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (knownIsEmpty) LazyList from other + else patchImpl(from, other, replaced) + } else super.patch(from, other, replaced) + + private def patchImpl[B >: A](from: Int, other: GenSeq[B], replaced: Int): LazyList[B] = + newLL { + if (from <= 0) + stateFromIteratorConcatSuffix(other.toIterator)(LazyList.dropImpl(this, replaced).state) + else if (isEmpty) stateFromIterator(other.toIterator) + else sCons(head, tail.patchImpl(from - 1, other, replaced)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def updated[B >: A, That](index: Int, elem: B)( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + if (isLLBuilder(bf)) asThat { + if (index < 0) throw new IndexOutOfBoundsException(s"$index") + else updatedImpl(index, elem, index) + } else super.updated(index, elem) + + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + newLL { + if (index <= 0) sCons(elem, tail) + else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) + else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) + } + } + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. + * + * $evaluatesAllElements + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. 
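For example (this forces the whole list, per the note above):

    val sb = new StringBuilder
    LazyList(1, 2, 3).addString(sb, "[", ", ", "]")
    sb.toString                                   // "[1, 2, 3]"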
*/ + override def addString(sb: StringBuilder, + start: String, + sep: String, + end: String): StringBuilder = { + force + addStringNoForce(sb, start, sep, end) + sb + } + + private[this] def addStringNoForce(b: StringBuilder, + start: String, + sep: String, + end: String): StringBuilder = { + b.append(start) + if (!stateDefined) b.append("<not computed>") + else if (!isEmpty) { + b.append(head) + var cursor = this + @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + var scout = tail + @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty + if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { + cursor = scout + if (scoutNonEmpty) { + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) { + appendCursorElement() + cursor = cursor.tail + scout = scout.tail + if (scoutNonEmpty) scout = scout.tail + } + } + } + if (!scoutNonEmpty) { // Not a cycle, scout hit an end + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + // if cursor (eq scout) has state defined, it is empty; else unknown state + if (!cursor.stateDefined) b.append(sep).append("<not computed>") + } else { + @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) + // Cycle. + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. + var runner = this + var k = 0 + while (!same(runner, scout)) { + runner = runner.tail + scout = scout.tail + k += 1 + } + // Now runner and scout are at the beginning of the cycle. Advance + // cursor, adding to string, until it hits; then we'll have covered + // everything once. If cursor is already at beginning, we'd better + // advance one first unless runner didn't go anywhere (in which case + // we've already looped once). + if (same(cursor, scout) && (k > 0)) { + appendCursorElement() + cursor = cursor.tail + } + while (!same(cursor, scout)) { + appendCursorElement() + cursor = cursor.tail + } + b.append(sep).append("<cycle>") + } + } + b.append(end) + } + + /** $preservesLaziness + * + * @return a string representation of this collection. An undefined state is + * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * a cycle at the fourth element. + */ + override def toString(): String = + addStringNoForce(new StringBuilder(stringPrefix), "(", ", ", ")").toString + + /** @inheritdoc + * + * $preservesLaziness + */ + override def hasDefiniteSize: Boolean = { + if (!stateDefined) false + else if (isEmpty) true + else { + // Two-iterator trick (2x & 1x speed) for cycle detection.
+ var those = this + var these = tail + while (those ne these) { + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } + + override def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { + case that: LazyList[B] => this eqLL that + case _ => super.sameElements(that) + } + + @tailrec + private def eqLL[B >: A](that: LazyList[B]): Boolean = + (this eq that) || + (this.state eq that.state) || + (!this.isEmpty && + !that.isEmpty && + (this.head == that.head) && + (this.tail eqLL that.tail)) + + override def splitAt(n: Int): (LazyList[A], LazyList[A]) = (take(n), drop(n)) + + override def span(p: A => Boolean): (LazyList[A], LazyList[A]) = (takeWhile(p), dropWhile(p)) + + override def distinct: LazyList[A] = distinctBy(identity) + + def distinctBy[B](f: A => B): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else + LazyList.from { + val outer = iterator + new AbstractIterator[A] { + private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] var nextElementDefined: Boolean = false + private[this] var nextElement: A = _ + + def hasNext: Boolean = + nextElementDefined || (outer.hasNext && { + val a = outer.next() + if (traversedValues.add(f(a))) { + nextElement = a + nextElementDefined = true + true + } else hasNext + }) + + def next(): A = + if (hasNext) { + nextElementDefined = false + nextElement + } else { + Iterator.empty.next() + } + } + } + + override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = + if (cbf().isInstanceOf[LazyList.LazyBuilder[_]]) asThat(this) + else super.to(cbf) + + override def init: LazyList[A] = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyList` + */ +@SerialVersionUID(3L) +object LazyList extends SeqFactory[LazyList] { + // Eagerly evaluate cached empty instance + private[this] val _empty = newLL(State.Empty).force + + private sealed trait State[+A] extends Serializable { + def head: A + def tail: LazyList[A] + } + + private object State { + @SerialVersionUID(3L) + object Empty extends State[Nothing] { + def head: Nothing = throw new NoSuchElementException("head of empty lazy list") + def tail: LazyList[Nothing] = + throw new UnsupportedOperationException("tail of empty lazy list") + } + + @SerialVersionUID(3L) + final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + } + + /** Creates a new LazyList. */ + @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + + /** Creates a new State.Cons. */ + @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + + private val pfMarker: AnyRef = new AnyRef + private val anyToMarker: Any => Any = _ => pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyList`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * can continue their execution where they left off. 
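For contrast, a hypothetical head-retaining variant (illustrative only, not part of this file) is sketched below as it would look inside this companion object; capturing the `ll` parameter in the closure keeps the entire prefix reachable until the result is forced, which is exactly what the `var restRef` rebinding in the methods that follow avoids:

    // hypothetical leaky variant: `ll` is captured by the closure passed to newLL,
    // so every cell from the original head stays strongly reachable
    private def dropLeaky[A](ll: LazyList[A], n: Int): LazyList[A] =
      newLL {
        var rest = ll
        var i    = n
        while (i > 0 && !rest.isEmpty) { rest = rest.tail; i -= 1 }
        rest.state
      }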
+ */ + + private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty + } + } + + private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + val marker = pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while ((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) State.Empty + else sCons(res, collectImpl(rest, pf)) + } + } + + private def flatMapImpl[A, B](ll: LazyList[A], f: A => GenTraversableOnce[B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var it: Iterator[B] = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).toIterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) + } else State.Empty + } + } + + private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest.state + } + } + + private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest.state + } + } + + private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // 
scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while (!scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest.state + } + } + + /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + */ + object cons { + + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + } + + implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + + /** Construct a LazyList consisting of a given first element followed by elements + * from another LazyList. + */ + def #::[B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) + + /** Construct a LazyList consisting of the concatenation of the given LazyList and + * another LazyList. + */ + def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: GenTraversableOnce[A]): LazyList[A] = coll match { + case lazyList: LazyList[A] => lazyList + case _ => newLL(stateFromIterator(coll.toIterator)) + } + + override def apply[A](elems: A*): LazyList[A] = from(elems) + + override def empty[A]: LazyList[A] = _empty + + /** Creates a State from an Iterator, with another State appended after the Iterator + * is empty. + */ + private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) + else suffix + + /** Creates a State from an IterableOnce. */ + private def stateFromIterator[A](it: Iterator[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) + else State.Empty + + def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + newLL(concatIterator(xss.toIterator)) + + private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + if (!it.hasNext) State.Empty + else stateFromIteratorConcatSuffix(it.next().toIterator)(concatIterator(it)) + + /** An infinite LazyList that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyList + * @param f the function that's repeatedly applied + * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyList[A] = + newLL { + val head = start + sCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyList starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the LazyList + * @param step the increment value of the LazyList + * @return the LazyList starting at value `start`. 
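+ * For example, `from(1, 2)` starts with the elements `1, 3, 5, ...`.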
+ */ + def from(start: Int, step: Int): LazyList[Int] = + newLL(sCons(start, from(start + step, step))) + + /** + * Create an infinite LazyList starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int): LazyList[Int] = from(start, 1) + + /** + * Create an infinite LazyList containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting LazyList + * @return the LazyList containing an infinite number of elem + */ + def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyList[A] = + if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { + def at(index: Int): LazyList[A] = + if (index < n) newLL(sCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + newLL { + f(init) match { + case Some((elem, state)) => sCons(elem, unfold(state)(f)) + case None => State.Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. + */ + def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) + extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + extends AbstractIterator[LazyList[A]] { + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyList[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyList] (lazyList: LazyList[A], p: A => Boolean) + extends FilterMonadic[A, LazyList[A]] { + private[this] val filtered = lazyList.filter(p) + def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = + filtered.map(f) + def flatMap[B, That](f: A => GenTraversableOnce[B])( + implicit bf: CanBuildFrom[LazyList[A], B, That]): That = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): FilterMonadic[A, LazyList[A]] = new WithFilter(filtered, q) + } + + class LazyListCanBuildFrom[A] extends GenericCanBuildFrom[A] + + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LazyList[A]] = new LazyListCanBuildFrom[A] + + private final class LazyBuilder[A] extends Builder[A, LazyList[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyList[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyList[A] = { + next init State.Empty + list + } + + override def +=(elem: A): this.type = { + val deferred = new 
DeferredState[A] + next init sCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + override def ++=(xs: TraversableOnce[A]): this.type = { + val deferred = new DeferredState[A] + next init stateFromIteratorConcatSuffix(xs.toIterator)(deferred.eval()) + next = deferred + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + private[this] var _state: () => State[A] = _ + + def eval(): State[A] = { + val state = _state + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => State[A]): Unit = { + if (_state != null) throw new IllegalStateException("already initialized") + _state = () => state + } + } + } + + private case object SerializeEnd + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. + */ + @SerialVersionUID(3L) + final class SerializationProxy[A](@transient protected var coll: LazyList[A]) + extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new ArrayBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyList[A]] + coll = tail.prependedAllToLL(init) + } + + private[this] def readResolve(): Any = coll + } + + override def iterate[A](start: A, len: Int)(f: A => A): LazyList[A] = + iterate(start)(f).take(len) + + override def range[A: Integral](start: A, end: A): LazyList[A] = + from(NumericRange(start, end, implicitly[Integral[A]].one)) + + override def range[A: Integral](start: A, end: A, step: A): LazyList[A] = + from(NumericRange(start, end, step)) +} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/package.scala b/scalalib/overrides-2.12/scala/collection/compat/package.scala new file mode 100644 index 0000000000..cd3f627625 --- /dev/null +++ b/scalalib/overrides-2.12/scala/collection/compat/package.scala @@ -0,0 +1,74 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.collection.generic.{CanBuildFrom, GenericOrderedCompanion, IsTraversableLike} +import scala.{collection => c} +import scala.runtime.Tuple2Zipped +import scala.collection.{immutable => i, mutable => m} +import scala.language.higherKinds + +package object compat extends compat.PackageShared { + implicit class MutableTreeMapExtensions2(private val fact: m.TreeMap.type) extends AnyVal { + def from[K: Ordering, V](source: TraversableOnce[(K, V)]): m.TreeMap[K, V] = + build(m.TreeMap.newBuilder[K, V], source) + } + + implicit class MutableSortedMapExtensions(private val fact: m.SortedMap.type) extends AnyVal { + def from[K: Ordering, V](source: TraversableOnce[(K, V)]): m.SortedMap[K, V] = + build(m.SortedMap.newBuilder[K, V], source) + } + + implicit def genericOrderedCompanionToCBF[A, CC[X] <: Traversable[X]]( + fact: GenericOrderedCompanion[CC])( + implicit ordering: Ordering[A]): CanBuildFrom[Any, A, CC[A]] = + CompatImpl.simpleCBF(fact.newBuilder[A]) + + // CanBuildFrom instances for `IterableView[(K, V), Map[K, V]]` that preserve + // the strict type of the view to be `Map` instead of `Iterable` + // Instances produced by this method are used to chain `filterKeys` after `mapValues` + implicit def canBuildFromIterableViewMapLike[K, V, L, W, CC[X, Y] <: Map[X, Y]] + : CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] = + new CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] { + // `CanBuildFrom` parameters are used as type constraints, they are not used + // at run-time, hence the dummy builder implementations + def apply(from: IterableView[(K, V), CC[K, V]]) = new TraversableView.NoBuilder + def apply() = new TraversableView.NoBuilder + } + + implicit def toTraversableLikeExtensionMethods[Repr](self: Repr)( + implicit traversable: IsTraversableLike[Repr]) + : TraversableLikeExtensionMethods[traversable.A, Repr] = + new TraversableLikeExtensionMethods[traversable.A, Repr](traversable.conversion(self)) + + implicit def toSeqExtensionMethods[A](self: c.Seq[A]): SeqExtensionMethods[A] = + new SeqExtensionMethods[A](self) + + implicit def toTrulyTraversableLikeExtensionMethods[T1, El1, Repr1](self: T1)( + implicit w1: T1 => TraversableLike[El1, Repr1] + ): TrulyTraversableLikeExtensionMethods[El1, Repr1] = + new TrulyTraversableLikeExtensionMethods[El1, Repr1](w1(self)) + + implicit def toTuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( + self: Tuple2Zipped[El1, Repr1, El2, Repr2]) + : Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2] = + new Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2](self) + + implicit def toImmutableQueueExtensionMethods[A]( + self: i.Queue[A]): ImmutableQueueExtensionMethods[A] = + new ImmutableQueueExtensionMethods[A](self) + + implicit def toMutableQueueExtensionMethods[A]( + self: m.Queue[A]): MutableQueueExtensionMethods[A] = + new MutableQueueExtensionMethods[A](self) +} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala b/scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala new file mode 100644 index 0000000000..abd9071ae1 --- /dev/null +++ b/scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala @@ -0,0 +1,5 @@ +package scala.jdk + +import scala.collection.convert.{DecorateAsJava, DecorateAsScala} + +object CollectionConverters extends DecorateAsJava with DecorateAsScala \ No newline at end of file diff --git 
a/scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala b/scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala new file mode 100644 index 0000000000..d217b45551 --- /dev/null +++ b/scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.compat + +package object immutable { + type ArraySeq[+T] = scala.collection.immutable.ArraySeq[T] + val ArraySeq = scala.collection.immutable.ArraySeq + + type LazyList[+T] = scala.collection.immutable.LazyList[T] + val LazyList = scala.collection.immutable.LazyList +} \ No newline at end of file diff --git a/scalalib/overrides-2.13/scala/collection/compat/package.scala b/scalalib/overrides-2.13/scala/collection/compat/package.scala index 2c42da993e..cbebca0331 100644 --- a/scalalib/overrides-2.13/scala/collection/compat/package.scala +++ b/scalalib/overrides-2.13/scala/collection/compat/package.scala @@ -1,6 +1,24 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.collection package object compat { - type ScalaStream[+T] = scala.collection.immutable.LazyList[T] - val ScalaStream = scala.collection.immutable.LazyList + type Factory[-A, +C] = scala.collection.Factory[A, C] + val Factory = scala.collection.Factory + + type BuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] + val BuildFrom = scala.collection.BuildFrom + + type IterableOnce[+X] = scala.collection.IterableOnce[X] + val IterableOnce = scala.collection.IterableOnce } \ No newline at end of file diff --git a/util/src/main/scala/scala/scalanative/CrossCompileCompat.scala b/util/src/main/scala/scala/scalanative/CrossCompileCompat.scala deleted file mode 100644 index 6298c92f37..0000000000 --- a/util/src/main/scala/scala/scalanative/CrossCompileCompat.scala +++ /dev/null @@ -1,15 +0,0 @@ -package scala.scalanative - -private[scalanative] object CrossCompileCompat { - val Converters = { - import Compat._ - { - import scala.collection.parallel._ - CollectionConverters - } - } - - object Compat { - object CollectionConverters - } -} From b46c52239cbfc0cde485b22030f483605a2cea3b Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 14:23:15 +0200 Subject: [PATCH 16/75] fix diferrent resolving of time import in TimesSuite --- .../src/test/scala/scala/scalanative/posix/TimeSuite.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala b/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala index 654680673e..db77e6dce1 100644 --- a/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala +++ b/unit-tests/src/test/scala/scala/scalanative/posix/TimeSuite.scala @@ -7,7 +7,8 @@ import timeOps.tmOps object TimeSuite extends tests.Suite { tzset() - val now_time_t: time_t = time(null) + //In 2.11/2.12 time was resolved to posix.time.type, in 2.13 to posix.time.time method + val now_time_t: time_t = 
scala.scalanative.posix.time.time(null) val epoch: time_t = 0L test("asctime() with a given known state should match its representation") { From 07cdfcc0591339ff10297e3298c76ebc9f552ec0 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 14:29:16 +0200 Subject: [PATCH 17/75] Add basic scalalib overrides for 2.13, remove redundant overrides in previous versions --- scalalib/overrides-2.11/scala/Predef.scala | 518 -------------- scalalib/overrides-2.12/scala/Predef.scala | 646 ----------------- scalalib/overrides-2.13/scala/Array.scala | 659 ++++++++++++++++++ scalalib/overrides-2.13/scala/Symbol.scala | 51 ++ .../scala/concurrent/ExecutionContext.scala | 227 ++++++ .../scala/reflect/ClassTag.scala | 139 ++++ .../scala/reflect/Manifest.scala | 457 ++++++++++++ .../scala/runtime/ScalaRunTime.scala | 298 ++++++++ .../scala/runtime/Statics.scala | 97 +++ scalalib/overrides/scala/package.scala | 136 ---- 10 files changed, 1928 insertions(+), 1300 deletions(-) delete mode 100644 scalalib/overrides-2.11/scala/Predef.scala delete mode 100644 scalalib/overrides-2.12/scala/Predef.scala create mode 100644 scalalib/overrides-2.13/scala/Array.scala create mode 100644 scalalib/overrides-2.13/scala/Symbol.scala create mode 100644 scalalib/overrides-2.13/scala/concurrent/ExecutionContext.scala create mode 100644 scalalib/overrides-2.13/scala/reflect/ClassTag.scala create mode 100644 scalalib/overrides-2.13/scala/reflect/Manifest.scala create mode 100644 scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala create mode 100644 scalalib/overrides-2.13/scala/runtime/Statics.scala delete mode 100644 scalalib/overrides/scala/package.scala diff --git a/scalalib/overrides-2.11/scala/Predef.scala b/scalalib/overrides-2.11/scala/Predef.scala deleted file mode 100644 index a8a831a75a..0000000000 --- a/scalalib/overrides-2.11/scala/Predef.scala +++ /dev/null @@ -1,518 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.collection.{ mutable, immutable, generic } -import immutable.StringOps -import mutable.ArrayOps -import generic.CanBuildFrom -import scala.annotation.{ elidable, implicitNotFound } -import scala.annotation.elidable.ASSERTION -import scala.language.{implicitConversions, existentials} -import scala.io.StdIn -import scala.scalanative.annotation.alwaysinline - -/** The `Predef` object provides definitions that are accessible in all Scala - * compilation units without explicit qualification. - * - * === Commonly Used Types === - * Predef provides type aliases for types which are commonly used, such as - * the immutable collection types [[scala.collection.immutable.Map]], - * [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]] - * constructors ([[scala.collection.immutable.::]] and - * [[scala.collection.immutable.Nil]]). - * - * === Console I/O === - * Predef provides a number of simple functions for console I/O, such as - * `print`, `println`, `readLine`, `readInt`, etc. These functions are all - * aliases of the functions provided by [[scala.Console]]. - * - * === Assertions === - * - * A set of `assert` functions are provided for use as a way to document - * and dynamically check invariants in code. 
Invocations of `assert` can be elided - * at compile time by providing the command line option `-Xdisable-assertions`, - * which raises `-Xelide-below` above `elidable.ASSERTION`, to the `scalac` command. - * - * Variants of `assert` intended for use with static analysis tools are also - * provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are - * intended for use as a means of design-by-contract style specification - * of pre- and post-conditions on functions, with the intention that these - * specifications could be consumed by a static analysis tool. For instance, - * - * {{{ - * def addNaturals(nats: List[Int]): Int = { - * require(nats forall (_ >= 0), "List contains negative numbers") - * nats.foldLeft(0)(_ + _) - * } ensuring(_ >= 0) - * }}} - * - * The declaration of `addNaturals` states that the list of integers passed should - * only contain natural numbers (i.e. non-negative), and that the result returned - * will also be natural. `require` is distinct from `assert` in that if the - * condition fails, then the caller of the function is to blame rather than a - * logical error having been made within `addNaturals` itself. `ensuring` is a - * form of `assert` that declares the guarantee the function is providing with - * regards to its return value. - * - * === Implicit Conversions === - * A number of commonly applied implicit conversions are also defined here, and - * in the parent type [[scala.LowPriorityImplicits]]. Implicit conversions - * are provided for the "widening" of numeric values, for instance, converting a - * Short value to a Long value as required, and to add additional higher-order - * functions to Array values. These are described in more detail in the documentation of [[scala.Array]]. - */ -object Predef extends LowPriorityImplicits with DeprecatedPredef { - /** - * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to - * the class literal `T.class` in Java. - * - * @example {{{ - * val listClass = classOf[List[_]] - * // listClass is java.lang.Class[List[_]] = class scala.collection.immutable.List - * - * val mapIntString = classOf[Map[Int,String]] - * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map - * }}} - */ - def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. - - /** The `String` type in Scala has methods that come either from the underlying - * Java String (see the documentation corresponding to your Java version, for - * example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or - * are added implicitly through [[scala.collection.immutable.StringOps]]. - */ - type String = java.lang.String - type Class[T] = java.lang.Class[T] - - // miscellaneous ----------------------------------------------------- - scala.`package` // to force scala package object to be seen. - scala.collection.immutable.List // to force Nil, :: to be seen. 
- - type Function[-A, +B] = Function1[A, B] - - type Map[A, +B] = immutable.Map[A, B] - type Set[A] = immutable.Set[A] - @inline def Map = immutable.Map - @inline def Set = immutable.Set - - // Manifest types, companions, and incantations for summoning - @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") - type ClassManifest[T] = scala.reflect.ClassManifest[T] - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - type OptManifest[T] = scala.reflect.OptManifest[T] - @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - type Manifest[T] = scala.reflect.Manifest[T] - @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") - @inline def ClassManifest = scala.reflect.ClassManifest - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - @inline def Manifest = scala.reflect.Manifest - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - @inline def NoManifest = scala.reflect.NoManifest - - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") - @inline def manifest[T](implicit m: Manifest[T]) = m - @deprecated("Use scala.reflect.classTag[T] instead", "2.10.0") - @inline def classManifest[T](implicit m: ClassManifest[T]) = m - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - @inline def optManifest[T](implicit m: OptManifest[T]) = m - - // Minor variations on identity functions - @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version - @alwaysinline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` - @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements - - // errors and asserts ------------------------------------------------- - - // !!! Remove this when possible - ideally for 2.11. - // We are stuck with it a while longer because sbt's compiler interface - // still calls it as of 0.12.2. 
- @deprecated("Use `sys.error(message)` instead", "2.9.0") - @inline def error(message: String): Nothing = sys.error(message) - - /** Tests an expression, throwing an `AssertionError` if false. - * Calls to this method will not be generated if `-Xelide-below` - * is at least `ASSERTION`. - * - * @see elidable - * @param assertion the expression to test - */ - @elidable(ASSERTION) - @inline def assert(assertion: Boolean) { - if (!assertion) - throw new java.lang.AssertionError("assertion failed") - } - - /** Tests an expression, throwing an `AssertionError` if false. - * Calls to this method will not be generated if `-Xelide-below` - * is at least `ASSERTION`. - * - * @see elidable - * @param assertion the expression to test - * @param message a String to include in the failure message - */ - @elidable(ASSERTION) @inline - final def assert(assertion: Boolean, message: => Any) { - if (!assertion) - throw new java.lang.AssertionError("assertion failed: "+ message) - } - - /** Tests an expression, throwing an `AssertionError` if false. - * This method differs from assert only in the intent expressed: - * assert contains a predicate which needs to be proven, while - * assume contains an axiom for a static checker. Calls to this method - * will not be generated if `-Xelide-below` is at least `ASSERTION`. - * - * @see elidable - * @param assumption the expression to test - */ - @elidable(ASSERTION) - @inline def assume(assumption: Boolean) { - if (!assumption) - throw new java.lang.AssertionError("assumption failed") - } - - /** Tests an expression, throwing an `AssertionError` if false. - * This method differs from assert only in the intent expressed: - * assert contains a predicate which needs to be proven, while - * assume contains an axiom for a static checker. Calls to this method - * will not be generated if `-Xelide-below` is at least `ASSERTION`. - * - * @see elidable - * @param assumption the expression to test - * @param message a String to include in the failure message - */ - @elidable(ASSERTION) @inline - final def assume(assumption: Boolean, message: => Any) { - if (!assumption) - throw new java.lang.AssertionError("assumption failed: "+ message) - } - - /** Tests an expression, throwing an `IllegalArgumentException` if false. - * This method is similar to `assert`, but blames the caller of the method - * for violating the condition. - * - * @param requirement the expression to test - */ - @inline def require(requirement: Boolean) { - if (!requirement) - throw new IllegalArgumentException("requirement failed") - } - - /** Tests an expression, throwing an `IllegalArgumentException` if false. - * This method is similar to `assert`, but blames the caller of the method - * for violating the condition. - * - * @param requirement the expression to test - * @param message a String to include in the failure message - */ - @inline final def require(requirement: Boolean, message: => Any) { - if (!requirement) - throw new IllegalArgumentException("requirement failed: "+ message) - } - - /** `???` can be used for marking methods that remain to be implemented. - * @throws NotImplementedError - */ - @inline def ??? 
: Nothing = throw new NotImplementedError - - // tupling ------------------------------------------------------------ - - @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0") - type Pair[+A, +B] = Tuple2[A, B] - @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0") - object Pair { - def apply[A, B](x: A, y: B) = Tuple2(x, y) - def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) - } - - @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0") - type Triple[+A, +B, +C] = Tuple3[A, B, C] - @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0") - object Triple { - def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) - def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) - } - - // implicit classes ----------------------------------------------------- - - implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { - @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y) - def →[B](y: B): Tuple2[A, B] = ->(y) - } - - implicit final class Ensuring[A](private val self: A) extends AnyVal { - def ensuring(cond: Boolean): A = { assert(cond); self } - def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self } - def ensuring(cond: A => Boolean): A = { assert(cond(self)); self } - def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self } - } - - implicit final class StringFormat[A](private val self: A) extends AnyVal { - /** Returns string formatted according to given `format` string. - * Format strings are as for `String.format` - * (@see java.lang.String.format). - */ - @inline def formatted(fmtstr: String): String = fmtstr format self - } - - // TODO: remove, only needed for binary compatibility of 2.11.0-RC1 with 2.11.0-M8 - // note that `private[scala]` becomes `public` in bytecode - private[scala] final class StringAdd[A](private val self: A) extends AnyVal { - def +(other: String): String = String.valueOf(self) + other - } - private[scala] def StringAdd(x: Any): Any = new StringAdd(x) - - // SI-8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit - implicit final class any2stringadd[A](private val self: A) extends AnyVal { - def +(other: String): String = String.valueOf(self) + other - } - - implicit final class RichException(private val self: Throwable) extends AnyVal { - import scala.compat.Platform.EOL - @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) - } - - implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { - def length: Int = __sequenceOfChars.length - def charAt(index: Int): Char = __sequenceOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) - override def toString = __sequenceOfChars mkString "" - } - - implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence { - def length: Int = __arrayOfChars.length - def charAt(index: Int): Char = __arrayOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) - override def toString = __arrayOfChars mkString "" - } - - private object StringCanBuildFromInstance extends CanBuildFrom[String, Char, String] { - def apply(from: String) = apply() - def apply() = mutable.StringBuilder.newBuilder - } - - @inline implicit def StringCanBuildFrom: 
CanBuildFrom[String, Char, String] = StringCanBuildFromInstance - - @inline implicit def augmentString(x: String): StringOps = new StringOps(x) - @inline implicit def unaugmentString(x: StringOps): String = x.repr - - // printing ----------------------------------------------------------- - - @inline def print(x: Any) = Console.print(x) - @inline def println() = Console.println() - @inline def println(x: Any) = Console.println(x) - @inline def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) - - // views -------------------------------------------------------------- - - @inline implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x) - @inline implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x) - - @inline implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { - case x: Array[AnyRef] => refArrayOps[AnyRef](x) - case x: Array[Boolean] => booleanArrayOps(x) - case x: Array[Byte] => byteArrayOps(x) - case x: Array[Char] => charArrayOps(x) - case x: Array[Double] => doubleArrayOps(x) - case x: Array[Float] => floatArrayOps(x) - case x: Array[Int] => intArrayOps(x) - case x: Array[Long] => longArrayOps(x) - case x: Array[Short] => shortArrayOps(x) - case x: Array[Unit] => unitArrayOps(x) - case null => null - }).asInstanceOf[ArrayOps[T]] - - @inline implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps.ofBoolean(xs) - @inline implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs) - @inline implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps.ofChar(xs) - @inline implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs) - @inline implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs) - @inline implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs) - @inline implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs) - @inline implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs) - @inline implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs) - @inline implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs) - - // "Autoboxing" and "Autounboxing" --------------------------------------------------- - - @inline implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x) - @inline implicit def short2Short(x: Short) = java.lang.Short.valueOf(x) - @inline implicit def char2Character(x: Char) = java.lang.Character.valueOf(x) - @inline implicit def int2Integer(x: Int) = java.lang.Integer.valueOf(x) - @inline implicit def long2Long(x: Long) = java.lang.Long.valueOf(x) - @inline implicit def float2Float(x: Float) = java.lang.Float.valueOf(x) - @inline implicit def double2Double(x: Double) = java.lang.Double.valueOf(x) - @inline implicit def boolean2Boolean(x: Boolean) = java.lang.Boolean.valueOf(x) - - @inline implicit def Byte2byte(x: java.lang.Byte): Byte = x.byteValue - @inline implicit def Short2short(x: java.lang.Short): Short = x.shortValue - @inline implicit def Character2char(x: java.lang.Character): Char = x.charValue - @inline implicit def Integer2int(x: java.lang.Integer): Int = x.intValue - @inline implicit def Long2long(x: java.lang.Long): Long = x.longValue - @inline implicit def Float2float(x: java.lang.Float): Float = x.floatValue - @inline implicit def Double2double(x: 
java.lang.Double): Double = x.doubleValue - @inline implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue - - // Type Constraints -------------------------------------------------------------- - - /** - * An instance of `A <:< B` witnesses that `A` is a subtype of `B`. - * Requiring an implicit argument of the type `A <:< B` encodes - * the generalized constraint `A <: B`. - * - * @note we need a new type constructor `<:<` and evidence `conforms`, - * as reusing `Function1` and `identity` leads to ambiguities in - * case of type errors (`any2stringadd` is inferred) - * - * To constrain any abstract type T that's in scope in a method's - * argument list (not just the method's own type parameters) simply - * add an implicit argument of type `T <:< U`, where `U` is the required - * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the - * required lower bound. - * - * In part contributed by Jason Zaugg. - */ - @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") - sealed abstract class <:<[-From, +To] extends (From => To) with Serializable - private[this] lazy val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x } - // The dollar prefix is to dodge accidental shadowing of this method - // by a user-defined method of the same name (SI-7788). - // The collections rely on this method. - @inline implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] - - @deprecated("Use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") - def conforms[A]: A <:< A = $conforms[A] - - /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. - * - * @see `<:<` for expressing subtyping constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") - sealed abstract class =:=[From, To] extends (From => To) with Serializable - private[this] lazy val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x } - object =:= { - @inline implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A] - } - - /** A type for which there is always an implicit value. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - class DummyImplicit - - object DummyImplicit { - - /** An implicit value yielding a `DummyImplicit`. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - @inline implicit def dummyImplicit: DummyImplicit = new DummyImplicit - } -} - -private[scala] trait DeprecatedPredef { - self: Predef.type => - - // Deprecated stubs for any who may have been calling these methods directly. 
- @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) - @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) - @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) - @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) - @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) - @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) - - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) -} - -/** The `LowPriorityImplicits` class provides implicit values that -* are valid in all Scala compilation units without explicit qualification, -* but that are partially overridden by higher-priority conversions in object -* `Predef`. -* -* @author Martin Odersky -* @since 2.8 -*/ -// SI-7335 Parents of Predef are defined in the same compilation unit to avoid -// cyclic reference errors compiling the standard library *without* a previously -// compiled copy on the classpath. -private[scala] abstract class LowPriorityImplicits { - import mutable.WrappedArray - import immutable.WrappedString - - /** We prefer the java.lang.* boxed types to these wrappers in - * any potential conflicts. Conflicts do exist because the wrappers - * need to implement ScalaNumber in order to have a symmetric equals - * method, but that implies implementing java.lang.Number as well. - * - * Note - these are inlined because they are value classes, but - * the call to xxxWrapper is not eliminated even though it does nothing. - * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ - * because maybe loading Predef has side effects! 
- */ - @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) - @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) - @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) - @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) - @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) - @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) - @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) - @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) - - @inline implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = - if (xs eq null) null - else WrappedArray.make(xs) - - // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] - // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 - // unique ones by way of this implicit, let's share one. - @inline implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { - if (xs eq null) null - else if (xs.length == 0) WrappedArray.empty[T] - else new WrappedArray.ofRef[T](xs) - } - - @inline implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null - @inline implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null - @inline implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null - @inline implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null - @inline implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null - @inline implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null - @inline implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null - @inline implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null - @inline implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null - - @inline implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null - @inline implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null - - @inline implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = - new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { - def apply(from: String) = immutable.IndexedSeq.newBuilder[T] - def apply() = immutable.IndexedSeq.newBuilder[T] - } -} diff --git a/scalalib/overrides-2.12/scala/Predef.scala b/scalalib/overrides-2.12/scala/Predef.scala deleted file mode 100644 index 90b49e892a..0000000000 --- a/scalalib/overrides-2.12/scala/Predef.scala +++ /dev/null @@ -1,646 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala - -import scala.language.implicitConversions - -import scala.collection.{ mutable, immutable, generic } -import immutable.StringOps -import mutable.ArrayOps -import generic.CanBuildFrom -import scala.annotation.{ elidable, implicitNotFound } -import scala.annotation.elidable.ASSERTION -import scala.io.StdIn - -/** The `Predef` object provides definitions that are accessible in all Scala - * compilation units without explicit qualification. - * - * === Commonly Used Types === - * Predef provides type aliases for types which are commonly used, such as - * the immutable collection types [[scala.collection.immutable.Map]], - * [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]] - * constructors ([[scala.collection.immutable.::]] and - * [[scala.collection.immutable.Nil]]). - * - * === Console Output === - * For basic console output, `Predef` provides convenience methods [[print(x:Any* print]] and [[println(x:Any* println]], - * which are aliases of the methods in the object [[scala.Console]]. - * - * === Assertions === - * A set of `assert` functions are provided for use as a way to document - * and dynamically check invariants in code. Invocations of `assert` can be elided - * at compile time by providing the command line option `-Xdisable-assertions`, - * which raises `-Xelide-below` above `elidable.ASSERTION`, to the `scalac` command. - * - * Variants of `assert` intended for use with static analysis tools are also - * provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are - * intended for use as a means of design-by-contract style specification - * of pre- and post-conditions on functions, with the intention that these - * specifications could be consumed by a static analysis tool. For instance, - * - * {{{ - * def addNaturals(nats: List[Int]): Int = { - * require(nats forall (_ >= 0), "List contains negative numbers") - * nats.foldLeft(0)(_ + _) - * } ensuring(_ >= 0) - * }}} - * - * The declaration of `addNaturals` states that the list of integers passed should - * only contain natural numbers (i.e. non-negative), and that the result returned - * will also be natural. `require` is distinct from `assert` in that if the - * condition fails, then the caller of the function is to blame rather than a - * logical error having been made within `addNaturals` itself. `ensuring` is a - * form of `assert` that declares the guarantee the function is providing with - * regards to its return value. - * - * === Implicit Conversions === - * A number of commonly applied implicit conversions are also defined here, and - * in the parent type [[scala.LowPriorityImplicits]]. Implicit conversions - * are provided for the "widening" of numeric values, for instance, converting a - * Short value to a Long value as required, and to add additional higher-order - * functions to Array values. These are described in more detail in the documentation of [[scala.Array]]. - * - * @groupname utilities Utility Methods - * @groupprio utilities 10 - * - * @groupname assertions Assertions - * @groupprio assertions 20 - * @groupdesc assertions These methods support program verification and runtime correctness. - * - * @groupname console-output Console Output - * @groupprio console-output 30 - * @groupdesc console-output These methods provide output via the console. - * - * @groupname type-constraints Type Constraints - * @groupprio type-constraints 40 - * @groupdesc type-constraints These entities allows constraints between types to be stipulated. 
- * - * @groupname aliases Aliases - * @groupprio aliases 50 - * @groupdesc aliases These aliases bring selected immutable types into scope without any imports. - * - * @groupname conversions-string String Conversions - * @groupprio conversions-string 60 - * @groupdesc conversions-string Conversions to and from String and StringOps. - * - * @groupname implicit-classes-any Implicit Classes - * @groupprio implicit-classes-any 70 - * @groupdesc implicit-classes-any These implicit classes add useful extension methods to every type. - * - * @groupname implicit-classes-char CharSequence Conversions - * @groupprio implicit-classes-char 80 - * @groupdesc implicit-classes-char These implicit classes add CharSequence methods to Array[Char] and IndexedSeq[Char] instances. - * - * @groupname conversions-java-to-anyval Java to Scala - * @groupprio conversions-java-to-anyval 90 - * @groupdesc conversions-java-to-anyval Implicit conversion from Java primitive wrapper types to Scala equivalents. - * - * @groupname conversions-anyval-to-java Scala to Java - * @groupprio conversions-anyval-to-java 100 - * @groupdesc conversions-anyval-to-java Implicit conversion from Scala AnyVals to Java primitive wrapper types equivalents. - * - * @groupname conversions-array-to-wrapped-array Array to WrappedArray - * @groupprio conversions-array-to-wrapped-array 110 - * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to WrappedArrays. - */ -object Predef extends LowPriorityImplicits with DeprecatedPredef { - /** - * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to - * the class literal `T.class` in Java. - * - * @example {{{ - * val listClass = classOf[List[_]] - * // listClass is java.lang.Class[List[_]] = class scala.collection.immutable.List - * - * val mapIntString = classOf[Map[Int,String]] - * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map - * }}} - * @group utilities - */ - def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. - - /** The `String` type in Scala has methods that come either from the underlying - * Java String (see the documentation corresponding to your Java version, for - * example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or - * are added implicitly through [[scala.collection.immutable.StringOps]]. - * @group aliases - */ - type String = java.lang.String - /** @group aliases */ - type Class[T] = java.lang.Class[T] - - // miscellaneous ----------------------------------------------------- - scala.`package` // to force scala package object to be seen. - scala.collection.immutable.List // to force Nil, :: to be seen. - - /** @group aliases */ - type Function[-A, +B] = Function1[A, B] - - /** @group aliases */ - type Map[A, +B] = immutable.Map[A, B] - /** @group aliases */ - type Set[A] = immutable.Set[A] - /** @group aliases */ - @inline def Map = immutable.Map - /** @group aliases */ - @inline def Set = immutable.Set - - // Manifest types, companions, and incantations for summoning - @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") - type ClassManifest[T] = scala.reflect.ClassManifest[T] - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - type OptManifest[T] = scala.reflect.OptManifest[T] - @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - type Manifest[T] = scala.reflect.Manifest[T] - @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") - @inline def ClassManifest = scala.reflect.ClassManifest - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - @inline def Manifest = scala.reflect.Manifest - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - @inline def NoManifest = scala.reflect.NoManifest - - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") - def manifest[T](implicit m: Manifest[T]) = m - @deprecated("use scala.reflect.classTag[T] instead", "2.10.0") - def classManifest[T](implicit m: ClassManifest[T]) = m - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - def optManifest[T](implicit m: OptManifest[T]) = m - - // Minor variations on identity functions - /** @group utilities */ - @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version - /** @group utilities */ - @inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` - /** @group utilities */ - @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements - - // assertions --------------------------------------------------------- - - /** Tests an expression, throwing an `AssertionError` if false. - * Calls to this method will not be generated if `-Xelide-below` - * is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assertion the expression to test - * @group assertions - */ - @elidable(ASSERTION) - def assert(assertion: Boolean) { - if (!assertion) - throw new java.lang.AssertionError("assertion failed") - } - - /** Tests an expression, throwing an `AssertionError` if false. - * Calls to this method will not be generated if `-Xelide-below` - * is greater than `ASSERTION`. 
- * - * @see [[scala.annotation.elidable elidable]] - * @param assertion the expression to test - * @param message a String to include in the failure message - * @group assertions - */ - @elidable(ASSERTION) @inline - final def assert(assertion: Boolean, message: => Any) { - if (!assertion) - throw new java.lang.AssertionError("assertion failed: "+ message) - } - - /** Tests an expression, throwing an `AssertionError` if false. - * This method differs from assert only in the intent expressed: - * assert contains a predicate which needs to be proven, while - * assume contains an axiom for a static checker. Calls to this method - * will not be generated if `-Xelide-below` is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assumption the expression to test - * @group assertions - */ - @elidable(ASSERTION) - def assume(assumption: Boolean) { - if (!assumption) - throw new java.lang.AssertionError("assumption failed") - } - - /** Tests an expression, throwing an `AssertionError` if false. - * This method differs from assert only in the intent expressed: - * assert contains a predicate which needs to be proven, while - * assume contains an axiom for a static checker. Calls to this method - * will not be generated if `-Xelide-below` is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assumption the expression to test - * @param message a String to include in the failure message - * @group assertions - */ - @elidable(ASSERTION) @inline - final def assume(assumption: Boolean, message: => Any) { - if (!assumption) - throw new java.lang.AssertionError("assumption failed: "+ message) - } - - /** Tests an expression, throwing an `IllegalArgumentException` if false. - * This method is similar to `assert`, but blames the caller of the method - * for violating the condition. - * - * @param requirement the expression to test - * @group assertions - */ - def require(requirement: Boolean) { - if (!requirement) - throw new IllegalArgumentException("requirement failed") - } - - /** Tests an expression, throwing an `IllegalArgumentException` if false. - * This method is similar to `assert`, but blames the caller of the method - * for violating the condition. - * - * @param requirement the expression to test - * @param message a String to include in the failure message - * @group assertions - */ - @inline final def require(requirement: Boolean, message: => Any) { - if (!requirement) - throw new IllegalArgumentException("requirement failed: "+ message) - } - - /** `???` can be used for marking methods that remain to be implemented. - * @throws NotImplementedError - * @group utilities - */ - def ??? 
: Nothing = throw new NotImplementedError - - // tupling ------------------------------------------------------------ - - @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") - type Pair[+A, +B] = Tuple2[A, B] - @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") - object Pair { - def apply[A, B](x: A, y: B) = Tuple2(x, y) - def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) - } - - @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") - type Triple[+A, +B, +C] = Tuple3[A, B, C] - @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") - object Triple { - def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) - def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) - } - - // implicit classes ----------------------------------------------------- - - /** @group implicit-classes-any */ - implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { - @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y) - def →[B](y: B): Tuple2[A, B] = ->(y) - } - - /** @group implicit-classes-any */ - implicit final class Ensuring[A](private val self: A) extends AnyVal { - def ensuring(cond: Boolean): A = { assert(cond); self } - def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self } - def ensuring(cond: A => Boolean): A = { assert(cond(self)); self } - def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self } - } - - /** @group implicit-classes-any */ - implicit final class StringFormat[A](private val self: A) extends AnyVal { - /** Returns string formatted according to given `format` string. - * Format strings are as for `String.format` - * (@see java.lang.String.format). - */ - @inline def formatted(fmtstr: String): String = fmtstr format self - } - - // scala/bug#8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit - /** @group implicit-classes-any */ - implicit final class any2stringadd[A](private val self: A) extends AnyVal { - def +(other: String): String = String.valueOf(self) + other - } - - implicit final class RichException(private val self: Throwable) extends AnyVal { - import scala.compat.Platform.EOL - @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) - } - - // Sadly we have to do `@deprecatedName(null, "2.12.0")` because - // `@deprecatedName(since="2.12.0")` incurs a warning about - // Usage of named or default arguments transformed this annotation constructor call into a block. - // The corresponding AnnotationInfo will contain references to local values and default getters - // instead of the actual argument trees - // and `@deprecatedName(Symbol(""), "2.12.0")` crashes scalac with - // scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving object Symbol - // in run/repl-no-imports-no-predef-power.scala. 
- /** @group implicit-classes-char */ - implicit final class SeqCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { - def length: Int = __sequenceOfChars.length - def charAt(index: Int): Char = __sequenceOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) - override def toString = __sequenceOfChars mkString "" - } - - /** @group implicit-classes-char */ - implicit final class ArrayCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __arrayOfChars: Array[Char]) extends CharSequence { - def length: Int = __arrayOfChars.length - def charAt(index: Int): Char = __arrayOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) - override def toString = __arrayOfChars mkString "" - } - - implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { - def apply(from: String) = apply() - def apply() = mutable.StringBuilder.newBuilder - } - - /** @group conversions-string */ - @inline implicit def augmentString(x: String): StringOps = new StringOps(x) - /** @group conversions-string */ - @inline implicit def unaugmentString(x: StringOps): String = x.repr - - // printing ----------------------------------------------------------- - - /** Prints an object to `out` using its `toString` method. - * - * @param x the object to print; may be null. - * @group console-output - */ - def print(x: Any) = Console.print(x) - - /** Prints a newline character on the default output. - * @group console-output - */ - def println() = Console.println() - - /** Prints out an object to the default output, followed by a newline character. - * - * @param x the object to print. - * @group console-output - */ - def println(x: Any) = Console.println(x) - - /** Prints its arguments as a formatted string to the default output, - * based on a string pattern (in a fashion similar to printf in C). - * - * The interpretation of the formatting patterns is described in - * [[java.util.Formatter]]. - * - * Consider using the [[scala.StringContext.f f interpolator]] as more type safe and idiomatic. - * - * @param text the pattern for formatting the arguments. - * @param args the arguments used to instantiating the pattern. 
- * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments - * - * @see [[scala.StringContext.f StringContext.f]] - * @group console-output - */ - def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) - - // views -------------------------------------------------------------- - - implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x) - implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x) - - implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { - case x: Array[AnyRef] => refArrayOps[AnyRef](x) - case x: Array[Boolean] => booleanArrayOps(x) - case x: Array[Byte] => byteArrayOps(x) - case x: Array[Char] => charArrayOps(x) - case x: Array[Double] => doubleArrayOps(x) - case x: Array[Float] => floatArrayOps(x) - case x: Array[Int] => intArrayOps(x) - case x: Array[Long] => longArrayOps(x) - case x: Array[Short] => shortArrayOps(x) - case x: Array[Unit] => unitArrayOps(x) - case null => null - }).asInstanceOf[ArrayOps[T]] - - implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps.ofBoolean = new ArrayOps.ofBoolean(xs) - implicit def byteArrayOps(xs: Array[Byte]): ArrayOps.ofByte = new ArrayOps.ofByte(xs) - implicit def charArrayOps(xs: Array[Char]): ArrayOps.ofChar = new ArrayOps.ofChar(xs) - implicit def doubleArrayOps(xs: Array[Double]): ArrayOps.ofDouble = new ArrayOps.ofDouble(xs) - implicit def floatArrayOps(xs: Array[Float]): ArrayOps.ofFloat = new ArrayOps.ofFloat(xs) - implicit def intArrayOps(xs: Array[Int]): ArrayOps.ofInt = new ArrayOps.ofInt(xs) - implicit def longArrayOps(xs: Array[Long]): ArrayOps.ofLong = new ArrayOps.ofLong(xs) - implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps.ofRef[T] = new ArrayOps.ofRef[T](xs) - implicit def shortArrayOps(xs: Array[Short]): ArrayOps.ofShort = new ArrayOps.ofShort(xs) - implicit def unitArrayOps(xs: Array[Unit]): ArrayOps.ofUnit = new ArrayOps.ofUnit(xs) - - // "Autoboxing" and "Autounboxing" --------------------------------------------------- - - /** @group conversions-anyval-to-java */ - implicit def byte2Byte(x: Byte): java.lang.Byte = x.asInstanceOf[java.lang.Byte] - /** @group conversions-anyval-to-java */ - implicit def short2Short(x: Short): java.lang.Short = x.asInstanceOf[java.lang.Short] - /** @group conversions-anyval-to-java */ - implicit def char2Character(x: Char): java.lang.Character = x.asInstanceOf[java.lang.Character] - /** @group conversions-anyval-to-java */ - implicit def int2Integer(x: Int): java.lang.Integer = x.asInstanceOf[java.lang.Integer] - /** @group conversions-anyval-to-java */ - implicit def long2Long(x: Long): java.lang.Long = x.asInstanceOf[java.lang.Long] - /** @group conversions-anyval-to-java */ - implicit def float2Float(x: Float): java.lang.Float = x.asInstanceOf[java.lang.Float] - /** @group conversions-anyval-to-java */ - implicit def double2Double(x: Double): java.lang.Double = x.asInstanceOf[java.lang.Double] - /** @group conversions-anyval-to-java */ - implicit def boolean2Boolean(x: Boolean): java.lang.Boolean = x.asInstanceOf[java.lang.Boolean] - - /** @group conversions-java-to-anyval */ - implicit def Byte2byte(x: java.lang.Byte): Byte = x.asInstanceOf[Byte] - /** @group conversions-java-to-anyval */ - implicit def Short2short(x: java.lang.Short): Short = x.asInstanceOf[Short] - /** @group conversions-java-to-anyval */ - implicit def Character2char(x: java.lang.Character): Char = x.asInstanceOf[Char] - /** @group 
conversions-java-to-anyval */ - implicit def Integer2int(x: java.lang.Integer): Int = x.asInstanceOf[Int] - /** @group conversions-java-to-anyval */ - implicit def Long2long(x: java.lang.Long): Long = x.asInstanceOf[Long] - /** @group conversions-java-to-anyval */ - implicit def Float2float(x: java.lang.Float): Float = x.asInstanceOf[Float] - /** @group conversions-java-to-anyval */ - implicit def Double2double(x: java.lang.Double): Double = x.asInstanceOf[Double] - /** @group conversions-java-to-anyval */ - implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.asInstanceOf[Boolean] - - // Type Constraints -------------------------------------------------------------- - - /** - * An instance of `A <:< B` witnesses that `A` is a subtype of `B`. - * Requiring an implicit argument of the type `A <:< B` encodes - * the generalized constraint `A <: B`. - * - * @note we need a new type constructor `<:<` and evidence `conforms`, - * as reusing `Function1` and `identity` leads to ambiguities in - * case of type errors (`any2stringadd` is inferred) - * - * To constrain any abstract type T that's in scope in a method's - * argument list (not just the method's own type parameters) simply - * add an implicit argument of type `T <:< U`, where `U` is the required - * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the - * required lower bound. - * - * In part contributed by Jason Zaugg. - * @group type-constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") - sealed abstract class <:<[-From, +To] extends (From => To) with Serializable - private[this] final val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x } - // The dollar prefix is to dodge accidental shadowing of this method - // by a user-defined method of the same name (scala/bug#7788). - // The collections rely on this method. - /** @group type-constraints */ - implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] - - @deprecated("use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") - def conforms[A]: A <:< A = $conforms[A] - - /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. - * - * @see `<:<` for expressing subtyping constraints - * @group type-constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") - sealed abstract class =:=[From, To] extends (From => To) with Serializable - private[this] final val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x } - /** @group type-constraints */ - object =:= { - implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A] - } - - /** A type for which there is always an implicit value. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - class DummyImplicit - - object DummyImplicit { - - /** An implicit value yielding a `DummyImplicit`. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - implicit def dummyImplicit: DummyImplicit = new DummyImplicit - } -} - -private[scala] trait DeprecatedPredef { - self: Predef.type => - - // Deprecated stubs for any who may have been calling these methods directly. 
- @deprecated("use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) - @deprecated("use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) - @deprecated("use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) - @deprecated("use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) - @deprecated("use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) - @deprecated("use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) - - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) -} - -/** The `LowPriorityImplicits` class provides implicit values that -* are valid in all Scala compilation units without explicit qualification, -* but that are partially overridden by higher-priority conversions in object -* `Predef`. -* -* @author Martin Odersky -* @since 2.8 -*/ -// scala/bug#7335 Parents of Predef are defined in the same compilation unit to avoid -// cyclic reference errors compiling the standard library *without* a previously -// compiled copy on the classpath. -private[scala] abstract class LowPriorityImplicits { - import mutable.WrappedArray - import immutable.WrappedString - - /** We prefer the java.lang.* boxed types to these wrappers in - * any potential conflicts. Conflicts do exist because the wrappers - * need to implement ScalaNumber in order to have a symmetric equals - * method, but that implies implementing java.lang.Number as well. - * - * Note - these are inlined because they are value classes, but - * the call to xxxWrapper is not eliminated even though it does nothing. - * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ - * because maybe loading Predef has side effects! 
- */ - @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) - @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) - @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) - @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) - @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) - @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) - @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) - @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) - - /** @group conversions-array-to-wrapped-array */ - implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = - if (xs eq null) null - else WrappedArray.make(xs) - - // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] - // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 - // unique ones by way of this implicit, let's share one. - /** @group conversions-array-to-wrapped-array */ - implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { - if (xs eq null) null - else if (xs.length == 0) WrappedArray.empty[T] - else new WrappedArray.ofRef[T](xs) - } - - /** @group conversions-array-to-wrapped-array */ - implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null - - /** @group conversions-string */ - implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null - /** @group conversions-string */ - implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null - - implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = - new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { - def apply(from: String) = immutable.IndexedSeq.newBuilder[T] - def apply() = immutable.IndexedSeq.newBuilder[T] - } -} diff --git a/scalalib/overrides-2.13/scala/Array.scala b/scalalib/overrides-2.13/scala/Array.scala new file mode 100644 index 0000000000..2add5fde8b --- /dev/null +++ b/scalalib/overrides-2.13/scala/Array.scala @@ -0,0 
+1,659 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.ClassTag +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. + * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. + */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](t) + + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. + * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. 
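+ *
+ *  For instance, one would expect the following (an illustrative sketch of the
+ *  truncating/padding behaviour described above):
+ *  {{{
+ *  Array.copyOf(Array(1, 2, 3, 4), 2)   // Array(1, 2)
+ *  Array.copyOf(Array(1, 2), 4)         // Array(1, 2, 0, 0)
+ *  }}}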
+ * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. + * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = (original match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. + * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val array = new Array[T](xs.length) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. + * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
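+ *
+ *  For instance, with the two-dimensional variant above one would expect
+ *  (an illustrative sketch):
+ *  {{{
+ *  Array.fill(2, 3)(0)   // Array(Array(0, 0, 0), Array(0, 0, 0))
+ *  }}}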
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return A traversable consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
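+ *
+ *  For instance, with the two-dimensional variant above one would expect
+ *  (an illustrative sketch):
+ *  {{{
+ *  Array.tabulate(3, 3)((i, j) => i * j)
+ *  // Array(Array(0, 0, 0), Array(0, 1, 2), Array(0, 2, 4))
+ *  }}}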
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. + * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = { + if (xs eq ys) + return true + if (xs.length != ys.length) + return false + + val len = xs.length + var i = 0 + while (i < len) { + if (xs(i) != ys(i)) + return false + i += 1 + } + true + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. + * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: Boolean = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. 
+ * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[http://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. + * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. + */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scalalib/overrides-2.13/scala/Symbol.scala b/scalalib/overrides-2.13/scala/Symbol.scala new file mode 100644 index 0000000000..79a7a5e401 --- /dev/null +++ b/scalalib/overrides-2.13/scala/Symbol.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +/** This class provides a simple way to get unique objects for equal strings. + * Since symbols are interned, they can be compared using reference equality. + * Instances of `Symbol` can be created easily with Scala's built-in quote + * mechanism. 
+ * + * For instance, the Scala term `'mysym` will + * invoke the constructor of the `Symbol` class in the following way: + * `Symbol("mysym")`. + */ +final class Symbol private (val name: String) extends Serializable { + /** Converts this symbol to a string. + */ + override def toString(): String = "Symbol(" + name + ")" + + @throws(classOf[java.io.ObjectStreamException]) + private def readResolve(): Any = Symbol.apply(name) + override def hashCode = name.hashCode() + override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] +} + +object Symbol extends UniquenessCache[Symbol] { + override def apply(name: String): Symbol = super.apply(name) + protected def valueFromKey(name: String): Symbol = new Symbol(name) + protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) +} + +private[scala] abstract class UniquenessCache[V] { + private val cache = collection.mutable.Map.empty[String, V] + + protected def valueFromKey(k: String): V + protected def keyFromValue(v: V): Option[String] + + def apply(name: String): V = + cache.getOrElseUpdate(name, valueFromKey(name)) + + def unapply(other: V): Option[String] = keyFromValue(other) +} diff --git a/scalalib/overrides-2.13/scala/concurrent/ExecutionContext.scala b/scalalib/overrides-2.13/scala/concurrent/ExecutionContext.scala new file mode 100644 index 0000000000..0fd2bd17af --- /dev/null +++ b/scalalib/overrides-2.13/scala/concurrent/ExecutionContext.scala @@ -0,0 +1,227 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.concurrent + + +import java.util.concurrent.{ ExecutorService, Executor } +import scala.annotation.implicitNotFound + +/** + * An `ExecutionContext` can execute program logic asynchronously, + * typically but not necessarily on a thread pool. + * + * A general purpose `ExecutionContext` must be asynchronous in executing + * any `Runnable` that is passed into its `execute`-method. A special purpose + * `ExecutionContext` may be synchronous but must only be passed to code that + * is explicitly safe to be run using a synchronously executing `ExecutionContext`. + * + * APIs such as `Future.onComplete` require you to provide a callback + * and an implicit `ExecutionContext`. The implicit `ExecutionContext` + * will be used to execute the callback. + * + * While it is possible to simply import + * `scala.concurrent.ExecutionContext.Implicits.global` to obtain an + * implicit `ExecutionContext`, application developers should carefully + * consider where they want to set execution policy; + * ideally, one place per application—or per logically related section of code— + * will make a decision about which `ExecutionContext` to use. + * That is, you will mostly want to avoid hardcoding, especially via an import, + * `scala.concurrent.ExecutionContext.Implicits.global`. + * The recommended approach is to add `(implicit ec: ExecutionContext)` to methods, + * or class constructor parameters, which need an `ExecutionContext`. + * + * Then locally import a specific `ExecutionContext` in one place for the entire + * application or module, passing it implicitly to individual methods. + * Alternatively define a local implicit val with the required `ExecutionContext`. 
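+ *
+ * A minimal sketch of that pattern (the method and value names here are
+ * illustrative only):
+ * {{{
+ * import scala.concurrent.{ ExecutionContext, Future }
+ *
+ * def totalLength(xs: List[String])(implicit ec: ExecutionContext): Future[Int] =
+ *   Future(xs.map(_.length).sum)
+ *
+ * implicit val ec: ExecutionContext = ExecutionContext.global
+ * totalLength(List("a", "bb"))   // eventually completes with 3
+ * }}}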
+ * + * A custom `ExecutionContext` may be appropriate to execute code + * which blocks on IO or performs long-running computations. + * `ExecutionContext.fromExecutorService` and `ExecutionContext.fromExecutor` + * are good ways to create a custom `ExecutionContext`. + * + * The intent of `ExecutionContext` is to lexically scope code execution. + * That is, each method, class, file, package, or application determines + * how to run its own code. This avoids issues such as running + * application callbacks on a thread pool belonging to a networking library. + * The size of a networking library's thread pool can be safely configured, + * knowing that only that library's network operations will be affected. + * Application callback execution can be configured separately. + */ +@implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass +an (implicit ec: ExecutionContext) parameter to your method. + +The ExecutionContext is used to configure how and on which +thread pools Futures will run, so the specific ExecutionContext +that is selected is important. + +If your application does not define an ExecutionContext elsewhere, +consider using Scala's global ExecutionContext by defining +the following: + +implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global""") +trait ExecutionContext { + + /** Runs a block of code on this execution context. + * + * @param runnable the task to execute + */ + def execute(runnable: Runnable): Unit + + /** Reports that an asynchronous computation failed. + * + * @param cause the cause of the failure + */ + def reportFailure(@deprecatedName("t") cause: Throwable): Unit + + /** Prepares for the execution of a task. Returns the prepared + * execution context. The recommended implementation of + * `prepare` is to return `this`. + * + * This method should no longer be overridden or called. It was + * originally expected that `prepare` would be called by + * all libraries that consume ExecutionContexts, in order to + * capture thread local context. However, this usage has proven + * difficult to implement in practice and instead it is + * now better to avoid using `prepare` entirely. + * + * Instead, if an `ExecutionContext` needs to capture thread + * local context, it should capture that context when it is + * constructed, so that it doesn't need any additional + * preparation later. + */ + @deprecated("preparation of ExecutionContexts will be removed", "2.12.0") + // This cannot be removed until there is a suitable replacement + def prepare(): ExecutionContext = this +} + +/** + * An [[ExecutionContext]] that is also a + * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]]. + */ +trait ExecutionContextExecutor extends ExecutionContext with Executor + +/** + * An [[ExecutionContext]] that is also a + * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]]. + */ +trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService + + +/** Contains factory methods for creating execution contexts. + */ +object ExecutionContext { + /** + * The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global + * `ExecutionContext` explicitly. + * + * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. 
+ * It can be configured via the following [[scala.sys.SystemProperties]]: + * + * `scala.concurrent.context.minThreads` = defaults to "1" + * `scala.concurrent.context.numThreads` = defaults to "x1" (i.e. the current number of available processors * 1) + * `scala.concurrent.context.maxThreads` = defaults to "x1" (i.e. the current number of available processors * 1) + * `scala.concurrent.context.maxExtraThreads` = defaults to "256" + * + * The pool size of threads is then `numThreads` bounded by `minThreads` on the lower end and `maxThreads` on the high end. + * + * The `maxExtraThreads` is the maximum number of extra threads to have at any given time to evade deadlock, + * see [[scala.concurrent.BlockContext]]. + * + * @return the global `ExecutionContext` + */ + final lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor) + + /** + * WARNING: Only ever execute logic which will quickly return control to the caller. + * + * This `ExecutionContext` steals execution time from other threads by having its + * `Runnable`s run on the `Thread` which calls `execute` and then yielding back control + * to the caller after *all* its `Runnable`s have been executed. + * Nested invocations of `execute` will be trampolined to prevent uncontrolled stack space growth. + * + * When using `parasitic` with abstractions such as `Future` it will in many cases be non-deterministic + * as to which `Thread` will be executing the logic, as it depends on when/if that `Future` is completed. + * + * Do *not* call any blocking code in the `Runnable`s submitted to this `ExecutionContext` + * as it will prevent progress by other enqueued `Runnable`s and the calling `Thread`. + * + * Symptoms of misuse of this `ExecutionContext` include, but are not limited to, deadlocks + * and severe performance problems. + * + * Any `NonFatal` or `InterruptedException`s will be reported to the `defaultReporter`. + */ + object parasitic extends ExecutionContextExecutor with BatchingExecutor { + override final def submitForExecution(runnable: Runnable): Unit = runnable.run() + override final def execute(runnable: Runnable): Unit = submitSyncBatched(runnable) + override final def reportFailure(t: Throwable): Unit = defaultReporter(t) + } + + object Implicits { + /** + * The implicit global `ExecutionContext`. Import `global` when you want to provide the global + * `ExecutionContext` implicitly. + * + * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default, + * the thread pool uses a target number of worker threads equal to the number of + * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]]. + */ + implicit final def global: ExecutionContext = scala.scalanative.runtime.ExecutionContext.global + + } + + /** Creates an `ExecutionContext` from the given `ExecutorService`. + * + * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. + * @param reporter a function for error reporting + * @return the `ExecutionContext` using the given `ExecutorService` + */ + def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService = + impl.ExecutionContextImpl.fromExecutorService(e, reporter) + + /** Creates an `ExecutionContext` from the given `ExecutorService` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. 
+ * + * If it is guaranteed that none of the executed tasks are blocking, a single-threaded `ExecutorService` + * can be used to create an `ExecutionContext` as follows: + * + * {{{ + * import java.util.concurrent.Executors + * val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor()) + * }}} + * + * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. + * @return the `ExecutionContext` using the given `ExecutorService` + */ + def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) + + /** Creates an `ExecutionContext` from the given `Executor`. + * + * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. + * @param reporter a function for error reporting + * @return the `ExecutionContext` using the given `Executor` + */ + def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor = + impl.ExecutionContextImpl.fromExecutor(e, reporter) + + /** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. + * + * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. + * @return the `ExecutionContext` using the given `Executor` + */ + def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) + + /** The default reporter simply prints the stack trace of the `Throwable` to [[http://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]]. + * + * @return the function for error reporting + */ + final val defaultReporter: Throwable => Unit = _.printStackTrace() +} diff --git a/scalalib/overrides-2.13/scala/reflect/ClassTag.scala b/scalalib/overrides-2.13/scala/reflect/ClassTag.scala new file mode 100644 index 0000000000..4b69bb0240 --- /dev/null +++ b/scalalib/overrides-2.13/scala/reflect/ClassTag.scala @@ -0,0 +1,139 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package reflect + +import java.lang.{ Class => jClass } + +/** + * + * A `ClassTag[T]` stores the erased class of a given type `T`, accessible via the `runtimeClass` + * field. This is particularly useful for instantiating `Array`s whose element types are unknown + * at compile time. + * + * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags.TypeTag]]s, in that they + * wrap only the runtime class of a given type, whereas a `TypeTag` contains all static type + * information. That is, `ClassTag`s are constructed from knowing only the top-level class of a + * type, without necessarily knowing all of its argument types. This runtime information is enough + * for runtime `Array` creation. 
+ * + * For example: + * {{{ + * scala> def mkArray[T : ClassTag](elems: T*) = Array[T](elems: _*) + * mkArray: [T](elems: T*)(implicit evidence\$1: scala.reflect.ClassTag[T])Array[T] + * + * scala> mkArray(42, 13) + * res0: Array[Int] = Array(42, 13) + * + * scala> mkArray("Japan","Brazil","Germany") + * res1: Array[String] = Array(Japan, Brazil, Germany) + * }}} + * + * See [[scala.reflect.api.TypeTags]] for more examples, or the + * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] + * for more details. + * + */ +@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}") +trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable { + // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder` + // class tags, and all tags in general, should be as minimalistic as possible + + /** A class representing the type `U` to which `T` would be erased. + * Note that there is no subtyping relationship between `T` and `U`. + */ + def runtimeClass: jClass[_] + + /** Produces a `ClassTag` that knows how to instantiate an `Array[Array[T]]` */ + def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) + + /** Produces a new array with element type `T` and length `len` */ + def newArray(len: Int): Array[T] = + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + + /** A ClassTag[T] can serve as an extractor that matches only objects of type T. + * + * The compiler tries to turn unchecked type tests in pattern matches into checked ones + * by wrapping a `(_: T)` type pattern as `ct(_: T)`, where `ct` is the `ClassTag[T]` instance. + * Type tests necessary before calling other extractors are treated similarly. + * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)` + * is uncheckable, but we have an instance of `ClassTag[T]`. + */ + def unapply(x: Any): Option[T] = + if (runtimeClass.isInstance(x)) Some(x.asInstanceOf[T]) + else None + + // case class accessories + override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]] + override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass + override def hashCode = runtimeClass.## + override def toString = { + def prettyprint(clazz: jClass[_]): String = + if (clazz.isArray) s"Array[${prettyprint(clazz.getComponentType)}]" else + clazz.getName + prettyprint(runtimeClass) + } +} + +/** + * Class tags corresponding to primitive types and constructor/extractor for ClassTags. 
+ */ +object ClassTag { + private[this] val ObjectTYPE = classOf[java.lang.Object] + private[this] val NothingTYPE = classOf[scala.runtime.Nothing$] + private[this] val NullTYPE = classOf[scala.runtime.Null$] + + import ManifestFactory._ + + val Byte : ByteManifest = Manifest.Byte + val Short : ShortManifest = Manifest.Short + val Char : CharManifest = Manifest.Char + val Int : IntManifest = Manifest.Int + val Long : LongManifest = Manifest.Long + val Float : FloatManifest = Manifest.Float + val Double : DoubleManifest = Manifest.Double + val Boolean : BooleanManifest = Manifest.Boolean + val Unit : UnitManifest = Manifest.Unit + val Any : ClassTag[scala.Any] = Manifest.Any + val Object : ClassTag[java.lang.Object] = Manifest.Object + val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal + val AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef + val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing + val Null : ClassTag[scala.Null] = Manifest.Null + + @SerialVersionUID(1L) + private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] { + override def newArray(len: Int): Array[T] = { + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + } + } + + def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = + runtimeClass1 match { + case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]] + case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]] + case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]] + case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]] + case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]] + case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]] + case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]] + case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]] + case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]] + case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]] + case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]] + case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]] + case _ => new GenericClassTag[T](runtimeClass1) + } + + def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass) +} diff --git a/scalalib/overrides-2.13/scala/reflect/Manifest.scala b/scalalib/overrides-2.13/scala/reflect/Manifest.scala new file mode 100644 index 0000000000..081a4a09ab --- /dev/null +++ b/scalalib/overrides-2.13/scala/reflect/Manifest.scala @@ -0,0 +1,457 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package reflect + +import scala.collection.mutable.{ArrayBuilder, ArraySeq} + +/** A `Manifest[T]` is an opaque descriptor for type T. Its supported use + * is to give access to the erasure of the type as a `Class` instance, as + * is necessary for the creation of native `Arrays` if the class is not + * known at compile time. + * + * The type-relation operators `<:<` and `=:=` should be considered + * approximations only, as there are numerous aspects of type conformance + * which are not yet adequately represented in manifests. 
+ * + * Example usages: + * {{{ + * def arr[T] = new Array[T](0) // does not compile + * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles + * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding + * + * // Methods manifest and optManifest are in [[scala.Predef]]. + * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] + * isApproxSubType[List[String], List[AnyRef]] // true + * isApproxSubType[List[String], List[Int]] // false + * + * def methods[T: Manifest] = manifest[T].runtimeClass.getMethods + * def retType[T: Manifest](name: String) = + * methods[T] find (_.getName == name) map (_.getGenericReturnType) + * + * retType[Map[_, _]]("values") // Some(scala.collection.Iterable) + * }}} + */ +@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") +trait Manifest[T] extends ClassManifest[T] with Equals { + override def typeArguments: List[Manifest[_]] = Nil + + override def arrayManifest: Manifest[Array[T]] = + Manifest.classType[Array[T]](arrayClass[T](runtimeClass), this) + + override def canEqual(that: Any): Boolean = that match { + case _: Manifest[_] => true + case _ => false + } + /** Note: testing for erasure here is important, as it is many times + * faster than <:< and rules out most comparisons. + */ + override def equals(that: Any): Boolean = that match { + case m: Manifest[_] => (m canEqual this) && (this.runtimeClass == m.runtimeClass) && (this <:< m) && (m <:< this) + case _ => false + } + override def hashCode = this.runtimeClass.## +} + +/** The object `Manifest` defines factory methods for manifests. + * It is intended for use by the compiler and should not be used in client code. + */ +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") +object Manifest { + /* Forward all the public members of ManifestFactory, since this object used + * to be a `private val Manifest = ManifestFactory` in the package object. It + * was moved here because it needs to be in the same file as `trait Manifest` + * defined above. + */ + + def valueManifests: List[AnyValManifest[_]] = + ManifestFactory.valueManifests + + val Byte: ManifestFactory.ByteManifest = ManifestFactory.Byte + val Short: ManifestFactory.ShortManifest = ManifestFactory.Short + val Char: ManifestFactory.CharManifest = ManifestFactory.Char + val Int: ManifestFactory.IntManifest = ManifestFactory.Int + val Long: ManifestFactory.LongManifest = ManifestFactory.Long + val Float: ManifestFactory.FloatManifest = ManifestFactory.Float + val Double: ManifestFactory.DoubleManifest = ManifestFactory.Double + val Boolean: ManifestFactory.BooleanManifest = ManifestFactory.Boolean + val Unit: ManifestFactory.UnitManifest = ManifestFactory.Unit + + val Any: Manifest[scala.Any] = ManifestFactory.Any + val Object: Manifest[java.lang.Object] = ManifestFactory.Object + val AnyRef: Manifest[scala.AnyRef] = ManifestFactory.AnyRef + val AnyVal: Manifest[scala.AnyVal] = ManifestFactory.AnyVal + val Null: Manifest[scala.Null] = ManifestFactory.Null + val Nothing: Manifest[scala.Nothing] = ManifestFactory.Nothing + + /** Manifest for the singleton type `value.type`. 
*/ + def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = + ManifestFactory.singleType[T](value) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. + * @note This no-prefix, no-arguments case is separate because we + * it's called from ScalaRunTime.boxArray itself. If we + * pass varargs as arrays into this, we get an infinitely recursive call + * to boxArray. (Besides, having a separate case is more efficient) + */ + def classType[T](clazz: Predef.Class[_]): Manifest[T] = + ManifestFactory.classType[T](clazz) + + /** Manifest for the class type `clazz`, where `clazz` is + * a top-level or static class and args are its type arguments. */ + def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.classType[T](clazz, arg1, args: _*) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a class with non-package prefix type `prefix` and type arguments `args`. + */ + def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.classType[T](prefix, clazz, args: _*) + + def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = + ManifestFactory.arrayType[T](arg) + + /** Manifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. */ + def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.abstractType[T](prefix, name, upperBound, args: _*) + + /** Manifest for the unknown type `_ >: L <: U` in an existential. */ + def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = + ManifestFactory.wildcardType[T](lowerBound, upperBound) + + /** Manifest for the intersection type `parents_0 with ... with parents_n`. */ + def intersectionType[T](parents: Manifest[_]*): Manifest[T] = + ManifestFactory.intersectionType[T](parents: _*) + +} + +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0") +@SerialVersionUID(1L) +abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { + override def <:<(that: ClassManifest[_]): Boolean = + (that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal) + override def canEqual(other: Any) = other match { + case _: AnyValManifest[_] => true + case _ => false + } + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + @transient + override val hashCode = System.identityHashCode(this) +} + +/** `ManifestFactory` defines factory methods for manifests. + * It is intended for use by the compiler and should not be used in client code. + * + * Unlike `Manifest`, this factory isn't annotated with a deprecation warning. + * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. + * Why so complicated? Read up the comments for `ClassManifestFactory`. 
+ */ +object ManifestFactory { + def valueManifests: List[AnyValManifest[_]] = + List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit) + + @SerialVersionUID(1L) + final private[reflect] class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { + def runtimeClass = java.lang.Byte.TYPE + @inline override def newArray(len: Int): Array[Byte] = new Array[Byte](len) + override def newWrappedArray(len: Int): ArraySeq[Byte] = new ArraySeq.ofByte(new Array[Byte](len)) + override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() + override def unapply(x: Any): Option[Byte] = { + x match { + case d: Byte => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Byte + } + val Byte: ByteManifest = new ByteManifest + + @SerialVersionUID(1L) + final private[reflect] class ShortManifest extends AnyValManifest[scala.Short]("Short") { + def runtimeClass = java.lang.Short.TYPE + @inline override def newArray(len: Int): Array[Short] = new Array[Short](len) + override def newWrappedArray(len: Int): ArraySeq[Short] = new ArraySeq.ofShort(new Array[Short](len)) + override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() + override def unapply(x: Any): Option[Short] = { + x match { + case d: Short => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Short + } + val Short: ShortManifest = new ShortManifest + + @SerialVersionUID(1L) + final private[reflect] class CharManifest extends AnyValManifest[scala.Char]("Char") { + def runtimeClass = java.lang.Character.TYPE + @inline override def newArray(len: Int): Array[Char] = new Array[Char](len) + override def newWrappedArray(len: Int): ArraySeq[Char] = new ArraySeq.ofChar(new Array[Char](len)) + override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() + override def unapply(x: Any): Option[Char] = { + x match { + case d: Char => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Char + } + val Char: CharManifest = new CharManifest + + @SerialVersionUID(1L) + final private[reflect] class IntManifest extends AnyValManifest[scala.Int]("Int") { + def runtimeClass = java.lang.Integer.TYPE + @inline override def newArray(len: Int): Array[Int] = new Array[Int](len) + override def newWrappedArray(len: Int): ArraySeq[Int] = new ArraySeq.ofInt(new Array[Int](len)) + override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() + override def unapply(x: Any): Option[Int] = { + x match { + case d: Int => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Int + } + val Int: IntManifest = new IntManifest + + @SerialVersionUID(1L) + final private[reflect] class LongManifest extends AnyValManifest[scala.Long]("Long") { + def runtimeClass = java.lang.Long.TYPE + @inline override def newArray(len: Int): Array[Long] = new Array[Long](len) + override def newWrappedArray(len: Int): ArraySeq[Long] = new ArraySeq.ofLong(new Array[Long](len)) + override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() + override def unapply(x: Any): Option[Long] = { + x match { + case d: Long => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Long + } + val Long: LongManifest = new LongManifest + + @SerialVersionUID(1L) + final private[reflect] class FloatManifest extends AnyValManifest[scala.Float]("Float") { + def runtimeClass = java.lang.Float.TYPE + @inline override def newArray(len: Int): Array[Float] = new Array[Float](len) + override def 
newWrappedArray(len: Int): ArraySeq[Float] = new ArraySeq.ofFloat(new Array[Float](len)) + override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() + override def unapply(x: Any): Option[Float] = { + x match { + case d: Float => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Float + } + val Float: FloatManifest = new FloatManifest + + @SerialVersionUID(1L) + final private[reflect] class DoubleManifest extends AnyValManifest[scala.Double]("Double") { + def runtimeClass = java.lang.Double.TYPE + @inline override def newArray(len: Int): Array[Double] = new Array[Double](len) + override def newWrappedArray(len: Int): ArraySeq[Double] = new ArraySeq.ofDouble(new Array[Double](len)) + override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() + + override def unapply(x: Any): Option[Double] = { + x match { + case d: Double => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Double + } + val Double: DoubleManifest = new DoubleManifest + + @SerialVersionUID(1L) + final private[reflect] class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { + def runtimeClass = java.lang.Boolean.TYPE + @inline override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) + override def newWrappedArray(len: Int): ArraySeq[Boolean] = new ArraySeq.ofBoolean(new Array[Boolean](len)) + override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() + override def unapply(x: Any): Option[Boolean] = { + x match { + case d: Boolean => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Boolean + } + val Boolean: BooleanManifest = new BooleanManifest + + @SerialVersionUID(1L) + final private[reflect] class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { + def runtimeClass = java.lang.Void.TYPE + @inline override def newArray(len: Int): Array[Unit] = new Array[Unit](len) + override def newWrappedArray(len: Int): ArraySeq[Unit] = new ArraySeq.ofUnit(new Array[Unit](len)) + override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit() + override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] = + if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]] + else super.arrayClass(tp) + override def unapply(x: Any): Option[Unit] = { + x match { + case d: Unit => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Unit + } + val Unit: UnitManifest = new UnitManifest + + private[this] val ObjectTYPE = classOf[java.lang.Object] + private[this] val NothingTYPE = classOf[scala.runtime.Nothing$] + private[this] val NullTYPE = classOf[scala.runtime.Null$] + + @SerialVersionUID(1L) + final private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { + override def newArray(len: Int) = new Array[scala.Any](len) + override def <:<(that: ClassManifest[_]): Boolean = (that eq this) + private def readResolve(): Any = Manifest.Any + } + val Any: Manifest[scala.Any] = new AnyManifest + + @SerialVersionUID(1L) + final private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { + override def newArray(len: Int) = new Array[java.lang.Object](len) + override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) + private def readResolve(): Any = Manifest.Object + } + val Object: Manifest[java.lang.Object] = new ObjectManifest + + val AnyRef: Manifest[scala.AnyRef] = 
Object.asInstanceOf[Manifest[scala.AnyRef]] + + @SerialVersionUID(1L) + final private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { + override def newArray(len: Int) = new Array[scala.AnyVal](len) + override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) + private def readResolve(): Any = Manifest.AnyVal + } + val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest + + @SerialVersionUID(1L) + final private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { + override def newArray(len: Int) = new Array[scala.Null](len) + override def <:<(that: ClassManifest[_]): Boolean = + (that ne null) && (that ne Nothing) && !(that <:< AnyVal) + private def readResolve(): Any = Manifest.Null + } + val Null: Manifest[scala.Null] = new NullManifest + + @SerialVersionUID(1L) + final private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { + override def newArray(len: Int) = new Array[scala.Nothing](len) + override def <:<(that: ClassManifest[_]): Boolean = (that ne null) + private def readResolve(): Any = Manifest.Nothing + } + val Nothing: Manifest[scala.Nothing] = new NothingManifest + + @SerialVersionUID(1L) + final private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { + lazy val runtimeClass = value.getClass + override lazy val toString = value.toString + ".type" + } + + /** Manifest for the singleton type `value.type`. */ + def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = + new SingletonTypeManifest[T](value) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. + * @note This no-prefix, no-arguments case is separate because we + * it's called from ScalaRunTime.boxArray itself. If we + * pass varargs as arrays into this, we get an infinitely recursive call + * to boxArray. (Besides, having a separate case is more efficient) + */ + def classType[T](clazz: Predef.Class[_]): Manifest[T] = + new ClassTypeManifest[T](None, clazz, Nil) + + /** Manifest for the class type `clazz`, where `clazz` is + * a top-level or static class and args are its type arguments. */ + def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = + new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a class with non-package prefix type `prefix` and type arguments `args`. + */ + def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + new ClassTypeManifest[T](Some(prefix), clazz, args.toList) + + @SerialVersionUID(1L) + private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_], + override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) { + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + @transient + override val hashCode = System.identityHashCode(this) + } + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. 
*/ + @SerialVersionUID(1L) + private class ClassTypeManifest[T](prefix: Option[Manifest[_]], + val runtimeClass: Predef.Class[_], + override val typeArguments: List[Manifest[_]]) extends Manifest[T] { + override def toString = + (if (prefix.isEmpty) "" else prefix.get.toString+"#") + + (if (runtimeClass.isArray) "Array" else runtimeClass.getName) + + argString + } + + def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = + arg.asInstanceOf[Manifest[T]].arrayManifest + + @SerialVersionUID(1L) + private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: scala.collection.Seq[Manifest[_]]) extends Manifest[T] { + def runtimeClass = upperBound + override val typeArguments = args.toList + override def toString = prefix.toString+"#"+name+argString + } + + /** Manifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. */ + def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + new AbstractTypeManifest[T](prefix, name, upperBound, args) + + @SerialVersionUID(1L) + private class WildcardManifest[T](lowerBound: Manifest[_], upperBound: Manifest[_]) extends Manifest[T] { + def runtimeClass = upperBound.runtimeClass + override def toString = + "_" + + (if (lowerBound eq Nothing) "" else " >: "+lowerBound) + + (if (upperBound eq Nothing) "" else " <: "+upperBound) + } + + /** Manifest for the unknown type `_ >: L <: U` in an existential. + */ + def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = + new WildcardManifest[T](lowerBound, upperBound) + + @SerialVersionUID(1L) + private class IntersectionTypeManifest[T](parents: Array[Manifest[_]]) extends Manifest[T] { + // We use an `Array` instead of a `Seq` for `parents` to avoid cyclic dependencies during deserialization + // which can cause serialization proxies to leak and cause a ClassCastException. + def runtimeClass = parents(0).runtimeClass + override def toString = parents.mkString(" with ") + } + + /** Manifest for the intersection type `parents_0 with ... with parents_n`. */ + def intersectionType[T](parents: Manifest[_]*): Manifest[T] = + new IntersectionTypeManifest[T](parents.toArray) +} diff --git a/scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala b/scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala new file mode 100644 index 0000000000..578ae7e290 --- /dev/null +++ b/scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala @@ -0,0 +1,298 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package runtime + +import scala.collection.{AbstractIterator, AnyConstr, SortedOps, StrictOptimizedIterableOps, StringOps, StringView, View} +import scala.collection.generic.IsIterable +import scala.collection.immutable.{ArraySeq, NumericRange} +import scala.collection.mutable.StringBuilder +import scala.math.min +import scala.reflect.{ClassTag, classTag} +import java.lang.{Class => jClass} +import java.lang.reflect.{Method => JMethod} + +/** The object ScalaRunTime provides support methods required by + * the scala runtime. 
All these methods should be considered + * outside the API and subject to change or removal without notice. + */ +object ScalaRunTime { + def isArray(x: Any, atLevel: Int = 1): Boolean = + x != null && isArrayClass(x.getClass, atLevel) + + private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = + clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) + + // A helper method to make my life in the pattern matcher a lot easier. + def drop[Repr](coll: Repr, num: Int)(implicit iterable: IsIterable[Repr] { type C <: Repr }): Repr = + iterable(coll) drop num + + /** Return the class object representing an array with element class `clazz`. + */ + def arrayClass(clazz: jClass[_]): jClass[_] = { + // newInstance throws an exception if the erasure is Void.TYPE. see scala/bug#5680 + if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] + else java.lang.reflect.Array.newInstance(clazz, 0).getClass + } + + /** Return the class object representing an unboxed value type, + * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler + * rewrites expressions like 5.getClass to come here. + */ + def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = + classTag[T].runtimeClass.asInstanceOf[jClass[T]] + + /** Retrieve generic array element */ + def array_apply(xs: AnyRef, idx: Int): Any = { + xs match { + case x: Array[AnyRef] => x(idx).asInstanceOf[Any] + case x: Array[Int] => x(idx).asInstanceOf[Any] + case x: Array[Double] => x(idx).asInstanceOf[Any] + case x: Array[Long] => x(idx).asInstanceOf[Any] + case x: Array[Float] => x(idx).asInstanceOf[Any] + case x: Array[Char] => x(idx).asInstanceOf[Any] + case x: Array[Byte] => x(idx).asInstanceOf[Any] + case x: Array[Short] => x(idx).asInstanceOf[Any] + case x: Array[Boolean] => x(idx).asInstanceOf[Any] + case x: Array[Unit] => x(idx).asInstanceOf[Any] + case null => throw new NullPointerException + } + } + + /** update generic array element */ + def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { + xs match { + case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] + case x: Array[Int] => x(idx) = value.asInstanceOf[Int] + case x: Array[Double] => x(idx) = value.asInstanceOf[Double] + case x: Array[Long] => x(idx) = value.asInstanceOf[Long] + case x: Array[Float] => x(idx) = value.asInstanceOf[Float] + case x: Array[Char] => x(idx) = value.asInstanceOf[Char] + case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte] + case x: Array[Short] => x(idx) = value.asInstanceOf[Short] + case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean] + case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit] + case null => throw new NullPointerException + } + } + + /** Get generic array length */ + @inline def array_length(xs: AnyRef): Int = java.lang.reflect.Array.getLength(xs) + + // TODO: bytecode Object.clone() will in fact work here and avoids + // the type switch. See Array_clone comment in BCodeBodyBuilder. + def array_clone(xs: AnyRef): AnyRef = xs match { + case x: Array[AnyRef] => x.clone() + case x: Array[Int] => x.clone() + case x: Array[Double] => x.clone() + case x: Array[Long] => x.clone() + case x: Array[Float] => x.clone() + case x: Array[Char] => x.clone() + case x: Array[Byte] => x.clone() + case x: Array[Short] => x.clone() + case x: Array[Boolean] => x.clone() + case null => throw new NullPointerException + } + + /** Convert an array to an object array. + * Needed to deal with vararg arguments of primitive types that are passed + * to a generic Java vararg parameter T ... 
+ */ + def toObjectArray(src: AnyRef): Array[Object] = { + def copy[@specialized T <: AnyVal](src: Array[T]): Array[Object] = { + val length = src.length + if (length == 0) Array.emptyObjectArray + else { + val dest = new Array[Object](length) + var i = 0 + while (i < length) { + dest(i) = src(i).asInstanceOf[AnyRef] + i += 1 + } + dest + } + } + src match { + case x: Array[AnyRef] => x + case x: Array[Int] => copy(x) + case x: Array[Double] => copy(x) + case x: Array[Long] => copy(x) + case x: Array[Float] => copy(x) + case x: Array[Char] => copy(x) + case x: Array[Byte] => copy(x) + case x: Array[Short] => copy(x) + case x: Array[Boolean] => copy(x) + case x: Array[Unit] => copy(x) + case null => throw new NullPointerException + } + } + + def toArray[T](xs: scala.collection.Seq[T]) = { + if (xs.isEmpty) Array.emptyObjectArray + else { + val arr = new Array[AnyRef](xs.length) + val it = xs.iterator + var i = 0 + while (it.hasNext) { + arr(i) = it.next().asInstanceOf[AnyRef] + i += 1 + } + arr + } + } + + // Java bug: https://bugs.java.com/view_bug.do?bug_id=4071957 + // More background at ticket #2318. + def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) + + def _toString(x: Product): String = + x.productIterator.mkString(x.productPrefix + "(", ",", ")") + + def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) + + /** A helper for case classes. */ + def typedProductIterator[T](x: Product): Iterator[T] = { + new AbstractIterator[T] { + private[this] var c: Int = 0 + private[this] val cmax = x.productArity + def hasNext = c < cmax + def next() = { + val result = x.productElement(c) + c += 1 + result.asInstanceOf[T] + } + } + } + + /** Given any Scala value, convert it to a String. + * + * The primary motivation for this method is to provide a means for + * correctly obtaining a String representation of a value, while + * avoiding the pitfalls of naively calling toString on said value. + * In particular, it addresses the fact that (a) toString cannot be + * called on null and (b) depending on the apparent type of an + * array, toString may or may not print it in a human-readable form. + * + * @param arg the value to stringify + * @return a string representation of arg. + */ + def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) + def stringOf(arg: Any, maxElements: Int): String = { + def packageOf(x: AnyRef) = x.getClass.getPackage match { + case null => "" + case p => p.getName + } + def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala." + def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc." 
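    // Editorial aside (not part of the upstream source): these package-based helpers
    // feed the useOwnToString decision further down, e.g.
    //   isScalaClass(List(1))                      // true  - package scala.collection.immutable
    //   isScalaClass(new java.util.ArrayList[Int]) // false - package java.util
    // so collections outside the scala.* packages are rendered with their own
    // toString rather than being iterated here.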
+ + // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22) + def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple") + + // We use reflection because the scala.xml package might not be available + def isSubClassOf(potentialSubClass: Class[_], ofClass: String) = + try { + val classLoader = potentialSubClass.getClassLoader + val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader) + clazz.isAssignableFrom(potentialSubClass) + } catch { + case cnfe: ClassNotFoundException => false + } + def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node") + def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData") + + // When doing our own iteration is dangerous + def useOwnToString(x: Any) = x match { + // Range/NumericRange have a custom toString to avoid walking a gazillion elements + case _: Range | _: NumericRange[_] => true + // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 + case _: SortedOps[_, _] => true + // StringBuilder(a, b, c) and similar not so attractive + case _: StringView | _: StringOps | _: StringBuilder => true + // Don't want to evaluate any elements in a view + case _: View[_] => true + // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] + // -> catch those by isXmlNode and isXmlMetaData. + // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom + // collections which may have useful toString methods - ticket #3710 + // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. + case x: Iterable[_] => (!x.isInstanceOf[StrictOptimizedIterableOps[_, AnyConstr, _]]) || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) + // Otherwise, nothing could possibly go wrong + case _ => false + } + + // A variation on inner for maps so they print -> instead of bare tuples + def mapInner(arg: Any): String = arg match { + case (k, v) => inner(k) + " -> " + inner(v) + case _ => inner(arg) + } + + // Special casing Unit arrays, the value class which uses a reference array type. + def arrayToString(x: AnyRef) = { + if (x.getClass.getComponentType == classOf[BoxedUnit]) + (0 until min(array_length(x), maxElements)).map(_ => "()").mkString("Array(", ", ", ")") + else + x.asInstanceOf[Array[_]].iterator.take(maxElements).map(inner).mkString("Array(", ", ", ")") + } + + // The recursively applied attempt to prettify Array printing. + // Note that iterator is used if possible and foreach is used as a + // last resort, because the parallel collections "foreach" in a + // random order even on sequences. 
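    // Editorial aside (not part of the upstream source): concrete outputs of stringOf,
    // for reference:
    //   stringOf(Array(1, 2, 3))   // "Array(1, 2, 3)"  (plain toString prints the class name and hash instead)
    //   stringOf(Map(1 -> "a"))    // "Map(1 -> a)"     rendered through mapInner below
    //   stringOf(" padded ")       // "\" padded \""    quoted because of the leading/trailing spaces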
+ def inner(arg: Any): String = arg match { + case null => "null" + case "" => "\"\"" + case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x + case x if useOwnToString(x) => x.toString + case x: AnyRef if isArray(x) => arrayToString(x) + case x: scala.collection.Map[_, _] => x.iterator.take(maxElements).map(mapInner).mkString(x.collectionClassName + "(", ", ", ")") + case x: Iterable[_] => x.iterator.take(maxElements).map(inner).mkString(x.collectionClassName + "(", ", ", ")") + case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma + case x: Product if isTuple(x) => x.productIterator.map(inner).mkString("(", ",", ")") + case x => x.toString + } + + // The try/catch is defense against iterables which aren't actually designed + // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. + try inner(arg) + catch { + case _: UnsupportedOperationException | _: AssertionError => "" + arg + } + } + + /** stringOf formatted for use in a repl result. */ + def replStringOf(arg: Any, maxElements: Int): String = + stringOf(arg, maxElements) match { + case null => "null toString" + case s if s.indexOf('\n') >= 0 => "\n" + s + "\n" + case s => s + "\n" + } + + // Convert arrays to immutable.ArraySeq for use with Java varargs: + def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = + if (xs eq null) null + else ArraySeq.unsafeWrapArray(xs) + def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq[T] = { + if (xs eq null) null + else if (xs.length == 0) ArraySeq.empty[AnyRef].asInstanceOf[ArraySeq[T]] + else new ArraySeq.ofRef[T](xs) + } + def wrapIntArray(xs: Array[Int]): ArraySeq[Int] = if (xs ne null) new ArraySeq.ofInt(xs) else null + def wrapDoubleArray(xs: Array[Double]): ArraySeq[Double] = if (xs ne null) new ArraySeq.ofDouble(xs) else null + def wrapLongArray(xs: Array[Long]): ArraySeq[Long] = if (xs ne null) new ArraySeq.ofLong(xs) else null + def wrapFloatArray(xs: Array[Float]): ArraySeq[Float] = if (xs ne null) new ArraySeq.ofFloat(xs) else null + def wrapCharArray(xs: Array[Char]): ArraySeq[Char] = if (xs ne null) new ArraySeq.ofChar(xs) else null + def wrapByteArray(xs: Array[Byte]): ArraySeq[Byte] = if (xs ne null) new ArraySeq.ofByte(xs) else null + def wrapShortArray(xs: Array[Short]): ArraySeq[Short] = if (xs ne null) new ArraySeq.ofShort(xs) else null + def wrapBooleanArray(xs: Array[Boolean]): ArraySeq[Boolean] = if (xs ne null) new ArraySeq.ofBoolean(xs) else null + def wrapUnitArray(xs: Array[Unit]): ArraySeq[Unit] = if (xs ne null) new ArraySeq.ofUnit(xs) else null +} diff --git a/scalalib/overrides-2.13/scala/runtime/Statics.scala b/scalalib/overrides-2.13/scala/runtime/Statics.scala new file mode 100644 index 0000000000..61286337de --- /dev/null +++ b/scalalib/overrides-2.13/scala/runtime/Statics.scala @@ -0,0 +1,97 @@ +package scala.runtime + +// Ported from Scala.js + +/** Not for public consumption. Usage by the runtime only. + */ + +object Statics { + def mix(hash: Int, data: Int): Int = { + var h = mixLast(hash, data) + h = Integer.rotateLeft(h, 13) + (h * 5) + 0xe6546b64 + } + + def mixLast(hash: Int, data: Int): Int = { + var k = data + k *= 0xcc9e2d51 + k = Integer.rotateLeft(k, 15) + k *= 0x1b873593 + hash ^ k + } + + def finalizeHash(hash: Int, length: Int): Int = { + avalanche(hash ^ length) + } + + /** Force all bits of the hash to avalanche. Used for finalizing the hash. 
*/ + def avalanche(h0: Int): Int = { + var h = h0 + h ^= h >>> 16 + h *= 0x85ebca6b + h ^= h >>> 13 + h *= 0xc2b2ae35 + h ^= h >>> 16 + h + } + + def longHash(lv: Long): Int = { + val lo = lv.toInt + val hi = (lv >>> 32).toInt + if (hi == (lo >> 31)) lo // it is in the Int range + else lo ^ hi + } + + def doubleHash(dv: Double): Int = { + /* This implementation is based on what 2.12.0-M5+ does on the JVM. + * The 2.11 implementation on the JVM was not consistent with that of + * BoxesRunTime, and most importantly was not consistent with the hash of + * Long values. + * + * In Scala.js, we always use the version consistent with BoxesRunTime. + * Note that, for values that happen to be valid floats but not valid + * longs, this implementation is *not* consistent with the JVM (just like + * that of BoxesRunTime). + */ + val iv = dv.toInt + if (iv == dv) { + iv + } else { + // don't test the case dv.toFloat == dv + val lv = dv.toLong + if (lv == dv) + lv.hashCode() + else + dv.hashCode() + } + } + + def floatHash(fv: Float): Int = { + doubleHash(fv.toDouble) + } + + def anyHash(x: Any): Int = { + x match { + case null => 0 + case x: Double => doubleHash(x) + case x: Long => longHash(x) + case _ => x.hashCode() + } + } + + /** Used as a marker object to return from PartialFunctions */ + def pfMarker: AnyRef = PFMarker + + private object PFMarker extends AnyRef + + def releaseFence(): Unit = () + + /** Just throws an exception. + * + * Used by the synthetic `productElement` and `productElementName` methods + * in case classes. Delegating the exception-throwing to this function + * reduces the bytecode size of the case class. + */ + final def ioobe[T](n: Int): T = + throw new IndexOutOfBoundsException(String.valueOf(n)) +} diff --git a/scalalib/overrides/scala/package.scala b/scalalib/overrides/scala/package.scala deleted file mode 100644 index 35f0a87ede..0000000000 --- a/scalalib/overrides/scala/package.scala +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/** - * Core Scala types. They are always available without an explicit import. - * @contentDiagram hideNodes "scala.Serializable" - */ -package object scala { - type Throwable = java.lang.Throwable - type Exception = java.lang.Exception - type Error = java.lang.Error - - type RuntimeException = java.lang.RuntimeException - type NullPointerException = java.lang.NullPointerException - type ClassCastException = java.lang.ClassCastException - type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException - type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException - type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException - type UnsupportedOperationException = java.lang.UnsupportedOperationException - type IllegalArgumentException = java.lang.IllegalArgumentException - type NoSuchElementException = java.util.NoSuchElementException - type NumberFormatException = java.lang.NumberFormatException - type AbstractMethodError = java.lang.AbstractMethodError - type InterruptedException = java.lang.InterruptedException - - // A dummy used by the specialization annotation. 
- lazy val AnyRef = new Specializable { - override def toString = "object AnyRef" - } - - type TraversableOnce[+A] = scala.collection.TraversableOnce[A] - - type Traversable[+A] = scala.collection.Traversable[A] - lazy val Traversable = scala.collection.Traversable - - type Iterable[+A] = scala.collection.Iterable[A] - lazy val Iterable = scala.collection.Iterable - - type Seq[+A] = scala.collection.Seq[A] - lazy val Seq = scala.collection.Seq - - type IndexedSeq[+A] = scala.collection.IndexedSeq[A] - lazy val IndexedSeq = scala.collection.IndexedSeq - - type Iterator[+A] = scala.collection.Iterator[A] - lazy val Iterator = scala.collection.Iterator - - type BufferedIterator[+A] = scala.collection.BufferedIterator[A] - - type List[+A] = scala.collection.immutable.List[A] - lazy val List = scala.collection.immutable.List - - lazy val Nil = scala.collection.immutable.Nil - - type ::[A] = scala.collection.immutable.::[A] - lazy val :: = scala.collection.immutable.:: - - lazy val +: = scala.collection.+: - lazy val :+ = scala.collection.:+ - - type Stream[+A] = scala.collection.immutable.Stream[A] - lazy val Stream = scala.collection.immutable.Stream - lazy val #:: = scala.collection.immutable.Stream.#:: - - type Vector[+A] = scala.collection.immutable.Vector[A] - lazy val Vector = scala.collection.immutable.Vector - - type StringBuilder = scala.collection.mutable.StringBuilder - lazy val StringBuilder = scala.collection.mutable.StringBuilder - - type Range = scala.collection.immutable.Range - lazy val Range = scala.collection.immutable.Range - - // Numeric types which were moved into scala.math.* - - type BigDecimal = scala.math.BigDecimal - lazy val BigDecimal = scala.math.BigDecimal - - type BigInt = scala.math.BigInt - lazy val BigInt = scala.math.BigInt - - type Equiv[T] = scala.math.Equiv[T] - lazy val Equiv = scala.math.Equiv - - type Fractional[T] = scala.math.Fractional[T] - lazy val Fractional = scala.math.Fractional - - type Integral[T] = scala.math.Integral[T] - lazy val Integral = scala.math.Integral - - type Numeric[T] = scala.math.Numeric[T] - lazy val Numeric = scala.math.Numeric - - type Ordered[T] = scala.math.Ordered[T] - lazy val Ordered = scala.math.Ordered - - type Ordering[T] = scala.math.Ordering[T] - lazy val Ordering = scala.math.Ordering - - type PartialOrdering[T] = scala.math.PartialOrdering[T] - type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T] - - type Either[+A, +B] = scala.util.Either[A, B] - lazy val Either = scala.util.Either - - type Left[+A, +B] = scala.util.Left[A, B] - lazy val Left = scala.util.Left - - type Right[+A, +B] = scala.util.Right[A, B] - lazy val Right = scala.util.Right - - // Annotations which we might move to annotation.* -/* - type SerialVersionUID = annotation.SerialVersionUID - type deprecated = annotation.deprecated - type deprecatedName = annotation.deprecatedName - type inline = annotation.inline - type native = annotation.native - type noinline = annotation.noinline - type remote = annotation.remote - type specialized = annotation.specialized - type transient = annotation.transient - type throws = annotation.throws - type unchecked = annotation.unchecked.unchecked - type volatile = annotation.volatile - */ -} From f1f53303bbd2de39899def6df401021ea0417a7c Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 16:26:29 +0200 Subject: [PATCH 18/75] Introduce Float/Double Ordering implicit overrides in java.util.Arrays --- javalib/src/main/scala/java/util/Arrays.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) 
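Editorial note on the change below: Scala 2.13 splits `Ordering.Float`/`Ordering.Double` into explicit `TotalOrdering` and `IeeeOrdering` variants and deprecates the old single implicit, so the javalib `Arrays` gains its own private implicits that delegate to `java.lang.Float.compare`/`java.lang.Double.compare`, the total order that `java.util.Arrays.sort` documents on the JVM (`-0.0` before `0.0`, `NaN` after every other value). A minimal sketch of the difference; `TotalOrderDemo` is illustrative only and not part of the patch:

    object TotalOrderDemo {
      def main(args: Array[String]): Unit = {
        // IEEE comparison operators: -0.0 and 0.0 are equal, NaN compares false to everything.
        println(-0.0 < 0.0)                                                    // false
        println(Double.NaN > Double.PositiveInfinity)                          // false
        // The total order used by java.util.Arrays: -0.0 precedes 0.0, NaN is greatest.
        println(java.lang.Double.compare(-0.0, 0.0))                           // -1
        println(java.lang.Double.compare(Double.NaN, Double.PositiveInfinity)) // 1
      }
    }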
diff --git a/javalib/src/main/scala/java/util/Arrays.scala b/javalib/src/main/scala/java/util/Arrays.scala index 7be95f504c..786d192a35 100644 --- a/javalib/src/main/scala/java/util/Arrays.scala +++ b/javalib/src/main/scala/java/util/Arrays.scala @@ -15,6 +15,16 @@ object Arrays { } } + // Impose the total ordering of java.lang.Float.compare in Arrays + private implicit object FloatTotalOrdering extends Ordering[Float] { + def compare(x: Float, y: Float): Int = java.lang.Float.compare(x, y) + } + + // Impose the total ordering of java.lang.Double.compare in Arrays + private implicit object DoubleTotalOrdering extends Ordering[Double] { + def compare(x: Double, y: Double): Int = java.lang.Double.compare(x, y) + } + @noinline def sort(a: Array[Int]): Unit = sortImpl(a) @@ -962,4 +972,5 @@ object Arrays { } } } + } From 927a82077527b3d39a9bdc7ad78b05ad420c4816 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sat, 3 Oct 2020 16:28:43 +0200 Subject: [PATCH 19/75] Add higherKinds scalacOption to scalalib --- build.sbt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 706d981da4..b4cd02f51a 100644 --- a/build.sbt +++ b/build.sbt @@ -451,7 +451,8 @@ lazy val scalalib = // The option below is needed since Scala 2.12.12. scalacOptions += "-language:postfixOps", // The option below is needed since Scala 2.13.0. - scalacOptions += "-language:implicitConversions" + scalacOptions += "-language:implicitConversions", + scalacOptions += "-language:higherKinds" ) .settings(mavenPublishSettings) .settings( From 97f071d60e47b187b8198f37bb222dc0e917bbd7 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sun, 4 Oct 2020 14:35:54 +0200 Subject: [PATCH 20/75] Fix match in 2.13 - for non Int cases generate if-else chain instead of switch --- .../scalanative/nscplugin/NirGenExpr.scala | 79 +++++++++++++------ 1 file changed, 56 insertions(+), 23 deletions(-) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index 5cc0c19b81..a852a77344 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -209,19 +209,20 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => def genMatch(m: Match): Val = { val Match(scrutp, allcaseps) = m + type Case = (Local, Val, Tree) // Extract switch cases and assign unique names to them. - val caseps: Seq[(Local, Val, Tree)] = allcaseps.flatMap { + val caseps: Seq[Case] = allcaseps.flatMap { case CaseDef(Ident(nme.WILDCARD), _, _) => Seq() case CaseDef(pat, guard, body) => assert(guard.isEmpty, "CaseDef guard was not empty") val vals: Seq[Val] = pat match { case lit: Literal => - List(genLiteralValue(lit)) + List(genExpr(lit)) case Alternative(alts) => alts.map { - case lit: Literal => genLiteralValue(lit) + case lit: Literal => genExpr(lit) } case _ => Nil @@ -234,29 +235,61 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => case c @ CaseDef(Ident(nme.WILDCARD), _, body) => body }.get - // Generate some more fresh names and types. - val retty = genType(m.tpe) - val casenexts = caseps.map { case (n, v, _) => Next.Case(v, n) } - val defaultnext = Next(fresh()) - val merge = fresh() - val mergev = Val.Local(fresh(), retty) - - implicit val pos: nir.Position = m.pos + val retty = genType(m.tpe) + val scrut = genExpr(scrutp) // Generate code for the switch and its cases. 
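          // Editorial note (not part of the patch): the genIfsChain fallback introduced
          // below exists because on 2.13 the pattern matcher may hand the backend Match
          // trees whose case literals are not Ints, while the `switch` lowering is only
          // kept for an Int scrutinee with Int case values (see isIntMatch). As an
          // illustration, assuming it reaches the backend as a Match, a shape like
          //
          //   def kind(flag: Long): Int = flag match {
          //     case 0L => 0
          //     case 1L => 1
          //     case _  => -1
          //   }
          //
          // is lowered to a chain of genClassEquality tests combined with genIf.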
- val scrut = genExpr(scrutp) - buf.switch(scrut, defaultnext, casenexts) - buf.label(defaultnext.name)(defaultp.pos) - val defaultres = genExpr(defaultp) - buf.jump(merge, Seq(defaultres))(defaultp.pos) - caseps.foreach { - case (n, _, expr) => - buf.label(n)(expr.pos) - val caseres = genExpr(expr) - buf.jump(merge, Seq(caseres)) + def genSwitch(): Val = { + // Generate some more fresh names and types. + val casenexts = caseps.map { case (n, v, _) => Next.Case(v, n) } + val defaultnext = Next(fresh()) + val merge = fresh() + val mergev = Val.Local(fresh(), retty) + + implicit val pos: nir.Position = m.pos + + // Generate code for the switch and its cases. + val scrut = genExpr(scrutp) + buf.switch(scrut, defaultnext, casenexts) + buf.label(defaultnext.name)(defaultp.pos) + buf.jump(merge, Seq(genExpr(defaultp)))(defaultp.pos) + caseps.foreach { + case (n, _, expr) => + buf.label(n)(expr.pos) + val caseres = genExpr(expr) + buf.jump(merge, Seq(caseres)) + } + buf.label(merge, Seq(mergev)) + mergev } - buf.label(merge, Seq(mergev)) - mergev + + def genIfsChain(): Val = { + def loop(cases: List[Case]): Val = { + cases match { + case (_, caze, body) :: elsep => + val cond = + buf.genClassEquality(leftp = ValTree(scrut), + rightp = ValTree(caze), + ref = false, + negated = false) + buf.genIf(retty = retty, + condp = ValTree(cond), + thenp = ContTree(() => genExpr(body)), + elsep = ContTree(() => loop(elsep))) + + case Nil => genExpr(defaultp) + } + } + loop(caseps.toList) + } + + /* Since 2.13 we need to enforce that only Int switch cases reach backend + * For all other cases we're generating If-else chain */ + val isIntMatch = scrut.ty == Type.Int && + caseps.forall(_._2.ty == Type.Int) + + if (isIntMatch) genSwitch() + else genIfsChain() } def genMatch(prologue: List[Tree], lds: List[LabelDef]): Val = { From d7b276ee9bbabeab4ea3966dc9afb4609f2d13c2 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sun, 4 Oct 2020 15:38:04 +0200 Subject: [PATCH 21/75] Add 2.13.0 - 2.13.2 compat for Symbol.toString (Changed in 2.13.3) --- scalalib/overrides-2.13.0/scala/Symbol.scala | 31 ++++++++++++++++++++ scalalib/overrides-2.13.1/scala/Symbol.scala | 31 ++++++++++++++++++++ scalalib/overrides-2.13.2/scala/Symbol.scala | 31 ++++++++++++++++++++ 3 files changed, 93 insertions(+) create mode 100644 scalalib/overrides-2.13.0/scala/Symbol.scala create mode 100644 scalalib/overrides-2.13.1/scala/Symbol.scala create mode 100644 scalalib/overrides-2.13.2/scala/Symbol.scala diff --git a/scalalib/overrides-2.13.0/scala/Symbol.scala b/scalalib/overrides-2.13.0/scala/Symbol.scala new file mode 100644 index 0000000000..04d24cc921 --- /dev/null +++ b/scalalib/overrides-2.13.0/scala/Symbol.scala @@ -0,0 +1,31 @@ +package scala + +// Ported from Scala.js. +// Modified to use collection.mutable.Map instead of java.util.WeakHashMap. 
+ +final class Symbol private (val name: String) extends Serializable { + override def toString(): String = "'" + name + + @throws(classOf[java.io.ObjectStreamException]) + private def readResolve(): Any = Symbol.apply(name) + override def hashCode = name.hashCode() + override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] +} + +object Symbol extends UniquenessCache[Symbol] { + override def apply(name: String): Symbol = super.apply(name) + protected def valueFromKey(name: String): Symbol = new Symbol(name) + protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) +} + +private[scala] abstract class UniquenessCache[V] { + private val cache = collection.mutable.Map.empty[String, V] + + protected def valueFromKey(k: String): V + protected def keyFromValue(v: V): Option[String] + + def apply(name: String): V = + cache.getOrElseUpdate(name, valueFromKey(name)) + + def unapply(other: V): Option[String] = keyFromValue(other) +} diff --git a/scalalib/overrides-2.13.1/scala/Symbol.scala b/scalalib/overrides-2.13.1/scala/Symbol.scala new file mode 100644 index 0000000000..04d24cc921 --- /dev/null +++ b/scalalib/overrides-2.13.1/scala/Symbol.scala @@ -0,0 +1,31 @@ +package scala + +// Ported from Scala.js. +// Modified to use collection.mutable.Map instead of java.util.WeakHashMap. + +final class Symbol private (val name: String) extends Serializable { + override def toString(): String = "'" + name + + @throws(classOf[java.io.ObjectStreamException]) + private def readResolve(): Any = Symbol.apply(name) + override def hashCode = name.hashCode() + override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] +} + +object Symbol extends UniquenessCache[Symbol] { + override def apply(name: String): Symbol = super.apply(name) + protected def valueFromKey(name: String): Symbol = new Symbol(name) + protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) +} + +private[scala] abstract class UniquenessCache[V] { + private val cache = collection.mutable.Map.empty[String, V] + + protected def valueFromKey(k: String): V + protected def keyFromValue(v: V): Option[String] + + def apply(name: String): V = + cache.getOrElseUpdate(name, valueFromKey(name)) + + def unapply(other: V): Option[String] = keyFromValue(other) +} diff --git a/scalalib/overrides-2.13.2/scala/Symbol.scala b/scalalib/overrides-2.13.2/scala/Symbol.scala new file mode 100644 index 0000000000..04d24cc921 --- /dev/null +++ b/scalalib/overrides-2.13.2/scala/Symbol.scala @@ -0,0 +1,31 @@ +package scala + +// Ported from Scala.js. +// Modified to use collection.mutable.Map instead of java.util.WeakHashMap. 
+ +final class Symbol private (val name: String) extends Serializable { + override def toString(): String = "'" + name + + @throws(classOf[java.io.ObjectStreamException]) + private def readResolve(): Any = Symbol.apply(name) + override def hashCode = name.hashCode() + override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] +} + +object Symbol extends UniquenessCache[Symbol] { + override def apply(name: String): Symbol = super.apply(name) + protected def valueFromKey(name: String): Symbol = new Symbol(name) + protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) +} + +private[scala] abstract class UniquenessCache[V] { + private val cache = collection.mutable.Map.empty[String, V] + + protected def valueFromKey(k: String): V + protected def keyFromValue(v: V): Option[String] + + def apply(name: String): V = + cache.getOrElseUpdate(name, valueFromKey(name)) + + def unapply(other: V): Option[String] = keyFromValue(other) +} From 8aed3f140a364b628415b21f534a5782382b9009 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Sun, 4 Oct 2020 15:56:10 +0200 Subject: [PATCH 22/75] Remove more redundant overrides --- .../scala/runtime/ScalaRunTime.scala | 267 ------- scalalib/overrides-2.13/scala/Array.scala | 659 ------------------ .../scala/reflect/ClassTag.scala | 139 ---- .../scala/reflect/Manifest.scala | 457 ------------ .../scala/runtime/ScalaRunTime.scala | 298 -------- 5 files changed, 1820 deletions(-) delete mode 100644 scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala delete mode 100644 scalalib/overrides-2.13/scala/Array.scala delete mode 100644 scalalib/overrides-2.13/scala/reflect/ClassTag.scala delete mode 100644 scalalib/overrides-2.13/scala/reflect/Manifest.scala delete mode 100644 scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala diff --git a/scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala b/scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala deleted file mode 100644 index 40bac101cc..0000000000 --- a/scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package runtime - -import scala.collection.{ TraversableView, AbstractIterator, GenIterable } -import scala.collection.mutable.WrappedArray -import scala.collection.immutable.{ StringLike, NumericRange } -import scala.collection.generic.{ Sorted, IsTraversableLike } -import scala.reflect.{ ClassTag, classTag } -import java.lang.{ Class => jClass } - -import java.lang.reflect.{ Method => JMethod } - -/** The object ScalaRunTime provides support methods required by - * the scala runtime. All these methods should be considered - * outside the API and subject to change or removal without notice. - */ -object ScalaRunTime { - def isArray(x: Any, atLevel: Int = 1): Boolean = - x != null && isArrayClass(x.getClass, atLevel) - - private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = - clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) - - // A helper method to make my life in the pattern matcher a lot easier. 
- def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr = - traversable conversion coll drop num - - /** Return the class object representing an array with element class `clazz`. - */ - def arrayClass(clazz: jClass[_]): jClass[_] = { - // newInstance throws an exception if the erasure is Void.TYPE. see scala/bug#5680 - if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] - else java.lang.reflect.Array.newInstance(clazz, 0).getClass - } - - /** Return the class object representing an unboxed value type, - * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler - * rewrites expressions like 5.getClass to come here. - */ - def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = - classTag[T].runtimeClass.asInstanceOf[jClass[T]] - - /** Retrieve generic array element */ - @inline def array_apply(xs: AnyRef, idx: Int): Any = { - if (xs == null) { - throw new NullPointerException - } else { - xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].apply(idx) - } - } - - /** update generic array element */ - @inline def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { - if (xs == null) { - throw new NullPointerException - } else { - xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].update(idx, value) - } - } - - /** Get generic array length */ - def array_length(xs: AnyRef): Int = { - if (xs == null) { - throw new NullPointerException - } else { - xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].length - } - } - - def array_clone(xs: AnyRef): AnyRef = { - if (xs == null) { - throw new NullPointerException - } else { - xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].clone() - } - } - - /** Convert an array to an object array. - * Needed to deal with vararg arguments of primitive types that are passed - * to a generic Java vararg parameter T ... - */ - def toObjectArray(src: AnyRef): Array[Object] = { - def copy[@specialized T <: AnyVal](src: Array[T]): Array[Object] = { - val length = src.length - if (length == 0) Array.emptyObjectArray - else { - val dest = new Array[Object](length) - var i = 0 - while (i < length) { - dest(i) = src(i).asInstanceOf[AnyRef] - i += 1 - } - dest - } - } - src match { - case x: Array[AnyRef] => x - case x: Array[Int] => copy(x) - case x: Array[Double] => copy(x) - case x: Array[Long] => copy(x) - case x: Array[Float] => copy(x) - case x: Array[Char] => copy(x) - case x: Array[Byte] => copy(x) - case x: Array[Short] => copy(x) - case x: Array[Boolean] => copy(x) - case x: Array[Unit] => copy(x) - case null => throw new NullPointerException - } - } - - def toArray[T](xs: scala.collection.Seq[T]) = { - if (xs.isEmpty) Array.emptyObjectArray - else { - val arr = new Array[AnyRef](xs.length) - val it = xs.iterator - var i = 0 - while (it.hasNext) { - arr(i) = it.next().asInstanceOf[AnyRef] - i += 1 - } - arr - } - } - - // Java bug: https://bugs.java.com/view_bug.do?bug_id=4071957 - // More background at ticket #2318. - def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) - - def _toString(x: Product): String = - x.productIterator.mkString(x.productPrefix + "(", ",", ")") - - def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) - - /** A helper for case classes. 
*/ - def typedProductIterator[T](x: Product): Iterator[T] = { - new AbstractIterator[T] { - private var c: Int = 0 - private val cmax = x.productArity - def hasNext = c < cmax - def next() = { - val result = x.productElement(c) - c += 1 - result.asInstanceOf[T] - } - } - } - - /** Old implementation of `##`. */ - @deprecated("Use scala.runtime.Statics.anyHash instead.", "2.12.0") - def hash(x: Any): Int = Statics.anyHash(x.asInstanceOf[Object]) - - /** Given any Scala value, convert it to a String. - * - * The primary motivation for this method is to provide a means for - * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naively calling toString on said value. - * In particular, it addresses the fact that (a) toString cannot be - * called on null and (b) depending on the apparent type of an - * array, toString may or may not print it in a human-readable form. - * - * @param arg the value to stringify - * @return a string representation of arg. - */ - def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) - def stringOf(arg: Any, maxElements: Int): String = { - def packageOf(x: AnyRef) = x.getClass.getPackage match { - case null => "" - case p => p.getName - } - def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala." - def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc." - - // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22) - def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple") - - // We use reflection because the scala.xml package might not be available - def isSubClassOf(potentialSubClass: Class[_], ofClass: String) = - try { - val classLoader = potentialSubClass.getClassLoader - val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader) - clazz.isAssignableFrom(potentialSubClass) - } catch { - case cnfe: ClassNotFoundException => false - } - def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node") - def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData") - - // When doing our own iteration is dangerous - def useOwnToString(x: Any) = x match { - // Range/NumericRange have a custom toString to avoid walking a gazillion elements - case _: Range | _: NumericRange[_] => true - // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 - case _: Sorted[_, _] => true - // StringBuilder(a, b, c) and similar not so attractive - case _: StringLike[_] => true - // Don't want to evaluate any elements in a view - case _: TraversableView[_, _] => true - // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] - // -> catch those by isXmlNode and isXmlMetaData. - // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom - // collections which may have useful toString methods - ticket #3710 - // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. - case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) - // Otherwise, nothing could possibly go wrong - case _ => false - } - - // A variation on inner for maps so they print -> instead of bare tuples - def mapInner(arg: Any): String = arg match { - case (k, v) => inner(k) + " -> " + inner(v) - case _ => inner(arg) - } - - // Special casing Unit arrays, the value class which uses a reference array type. 
- def arrayToString(x: AnyRef) = { - if (x.getClass.getComponentType == classOf[BoxedUnit]) - 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")") - else - WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") - } - - // The recursively applied attempt to prettify Array printing. - // Note that iterator is used if possible and foreach is used as a - // last resort, because the parallel collections "foreach" in a - // random order even on sequences. - def inner(arg: Any): String = arg match { - case null => "null" - case "" => "\"\"" - case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x - case x if useOwnToString(x) => x.toString - case x: AnyRef if isArray(x) => arrayToString(x) - case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") - case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma - case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") - case x => x.toString - } - - // The try/catch is defense against iterables which aren't actually designed - // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. - try inner(arg) - catch { - case _: UnsupportedOperationException | _: AssertionError => "" + arg - } - } - - /** stringOf formatted for use in a repl result. */ - def replStringOf(arg: Any, maxElements: Int): String = { - val s = stringOf(arg, maxElements) - val nl = if (s contains "\n") "\n" else "" - - nl + s + "\n" - } -} diff --git a/scalalib/overrides-2.13/scala/Array.scala b/scalalib/overrides-2.13/scala/Array.scala deleted file mode 100644 index 2add5fde8b..0000000000 --- a/scalalib/overrides-2.13/scala/Array.scala +++ /dev/null @@ -1,659 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -//import scala.collection.generic._ -import scala.collection.{Factory, immutable, mutable} -import mutable.ArrayBuilder -import immutable.ArraySeq -import scala.language.implicitConversions -import scala.reflect.ClassTag -import scala.runtime.BoxedUnit -import scala.runtime.ScalaRunTime.{array_apply, array_update} - -/** Utility methods for operating on arrays. - * For example: - * {{{ - * val a = Array(1, 2) - * val b = Array.ofDim[Int](2) - * val c = Array.concat(a, b) - * }}} - * where the array objects `a`, `b` and `c` have respectively the values - * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. 
- */ -object Array { - val emptyBooleanArray = new Array[Boolean](0) - val emptyByteArray = new Array[Byte](0) - val emptyCharArray = new Array[Char](0) - val emptyDoubleArray = new Array[Double](0) - val emptyFloatArray = new Array[Float](0) - val emptyIntArray = new Array[Int](0) - val emptyLongArray = new Array[Long](0) - val emptyShortArray = new Array[Short](0) - val emptyObjectArray = new Array[Object](0) - - /** Provides an implicit conversion from the Array object to a collection Factory */ - implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) - @SerialVersionUID(3L) - private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) - def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] - } - - /** - * Returns a new [[scala.collection.mutable.ArrayBuilder]]. - */ - def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](t) - - def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { - case it: Iterable[A] => it.toArray[A] - case _ => it.iterator.toArray[A] - } - - private def slowcopy(src : AnyRef, - srcPos : Int, - dest : AnyRef, - destPos : Int, - length : Int): Unit = { - var i = srcPos - var j = destPos - val srcUntil = srcPos + length - while (i < srcUntil) { - array_update(dest, j, array_apply(src, i)) - i += 1 - j += 1 - } - } - - /** Copy one array to another. - * Equivalent to Java's - * `System.arraycopy(src, srcPos, dest, destPos, length)`, - * except that this also works for polymorphic and boxed arrays. - * - * Note that the passed-in `dest` array will be modified by this call. - * - * @param src the source array. - * @param srcPos starting position in the source array. - * @param dest destination array. - * @param destPos starting position in the destination array. - * @param length the number of array elements to be copied. - * - * @see `java.lang.System#arraycopy` - */ - def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { - val srcClass = src.getClass - if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) - java.lang.System.arraycopy(src, srcPos, dest, destPos, length) - else - slowcopy(src, srcPos, dest, destPos, length) - } - - /** Copy one array to another, truncating or padding with default values (if - * necessary) so the copy has the specified length. - * - * Equivalent to Java's - * `java.util.Arrays.copyOf(original, newLength)`, - * except that this works for primitive and object arrays in a single method. 
- * - * @see `java.util.Arrays#copyOf` - */ - def copyOf[A](original: Array[A], newLength: Int): Array[A] = (original match { - case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] - case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) - case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) - }).asInstanceOf[Array[A]] - - /** Copy one array to another, truncating or padding with default values (if - * necessary) so the copy has the specified length. The new array can have - * a different type than the original one as long as the values are - * assignment-compatible. When copying between primitive and object arrays, - * boxing and unboxing are supported. - * - * Equivalent to Java's - * `java.util.Arrays.copyOf(original, newLength, newType)`, - * except that this works for all combinations of primitive and object arrays - * in a single method. - * - * @see `java.util.Arrays#copyOf` - */ - def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { - val runtimeClass = ct.runtimeClass - if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] - else { - val destClass = runtimeClass.asInstanceOf[Class[A]] - if (destClass.isAssignableFrom(original.getClass.getComponentType)) { - if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) - else { - val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] - java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] - } - } else { - val dest = new Array[A](newLength) - Array.copy(original, 0, dest, 0, original.length) - dest - } - } - } - - private def newUnitArray(len: Int): Array[Unit] = { - val result = new Array[Unit](len) - java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) - result - } - - /** Returns an array of length 0 */ - def empty[T: ClassTag]: Array[T] = new Array[T](0) - - /** Creates an array with given elements. - * - * @param xs the elements to put in the array - * @return an array containing all elements from xs. - */ - // Subject to a compiler optimization in Cleanup. - // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } - def apply[T: ClassTag](xs: T*): Array[T] = { - val array = new Array[T](xs.length) - val iterator = xs.iterator - var i = 0 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Boolean` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { - val array = new Array[Boolean](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Byte` objects */ - // Subject to a compiler optimization in Cleanup, see above. 
- def apply(x: Byte, xs: Byte*): Array[Byte] = { - val array = new Array[Byte](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Short` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Short, xs: Short*): Array[Short] = { - val array = new Array[Short](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Char` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Char, xs: Char*): Array[Char] = { - val array = new Array[Char](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Int` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Int, xs: Int*): Array[Int] = { - val array = new Array[Int](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Long` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Long, xs: Long*): Array[Long] = { - val array = new Array[Long](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Float` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Float, xs: Float*): Array[Float] = { - val array = new Array[Float](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Double` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Double, xs: Double*): Array[Double] = { - val array = new Array[Double](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates an array of `Unit` objects */ - def apply(x: Unit, xs: Unit*): Array[Unit] = { - val array = new Array[Unit](xs.length + 1) - array(0) = x - val iterator = xs.iterator - var i = 1 - while (iterator.hasNext) { - array(i) = iterator.next(); i += 1 - } - array - } - - /** Creates array with given dimensions */ - def ofDim[T: ClassTag](n1: Int): Array[T] = - new Array[T](n1) - /** Creates a 2-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { - val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) - for (i <- 0 until n1) arr(i) = new Array[T](n2) - arr - // tabulate(n1)(_ => ofDim[T](n2)) - } - /** Creates a 3-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = - tabulate(n1)(_ => ofDim[T](n2, n3)) - /** Creates a 4-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = - tabulate(n1)(_ => ofDim[T](n2, n3, n4)) - /** Creates a 5-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = - tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) - - /** Concatenates all arrays into a single array. 
- * - * @param xss the given arrays - * @return the array created from concatenating `xss` - */ - def concat[T: ClassTag](xss: Array[T]*): Array[T] = { - val b = newBuilder[T] - b.sizeHint(xss.map(_.length).sum) - for (xs <- xss) b ++= xs - b.result() - } - - /** Returns an array that contains the results of some element computation a number - * of times. - * - * Note that this means that `elem` is computed a total of n times: - * {{{ - * scala> Array.fill(3){ math.random } - * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) - * }}} - * - * @param n the number of elements desired - * @param elem the element computation - * @return an Array of size n, where each element contains the result of computing - * `elem`. - */ - def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { - if (n <= 0) { - empty[T] - } else { - val array = new Array[T](n) - var i = 0 - while (i < n) { - array(i) = elem - i += 1 - } - array - } - } - - /** Returns a two-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = - tabulate(n1)(_ => fill(n2)(elem)) - - /** Returns a three-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = - tabulate(n1)(_ => fill(n2, n3)(elem)) - - /** Returns a four-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = - tabulate(n1)(_ => fill(n2, n3, n4)(elem)) - - /** Returns a five-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = - tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) - - /** Returns an array containing values of a given function over a range of integer - * values starting from 0. 
- * - * @param n The number of elements in the array - * @param f The function computing element values - * @return A traversable consisting of elements `f(0),f(1), ..., f(n - 1)` - */ - def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { - if (n <= 0) { - empty[T] - } else { - val array = new Array[T](n) - var i = 0 - while (i < n) { - array(i) = f(i) - i += 1 - } - array - } - } - - /** Returns a two-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Returns a three-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Returns a four-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Returns a five-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Returns an array containing a sequence of increasing integers in a range. - * - * @param start the start value of the array - * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) - * @return the array with values in range `start, start + 1, ..., end - 1` - * up to, but excluding, `end`. - */ - def range(start: Int, end: Int): Array[Int] = range(start, end, 1) - - /** Returns an array containing equally spaced values in some integer interval. 
- * - * @param start the start value of the array - * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) - * @param step the increment value of the array (may not be zero) - * @return the array with values in `start, start + step, ...` up to, but excluding `end` - */ - def range(start: Int, end: Int, step: Int): Array[Int] = { - if (step == 0) throw new IllegalArgumentException("zero step") - val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) - - var n = 0 - var i = start - while (if (step < 0) end < i else i < end) { - array(n) = i - i += step - n += 1 - } - array - } - - /** Returns an array containing repeated applications of a function to a start value. - * - * @param start the start value of the array - * @param len the number of elements returned by the array - * @param f the function that is repeatedly applied - * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { - if (len > 0) { - val array = new Array[T](len) - var acc = start - var i = 1 - array(0) = acc - - while (i < len) { - acc = f(acc) - array(i) = acc - i += 1 - } - array - } else { - empty[T] - } - } - - def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = { - if (xs eq ys) - return true - if (xs.length != ys.length) - return false - - val len = xs.length - var i = 0 - while (i < len) { - if (xs(i) != ys(i)) - return false - i += 1 - } - true - } - - /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. - * - * @param x the selector value - * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` - */ - def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) - - final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { - def isEmpty: Boolean = false - def get: UnapplySeqWrapper[T] = this - def lengthCompare(len: Int): Int = a.lengthCompare(len) - def apply(i: Int): T = a(i) - def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 - def toSeq: scala.Seq[T] = a.toSeq // clones the array - } -} - -/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation - * for Java's `T[]`. - * - * {{{ - * val numbers = Array(1, 2, 3, 4) - * val first = numbers(0) // read the first element - * numbers(3) = 100 // replace the 4th array element with 100 - * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two - * }}} - * - * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above - * example code. - * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to - * `update(Int, T)`. - * - * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion - * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion - * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). - * Both types make available many of the standard operations found in the Scala collections API. - * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, - * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. - * - * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. 
For instance, - * consider the following code: - * - * {{{ - * val arr = Array(1, 2, 3) - * val arrReversed = arr.reverse - * val seqReversed : collection.Seq[Int] = arr.reverse - * }}} - * - * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring - * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed - * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another - * `ArraySeq`. - * - * @see [[http://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) - * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. - * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. - * @hideImplicitConversion scala.Predef.booleanArrayOps - * @hideImplicitConversion scala.Predef.byteArrayOps - * @hideImplicitConversion scala.Predef.charArrayOps - * @hideImplicitConversion scala.Predef.doubleArrayOps - * @hideImplicitConversion scala.Predef.floatArrayOps - * @hideImplicitConversion scala.Predef.intArrayOps - * @hideImplicitConversion scala.Predef.longArrayOps - * @hideImplicitConversion scala.Predef.refArrayOps - * @hideImplicitConversion scala.Predef.shortArrayOps - * @hideImplicitConversion scala.Predef.unitArrayOps - * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray - * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray - * @define coll array - * @define Coll `Array` - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define collectExample - * @define undefinedorder - */ -final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { - - /** The length of the array */ - def length: Int = throw new Error() - - /** The element at given index. - * - * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. - * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. - * - * @param i the index - * @return the element at the given index - * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` - */ - def apply(i: Int): T = throw new Error() - - /** Update the element at given index. - * - * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. - * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. 
- * - * @param i the index - * @param x the value to be written at index `i` - * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` - */ - def update(i: Int, x: T): Unit = { throw new Error() } - - /** Clone the Array. - * - * @return A clone of the Array. - */ - override def clone(): Array[T] = throw new Error() -} diff --git a/scalalib/overrides-2.13/scala/reflect/ClassTag.scala b/scalalib/overrides-2.13/scala/reflect/ClassTag.scala deleted file mode 100644 index 4b69bb0240..0000000000 --- a/scalalib/overrides-2.13/scala/reflect/ClassTag.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package reflect - -import java.lang.{ Class => jClass } - -/** - * - * A `ClassTag[T]` stores the erased class of a given type `T`, accessible via the `runtimeClass` - * field. This is particularly useful for instantiating `Array`s whose element types are unknown - * at compile time. - * - * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags.TypeTag]]s, in that they - * wrap only the runtime class of a given type, whereas a `TypeTag` contains all static type - * information. That is, `ClassTag`s are constructed from knowing only the top-level class of a - * type, without necessarily knowing all of its argument types. This runtime information is enough - * for runtime `Array` creation. - * - * For example: - * {{{ - * scala> def mkArray[T : ClassTag](elems: T*) = Array[T](elems: _*) - * mkArray: [T](elems: T*)(implicit evidence\$1: scala.reflect.ClassTag[T])Array[T] - * - * scala> mkArray(42, 13) - * res0: Array[Int] = Array(42, 13) - * - * scala> mkArray("Japan","Brazil","Germany") - * res1: Array[String] = Array(Japan, Brazil, Germany) - * }}} - * - * See [[scala.reflect.api.TypeTags]] for more examples, or the - * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] - * for more details. - * - */ -@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}") -trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable { - // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder` - // class tags, and all tags in general, should be as minimalistic as possible - - /** A class representing the type `U` to which `T` would be erased. - * Note that there is no subtyping relationship between `T` and `U`. - */ - def runtimeClass: jClass[_] - - /** Produces a `ClassTag` that knows how to instantiate an `Array[Array[T]]` */ - def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) - - /** Produces a new array with element type `T` and length `len` */ - def newArray(len: Int): Array[T] = - java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - - /** A ClassTag[T] can serve as an extractor that matches only objects of type T. - * - * The compiler tries to turn unchecked type tests in pattern matches into checked ones - * by wrapping a `(_: T)` type pattern as `ct(_: T)`, where `ct` is the `ClassTag[T]` instance. - * Type tests necessary before calling other extractors are treated similarly. 
- * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)` - * is uncheckable, but we have an instance of `ClassTag[T]`. - */ - def unapply(x: Any): Option[T] = - if (runtimeClass.isInstance(x)) Some(x.asInstanceOf[T]) - else None - - // case class accessories - override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]] - override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass - override def hashCode = runtimeClass.## - override def toString = { - def prettyprint(clazz: jClass[_]): String = - if (clazz.isArray) s"Array[${prettyprint(clazz.getComponentType)}]" else - clazz.getName - prettyprint(runtimeClass) - } -} - -/** - * Class tags corresponding to primitive types and constructor/extractor for ClassTags. - */ -object ClassTag { - private[this] val ObjectTYPE = classOf[java.lang.Object] - private[this] val NothingTYPE = classOf[scala.runtime.Nothing$] - private[this] val NullTYPE = classOf[scala.runtime.Null$] - - import ManifestFactory._ - - val Byte : ByteManifest = Manifest.Byte - val Short : ShortManifest = Manifest.Short - val Char : CharManifest = Manifest.Char - val Int : IntManifest = Manifest.Int - val Long : LongManifest = Manifest.Long - val Float : FloatManifest = Manifest.Float - val Double : DoubleManifest = Manifest.Double - val Boolean : BooleanManifest = Manifest.Boolean - val Unit : UnitManifest = Manifest.Unit - val Any : ClassTag[scala.Any] = Manifest.Any - val Object : ClassTag[java.lang.Object] = Manifest.Object - val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal - val AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef - val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing - val Null : ClassTag[scala.Null] = Manifest.Null - - @SerialVersionUID(1L) - private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] { - override def newArray(len: Int): Array[T] = { - java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - } - } - - def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = - runtimeClass1 match { - case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]] - case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]] - case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]] - case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]] - case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]] - case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]] - case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]] - case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]] - case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]] - case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]] - case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]] - case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]] - case _ => new GenericClassTag[T](runtimeClass1) - } - - def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass) -} diff --git a/scalalib/overrides-2.13/scala/reflect/Manifest.scala b/scalalib/overrides-2.13/scala/reflect/Manifest.scala deleted file mode 100644 index 081a4a09ab..0000000000 --- a/scalalib/overrides-2.13/scala/reflect/Manifest.scala +++ /dev/null @@ -1,457 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package reflect - -import scala.collection.mutable.{ArrayBuilder, ArraySeq} - -/** A `Manifest[T]` is an opaque descriptor for type T. Its supported use - * is to give access to the erasure of the type as a `Class` instance, as - * is necessary for the creation of native `Arrays` if the class is not - * known at compile time. - * - * The type-relation operators `<:<` and `=:=` should be considered - * approximations only, as there are numerous aspects of type conformance - * which are not yet adequately represented in manifests. - * - * Example usages: - * {{{ - * def arr[T] = new Array[T](0) // does not compile - * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles - * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding - * - * // Methods manifest and optManifest are in [[scala.Predef]]. - * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] - * isApproxSubType[List[String], List[AnyRef]] // true - * isApproxSubType[List[String], List[Int]] // false - * - * def methods[T: Manifest] = manifest[T].runtimeClass.getMethods - * def retType[T: Manifest](name: String) = - * methods[T] find (_.getName == name) map (_.getGenericReturnType) - * - * retType[Map[_, _]]("values") // Some(scala.collection.Iterable) - * }}} - */ -@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") -trait Manifest[T] extends ClassManifest[T] with Equals { - override def typeArguments: List[Manifest[_]] = Nil - - override def arrayManifest: Manifest[Array[T]] = - Manifest.classType[Array[T]](arrayClass[T](runtimeClass), this) - - override def canEqual(that: Any): Boolean = that match { - case _: Manifest[_] => true - case _ => false - } - /** Note: testing for erasure here is important, as it is many times - * faster than <:< and rules out most comparisons. - */ - override def equals(that: Any): Boolean = that match { - case m: Manifest[_] => (m canEqual this) && (this.runtimeClass == m.runtimeClass) && (this <:< m) && (m <:< this) - case _ => false - } - override def hashCode = this.runtimeClass.## -} - -/** The object `Manifest` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - */ -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") -object Manifest { - /* Forward all the public members of ManifestFactory, since this object used - * to be a `private val Manifest = ManifestFactory` in the package object. It - * was moved here because it needs to be in the same file as `trait Manifest` - * defined above. 
- */ - - def valueManifests: List[AnyValManifest[_]] = - ManifestFactory.valueManifests - - val Byte: ManifestFactory.ByteManifest = ManifestFactory.Byte - val Short: ManifestFactory.ShortManifest = ManifestFactory.Short - val Char: ManifestFactory.CharManifest = ManifestFactory.Char - val Int: ManifestFactory.IntManifest = ManifestFactory.Int - val Long: ManifestFactory.LongManifest = ManifestFactory.Long - val Float: ManifestFactory.FloatManifest = ManifestFactory.Float - val Double: ManifestFactory.DoubleManifest = ManifestFactory.Double - val Boolean: ManifestFactory.BooleanManifest = ManifestFactory.Boolean - val Unit: ManifestFactory.UnitManifest = ManifestFactory.Unit - - val Any: Manifest[scala.Any] = ManifestFactory.Any - val Object: Manifest[java.lang.Object] = ManifestFactory.Object - val AnyRef: Manifest[scala.AnyRef] = ManifestFactory.AnyRef - val AnyVal: Manifest[scala.AnyVal] = ManifestFactory.AnyVal - val Null: Manifest[scala.Null] = ManifestFactory.Null - val Nothing: Manifest[scala.Nothing] = ManifestFactory.Nothing - - /** Manifest for the singleton type `value.type`. */ - def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = - ManifestFactory.singleType[T](value) - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class. - * @note This no-prefix, no-arguments case is separate because we - * it's called from ScalaRunTime.boxArray itself. If we - * pass varargs as arrays into this, we get an infinitely recursive call - * to boxArray. (Besides, having a separate case is more efficient) - */ - def classType[T](clazz: Predef.Class[_]): Manifest[T] = - ManifestFactory.classType[T](clazz) - - /** Manifest for the class type `clazz`, where `clazz` is - * a top-level or static class and args are its type arguments. */ - def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = - ManifestFactory.classType[T](clazz, arg1, args: _*) - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a class with non-package prefix type `prefix` and type arguments `args`. - */ - def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - ManifestFactory.classType[T](prefix, clazz, args: _*) - - def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = - ManifestFactory.arrayType[T](arg) - - /** Manifest for the abstract type `prefix # name`. `upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. */ - def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - ManifestFactory.abstractType[T](prefix, name, upperBound, args: _*) - - /** Manifest for the unknown type `_ >: L <: U` in an existential. */ - def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = - ManifestFactory.wildcardType[T](lowerBound, upperBound) - - /** Manifest for the intersection type `parents_0 with ... with parents_n`. 
*/ - def intersectionType[T](parents: Manifest[_]*): Manifest[T] = - ManifestFactory.intersectionType[T](parents: _*) - -} - -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0") -@SerialVersionUID(1L) -abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { - override def <:<(that: ClassManifest[_]): Boolean = - (that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal) - override def canEqual(other: Any) = other match { - case _: AnyValManifest[_] => true - case _ => false - } - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - @transient - override val hashCode = System.identityHashCode(this) -} - -/** `ManifestFactory` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - * - * Unlike `Manifest`, this factory isn't annotated with a deprecation warning. - * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. - * Why so complicated? Read up the comments for `ClassManifestFactory`. - */ -object ManifestFactory { - def valueManifests: List[AnyValManifest[_]] = - List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit) - - @SerialVersionUID(1L) - final private[reflect] class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { - def runtimeClass = java.lang.Byte.TYPE - @inline override def newArray(len: Int): Array[Byte] = new Array[Byte](len) - override def newWrappedArray(len: Int): ArraySeq[Byte] = new ArraySeq.ofByte(new Array[Byte](len)) - override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() - override def unapply(x: Any): Option[Byte] = { - x match { - case d: Byte => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Byte - } - val Byte: ByteManifest = new ByteManifest - - @SerialVersionUID(1L) - final private[reflect] class ShortManifest extends AnyValManifest[scala.Short]("Short") { - def runtimeClass = java.lang.Short.TYPE - @inline override def newArray(len: Int): Array[Short] = new Array[Short](len) - override def newWrappedArray(len: Int): ArraySeq[Short] = new ArraySeq.ofShort(new Array[Short](len)) - override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() - override def unapply(x: Any): Option[Short] = { - x match { - case d: Short => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Short - } - val Short: ShortManifest = new ShortManifest - - @SerialVersionUID(1L) - final private[reflect] class CharManifest extends AnyValManifest[scala.Char]("Char") { - def runtimeClass = java.lang.Character.TYPE - @inline override def newArray(len: Int): Array[Char] = new Array[Char](len) - override def newWrappedArray(len: Int): ArraySeq[Char] = new ArraySeq.ofChar(new Array[Char](len)) - override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() - override def unapply(x: Any): Option[Char] = { - x match { - case d: Char => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Char - } - val Char: CharManifest = new CharManifest - - @SerialVersionUID(1L) - final private[reflect] class IntManifest extends AnyValManifest[scala.Int]("Int") { - def runtimeClass = java.lang.Integer.TYPE - @inline override def newArray(len: Int): Array[Int] = new Array[Int](len) - override def newWrappedArray(len: Int): 
ArraySeq[Int] = new ArraySeq.ofInt(new Array[Int](len)) - override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() - override def unapply(x: Any): Option[Int] = { - x match { - case d: Int => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Int - } - val Int: IntManifest = new IntManifest - - @SerialVersionUID(1L) - final private[reflect] class LongManifest extends AnyValManifest[scala.Long]("Long") { - def runtimeClass = java.lang.Long.TYPE - @inline override def newArray(len: Int): Array[Long] = new Array[Long](len) - override def newWrappedArray(len: Int): ArraySeq[Long] = new ArraySeq.ofLong(new Array[Long](len)) - override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() - override def unapply(x: Any): Option[Long] = { - x match { - case d: Long => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Long - } - val Long: LongManifest = new LongManifest - - @SerialVersionUID(1L) - final private[reflect] class FloatManifest extends AnyValManifest[scala.Float]("Float") { - def runtimeClass = java.lang.Float.TYPE - @inline override def newArray(len: Int): Array[Float] = new Array[Float](len) - override def newWrappedArray(len: Int): ArraySeq[Float] = new ArraySeq.ofFloat(new Array[Float](len)) - override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() - override def unapply(x: Any): Option[Float] = { - x match { - case d: Float => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Float - } - val Float: FloatManifest = new FloatManifest - - @SerialVersionUID(1L) - final private[reflect] class DoubleManifest extends AnyValManifest[scala.Double]("Double") { - def runtimeClass = java.lang.Double.TYPE - @inline override def newArray(len: Int): Array[Double] = new Array[Double](len) - override def newWrappedArray(len: Int): ArraySeq[Double] = new ArraySeq.ofDouble(new Array[Double](len)) - override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() - - override def unapply(x: Any): Option[Double] = { - x match { - case d: Double => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Double - } - val Double: DoubleManifest = new DoubleManifest - - @SerialVersionUID(1L) - final private[reflect] class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { - def runtimeClass = java.lang.Boolean.TYPE - @inline override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) - override def newWrappedArray(len: Int): ArraySeq[Boolean] = new ArraySeq.ofBoolean(new Array[Boolean](len)) - override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() - override def unapply(x: Any): Option[Boolean] = { - x match { - case d: Boolean => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Boolean - } - val Boolean: BooleanManifest = new BooleanManifest - - @SerialVersionUID(1L) - final private[reflect] class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { - def runtimeClass = java.lang.Void.TYPE - @inline override def newArray(len: Int): Array[Unit] = new Array[Unit](len) - override def newWrappedArray(len: Int): ArraySeq[Unit] = new ArraySeq.ofUnit(new Array[Unit](len)) - override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit() - override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] = - if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]] - else super.arrayClass(tp) 
- override def unapply(x: Any): Option[Unit] = { - x match { - case d: Unit => Some(d) - case _ => None - } - } - private def readResolve(): Any = Manifest.Unit - } - val Unit: UnitManifest = new UnitManifest - - private[this] val ObjectTYPE = classOf[java.lang.Object] - private[this] val NothingTYPE = classOf[scala.runtime.Nothing$] - private[this] val NullTYPE = classOf[scala.runtime.Null$] - - @SerialVersionUID(1L) - final private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { - override def newArray(len: Int) = new Array[scala.Any](len) - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) - private def readResolve(): Any = Manifest.Any - } - val Any: Manifest[scala.Any] = new AnyManifest - - @SerialVersionUID(1L) - final private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { - override def newArray(len: Int) = new Array[java.lang.Object](len) - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) - private def readResolve(): Any = Manifest.Object - } - val Object: Manifest[java.lang.Object] = new ObjectManifest - - val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] - - @SerialVersionUID(1L) - final private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { - override def newArray(len: Int) = new Array[scala.AnyVal](len) - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) - private def readResolve(): Any = Manifest.AnyVal - } - val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest - - @SerialVersionUID(1L) - final private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { - override def newArray(len: Int) = new Array[scala.Null](len) - override def <:<(that: ClassManifest[_]): Boolean = - (that ne null) && (that ne Nothing) && !(that <:< AnyVal) - private def readResolve(): Any = Manifest.Null - } - val Null: Manifest[scala.Null] = new NullManifest - - @SerialVersionUID(1L) - final private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { - override def newArray(len: Int) = new Array[scala.Nothing](len) - override def <:<(that: ClassManifest[_]): Boolean = (that ne null) - private def readResolve(): Any = Manifest.Nothing - } - val Nothing: Manifest[scala.Nothing] = new NothingManifest - - @SerialVersionUID(1L) - final private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { - lazy val runtimeClass = value.getClass - override lazy val toString = value.toString + ".type" - } - - /** Manifest for the singleton type `value.type`. */ - def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = - new SingletonTypeManifest[T](value) - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class. - * @note This no-prefix, no-arguments case is separate because we - * it's called from ScalaRunTime.boxArray itself. If we - * pass varargs as arrays into this, we get an infinitely recursive call - * to boxArray. (Besides, having a separate case is more efficient) - */ - def classType[T](clazz: Predef.Class[_]): Manifest[T] = - new ClassTypeManifest[T](None, clazz, Nil) - - /** Manifest for the class type `clazz`, where `clazz` is - * a top-level or static class and args are its type arguments. 
*/ - def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = - new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a class with non-package prefix type `prefix` and type arguments `args`. - */ - def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - new ClassTypeManifest[T](Some(prefix), clazz, args.toList) - - @SerialVersionUID(1L) - private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_], - override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) { - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - @transient - override val hashCode = System.identityHashCode(this) - } - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class. */ - @SerialVersionUID(1L) - private class ClassTypeManifest[T](prefix: Option[Manifest[_]], - val runtimeClass: Predef.Class[_], - override val typeArguments: List[Manifest[_]]) extends Manifest[T] { - override def toString = - (if (prefix.isEmpty) "" else prefix.get.toString+"#") + - (if (runtimeClass.isArray) "Array" else runtimeClass.getName) + - argString - } - - def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = - arg.asInstanceOf[Manifest[T]].arrayManifest - - @SerialVersionUID(1L) - private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: scala.collection.Seq[Manifest[_]]) extends Manifest[T] { - def runtimeClass = upperBound - override val typeArguments = args.toList - override def toString = prefix.toString+"#"+name+argString - } - - /** Manifest for the abstract type `prefix # name`. `upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. */ - def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - new AbstractTypeManifest[T](prefix, name, upperBound, args) - - @SerialVersionUID(1L) - private class WildcardManifest[T](lowerBound: Manifest[_], upperBound: Manifest[_]) extends Manifest[T] { - def runtimeClass = upperBound.runtimeClass - override def toString = - "_" + - (if (lowerBound eq Nothing) "" else " >: "+lowerBound) + - (if (upperBound eq Nothing) "" else " <: "+upperBound) - } - - /** Manifest for the unknown type `_ >: L <: U` in an existential. - */ - def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = - new WildcardManifest[T](lowerBound, upperBound) - - @SerialVersionUID(1L) - private class IntersectionTypeManifest[T](parents: Array[Manifest[_]]) extends Manifest[T] { - // We use an `Array` instead of a `Seq` for `parents` to avoid cyclic dependencies during deserialization - // which can cause serialization proxies to leak and cause a ClassCastException. - def runtimeClass = parents(0).runtimeClass - override def toString = parents.mkString(" with ") - } - - /** Manifest for the intersection type `parents_0 with ... with parents_n`. 
*/ - def intersectionType[T](parents: Manifest[_]*): Manifest[T] = - new IntersectionTypeManifest[T](parents.toArray) -} diff --git a/scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala b/scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala deleted file mode 100644 index 578ae7e290..0000000000 --- a/scalalib/overrides-2.13/scala/runtime/ScalaRunTime.scala +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package runtime - -import scala.collection.{AbstractIterator, AnyConstr, SortedOps, StrictOptimizedIterableOps, StringOps, StringView, View} -import scala.collection.generic.IsIterable -import scala.collection.immutable.{ArraySeq, NumericRange} -import scala.collection.mutable.StringBuilder -import scala.math.min -import scala.reflect.{ClassTag, classTag} -import java.lang.{Class => jClass} -import java.lang.reflect.{Method => JMethod} - -/** The object ScalaRunTime provides support methods required by - * the scala runtime. All these methods should be considered - * outside the API and subject to change or removal without notice. - */ -object ScalaRunTime { - def isArray(x: Any, atLevel: Int = 1): Boolean = - x != null && isArrayClass(x.getClass, atLevel) - - private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = - clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) - - // A helper method to make my life in the pattern matcher a lot easier. - def drop[Repr](coll: Repr, num: Int)(implicit iterable: IsIterable[Repr] { type C <: Repr }): Repr = - iterable(coll) drop num - - /** Return the class object representing an array with element class `clazz`. - */ - def arrayClass(clazz: jClass[_]): jClass[_] = { - // newInstance throws an exception if the erasure is Void.TYPE. see scala/bug#5680 - if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] - else java.lang.reflect.Array.newInstance(clazz, 0).getClass - } - - /** Return the class object representing an unboxed value type, - * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler - * rewrites expressions like 5.getClass to come here. 
- */ - def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = - classTag[T].runtimeClass.asInstanceOf[jClass[T]] - - /** Retrieve generic array element */ - def array_apply(xs: AnyRef, idx: Int): Any = { - xs match { - case x: Array[AnyRef] => x(idx).asInstanceOf[Any] - case x: Array[Int] => x(idx).asInstanceOf[Any] - case x: Array[Double] => x(idx).asInstanceOf[Any] - case x: Array[Long] => x(idx).asInstanceOf[Any] - case x: Array[Float] => x(idx).asInstanceOf[Any] - case x: Array[Char] => x(idx).asInstanceOf[Any] - case x: Array[Byte] => x(idx).asInstanceOf[Any] - case x: Array[Short] => x(idx).asInstanceOf[Any] - case x: Array[Boolean] => x(idx).asInstanceOf[Any] - case x: Array[Unit] => x(idx).asInstanceOf[Any] - case null => throw new NullPointerException - } - } - - /** update generic array element */ - def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { - xs match { - case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] - case x: Array[Int] => x(idx) = value.asInstanceOf[Int] - case x: Array[Double] => x(idx) = value.asInstanceOf[Double] - case x: Array[Long] => x(idx) = value.asInstanceOf[Long] - case x: Array[Float] => x(idx) = value.asInstanceOf[Float] - case x: Array[Char] => x(idx) = value.asInstanceOf[Char] - case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte] - case x: Array[Short] => x(idx) = value.asInstanceOf[Short] - case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean] - case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit] - case null => throw new NullPointerException - } - } - - /** Get generic array length */ - @inline def array_length(xs: AnyRef): Int = java.lang.reflect.Array.getLength(xs) - - // TODO: bytecode Object.clone() will in fact work here and avoids - // the type switch. See Array_clone comment in BCodeBodyBuilder. - def array_clone(xs: AnyRef): AnyRef = xs match { - case x: Array[AnyRef] => x.clone() - case x: Array[Int] => x.clone() - case x: Array[Double] => x.clone() - case x: Array[Long] => x.clone() - case x: Array[Float] => x.clone() - case x: Array[Char] => x.clone() - case x: Array[Byte] => x.clone() - case x: Array[Short] => x.clone() - case x: Array[Boolean] => x.clone() - case null => throw new NullPointerException - } - - /** Convert an array to an object array. - * Needed to deal with vararg arguments of primitive types that are passed - * to a generic Java vararg parameter T ... - */ - def toObjectArray(src: AnyRef): Array[Object] = { - def copy[@specialized T <: AnyVal](src: Array[T]): Array[Object] = { - val length = src.length - if (length == 0) Array.emptyObjectArray - else { - val dest = new Array[Object](length) - var i = 0 - while (i < length) { - dest(i) = src(i).asInstanceOf[AnyRef] - i += 1 - } - dest - } - } - src match { - case x: Array[AnyRef] => x - case x: Array[Int] => copy(x) - case x: Array[Double] => copy(x) - case x: Array[Long] => copy(x) - case x: Array[Float] => copy(x) - case x: Array[Char] => copy(x) - case x: Array[Byte] => copy(x) - case x: Array[Short] => copy(x) - case x: Array[Boolean] => copy(x) - case x: Array[Unit] => copy(x) - case null => throw new NullPointerException - } - } - - def toArray[T](xs: scala.collection.Seq[T]) = { - if (xs.isEmpty) Array.emptyObjectArray - else { - val arr = new Array[AnyRef](xs.length) - val it = xs.iterator - var i = 0 - while (it.hasNext) { - arr(i) = it.next().asInstanceOf[AnyRef] - i += 1 - } - arr - } - } - - // Java bug: https://bugs.java.com/view_bug.do?bug_id=4071957 - // More background at ticket #2318. 
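
The array_apply and array_update switches in the deleted ScalaRunTime override above are the fallback path for generic array access: once the element type of an Array[T] has been erased, the compiler cannot emit a typed load or store and instead dispatches on the concrete array class at runtime. A minimal sketch of a caller that ends up on this path (assumed example; ErasedArrayAccess and swap are our names):

    object ErasedArrayAccess {
      def swap[T](xs: Array[T], i: Int, j: Int): Unit = {
        val tmp = xs(i)   // lowered, roughly, to ScalaRunTime.array_apply(xs, i)
        xs(i) = xs(j)     // lowered, roughly, to ScalaRunTime.array_update(xs, i, ...)
        xs(j) = tmp
      }

      def main(args: Array[String]): Unit = {
        val a = Array(1, 2, 3)
        swap(a, 0, 2)
        println(a.mkString(", ")) // prints: 3, 2, 1
      }
    }
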
- def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) - - def _toString(x: Product): String = - x.productIterator.mkString(x.productPrefix + "(", ",", ")") - - def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) - - /** A helper for case classes. */ - def typedProductIterator[T](x: Product): Iterator[T] = { - new AbstractIterator[T] { - private[this] var c: Int = 0 - private[this] val cmax = x.productArity - def hasNext = c < cmax - def next() = { - val result = x.productElement(c) - c += 1 - result.asInstanceOf[T] - } - } - } - - /** Given any Scala value, convert it to a String. - * - * The primary motivation for this method is to provide a means for - * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naively calling toString on said value. - * In particular, it addresses the fact that (a) toString cannot be - * called on null and (b) depending on the apparent type of an - * array, toString may or may not print it in a human-readable form. - * - * @param arg the value to stringify - * @return a string representation of arg. - */ - def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) - def stringOf(arg: Any, maxElements: Int): String = { - def packageOf(x: AnyRef) = x.getClass.getPackage match { - case null => "" - case p => p.getName - } - def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala." - def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc." - - // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22) - def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple") - - // We use reflection because the scala.xml package might not be available - def isSubClassOf(potentialSubClass: Class[_], ofClass: String) = - try { - val classLoader = potentialSubClass.getClassLoader - val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader) - clazz.isAssignableFrom(potentialSubClass) - } catch { - case cnfe: ClassNotFoundException => false - } - def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node") - def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData") - - // When doing our own iteration is dangerous - def useOwnToString(x: Any) = x match { - // Range/NumericRange have a custom toString to avoid walking a gazillion elements - case _: Range | _: NumericRange[_] => true - // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 - case _: SortedOps[_, _] => true - // StringBuilder(a, b, c) and similar not so attractive - case _: StringView | _: StringOps | _: StringBuilder => true - // Don't want to evaluate any elements in a view - case _: View[_] => true - // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] - // -> catch those by isXmlNode and isXmlMetaData. - // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom - // collections which may have useful toString methods - ticket #3710 - // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. 
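
The _toString and _hashCode helpers shown above are what back the synthesized toString and hashCode of case classes (the latter by delegating to MurmurHash3.productHash, as the hunk itself shows). A small check of the toString side (assumed example; CaseClassHelpersDemo and Point are our names):

    object CaseClassHelpersDemo {
      final case class Point(x: Int, y: Int)

      def main(args: Array[String]): Unit = {
        val p = Point(1, 2)
        // Same formula as ScalaRunTime._toString: productPrefix + "(" + elements + ")"
        val manual = p.productIterator.mkString(p.productPrefix + "(", ",", ")")
        println(p.toString)           // Point(1,2)
        println(manual == p.toString) // true
      }
    }
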
- case x: Iterable[_] => (!x.isInstanceOf[StrictOptimizedIterableOps[_, AnyConstr, _]]) || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) - // Otherwise, nothing could possibly go wrong - case _ => false - } - - // A variation on inner for maps so they print -> instead of bare tuples - def mapInner(arg: Any): String = arg match { - case (k, v) => inner(k) + " -> " + inner(v) - case _ => inner(arg) - } - - // Special casing Unit arrays, the value class which uses a reference array type. - def arrayToString(x: AnyRef) = { - if (x.getClass.getComponentType == classOf[BoxedUnit]) - (0 until min(array_length(x), maxElements)).map(_ => "()").mkString("Array(", ", ", ")") - else - x.asInstanceOf[Array[_]].iterator.take(maxElements).map(inner).mkString("Array(", ", ", ")") - } - - // The recursively applied attempt to prettify Array printing. - // Note that iterator is used if possible and foreach is used as a - // last resort, because the parallel collections "foreach" in a - // random order even on sequences. - def inner(arg: Any): String = arg match { - case null => "null" - case "" => "\"\"" - case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x - case x if useOwnToString(x) => x.toString - case x: AnyRef if isArray(x) => arrayToString(x) - case x: scala.collection.Map[_, _] => x.iterator.take(maxElements).map(mapInner).mkString(x.collectionClassName + "(", ", ", ")") - case x: Iterable[_] => x.iterator.take(maxElements).map(inner).mkString(x.collectionClassName + "(", ", ", ")") - case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma - case x: Product if isTuple(x) => x.productIterator.map(inner).mkString("(", ",", ")") - case x => x.toString - } - - // The try/catch is defense against iterables which aren't actually designed - // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. - try inner(arg) - catch { - case _: UnsupportedOperationException | _: AssertionError => "" + arg - } - } - - /** stringOf formatted for use in a repl result. 
*/ - def replStringOf(arg: Any, maxElements: Int): String = - stringOf(arg, maxElements) match { - case null => "null toString" - case s if s.indexOf('\n') >= 0 => "\n" + s + "\n" - case s => s + "\n" - } - - // Convert arrays to immutable.ArraySeq for use with Java varargs: - def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = - if (xs eq null) null - else ArraySeq.unsafeWrapArray(xs) - def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq[T] = { - if (xs eq null) null - else if (xs.length == 0) ArraySeq.empty[AnyRef].asInstanceOf[ArraySeq[T]] - else new ArraySeq.ofRef[T](xs) - } - def wrapIntArray(xs: Array[Int]): ArraySeq[Int] = if (xs ne null) new ArraySeq.ofInt(xs) else null - def wrapDoubleArray(xs: Array[Double]): ArraySeq[Double] = if (xs ne null) new ArraySeq.ofDouble(xs) else null - def wrapLongArray(xs: Array[Long]): ArraySeq[Long] = if (xs ne null) new ArraySeq.ofLong(xs) else null - def wrapFloatArray(xs: Array[Float]): ArraySeq[Float] = if (xs ne null) new ArraySeq.ofFloat(xs) else null - def wrapCharArray(xs: Array[Char]): ArraySeq[Char] = if (xs ne null) new ArraySeq.ofChar(xs) else null - def wrapByteArray(xs: Array[Byte]): ArraySeq[Byte] = if (xs ne null) new ArraySeq.ofByte(xs) else null - def wrapShortArray(xs: Array[Short]): ArraySeq[Short] = if (xs ne null) new ArraySeq.ofShort(xs) else null - def wrapBooleanArray(xs: Array[Boolean]): ArraySeq[Boolean] = if (xs ne null) new ArraySeq.ofBoolean(xs) else null - def wrapUnitArray(xs: Array[Unit]): ArraySeq[Unit] = if (xs ne null) new ArraySeq.ofUnit(xs) else null -} From 92995904752f766f8d258b0b6ddd85d99f9dc6c7 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 13:37:16 +0200 Subject: [PATCH 23/75] Update auxlib runtime.Statics to match 2.13 changes --- .../main/scala/scala/runtime/Statics.scala | 16 +++ .../scala/runtime/Statics.scala | 97 ------------------- 2 files changed, 16 insertions(+), 97 deletions(-) delete mode 100644 scalalib/overrides-2.13/scala/runtime/Statics.scala diff --git a/auxlib/src/main/scala/scala/runtime/Statics.scala b/auxlib/src/main/scala/scala/runtime/Statics.scala index 8e040cbd58..daea0e9ac3 100644 --- a/auxlib/src/main/scala/scala/runtime/Statics.scala +++ b/auxlib/src/main/scala/scala/runtime/Statics.scala @@ -73,4 +73,20 @@ object Statics { case x: java.lang.Float => floatHash(x.floatValue) case _ => x.hashCode } + + /** Used as a marker object to return from PartialFunctions */ + def pfMarker: AnyRef = PFMarker + + private object PFMarker extends AnyRef + + def releaseFence(): Unit = () + + /** Just throws an exception. + * + * Used by the synthetic `productElement` and `productElementName` methods + * in case classes. Delegating the exception-throwing to this function + * reduces the bytecode size of the case class. + */ + final def ioobe[T](n: Int): T = + throw new IndexOutOfBoundsException(String.valueOf(n)) } diff --git a/scalalib/overrides-2.13/scala/runtime/Statics.scala b/scalalib/overrides-2.13/scala/runtime/Statics.scala deleted file mode 100644 index 61286337de..0000000000 --- a/scalalib/overrides-2.13/scala/runtime/Statics.scala +++ /dev/null @@ -1,97 +0,0 @@ -package scala.runtime - -// Ported from Scala.js - -/** Not for public consumption. Usage by the runtime only. 
- */ - -object Statics { - def mix(hash: Int, data: Int): Int = { - var h = mixLast(hash, data) - h = Integer.rotateLeft(h, 13) - (h * 5) + 0xe6546b64 - } - - def mixLast(hash: Int, data: Int): Int = { - var k = data - k *= 0xcc9e2d51 - k = Integer.rotateLeft(k, 15) - k *= 0x1b873593 - hash ^ k - } - - def finalizeHash(hash: Int, length: Int): Int = { - avalanche(hash ^ length) - } - - /** Force all bits of the hash to avalanche. Used for finalizing the hash. */ - def avalanche(h0: Int): Int = { - var h = h0 - h ^= h >>> 16 - h *= 0x85ebca6b - h ^= h >>> 13 - h *= 0xc2b2ae35 - h ^= h >>> 16 - h - } - - def longHash(lv: Long): Int = { - val lo = lv.toInt - val hi = (lv >>> 32).toInt - if (hi == (lo >> 31)) lo // it is in the Int range - else lo ^ hi - } - - def doubleHash(dv: Double): Int = { - /* This implementation is based on what 2.12.0-M5+ does on the JVM. - * The 2.11 implementation on the JVM was not consistent with that of - * BoxesRunTime, and most importantly was not consistent with the hash of - * Long values. - * - * In Scala.js, we always use the version consistent with BoxesRunTime. - * Note that, for values that happen to be valid floats but not valid - * longs, this implementation is *not* consistent with the JVM (just like - * that of BoxesRunTime). - */ - val iv = dv.toInt - if (iv == dv) { - iv - } else { - // don't test the case dv.toFloat == dv - val lv = dv.toLong - if (lv == dv) - lv.hashCode() - else - dv.hashCode() - } - } - - def floatHash(fv: Float): Int = { - doubleHash(fv.toDouble) - } - - def anyHash(x: Any): Int = { - x match { - case null => 0 - case x: Double => doubleHash(x) - case x: Long => longHash(x) - case _ => x.hashCode() - } - } - - /** Used as a marker object to return from PartialFunctions */ - def pfMarker: AnyRef = PFMarker - - private object PFMarker extends AnyRef - - def releaseFence(): Unit = () - - /** Just throws an exception. - * - * Used by the synthetic `productElement` and `productElementName` methods - * in case classes. Delegating the exception-throwing to this function - * reduces the bytecode size of the case class. - */ - final def ioobe[T](n: Int): T = - throw new IndexOutOfBoundsException(String.valueOf(n)) -} From 8231c2a50a8dddbd19be609027aef9de387fd891 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 13:37:53 +0200 Subject: [PATCH 24/75] build.sbt cleanup and fixes --- build.sbt | 38 +++++++++++++++++++++++++------------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/build.sbt b/build.sbt index b4cd02f51a..7f76f2c6c3 100644 --- a/build.sbt +++ b/build.sbt @@ -110,6 +110,18 @@ addCommandAlias( lazy val publishSnapshot = taskKey[Unit]("Publish snapshot to sonatype on every commit to master.") +val collectionsCompatLib = { + "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0" +} + +def parallelCollectionsLib(scalaVersion: String): Seq[ModuleID] = { + CrossVersion.partialVersion(scalaVersion) match { + case Some((2, n)) if n >= 13 => + Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0") + case _ => Nil + } +} + // to publish plugin (we only need to do this once, it's already done!) 
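
The collectionsCompatLib and parallelCollectionsLib helpers introduced in this build.sbt hunk gate the extra 2.13 dependency on CrossVersion.partialVersion, which reduces a full Scala version string to its (major, minor) pair, so the `n >= 13` guard only fires for 2.13.x builds. A tiny illustration of that helper's behaviour (assumed snippet; it only needs sbt's library-management API on the classpath, and PartialVersionDemo is our name):

    import sbt.CrossVersion

    object PartialVersionDemo {
      def main(args: Array[String]): Unit = {
        println(CrossVersion.partialVersion("2.11.12"))       // Some((2,11))
        println(CrossVersion.partialVersion("2.13.3"))        // Some((2,13))
        println(CrossVersion.partialVersion("not-a-version")) // None
      }
    }
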
// follow: https://www.scala-sbt.org/1.x/docs/Bintray-For-Plugins.html // then add a new package @@ -211,12 +223,6 @@ lazy val toolSettings: Seq[Setting[_]] = javacOptions ++= Seq("-encoding", "utf8") ) -lazy val crossCompileCompatSettings = Def.settings( - libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0" - ) ++ parallelCollectionsDependencies(scalaVersion.value) -) - lazy val buildInfoSettings: Seq[Setting[_]] = Def.settings( buildInfoPackage := "scala.scalanative.buildinfo", @@ -234,7 +240,9 @@ lazy val util = .in(file("util")) .settings(toolSettings) .settings(mavenPublishSettings) - .settings(crossCompileCompatSettings) + .settings( + libraryDependencies += collectionsCompatLib + ) lazy val nir = project @@ -252,6 +260,7 @@ lazy val nirparser = .settings(toolSettings) .settings(noPublishSettings) .settings( + crossScalaVersions := Seq(sbt10ScalaVersion), libraryDependencies ++= Seq( "com.lihaoyi" %% "fastparse" % "2.3.0", "com.lihaoyi" %% "scalaparse" % "2.3.0", @@ -271,8 +280,9 @@ lazy val tools = .settings( libraryDependencies ++= Seq( scalacheckDep, - scalatestDep - ), + scalatestDep, + collectionsCompatLib + ) ++ parallelCollectionsLib(scalaVersion.value), Test / fork := true, Test / javaOptions ++= { val nscpluginjar = (nscplugin / Compile / Keys.`package`).value @@ -292,7 +302,6 @@ lazy val tools = Test / parallelExecution := false, mimaSettings ) - .settings(crossCompileCompatSettings) .dependsOn(nir, util, testingCompilerInterface % Test) lazy val nscplugin = @@ -308,11 +317,11 @@ lazy val nscplugin = ), libraryDependencies ++= Seq( "org.scala-lang" % "scala-compiler" % scalaVersion.value, - "org.scala-lang" % "scala-reflect" % scalaVersion.value + "org.scala-lang" % "scala-reflect" % scalaVersion.value, + collectionsCompatLib ), exportJars := true ) - .settings(crossCompileCompatSettings) .settings(scalacOptions += "-Xno-patmat-analysis") lazy val sbtPluginSettings: Seq[Setting[_]] = @@ -397,8 +406,10 @@ lazy val javalib = .in(file("javalib")) .enablePlugins(MyScalaNativePlugin) .settings(mavenPublishSettings) - .settings(crossCompileCompatSettings) .settings( + libraryDependencies ++= Seq( + collectionsCompatLib + ) ++ parallelCollectionsLib(scalaVersion.value), Compile / doc / sources := Nil, // doc generation currently broken // This is required to have incremental compilation to work in javalib. 
// We put our classes on scalac's `javabootclasspath` so that it uses them @@ -787,6 +798,7 @@ lazy val junitAsyncJVM = project .in(file("junit-async/jvm")) .settings( + scalaVersion := sbt10ScalaVersion, crossScalaVersions := Seq(sbt10ScalaVersion), nameSettings, publishArtifact := false From b3e90b281711e6771587a0b21d6cd176efe6e805 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 14:30:19 +0200 Subject: [PATCH 25/75] After rebase fixes --- build.sbt | 2 +- javalib/src/main/scala/java/lang/System.scala | 2 +- .../nir/serialization/BinaryDeserializer.scala | 14 ++++++++++---- .../scala/scalanative/nscplugin/NirGenExpr.scala | 2 +- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/build.sbt b/build.sbt index 7f76f2c6c3..8fcac04436 100644 --- a/build.sbt +++ b/build.sbt @@ -49,7 +49,7 @@ inThisBuild( Def.settings( organization := "org.scala-native", // Maven version := nativeVersion, // Maven - scalaVersion := libScalaVersion, + scalaVersion := scala213, crossScalaVersions := libCrossScalaVersions, scalacOptions ++= Seq( "-deprecation", diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index b6f2fb0f69..c823670c7b 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,7 +91,7 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) - private val systemProperties = loadProperties() + private lazy val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = systemProperties.setProperty(fromCString(key), fromCString(value)) diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala index 6f46248167..b9eb7621be 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala @@ -6,6 +6,7 @@ import java.net.URI import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import scala.collection.mutable +import scala.collection.compat.immutable.ArraySeq import scala.scalanative.nir.serialization.{Tags => T} import scala.scalanative.util.StringUtils @@ -186,7 +187,7 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.TopGlobal => Global.Top(getUTF8String()) case T.MemberGlobal => - Global.Member(Global.Top(getUTF8String()), getSig) + Global.Member(Global.Top(getUTF8String()), getSig()) } private def getSig(): Sig = @@ -281,9 +282,14 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.DoubleVal => Val.Double(getDouble) case T.StructValueVal => Val.StructValue(getVals()) case T.ArrayValueVal => Val.ArrayValue(getType(), getVals()) - case T.CharsVal => Val.Chars(getBytes()) - case T.LocalVal => Val.Local(getLocal(), getType) - case T.GlobalVal => Val.Global(getGlobal(), getType()) + case T.CharsVal => + Val.Chars { + ArraySeq.unsafeWrapArray { + getBytes() + } + } + case T.LocalVal => Val.Local(getLocal(), getType()) + case T.GlobalVal => Val.Global(getGlobal(), getType()) case T.UnitVal => Val.Unit case T.ConstVal => Val.Const(getVal()) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index a852a77344..5076ee1e5d 100644 --- 
a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -826,7 +826,7 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => // and hence `samBridges` will always be empty (scala/bug#10512). // Since we only support Scala 2.12.12 and up, // we assert that this is not the case. - assert(synthCls != NoSymbol) + assert(synthCls != NoSymbol, "Unexpected NoSymbol") val samBridges = { import scala.reflect.internal.Flags.BRIDGE synthCls.info.findMembers(excludedFlags = 0L, requiredFlags = BRIDGE).toList From 15df0d1f6e88e6d344920ed40ced52d74ef6f86f Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 15:02:30 +0200 Subject: [PATCH 26/75] Port Hashtable from Scala.js, due to to cyclic dependency of 2.13 HashMap on System.Properties --- javalib/src/main/scala/java/lang/System.scala | 2 +- javalib/src/main/scala/java/util/Hashtable.scala | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index c823670c7b..b6f2fb0f69 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,7 +91,7 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) - private lazy val systemProperties = loadProperties() + private val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = systemProperties.setProperty(fromCString(key), fromCString(value)) diff --git a/javalib/src/main/scala/java/util/Hashtable.scala b/javalib/src/main/scala/java/util/Hashtable.scala index 46342981bb..0e12abd73c 100644 --- a/javalib/src/main/scala/java/util/Hashtable.scala +++ b/javalib/src/main/scala/java/util/Hashtable.scala @@ -24,12 +24,13 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) } def size(): Int = - inner.size + inner.size() def isEmpty(): Boolean = - inner.isEmpty + inner.isEmpty() - def keys(): ju.Enumeration[K] = Collections.enumeration(keySet()) + def keys(): ju.Enumeration[K] = + Collections.enumeration(keySet()) def elements(): ju.Enumeration[V] = Collections.enumeration(values()) From 262ac361294b5fd52b45af8a9869575bec29fd17 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 15:03:43 +0200 Subject: [PATCH 27/75] Add 2.13.3 to travis CI matrix [ci skip] --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index aef40ba2fe..3e380480dc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,6 +3,7 @@ sudo: required scala: - "2.11.12" - "2.12.12" + - "2.13.3" os: linux dist: trusty From 12488aed612f2b203e3fa48f347237a2fe3633ad Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 16:23:18 +0200 Subject: [PATCH 28/75] Add missing changes: Revert fastparse version to 1.0.0, Lazy init System.systemProperties --- build.sbt | 4 ++-- javalib/src/main/scala/java/lang/System.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 8fcac04436..82345fcd89 100644 --- a/build.sbt +++ b/build.sbt @@ -262,8 +262,8 @@ lazy val nirparser = .settings( crossScalaVersions := Seq(sbt10ScalaVersion), libraryDependencies ++= Seq( - "com.lihaoyi" %% "fastparse" % "2.3.0", - "com.lihaoyi" %% "scalaparse" % "2.3.0", + "com.lihaoyi" %% "fastparse" % "1.0.0", + "com.lihaoyi" %% "scalaparse" % "1.0.0", 
scalacheckDep, scalatestDep ) diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index b6f2fb0f69..c823670c7b 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,7 +91,7 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) - private val systemProperties = loadProperties() + private lazy val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = systemProperties.setProperty(fromCString(key), fromCString(value)) From 7480f62fc9538f31329f55bbb9ba1cd06c5f535f Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 5 Oct 2020 16:55:00 +0200 Subject: [PATCH 29/75] Allow cross-testing junit jvm --- build.sbt | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 82345fcd89..867bb55a93 100644 --- a/build.sbt +++ b/build.sbt @@ -777,9 +777,9 @@ lazy val junitTestOutputsJVM = .in(file("junit-test/output-jvm")) .settings( commonJUnitTestOutputsSettings, - crossScalaVersions := Seq(sbt10ScalaVersion), libraryDependencies ++= Seq( - "com.novocode" % "junit-interface" % "0.11" % "test" + "com.novocode" % "junit-interface" % "0.11" % "test", + collectionsCompatLib ) ) .dependsOn(junitAsyncJVM % "test") @@ -798,8 +798,6 @@ lazy val junitAsyncJVM = project .in(file("junit-async/jvm")) .settings( - scalaVersion := sbt10ScalaVersion, - crossScalaVersions := Seq(sbt10ScalaVersion), nameSettings, publishArtifact := false ) From 7d3dbaa59f68b6d9e17ae4a8b3045214d4fd6105 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 6 Oct 2020 14:34:28 +0200 Subject: [PATCH 30/75] Remove ported scala-collections-compat from scalalib overides. 
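
Several of the preceding commits toggle System.systemProperties between a strict and a lazy val: PATCH 26 cites a cyclic dependency of 2.13's HashMap on System.Properties, and PATCH 28 settles on lazy initialization. The following stand-alone sketch (assumed example, all names are ours) shows the general behaviour a lazy val buys in that situation: evaluation is deferred until first use, so code running during object construction no longer observes a half-initialized field.

    object InitOrderDemo {
      object StrictInit {
        report()                    // runs while StrictInit is still being constructed
        val greeting = "ready"
        def report(): Unit = println("strict: " + greeting) // greeting is still null here
      }

      object LazyInit {
        report()                    // forcing the lazy val during construction is fine
        lazy val greeting = "ready"
        def report(): Unit = println("lazy: " + greeting)
      }

      def main(args: Array[String]): Unit = {
        val _ = StrictInit          // prints "strict: null"
        val _ = LazyInit            // prints "lazy: ready"
      }
    }
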
Adapt javalib to use ScalaOps Ported Collections.shuffleImpl, addAll and sort from as well as AbstractMap.toString from Scala.js Replaced implicit range constructors for Ints with explicit Range due to compiler errors --- .../main/scala/java/lang/UnixProcess.scala | 16 +- .../scala/java/util/AbstractCollection.scala | 2 +- .../scala/collection/compat/BuildFrom.scala | 53 - .../scala/collection/compat/CompatImpl.scala | 74 - .../collection/compat/PackageShared.scala | 467 ----- .../compat/immutable/ArraySeq.scala | 267 --- .../compat/immutable/LazyList.scala | 1537 ----------------- .../scala/collection/compat/package.scala | 66 - .../scala/collection/compat/BuildFrom.scala | 53 - .../scala/collection/compat/CompatImpl.scala | 74 - .../collection/compat/PackageShared.scala | 467 ----- .../compat/immutable/ArraySeq.scala | 267 --- .../compat/immutable/LazyList.scala | 1537 ----------------- .../scala/collection/compat/package.scala | 74 - .../collection/compat/immutable/package.scala | 21 - .../scala/collection/compat/package.scala | 24 - 16 files changed, 10 insertions(+), 4989 deletions(-) delete mode 100644 scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala delete mode 100644 scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala delete mode 100644 scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala delete mode 100644 scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala delete mode 100644 scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala delete mode 100644 scalalib/overrides-2.11/scala/collection/compat/package.scala delete mode 100644 scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala delete mode 100644 scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala delete mode 100644 scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala delete mode 100644 scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala delete mode 100644 scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala delete mode 100644 scalalib/overrides-2.12/scala/collection/compat/package.scala delete mode 100644 scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala delete mode 100644 scalalib/overrides-2.13/scala/collection/compat/package.scala diff --git a/javalib/src/main/scala/java/lang/UnixProcess.scala b/javalib/src/main/scala/java/lang/UnixProcess.scala index 21c88a36fd..6d3578860e 100644 --- a/javalib/src/main/scala/java/lang/UnixProcess.scala +++ b/javalib/src/main/scala/java/lang/UnixProcess.scala @@ -146,6 +146,7 @@ object UnixProcess { .scalaOps .toSeq .map(e => s"${e.getKey()}=${e.getValue()}") + } } /* @@ -268,13 +269,14 @@ object UnixProcess { case null => "/bin:/usr/bin:/usr/local/bin" case p => p } - ArraySeq - .unsafeWrapArray(path.split(":")) - .map { absPath => - new File(s"$absPath/$bin") - } collect { - case f if f.canExecute() => f.toString - } + + path + .split(':') + .toIndexedSeq + .map { absPath => new File(s"$absPath/$bin") } + .collect { + case f if f.canExecute() => f.toString + } } } } diff --git a/javalib/src/main/scala/java/util/AbstractCollection.scala b/javalib/src/main/scala/java/util/AbstractCollection.scala index abbb852cde..f0978bb5fd 100644 --- a/javalib/src/main/scala/java/util/AbstractCollection.scala +++ b/javalib/src/main/scala/java/util/AbstractCollection.scala @@ -25,7 +25,7 @@ abstract class AbstractCollection[E] protected () extends Collection[E] { .asInstanceOf[Array[T]] val iter = iterator() - for (i <- 0 
until size()) toFill(i) = iter.next().asInstanceOf[T] + for (i <- Range(0, size())) toFill(i) = iter.next().asInstanceOf[T] if (toFill.length > size()) toFill(size()) = null.asInstanceOf[T] toFill diff --git a/scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala b/scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala deleted file mode 100644 index 8cdd8756c3..0000000000 --- a/scalalib/overrides-2.11/scala/collection/compat/BuildFrom.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -import scala.collection.generic.CanBuildFrom -import scala.collection.mutable -import scala.language.higherKinds -import scala.language.implicitConversions - -/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. - * Implicit instances of `BuildFrom` are available for all collection types. - * - * @tparam From Type of source collection - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ -trait BuildFrom[-From, -A, +C] extends Any { - def fromSpecific(from: From)(it: IterableOnce[A]): C - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ - def newBuilder(from: From): mutable.Builder[A, C] - - @deprecated("Use newBuilder() instead of apply()", "2.13.0") - @`inline` def apply(from: From): mutable.Builder[A, C] = newBuilder(from) -} - -object BuildFrom { - - // Implicit instance derived from an implicit CanBuildFrom instance - implicit def fromCanBuildFrom[From, A, C]( - implicit cbf: CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = - new BuildFrom[From, A, C] { - def fromSpecific(from: From)(it: IterableOnce[A]): C = (cbf(from) ++= it).result() - def newBuilder(from: From): mutable.Builder[A, C] = cbf(from) - } - - // Implicit conversion derived from an implicit conversion to CanBuildFrom - implicit def fromCanBuildFromConversion[X, From, A, C](x: X)( - implicit toCanBuildFrom: X => CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = - fromCanBuildFrom(toCanBuildFrom(x)) - -} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala b/scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala deleted file mode 100644 index 4ff1f5515e..0000000000 --- a/scalalib/overrides-2.11/scala/collection/compat/CompatImpl.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -import scala.reflect.ClassTag -import scala.collection.generic.CanBuildFrom -import scala.collection.{immutable => i, mutable => m} -import scala.language.higherKinds - -/* builder optimized for a single ++= call, which returns identity on result if possible - * and defers to the underlying builder if not. 
- */ -private final class IdentityPreservingBuilder[A, CC[X] <: TraversableOnce[X]]( - that: m.Builder[A, CC[A]])(implicit ct: ClassTag[CC[A]]) - extends m.Builder[A, CC[A]] { - - //invariant: ruined => (collection == null) - var collection: CC[A] = null.asInstanceOf[CC[A]] - var ruined = false - - private[this] def ruin(): Unit = { - if (collection != null) that ++= collection - collection = null.asInstanceOf[CC[A]] - ruined = true - } - - override def ++=(elems: TraversableOnce[A]): this.type = - elems match { - case ct(ca) if collection == null && !ruined => { - collection = ca - this - } - case _ => { - ruin() - that ++= elems - this - } - } - - def +=(elem: A): this.type = { - ruin() - that += elem - this - } - - def clear(): Unit = { - collection = null.asInstanceOf[CC[A]] - if (ruined) that.clear() - ruined = false - } - - def result(): CC[A] = if (collection == null) that.result() else collection -} - -private[compat] object CompatImpl { - def simpleCBF[A, C](f: => m.Builder[A, C]): CanBuildFrom[Any, A, C] = - new CanBuildFrom[Any, A, C] { - def apply(from: Any): m.Builder[A, C] = apply() - def apply(): m.Builder[A, C] = f - } - - type ImmutableBitSetCC[X] = ({ type L[_] = i.BitSet })#L[X] - type MutableBitSetCC[X] = ({ type L[_] = m.BitSet })#L[X] -} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala b/scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala deleted file mode 100644 index b7ab3f8ac7..0000000000 --- a/scalalib/overrides-2.11/scala/collection/compat/PackageShared.scala +++ /dev/null @@ -1,467 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -import scala.collection.generic._ -import scala.reflect.ClassTag -import scala.collection.{ - BitSet, - GenTraversable, - IterableLike, - IterableView, - MapLike, - TraversableLike, - immutable => i, - mutable => m -} -import scala.runtime.{Tuple2Zipped, Tuple3Zipped} -import scala.{collection => c} -import scala.language.higherKinds -import scala.language.implicitConversions - -/** The collection compatibility API */ -private[compat] trait PackageShared { - import CompatImpl._ - - /** - * A factory that builds a collection of type `C` with elements of type `A`. - * - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ - type Factory[-A, +C] = CanBuildFrom[Nothing, A, C] - - implicit class FactoryOps[-A, +C](private val factory: Factory[A, C]) { - - /** - * @return A collection of type `C` containing the same elements - * as the source collection `it`. - * @param it Source collection - */ - def fromSpecific(it: TraversableOnce[A]): C = (factory() ++= it).result() - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ - def newBuilder: m.Builder[A, C] = factory() - } - - implicit def genericCompanionToCBF[A, CC[X] <: GenTraversable[X]]( - fact: GenericCompanion[CC]): CanBuildFrom[Any, A, CC[A]] = { - /* see https://github.com/scala/scala-collection-compat/issues/337 - `simpleCBF.apply` takes a by-name parameter and relies on - repeated references generating new builders, thus this expression - must be non-strict - */ - def builder: m.Builder[A, CC[A]] = fact match { - case c.Seq | i.Seq => new IdentityPreservingBuilder[A, i.Seq](i.Seq.newBuilder[A]) - case c.LinearSeq | i.LinearSeq => - new IdentityPreservingBuilder[A, i.LinearSeq](i.LinearSeq.newBuilder[A]) - case _ => fact.newBuilder[A] - } - simpleCBF(builder) - } - - implicit def sortedSetCompanionToCBF[A: Ordering, - CC[X] <: c.SortedSet[X] with c.SortedSetLike[X, CC[X]]]( - fact: SortedSetFactory[CC]): CanBuildFrom[Any, A, CC[A]] = - simpleCBF(fact.newBuilder[A]) - - implicit def arrayCompanionToCBF[A: ClassTag](fact: Array.type): CanBuildFrom[Any, A, Array[A]] = - simpleCBF(Array.newBuilder[A]) - - implicit def mapFactoryToCBF[K, V, CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( - fact: MapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = - simpleCBF(fact.newBuilder[K, V]) - - implicit def sortedMapFactoryToCBF[ - K: Ordering, - V, - CC[A, B] <: c.SortedMap[A, B] with c.SortedMapLike[A, B, CC[A, B]]]( - fact: SortedMapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = - simpleCBF(fact.newBuilder[K, V]) - - implicit def bitSetFactoryToCBF(fact: BitSetFactory[BitSet]): CanBuildFrom[Any, Int, BitSet] = - simpleCBF(fact.newBuilder) - - implicit def immutableBitSetFactoryToCBF( - fact: BitSetFactory[i.BitSet]): CanBuildFrom[Any, Int, ImmutableBitSetCC[Int]] = - simpleCBF(fact.newBuilder) - - implicit def mutableBitSetFactoryToCBF( - fact: BitSetFactory[m.BitSet]): CanBuildFrom[Any, Int, MutableBitSetCC[Int]] = - simpleCBF(fact.newBuilder) - - implicit class IterableFactoryExtensionMethods[CC[X] <: GenTraversable[X]]( - private val fact: GenericCompanion[CC]) { - def from[A](source: TraversableOnce[A]): CC[A] = - fact.apply(source.toSeq: _*) - } - - implicit class MapFactoryExtensionMethods[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( - private val fact: MapFactory[CC]) { - def from[K, V](source: TraversableOnce[(K, V)]): CC[K, V] = - fact.apply(source.toSeq: _*) - } - - implicit class BitSetFactoryExtensionMethods[ - C <: scala.collection.BitSet with scala.collection.BitSetLike[C]]( - private val fact: BitSetFactory[C]) { - def fromSpecific(source: TraversableOnce[Int]): C = - fact.apply(source.toSeq: _*) - } - - private[compat] def build[T, CC](builder: m.Builder[T, CC], source: TraversableOnce[T]): CC = { - builder ++= source - builder.result() - } - - implicit def toImmutableSortedMapExtensions( - fact: i.SortedMap.type): ImmutableSortedMapExtensions = - new ImmutableSortedMapExtensions(fact) - - implicit def toImmutableListMapExtensions(fact: i.ListMap.type): ImmutableListMapExtensions = - new ImmutableListMapExtensions(fact) - - implicit def toImmutableHashMapExtensions(fact: i.HashMap.type): ImmutableHashMapExtensions = - new ImmutableHashMapExtensions(fact) - - implicit def toImmutableTreeMapExtensions(fact: i.TreeMap.type): ImmutableTreeMapExtensions = - new ImmutableTreeMapExtensions(fact) - - implicit def toImmutableIntMapExtensions(fact: i.IntMap.type): ImmutableIntMapExtensions = - new ImmutableIntMapExtensions(fact) - - implicit def toImmutableLongMapExtensions(fact: i.LongMap.type): ImmutableLongMapExtensions = - new 
ImmutableLongMapExtensions(fact) - - implicit def toMutableLongMapExtensions(fact: m.LongMap.type): MutableLongMapExtensions = - new MutableLongMapExtensions(fact) - - implicit def toMutableHashMapExtensions(fact: m.HashMap.type): MutableHashMapExtensions = - new MutableHashMapExtensions(fact) - - implicit def toMutableListMapExtensions(fact: m.ListMap.type): MutableListMapExtensions = - new MutableListMapExtensions(fact) - - implicit def toMutableMapExtensions(fact: m.Map.type): MutableMapExtensions = - new MutableMapExtensions(fact) - - implicit def toStreamExtensionMethods[A](stream: Stream[A]): StreamExtensionMethods[A] = - new StreamExtensionMethods[A](stream) - - implicit def toSortedExtensionMethods[K, V <: Sorted[K, V]]( - fact: Sorted[K, V]): SortedExtensionMethods[K, V] = - new SortedExtensionMethods[K, V](fact) - - implicit def toIteratorExtensionMethods[A](self: Iterator[A]): IteratorExtensionMethods[A] = - new IteratorExtensionMethods[A](self) - - implicit def toTraversableExtensionMethods[A]( - self: Traversable[A]): TraversableExtensionMethods[A] = - new TraversableExtensionMethods[A](self) - - implicit def toTraversableOnceExtensionMethods[A]( - self: TraversableOnce[A]): TraversableOnceExtensionMethods[A] = - new TraversableOnceExtensionMethods[A](self) - - // This really belongs into scala.collection but there's already a package object - // in scala-library so we can't add to it - type IterableOnce[+X] = c.TraversableOnce[X] - val IterableOnce = c.TraversableOnce - - implicit def toMapExtensionMethods[K, V]( - self: scala.collection.Map[K, V]): MapExtensionMethods[K, V] = - new MapExtensionMethods[K, V](self) - - implicit def toMapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( - self: IterableView[(K, V), C]): MapViewExtensionMethods[K, V, C] = - new MapViewExtensionMethods[K, V, C](self) -} - -class ImmutableSortedMapExtensions(private val fact: i.SortedMap.type) extends AnyVal { - def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.SortedMap[K, V] = - build(i.SortedMap.newBuilder[K, V], source) -} - -class ImmutableListMapExtensions(private val fact: i.ListMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): i.ListMap[K, V] = - build(i.ListMap.newBuilder[K, V], source) -} - -class ImmutableHashMapExtensions(private val fact: i.HashMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): i.HashMap[K, V] = - build(i.HashMap.newBuilder[K, V], source) -} - -class ImmutableTreeMapExtensions(private val fact: i.TreeMap.type) extends AnyVal { - def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.TreeMap[K, V] = - build(i.TreeMap.newBuilder[K, V], source) -} - -class ImmutableIntMapExtensions(private val fact: i.IntMap.type) extends AnyVal { - def from[V](source: TraversableOnce[(Int, V)]): i.IntMap[V] = - build(i.IntMap.canBuildFrom[Int, V](), source) -} - -class ImmutableLongMapExtensions(private val fact: i.LongMap.type) extends AnyVal { - def from[V](source: TraversableOnce[(Long, V)]): i.LongMap[V] = - build(i.LongMap.canBuildFrom[Long, V](), source) -} - -class MutableLongMapExtensions(private val fact: m.LongMap.type) extends AnyVal { - def from[V](source: TraversableOnce[(Long, V)]): m.LongMap[V] = - build(m.LongMap.canBuildFrom[Long, V](), source) -} - -class MutableHashMapExtensions(private val fact: m.HashMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): m.HashMap[K, V] = - build(m.HashMap.canBuildFrom[K, V](), source) -} - -class 
MutableListMapExtensions(private val fact: m.ListMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): m.ListMap[K, V] = - build(m.ListMap.canBuildFrom[K, V](), source) -} - -class MutableMapExtensions(private val fact: m.Map.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): m.Map[K, V] = - build(m.Map.canBuildFrom[K, V](), source) -} - -class StreamExtensionMethods[A](private val stream: Stream[A]) extends AnyVal { - def lazyAppendedAll(as: => TraversableOnce[A]): Stream[A] = stream.append(as) -} - -class SortedExtensionMethods[K, T <: Sorted[K, T]](private val fact: Sorted[K, T]) { - def rangeFrom(from: K): T = fact.from(from) - def rangeTo(to: K): T = fact.to(to) - def rangeUntil(until: K): T = fact.until(until) -} - -class IteratorExtensionMethods[A](private val self: c.Iterator[A]) extends AnyVal { - def sameElements[B >: A](that: c.TraversableOnce[B]): Boolean = { - self.sameElements(that.iterator) - } - def concat[B >: A](that: c.TraversableOnce[B]): c.TraversableOnce[B] = self ++ that - def tapEach[U](f: A => U): c.Iterator[A] = self.map(a => { f(a); a }) -} - -class TraversableOnceExtensionMethods[A](private val self: c.TraversableOnce[A]) extends AnyVal { - def iterator: Iterator[A] = self.toIterator - - def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.min(ord)) - } - - def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.max(ord)) - } - - def minByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.minBy(f)(cmp)) - } - - def maxByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.maxBy(f)(cmp)) - } -} - -class TraversableExtensionMethods[A](private val self: c.Traversable[A]) extends AnyVal { - def iterableFactory: GenericCompanion[Traversable] = self.companion - - def sizeCompare(otherSize: Int): Int = SizeCompareImpl.sizeCompareInt(self)(otherSize) - def sizeIs: SizeCompareOps = new SizeCompareOps(self) - def sizeCompare(that: c.Traversable[_]): Int = SizeCompareImpl.sizeCompareColl(self)(that) - -} - -class SeqExtensionMethods[A](private val self: c.Seq[A]) extends AnyVal { - def lengthIs: SizeCompareOps = new SizeCompareOps(self) -} - -class SizeCompareOps private[compat] (private val it: c.Traversable[_]) extends AnyVal { - import SizeCompareImpl._ - - /** Tests if the size of the collection is less than some value. */ - @inline def <(size: Int): Boolean = sizeCompareInt(it)(size) < 0 - - /** Tests if the size of the collection is less than or equal to some value. */ - @inline def <=(size: Int): Boolean = sizeCompareInt(it)(size) <= 0 - - /** Tests if the size of the collection is equal to some value. */ - @inline def ==(size: Int): Boolean = sizeCompareInt(it)(size) == 0 - - /** Tests if the size of the collection is not equal to some value. */ - @inline def !=(size: Int): Boolean = sizeCompareInt(it)(size) != 0 - - /** Tests if the size of the collection is greater than or equal to some value. */ - @inline def >=(size: Int): Boolean = sizeCompareInt(it)(size) >= 0 - - /** Tests if the size of the collection is greater than some value. 
*/ - @inline def >(size: Int): Boolean = sizeCompareInt(it)(size) > 0 -} - -private object SizeCompareImpl { - def sizeCompareInt(self: c.Traversable[_])(otherSize: Int): Int = - self match { - case self: c.SeqLike[_, _] => self.lengthCompare(otherSize) - case _ => - if (otherSize < 0) 1 - else { - var i = 0 - val it = self.toIterator - while (it.hasNext) { - if (i == otherSize) return 1 - it.next() - i += 1 - } - i - otherSize - } - } - - // `IndexedSeq` is the only thing that we can safely say has a known size - def sizeCompareColl(self: c.Traversable[_])(that: c.Traversable[_]): Int = - that match { - case that: c.IndexedSeq[_] => sizeCompareInt(self)(that.length) - case _ => - self match { - case self: c.IndexedSeq[_] => - val res = sizeCompareInt(that)(self.length) - // can't just invert the result, because `-Int.MinValue == Int.MinValue` - if (res == Int.MinValue) 1 else -res - case _ => - val thisIt = self.toIterator - val thatIt = that.toIterator - while (thisIt.hasNext && thatIt.hasNext) { - thisIt.next() - thatIt.next() - } - java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) - } - } -} - -class TraversableLikeExtensionMethods[A, Repr](private val self: c.GenTraversableLike[A, Repr]) - extends AnyVal { - def tapEach[U](f: A => U)(implicit bf: CanBuildFrom[Repr, A, Repr]): Repr = - self.map(a => { f(a); a }) - - def partitionMap[A1, A2, That, Repr1, Repr2](f: A => Either[A1, A2])( - implicit bf1: CanBuildFrom[Repr, A1, Repr1], - bf2: CanBuildFrom[Repr, A2, Repr2] - ): (Repr1, Repr2) = { - val l = bf1() - val r = bf2() - self.foreach { x => - f(x) match { - case Left(x1) => l += x1 - case Right(x2) => r += x2 - } - } - (l.result(), r.result()) - } - - def groupMap[K, B, That](key: A => K)(f: A => B)( - implicit bf: CanBuildFrom[Repr, B, That]): Map[K, That] = { - val map = m.Map.empty[K, m.Builder[B, That]] - for (elem <- self) { - val k = key(elem) - val bldr = map.getOrElseUpdate(k, bf(self.repr)) - bldr += f(elem) - } - val res = Map.newBuilder[K, That] - for ((k, bldr) <- map) res += ((k, bldr.result())) - res.result() - } - - def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): Map[K, B] = { - val map = m.Map.empty[K, B] - for (elem <- self) { - val k = key(elem) - val v = map.get(k) match { - case Some(b) => reduce(b, f(elem)) - case None => f(elem) - } - map.put(k, v) - } - map.toMap - } -} - -class TrulyTraversableLikeExtensionMethods[El1, Repr1]( - private val self: TraversableLike[El1, Repr1]) - extends AnyVal { - - def lazyZip[El2, Repr2, T2](t2: T2)( - implicit w2: T2 => IterableLike[El2, Repr2] - ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((self, t2)) -} - -class Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( - private val self: Tuple2Zipped[El1, Repr1, El2, Repr2]) { - - def lazyZip[El3, Repr3, T3](t3: T3)(implicit w3: T3 => IterableLike[El3, Repr3]) - : Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = - new Tuple3Zipped((self.colls._1, self.colls._2, t3)) -} - -class MapExtensionMethods[K, V](private val self: scala.collection.Map[K, V]) extends AnyVal { - - def foreachEntry[U](f: (K, V) => U): Unit = { - self.foreach { case (k, v) => f(k, v) } - } - -} - -class MapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( - private val self: IterableView[(K, V), C]) - extends AnyVal { - - def mapValues[W, That](f: V => W)( - implicit bf: CanBuildFrom[IterableView[(K, V), C], (K, W), That]): That = - self.map[(K, W), That] { case (k, v) => (k, f(v)) } - - // TODO: Replace the current implementation of `mapValues` 
with this - // after major version bump when bincompat can be broken. - // At the same time, remove `canBuildFromIterableViewMapLike` - /* - def mapValues[W](f: V => W): IterableView[(K, W), C] = - // the implementation of `self.map` also casts the result - self.map({ case (k, v) => (k, f(v)) }).asInstanceOf[IterableView[(K, W), C]] - */ - - def filterKeys(p: K => Boolean): IterableView[(K, V), C] = - self.filter { case (k, _) => p(k) } -} - -class ImmutableQueueExtensionMethods[A](private val self: i.Queue[A]) extends AnyVal { - def enqueueAll[B >: A](iter: c.Iterable[B]): i.Queue[B] = - self.enqueue(iter.to[i.Iterable]) -} - -class MutableQueueExtensionMethods[Element](private val self: m.Queue[Element]) extends AnyVal { - def enqueueAll(iter: c.Iterable[Element]): Unit = - self.enqueue(iter.toIndexedSeq: _*) -} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala b/scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala deleted file mode 100644 index e0da76ef4a..0000000000 --- a/scalalib/overrides-2.11/scala/collection/compat/immutable/ArraySeq.scala +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat.immutable - -import java.util.Arrays - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.AbstractSeq -import scala.collection.generic._ -import scala.collection.immutable.IndexedSeq -import scala.collection.mutable.{ArrayBuilder, Builder, WrappedArrayBuilder} -import scala.reflect.ClassTag -import scala.util.hashing.MurmurHash3 - -/** - * An immutable array. - * - * Supports efficient indexed access and has a small memory footprint. - * - * @define Coll `ArraySeq` - * @define coll wrapped array - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -abstract class ArraySeq[+T] extends AbstractSeq[T] with IndexedSeq[T] { - - override protected[this] def thisCollection: ArraySeq[T] = this - - /** The tag of the element type */ - protected[this] def elemTag: ClassTag[T] - - /** The length of the array */ - def length: Int - - /** The element at given index */ - def apply(index: Int): T - - /** The underlying array */ - def unsafeArray: Array[T @uncheckedVariance] - - override def stringPrefix = "ArraySeq" - - /** Clones this object, including the underlying Array. */ - override def clone(): ArraySeq[T] = ArraySeq unsafeWrapArray unsafeArray.clone() - - /** Creates new builder for this collection ==> move to subclasses - */ - override protected[this] def newBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag) - -} - -/** A companion object used to create instances of `ArraySeq`. - */ -object ArraySeq { - // This is reused for all calls to empty. 
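
The compat ArraySeq being dropped here is the same type the toolchain now takes from the scala-collection-compat library; the BinaryDeserializer change earlier in the series wraps the bytes it reads with ArraySeq.unsafeWrapArray precisely because the array is wrapped in place rather than copied. A short usage sketch (assumed example; WrapDemo is our name):

    import scala.collection.compat.immutable.ArraySeq

    object WrapDemo {
      def main(args: Array[String]): Unit = {
        val bytes   = Array[Byte](1, 2, 3)
        val wrapped = ArraySeq.unsafeWrapArray(bytes) // no copy: backed by `bytes`
        bytes(0) = 42.toByte
        println(wrapped.head)                         // 42, the wrapper sees the mutation
      }
    }
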
- private val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) - def empty[T <: AnyRef]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - - def newBuilder[T](implicit elemTag: ClassTag[T]): Builder[T, ArraySeq[T]] = - new WrappedArrayBuilder[T](elemTag).mapResult(w => unsafeWrapArray(w.array)) - - def apply[T](elems: T*)(implicit elemTag: ClassTag[T]): ArraySeq[T] = { - val b = newBuilder[T] - b ++= elems - b.result() - } - - def unapplySeq[T](seq: ArraySeq[T]): Some[ArraySeq[T]] = Some(seq) - - /** - * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type - * without copying. - * - * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without - * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, - * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: - * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still - * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing - * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a - * `ClassCastException` at runtime. - */ - def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = - (x.asInstanceOf[Array[_]] match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] - - implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] = - new CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] { - def apply(from: ArraySeq[_]): Builder[T, ArraySeq[T]] = - ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] - def apply: Builder[T, ArraySeq[T]] = - ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] - } - - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] with Serializable { - lazy val elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) - def length: Int = unsafeArray.length - def apply(index: Int): T = unsafeArray(index) - def update(index: Int, elem: T) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofRef[_] => - arrayEquals(unsafeArray.asInstanceOf[Array[AnyRef]], - that.unsafeArray.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] with Serializable { - def elemTag = ClassTag.Byte - def length: Int = unsafeArray.length - def apply(index: Int): Byte = unsafeArray(index) - def update(index: Int, elem: Byte) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] with Serializable { - def elemTag = ClassTag.Short - def length: Int = unsafeArray.length - def apply(index: Int): Short = unsafeArray(index) - def 
update(index: Int, elem: Short) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] with Serializable { - def elemTag = ClassTag.Char - def length: Int = unsafeArray.length - def apply(index: Int): Char = unsafeArray(index) - def update(index: Int, elem: Char) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] with Serializable { - def elemTag = ClassTag.Int - def length: Int = unsafeArray.length - def apply(index: Int): Int = unsafeArray(index) - def update(index: Int, elem: Int) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] with Serializable { - def elemTag = ClassTag.Long - def length: Int = unsafeArray.length - def apply(index: Int): Long = unsafeArray(index) - def update(index: Int, elem: Long) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] with Serializable { - def elemTag = ClassTag.Float - def length: Int = unsafeArray.length - def apply(index: Int): Float = unsafeArray(index) - def update(index: Int, elem: Float) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] with Serializable { - def elemTag = ClassTag.Double - def length: Int = unsafeArray.length - def apply(index: Int): Double = unsafeArray(index) - def update(index: Int, elem: Double) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofBoolean(val unsafeArray: Array[Boolean]) - extends ArraySeq[Boolean] - with Serializable { - def elemTag = ClassTag.Boolean - def length: Int = unsafeArray.length - def apply(index: Int): Boolean = unsafeArray(index) - def update(index: Int, elem: Boolean) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(unsafeArray, 
that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] with Serializable { - def elemTag = ClassTag.Unit - def length: Int = unsafeArray.length - def apply(index: Int): Unit = unsafeArray(index) - def update(index: Int, elem: Unit) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofUnit => unsafeArray.length == that.unsafeArray.length - case _ => super.equals(that) - } - } - - private[this] def arrayEquals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = { - if (xs eq ys) - return true - if (xs.length != ys.length) - return false - - val len = xs.length - var i = 0 - while (i < len) { - if (xs(i) != ys(i)) - return false - i += 1 - } - true - } -} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala b/scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala deleted file mode 100644 index 4b4dd58bf2..0000000000 --- a/scalalib/overrides-2.11/scala/collection/compat/immutable/LazyList.scala +++ /dev/null @@ -1,1537 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat.immutable - -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.annotation.tailrec -import scala.annotation.unchecked.{uncheckedVariance => uV} -import scala.collection.{ - AbstractIterator, - AbstractSeq, - GenIterable, - GenSeq, - GenTraversableOnce, - LinearSeqOptimized, - mutable -} -import scala.collection.generic.{ - CanBuildFrom, - FilterMonadic, - GenericCompanion, - GenericTraversableTemplate, - SeqFactory -} -import scala.collection.immutable.{LinearSeq, NumericRange} -import scala.collection.mutable.{ArrayBuffer, Builder, StringBuilder} -import scala.language.implicitConversions -import scala.language.higherKinds - -/** This class implements an immutable linked list that evaluates elements - * in order and only when needed. Here is an example: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 } - * - * fibs take 5 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * }}} - * - * A `LazyList`, like the one in the example above, may be infinite in length. - * Aggregate methods, such as `count`, `sum`, `max` or `min` on such infinite length - * sequences will not terminate. Filtered infinite lazy lists are also effectively - * infinite in length. - * - * Elements of a `LazyList` are memoized; that is, the value of each element - * is computed only once. 
- * To illustrate, we will alter body of the `fibs` value above and take some - * more values: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip( - * fibs.tail).map(n => { - * println("Adding %d and %d".format(n._1, n._2)) - * n._1 + n._2 - * }) - * - * fibs take 5 foreach println - * fibs take 6 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // Adding 0 and 1 - * // 1 - * // Adding 1 and 1 - * // 2 - * // Adding 1 and 2 - * // 3 - * - * // And then prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * // Adding 2 and 3 - * // 5 - * }}} - * - * There are a number of subtle points to the above example. - * - * - The definition of `fibs` is a `val` not a method. The memoization of the - * `LazyList` requires us to have somewhere to store the information and a `val` - * allows us to do that. - * - * - While the `LazyList` is actually being modified during access, this does not - * change the notion of its immutability. Once the values are memoized they do - * not change and values that have yet to be memoized still "exist", they - * simply haven't been realized yet. - * - * - One must be cautious of memoization; you can very quickly eat up large - * amounts of memory if you're not careful. The reason for this is that the - * memoization of the `LazyList` creates a structure much like - * [[scala.collection.immutable.List]]. So long as something is holding on to - * the head, the head holds on to the tail, and so it continues recursively. - * If, on the other hand, there is nothing holding on to the head (e.g. we used - * `def` to define the `LazyList`) then once it is no longer being used directly, - * it disappears. - * - * - Note that some operations, including [[drop]], [[dropWhile]], - * [[flatMap]] or [[collect]] may process a large number of intermediate - * elements before returning. These necessarily hold onto the head, since - * they are methods on `LazyList`, and a lazy list holds its own head. For - * computations of this sort where memoization is not desired, use - * `Iterator` when possible. - * - * {{{ - * // For example, let's build the natural numbers and do some silly iteration - * // over them. - * - * // We'll start with a silly iteration - * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { - * // Stop after 200,000 - * if (i < 200001) { - * if (i % 50000 == 0) println(s + i) - * loop(s, iter.next(), iter) - * } - * } - * - * // Our first LazyList definition will be a val definition - * val lazylist1: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) - * loop(0) - * } - * - * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyList is held in lazylist1 - * val it1 = lazylist1.toIterator - * loop("Iterator1: ", it1.next(), it1) - * - * // We can redefine this LazyList such that all we have is the Iterator left - * // and allow the LazyList to be garbage collected as required. Using a def - * // to provide the LazyList ensures that no val is holding onto the head as - * // is the case with lazylist1 - * def lazylist2: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) - * loop(0) - * } - * val it2 = lazylist2.toIterator - * loop("Iterator2: ", it2.next(), it2) - * - * // And, of course, we don't actually need a LazyList at all for such a simple - * // problem. 
There's no reason to use a LazyList if you don't actually need - * // one. - * val it3 = new Iterator[Int] { - * var i = -1 - * def hasNext = true - * def next(): Int = { i += 1; i } - * } - * loop("Iterator3: ", it3.next(), it3) - * }}} - * - * - The fact that `tail` works at all is of interest. In the definition of - * `fibs` we have an initial `(0, 1, LazyList(...))` so `tail` is deterministic. - * If we defined `fibs` such that only `0` were concretely known then the act - * of determining `tail` would require the evaluation of `tail` which would - * cause an infinite recursion and stack overflow. If we define a definition - * where the tail is not initially computable then we're going to have an - * infinite recursion: - * {{{ - * // The first time we try to access the tail we're going to need more - * // information which will require us to recurse, which will require us to - * // recurse, which... - * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * }}} - * - * The definition of `fibs` above creates a larger number of objects than - * necessary depending on how you might want to implement it. The following - * implementation provides a more "cost effective" implementation due to the - * fact that it has a more direct route to the numbers themselves: - * - * {{{ - * lazy val fib: LazyList[Int] = { - * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) - * loop(1, 1) - * } - * }}} - * - * @tparam A the type of the elements contained in this lazy list. - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] - * section on `LazyLists` for more information. - * @define Coll `LazyList` - * @define coll lazy list - * @define orderDependent - * @define orderDependentFold - * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, - * `appendedAll`, `lazyAppendedAll`) without forcing any of the - * intermediate resulting lazy lists may overflow the stack when - * the final result is forced. - * @define preservesLaziness This method preserves laziness; elements are only evaluated - * individually as needed. - * @define initiallyLazy This method does not evaluate anything until an operation is performed - * on the result (e.g. calling `head` or `tail`, or checking if it is empty). - * @define evaluatesAllElements This method evaluates all elements of the collection. 
- */ -@SerialVersionUID(3L) -final class LazyList[+A] private (private[this] var lazyState: () => LazyList.State[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, LazyList] - with LinearSeqOptimized[A, LazyList[A]] - with Serializable { - import LazyList._ - - @volatile private[this] var stateEvaluated: Boolean = false - @inline private def stateDefined: Boolean = stateEvaluated - private[this] var midEvaluation = false - - private lazy val state: State[A] = { - // if it's already mid-evaluation, we're stuck in an infinite - // self-referential loop (also it's empty) - if (midEvaluation) { - throw new RuntimeException( - "self-referential LazyList or a derivation thereof has no more elements") - } - midEvaluation = true - val res = try lazyState() - finally midEvaluation = false - // if we set it to `true` before evaluating, we may infinite loop - // if something expects `state` to already be evaluated - stateEvaluated = true - lazyState = null // allow GC - res - } - - /** $preservesLaziness */ - def knownSize: Int = if (knownIsEmpty) 0 else -1 -// override def iterableFactory: SeqFactory[LazyList] = LazyList - - override def isEmpty: Boolean = state eq State.Empty - - override def head: A = state.head - - override def tail: LazyList[A] = state.tail - - @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) - @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) - - // It's an imperfect world, but at least we can bottle up the - // imperfection in a capsule. - @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] - @inline private def isLLBuilder[B, That](bf: CanBuildFrom[LazyList[A], B, That]) = - bf(repr).isInstanceOf[LazyList.LazyBuilder[_]] - - override def companion: GenericCompanion[LazyList] = LazyList - - /** Evaluates all undefined elements of the lazy list. - * - * This method detects cycles in lazy lists, and terminates after all - * elements of the cycle are evaluated. For example: - * - * {{{ - * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring - * ring.force - * ring.toString - * - * // prints - * // - * // LazyList(1, 2, 3, ...) - * }}} - * - * This method will *not* terminate for non-cyclic infinite-sized collections. - * - * @return this - */ - def force: this.type = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyList[A] = this - if (!these.isEmpty) { - these = these.tail - } - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - /** @inheritdoc - * - * The iterator returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def iterator: Iterator[A] = - if (knownIsEmpty) Iterator.empty - else new LazyIterator(this) - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. - * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying LazyList as elements - * are consumed. - * @note This function will force the realization of the entire LazyList - * unless the `f` throws an exception. 
- */ - @tailrec - override def foreach[U](f: A => U): Unit = { - if (!isEmpty) { - f(head) - tail.foreach(f) - } - } - - /** LazyList specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyList`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override def foldLeft[B](z: B)(op: (B, A) => B): B = - if (isEmpty) z - else tail.foldLeft(op(z, head))(op) - - // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef = - if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this - - override def stringPrefix = "LazyList" - - /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. - * - * $preservesLaziness - * - * $appendStackSafety - * - * @param suffix The collection that gets appended to this lazy list - * @return The lazy list containing elements of this lazy list and the iterable object. - */ - def lazyAppendedAll[B >: A](suffix: => GenTraversableOnce[B]): LazyList[B] = - newLL { - if (isEmpty) suffix match { - case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList - case coll => stateFromIterator(coll.toIterator) - } else sCons(head, tail lazyAppendedAll suffix) - } - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - override def ++[B >: A, That](suffix: GenTraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList.from(suffix) - else lazyAppendedAll(suffix) - } else super.++(suffix)(bf) - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) - else lazyAppendedAll(Iterator.single(elem)) - } else super.:+(elem)(bf) - - /** @inheritdoc - * - * $evaluatesAllElements - */ - override def equals(that: Any): Boolean = - if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def scanLeft[B, That](z: B)(op: (B, A) => B)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) - else newLL(scanLeftState(z)(op)) - } else super.scanLeft(z)(op)(bf) - - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = - sCons( - z, - newLL { - if (isEmpty) State.Empty - else tail.scanLeftState(op(z, head))(op) - } - ) - - /** LazyList specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyList`. - * @return The accumulated value from successive applications of `f`. 
- */ - override def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left: LazyList[A] = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) - - /** @inheritdoc - * - * $preservesLaziness - */ - def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { - val (left, right) = mapToLL(f).partition(_.isLeft) - (left.mapToLL(_.asInstanceOf[Left[A1, _]].a), right.mapToLL(_.asInstanceOf[Right[_, A2]].b)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filter(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = false) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filterNot(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = true) - - /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. - * - * This method is not particularly useful for a lazy list, as [[filter]] already preserves - * laziness. - * - * The `collection.WithFilter` returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def withFilter(p: A => Boolean): FilterMonadic[A, LazyList[A]] = - new LazyList.WithFilter(this, p) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - newLL(sCons(elem, this)) - } else super.+:(elem)(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def ++:[B >: A, That](prefix: TraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList.from(prefix) - else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) - } else super.++:(prefix)(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def ++:[B >: A, That](prefix: Traversable[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList.from(prefix) - else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) - } else super.++:(prefix)(bf) - - private def prependedAllToLL[B >: A](prefix: Traversable[B]): LazyList[B] = - if (knownIsEmpty) LazyList.from(prefix) - else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat(mapToLL(f): @inline) - else super.map(f)(bf) - - private def mapToLL[B](f: A => B): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else (mapImpl(f): @inline) - - /** @inheritdoc - * - * $preservesLaziness - */ - def tapEach[U](f: A => U): LazyList[A] = mapToLL { a => - f(a); a - } - - private def mapImpl[B](f: A => B): LazyList[B] = - newLL { - if (isEmpty) State.Empty - else sCons(f(head), tail.mapImpl(f)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def collect[B, That](pf: PartialFunction[A, B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - 
if (knownIsEmpty) LazyList.empty - else LazyList.collectImpl(this, pf) - } else super.collect(pf)(bf) - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element for which the partial function is defined. - */ - @tailrec - override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = - if (isEmpty) None - else { - val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) - if (res.asInstanceOf[AnyRef] eq LazyList.pfMarker) tail.collectFirst(pf) - else Some(res) - } - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element matching the predicate. - */ - @tailrec - override def find(p: A => Boolean): Option[A] = - if (isEmpty) None - else { - val elem = head - if (p(elem)) Some(elem) - else tail.find(p) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def flatMap[B, That](f: A => GenTraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat(flatMapToLL(f): @inline) - else super.flatMap(f)(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def flatten[B](implicit asIterable: A => GenTraversableOnce[B]): LazyList[B] = - flatMapToLL(asIterable) - - private def flatMapToLL[B](f: A => GenTraversableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else LazyList.flatMapImpl(this, f) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zip[A1 >: A, B, That](that: GenIterable[B])( - implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = - if (isLLBuilder(bf)) asThat(zipToLL(that): @inline) - else super.zip(that)(bf) - - private def zipToLL[B](that: GenIterable[B]): LazyList[(A, B)] = - if (this.knownIsEmpty) LazyList.empty - else newLL(zipState(that.toIterator)) - - private def zipState[B](it: Iterator[B]): State[(A, B)] = - if (this.isEmpty || !it.hasNext) State.Empty - else sCons((head, it.next()), newLL { tail zipState it }) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipWithIndex[A1 >: A, That]( - implicit bf: CanBuildFrom[LazyList[A], (A1, Int), That]): That = - if (isLLBuilder(bf)) asThat { - this zip LazyList.from(0) - } else super.zipWithIndex(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)( - implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = - if (isLLBuilder(bf)) asThat { - if (this.knownIsEmpty) LazyList.continually(thisElem) zip that - else newLL(zipAllState(that.toIterator, thisElem, thatElem)) - } else super.zipAll(that, thisElem, thatElem)(bf) - - private def zipAllState[A1 >: A, B](it: Iterator[B], - thisElem: A1, - thatElem: B): State[(A1, B)] = { - if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, it.next()), newLL { - LazyList.continually(thisElem) zipState it - }) - else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) - } else { - if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zipToLL LazyList.continually(thatElem)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = - (mapToLL(asPair(_)._1), mapToLL(asPair(_)._2)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip3[A1, A2, A3]( - implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = - (mapToLL(asTriple(_)._1), mapToLL(asTriple(_)._2), 
mapToLL(asTriple(_)._3)) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all except the first `n` elements. - */ - override def drop(n: Int): LazyList[A] = - if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else LazyList.dropImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all elements after the predicate returns `false`. - */ - override def dropWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.dropWhileImpl(this, p) - - /** @inheritdoc - * - * $initiallyLazy - */ - override def dropRight(n: Int): LazyList[A] = { - if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else - newLL { - var scout = this - var remaining = n - // advance scout n elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - remaining -= 1 - scout = scout.tail - } - dropRightState(scout) - } - } - - private def dropRightState(scout: LazyList[_]): State[A] = - if (scout.isEmpty) State.Empty - else sCons(head, newLL(tail.dropRightState(scout.tail))) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def take(n: Int): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else (takeImpl(n): @inline) - - private def takeImpl(n: Int): LazyList[A] = { - if (n <= 0) LazyList.empty - else - newLL { - if (isEmpty) State.Empty - else sCons(head, tail.takeImpl(n - 1)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def takeWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else (takeWhileImpl(p): @inline) - - private def takeWhileImpl(p: A => Boolean): LazyList[A] = - newLL { - if (isEmpty || !p(head)) State.Empty - else sCons(head, tail.takeWhileImpl(p)) - } - - /** @inheritdoc - * - * $initiallyLazy - */ - override def takeRight(n: Int): LazyList[A] = - if (n <= 0 || knownIsEmpty) LazyList.empty - else LazyList.takeRightImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all but the first `from` elements. 
- */ - override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) - - /** @inheritdoc - * - * $evaluatesAllElements - */ - override def reverse: LazyList[A] = reverseOnto(LazyList.empty) - - // need contravariant type B to make the compiler happy - still returns LazyList[A] - @tailrec - private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = - if (isEmpty) tl - else tail.reverseOnto(newLL(sCons(head, tl))) - - private def occCounts0[B](sq: collection.Seq[B]): mutable.Map[B, Int] = { - val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 } - for (y <- sq) occ(y) += 1 - occ - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def diff[B >: A](that: GenSeq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else { - val occ = occCounts0(that.seq) - LazyList.from { - iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) true - else { - occ(x) = ox - 1 - false - } - } - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def intersect[B >: A](that: GenSeq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else { - val occ = occCounts0(that.seq) - LazyList.from { - iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - occ(x) = ox - 1 - true - } else false - } - } - } - - @tailrec - private def lengthGt(len: Int): Boolean = - if (len < 0) true - else if (isEmpty) false - else tail.lengthGt(len - 1) - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * a single element ahead of the iterator is evaluated. - */ - override def grouped(size: Int): Iterator[LazyList[A]] = { - require(size > 0, "size must be positive, but was " + size) - slidingImpl(size = size, step = size) - } - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * `size - step max 1` elements ahead of the iterator are evaluated. 
- */ - override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { - require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") - slidingImpl(size = size, step = step) - } - - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = - if (knownIsEmpty) Iterator.empty - else new SlidingIterator[A](this, size = size, step = step) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def padTo[B >: A, That](len: Int, elem: B)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat(padToLL(len, elem)) - else super.padTo(len, elem)(bf) - - private def padToLL[B >: A](len: Int, elem: B): LazyList[B] = - if (len <= 0) this - else - newLL { - if (isEmpty) LazyList.fill(len)(elem).state - else sCons(head, tail.padToLL(len - 1, elem)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def patch[B >: A, That](from: Int, other: GenSeq[B], replaced: Int)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList from other - else patchImpl(from, other, replaced) - } else super.patch(from, other, replaced) - - private def patchImpl[B >: A](from: Int, other: GenSeq[B], replaced: Int): LazyList[B] = - newLL { - if (from <= 0) - stateFromIteratorConcatSuffix(other.toIterator)(LazyList.dropImpl(this, replaced).state) - else if (isEmpty) stateFromIterator(other.toIterator) - else sCons(head, tail.patchImpl(from - 1, other, replaced)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def updated[B >: A, That](index: Int, elem: B)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (index < 0) throw new IndexOutOfBoundsException(s"$index") - else updatedImpl(index, elem, index) - } else super.updated(index, elem) - - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { - newLL { - if (index <= 0) sCons(elem, tail) - else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) - else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) - } - } - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. - * - * $evaluatesAllElements - * - * @param sb the string builder to which elements are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. 
- */ - override def addString(sb: StringBuilder, - start: String, - sep: String, - end: String): StringBuilder = { - force - addStringNoForce(sb, start, sep, end) - sb - } - - private[this] def addStringNoForce(b: StringBuilder, - start: String, - sep: String, - end: String): StringBuilder = { - b.append(start) - if (!stateDefined) b.append("") - else if (!isEmpty) { - b.append(head) - var cursor = this - @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) - var scout = tail - @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty - if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { - cursor = scout - if (scoutNonEmpty) { - scout = scout.tail - // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings - while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) { - appendCursorElement() - cursor = cursor.tail - scout = scout.tail - if (scoutNonEmpty) scout = scout.tail - } - } - } - if (!scoutNonEmpty) { // Not a cycle, scout hit an end - while (cursor ne scout) { - appendCursorElement() - cursor = cursor.tail - } - // if cursor (eq scout) has state defined, it is empty; else unknown state - if (!cursor.stateDefined) b.append(sep).append("") - } else { - @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) - // Cycle. - // If we have a prefix of length P followed by a cycle of length C, - // the scout will be at position (P%C) in the cycle when the cursor - // enters it at P. They'll then collide when the scout advances another - // C - (P%C) ahead of the cursor. - // If we run the scout P farther, then it will be at the start of - // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner - // starts at the beginning of the prefix, they'll collide exactly at - // the start of the loop. - var runner = this - var k = 0 - while (!same(runner, scout)) { - runner = runner.tail - scout = scout.tail - k += 1 - } - // Now runner and scout are at the beginning of the cycle. Advance - // cursor, adding to string, until it hits; then we'll have covered - // everything once. If cursor is already at beginning, we'd better - // advance one first unless runner didn't go anywhere (in which case - // we've already looped once). - if (same(cursor, scout) && (k > 0)) { - appendCursorElement() - cursor = cursor.tail - } - while (!same(cursor, scout)) { - appendCursorElement() - cursor = cursor.tail - } - b.append(sep).append("") - } - } - b.append(end) - } - - /** $preservesLaziness - * - * @return a string representation of this collection. An undefined state is - * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` - * - * Examples: - * - * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; - * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; - * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains - * a cycle at the fourth element. - */ - override def toString(): String = - addStringNoForce(new StringBuilder(stringPrefix), "(", ", ", ")").toString - - /** @inheritdoc - * - * $preservesLaziness - */ - override def hasDefiniteSize: Boolean = { - if (!stateDefined) false - else if (isEmpty) true - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. 
- var those = this - var these = tail - while (those ne these) { - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } - - override def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { - case that: LazyList[B] => this eqLL that - case _ => super.sameElements(that) - } - - @tailrec - private def eqLL[B >: A](that: LazyList[B]): Boolean = - (this eq that) || - (this.state eq that.state) || - (!this.isEmpty && - !that.isEmpty && - (this.head == that.head) && - (this.tail eqLL that.tail)) - - override def splitAt(n: Int): (LazyList[A], LazyList[A]) = (take(n), drop(n)) - - override def span(p: A => Boolean): (LazyList[A], LazyList[A]) = (takeWhile(p), dropWhile(p)) - - override def distinct: LazyList[A] = distinctBy(identity) - - def distinctBy[B](f: A => B): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else - LazyList.from { - val outer = iterator - new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B] - private[this] var nextElementDefined: Boolean = false - private[this] var nextElement: A = _ - - def hasNext: Boolean = - nextElementDefined || (outer.hasNext && { - val a = outer.next() - if (traversedValues.add(f(a))) { - nextElement = a - nextElementDefined = true - true - } else hasNext - }) - - def next(): A = - if (hasNext) { - nextElementDefined = false - nextElement - } else { - Iterator.empty.next() - } - } - } - - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = - if (cbf().isInstanceOf[LazyList.LazyBuilder[_]]) asThat(this) - else super.to(cbf) - - override def init: LazyList[A] = { - if (isEmpty) throw new UnsupportedOperationException - dropRight(1) - } -} - -/** - * $factoryInfo - * @define coll lazy list - * @define Coll `LazyList` - */ -@SerialVersionUID(3L) -object LazyList extends SeqFactory[LazyList] { - // Eagerly evaluate cached empty instance - private[this] val _empty = newLL(State.Empty).force - - private sealed trait State[+A] extends Serializable { - def head: A - def tail: LazyList[A] - } - - private object State { - @SerialVersionUID(3L) - object Empty extends State[Nothing] { - def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyList[Nothing] = - throw new UnsupportedOperationException("tail of empty lazy list") - } - - @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] - } - - /** Creates a new LazyList. */ - @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) - - /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) - - private val pfMarker: AnyRef = new AnyRef - private val anyToMarker: Any => Any = _ => pfMarker - - /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into - * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyList`. Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they - * can continue their execution where they left off. 
- */ - - private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - var elem: A = null.asInstanceOf[A] - var found = false - var rest = restRef // var rest = restRef.elem - while (!found && !rest.isEmpty) { - elem = rest.head - found = p(elem) != isFlipped - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty - } - } - - private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - val marker = pfMarker - val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased - - var res: B = marker.asInstanceOf[B] // safe because B is unbounded - var rest = restRef // var rest = restRef.elem - while ((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { - res = pf.applyOrElse(rest.head, toMarker) - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (res.asInstanceOf[AnyRef] eq marker) State.Empty - else sCons(res, collectImpl(rest, pf)) - } - } - - private def flatMapImpl[A, B](ll: LazyList[A], f: A => GenTraversableOnce[B]): LazyList[B] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - var it: Iterator[B] = null - var itHasNext = false - var rest = restRef // var rest = restRef.elem - while (!itHasNext && !rest.isEmpty) { - it = f(rest.head).toIterator - itHasNext = it.hasNext - if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw - rest = rest.tail - restRef = rest // restRef.elem = rest - } - } - if (itHasNext) { - val head = it.next() - rest = rest.tail - restRef = rest // restRef.elem = rest - sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) - } else State.Empty - } - } - - private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var iRef = n // val iRef = new IntRef(n) - newLL { - var rest = restRef // var rest = restRef.elem - var i = iRef // var i = iRef.elem - while (i > 0 && !rest.isEmpty) { - rest = rest.tail - restRef = rest // restRef.elem = rest - i -= 1 - iRef = i // iRef.elem = i - } - rest.state - } - } - - private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - var rest = restRef // var rest = restRef.elem - while (!rest.isEmpty && p(rest.head)) { - rest = rest.tail - restRef = rest // restRef.elem = rest - } - rest.state - } - } - - private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var scoutRef = ll // val scoutRef = new ObjectRef(ll) - var remainingRef = n // val remainingRef = new IntRef(n) - newLL { - var scout = scoutRef // var scout = scoutRef.elem - var remaining = remainingRef // var remaining = remainingRef.elem - // advance `scout` `n` elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - scout = scout.tail - scoutRef = scout // 
scoutRef.elem = scout - remaining -= 1 - remainingRef = remaining // remainingRef.elem = remaining - } - var rest = restRef // var rest = restRef.elem - // advance `rest` and `scout` in tandem until `scout` reaches the end - while (!scout.isEmpty) { - scout = scout.tail - scoutRef = scout // scoutRef.elem = scout - rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail - restRef = rest // restRef.elem = rest - } - // `rest` is the last `n` elements (or all of them) - rest.state - } - } - - /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). - */ - object cons { - - /** A lazy list consisting of a given first element and remaining elements - * @param hd The first element of the result lazy list - * @param tl The remaining elements of the result lazy list - */ - def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) - - /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) - } - - implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { - - /** Construct a LazyList consisting of a given first element followed by elements - * from another LazyList. - */ - def #::[B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) - - /** Construct a LazyList consisting of the concatenation of the given LazyList and - * another LazyList. - */ - def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() - } - - object #:: { - def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = - if (!s.isEmpty) Some((s.head, s.tail)) else None - } - - def from[A](coll: GenTraversableOnce[A]): LazyList[A] = coll match { - case lazyList: LazyList[A] => lazyList - case _ => newLL(stateFromIterator(coll.toIterator)) - } - - override def apply[A](elems: A*): LazyList[A] = from(elems) - - override def empty[A]: LazyList[A] = _empty - - /** Creates a State from an Iterator, with another State appended after the Iterator - * is empty. - */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = - if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) - else suffix - - /** Creates a State from an IterableOnce. */ - private def stateFromIterator[A](it: Iterator[A]): State[A] = - if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) - else State.Empty - - def concat[A](xss: collection.Iterable[A]*): LazyList[A] = - newLL(concatIterator(xss.toIterator)) - - private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = - if (!it.hasNext) State.Empty - else stateFromIteratorConcatSuffix(it.next().toIterator)(concatIterator(it)) - - /** An infinite LazyList that repeatedly applies a given function to a start value. - * - * @param start the start value of the LazyList - * @param f the function that's repeatedly applied - * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: => A)(f: A => A): LazyList[A] = - newLL { - val head = start - sCons(head, iterate(f(head))(f)) - } - - /** - * Create an infinite LazyList starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the LazyList - * @param step the increment value of the LazyList - * @return the LazyList starting at value `start`. 
- */ - def from(start: Int, step: Int): LazyList[Int] = - newLL(sCons(start, from(start + step, step))) - - /** - * Create an infinite LazyList starting at `start` and incrementing by `1`. - * - * @param start the start value of the LazyList - * @return the LazyList starting at value `start`. - */ - def from(start: Int): LazyList[Int] = from(start, 1) - - /** - * Create an infinite LazyList containing the given element expression (which - * is computed for each occurrence). - * - * @param elem the element composing the resulting LazyList - * @return the LazyList containing an infinite number of elem - */ - def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) - - override def fill[A](n: Int)(elem: => A): LazyList[A] = - if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - - override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { - def at(index: Int): LazyList[A] = - if (index < n) newLL(sCons(f(index), at(index + 1))) else empty - - at(0) - } - - // significantly simpler than the iterator returned by Iterator.unfold - def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = - newLL { - f(init) match { - case Some((elem, state)) => sCons(elem, unfold(state)(f)) - case None => State.Empty - } - } - - /** The builder returned by this method only evaluates elements - * of collections added to it as needed. - * - * @tparam A the type of the ${coll}’s elements - * @return A builder for $Coll objects. - */ - def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] - - private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) - extends AbstractIterator[A] { - override def hasNext: Boolean = !lazyList.isEmpty - - override def next(): A = - if (lazyList.isEmpty) Iterator.empty.next() - else { - val res = lazyList.head - lazyList = lazyList.tail - res - } - } - - private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) - extends AbstractIterator[LazyList[A]] { - private val minLen = size - step max 0 - private var first = true - - def hasNext: Boolean = - if (first) !lazyList.isEmpty - else lazyList.lengthGt(minLen) - - def next(): LazyList[A] = { - if (!hasNext) Iterator.empty.next() - else { - first = false - val list = lazyList - lazyList = list.drop(step) - list.take(size) - } - } - } - - private final class WithFilter[A] private[LazyList] (lazyList: LazyList[A], p: A => Boolean) - extends FilterMonadic[A, LazyList[A]] { - private[this] val filtered = lazyList.filter(p) - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - filtered.map(f) - def flatMap[B, That](f: A => GenTraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = filtered.flatMap(f) - def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): FilterMonadic[A, LazyList[A]] = new WithFilter(filtered, q) - } - - class LazyListCanBuildFrom[A] extends GenericCanBuildFrom[A] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LazyList[A]] = new LazyListCanBuildFrom[A] - - private final class LazyBuilder[A] extends Builder[A, LazyList[A]] { - import LazyBuilder._ - - private[this] var next: DeferredState[A] = _ - private[this] var list: LazyList[A] = _ - - clear() - - override def clear(): Unit = { - val deferred = new DeferredState[A] - list = newLL(deferred.eval()) - next = deferred - } - - override def result(): LazyList[A] = { - next init State.Empty - list - } - - override def +=(elem: A): this.type = { - val deferred = new 
DeferredState[A] - next init sCons(elem, newLL(deferred.eval())) - next = deferred - this - } - - // lazy implementation which doesn't evaluate the collection being added - override def ++=(xs: TraversableOnce[A]): this.type = { - val deferred = new DeferredState[A] - next init stateFromIteratorConcatSuffix(xs.toIterator)(deferred.eval()) - next = deferred - this - } - } - - private object LazyBuilder { - final class DeferredState[A] { - private[this] var _state: () => State[A] = _ - - def eval(): State[A] = { - val state = _state - if (state == null) throw new IllegalStateException("uninitialized") - state() - } - - // racy - def init(state: => State[A]): Unit = { - if (_state != null) throw new IllegalStateException("already initialized") - _state = () => state - } - } - } - - private case object SerializeEnd - - /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. - * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses - * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization - * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. - */ - @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyList[A]) - extends Serializable { - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - var these = coll - while (these.knownNonEmpty) { - out.writeObject(these.head) - these = these.tail - } - out.writeObject(SerializeEnd) - out.writeObject(these) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val init = new ArrayBuffer[A] - var initRead = false - while (!initRead) in.readObject match { - case SerializeEnd => initRead = true - case a => init += a.asInstanceOf[A] - } - val tail = in.readObject().asInstanceOf[LazyList[A]] - coll = tail.prependedAllToLL(init) - } - - private[this] def readResolve(): Any = coll - } - - override def iterate[A](start: A, len: Int)(f: A => A): LazyList[A] = - iterate(start)(f).take(len) - - override def range[A: Integral](start: A, end: A): LazyList[A] = - from(NumericRange(start, end, implicitly[Integral[A]].one)) - - override def range[A: Integral](start: A, end: A, step: A): LazyList[A] = - from(NumericRange(start, end, step)) -} \ No newline at end of file diff --git a/scalalib/overrides-2.11/scala/collection/compat/package.scala b/scalalib/overrides-2.11/scala/collection/compat/package.scala deleted file mode 100644 index 4bf7420f86..0000000000 --- a/scalalib/overrides-2.11/scala/collection/compat/package.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection - -import scala.collection.generic.{CanBuildFrom, GenericOrderedCompanion, IsTraversableLike} - -import scala.runtime.Tuple2Zipped -import scala.collection.{immutable => i, mutable => m} -import scala.{collection => c} -import scala.language.higherKinds - -package object compat extends compat.PackageShared { - implicit def genericOrderedCompanionToCBF[A, CC[X] <: Traversable[X]]( - fact: GenericOrderedCompanion[CC])( - implicit ordering: Ordering[A]): CanBuildFrom[Any, A, CC[A]] = - CompatImpl.simpleCBF(fact.newBuilder[A]) - - // CanBuildFrom instances for `IterableView[(K, V), Map[K, V]]` that preserve - // the strict type of the view to be `Map` instead of `Iterable` - // Instances produced by this method are used to chain `filterKeys` after `mapValues` - implicit def canBuildFromIterableViewMapLike[K, V, L, W, CC[X, Y] <: Map[X, Y]] - : CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] = - new CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] { - // `CanBuildFrom` parameters are used as type constraints, they are not used - // at run-time, hence the dummy builder implementations - def apply(from: IterableView[(K, V), CC[K, V]]) = new TraversableView.NoBuilder - def apply() = new TraversableView.NoBuilder - } - - implicit def toTraversableLikeExtensionMethods[Repr](self: Repr)( - implicit traversable: IsTraversableLike[Repr]) - : TraversableLikeExtensionMethods[traversable.A, Repr] = - new TraversableLikeExtensionMethods[traversable.A, Repr](traversable.conversion(self)) - - implicit def toSeqExtensionMethods[A](self: c.Seq[A]): SeqExtensionMethods[A] = - new SeqExtensionMethods[A](self) - - implicit def toTrulyTraversableLikeExtensionMethods[T1, El1, Repr1](self: T1)( - implicit w1: T1 => TraversableLike[El1, Repr1] - ): TrulyTraversableLikeExtensionMethods[El1, Repr1] = - new TrulyTraversableLikeExtensionMethods[El1, Repr1](w1(self)) - - implicit def toTuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( - self: Tuple2Zipped[El1, Repr1, El2, Repr2] - ): Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2] = - new Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2](self) - - implicit def toImmutableQueueExtensionMethods[A]( - self: i.Queue[A]): ImmutableQueueExtensionMethods[A] = - new ImmutableQueueExtensionMethods[A](self) - - implicit def toMutableQueueExtensionMethods[A]( - self: m.Queue[A]): MutableQueueExtensionMethods[A] = - new MutableQueueExtensionMethods[A](self) - -} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala b/scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala deleted file mode 100644 index 8cdd8756c3..0000000000 --- a/scalalib/overrides-2.12/scala/collection/compat/BuildFrom.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -import scala.collection.generic.CanBuildFrom -import scala.collection.mutable -import scala.language.higherKinds -import scala.language.implicitConversions - -/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. - * Implicit instances of `BuildFrom` are available for all collection types. 
- * - * @tparam From Type of source collection - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ -trait BuildFrom[-From, -A, +C] extends Any { - def fromSpecific(from: From)(it: IterableOnce[A]): C - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ - def newBuilder(from: From): mutable.Builder[A, C] - - @deprecated("Use newBuilder() instead of apply()", "2.13.0") - @`inline` def apply(from: From): mutable.Builder[A, C] = newBuilder(from) -} - -object BuildFrom { - - // Implicit instance derived from an implicit CanBuildFrom instance - implicit def fromCanBuildFrom[From, A, C]( - implicit cbf: CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = - new BuildFrom[From, A, C] { - def fromSpecific(from: From)(it: IterableOnce[A]): C = (cbf(from) ++= it).result() - def newBuilder(from: From): mutable.Builder[A, C] = cbf(from) - } - - // Implicit conversion derived from an implicit conversion to CanBuildFrom - implicit def fromCanBuildFromConversion[X, From, A, C](x: X)( - implicit toCanBuildFrom: X => CanBuildFrom[From, A, C]): BuildFrom[From, A, C] = - fromCanBuildFrom(toCanBuildFrom(x)) - -} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala b/scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala deleted file mode 100644 index 4ff1f5515e..0000000000 --- a/scalalib/overrides-2.12/scala/collection/compat/CompatImpl.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -import scala.reflect.ClassTag -import scala.collection.generic.CanBuildFrom -import scala.collection.{immutable => i, mutable => m} -import scala.language.higherKinds - -/* builder optimized for a single ++= call, which returns identity on result if possible - * and defers to the underlying builder if not. 
- */ -private final class IdentityPreservingBuilder[A, CC[X] <: TraversableOnce[X]]( - that: m.Builder[A, CC[A]])(implicit ct: ClassTag[CC[A]]) - extends m.Builder[A, CC[A]] { - - //invariant: ruined => (collection == null) - var collection: CC[A] = null.asInstanceOf[CC[A]] - var ruined = false - - private[this] def ruin(): Unit = { - if (collection != null) that ++= collection - collection = null.asInstanceOf[CC[A]] - ruined = true - } - - override def ++=(elems: TraversableOnce[A]): this.type = - elems match { - case ct(ca) if collection == null && !ruined => { - collection = ca - this - } - case _ => { - ruin() - that ++= elems - this - } - } - - def +=(elem: A): this.type = { - ruin() - that += elem - this - } - - def clear(): Unit = { - collection = null.asInstanceOf[CC[A]] - if (ruined) that.clear() - ruined = false - } - - def result(): CC[A] = if (collection == null) that.result() else collection -} - -private[compat] object CompatImpl { - def simpleCBF[A, C](f: => m.Builder[A, C]): CanBuildFrom[Any, A, C] = - new CanBuildFrom[Any, A, C] { - def apply(from: Any): m.Builder[A, C] = apply() - def apply(): m.Builder[A, C] = f - } - - type ImmutableBitSetCC[X] = ({ type L[_] = i.BitSet })#L[X] - type MutableBitSetCC[X] = ({ type L[_] = m.BitSet })#L[X] -} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala b/scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala deleted file mode 100644 index b7ab3f8ac7..0000000000 --- a/scalalib/overrides-2.12/scala/collection/compat/PackageShared.scala +++ /dev/null @@ -1,467 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -import scala.collection.generic._ -import scala.reflect.ClassTag -import scala.collection.{ - BitSet, - GenTraversable, - IterableLike, - IterableView, - MapLike, - TraversableLike, - immutable => i, - mutable => m -} -import scala.runtime.{Tuple2Zipped, Tuple3Zipped} -import scala.{collection => c} -import scala.language.higherKinds -import scala.language.implicitConversions - -/** The collection compatibility API */ -private[compat] trait PackageShared { - import CompatImpl._ - - /** - * A factory that builds a collection of type `C` with elements of type `A`. - * - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ - type Factory[-A, +C] = CanBuildFrom[Nothing, A, C] - - implicit class FactoryOps[-A, +C](private val factory: Factory[A, C]) { - - /** - * @return A collection of type `C` containing the same elements - * as the source collection `it`. - * @param it Source collection - */ - def fromSpecific(it: TraversableOnce[A]): C = (factory() ++= it).result() - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ - def newBuilder: m.Builder[A, C] = factory() - } - - implicit def genericCompanionToCBF[A, CC[X] <: GenTraversable[X]]( - fact: GenericCompanion[CC]): CanBuildFrom[Any, A, CC[A]] = { - /* see https://github.com/scala/scala-collection-compat/issues/337 - `simpleCBF.apply` takes a by-name parameter and relies on - repeated references generating new builders, thus this expression - must be non-strict - */ - def builder: m.Builder[A, CC[A]] = fact match { - case c.Seq | i.Seq => new IdentityPreservingBuilder[A, i.Seq](i.Seq.newBuilder[A]) - case c.LinearSeq | i.LinearSeq => - new IdentityPreservingBuilder[A, i.LinearSeq](i.LinearSeq.newBuilder[A]) - case _ => fact.newBuilder[A] - } - simpleCBF(builder) - } - - implicit def sortedSetCompanionToCBF[A: Ordering, - CC[X] <: c.SortedSet[X] with c.SortedSetLike[X, CC[X]]]( - fact: SortedSetFactory[CC]): CanBuildFrom[Any, A, CC[A]] = - simpleCBF(fact.newBuilder[A]) - - implicit def arrayCompanionToCBF[A: ClassTag](fact: Array.type): CanBuildFrom[Any, A, Array[A]] = - simpleCBF(Array.newBuilder[A]) - - implicit def mapFactoryToCBF[K, V, CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( - fact: MapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = - simpleCBF(fact.newBuilder[K, V]) - - implicit def sortedMapFactoryToCBF[ - K: Ordering, - V, - CC[A, B] <: c.SortedMap[A, B] with c.SortedMapLike[A, B, CC[A, B]]]( - fact: SortedMapFactory[CC]): CanBuildFrom[Any, (K, V), CC[K, V]] = - simpleCBF(fact.newBuilder[K, V]) - - implicit def bitSetFactoryToCBF(fact: BitSetFactory[BitSet]): CanBuildFrom[Any, Int, BitSet] = - simpleCBF(fact.newBuilder) - - implicit def immutableBitSetFactoryToCBF( - fact: BitSetFactory[i.BitSet]): CanBuildFrom[Any, Int, ImmutableBitSetCC[Int]] = - simpleCBF(fact.newBuilder) - - implicit def mutableBitSetFactoryToCBF( - fact: BitSetFactory[m.BitSet]): CanBuildFrom[Any, Int, MutableBitSetCC[Int]] = - simpleCBF(fact.newBuilder) - - implicit class IterableFactoryExtensionMethods[CC[X] <: GenTraversable[X]]( - private val fact: GenericCompanion[CC]) { - def from[A](source: TraversableOnce[A]): CC[A] = - fact.apply(source.toSeq: _*) - } - - implicit class MapFactoryExtensionMethods[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]]( - private val fact: MapFactory[CC]) { - def from[K, V](source: TraversableOnce[(K, V)]): CC[K, V] = - fact.apply(source.toSeq: _*) - } - - implicit class BitSetFactoryExtensionMethods[ - C <: scala.collection.BitSet with scala.collection.BitSetLike[C]]( - private val fact: BitSetFactory[C]) { - def fromSpecific(source: TraversableOnce[Int]): C = - fact.apply(source.toSeq: _*) - } - - private[compat] def build[T, CC](builder: m.Builder[T, CC], source: TraversableOnce[T]): CC = { - builder ++= source - builder.result() - } - - implicit def toImmutableSortedMapExtensions( - fact: i.SortedMap.type): ImmutableSortedMapExtensions = - new ImmutableSortedMapExtensions(fact) - - implicit def toImmutableListMapExtensions(fact: i.ListMap.type): ImmutableListMapExtensions = - new ImmutableListMapExtensions(fact) - - implicit def toImmutableHashMapExtensions(fact: i.HashMap.type): ImmutableHashMapExtensions = - new ImmutableHashMapExtensions(fact) - - implicit def toImmutableTreeMapExtensions(fact: i.TreeMap.type): ImmutableTreeMapExtensions = - new ImmutableTreeMapExtensions(fact) - - implicit def toImmutableIntMapExtensions(fact: i.IntMap.type): ImmutableIntMapExtensions = - new ImmutableIntMapExtensions(fact) - - implicit def toImmutableLongMapExtensions(fact: i.LongMap.type): ImmutableLongMapExtensions = - new 
ImmutableLongMapExtensions(fact) - - implicit def toMutableLongMapExtensions(fact: m.LongMap.type): MutableLongMapExtensions = - new MutableLongMapExtensions(fact) - - implicit def toMutableHashMapExtensions(fact: m.HashMap.type): MutableHashMapExtensions = - new MutableHashMapExtensions(fact) - - implicit def toMutableListMapExtensions(fact: m.ListMap.type): MutableListMapExtensions = - new MutableListMapExtensions(fact) - - implicit def toMutableMapExtensions(fact: m.Map.type): MutableMapExtensions = - new MutableMapExtensions(fact) - - implicit def toStreamExtensionMethods[A](stream: Stream[A]): StreamExtensionMethods[A] = - new StreamExtensionMethods[A](stream) - - implicit def toSortedExtensionMethods[K, V <: Sorted[K, V]]( - fact: Sorted[K, V]): SortedExtensionMethods[K, V] = - new SortedExtensionMethods[K, V](fact) - - implicit def toIteratorExtensionMethods[A](self: Iterator[A]): IteratorExtensionMethods[A] = - new IteratorExtensionMethods[A](self) - - implicit def toTraversableExtensionMethods[A]( - self: Traversable[A]): TraversableExtensionMethods[A] = - new TraversableExtensionMethods[A](self) - - implicit def toTraversableOnceExtensionMethods[A]( - self: TraversableOnce[A]): TraversableOnceExtensionMethods[A] = - new TraversableOnceExtensionMethods[A](self) - - // This really belongs into scala.collection but there's already a package object - // in scala-library so we can't add to it - type IterableOnce[+X] = c.TraversableOnce[X] - val IterableOnce = c.TraversableOnce - - implicit def toMapExtensionMethods[K, V]( - self: scala.collection.Map[K, V]): MapExtensionMethods[K, V] = - new MapExtensionMethods[K, V](self) - - implicit def toMapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( - self: IterableView[(K, V), C]): MapViewExtensionMethods[K, V, C] = - new MapViewExtensionMethods[K, V, C](self) -} - -class ImmutableSortedMapExtensions(private val fact: i.SortedMap.type) extends AnyVal { - def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.SortedMap[K, V] = - build(i.SortedMap.newBuilder[K, V], source) -} - -class ImmutableListMapExtensions(private val fact: i.ListMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): i.ListMap[K, V] = - build(i.ListMap.newBuilder[K, V], source) -} - -class ImmutableHashMapExtensions(private val fact: i.HashMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): i.HashMap[K, V] = - build(i.HashMap.newBuilder[K, V], source) -} - -class ImmutableTreeMapExtensions(private val fact: i.TreeMap.type) extends AnyVal { - def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.TreeMap[K, V] = - build(i.TreeMap.newBuilder[K, V], source) -} - -class ImmutableIntMapExtensions(private val fact: i.IntMap.type) extends AnyVal { - def from[V](source: TraversableOnce[(Int, V)]): i.IntMap[V] = - build(i.IntMap.canBuildFrom[Int, V](), source) -} - -class ImmutableLongMapExtensions(private val fact: i.LongMap.type) extends AnyVal { - def from[V](source: TraversableOnce[(Long, V)]): i.LongMap[V] = - build(i.LongMap.canBuildFrom[Long, V](), source) -} - -class MutableLongMapExtensions(private val fact: m.LongMap.type) extends AnyVal { - def from[V](source: TraversableOnce[(Long, V)]): m.LongMap[V] = - build(m.LongMap.canBuildFrom[Long, V](), source) -} - -class MutableHashMapExtensions(private val fact: m.HashMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): m.HashMap[K, V] = - build(m.HashMap.canBuildFrom[K, V](), source) -} - -class 
MutableListMapExtensions(private val fact: m.ListMap.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): m.ListMap[K, V] = - build(m.ListMap.canBuildFrom[K, V](), source) -} - -class MutableMapExtensions(private val fact: m.Map.type) extends AnyVal { - def from[K, V](source: TraversableOnce[(K, V)]): m.Map[K, V] = - build(m.Map.canBuildFrom[K, V](), source) -} - -class StreamExtensionMethods[A](private val stream: Stream[A]) extends AnyVal { - def lazyAppendedAll(as: => TraversableOnce[A]): Stream[A] = stream.append(as) -} - -class SortedExtensionMethods[K, T <: Sorted[K, T]](private val fact: Sorted[K, T]) { - def rangeFrom(from: K): T = fact.from(from) - def rangeTo(to: K): T = fact.to(to) - def rangeUntil(until: K): T = fact.until(until) -} - -class IteratorExtensionMethods[A](private val self: c.Iterator[A]) extends AnyVal { - def sameElements[B >: A](that: c.TraversableOnce[B]): Boolean = { - self.sameElements(that.iterator) - } - def concat[B >: A](that: c.TraversableOnce[B]): c.TraversableOnce[B] = self ++ that - def tapEach[U](f: A => U): c.Iterator[A] = self.map(a => { f(a); a }) -} - -class TraversableOnceExtensionMethods[A](private val self: c.TraversableOnce[A]) extends AnyVal { - def iterator: Iterator[A] = self.toIterator - - def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.min(ord)) - } - - def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.max(ord)) - } - - def minByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.minBy(f)(cmp)) - } - - def maxByOption[B](f: A => B)(implicit cmp: Ordering[B]): Option[A] = { - if (self.isEmpty) - None - else - Some(self.maxBy(f)(cmp)) - } -} - -class TraversableExtensionMethods[A](private val self: c.Traversable[A]) extends AnyVal { - def iterableFactory: GenericCompanion[Traversable] = self.companion - - def sizeCompare(otherSize: Int): Int = SizeCompareImpl.sizeCompareInt(self)(otherSize) - def sizeIs: SizeCompareOps = new SizeCompareOps(self) - def sizeCompare(that: c.Traversable[_]): Int = SizeCompareImpl.sizeCompareColl(self)(that) - -} - -class SeqExtensionMethods[A](private val self: c.Seq[A]) extends AnyVal { - def lengthIs: SizeCompareOps = new SizeCompareOps(self) -} - -class SizeCompareOps private[compat] (private val it: c.Traversable[_]) extends AnyVal { - import SizeCompareImpl._ - - /** Tests if the size of the collection is less than some value. */ - @inline def <(size: Int): Boolean = sizeCompareInt(it)(size) < 0 - - /** Tests if the size of the collection is less than or equal to some value. */ - @inline def <=(size: Int): Boolean = sizeCompareInt(it)(size) <= 0 - - /** Tests if the size of the collection is equal to some value. */ - @inline def ==(size: Int): Boolean = sizeCompareInt(it)(size) == 0 - - /** Tests if the size of the collection is not equal to some value. */ - @inline def !=(size: Int): Boolean = sizeCompareInt(it)(size) != 0 - - /** Tests if the size of the collection is greater than or equal to some value. */ - @inline def >=(size: Int): Boolean = sizeCompareInt(it)(size) >= 0 - - /** Tests if the size of the collection is greater than some value. 
*/ - @inline def >(size: Int): Boolean = sizeCompareInt(it)(size) > 0 -} - -private object SizeCompareImpl { - def sizeCompareInt(self: c.Traversable[_])(otherSize: Int): Int = - self match { - case self: c.SeqLike[_, _] => self.lengthCompare(otherSize) - case _ => - if (otherSize < 0) 1 - else { - var i = 0 - val it = self.toIterator - while (it.hasNext) { - if (i == otherSize) return 1 - it.next() - i += 1 - } - i - otherSize - } - } - - // `IndexedSeq` is the only thing that we can safely say has a known size - def sizeCompareColl(self: c.Traversable[_])(that: c.Traversable[_]): Int = - that match { - case that: c.IndexedSeq[_] => sizeCompareInt(self)(that.length) - case _ => - self match { - case self: c.IndexedSeq[_] => - val res = sizeCompareInt(that)(self.length) - // can't just invert the result, because `-Int.MinValue == Int.MinValue` - if (res == Int.MinValue) 1 else -res - case _ => - val thisIt = self.toIterator - val thatIt = that.toIterator - while (thisIt.hasNext && thatIt.hasNext) { - thisIt.next() - thatIt.next() - } - java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) - } - } -} - -class TraversableLikeExtensionMethods[A, Repr](private val self: c.GenTraversableLike[A, Repr]) - extends AnyVal { - def tapEach[U](f: A => U)(implicit bf: CanBuildFrom[Repr, A, Repr]): Repr = - self.map(a => { f(a); a }) - - def partitionMap[A1, A2, That, Repr1, Repr2](f: A => Either[A1, A2])( - implicit bf1: CanBuildFrom[Repr, A1, Repr1], - bf2: CanBuildFrom[Repr, A2, Repr2] - ): (Repr1, Repr2) = { - val l = bf1() - val r = bf2() - self.foreach { x => - f(x) match { - case Left(x1) => l += x1 - case Right(x2) => r += x2 - } - } - (l.result(), r.result()) - } - - def groupMap[K, B, That](key: A => K)(f: A => B)( - implicit bf: CanBuildFrom[Repr, B, That]): Map[K, That] = { - val map = m.Map.empty[K, m.Builder[B, That]] - for (elem <- self) { - val k = key(elem) - val bldr = map.getOrElseUpdate(k, bf(self.repr)) - bldr += f(elem) - } - val res = Map.newBuilder[K, That] - for ((k, bldr) <- map) res += ((k, bldr.result())) - res.result() - } - - def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): Map[K, B] = { - val map = m.Map.empty[K, B] - for (elem <- self) { - val k = key(elem) - val v = map.get(k) match { - case Some(b) => reduce(b, f(elem)) - case None => f(elem) - } - map.put(k, v) - } - map.toMap - } -} - -class TrulyTraversableLikeExtensionMethods[El1, Repr1]( - private val self: TraversableLike[El1, Repr1]) - extends AnyVal { - - def lazyZip[El2, Repr2, T2](t2: T2)( - implicit w2: T2 => IterableLike[El2, Repr2] - ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((self, t2)) -} - -class Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( - private val self: Tuple2Zipped[El1, Repr1, El2, Repr2]) { - - def lazyZip[El3, Repr3, T3](t3: T3)(implicit w3: T3 => IterableLike[El3, Repr3]) - : Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = - new Tuple3Zipped((self.colls._1, self.colls._2, t3)) -} - -class MapExtensionMethods[K, V](private val self: scala.collection.Map[K, V]) extends AnyVal { - - def foreachEntry[U](f: (K, V) => U): Unit = { - self.foreach { case (k, v) => f(k, v) } - } - -} - -class MapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]]( - private val self: IterableView[(K, V), C]) - extends AnyVal { - - def mapValues[W, That](f: V => W)( - implicit bf: CanBuildFrom[IterableView[(K, V), C], (K, W), That]): That = - self.map[(K, W), That] { case (k, v) => (k, f(v)) } - - // TODO: Replace the current implementation of `mapValues` 
with this - // after major version bump when bincompat can be broken. - // At the same time, remove `canBuildFromIterableViewMapLike` - /* - def mapValues[W](f: V => W): IterableView[(K, W), C] = - // the implementation of `self.map` also casts the result - self.map({ case (k, v) => (k, f(v)) }).asInstanceOf[IterableView[(K, W), C]] - */ - - def filterKeys(p: K => Boolean): IterableView[(K, V), C] = - self.filter { case (k, _) => p(k) } -} - -class ImmutableQueueExtensionMethods[A](private val self: i.Queue[A]) extends AnyVal { - def enqueueAll[B >: A](iter: c.Iterable[B]): i.Queue[B] = - self.enqueue(iter.to[i.Iterable]) -} - -class MutableQueueExtensionMethods[Element](private val self: m.Queue[Element]) extends AnyVal { - def enqueueAll(iter: c.Iterable[Element]): Unit = - self.enqueue(iter.toIndexedSeq: _*) -} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala b/scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala deleted file mode 100644 index e0da76ef4a..0000000000 --- a/scalalib/overrides-2.12/scala/collection/compat/immutable/ArraySeq.scala +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat.immutable - -import java.util.Arrays - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.AbstractSeq -import scala.collection.generic._ -import scala.collection.immutable.IndexedSeq -import scala.collection.mutable.{ArrayBuilder, Builder, WrappedArrayBuilder} -import scala.reflect.ClassTag -import scala.util.hashing.MurmurHash3 - -/** - * An immutable array. - * - * Supports efficient indexed access and has a small memory footprint. - * - * @define Coll `ArraySeq` - * @define coll wrapped array - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -abstract class ArraySeq[+T] extends AbstractSeq[T] with IndexedSeq[T] { - - override protected[this] def thisCollection: ArraySeq[T] = this - - /** The tag of the element type */ - protected[this] def elemTag: ClassTag[T] - - /** The length of the array */ - def length: Int - - /** The element at given index */ - def apply(index: Int): T - - /** The underlying array */ - def unsafeArray: Array[T @uncheckedVariance] - - override def stringPrefix = "ArraySeq" - - /** Clones this object, including the underlying Array. */ - override def clone(): ArraySeq[T] = ArraySeq unsafeWrapArray unsafeArray.clone() - - /** Creates new builder for this collection ==> move to subclasses - */ - override protected[this] def newBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag) - -} - -/** A companion object used to create instances of `ArraySeq`. - */ -object ArraySeq { - // This is reused for all calls to empty. 
- private val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) - def empty[T <: AnyRef]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - - def newBuilder[T](implicit elemTag: ClassTag[T]): Builder[T, ArraySeq[T]] = - new WrappedArrayBuilder[T](elemTag).mapResult(w => unsafeWrapArray(w.array)) - - def apply[T](elems: T*)(implicit elemTag: ClassTag[T]): ArraySeq[T] = { - val b = newBuilder[T] - b ++= elems - b.result() - } - - def unapplySeq[T](seq: ArraySeq[T]): Some[ArraySeq[T]] = Some(seq) - - /** - * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type - * without copying. - * - * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without - * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, - * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: - * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still - * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing - * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a - * `ClassCastException` at runtime. - */ - def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = - (x.asInstanceOf[Array[_]] match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] - - implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] = - new CanBuildFrom[ArraySeq[_], T, ArraySeq[T]] { - def apply(from: ArraySeq[_]): Builder[T, ArraySeq[T]] = - ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] - def apply: Builder[T, ArraySeq[T]] = - ArrayBuilder.make[T]()(m) mapResult ArraySeq.unsafeWrapArray[T] - } - - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] with Serializable { - lazy val elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) - def length: Int = unsafeArray.length - def apply(index: Int): T = unsafeArray(index) - def update(index: Int, elem: T) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofRef[_] => - arrayEquals(unsafeArray.asInstanceOf[Array[AnyRef]], - that.unsafeArray.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] with Serializable { - def elemTag = ClassTag.Byte - def length: Int = unsafeArray.length - def apply(index: Int): Byte = unsafeArray(index) - def update(index: Int, elem: Byte) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] with Serializable { - def elemTag = ClassTag.Short - def length: Int = unsafeArray.length - def apply(index: Int): Short = unsafeArray(index) - def 
update(index: Int, elem: Short) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] with Serializable { - def elemTag = ClassTag.Char - def length: Int = unsafeArray.length - def apply(index: Int): Char = unsafeArray(index) - def update(index: Int, elem: Char) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] with Serializable { - def elemTag = ClassTag.Int - def length: Int = unsafeArray.length - def apply(index: Int): Int = unsafeArray(index) - def update(index: Int, elem: Int) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] with Serializable { - def elemTag = ClassTag.Long - def length: Int = unsafeArray.length - def apply(index: Int): Long = unsafeArray(index) - def update(index: Int, elem: Long) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] with Serializable { - def elemTag = ClassTag.Float - def length: Int = unsafeArray.length - def apply(index: Int): Float = unsafeArray(index) - def update(index: Int, elem: Float) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] with Serializable { - def elemTag = ClassTag.Double - def length: Int = unsafeArray.length - def apply(index: Int): Double = unsafeArray(index) - def update(index: Int, elem: Double) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofBoolean(val unsafeArray: Array[Boolean]) - extends ArraySeq[Boolean] - with Serializable { - def elemTag = ClassTag.Boolean - def length: Int = unsafeArray.length - def apply(index: Int): Boolean = unsafeArray(index) - def update(index: Int, elem: Boolean) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(unsafeArray, 
that.unsafeArray) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3L) - final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] with Serializable { - def elemTag = ClassTag.Unit - def length: Int = unsafeArray.length - def apply(index: Int): Unit = unsafeArray(index) - def update(index: Int, elem: Unit) { unsafeArray(index) = elem } - override def hashCode = MurmurHash3.arrayHash(unsafeArray, MurmurHash3.seqSeed) - override def equals(that: Any) = that match { - case that: ofUnit => unsafeArray.length == that.unsafeArray.length - case _ => super.equals(that) - } - } - - private[this] def arrayEquals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = { - if (xs eq ys) - return true - if (xs.length != ys.length) - return false - - val len = xs.length - var i = 0 - while (i < len) { - if (xs(i) != ys(i)) - return false - i += 1 - } - true - } -} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala b/scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala deleted file mode 100644 index 4b4dd58bf2..0000000000 --- a/scalalib/overrides-2.12/scala/collection/compat/immutable/LazyList.scala +++ /dev/null @@ -1,1537 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat.immutable - -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.annotation.tailrec -import scala.annotation.unchecked.{uncheckedVariance => uV} -import scala.collection.{ - AbstractIterator, - AbstractSeq, - GenIterable, - GenSeq, - GenTraversableOnce, - LinearSeqOptimized, - mutable -} -import scala.collection.generic.{ - CanBuildFrom, - FilterMonadic, - GenericCompanion, - GenericTraversableTemplate, - SeqFactory -} -import scala.collection.immutable.{LinearSeq, NumericRange} -import scala.collection.mutable.{ArrayBuffer, Builder, StringBuilder} -import scala.language.implicitConversions -import scala.language.higherKinds - -/** This class implements an immutable linked list that evaluates elements - * in order and only when needed. Here is an example: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 } - * - * fibs take 5 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * }}} - * - * A `LazyList`, like the one in the example above, may be infinite in length. - * Aggregate methods, such as `count`, `sum`, `max` or `min` on such infinite length - * sequences will not terminate. Filtered infinite lazy lists are also effectively - * infinite in length. - * - * Elements of a `LazyList` are memoized; that is, the value of each element - * is computed only once. 
- * To illustrate, we will alter body of the `fibs` value above and take some - * more values: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * val fibs: LazyList[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip( - * fibs.tail).map(n => { - * println("Adding %d and %d".format(n._1, n._2)) - * n._1 + n._2 - * }) - * - * fibs take 5 foreach println - * fibs take 6 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // Adding 0 and 1 - * // 1 - * // Adding 1 and 1 - * // 2 - * // Adding 1 and 2 - * // 3 - * - * // And then prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * // Adding 2 and 3 - * // 5 - * }}} - * - * There are a number of subtle points to the above example. - * - * - The definition of `fibs` is a `val` not a method. The memoization of the - * `LazyList` requires us to have somewhere to store the information and a `val` - * allows us to do that. - * - * - While the `LazyList` is actually being modified during access, this does not - * change the notion of its immutability. Once the values are memoized they do - * not change and values that have yet to be memoized still "exist", they - * simply haven't been realized yet. - * - * - One must be cautious of memoization; you can very quickly eat up large - * amounts of memory if you're not careful. The reason for this is that the - * memoization of the `LazyList` creates a structure much like - * [[scala.collection.immutable.List]]. So long as something is holding on to - * the head, the head holds on to the tail, and so it continues recursively. - * If, on the other hand, there is nothing holding on to the head (e.g. we used - * `def` to define the `LazyList`) then once it is no longer being used directly, - * it disappears. - * - * - Note that some operations, including [[drop]], [[dropWhile]], - * [[flatMap]] or [[collect]] may process a large number of intermediate - * elements before returning. These necessarily hold onto the head, since - * they are methods on `LazyList`, and a lazy list holds its own head. For - * computations of this sort where memoization is not desired, use - * `Iterator` when possible. - * - * {{{ - * // For example, let's build the natural numbers and do some silly iteration - * // over them. - * - * // We'll start with a silly iteration - * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { - * // Stop after 200,000 - * if (i < 200001) { - * if (i % 50000 == 0) println(s + i) - * loop(s, iter.next(), iter) - * } - * } - * - * // Our first LazyList definition will be a val definition - * val lazylist1: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) - * loop(0) - * } - * - * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyList is held in lazylist1 - * val it1 = lazylist1.toIterator - * loop("Iterator1: ", it1.next(), it1) - * - * // We can redefine this LazyList such that all we have is the Iterator left - * // and allow the LazyList to be garbage collected as required. Using a def - * // to provide the LazyList ensures that no val is holding onto the head as - * // is the case with lazylist1 - * def lazylist2: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) - * loop(0) - * } - * val it2 = lazylist2.toIterator - * loop("Iterator2: ", it2.next(), it2) - * - * // And, of course, we don't actually need a LazyList at all for such a simple - * // problem. 
There's no reason to use a LazyList if you don't actually need - * // one. - * val it3 = new Iterator[Int] { - * var i = -1 - * def hasNext = true - * def next(): Int = { i += 1; i } - * } - * loop("Iterator3: ", it3.next(), it3) - * }}} - * - * - The fact that `tail` works at all is of interest. In the definition of - * `fibs` we have an initial `(0, 1, LazyList(...))` so `tail` is deterministic. - * If we defined `fibs` such that only `0` were concretely known then the act - * of determining `tail` would require the evaluation of `tail` which would - * cause an infinite recursion and stack overflow. If we define a definition - * where the tail is not initially computable then we're going to have an - * infinite recursion: - * {{{ - * // The first time we try to access the tail we're going to need more - * // information which will require us to recurse, which will require us to - * // recurse, which... - * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * }}} - * - * The definition of `fibs` above creates a larger number of objects than - * necessary depending on how you might want to implement it. The following - * implementation provides a more "cost effective" implementation due to the - * fact that it has a more direct route to the numbers themselves: - * - * {{{ - * lazy val fib: LazyList[Int] = { - * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) - * loop(1, 1) - * } - * }}} - * - * @tparam A the type of the elements contained in this lazy list. - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] - * section on `LazyLists` for more information. - * @define Coll `LazyList` - * @define coll lazy list - * @define orderDependent - * @define orderDependentFold - * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, - * `appendedAll`, `lazyAppendedAll`) without forcing any of the - * intermediate resulting lazy lists may overflow the stack when - * the final result is forced. - * @define preservesLaziness This method preserves laziness; elements are only evaluated - * individually as needed. - * @define initiallyLazy This method does not evaluate anything until an operation is performed - * on the result (e.g. calling `head` or `tail`, or checking if it is empty). - * @define evaluatesAllElements This method evaluates all elements of the collection. 
- */ -@SerialVersionUID(3L) -final class LazyList[+A] private (private[this] var lazyState: () => LazyList.State[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, LazyList] - with LinearSeqOptimized[A, LazyList[A]] - with Serializable { - import LazyList._ - - @volatile private[this] var stateEvaluated: Boolean = false - @inline private def stateDefined: Boolean = stateEvaluated - private[this] var midEvaluation = false - - private lazy val state: State[A] = { - // if it's already mid-evaluation, we're stuck in an infinite - // self-referential loop (also it's empty) - if (midEvaluation) { - throw new RuntimeException( - "self-referential LazyList or a derivation thereof has no more elements") - } - midEvaluation = true - val res = try lazyState() - finally midEvaluation = false - // if we set it to `true` before evaluating, we may infinite loop - // if something expects `state` to already be evaluated - stateEvaluated = true - lazyState = null // allow GC - res - } - - /** $preservesLaziness */ - def knownSize: Int = if (knownIsEmpty) 0 else -1 -// override def iterableFactory: SeqFactory[LazyList] = LazyList - - override def isEmpty: Boolean = state eq State.Empty - - override def head: A = state.head - - override def tail: LazyList[A] = state.tail - - @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) - @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) - - // It's an imperfect world, but at least we can bottle up the - // imperfection in a capsule. - @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] - @inline private def isLLBuilder[B, That](bf: CanBuildFrom[LazyList[A], B, That]) = - bf(repr).isInstanceOf[LazyList.LazyBuilder[_]] - - override def companion: GenericCompanion[LazyList] = LazyList - - /** Evaluates all undefined elements of the lazy list. - * - * This method detects cycles in lazy lists, and terminates after all - * elements of the cycle are evaluated. For example: - * - * {{{ - * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring - * ring.force - * ring.toString - * - * // prints - * // - * // LazyList(1, 2, 3, ...) - * }}} - * - * This method will *not* terminate for non-cyclic infinite-sized collections. - * - * @return this - */ - def force: this.type = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyList[A] = this - if (!these.isEmpty) { - these = these.tail - } - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - /** @inheritdoc - * - * The iterator returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def iterator: Iterator[A] = - if (knownIsEmpty) Iterator.empty - else new LazyIterator(this) - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. - * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying LazyList as elements - * are consumed. - * @note This function will force the realization of the entire LazyList - * unless the `f` throws an exception. 
- */ - @tailrec - override def foreach[U](f: A => U): Unit = { - if (!isEmpty) { - f(head) - tail.foreach(f) - } - } - - /** LazyList specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyList`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override def foldLeft[B](z: B)(op: (B, A) => B): B = - if (isEmpty) z - else tail.foldLeft(op(z, head))(op) - - // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef = - if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this - - override def stringPrefix = "LazyList" - - /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. - * - * $preservesLaziness - * - * $appendStackSafety - * - * @param suffix The collection that gets appended to this lazy list - * @return The lazy list containing elements of this lazy list and the iterable object. - */ - def lazyAppendedAll[B >: A](suffix: => GenTraversableOnce[B]): LazyList[B] = - newLL { - if (isEmpty) suffix match { - case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList - case coll => stateFromIterator(coll.toIterator) - } else sCons(head, tail lazyAppendedAll suffix) - } - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - override def ++[B >: A, That](suffix: GenTraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList.from(suffix) - else lazyAppendedAll(suffix) - } else super.++(suffix)(bf) - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) - else lazyAppendedAll(Iterator.single(elem)) - } else super.:+(elem)(bf) - - /** @inheritdoc - * - * $evaluatesAllElements - */ - override def equals(that: Any): Boolean = - if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def scanLeft[B, That](z: B)(op: (B, A) => B)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) - else newLL(scanLeftState(z)(op)) - } else super.scanLeft(z)(op)(bf) - - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = - sCons( - z, - newLL { - if (isEmpty) State.Empty - else tail.scanLeftState(op(z, head))(op) - } - ) - - /** LazyList specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyList`. - * @return The accumulated value from successive applications of `f`. 
- */ - override def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left: LazyList[A] = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) - - /** @inheritdoc - * - * $preservesLaziness - */ - def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { - val (left, right) = mapToLL(f).partition(_.isLeft) - (left.mapToLL(_.asInstanceOf[Left[A1, _]].a), right.mapToLL(_.asInstanceOf[Right[_, A2]].b)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filter(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = false) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filterNot(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = true) - - /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. - * - * This method is not particularly useful for a lazy list, as [[filter]] already preserves - * laziness. - * - * The `collection.WithFilter` returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def withFilter(p: A => Boolean): FilterMonadic[A, LazyList[A]] = - new LazyList.WithFilter(this, p) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - newLL(sCons(elem, this)) - } else super.+:(elem)(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def ++:[B >: A, That](prefix: TraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList.from(prefix) - else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) - } else super.++:(prefix)(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def ++:[B >: A, That](prefix: Traversable[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList.from(prefix) - else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) - } else super.++:(prefix)(bf) - - private def prependedAllToLL[B >: A](prefix: Traversable[B]): LazyList[B] = - if (knownIsEmpty) LazyList.from(prefix) - else newLL(stateFromIteratorConcatSuffix(prefix.toIterator)(state)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat(mapToLL(f): @inline) - else super.map(f)(bf) - - private def mapToLL[B](f: A => B): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else (mapImpl(f): @inline) - - /** @inheritdoc - * - * $preservesLaziness - */ - def tapEach[U](f: A => U): LazyList[A] = mapToLL { a => - f(a); a - } - - private def mapImpl[B](f: A => B): LazyList[B] = - newLL { - if (isEmpty) State.Empty - else sCons(f(head), tail.mapImpl(f)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def collect[B, That](pf: PartialFunction[A, B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - 
if (knownIsEmpty) LazyList.empty - else LazyList.collectImpl(this, pf) - } else super.collect(pf)(bf) - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element for which the partial function is defined. - */ - @tailrec - override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = - if (isEmpty) None - else { - val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) - if (res.asInstanceOf[AnyRef] eq LazyList.pfMarker) tail.collectFirst(pf) - else Some(res) - } - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element matching the predicate. - */ - @tailrec - override def find(p: A => Boolean): Option[A] = - if (isEmpty) None - else { - val elem = head - if (p(elem)) Some(elem) - else tail.find(p) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def flatMap[B, That](f: A => GenTraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat(flatMapToLL(f): @inline) - else super.flatMap(f)(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def flatten[B](implicit asIterable: A => GenTraversableOnce[B]): LazyList[B] = - flatMapToLL(asIterable) - - private def flatMapToLL[B](f: A => GenTraversableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else LazyList.flatMapImpl(this, f) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zip[A1 >: A, B, That](that: GenIterable[B])( - implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = - if (isLLBuilder(bf)) asThat(zipToLL(that): @inline) - else super.zip(that)(bf) - - private def zipToLL[B](that: GenIterable[B]): LazyList[(A, B)] = - if (this.knownIsEmpty) LazyList.empty - else newLL(zipState(that.toIterator)) - - private def zipState[B](it: Iterator[B]): State[(A, B)] = - if (this.isEmpty || !it.hasNext) State.Empty - else sCons((head, it.next()), newLL { tail zipState it }) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipWithIndex[A1 >: A, That]( - implicit bf: CanBuildFrom[LazyList[A], (A1, Int), That]): That = - if (isLLBuilder(bf)) asThat { - this zip LazyList.from(0) - } else super.zipWithIndex(bf) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)( - implicit bf: CanBuildFrom[LazyList[A], (A1, B), That]): That = - if (isLLBuilder(bf)) asThat { - if (this.knownIsEmpty) LazyList.continually(thisElem) zip that - else newLL(zipAllState(that.toIterator, thisElem, thatElem)) - } else super.zipAll(that, thisElem, thatElem)(bf) - - private def zipAllState[A1 >: A, B](it: Iterator[B], - thisElem: A1, - thatElem: B): State[(A1, B)] = { - if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, it.next()), newLL { - LazyList.continually(thisElem) zipState it - }) - else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) - } else { - if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zipToLL LazyList.continually(thatElem)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = - (mapToLL(asPair(_)._1), mapToLL(asPair(_)._2)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip3[A1, A2, A3]( - implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = - (mapToLL(asTriple(_)._1), mapToLL(asTriple(_)._2), 
mapToLL(asTriple(_)._3)) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all except the first `n` elements. - */ - override def drop(n: Int): LazyList[A] = - if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else LazyList.dropImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all elements after the predicate returns `false`. - */ - override def dropWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.dropWhileImpl(this, p) - - /** @inheritdoc - * - * $initiallyLazy - */ - override def dropRight(n: Int): LazyList[A] = { - if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else - newLL { - var scout = this - var remaining = n - // advance scout n elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - remaining -= 1 - scout = scout.tail - } - dropRightState(scout) - } - } - - private def dropRightState(scout: LazyList[_]): State[A] = - if (scout.isEmpty) State.Empty - else sCons(head, newLL(tail.dropRightState(scout.tail))) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def take(n: Int): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else (takeImpl(n): @inline) - - private def takeImpl(n: Int): LazyList[A] = { - if (n <= 0) LazyList.empty - else - newLL { - if (isEmpty) State.Empty - else sCons(head, tail.takeImpl(n - 1)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def takeWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else (takeWhileImpl(p): @inline) - - private def takeWhileImpl(p: A => Boolean): LazyList[A] = - newLL { - if (isEmpty || !p(head)) State.Empty - else sCons(head, tail.takeWhileImpl(p)) - } - - /** @inheritdoc - * - * $initiallyLazy - */ - override def takeRight(n: Int): LazyList[A] = - if (n <= 0 || knownIsEmpty) LazyList.empty - else LazyList.takeRightImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all but the first `from` elements. 
- */ - override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) - - /** @inheritdoc - * - * $evaluatesAllElements - */ - override def reverse: LazyList[A] = reverseOnto(LazyList.empty) - - // need contravariant type B to make the compiler happy - still returns LazyList[A] - @tailrec - private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = - if (isEmpty) tl - else tail.reverseOnto(newLL(sCons(head, tl))) - - private def occCounts0[B](sq: collection.Seq[B]): mutable.Map[B, Int] = { - val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 } - for (y <- sq) occ(y) += 1 - occ - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def diff[B >: A](that: GenSeq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else { - val occ = occCounts0(that.seq) - LazyList.from { - iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) true - else { - occ(x) = ox - 1 - false - } - } - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def intersect[B >: A](that: GenSeq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else { - val occ = occCounts0(that.seq) - LazyList.from { - iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - occ(x) = ox - 1 - true - } else false - } - } - } - - @tailrec - private def lengthGt(len: Int): Boolean = - if (len < 0) true - else if (isEmpty) false - else tail.lengthGt(len - 1) - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * a single element ahead of the iterator is evaluated. - */ - override def grouped(size: Int): Iterator[LazyList[A]] = { - require(size > 0, "size must be positive, but was " + size) - slidingImpl(size = size, step = size) - } - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * `size - step max 1` elements ahead of the iterator are evaluated. 
- */ - override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { - require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") - slidingImpl(size = size, step = step) - } - - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = - if (knownIsEmpty) Iterator.empty - else new SlidingIterator[A](this, size = size, step = step) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def padTo[B >: A, That](len: Int, elem: B)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat(padToLL(len, elem)) - else super.padTo(len, elem)(bf) - - private def padToLL[B >: A](len: Int, elem: B): LazyList[B] = - if (len <= 0) this - else - newLL { - if (isEmpty) LazyList.fill(len)(elem).state - else sCons(head, tail.padToLL(len - 1, elem)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def patch[B >: A, That](from: Int, other: GenSeq[B], replaced: Int)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (knownIsEmpty) LazyList from other - else patchImpl(from, other, replaced) - } else super.patch(from, other, replaced) - - private def patchImpl[B >: A](from: Int, other: GenSeq[B], replaced: Int): LazyList[B] = - newLL { - if (from <= 0) - stateFromIteratorConcatSuffix(other.toIterator)(LazyList.dropImpl(this, replaced).state) - else if (isEmpty) stateFromIterator(other.toIterator) - else sCons(head, tail.patchImpl(from - 1, other, replaced)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def updated[B >: A, That](index: Int, elem: B)( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - if (isLLBuilder(bf)) asThat { - if (index < 0) throw new IndexOutOfBoundsException(s"$index") - else updatedImpl(index, elem, index) - } else super.updated(index, elem) - - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { - newLL { - if (index <= 0) sCons(elem, tail) - else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) - else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) - } - } - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. - * - * $evaluatesAllElements - * - * @param sb the string builder to which elements are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. 
- */ - override def addString(sb: StringBuilder, - start: String, - sep: String, - end: String): StringBuilder = { - force - addStringNoForce(sb, start, sep, end) - sb - } - - private[this] def addStringNoForce(b: StringBuilder, - start: String, - sep: String, - end: String): StringBuilder = { - b.append(start) - if (!stateDefined) b.append("") - else if (!isEmpty) { - b.append(head) - var cursor = this - @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) - var scout = tail - @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty - if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { - cursor = scout - if (scoutNonEmpty) { - scout = scout.tail - // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings - while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) { - appendCursorElement() - cursor = cursor.tail - scout = scout.tail - if (scoutNonEmpty) scout = scout.tail - } - } - } - if (!scoutNonEmpty) { // Not a cycle, scout hit an end - while (cursor ne scout) { - appendCursorElement() - cursor = cursor.tail - } - // if cursor (eq scout) has state defined, it is empty; else unknown state - if (!cursor.stateDefined) b.append(sep).append("") - } else { - @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) - // Cycle. - // If we have a prefix of length P followed by a cycle of length C, - // the scout will be at position (P%C) in the cycle when the cursor - // enters it at P. They'll then collide when the scout advances another - // C - (P%C) ahead of the cursor. - // If we run the scout P farther, then it will be at the start of - // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner - // starts at the beginning of the prefix, they'll collide exactly at - // the start of the loop. - var runner = this - var k = 0 - while (!same(runner, scout)) { - runner = runner.tail - scout = scout.tail - k += 1 - } - // Now runner and scout are at the beginning of the cycle. Advance - // cursor, adding to string, until it hits; then we'll have covered - // everything once. If cursor is already at beginning, we'd better - // advance one first unless runner didn't go anywhere (in which case - // we've already looped once). - if (same(cursor, scout) && (k > 0)) { - appendCursorElement() - cursor = cursor.tail - } - while (!same(cursor, scout)) { - appendCursorElement() - cursor = cursor.tail - } - b.append(sep).append("") - } - } - b.append(end) - } - - /** $preservesLaziness - * - * @return a string representation of this collection. An undefined state is - * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` - * - * Examples: - * - * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; - * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; - * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains - * a cycle at the fourth element. - */ - override def toString(): String = - addStringNoForce(new StringBuilder(stringPrefix), "(", ", ", ")").toString - - /** @inheritdoc - * - * $preservesLaziness - */ - override def hasDefiniteSize: Boolean = { - if (!stateDefined) false - else if (isEmpty) true - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. 
- var those = this - var these = tail - while (those ne these) { - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } - - override def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { - case that: LazyList[B] => this eqLL that - case _ => super.sameElements(that) - } - - @tailrec - private def eqLL[B >: A](that: LazyList[B]): Boolean = - (this eq that) || - (this.state eq that.state) || - (!this.isEmpty && - !that.isEmpty && - (this.head == that.head) && - (this.tail eqLL that.tail)) - - override def splitAt(n: Int): (LazyList[A], LazyList[A]) = (take(n), drop(n)) - - override def span(p: A => Boolean): (LazyList[A], LazyList[A]) = (takeWhile(p), dropWhile(p)) - - override def distinct: LazyList[A] = distinctBy(identity) - - def distinctBy[B](f: A => B): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else - LazyList.from { - val outer = iterator - new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B] - private[this] var nextElementDefined: Boolean = false - private[this] var nextElement: A = _ - - def hasNext: Boolean = - nextElementDefined || (outer.hasNext && { - val a = outer.next() - if (traversedValues.add(f(a))) { - nextElement = a - nextElementDefined = true - true - } else hasNext - }) - - def next(): A = - if (hasNext) { - nextElementDefined = false - nextElement - } else { - Iterator.empty.next() - } - } - } - - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = - if (cbf().isInstanceOf[LazyList.LazyBuilder[_]]) asThat(this) - else super.to(cbf) - - override def init: LazyList[A] = { - if (isEmpty) throw new UnsupportedOperationException - dropRight(1) - } -} - -/** - * $factoryInfo - * @define coll lazy list - * @define Coll `LazyList` - */ -@SerialVersionUID(3L) -object LazyList extends SeqFactory[LazyList] { - // Eagerly evaluate cached empty instance - private[this] val _empty = newLL(State.Empty).force - - private sealed trait State[+A] extends Serializable { - def head: A - def tail: LazyList[A] - } - - private object State { - @SerialVersionUID(3L) - object Empty extends State[Nothing] { - def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyList[Nothing] = - throw new UnsupportedOperationException("tail of empty lazy list") - } - - @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] - } - - /** Creates a new LazyList. */ - @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) - - /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) - - private val pfMarker: AnyRef = new AnyRef - private val anyToMarker: Any => Any = _ => pfMarker - - /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into - * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyList`. Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they - * can continue their execution where they left off. 
- */ - - private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - var elem: A = null.asInstanceOf[A] - var found = false - var rest = restRef // var rest = restRef.elem - while (!found && !rest.isEmpty) { - elem = rest.head - found = p(elem) != isFlipped - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty - } - } - - private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - val marker = pfMarker - val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased - - var res: B = marker.asInstanceOf[B] // safe because B is unbounded - var rest = restRef // var rest = restRef.elem - while ((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { - res = pf.applyOrElse(rest.head, toMarker) - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (res.asInstanceOf[AnyRef] eq marker) State.Empty - else sCons(res, collectImpl(rest, pf)) - } - } - - private def flatMapImpl[A, B](ll: LazyList[A], f: A => GenTraversableOnce[B]): LazyList[B] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - var it: Iterator[B] = null - var itHasNext = false - var rest = restRef // var rest = restRef.elem - while (!itHasNext && !rest.isEmpty) { - it = f(rest.head).toIterator - itHasNext = it.hasNext - if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw - rest = rest.tail - restRef = rest // restRef.elem = rest - } - } - if (itHasNext) { - val head = it.next() - rest = rest.tail - restRef = rest // restRef.elem = rest - sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) - } else State.Empty - } - } - - private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var iRef = n // val iRef = new IntRef(n) - newLL { - var rest = restRef // var rest = restRef.elem - var i = iRef // var i = iRef.elem - while (i > 0 && !rest.isEmpty) { - rest = rest.tail - restRef = rest // restRef.elem = rest - i -= 1 - iRef = i // iRef.elem = i - } - rest.state - } - } - - private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - newLL { - var rest = restRef // var rest = restRef.elem - while (!rest.isEmpty && p(rest.head)) { - rest = rest.tail - restRef = rest // restRef.elem = rest - } - rest.state - } - } - - private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var scoutRef = ll // val scoutRef = new ObjectRef(ll) - var remainingRef = n // val remainingRef = new IntRef(n) - newLL { - var scout = scoutRef // var scout = scoutRef.elem - var remaining = remainingRef // var remaining = remainingRef.elem - // advance `scout` `n` elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - scout = scout.tail - scoutRef = scout // 
scoutRef.elem = scout - remaining -= 1 - remainingRef = remaining // remainingRef.elem = remaining - } - var rest = restRef // var rest = restRef.elem - // advance `rest` and `scout` in tandem until `scout` reaches the end - while (!scout.isEmpty) { - scout = scout.tail - scoutRef = scout // scoutRef.elem = scout - rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail - restRef = rest // restRef.elem = rest - } - // `rest` is the last `n` elements (or all of them) - rest.state - } - } - - /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). - */ - object cons { - - /** A lazy list consisting of a given first element and remaining elements - * @param hd The first element of the result lazy list - * @param tl The remaining elements of the result lazy list - */ - def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) - - /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) - } - - implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { - - /** Construct a LazyList consisting of a given first element followed by elements - * from another LazyList. - */ - def #::[B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) - - /** Construct a LazyList consisting of the concatenation of the given LazyList and - * another LazyList. - */ - def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() - } - - object #:: { - def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = - if (!s.isEmpty) Some((s.head, s.tail)) else None - } - - def from[A](coll: GenTraversableOnce[A]): LazyList[A] = coll match { - case lazyList: LazyList[A] => lazyList - case _ => newLL(stateFromIterator(coll.toIterator)) - } - - override def apply[A](elems: A*): LazyList[A] = from(elems) - - override def empty[A]: LazyList[A] = _empty - - /** Creates a State from an Iterator, with another State appended after the Iterator - * is empty. - */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = - if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) - else suffix - - /** Creates a State from an IterableOnce. */ - private def stateFromIterator[A](it: Iterator[A]): State[A] = - if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) - else State.Empty - - def concat[A](xss: collection.Iterable[A]*): LazyList[A] = - newLL(concatIterator(xss.toIterator)) - - private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = - if (!it.hasNext) State.Empty - else stateFromIteratorConcatSuffix(it.next().toIterator)(concatIterator(it)) - - /** An infinite LazyList that repeatedly applies a given function to a start value. - * - * @param start the start value of the LazyList - * @param f the function that's repeatedly applied - * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: => A)(f: A => A): LazyList[A] = - newLL { - val head = start - sCons(head, iterate(f(head))(f)) - } - - /** - * Create an infinite LazyList starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the LazyList - * @param step the increment value of the LazyList - * @return the LazyList starting at value `start`. 
- */ - def from(start: Int, step: Int): LazyList[Int] = - newLL(sCons(start, from(start + step, step))) - - /** - * Create an infinite LazyList starting at `start` and incrementing by `1`. - * - * @param start the start value of the LazyList - * @return the LazyList starting at value `start`. - */ - def from(start: Int): LazyList[Int] = from(start, 1) - - /** - * Create an infinite LazyList containing the given element expression (which - * is computed for each occurrence). - * - * @param elem the element composing the resulting LazyList - * @return the LazyList containing an infinite number of elem - */ - def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) - - override def fill[A](n: Int)(elem: => A): LazyList[A] = - if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - - override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { - def at(index: Int): LazyList[A] = - if (index < n) newLL(sCons(f(index), at(index + 1))) else empty - - at(0) - } - - // significantly simpler than the iterator returned by Iterator.unfold - def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = - newLL { - f(init) match { - case Some((elem, state)) => sCons(elem, unfold(state)(f)) - case None => State.Empty - } - } - - /** The builder returned by this method only evaluates elements - * of collections added to it as needed. - * - * @tparam A the type of the ${coll}’s elements - * @return A builder for $Coll objects. - */ - def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] - - private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) - extends AbstractIterator[A] { - override def hasNext: Boolean = !lazyList.isEmpty - - override def next(): A = - if (lazyList.isEmpty) Iterator.empty.next() - else { - val res = lazyList.head - lazyList = lazyList.tail - res - } - } - - private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) - extends AbstractIterator[LazyList[A]] { - private val minLen = size - step max 0 - private var first = true - - def hasNext: Boolean = - if (first) !lazyList.isEmpty - else lazyList.lengthGt(minLen) - - def next(): LazyList[A] = { - if (!hasNext) Iterator.empty.next() - else { - first = false - val list = lazyList - lazyList = list.drop(step) - list.take(size) - } - } - } - - private final class WithFilter[A] private[LazyList] (lazyList: LazyList[A], p: A => Boolean) - extends FilterMonadic[A, LazyList[A]] { - private[this] val filtered = lazyList.filter(p) - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[LazyList[A], B, That]): That = - filtered.map(f) - def flatMap[B, That](f: A => GenTraversableOnce[B])( - implicit bf: CanBuildFrom[LazyList[A], B, That]): That = filtered.flatMap(f) - def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): FilterMonadic[A, LazyList[A]] = new WithFilter(filtered, q) - } - - class LazyListCanBuildFrom[A] extends GenericCanBuildFrom[A] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LazyList[A]] = new LazyListCanBuildFrom[A] - - private final class LazyBuilder[A] extends Builder[A, LazyList[A]] { - import LazyBuilder._ - - private[this] var next: DeferredState[A] = _ - private[this] var list: LazyList[A] = _ - - clear() - - override def clear(): Unit = { - val deferred = new DeferredState[A] - list = newLL(deferred.eval()) - next = deferred - } - - override def result(): LazyList[A] = { - next init State.Empty - list - } - - override def +=(elem: A): this.type = { - val deferred = new 
DeferredState[A] - next init sCons(elem, newLL(deferred.eval())) - next = deferred - this - } - - // lazy implementation which doesn't evaluate the collection being added - override def ++=(xs: TraversableOnce[A]): this.type = { - val deferred = new DeferredState[A] - next init stateFromIteratorConcatSuffix(xs.toIterator)(deferred.eval()) - next = deferred - this - } - } - - private object LazyBuilder { - final class DeferredState[A] { - private[this] var _state: () => State[A] = _ - - def eval(): State[A] = { - val state = _state - if (state == null) throw new IllegalStateException("uninitialized") - state() - } - - // racy - def init(state: => State[A]): Unit = { - if (_state != null) throw new IllegalStateException("already initialized") - _state = () => state - } - } - } - - private case object SerializeEnd - - /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. - * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses - * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization - * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. - */ - @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyList[A]) - extends Serializable { - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - var these = coll - while (these.knownNonEmpty) { - out.writeObject(these.head) - these = these.tail - } - out.writeObject(SerializeEnd) - out.writeObject(these) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val init = new ArrayBuffer[A] - var initRead = false - while (!initRead) in.readObject match { - case SerializeEnd => initRead = true - case a => init += a.asInstanceOf[A] - } - val tail = in.readObject().asInstanceOf[LazyList[A]] - coll = tail.prependedAllToLL(init) - } - - private[this] def readResolve(): Any = coll - } - - override def iterate[A](start: A, len: Int)(f: A => A): LazyList[A] = - iterate(start)(f).take(len) - - override def range[A: Integral](start: A, end: A): LazyList[A] = - from(NumericRange(start, end, implicitly[Integral[A]].one)) - - override def range[A: Integral](start: A, end: A, step: A): LazyList[A] = - from(NumericRange(start, end, step)) -} \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/collection/compat/package.scala b/scalalib/overrides-2.12/scala/collection/compat/package.scala deleted file mode 100644 index cd3f627625..0000000000 --- a/scalalib/overrides-2.12/scala/collection/compat/package.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection - -import scala.collection.generic.{CanBuildFrom, GenericOrderedCompanion, IsTraversableLike} -import scala.{collection => c} -import scala.runtime.Tuple2Zipped -import scala.collection.{immutable => i, mutable => m} -import scala.language.higherKinds - -package object compat extends compat.PackageShared { - implicit class MutableTreeMapExtensions2(private val fact: m.TreeMap.type) extends AnyVal { - def from[K: Ordering, V](source: TraversableOnce[(K, V)]): m.TreeMap[K, V] = - build(m.TreeMap.newBuilder[K, V], source) - } - - implicit class MutableSortedMapExtensions(private val fact: m.SortedMap.type) extends AnyVal { - def from[K: Ordering, V](source: TraversableOnce[(K, V)]): m.SortedMap[K, V] = - build(m.SortedMap.newBuilder[K, V], source) - } - - implicit def genericOrderedCompanionToCBF[A, CC[X] <: Traversable[X]]( - fact: GenericOrderedCompanion[CC])( - implicit ordering: Ordering[A]): CanBuildFrom[Any, A, CC[A]] = - CompatImpl.simpleCBF(fact.newBuilder[A]) - - // CanBuildFrom instances for `IterableView[(K, V), Map[K, V]]` that preserve - // the strict type of the view to be `Map` instead of `Iterable` - // Instances produced by this method are used to chain `filterKeys` after `mapValues` - implicit def canBuildFromIterableViewMapLike[K, V, L, W, CC[X, Y] <: Map[X, Y]] - : CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] = - new CanBuildFrom[IterableView[(K, V), CC[K, V]], (L, W), IterableView[(L, W), CC[L, W]]] { - // `CanBuildFrom` parameters are used as type constraints, they are not used - // at run-time, hence the dummy builder implementations - def apply(from: IterableView[(K, V), CC[K, V]]) = new TraversableView.NoBuilder - def apply() = new TraversableView.NoBuilder - } - - implicit def toTraversableLikeExtensionMethods[Repr](self: Repr)( - implicit traversable: IsTraversableLike[Repr]) - : TraversableLikeExtensionMethods[traversable.A, Repr] = - new TraversableLikeExtensionMethods[traversable.A, Repr](traversable.conversion(self)) - - implicit def toSeqExtensionMethods[A](self: c.Seq[A]): SeqExtensionMethods[A] = - new SeqExtensionMethods[A](self) - - implicit def toTrulyTraversableLikeExtensionMethods[T1, El1, Repr1](self: T1)( - implicit w1: T1 => TraversableLike[El1, Repr1] - ): TrulyTraversableLikeExtensionMethods[El1, Repr1] = - new TrulyTraversableLikeExtensionMethods[El1, Repr1](w1(self)) - - implicit def toTuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2]( - self: Tuple2Zipped[El1, Repr1, El2, Repr2]) - : Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2] = - new Tuple2ZippedExtensionMethods[El1, Repr1, El2, Repr2](self) - - implicit def toImmutableQueueExtensionMethods[A]( - self: i.Queue[A]): ImmutableQueueExtensionMethods[A] = - new ImmutableQueueExtensionMethods[A](self) - - implicit def toMutableQueueExtensionMethods[A]( - self: m.Queue[A]): MutableQueueExtensionMethods[A] = - new MutableQueueExtensionMethods[A](self) -} \ No newline at end of file diff --git a/scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala b/scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala deleted file mode 100644 index d217b45551..0000000000 --- a/scalalib/overrides-2.13/scala/collection/compat/immutable/package.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.compat - -package object immutable { - type ArraySeq[+T] = scala.collection.immutable.ArraySeq[T] - val ArraySeq = scala.collection.immutable.ArraySeq - - type LazyList[+T] = scala.collection.immutable.LazyList[T] - val LazyList = scala.collection.immutable.LazyList -} \ No newline at end of file diff --git a/scalalib/overrides-2.13/scala/collection/compat/package.scala b/scalalib/overrides-2.13/scala/collection/compat/package.scala deleted file mode 100644 index cbebca0331..0000000000 --- a/scalalib/overrides-2.13/scala/collection/compat/package.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -package object compat { - type Factory[-A, +C] = scala.collection.Factory[A, C] - val Factory = scala.collection.Factory - - type BuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] - val BuildFrom = scala.collection.BuildFrom - - type IterableOnce[+X] = scala.collection.IterableOnce[X] - val IterableOnce = scala.collection.IterableOnce -} \ No newline at end of file From 379f32496fb739446f166646bf15183c95a4be6a Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 6 Oct 2020 21:37:09 +0200 Subject: [PATCH 31/75] javalib Streams adapted to not use collections-compat --- build.sbt | 3 --- .../scalanative/compat/ScalaStream.scala | 21 +++++++++++++++++++ .../scalanative/compat/ScalaStream.scala | 20 ++++++++++++++++++ .../scalanative/compat/ScalaStream.scala | 21 +++++++++++++++++++ .../main/scala/java/io/BufferedReader.scala | 2 +- .../src/main/scala/java/nio/file/Files.scala | 9 ++++---- .../main/scala/java/util/regex/Pattern.scala | 4 ++-- .../main/scala/java/util/stream/Stream.scala | 4 ++-- .../java/util/stream/WrappedScalaStream.scala | 6 +++--- .../scalanative/compat/StreamsCompat.scala | 17 +++++++++++++++ 10 files changed, 91 insertions(+), 16 deletions(-) create mode 100644 javalib/src/main/scala-2.11/scala/scalanative/compat/ScalaStream.scala create mode 100644 javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala create mode 100644 javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala create mode 100644 javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala diff --git a/build.sbt b/build.sbt index 867bb55a93..7a3bea2daf 100644 --- a/build.sbt +++ b/build.sbt @@ -407,9 +407,6 @@ lazy val javalib = .enablePlugins(MyScalaNativePlugin) .settings(mavenPublishSettings) .settings( - libraryDependencies ++= Seq( - collectionsCompatLib - ) ++ parallelCollectionsLib(scalaVersion.value), Compile / doc / sources := Nil, // doc generation currently broken // This is required to have incremental compilation to work in javalib. 
// We put our classes on scalac's `javabootclasspath` so that it uses them diff --git a/javalib/src/main/scala-2.11/scala/scalanative/compat/ScalaStream.scala b/javalib/src/main/scala-2.11/scala/scalanative/compat/ScalaStream.scala new file mode 100644 index 0000000000..37d6b61e66 --- /dev/null +++ b/javalib/src/main/scala-2.11/scala/scalanative/compat/ScalaStream.scala @@ -0,0 +1,21 @@ +package scala.scalanative.compat + +import java.util.stream.WrappedScalaStream +import scala.collection.immutable +import scala.language.implicitConversions + +private[scalanative] object ScalaStream { + type Underlying[T] = immutable.Stream[T] + val Underlying = immutable.Stream + + implicit class ScalaStreamImpl[T](val underyling: Underlying[T]) + extends AnyVal { + def wrappedStream(closeHanlder: Option[Runnable] = None) = + new WrappedScalaStream[T](underyling, closeHanlder) + } + + implicit def seqToScalaStream[T](seq: Iterable[T]): Underlying[T] = { + seq.to[Underlying] + } + +} diff --git a/javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala b/javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala new file mode 100644 index 0000000000..054d96476e --- /dev/null +++ b/javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala @@ -0,0 +1,20 @@ +package scala.scalanative.compat + +import java.util.stream.WrappedScalaStream +import scala.collection.immutable +import scala.language.implicitConversions + +private[scalanative] object ScalaStream { + type Underlying[T] = immutable.Stream[T] + val Underlying = immutable.Stream + + implicit class ScalaStreamImpl[T](val underyling: Underlying[T]) + extends AnyVal { + def wrappedStream(closeHanlder: Option[Runnable] = None) = + new WrappedScalaStream[T](underyling, closeHanlder) + } + + implicit def seqToScalaStream[T](seq: Iterable[T]): Underlying[T] = { + seq.to[Underlying] + } +} diff --git a/javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala b/javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala new file mode 100644 index 0000000000..2915dd9c66 --- /dev/null +++ b/javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala @@ -0,0 +1,21 @@ +package scala.scalanative.compat + +import java.util.stream.WrappedScalaStream +import scala.collection.immutable +import scala.language.implicitConversions + +private[scalanative] object ScalaStream { + type Underlying[T] = immutable.LazyList[T] + val Underlying = immutable.LazyList + + implicit class ScalaStreamImpl[T](val underyling: Underlying[T]) + extends AnyVal { + def wrappedStream(closeHanlder: Option[Runnable] = None) = + new WrappedScalaStream[T](underyling, closeHanlder) + } + + implicit def seqToScalaStream[T](seq: Iterable[T]): Underlying[T] = { + seq.to(Underlying) + } + +} diff --git a/javalib/src/main/scala/java/io/BufferedReader.scala b/javalib/src/main/scala/java/io/BufferedReader.scala index b2007eb067..93026c90f1 100644 --- a/javalib/src/main/scala/java/io/BufferedReader.scala +++ b/javalib/src/main/scala/java/io/BufferedReader.scala @@ -1,6 +1,6 @@ package java.io -import scala.collection.compat.immutable.{LazyList => SStream} +import scala.scalanative.compat.StreamsCompat._ import java.util.stream.{Stream, WrappedScalaStream} class BufferedReader(in: Reader, sz: Int) extends Reader { diff --git a/javalib/src/main/scala/java/nio/file/Files.scala b/javalib/src/main/scala/java/nio/file/Files.scala index 2b05169ecf..d20e30532e 100644 --- a/javalib/src/main/scala/java/nio/file/Files.scala +++ 
b/javalib/src/main/scala/java/nio/file/Files.scala @@ -36,7 +36,7 @@ import scalanative.libc._ import scalanative.posix.{dirent, fcntl, limits, unistd}, dirent._ import scalanative.posix.sys.stat import scalanative.nio.fs.{FileHelpers, UnixException} -import scala.collection.compat.immutable.{LazyList => SStream} +import scalanative.compat.StreamsCompat._ import scala.collection.immutable.{Map => SMap, Set => SSet} import StandardCopyOption._ @@ -258,7 +258,7 @@ object Files { matcher.test(p, attributes) } - new WrappedScalaStream(stream.to(SStream), None) + new WrappedScalaStream(stream, None) } def getAttribute(path: Path, @@ -356,7 +356,7 @@ object Files { def list(dir: Path): Stream[Path] = new WrappedScalaStream( - FileHelpers.list(dir.toString, (n, _) => dir.resolve(n)).to(SStream), + FileHelpers.list(dir.toString, (n, _) => dir.resolve(n)).toScalaStream, None) def move(source: Path, target: Path, options: Array[CopyOption]): Path = { @@ -578,13 +578,12 @@ object Files { currentDepth: Int, options: Array[FileVisitOption], visited: SSet[Path]): SStream[Path] = { - start #:: { if (!isDirectory(start, linkOptsFromFileVisitOpts(options))) SStream.empty else { FileHelpers .list(start.toString, (n, t) => (n, t)) - .to(SStream) + .toScalaStream .flatMap { case (name: String, tpe) if tpe == DT_LNK() && options.contains( diff --git a/javalib/src/main/scala/java/util/regex/Pattern.scala b/javalib/src/main/scala/java/util/regex/Pattern.scala index dd611f50df..94a03c4e43 100644 --- a/javalib/src/main/scala/java/util/regex/Pattern.scala +++ b/javalib/src/main/scala/java/util/regex/Pattern.scala @@ -6,7 +6,7 @@ import scalanative.{regex => snRegex} import java.util.function.Predicate import java.util.stream.Stream import java.util.stream.WrappedScalaStream -import scala.collection.compat.immutable.LazyList +import scala.scalanative.compat.StreamsCompat._ // Inspired & informed by: // https://github.com/google/re2j/blob/master/java/com/google/re2j/Pattern.java @@ -132,7 +132,7 @@ final class Pattern private[regex] (_regex: String, _flags: Int) { compiled.split(input, limit) def splitAsStream(input: CharSequence): Stream[String] = - new WrappedScalaStream(split(input).to(LazyList), None) + new WrappedScalaStream(split(input).toScalaStream, None) override def toString: String = _regex } diff --git a/javalib/src/main/scala/java/util/stream/Stream.scala b/javalib/src/main/scala/java/util/stream/Stream.scala index 946faf8acb..834ad04c79 100644 --- a/javalib/src/main/scala/java/util/stream/Stream.scala +++ b/javalib/src/main/scala/java/util/stream/Stream.scala @@ -1,7 +1,7 @@ package java.util.stream import java.util.function.{Function, Predicate} -import scala.collection.compat.immutable.{LazyList => SStream} +import scala.scalanative.compat.StreamsCompat._ trait Stream[+T] extends BaseStream[T, Stream[T]] { def flatMap[R](mapper: Function[_ >: T, _ <: Stream[_ <: R]]): Stream[R] @@ -21,5 +21,5 @@ object Stream { def builder[T](): Builder[T] = new WrappedScalaStream.Builder[T] def empty[T](): Stream[T] = new WrappedScalaStream(SStream.empty[T], None) def of[T](values: Array[AnyRef]): Stream[T] = - new WrappedScalaStream(values.asInstanceOf[Array[T]].to(SStream), None) + new WrappedScalaStream(values.asInstanceOf[Array[T]].toScalaStream, None) } diff --git a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala index 5f6fc22b6f..00d90c5681 100644 --- a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala +++ 
b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala @@ -1,7 +1,7 @@ package java.util.stream import java.util.Iterator -import scala.collection.compat.immutable.{LazyList => SStream} +import scalanative.compat.StreamsCompat._ import java.util.function.{Function, Predicate} class WrappedScalaStream[T](private val underlying: SStream[T], @@ -19,7 +19,7 @@ class WrappedScalaStream[T](private val underlying: SStream[T], override def flatMap[R]( mapper: Function[_ >: T, _ <: Stream[_ <: R]]): Stream[R] = { - val streams: Seq[Stream[R]] = underlying.map(v => mapper(v)) + val streams = underlying.map(v => mapper(v).asInstanceOf[Stream[R]]) new CompositeStream(streams, closeHandler) } @@ -32,7 +32,7 @@ object WrappedScalaStream { val buffer = new scala.collection.mutable.ListBuffer[T]() override def accept(t: T): Unit = buffer += t override def build(): Stream[T] = - new WrappedScalaStream(buffer.to(SStream), None) + new WrappedScalaStream(buffer.toScalaStream, None) } def scala2javaIterator[T]( diff --git a/javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala b/javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala new file mode 100644 index 0000000000..a74f72582d --- /dev/null +++ b/javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala @@ -0,0 +1,17 @@ +package scala.scalanative.compat +import scala.language.implicitConversions + +object StreamsCompat { + type SStream[T] = scalanative.compat.ScalaStream.Underlying[T] + val SStreamImpl = scalanative.compat.ScalaStream + val SStream = SStreamImpl.Underlying + + implicit class ArrayToScalaStream[T](val arr: Array[T]) extends AnyVal { + def toScalaStream: SStream[T] = SStreamImpl.seqToScalaStream[T](arr) + } + + implicit class IterableToScalaStream[T](val seq: Iterable[T]) extends AnyVal { + def toScalaStream: SStream[T] = SStreamImpl.seqToScalaStream[T](seq) + } + +} From f06d440d420576446a6530f86dc6d5f61985e6db Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 6 Oct 2020 21:56:51 +0200 Subject: [PATCH 32/75] cleanup --- build.sbt | 9 +-------- .../scala/scala/scalanative/nscplugin/NirGenPhase.scala | 1 - 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/build.sbt b/build.sbt index 7a3bea2daf..b850603c4e 100644 --- a/build.sbt +++ b/build.sbt @@ -13,14 +13,6 @@ def projectName(project: sbt.ResolvedProject): String = { convertCamelKebab(project.id) } -def parallelCollectionsDependencies(scalaVersion: String): Seq[ModuleID] = { - CrossVersion.partialVersion(scalaVersion) match { - case Some((2, n)) if n >= 13 => - Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0") - case _ => Nil - } -} - // Provide consistent project name pattern. 
lazy val nameSettings: Seq[Setting[_]] = Seq( name := projectName(thisProject.value) // Maven @@ -612,6 +604,7 @@ lazy val tests = ) .settings(noPublishSettings) .settings( + libraryDependencies += collectionsCompatLib, // nativeOptimizerReporter := OptimizerReporter.toDirectory( // crossTarget.value), // nativeLinkerReporter := LinkerReporter.toFile( diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala index 83d35cc4af..6045bb5cbe 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala @@ -9,7 +9,6 @@ import scala.scalanative.nir._ import scala.scalanative.util.ScopedVar.scoped import scala.tools.nsc.plugins._ import scala.tools.nsc.{util => _, _} -import scala.jdk.CollectionConverters._ abstract class NirGenPhase[G <: NscGlobal](val global: G) extends PluginComponent From c3e94fa220fb9efff37960cfaad1d9a5c9509b56 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 7 Oct 2020 12:44:14 +0200 Subject: [PATCH 33/75] build.sbt cleanup & fixes --- build.sbt | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/build.sbt b/build.sbt index b850603c4e..89aadea9df 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,7 @@ import java.io.File.pathSeparator import scala.collection.mutable import scala.util.Try + import build.ScalaVersions._ // Convert "SomeName" to "some-name". @@ -210,7 +211,6 @@ lazy val toolSettings: Seq[Setting[_]] = Def.settings( sbtVersion := sbt10Version, crossSbtVersions := List(sbt10Version), - scalaVersion := sbt10ScalaVersion, crossScalaVersions := Seq(sbt10ScalaVersion), javacOptions ++= Seq("-encoding", "utf8") ) @@ -252,7 +252,6 @@ lazy val nirparser = .settings(toolSettings) .settings(noPublishSettings) .settings( - crossScalaVersions := Seq(sbt10ScalaVersion), libraryDependencies ++= Seq( "com.lihaoyi" %% "fastparse" % "1.0.0", "com.lihaoyi" %% "scalaparse" % "1.0.0", @@ -441,7 +440,7 @@ lazy val scalalib = .in(file("scalalib")) .enablePlugins(MyScalaNativePlugin) .settings( - // This build uses Scala 2.11 version 2.11.12 to compile + // This build uses Scala 2.11 to compile // what appears to be 2.11.0 sources. This yields 114 // deprecations. Editing those sources is not an option (long story), // so do not spend compile time looking for the deprecations. 
@@ -605,10 +604,6 @@ lazy val tests = .settings(noPublishSettings) .settings( libraryDependencies += collectionsCompatLib, - // nativeOptimizerReporter := OptimizerReporter.toDirectory( - // crossTarget.value), - // nativeLinkerReporter := LinkerReporter.toFile( - // target.value / "out.dot"), testFrameworks ++= Seq( new TestFramework("tests.NativeFramework"), new TestFramework("com.novocode.junit.JUnitFramework") @@ -638,10 +633,6 @@ lazy val sandbox = .enablePlugins(MyScalaNativePlugin) .settings(scalacOptions -= "-Xfatal-warnings") .settings(noPublishSettings) - .settings( - // nativeOptimizerReporter := OptimizerReporter.toDirectory( - // crossTarget.value), - ) .dependsOn(nscplugin % "plugin", allCoreLibs, testInterface % Test) lazy val testingCompilerInterface = @@ -753,7 +744,8 @@ lazy val junitTestOutputsNative = Test / scalacOptions ++= { val jar = (junitPlugin / Compile / packageBin).value Seq(s"-Xplugin:$jar") - } + }, + libraryDependencies += collectionsCompatLib ) .dependsOn( nscplugin % "plugin", From 970cdbb5ee7454a12269f362edd64598aa7357b8 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 7 Oct 2020 12:47:45 +0200 Subject: [PATCH 34/75] Prevent compiling tests with wrong scala version --- build.sbt | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/build.sbt b/build.sbt index 89aadea9df..cde361da1e 100644 --- a/build.sbt +++ b/build.sbt @@ -59,16 +59,8 @@ inThisBuild( addCommandAlias( "test-all", Seq( - "sandbox/run", - "testRunner/test", - "testInterface/test", - "tools/test", - "tests/test", - "nirparser/test", - "sbtScalaNative/scripted", - "tools/mimaReportBinaryIssues", - "junitTestOutputsJVM/test", - "junitTestOutputsNative/test" + "test-tools", + "test-runtime" ).mkString(";") ) @@ -210,6 +202,7 @@ lazy val noPublishSettings: Seq[Setting[_]] = Seq( lazy val toolSettings: Seq[Setting[_]] = Def.settings( sbtVersion := sbt10Version, + scalaVersion := sbt10ScalaVersion, crossSbtVersions := List(sbt10Version), crossScalaVersions := Seq(sbt10ScalaVersion), javacOptions ++= Seq("-encoding", "utf8") @@ -780,6 +773,8 @@ lazy val junitAsyncJVM = project .in(file("junit-async/jvm")) .settings( + scalaVersion := sbt10ScalaVersion, + crossScalaVersions := Seq(sbt10ScalaVersion), nameSettings, publishArtifact := false ) From 234de4a73bfacca44cf9559ba3a11accbc7a1de6 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 13 Oct 2020 21:04:28 +0200 Subject: [PATCH 35/75] Replaced usage of deprecated usage of mutable.ListMap with immutable variant --- .../scalanative/nir/serialization/BinarySerializer.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala index a10c310b22..72b89cb49d 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala @@ -5,6 +5,7 @@ package serialization import java.net.URI import java.io.{DataOutputStream, OutputStream} import java.nio.charset.StandardCharsets +import scala.collection.immutable.ListMap import scala.collection.mutable import scala.scalanative.nir.serialization.{Tags => T} @@ -13,7 +14,7 @@ final class BinarySerializer { private[this] val buffer = new DataOutputStream(bufferUnderyling) private[this] var lastPosition: Position = Position.NoPosition - private[this] val fileIndexMap = mutable.ListMap.empty[URI, Int] 
+ private[this] var fileIndexMap = ListMap.empty[URI, Int] // Methods were renamed in order to not pollute git blame history. // Original implementation used ByteBuffers @@ -568,9 +569,9 @@ final class BinarySerializer { def initFile(pos: Position): Unit = { val file = pos.source if (pos.isDefined) - fileIndexMap.getOrElseUpdate(file, { + fileIndexMap.getOrElse(file, { filesList += file.toString - fileIndexMap.size + fileIndexMap = fileIndexMap.updated(file, filesList.size) }) } defns.foreach { From cd4cda038e5898a098414952e024d0be097b0f34 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 13 Oct 2020 21:05:42 +0200 Subject: [PATCH 36/75] after rebase fixes --- build.sbt | 3 ++- .../serialization/BinaryDeserializer.scala | 13 +++---------- .../nir/serialization/BinarySerializer.scala | 4 +++- .../scalanative/nscplugin/NirGenExpr.scala | 19 +++++++++++-------- 4 files changed, 19 insertions(+), 20 deletions(-) diff --git a/build.sbt b/build.sbt index cde361da1e..ba65ac3e1b 100644 --- a/build.sbt +++ b/build.sbt @@ -60,7 +60,8 @@ addCommandAlias( "test-all", Seq( "test-tools", - "test-runtime" + "test-runtime", + "test-scripted" ).mkString(";") ) diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala index b9eb7621be..b496ed0e36 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala @@ -6,9 +6,7 @@ import java.net.URI import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import scala.collection.mutable -import scala.collection.compat.immutable.ArraySeq import scala.scalanative.nir.serialization.{Tags => T} -import scala.scalanative.util.StringUtils final class BinaryDeserializer(buffer: ByteBuffer) { import buffer._ @@ -282,14 +280,9 @@ final class BinaryDeserializer(buffer: ByteBuffer) { case T.DoubleVal => Val.Double(getDouble) case T.StructValueVal => Val.StructValue(getVals()) case T.ArrayValueVal => Val.ArrayValue(getType(), getVals()) - case T.CharsVal => - Val.Chars { - ArraySeq.unsafeWrapArray { - getBytes() - } - } - case T.LocalVal => Val.Local(getLocal(), getType()) - case T.GlobalVal => Val.Global(getGlobal(), getType()) + case T.CharsVal => Val.Chars(getBytes().toIndexedSeq) + case T.LocalVal => Val.Local(getLocal(), getType()) + case T.GlobalVal => Val.Global(getGlobal(), getType()) case T.UnitVal => Val.Unit case T.ConstVal => Val.Const(getVal()) diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala index 72b89cb49d..d5d5b11f5b 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala @@ -570,8 +570,10 @@ final class BinarySerializer { val file = pos.source if (pos.isDefined) fileIndexMap.getOrElse(file, { + val idx = filesList.size filesList += file.toString - fileIndexMap = fileIndexMap.updated(file, filesList.size) + fileIndexMap = fileIndexMap.updated(file, idx) + idx }) } defns.foreach { diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index 5076ee1e5d..8171c0e622 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ 
b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -209,13 +209,13 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => def genMatch(m: Match): Val = { val Match(scrutp, allcaseps) = m - type Case = (Local, Val, Tree) + type Case = (Local, Val, Tree, nir.Position) // Extract switch cases and assign unique names to them. val caseps: Seq[Case] = allcaseps.flatMap { case CaseDef(Ident(nme.WILDCARD), _, _) => Seq() - case CaseDef(pat, guard, body) => + case cd @ CaseDef(pat, guard, body) => assert(guard.isEmpty, "CaseDef guard was not empty") val vals: Seq[Val] = pat match { case lit: Literal => @@ -227,7 +227,8 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => case _ => Nil } - vals.map((fresh(), _, body)) + val pos: nir.Position = cd.pos + vals.map((fresh(), _, body, pos)) } // Extract default case. @@ -241,7 +242,7 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => // Generate code for the switch and its cases. def genSwitch(): Val = { // Generate some more fresh names and types. - val casenexts = caseps.map { case (n, v, _) => Next.Case(v, n) } + val casenexts = caseps.map { case (n, v, _,_) => Next.Case(v, n) } val defaultnext = Next(fresh()) val merge = fresh() val mergev = Val.Local(fresh(), retty) @@ -254,10 +255,10 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => buf.label(defaultnext.name)(defaultp.pos) buf.jump(merge, Seq(genExpr(defaultp)))(defaultp.pos) caseps.foreach { - case (n, _, expr) => - buf.label(n)(expr.pos) + case (n, _, expr, pos) => + buf.label(n)(pos) val caseres = genExpr(expr) - buf.jump(merge, Seq(caseres)) + buf.jump(merge, Seq(caseres))(pos) } buf.label(merge, Seq(mergev)) mergev @@ -266,7 +267,9 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => def genIfsChain(): Val = { def loop(cases: List[Case]): Val = { cases match { - case (_, caze, body) :: elsep => + case (_, caze, body, p) :: elsep => + implicit val pos: nir.Position = p + val cond = buf.genClassEquality(leftp = ValTree(scrut), rightp = ValTree(caze), From d2ff2f7f9836b14d026c5aaebeb9cc3f65ef09d3 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 14 Oct 2020 19:48:18 +0200 Subject: [PATCH 37/75] Move logger to `util` module in order to allow cross testing testRunner --- build.sbt | 5 +---- .../src/main/scala/scala/scalanative/build/Logger.scala | 1 - 2 files changed, 1 insertion(+), 5 deletions(-) rename {tools => util}/src/main/scala/scala/scalanative/build/Logger.scala (99%) diff --git a/build.sbt b/build.sbt index ba65ac3e1b..2e582c5c20 100644 --- a/build.sbt +++ b/build.sbt @@ -680,7 +680,6 @@ lazy val testInterfaceSbtDefs = lazy val testRunner = project .in(file("test-runner")) - .settings(toolSettings) .settings(mavenPublishSettings) .settings(testInterfaceCommonSourcesSettings) .settings( @@ -689,7 +688,7 @@ lazy val testRunner = "com.novocode" % "junit-interface" % "0.11" % "test" ) ) - .dependsOn(tools, junitAsyncJVM % "test") + .dependsOn(util, junitAsyncJVM % "test") // JUnit modules and settings ------------------------------------------------ @@ -774,8 +773,6 @@ lazy val junitAsyncJVM = project .in(file("junit-async/jvm")) .settings( - scalaVersion := sbt10ScalaVersion, - crossScalaVersions := Seq(sbt10ScalaVersion), nameSettings, publishArtifact := false ) diff --git a/tools/src/main/scala/scala/scalanative/build/Logger.scala b/util/src/main/scala/scala/scalanative/build/Logger.scala similarity index 99% rename from tools/src/main/scala/scala/scalanative/build/Logger.scala rename to 
util/src/main/scala/scala/scalanative/build/Logger.scala index 6f6c9d671c..b2201d4fe1 100644 --- a/tools/src/main/scala/scala/scalanative/build/Logger.scala +++ b/util/src/main/scala/scala/scalanative/build/Logger.scala @@ -1,7 +1,6 @@ package scala.scalanative.build import java.lang.System.{err, out, lineSeparator => nl} - import scala.sys.process.ProcessLogger /** Interface to report and/or collect messages given by the toolchain. */ From a0d9fd127badd72ae4e49cbec4abff35ed1ea0b0 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 14 Oct 2020 19:51:15 +0200 Subject: [PATCH 38/75] Build fixes --- build.sbt | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/build.sbt b/build.sbt index 2e582c5c20..e0d332557e 100644 --- a/build.sbt +++ b/build.sbt @@ -68,11 +68,11 @@ addCommandAlias( addCommandAlias( "test-tools", Seq( - "testRunner/test", - "testInterface/test", - "tools/test", "nirparser/test", - "tools/mimaReportBinaryIssues" + "tools/test", + "tools/mimaReportBinaryIssues", + "testRunner/test", + "testInterface/test" ).mkString(";") ) @@ -357,9 +357,16 @@ lazy val sbtScalaNative = testRunner / publishLocal ) .value - } + }, + /* Unmanaged dependencies were used instead of dependsOn(testRunner) in order remove errors (invalid version suffix) + * and allow to cross-build junitTestOutputs. We also need to add testRunner dependencies */ + Compile / unmanagedSourceDirectories ++= Seq( + (testRunner / Compile / scalaSource).value, + baseDirectory.value.getParentFile / "test-interface-common/src/main/scala" + ), + libraryDependencies += collectionsCompatLib ) - .dependsOn(tools, testRunner) + .dependsOn(tools) lazy val nativelib = project @@ -686,9 +693,12 @@ lazy val testRunner = libraryDependencies ++= Seq( "org.scala-sbt" % "test-interface" % "1.0", "com.novocode" % "junit-interface" % "0.11" % "test" - ) + ), + // Fix for invalid version suffix in cross-build, see sbtScalaNative comment + Compile / unmanagedSourceDirectories += (util / Compile / scalaSource).value, + libraryDependencies += collectionsCompatLib ) - .dependsOn(util, junitAsyncJVM % "test") + .dependsOn(junitAsyncJVM % "test") // JUnit modules and settings ------------------------------------------------ From 41edd883da522b67764b35a552cf17ceca4ad558 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 22 Oct 2020 12:52:06 +0200 Subject: [PATCH 39/75] After rebase fix: Port changes tests to JUnit --- unit-tests/src/test/scala/java/util/ArrayListTest.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unit-tests/src/test/scala/java/util/ArrayListTest.scala b/unit-tests/src/test/scala/java/util/ArrayListTest.scala index 344fe39165..5a0d8f8469 100644 --- a/unit-tests/src/test/scala/java/util/ArrayListTest.scala +++ b/unit-tests/src/test/scala/java/util/ArrayListTest.scala @@ -179,7 +179,8 @@ class ArrayListTest { // Discovering code in re2s ExecTestSuite used .deep not sameElements. // Should have same result as sameElements, but via different path. 
- assertTrue("a1.toArray.deep != data.deep", arr1.deep == data.deep) + assertTrue("a1.toArray.deep != data.deep", + Arrays.deepEquals(arr1, data.asInstanceOf[Array[AnyRef]])) } @Test def toArrayArrayWhenArrayIsShorter(): Unit = { From 16e39e7eab8e9ede9ed062e17940e90b28302db9 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Fri, 6 Nov 2020 12:55:06 +0100 Subject: [PATCH 40/75] After rebase fix --- unit-tests/src/test/scala/java/nio/file/PathTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unit-tests/src/test/scala/java/nio/file/PathTest.scala b/unit-tests/src/test/scala/java/nio/file/PathTest.scala index d11ac372b6..6a50ec4713 100644 --- a/unit-tests/src/test/scala/java/nio/file/PathTest.scala +++ b/unit-tests/src/test/scala/java/nio/file/PathTest.scala @@ -120,7 +120,7 @@ class PathTest { import scala.collection.mutable.UnrolledBuffer val buf = new UnrolledBuffer[T]() while (it.hasNext) buf += it.next() - buf + buf.toSeq } assertTrue(Paths.get("").iterator.map(_.toString) == Seq("")) From 395cbd6c3eb0f14fcc9d779db7217c2208cb8eb8 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Fri, 6 Nov 2020 14:13:10 +0100 Subject: [PATCH 41/75] Revert not strictly needed build.sbt changes for 2.13 support --- build.sbt | 32 ++++++++----------- .../scala/scalanative/build/Logger.scala | 0 2 files changed, 13 insertions(+), 19 deletions(-) rename {util => tools}/src/main/scala/scala/scalanative/build/Logger.scala (100%) diff --git a/build.sbt b/build.sbt index e0d332557e..78e6b74495 100644 --- a/build.sbt +++ b/build.sbt @@ -68,11 +68,11 @@ addCommandAlias( addCommandAlias( "test-tools", Seq( - "nirparser/test", - "tools/test", - "tools/mimaReportBinaryIssues", "testRunner/test", - "testInterface/test" + "testInterface/test", + "tools/test", + "nirparser/test", + "tools/mimaReportBinaryIssues" ).mkString(";") ) @@ -329,7 +329,6 @@ lazy val sbtScalaNative = .enablePlugins(SbtPlugin) .settings(sbtPluginSettings) .settings( - scalaVersion := sbt10ScalaVersion, crossScalaVersions := Seq(sbt10ScalaVersion), addSbtPlugin("org.portable-scala" % "sbt-platform-deps" % "1.0.0"), sbtTestDirectory := (ThisBuild / baseDirectory).value / "scripted-tests", @@ -357,16 +356,9 @@ lazy val sbtScalaNative = testRunner / publishLocal ) .value - }, - /* Unmanaged dependencies were used instead of dependsOn(testRunner) in order remove errors (invalid version suffix) - * and allow to cross-build junitTestOutputs. 
We also need to add testRunner dependencies */ - Compile / unmanagedSourceDirectories ++= Seq( - (testRunner / Compile / scalaSource).value, - baseDirectory.value.getParentFile / "test-interface-common/src/main/scala" - ), - libraryDependencies += collectionsCompatLib + } ) - .dependsOn(tools) + .dependsOn(tools, testRunner) lazy val nativelib = project @@ -687,18 +679,16 @@ lazy val testInterfaceSbtDefs = lazy val testRunner = project .in(file("test-runner")) + .settings(toolSettings) .settings(mavenPublishSettings) .settings(testInterfaceCommonSourcesSettings) .settings( libraryDependencies ++= Seq( "org.scala-sbt" % "test-interface" % "1.0", "com.novocode" % "junit-interface" % "0.11" % "test" - ), - // Fix for invalid version suffix in cross-build, see sbtScalaNative comment - Compile / unmanagedSourceDirectories += (util / Compile / scalaSource).value, - libraryDependencies += collectionsCompatLib + ) ) - .dependsOn(junitAsyncJVM % "test") + .dependsOn(tools, junitAsyncJVM % "test") // JUnit modules and settings ------------------------------------------------ @@ -762,6 +752,8 @@ lazy val junitTestOutputsJVM = .in(file("junit-test/output-jvm")) .settings( commonJUnitTestOutputsSettings, + crossScalaVersions := Seq(sbt10ScalaVersion), + scalaVersion := sbt10ScalaVersion, libraryDependencies ++= Seq( "com.novocode" % "junit-interface" % "0.11" % "test", collectionsCompatLib @@ -783,6 +775,8 @@ lazy val junitAsyncJVM = project .in(file("junit-async/jvm")) .settings( + crossScalaVersions := Seq(sbt10ScalaVersion), + scalaVersion := sbt10ScalaVersion, nameSettings, publishArtifact := false ) diff --git a/util/src/main/scala/scala/scalanative/build/Logger.scala b/tools/src/main/scala/scala/scalanative/build/Logger.scala similarity index 100% rename from util/src/main/scala/scala/scalanative/build/Logger.scala rename to tools/src/main/scala/scala/scalanative/build/Logger.scala From c14c0a5ed68da712b8f9029bab1683c951f60daf Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 16:35:54 +0100 Subject: [PATCH 42/75] Set default scala version 2.12 in build --- build.sbt | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 78e6b74495..29fe624b95 100644 --- a/build.sbt +++ b/build.sbt @@ -42,7 +42,7 @@ inThisBuild( Def.settings( organization := "org.scala-native", // Maven version := nativeVersion, // Maven - scalaVersion := scala213, + scalaVersion := scala212, crossScalaVersions := libCrossScalaVersions, scalacOptions ++= Seq( "-deprecation", @@ -203,7 +203,6 @@ lazy val noPublishSettings: Seq[Setting[_]] = Seq( lazy val toolSettings: Seq[Setting[_]] = Def.settings( sbtVersion := sbt10Version, - scalaVersion := sbt10ScalaVersion, crossSbtVersions := List(sbt10Version), crossScalaVersions := Seq(sbt10ScalaVersion), javacOptions ++= Seq("-encoding", "utf8") @@ -753,7 +752,6 @@ lazy val junitTestOutputsJVM = .settings( commonJUnitTestOutputsSettings, crossScalaVersions := Seq(sbt10ScalaVersion), - scalaVersion := sbt10ScalaVersion, libraryDependencies ++= Seq( "com.novocode" % "junit-interface" % "0.11" % "test", collectionsCompatLib @@ -776,7 +774,6 @@ lazy val junitAsyncJVM = .in(file("junit-async/jvm")) .settings( crossScalaVersions := Seq(sbt10ScalaVersion), - scalaVersion := sbt10ScalaVersion, nameSettings, publishArtifact := false ) From 5ebdc1a64e9adefed6c0025131739a0d90c03914 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 17:35:06 +0100 Subject: [PATCH 43/75] SocketImpl methods close and shutdownInput/Output have 
empty param arguments list --- javalib/src/main/scala/java/net/PlainSocketImpl.scala | 6 +++--- javalib/src/main/scala/java/net/ServerSocket.scala | 2 +- javalib/src/main/scala/java/net/Socket.scala | 6 +++--- javalib/src/main/scala/java/net/SocketImpl.scala | 6 +++--- javalib/src/main/scala/java/net/SocketInputStream.scala | 2 +- javalib/src/main/scala/java/net/SocketOutputStream.scala | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/javalib/src/main/scala/java/net/PlainSocketImpl.scala b/javalib/src/main/scala/java/net/PlainSocketImpl.scala index 318968acd1..289979c7a9 100644 --- a/javalib/src/main/scala/java/net/PlainSocketImpl.scala +++ b/javalib/src/main/scala/java/net/PlainSocketImpl.scala @@ -319,7 +319,7 @@ private[net] class PlainSocketImpl extends SocketImpl { } } - override def close: Unit = { + override def close(): Unit = { if (fd.fd != -1) { cClose(fd.fd) fd = new FileDescriptor @@ -346,7 +346,7 @@ private[net] class PlainSocketImpl extends SocketImpl { new SocketInputStream(this) } - override def shutdownOutput: Unit = { + override def shutdownOutput(): Unit = { socket.shutdown(fd.fd, 1) match { case 0 => shutOutput = true case _ => @@ -354,7 +354,7 @@ private[net] class PlainSocketImpl extends SocketImpl { } } - override def shutdownInput: Unit = { + override def shutdownInput(): Unit = { socket.shutdown(fd.fd, 0) match { case 0 => shutInput = true case _ => diff --git a/javalib/src/main/scala/java/net/ServerSocket.scala b/javalib/src/main/scala/java/net/ServerSocket.scala index a00eb0f473..8bbe51941d 100644 --- a/javalib/src/main/scala/java/net/ServerSocket.scala +++ b/javalib/src/main/scala/java/net/ServerSocket.scala @@ -135,7 +135,7 @@ class ServerSocket(private var port: Int, } override def close(): Unit = { - impl.close + impl.close() closed = true } diff --git a/javalib/src/main/scala/java/net/Socket.scala b/javalib/src/main/scala/java/net/Socket.scala index 1bfb57fc48..e2b25eaee0 100644 --- a/javalib/src/main/scala/java/net/Socket.scala +++ b/javalib/src/main/scala/java/net/Socket.scala @@ -270,18 +270,18 @@ class Socket protected (private[net] val impl: SocketImpl, } def shutdownInput(): Unit = { - impl.shutdownInput + impl.shutdownInput() inputShutdown = true } def shutdownOutput(): Unit = { - impl.shutdownOutput + impl.shutdownOutput() outputShutdown = true } override def close(): Unit = { closed = true - impl.close + impl.close() } // def setPerformancePreferences(connectionTime: Int, latency: Int, bandwith: Int): Unit diff --git a/javalib/src/main/scala/java/net/SocketImpl.scala b/javalib/src/main/scala/java/net/SocketImpl.scala index 47ca7455ca..cb7cc80572 100644 --- a/javalib/src/main/scala/java/net/SocketImpl.scala +++ b/javalib/src/main/scala/java/net/SocketImpl.scala @@ -13,7 +13,7 @@ abstract class SocketImpl extends SocketOptions { protected[net] def accept(s: SocketImpl): Unit protected[net] def available: Int protected[net] def bind(host: InetAddress, port: Int): Unit - protected[net] def close: Unit + protected[net] def close(): Unit protected[net] def connect(address: InetAddress, port: Int): Unit protected[net] def connect(address: SocketAddress, timeout: Int): Unit protected[net] def connect(host: String, port: Int): Unit @@ -27,8 +27,8 @@ abstract class SocketImpl extends SocketOptions { protected[net] def listen(backlog: Int): Unit //protected[net] def sendUrgentData(data: Int): Unit //protected[net] def setPerformancePreferences(connectionTime: Int, latency: Int, bandwith: Int): Unit - protected[net] def shutdownInput: Unit 
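[Illustrative aside, not part of the patch] The declarations above and their call sites are aligned on empty parameter lists because Scala 2.13 deprecates auto-applying a Scala-defined `def f(): Unit` as bare `f`; keeping `close()`, `shutdownInput()` and `shutdownOutput()` consistent in both the declarations and the callers avoids those warnings. A rough sketch, assuming a 2.13 compiler with deprecation warnings enabled and a made-up `Resource` class:

  class Resource {
    def close(): Unit = ()  // declared with an explicit empty parameter list
  }

  val r = new Resource
  r.close() // fine in both 2.12 and 2.13
  r.close   // 2.13 emits an auto-application deprecation warning; 2.12 accepted this silently

The hunk continues below with the matching `shutdownOutput` change.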
- protected[net] def shutdownOutput: Unit + protected[net] def shutdownInput(): Unit + protected[net] def shutdownOutput(): Unit //protected[net] def supportsUrgentData: Boolean override def toString: String = diff --git a/javalib/src/main/scala/java/net/SocketInputStream.scala b/javalib/src/main/scala/java/net/SocketInputStream.scala index 80fb5fe248..20c4090add 100644 --- a/javalib/src/main/scala/java/net/SocketInputStream.scala +++ b/javalib/src/main/scala/java/net/SocketInputStream.scala @@ -6,7 +6,7 @@ import java.io.InputStream private[net] class SocketInputStream(socket: PlainSocketImpl) extends InputStream { - override def close(): Unit = socket.close + override def close(): Unit = socket.close() override def available(): Int = socket.available diff --git a/javalib/src/main/scala/java/net/SocketOutputStream.scala b/javalib/src/main/scala/java/net/SocketOutputStream.scala index 0c1eec70b4..91f08e72e8 100644 --- a/javalib/src/main/scala/java/net/SocketOutputStream.scala +++ b/javalib/src/main/scala/java/net/SocketOutputStream.scala @@ -7,7 +7,7 @@ private[net] class SocketOutputStream(socket: PlainSocketImpl) extends OutputStream { override def close(): Unit = { - socket.close + socket.close() } override def write(b: Array[Byte]) = { From 478d606deadba2c6fdc906bdf69f105139ca0c75 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 17:43:53 +0100 Subject: [PATCH 44/75] Applied styling fixes --- .../java/lang/AbstractStringBuilder.scala | 2 +- .../main/scala/java/lang/ProcessBuilder.scala | 20 +++++++++---------- .../main/scala/java/lang/UnixProcess.scala | 2 +- .../src/main/scala/java/math/BigDecimal.scala | 4 ++-- .../src/main/scala/java/net/InetAddress.scala | 2 +- javalib/src/main/scala/java/net/URI.scala | 16 ++++++++------- .../src/main/scala/java/nio/file/Files.scala | 17 ++++++++-------- .../main/scala/java/util/AbstractMap.scala | 4 ++-- .../src/main/scala/java/util/TreeSet.scala | 4 ++-- .../scala/java/util/zip/GZIPInputStream.scala | 2 +- .../scala/java/util/zip/ZipOutputStream.scala | 2 +- .../nscplugin/NirDefinitions.scala | 1 + .../scalanative/nscplugin/NirGenExpr.scala | 5 ++--- 13 files changed, 41 insertions(+), 40 deletions(-) diff --git a/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala b/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala index b9dc0165de..adc3e6fcb0 100644 --- a/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala +++ b/javalib/src/main/scala/java/lang/AbstractStringBuilder.scala @@ -202,7 +202,7 @@ abstract class AbstractStringBuilder private (unit: Unit) { System.arraycopy(value, start, dest, destStart, end - start) } - final def insert0(index: scala.Int, chars: Array[Char]) = { + final def insert0(index: scala.Int, chars: Array[Char]): Unit = { if (0 > index || index > count) { throw new StringIndexOutOfBoundsException(index) } diff --git a/javalib/src/main/scala/java/lang/ProcessBuilder.scala b/javalib/src/main/scala/java/lang/ProcessBuilder.scala index 9ca14598ac..6704ff5b59 100644 --- a/javalib/src/main/scala/java/lang/ProcessBuilder.scala +++ b/javalib/src/main/scala/java/lang/ProcessBuilder.scala @@ -18,10 +18,10 @@ final class ProcessBuilder(private var _command: List[String]) { def command(): List[String] = _command def command(command: Array[String]): ProcessBuilder = - set { _command = Arrays.asList(command); () } + set { _command = Arrays.asList(command) } def command(command: List[String]): ProcessBuilder = set { - _command = command; () + _command = command } def environment(): Map[String, 
String] = _environment @@ -33,7 +33,7 @@ final class ProcessBuilder(private var _command: List[String]) { _directory = dir match { case null => defaultDirectory case _ => dir - }; () + } } def inheritIO(): ProcessBuilder = { @@ -43,38 +43,38 @@ final class ProcessBuilder(private var _command: List[String]) { } def redirectError(destination: Redirect): ProcessBuilder = destination match { - case null => set { _redirectOutput = Redirect.PIPE; () } + case null => set { _redirectOutput = Redirect.PIPE } case d => d.`type`() match { case Redirect.Type.READ => throw new IllegalArgumentException( s"Redirect.READ cannot be used for error.") case _ => - set { _redirectError = destination; () } + set { _redirectError = destination } } } def redirectInput(source: Redirect): ProcessBuilder = source match { - case null => set { _redirectInput = Redirect.PIPE; () } + case null => set { _redirectInput = Redirect.PIPE } case s => s.`type`() match { case Redirect.Type.WRITE | Redirect.Type.APPEND => throw new IllegalArgumentException(s"$s cannot be used for input.") case _ => - set { _redirectInput = source; () } + set { _redirectInput = source } } } def redirectOutput(destination: Redirect): ProcessBuilder = destination match { - case null => set { _redirectOutput = Redirect.PIPE; () } + case null => set { _redirectOutput = Redirect.PIPE } case s => s.`type`() match { case Redirect.Type.READ => throw new IllegalArgumentException( s"Redirect.READ cannot be used for output.") case _ => - set { _redirectOutput = destination; () } + set { _redirectOutput = destination } } } @@ -99,7 +99,7 @@ final class ProcessBuilder(private var _command: List[String]) { def redirectErrorStream(): scala.Boolean = _redirectErrorStream def redirectErrorStream(redirectErrorStream: scala.Boolean): ProcessBuilder = - set { _redirectErrorStream = redirectErrorStream; () } + set { _redirectErrorStream = redirectErrorStream } def start(): Process = { if (_command.isEmpty()) throw new IndexOutOfBoundsException() diff --git a/javalib/src/main/scala/java/lang/UnixProcess.scala b/javalib/src/main/scala/java/lang/UnixProcess.scala index 6d3578860e..8a237afa5d 100644 --- a/javalib/src/main/scala/java/lang/UnixProcess.scala +++ b/javalib/src/main/scala/java/lang/UnixProcess.scala @@ -213,7 +213,7 @@ object UnixProcess { } } - @inline private def nullTerminate(seq: collection.Iterable[String])( + @inline private def nullTerminate(seq: collection.Seq[String])( implicit z: Zone) = { val res = alloc[CString](seq.size + 1) seq.zipWithIndex foreach { case (s, i) => !(res + i) = toCString(s) } diff --git a/javalib/src/main/scala/java/math/BigDecimal.scala b/javalib/src/main/scala/java/math/BigDecimal.scala index 648d2139c1..e77a23c3da 100644 --- a/javalib/src/main/scala/java/math/BigDecimal.scala +++ b/javalib/src/main/scala/java/math/BigDecimal.scala @@ -1514,8 +1514,8 @@ class BigDecimal() extends Number with Comparable[BigDecimal] { override def floatValue(): Float = { /* A similar code like in doubleValue() could be repeated here, * but this simple implementation is quite efficient. 
*/ - val powerOfTwo = this._bitLength - (_scale / Log2).toLong - val floatResult0: Float = signum().toFloat + val powerOfTwo = this._bitLength - (_scale / Log2).toLong + val floatResult0 = signum().toFloat val floatResult: Float = { if (powerOfTwo < -149 || floatResult0 == 0.0f) // 'this' is very small floatResult0 * 0.0f diff --git a/javalib/src/main/scala/java/net/InetAddress.scala b/javalib/src/main/scala/java/net/InetAddress.scala index b396143781..39e9060ca4 100644 --- a/javalib/src/main/scala/java/net/InetAddress.scala +++ b/javalib/src/main/scala/java/net/InetAddress.scala @@ -584,7 +584,7 @@ class InetAddress private[net] (ipAddress: Array[Byte], if (obj == null || obj.getClass != this.getClass) { false } else { - val objIPAddress = obj.asInstanceOf[InetAddress].getAddress(); + val objIPAddress = obj.asInstanceOf[InetAddress].getAddress() objIPAddress.indices.forall(i => objIPAddress(i) == ipAddress(i)) } } diff --git a/javalib/src/main/scala/java/net/URI.scala b/javalib/src/main/scala/java/net/URI.scala index aa588c6ad2..6585311592 100644 --- a/javalib/src/main/scala/java/net/URI.scala +++ b/javalib/src/main/scala/java/net/URI.scala @@ -89,8 +89,9 @@ final class URI private () extends Comparable[URI] with Serializable { earlyStop = true } if (!earlyStop) { - if (scheme != null && path != null && path - .length() > 0 && path.charAt(0) != '/') { + if (scheme != null && path != null && + path.length() > 0 && + path.charAt(0) != '/') { throw new URISyntaxException(path, "Relative path") } val uri: StringBuilder = new StringBuilder() @@ -140,8 +141,9 @@ final class URI private () extends Comparable[URI] with Serializable { query: String, fragment: String) = { this() - if (scheme != null && path != null && path - .length() > 0 && path.charAt(0) != '/') { + if (scheme != null && path != null && + path.length() > 0 && + path.charAt(0) != '/') { throw new URISyntaxException(path, "Relative path") } val uri: StringBuilder = new StringBuilder() @@ -205,9 +207,9 @@ final class URI private () extends Comparable[URI] with Serializable { schemespecificpart = temp } - if (scheme == null || schemespecificpart - .length() > 0 && schemespecificpart - .charAt(0) == '/') { + if (scheme == null || + schemespecificpart.length() > 0 && + schemespecificpart.charAt(0) == '/') { opaque = false temp = schemespecificpart diff --git a/javalib/src/main/scala/java/nio/file/Files.scala b/javalib/src/main/scala/java/nio/file/Files.scala index d20e30532e..72291e8592 100644 --- a/javalib/src/main/scala/java/nio/file/Files.scala +++ b/javalib/src/main/scala/java/nio/file/Files.scala @@ -58,9 +58,9 @@ object Files { val out = if (!targetFile.exists() || (targetFile.isFile() && replaceExisting)) { new FileOutputStream(targetFile, append = false) - } else if (targetFile.isDirectory() && targetFile - .list() - .isEmpty && replaceExisting) { + } else if (targetFile.isDirectory() && + targetFile.list().isEmpty && + replaceExisting) { if (!targetFile.delete()) throw new IOException() new FileOutputStream(targetFile, append = false) } else { @@ -362,8 +362,8 @@ object Files { def move(source: Path, target: Path, options: Array[CopyOption]): Path = { if (!exists(source.toAbsolutePath(), Array.empty)) { throw new NoSuchFileException(source.toString) - } else if (!exists(target.toAbsolutePath(), Array.empty) || options - .contains(REPLACE_EXISTING)) { + } else if (!exists(target.toAbsolutePath(), Array.empty) || + options.contains(REPLACE_EXISTING)) { Zone { implicit z => if 
(stdio.rename(toCString(source.toAbsolutePath().toString), toCString(target.toAbsolutePath().toString)) != 0) { @@ -585,7 +585,7 @@ object Files { .list(start.toString, (n, t) => (n, t)) .toScalaStream .flatMap { - case (name: String, tpe) + case (name, tpe) if tpe == DT_LNK() && options.contains( FileVisitOption.FOLLOW_LINKS) => val path = start.resolve(name) @@ -594,15 +594,14 @@ object Files { if (newVisited.contains(target)) throw new FileSystemLoopException(path.toString) else walk(path, maxDepth, currentDepth + 1, options, newVisited) - case (name: String, tpe) - if tpe == DT_DIR() && currentDepth < maxDepth => + case (name, tpe) if tpe == DT_DIR() && currentDepth < maxDepth => val path = start.resolve(name) val newVisited = if (options.contains(FileVisitOption.FOLLOW_LINKS)) visited + path else visited walk(path, maxDepth, currentDepth + 1, options, newVisited) - case (name: String, _) => + case (name, _) => start.resolve(name) #:: SStream.empty } } diff --git a/javalib/src/main/scala/java/util/AbstractMap.scala b/javalib/src/main/scala/java/util/AbstractMap.scala index 139ca7b6e4..6662f24e3b 100644 --- a/javalib/src/main/scala/java/util/AbstractMap.scala +++ b/javalib/src/main/scala/java/util/AbstractMap.scala @@ -8,8 +8,8 @@ object AbstractMap { private def entryEquals[K, V](entry: Map.Entry[K, V], other: Any): Boolean = { other match { case other: Map.Entry[_, _] => - entry.getKey() === other.getKey() && entry.getValue() === other - .getValue() + entry.getKey() === other.getKey() && + entry.getValue() === other.getValue() case _ => false } } diff --git a/javalib/src/main/scala/java/util/TreeSet.scala b/javalib/src/main/scala/java/util/TreeSet.scala index 3c883cf5f2..154e5dcd3d 100644 --- a/javalib/src/main/scala/java/util/TreeSet.scala +++ b/javalib/src/main/scala/java/util/TreeSet.scala @@ -145,10 +145,10 @@ class TreeSet[E](_comparator: Comparator[_ >: E]) var base = new mutable.TreeSet[Box[E]] base ++= inner.range(boxedFrom, boxedTo) if (!fromInclusive) - base = base diff Set(boxedFrom) + base = base.diff(Set(boxedFrom)) if (toInclusive && inner.contains(boxedTo)) - base = base diff Set(boxedTo) + base = base.diff(Set(boxedTo)) base } diff --git a/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala b/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala index 4f970e00aa..355e24075e 100644 --- a/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala +++ b/javalib/src/main/scala/java/util/zip/GZIPInputStream.scala @@ -115,7 +115,7 @@ class GZIPInputStream(in: InputStream, size: Int) } } - private def readFully(buffer: Array[Byte], offset: Int, length: Int) = { + private def readFully(buffer: Array[Byte], offset: Int, length: Int): Unit = { var result: Int = 0 var off: Int = offset var l: Int = length diff --git a/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala b/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala index f9c123a548..ccfdc5b86d 100644 --- a/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala +++ b/javalib/src/main/scala/java/util/zip/ZipOutputStream.scala @@ -36,7 +36,7 @@ class ZipOutputStream(_out: OutputStream, charset: Charset) } } - def closeEntry() = { + def closeEntry(): Unit = { if (cDir == null) { throw new IOException() } else if (currentEntry == null) { diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala index a31fce1572..0eec73d3d6 100644 --- 
a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirDefinitions.scala @@ -229,6 +229,7 @@ trait NirDefinitions { private def mapValue[K, V1, V2](fn: V1 => V2)(in: (K, V1)): (K, V2) = (in._1, fn(in._2)) + lazy val RuntimeArrayModule: Map[Char, Symbol] = RuntimeArrayClass.map(mapValue(_.companion)) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index 8171c0e622..b9b7c47e0f 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -7,7 +7,7 @@ import scalanative.util.{StringUtils, unsupported} import scalanative.util.ScopedVar.scoped import scalanative.nscplugin.NirPrimitives._ -trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => import global._ import definitions._ import treeInfo.hasSynthCaseSymbol @@ -227,8 +227,7 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => case _ => Nil } - val pos: nir.Position = cd.pos - vals.map((fresh(), _, body, pos)) + vals.map((fresh(), _, body, cd.pos: nir.Position)) } // Extract default case. From ccc56c0e5c1b383666c589ebe4a37b686befe4cd Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 18:14:40 +0100 Subject: [PATCH 45/75] Add comment to laziness of j.l.System.systemProperties --- javalib/src/main/scala/java/lang/System.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index c823670c7b..207edb3020 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,6 +91,10 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) + /* Laziness for this val was enforced due to changes in Scala 2.13 Vector implementation + * Vector uses system property to define some of its default parameters and to allow user to tune it. 
+ * This problem exists because current implementation of java.lang.System depends on Scala collections, + * this problem should be addressed in the future */ private lazy val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = From 2b75b52368e6b416aeb8d95753bc6f49bfdf4162 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 18:52:16 +0100 Subject: [PATCH 46/75] Use mutable HashMpa in BinarySerializer to store indexes of files --- .../scalanative/nir/serialization/BinarySerializer.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala index d5d5b11f5b..a2001b3342 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala @@ -14,7 +14,7 @@ final class BinarySerializer { private[this] val buffer = new DataOutputStream(bufferUnderyling) private[this] var lastPosition: Position = Position.NoPosition - private[this] var fileIndexMap = ListMap.empty[URI, Int] + private[this] val fileIndexMap = mutable.Map.empty[URI, Int] // Methods were renamed in order to not pollute git blame history. // Original implementation used ByteBuffers @@ -569,10 +569,9 @@ final class BinarySerializer { def initFile(pos: Position): Unit = { val file = pos.source if (pos.isDefined) - fileIndexMap.getOrElse(file, { + fileIndexMap.getOrElseUpdate(file, { val idx = filesList.size filesList += file.toString - fileIndexMap = fileIndexMap.updated(file, idx) idx }) } From ea76b3496e1884f2399eac86d0fe8725adea63e8 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 19:01:45 +0100 Subject: [PATCH 47/75] Revert to using genLiteralValue directly instead of genExpr --- .../main/scala/scala/scalanative/nscplugin/NirGenExpr.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index b9b7c47e0f..54704f59f7 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -219,10 +219,10 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => assert(guard.isEmpty, "CaseDef guard was not empty") val vals: Seq[Val] = pat match { case lit: Literal => - List(genExpr(lit)) + List(genLiteralValue(lit)) case Alternative(alts) => alts.map { - case lit: Literal => genExpr(lit) + case lit: Literal => genLiteralValue(lit) } case _ => Nil From 7c5da5fe5ec9f02dac8b00cab0e48900c326bc62 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 19:08:33 +0100 Subject: [PATCH 48/75] Revert removed scalalib overrides --- scalalib/overrides-2.11/scala/Predef.scala | 518 ++++++++++++++ scalalib/overrides-2.11/scala/package.scala | 133 ++++ scalalib/overrides-2.12/scala/Predef.scala | 646 ++++++++++++++++++ scalalib/overrides-2.12/scala/package.scala | 136 ++++ .../scala/runtime/ScalaRunTime.scala | 267 ++++++++ 5 files changed, 1700 insertions(+) create mode 100644 scalalib/overrides-2.11/scala/Predef.scala create mode 100644 scalalib/overrides-2.11/scala/package.scala create mode 100644 scalalib/overrides-2.12/scala/Predef.scala create mode 100644 
scalalib/overrides-2.12/scala/package.scala create mode 100644 scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala diff --git a/scalalib/overrides-2.11/scala/Predef.scala b/scalalib/overrides-2.11/scala/Predef.scala new file mode 100644 index 0000000000..a8a831a75a --- /dev/null +++ b/scalalib/overrides-2.11/scala/Predef.scala @@ -0,0 +1,518 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala + +import scala.collection.{ mutable, immutable, generic } +import immutable.StringOps +import mutable.ArrayOps +import generic.CanBuildFrom +import scala.annotation.{ elidable, implicitNotFound } +import scala.annotation.elidable.ASSERTION +import scala.language.{implicitConversions, existentials} +import scala.io.StdIn +import scala.scalanative.annotation.alwaysinline + +/** The `Predef` object provides definitions that are accessible in all Scala + * compilation units without explicit qualification. + * + * === Commonly Used Types === + * Predef provides type aliases for types which are commonly used, such as + * the immutable collection types [[scala.collection.immutable.Map]], + * [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]] + * constructors ([[scala.collection.immutable.::]] and + * [[scala.collection.immutable.Nil]]). + * + * === Console I/O === + * Predef provides a number of simple functions for console I/O, such as + * `print`, `println`, `readLine`, `readInt`, etc. These functions are all + * aliases of the functions provided by [[scala.Console]]. + * + * === Assertions === + * + * A set of `assert` functions are provided for use as a way to document + * and dynamically check invariants in code. Invocations of `assert` can be elided + * at compile time by providing the command line option `-Xdisable-assertions`, + * which raises `-Xelide-below` above `elidable.ASSERTION`, to the `scalac` command. + * + * Variants of `assert` intended for use with static analysis tools are also + * provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are + * intended for use as a means of design-by-contract style specification + * of pre- and post-conditions on functions, with the intention that these + * specifications could be consumed by a static analysis tool. For instance, + * + * {{{ + * def addNaturals(nats: List[Int]): Int = { + * require(nats forall (_ >= 0), "List contains negative numbers") + * nats.foldLeft(0)(_ + _) + * } ensuring(_ >= 0) + * }}} + * + * The declaration of `addNaturals` states that the list of integers passed should + * only contain natural numbers (i.e. non-negative), and that the result returned + * will also be natural. `require` is distinct from `assert` in that if the + * condition fails, then the caller of the function is to blame rather than a + * logical error having been made within `addNaturals` itself. `ensuring` is a + * form of `assert` that declares the guarantee the function is providing with + * regards to its return value. + * + * === Implicit Conversions === + * A number of commonly applied implicit conversions are also defined here, and + * in the parent type [[scala.LowPriorityImplicits]]. Implicit conversions + * are provided for the "widening" of numeric values, for instance, converting a + * Short value to a Long value as required, and to add additional higher-order + * functions to Array values. 
These are described in more detail in the documentation of [[scala.Array]]. + */ +object Predef extends LowPriorityImplicits with DeprecatedPredef { + /** + * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to + * the class literal `T.class` in Java. + * + * @example {{{ + * val listClass = classOf[List[_]] + * // listClass is java.lang.Class[List[_]] = class scala.collection.immutable.List + * + * val mapIntString = classOf[Map[Int,String]] + * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map + * }}} + */ + def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. + + /** The `String` type in Scala has methods that come either from the underlying + * Java String (see the documentation corresponding to your Java version, for + * example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or + * are added implicitly through [[scala.collection.immutable.StringOps]]. + */ + type String = java.lang.String + type Class[T] = java.lang.Class[T] + + // miscellaneous ----------------------------------------------------- + scala.`package` // to force scala package object to be seen. + scala.collection.immutable.List // to force Nil, :: to be seen. + + type Function[-A, +B] = Function1[A, B] + + type Map[A, +B] = immutable.Map[A, B] + type Set[A] = immutable.Set[A] + @inline def Map = immutable.Map + @inline def Set = immutable.Set + + // Manifest types, companions, and incantations for summoning + @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") + @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") + type ClassManifest[T] = scala.reflect.ClassManifest[T] + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + type OptManifest[T] = scala.reflect.OptManifest[T] + @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + type Manifest[T] = scala.reflect.Manifest[T] + @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") + @inline def ClassManifest = scala.reflect.ClassManifest + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + @inline def Manifest = scala.reflect.Manifest + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + @inline def NoManifest = scala.reflect.NoManifest + + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("Use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") + @inline def manifest[T](implicit m: Manifest[T]) = m + @deprecated("Use scala.reflect.classTag[T] instead", "2.10.0") + @inline def classManifest[T](implicit m: ClassManifest[T]) = m + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + @inline def optManifest[T](implicit m: OptManifest[T]) = m + + // Minor variations on identity functions + @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version + @alwaysinline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` + @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + // errors and asserts ------------------------------------------------- + + // !!! Remove this when possible - ideally for 2.11. + // We are stuck with it a while longer because sbt's compiler interface + // still calls it as of 0.12.2. + @deprecated("Use `sys.error(message)` instead", "2.9.0") + @inline def error(message: String): Nothing = sys.error(message) + + /** Tests an expression, throwing an `AssertionError` if false. + * Calls to this method will not be generated if `-Xelide-below` + * is at least `ASSERTION`. + * + * @see elidable + * @param assertion the expression to test + */ + @elidable(ASSERTION) + @inline def assert(assertion: Boolean) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + /** Tests an expression, throwing an `AssertionError` if false. + * Calls to this method will not be generated if `-Xelide-below` + * is at least `ASSERTION`. + * + * @see elidable + * @param assertion the expression to test + * @param message a String to include in the failure message + */ + @elidable(ASSERTION) @inline + final def assert(assertion: Boolean, message: => Any) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: "+ message) + } + + /** Tests an expression, throwing an `AssertionError` if false. + * This method differs from assert only in the intent expressed: + * assert contains a predicate which needs to be proven, while + * assume contains an axiom for a static checker. Calls to this method + * will not be generated if `-Xelide-below` is at least `ASSERTION`. + * + * @see elidable + * @param assumption the expression to test + */ + @elidable(ASSERTION) + @inline def assume(assumption: Boolean) { + if (!assumption) + throw new java.lang.AssertionError("assumption failed") + } + + /** Tests an expression, throwing an `AssertionError` if false. + * This method differs from assert only in the intent expressed: + * assert contains a predicate which needs to be proven, while + * assume contains an axiom for a static checker. 
Calls to this method + * will not be generated if `-Xelide-below` is at least `ASSERTION`. + * + * @see elidable + * @param assumption the expression to test + * @param message a String to include in the failure message + */ + @elidable(ASSERTION) @inline + final def assume(assumption: Boolean, message: => Any) { + if (!assumption) + throw new java.lang.AssertionError("assumption failed: "+ message) + } + + /** Tests an expression, throwing an `IllegalArgumentException` if false. + * This method is similar to `assert`, but blames the caller of the method + * for violating the condition. + * + * @param requirement the expression to test + */ + @inline def require(requirement: Boolean) { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + + /** Tests an expression, throwing an `IllegalArgumentException` if false. + * This method is similar to `assert`, but blames the caller of the method + * for violating the condition. + * + * @param requirement the expression to test + * @param message a String to include in the failure message + */ + @inline final def require(requirement: Boolean, message: => Any) { + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ message) + } + + /** `???` can be used for marking methods that remain to be implemented. + * @throws NotImplementedError + */ + @inline def ??? : Nothing = throw new NotImplementedError + + // tupling ------------------------------------------------------------ + + @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0") + type Pair[+A, +B] = Tuple2[A, B] + @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0") + object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) + } + + @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0") + type Triple[+A, +B, +C] = Tuple3[A, B, C] + @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0") + object Triple { + def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) + def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) + } + + // implicit classes ----------------------------------------------------- + + implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { + @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y) + def →[B](y: B): Tuple2[A, B] = ->(y) + } + + implicit final class Ensuring[A](private val self: A) extends AnyVal { + def ensuring(cond: Boolean): A = { assert(cond); self } + def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self } + def ensuring(cond: A => Boolean): A = { assert(cond(self)); self } + def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self } + } + + implicit final class StringFormat[A](private val self: A) extends AnyVal { + /** Returns string formatted according to given `format` string. + * Format strings are as for `String.format` + * (@see java.lang.String.format). 
+ */ + @inline def formatted(fmtstr: String): String = fmtstr format self + } + + // TODO: remove, only needed for binary compatibility of 2.11.0-RC1 with 2.11.0-M8 + // note that `private[scala]` becomes `public` in bytecode + private[scala] final class StringAdd[A](private val self: A) extends AnyVal { + def +(other: String): String = String.valueOf(self) + other + } + private[scala] def StringAdd(x: Any): Any = new StringAdd(x) + + // SI-8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit + implicit final class any2stringadd[A](private val self: A) extends AnyVal { + def +(other: String): String = String.valueOf(self) + other + } + + implicit final class RichException(private val self: Throwable) extends AnyVal { + import scala.compat.Platform.EOL + @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) + } + + implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { + def length: Int = __sequenceOfChars.length + def charAt(index: Int): Char = __sequenceOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) + override def toString = __sequenceOfChars mkString "" + } + + implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence { + def length: Int = __arrayOfChars.length + def charAt(index: Int): Char = __arrayOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) + override def toString = __arrayOfChars mkString "" + } + + private object StringCanBuildFromInstance extends CanBuildFrom[String, Char, String] { + def apply(from: String) = apply() + def apply() = mutable.StringBuilder.newBuilder + } + + @inline implicit def StringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFromInstance + + @inline implicit def augmentString(x: String): StringOps = new StringOps(x) + @inline implicit def unaugmentString(x: StringOps): String = x.repr + + // printing ----------------------------------------------------------- + + @inline def print(x: Any) = Console.print(x) + @inline def println() = Console.println() + @inline def println(x: Any) = Console.println(x) + @inline def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) + + // views -------------------------------------------------------------- + + @inline implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x) + @inline implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x) + + @inline implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { + case x: Array[AnyRef] => refArrayOps[AnyRef](x) + case x: Array[Boolean] => booleanArrayOps(x) + case x: Array[Byte] => byteArrayOps(x) + case x: Array[Char] => charArrayOps(x) + case x: Array[Double] => doubleArrayOps(x) + case x: Array[Float] => floatArrayOps(x) + case x: Array[Int] => intArrayOps(x) + case x: Array[Long] => longArrayOps(x) + case x: Array[Short] => shortArrayOps(x) + case x: Array[Unit] => unitArrayOps(x) + case null => null + }).asInstanceOf[ArrayOps[T]] + + @inline implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps.ofBoolean(xs) + @inline implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs) + @inline implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new 
ArrayOps.ofChar(xs) + @inline implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs) + @inline implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs) + @inline implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs) + @inline implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs) + @inline implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs) + @inline implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs) + @inline implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs) + + // "Autoboxing" and "Autounboxing" --------------------------------------------------- + + @inline implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x) + @inline implicit def short2Short(x: Short) = java.lang.Short.valueOf(x) + @inline implicit def char2Character(x: Char) = java.lang.Character.valueOf(x) + @inline implicit def int2Integer(x: Int) = java.lang.Integer.valueOf(x) + @inline implicit def long2Long(x: Long) = java.lang.Long.valueOf(x) + @inline implicit def float2Float(x: Float) = java.lang.Float.valueOf(x) + @inline implicit def double2Double(x: Double) = java.lang.Double.valueOf(x) + @inline implicit def boolean2Boolean(x: Boolean) = java.lang.Boolean.valueOf(x) + + @inline implicit def Byte2byte(x: java.lang.Byte): Byte = x.byteValue + @inline implicit def Short2short(x: java.lang.Short): Short = x.shortValue + @inline implicit def Character2char(x: java.lang.Character): Char = x.charValue + @inline implicit def Integer2int(x: java.lang.Integer): Int = x.intValue + @inline implicit def Long2long(x: java.lang.Long): Long = x.longValue + @inline implicit def Float2float(x: java.lang.Float): Float = x.floatValue + @inline implicit def Double2double(x: java.lang.Double): Double = x.doubleValue + @inline implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue + + // Type Constraints -------------------------------------------------------------- + + /** + * An instance of `A <:< B` witnesses that `A` is a subtype of `B`. + * Requiring an implicit argument of the type `A <:< B` encodes + * the generalized constraint `A <: B`. + * + * @note we need a new type constructor `<:<` and evidence `conforms`, + * as reusing `Function1` and `identity` leads to ambiguities in + * case of type errors (`any2stringadd` is inferred) + * + * To constrain any abstract type T that's in scope in a method's + * argument list (not just the method's own type parameters) simply + * add an implicit argument of type `T <:< U`, where `U` is the required + * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the + * required lower bound. + * + * In part contributed by Jason Zaugg. + */ + @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") + sealed abstract class <:<[-From, +To] extends (From => To) with Serializable + private[this] lazy val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x } + // The dollar prefix is to dodge accidental shadowing of this method + // by a user-defined method of the same name (SI-7788). + // The collections rely on this method. + @inline implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] + + @deprecated("Use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") + def conforms[A]: A <:< A = $conforms[A] + + /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. 
+ * + * @see `<:<` for expressing subtyping constraints + */ + @implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") + sealed abstract class =:=[From, To] extends (From => To) with Serializable + private[this] lazy val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x } + object =:= { + @inline implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A] + } + + /** A type for which there is always an implicit value. + * @see [[scala.Array$]], method `fallbackCanBuildFrom` + */ + class DummyImplicit + + object DummyImplicit { + + /** An implicit value yielding a `DummyImplicit`. + * @see [[scala.Array$]], method `fallbackCanBuildFrom` + */ + @inline implicit def dummyImplicit: DummyImplicit = new DummyImplicit + } +} + +private[scala] trait DeprecatedPredef { + self: Predef.type => + + // Deprecated stubs for any who may have been calling these methods directly. + @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) + @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) + @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) + @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) + @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) + @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) + + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) + @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) +} + +/** The `LowPriorityImplicits` class provides implicit values that +* are valid in all Scala compilation units without explicit qualification, +* but that are partially overridden by higher-priority conversions in object +* `Predef`. 
+* +* @author Martin Odersky +* @since 2.8 +*/ +// SI-7335 Parents of Predef are defined in the same compilation unit to avoid +// cyclic reference errors compiling the standard library *without* a previously +// compiled copy on the classpath. +private[scala] abstract class LowPriorityImplicits { + import mutable.WrappedArray + import immutable.WrappedString + + /** We prefer the java.lang.* boxed types to these wrappers in + * any potential conflicts. Conflicts do exist because the wrappers + * need to implement ScalaNumber in order to have a symmetric equals + * method, but that implies implementing java.lang.Number as well. + * + * Note - these are inlined because they are value classes, but + * the call to xxxWrapper is not eliminated even though it does nothing. + * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ + * because maybe loading Predef has side effects! + */ + @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) + @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) + @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) + @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) + @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) + @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) + @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) + @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) + + @inline implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = + if (xs eq null) null + else WrappedArray.make(xs) + + // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] + // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 + // unique ones by way of this implicit, let's share one. 
+ @inline implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { + if (xs eq null) null + else if (xs.length == 0) WrappedArray.empty[T] + else new WrappedArray.ofRef[T](xs) + } + + @inline implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null + @inline implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null + @inline implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null + @inline implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null + @inline implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null + @inline implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null + @inline implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null + @inline implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null + @inline implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null + + @inline implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null + @inline implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null + + @inline implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = + new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { + def apply(from: String) = immutable.IndexedSeq.newBuilder[T] + def apply() = immutable.IndexedSeq.newBuilder[T] + } +} diff --git a/scalalib/overrides-2.11/scala/package.scala b/scalalib/overrides-2.11/scala/package.scala new file mode 100644 index 0000000000..41db14e080 --- /dev/null +++ b/scalalib/overrides-2.11/scala/package.scala @@ -0,0 +1,133 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + +/** + * Core Scala types. They are always available without an explicit import. + * @contentDiagram hideNodes "scala.Serializable" + */ +package object scala { + type Throwable = java.lang.Throwable + type Exception = java.lang.Exception + type Error = java.lang.Error + + type RuntimeException = java.lang.RuntimeException + type NullPointerException = java.lang.NullPointerException + type ClassCastException = java.lang.ClassCastException + type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException + type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException + type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException + type UnsupportedOperationException = java.lang.UnsupportedOperationException + type IllegalArgumentException = java.lang.IllegalArgumentException + type NoSuchElementException = java.util.NoSuchElementException + type NumberFormatException = java.lang.NumberFormatException + type AbstractMethodError = java.lang.AbstractMethodError + type InterruptedException = java.lang.InterruptedException + + // A dummy used by the specialization annotation. 
+ lazy val AnyRef = new Specializable { + override def toString = "object AnyRef" + } + + type TraversableOnce[+A] = scala.collection.TraversableOnce[A] + + type Traversable[+A] = scala.collection.Traversable[A] + lazy val Traversable = scala.collection.Traversable + + type Iterable[+A] = scala.collection.Iterable[A] + lazy val Iterable = scala.collection.Iterable + + type Seq[+A] = scala.collection.Seq[A] + lazy val Seq = scala.collection.Seq + + type IndexedSeq[+A] = scala.collection.IndexedSeq[A] + lazy val IndexedSeq = scala.collection.IndexedSeq + + type Iterator[+A] = scala.collection.Iterator[A] + lazy val Iterator = scala.collection.Iterator + + type BufferedIterator[+A] = scala.collection.BufferedIterator[A] + + type List[+A] = scala.collection.immutable.List[A] + lazy val List = scala.collection.immutable.List + + lazy val Nil = scala.collection.immutable.Nil + + type ::[A] = scala.collection.immutable.::[A] + lazy val :: = scala.collection.immutable.:: + + lazy val +: = scala.collection.+: + lazy val :+ = scala.collection.:+ + + type Stream[+A] = scala.collection.immutable.Stream[A] + lazy val Stream = scala.collection.immutable.Stream + lazy val #:: = scala.collection.immutable.Stream.#:: + + type Vector[+A] = scala.collection.immutable.Vector[A] + lazy val Vector = scala.collection.immutable.Vector + + type StringBuilder = scala.collection.mutable.StringBuilder + lazy val StringBuilder = scala.collection.mutable.StringBuilder + + type Range = scala.collection.immutable.Range + lazy val Range = scala.collection.immutable.Range + + // Numeric types which were moved into scala.math.* + + type BigDecimal = scala.math.BigDecimal + lazy val BigDecimal = scala.math.BigDecimal + + type BigInt = scala.math.BigInt + lazy val BigInt = scala.math.BigInt + + type Equiv[T] = scala.math.Equiv[T] + lazy val Equiv = scala.math.Equiv + + type Fractional[T] = scala.math.Fractional[T] + lazy val Fractional = scala.math.Fractional + + type Integral[T] = scala.math.Integral[T] + lazy val Integral = scala.math.Integral + + type Numeric[T] = scala.math.Numeric[T] + lazy val Numeric = scala.math.Numeric + + type Ordered[T] = scala.math.Ordered[T] + lazy val Ordered = scala.math.Ordered + + type Ordering[T] = scala.math.Ordering[T] + lazy val Ordering = scala.math.Ordering + + type PartialOrdering[T] = scala.math.PartialOrdering[T] + type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T] + + type Either[+A, +B] = scala.util.Either[A, B] + lazy val Either = scala.util.Either + + type Left[+A, +B] = scala.util.Left[A, B] + lazy val Left = scala.util.Left + + type Right[+A, +B] = scala.util.Right[A, B] + lazy val Right = scala.util.Right + + // Annotations which we might move to annotation.* +/* + type SerialVersionUID = annotation.SerialVersionUID + type deprecated = annotation.deprecated + type deprecatedName = annotation.deprecatedName + type inline = annotation.inline + type native = annotation.native + type noinline = annotation.noinline + type remote = annotation.remote + type specialized = annotation.specialized + type transient = annotation.transient + type throws = annotation.throws + type unchecked = annotation.unchecked.unchecked + type volatile = annotation.volatile + */ +} diff --git a/scalalib/overrides-2.12/scala/Predef.scala b/scalalib/overrides-2.12/scala/Predef.scala new file mode 100644 index 0000000000..90b49e892a --- /dev/null +++ b/scalalib/overrides-2.12/scala/Predef.scala @@ -0,0 +1,646 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +import scala.language.implicitConversions + +import scala.collection.{ mutable, immutable, generic } +import immutable.StringOps +import mutable.ArrayOps +import generic.CanBuildFrom +import scala.annotation.{ elidable, implicitNotFound } +import scala.annotation.elidable.ASSERTION +import scala.io.StdIn + +/** The `Predef` object provides definitions that are accessible in all Scala + * compilation units without explicit qualification. + * + * === Commonly Used Types === + * Predef provides type aliases for types which are commonly used, such as + * the immutable collection types [[scala.collection.immutable.Map]], + * [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]] + * constructors ([[scala.collection.immutable.::]] and + * [[scala.collection.immutable.Nil]]). + * + * === Console Output === + * For basic console output, `Predef` provides convenience methods [[print(x:Any* print]] and [[println(x:Any* println]], + * which are aliases of the methods in the object [[scala.Console]]. + * + * === Assertions === + * A set of `assert` functions are provided for use as a way to document + * and dynamically check invariants in code. Invocations of `assert` can be elided + * at compile time by providing the command line option `-Xdisable-assertions`, + * which raises `-Xelide-below` above `elidable.ASSERTION`, to the `scalac` command. + * + * Variants of `assert` intended for use with static analysis tools are also + * provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are + * intended for use as a means of design-by-contract style specification + * of pre- and post-conditions on functions, with the intention that these + * specifications could be consumed by a static analysis tool. For instance, + * + * {{{ + * def addNaturals(nats: List[Int]): Int = { + * require(nats forall (_ >= 0), "List contains negative numbers") + * nats.foldLeft(0)(_ + _) + * } ensuring(_ >= 0) + * }}} + * + * The declaration of `addNaturals` states that the list of integers passed should + * only contain natural numbers (i.e. non-negative), and that the result returned + * will also be natural. `require` is distinct from `assert` in that if the + * condition fails, then the caller of the function is to blame rather than a + * logical error having been made within `addNaturals` itself. `ensuring` is a + * form of `assert` that declares the guarantee the function is providing with + * regards to its return value. + * + * === Implicit Conversions === + * A number of commonly applied implicit conversions are also defined here, and + * in the parent type [[scala.LowPriorityImplicits]]. Implicit conversions + * are provided for the "widening" of numeric values, for instance, converting a + * Short value to a Long value as required, and to add additional higher-order + * functions to Array values. These are described in more detail in the documentation of [[scala.Array]]. + * + * @groupname utilities Utility Methods + * @groupprio utilities 10 + * + * @groupname assertions Assertions + * @groupprio assertions 20 + * @groupdesc assertions These methods support program verification and runtime correctness. 
+ * + * @groupname console-output Console Output + * @groupprio console-output 30 + * @groupdesc console-output These methods provide output via the console. + * + * @groupname type-constraints Type Constraints + * @groupprio type-constraints 40 + * @groupdesc type-constraints These entities allows constraints between types to be stipulated. + * + * @groupname aliases Aliases + * @groupprio aliases 50 + * @groupdesc aliases These aliases bring selected immutable types into scope without any imports. + * + * @groupname conversions-string String Conversions + * @groupprio conversions-string 60 + * @groupdesc conversions-string Conversions to and from String and StringOps. + * + * @groupname implicit-classes-any Implicit Classes + * @groupprio implicit-classes-any 70 + * @groupdesc implicit-classes-any These implicit classes add useful extension methods to every type. + * + * @groupname implicit-classes-char CharSequence Conversions + * @groupprio implicit-classes-char 80 + * @groupdesc implicit-classes-char These implicit classes add CharSequence methods to Array[Char] and IndexedSeq[Char] instances. + * + * @groupname conversions-java-to-anyval Java to Scala + * @groupprio conversions-java-to-anyval 90 + * @groupdesc conversions-java-to-anyval Implicit conversion from Java primitive wrapper types to Scala equivalents. + * + * @groupname conversions-anyval-to-java Scala to Java + * @groupprio conversions-anyval-to-java 100 + * @groupdesc conversions-anyval-to-java Implicit conversion from Scala AnyVals to Java primitive wrapper types equivalents. + * + * @groupname conversions-array-to-wrapped-array Array to WrappedArray + * @groupprio conversions-array-to-wrapped-array 110 + * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to WrappedArrays. + */ +object Predef extends LowPriorityImplicits with DeprecatedPredef { + /** + * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to + * the class literal `T.class` in Java. + * + * @example {{{ + * val listClass = classOf[List[_]] + * // listClass is java.lang.Class[List[_]] = class scala.collection.immutable.List + * + * val mapIntString = classOf[Map[Int,String]] + * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map + * }}} + * @group utilities + */ + def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. + + /** The `String` type in Scala has methods that come either from the underlying + * Java String (see the documentation corresponding to your Java version, for + * example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or + * are added implicitly through [[scala.collection.immutable.StringOps]]. + * @group aliases + */ + type String = java.lang.String + /** @group aliases */ + type Class[T] = java.lang.Class[T] + + // miscellaneous ----------------------------------------------------- + scala.`package` // to force scala package object to be seen. + scala.collection.immutable.List // to force Nil, :: to be seen. 
+ + /** @group aliases */ + type Function[-A, +B] = Function1[A, B] + + /** @group aliases */ + type Map[A, +B] = immutable.Map[A, B] + /** @group aliases */ + type Set[A] = immutable.Set[A] + /** @group aliases */ + @inline def Map = immutable.Map + /** @group aliases */ + @inline def Set = immutable.Set + + // Manifest types, companions, and incantations for summoning + @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") + @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") + type ClassManifest[T] = scala.reflect.ClassManifest[T] + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + type OptManifest[T] = scala.reflect.OptManifest[T] + @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + type Manifest[T] = scala.reflect.Manifest[T] + @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") + @inline def ClassManifest = scala.reflect.ClassManifest + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + @inline def Manifest = scala.reflect.Manifest + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + @inline def NoManifest = scala.reflect.NoManifest + + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") + def manifest[T](implicit m: Manifest[T]) = m + @deprecated("use scala.reflect.classTag[T] instead", "2.10.0") + def classManifest[T](implicit m: ClassManifest[T]) = m + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + def optManifest[T](implicit m: OptManifest[T]) = m + + // Minor variations on identity functions + /** @group utilities */ + @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version + /** @group utilities */ + @inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` + /** @group utilities */ + @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + // assertions --------------------------------------------------------- + + /** Tests an expression, throwing an `AssertionError` if false. 
+ * Calls to this method will not be generated if `-Xelide-below` + * is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assertion the expression to test + * @group assertions + */ + @elidable(ASSERTION) + def assert(assertion: Boolean) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + /** Tests an expression, throwing an `AssertionError` if false. + * Calls to this method will not be generated if `-Xelide-below` + * is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assertion the expression to test + * @param message a String to include in the failure message + * @group assertions + */ + @elidable(ASSERTION) @inline + final def assert(assertion: Boolean, message: => Any) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: "+ message) + } + + /** Tests an expression, throwing an `AssertionError` if false. + * This method differs from assert only in the intent expressed: + * assert contains a predicate which needs to be proven, while + * assume contains an axiom for a static checker. Calls to this method + * will not be generated if `-Xelide-below` is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assumption the expression to test + * @group assertions + */ + @elidable(ASSERTION) + def assume(assumption: Boolean) { + if (!assumption) + throw new java.lang.AssertionError("assumption failed") + } + + /** Tests an expression, throwing an `AssertionError` if false. + * This method differs from assert only in the intent expressed: + * assert contains a predicate which needs to be proven, while + * assume contains an axiom for a static checker. Calls to this method + * will not be generated if `-Xelide-below` is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assumption the expression to test + * @param message a String to include in the failure message + * @group assertions + */ + @elidable(ASSERTION) @inline + final def assume(assumption: Boolean, message: => Any) { + if (!assumption) + throw new java.lang.AssertionError("assumption failed: "+ message) + } + + /** Tests an expression, throwing an `IllegalArgumentException` if false. + * This method is similar to `assert`, but blames the caller of the method + * for violating the condition. + * + * @param requirement the expression to test + * @group assertions + */ + def require(requirement: Boolean) { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + + /** Tests an expression, throwing an `IllegalArgumentException` if false. + * This method is similar to `assert`, but blames the caller of the method + * for violating the condition. + * + * @param requirement the expression to test + * @param message a String to include in the failure message + * @group assertions + */ + @inline final def require(requirement: Boolean, message: => Any) { + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ message) + } + + /** `???` can be used for marking methods that remain to be implemented. + * @throws NotImplementedError + * @group utilities + */ + def ??? 
: Nothing = throw new NotImplementedError + + // tupling ------------------------------------------------------------ + + @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") + type Pair[+A, +B] = Tuple2[A, B] + @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") + object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) + } + + @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") + type Triple[+A, +B, +C] = Tuple3[A, B, C] + @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") + object Triple { + def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) + def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) + } + + // implicit classes ----------------------------------------------------- + + /** @group implicit-classes-any */ + implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { + @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y) + def →[B](y: B): Tuple2[A, B] = ->(y) + } + + /** @group implicit-classes-any */ + implicit final class Ensuring[A](private val self: A) extends AnyVal { + def ensuring(cond: Boolean): A = { assert(cond); self } + def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self } + def ensuring(cond: A => Boolean): A = { assert(cond(self)); self } + def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self } + } + + /** @group implicit-classes-any */ + implicit final class StringFormat[A](private val self: A) extends AnyVal { + /** Returns string formatted according to given `format` string. + * Format strings are as for `String.format` + * (@see java.lang.String.format). + */ + @inline def formatted(fmtstr: String): String = fmtstr format self + } + + // scala/bug#8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit + /** @group implicit-classes-any */ + implicit final class any2stringadd[A](private val self: A) extends AnyVal { + def +(other: String): String = String.valueOf(self) + other + } + + implicit final class RichException(private val self: Throwable) extends AnyVal { + import scala.compat.Platform.EOL + @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) + } + + // Sadly we have to do `@deprecatedName(null, "2.12.0")` because + // `@deprecatedName(since="2.12.0")` incurs a warning about + // Usage of named or default arguments transformed this annotation constructor call into a block. + // The corresponding AnnotationInfo will contain references to local values and default getters + // instead of the actual argument trees + // and `@deprecatedName(Symbol(""), "2.12.0")` crashes scalac with + // scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving object Symbol + // in run/repl-no-imports-no-predef-power.scala. 
+ /** @group implicit-classes-char */ + implicit final class SeqCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { + def length: Int = __sequenceOfChars.length + def charAt(index: Int): Char = __sequenceOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) + override def toString = __sequenceOfChars mkString "" + } + + /** @group implicit-classes-char */ + implicit final class ArrayCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __arrayOfChars: Array[Char]) extends CharSequence { + def length: Int = __arrayOfChars.length + def charAt(index: Int): Char = __arrayOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) + override def toString = __arrayOfChars mkString "" + } + + implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { + def apply(from: String) = apply() + def apply() = mutable.StringBuilder.newBuilder + } + + /** @group conversions-string */ + @inline implicit def augmentString(x: String): StringOps = new StringOps(x) + /** @group conversions-string */ + @inline implicit def unaugmentString(x: StringOps): String = x.repr + + // printing ----------------------------------------------------------- + + /** Prints an object to `out` using its `toString` method. + * + * @param x the object to print; may be null. + * @group console-output + */ + def print(x: Any) = Console.print(x) + + /** Prints a newline character on the default output. + * @group console-output + */ + def println() = Console.println() + + /** Prints out an object to the default output, followed by a newline character. + * + * @param x the object to print. + * @group console-output + */ + def println(x: Any) = Console.println(x) + + /** Prints its arguments as a formatted string to the default output, + * based on a string pattern (in a fashion similar to printf in C). + * + * The interpretation of the formatting patterns is described in + * [[java.util.Formatter]]. + * + * Consider using the [[scala.StringContext.f f interpolator]] as more type safe and idiomatic. + * + * @param text the pattern for formatting the arguments. + * @param args the arguments used to instantiating the pattern. 
+ * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments + * + * @see [[scala.StringContext.f StringContext.f]] + * @group console-output + */ + def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) + + // views -------------------------------------------------------------- + + implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x) + implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x) + + implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { + case x: Array[AnyRef] => refArrayOps[AnyRef](x) + case x: Array[Boolean] => booleanArrayOps(x) + case x: Array[Byte] => byteArrayOps(x) + case x: Array[Char] => charArrayOps(x) + case x: Array[Double] => doubleArrayOps(x) + case x: Array[Float] => floatArrayOps(x) + case x: Array[Int] => intArrayOps(x) + case x: Array[Long] => longArrayOps(x) + case x: Array[Short] => shortArrayOps(x) + case x: Array[Unit] => unitArrayOps(x) + case null => null + }).asInstanceOf[ArrayOps[T]] + + implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps.ofBoolean = new ArrayOps.ofBoolean(xs) + implicit def byteArrayOps(xs: Array[Byte]): ArrayOps.ofByte = new ArrayOps.ofByte(xs) + implicit def charArrayOps(xs: Array[Char]): ArrayOps.ofChar = new ArrayOps.ofChar(xs) + implicit def doubleArrayOps(xs: Array[Double]): ArrayOps.ofDouble = new ArrayOps.ofDouble(xs) + implicit def floatArrayOps(xs: Array[Float]): ArrayOps.ofFloat = new ArrayOps.ofFloat(xs) + implicit def intArrayOps(xs: Array[Int]): ArrayOps.ofInt = new ArrayOps.ofInt(xs) + implicit def longArrayOps(xs: Array[Long]): ArrayOps.ofLong = new ArrayOps.ofLong(xs) + implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps.ofRef[T] = new ArrayOps.ofRef[T](xs) + implicit def shortArrayOps(xs: Array[Short]): ArrayOps.ofShort = new ArrayOps.ofShort(xs) + implicit def unitArrayOps(xs: Array[Unit]): ArrayOps.ofUnit = new ArrayOps.ofUnit(xs) + + // "Autoboxing" and "Autounboxing" --------------------------------------------------- + + /** @group conversions-anyval-to-java */ + implicit def byte2Byte(x: Byte): java.lang.Byte = x.asInstanceOf[java.lang.Byte] + /** @group conversions-anyval-to-java */ + implicit def short2Short(x: Short): java.lang.Short = x.asInstanceOf[java.lang.Short] + /** @group conversions-anyval-to-java */ + implicit def char2Character(x: Char): java.lang.Character = x.asInstanceOf[java.lang.Character] + /** @group conversions-anyval-to-java */ + implicit def int2Integer(x: Int): java.lang.Integer = x.asInstanceOf[java.lang.Integer] + /** @group conversions-anyval-to-java */ + implicit def long2Long(x: Long): java.lang.Long = x.asInstanceOf[java.lang.Long] + /** @group conversions-anyval-to-java */ + implicit def float2Float(x: Float): java.lang.Float = x.asInstanceOf[java.lang.Float] + /** @group conversions-anyval-to-java */ + implicit def double2Double(x: Double): java.lang.Double = x.asInstanceOf[java.lang.Double] + /** @group conversions-anyval-to-java */ + implicit def boolean2Boolean(x: Boolean): java.lang.Boolean = x.asInstanceOf[java.lang.Boolean] + + /** @group conversions-java-to-anyval */ + implicit def Byte2byte(x: java.lang.Byte): Byte = x.asInstanceOf[Byte] + /** @group conversions-java-to-anyval */ + implicit def Short2short(x: java.lang.Short): Short = x.asInstanceOf[Short] + /** @group conversions-java-to-anyval */ + implicit def Character2char(x: java.lang.Character): Char = x.asInstanceOf[Char] + /** @group 
conversions-java-to-anyval */ + implicit def Integer2int(x: java.lang.Integer): Int = x.asInstanceOf[Int] + /** @group conversions-java-to-anyval */ + implicit def Long2long(x: java.lang.Long): Long = x.asInstanceOf[Long] + /** @group conversions-java-to-anyval */ + implicit def Float2float(x: java.lang.Float): Float = x.asInstanceOf[Float] + /** @group conversions-java-to-anyval */ + implicit def Double2double(x: java.lang.Double): Double = x.asInstanceOf[Double] + /** @group conversions-java-to-anyval */ + implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.asInstanceOf[Boolean] + + // Type Constraints -------------------------------------------------------------- + + /** + * An instance of `A <:< B` witnesses that `A` is a subtype of `B`. + * Requiring an implicit argument of the type `A <:< B` encodes + * the generalized constraint `A <: B`. + * + * @note we need a new type constructor `<:<` and evidence `conforms`, + * as reusing `Function1` and `identity` leads to ambiguities in + * case of type errors (`any2stringadd` is inferred) + * + * To constrain any abstract type T that's in scope in a method's + * argument list (not just the method's own type parameters) simply + * add an implicit argument of type `T <:< U`, where `U` is the required + * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the + * required lower bound. + * + * In part contributed by Jason Zaugg. + * @group type-constraints + */ + @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") + sealed abstract class <:<[-From, +To] extends (From => To) with Serializable + private[this] final val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x } + // The dollar prefix is to dodge accidental shadowing of this method + // by a user-defined method of the same name (scala/bug#7788). + // The collections rely on this method. + /** @group type-constraints */ + implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] + + @deprecated("use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") + def conforms[A]: A <:< A = $conforms[A] + + /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. + * + * @see `<:<` for expressing subtyping constraints + * @group type-constraints + */ + @implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") + sealed abstract class =:=[From, To] extends (From => To) with Serializable + private[this] final val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x } + /** @group type-constraints */ + object =:= { + implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A] + } + + /** A type for which there is always an implicit value. + * @see [[scala.Array$]], method `fallbackCanBuildFrom` + */ + class DummyImplicit + + object DummyImplicit { + + /** An implicit value yielding a `DummyImplicit`. + * @see [[scala.Array$]], method `fallbackCanBuildFrom` + */ + implicit def dummyImplicit: DummyImplicit = new DummyImplicit + } +} + +private[scala] trait DeprecatedPredef { + self: Predef.type => + + // Deprecated stubs for any who may have been calling these methods directly. 
+ @deprecated("use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) + @deprecated("use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) + @deprecated("use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) + @deprecated("use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) + @deprecated("use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) + @deprecated("use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) + + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) +} + +/** The `LowPriorityImplicits` class provides implicit values that +* are valid in all Scala compilation units without explicit qualification, +* but that are partially overridden by higher-priority conversions in object +* `Predef`. +* +* @author Martin Odersky +* @since 2.8 +*/ +// scala/bug#7335 Parents of Predef are defined in the same compilation unit to avoid +// cyclic reference errors compiling the standard library *without* a previously +// compiled copy on the classpath. +private[scala] abstract class LowPriorityImplicits { + import mutable.WrappedArray + import immutable.WrappedString + + /** We prefer the java.lang.* boxed types to these wrappers in + * any potential conflicts. Conflicts do exist because the wrappers + * need to implement ScalaNumber in order to have a symmetric equals + * method, but that implies implementing java.lang.Number as well. + * + * Note - these are inlined because they are value classes, but + * the call to xxxWrapper is not eliminated even though it does nothing. + * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ + * because maybe loading Predef has side effects! 
+ */ + @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) + @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) + @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) + @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) + @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) + @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) + @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) + @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) + + /** @group conversions-array-to-wrapped-array */ + implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = + if (xs eq null) null + else WrappedArray.make(xs) + + // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] + // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 + // unique ones by way of this implicit, let's share one. + /** @group conversions-array-to-wrapped-array */ + implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { + if (xs eq null) null + else if (xs.length == 0) WrappedArray.empty[T] + else new WrappedArray.ofRef[T](xs) + } + + /** @group conversions-array-to-wrapped-array */ + implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null + + /** @group conversions-string */ + implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null + /** @group conversions-string */ + implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null + + implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = + new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { + def apply(from: String) = immutable.IndexedSeq.newBuilder[T] + def apply() = immutable.IndexedSeq.newBuilder[T] + } +} diff --git a/scalalib/overrides-2.12/scala/package.scala b/scalalib/overrides-2.12/scala/package.scala new file mode 100644 index 0000000000..35f0a87ede --- /dev/null +++ b/scalalib/overrides-2.12/scala/package.scala @@ 
-0,0 +1,136 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +/** + * Core Scala types. They are always available without an explicit import. + * @contentDiagram hideNodes "scala.Serializable" + */ +package object scala { + type Throwable = java.lang.Throwable + type Exception = java.lang.Exception + type Error = java.lang.Error + + type RuntimeException = java.lang.RuntimeException + type NullPointerException = java.lang.NullPointerException + type ClassCastException = java.lang.ClassCastException + type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException + type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException + type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException + type UnsupportedOperationException = java.lang.UnsupportedOperationException + type IllegalArgumentException = java.lang.IllegalArgumentException + type NoSuchElementException = java.util.NoSuchElementException + type NumberFormatException = java.lang.NumberFormatException + type AbstractMethodError = java.lang.AbstractMethodError + type InterruptedException = java.lang.InterruptedException + + // A dummy used by the specialization annotation. + lazy val AnyRef = new Specializable { + override def toString = "object AnyRef" + } + + type TraversableOnce[+A] = scala.collection.TraversableOnce[A] + + type Traversable[+A] = scala.collection.Traversable[A] + lazy val Traversable = scala.collection.Traversable + + type Iterable[+A] = scala.collection.Iterable[A] + lazy val Iterable = scala.collection.Iterable + + type Seq[+A] = scala.collection.Seq[A] + lazy val Seq = scala.collection.Seq + + type IndexedSeq[+A] = scala.collection.IndexedSeq[A] + lazy val IndexedSeq = scala.collection.IndexedSeq + + type Iterator[+A] = scala.collection.Iterator[A] + lazy val Iterator = scala.collection.Iterator + + type BufferedIterator[+A] = scala.collection.BufferedIterator[A] + + type List[+A] = scala.collection.immutable.List[A] + lazy val List = scala.collection.immutable.List + + lazy val Nil = scala.collection.immutable.Nil + + type ::[A] = scala.collection.immutable.::[A] + lazy val :: = scala.collection.immutable.:: + + lazy val +: = scala.collection.+: + lazy val :+ = scala.collection.:+ + + type Stream[+A] = scala.collection.immutable.Stream[A] + lazy val Stream = scala.collection.immutable.Stream + lazy val #:: = scala.collection.immutable.Stream.#:: + + type Vector[+A] = scala.collection.immutable.Vector[A] + lazy val Vector = scala.collection.immutable.Vector + + type StringBuilder = scala.collection.mutable.StringBuilder + lazy val StringBuilder = scala.collection.mutable.StringBuilder + + type Range = scala.collection.immutable.Range + lazy val Range = scala.collection.immutable.Range + + // Numeric types which were moved into scala.math.* + + type BigDecimal = scala.math.BigDecimal + lazy val BigDecimal = scala.math.BigDecimal + + type BigInt = scala.math.BigInt + lazy val BigInt = scala.math.BigInt + + type Equiv[T] = scala.math.Equiv[T] + lazy val Equiv = scala.math.Equiv + + type Fractional[T] = scala.math.Fractional[T] + lazy val Fractional = scala.math.Fractional + + type Integral[T] = scala.math.Integral[T] + lazy val Integral = scala.math.Integral + + type Numeric[T] = scala.math.Numeric[T] + lazy val Numeric = 
scala.math.Numeric + + type Ordered[T] = scala.math.Ordered[T] + lazy val Ordered = scala.math.Ordered + + type Ordering[T] = scala.math.Ordering[T] + lazy val Ordering = scala.math.Ordering + + type PartialOrdering[T] = scala.math.PartialOrdering[T] + type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T] + + type Either[+A, +B] = scala.util.Either[A, B] + lazy val Either = scala.util.Either + + type Left[+A, +B] = scala.util.Left[A, B] + lazy val Left = scala.util.Left + + type Right[+A, +B] = scala.util.Right[A, B] + lazy val Right = scala.util.Right + + // Annotations which we might move to annotation.* +/* + type SerialVersionUID = annotation.SerialVersionUID + type deprecated = annotation.deprecated + type deprecatedName = annotation.deprecatedName + type inline = annotation.inline + type native = annotation.native + type noinline = annotation.noinline + type remote = annotation.remote + type specialized = annotation.specialized + type transient = annotation.transient + type throws = annotation.throws + type unchecked = annotation.unchecked.unchecked + type volatile = annotation.volatile + */ +} diff --git a/scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala b/scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala new file mode 100644 index 0000000000..40bac101cc --- /dev/null +++ b/scalalib/overrides-2.12/scala/runtime/ScalaRunTime.scala @@ -0,0 +1,267 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package runtime + +import scala.collection.{ TraversableView, AbstractIterator, GenIterable } +import scala.collection.mutable.WrappedArray +import scala.collection.immutable.{ StringLike, NumericRange } +import scala.collection.generic.{ Sorted, IsTraversableLike } +import scala.reflect.{ ClassTag, classTag } +import java.lang.{ Class => jClass } + +import java.lang.reflect.{ Method => JMethod } + +/** The object ScalaRunTime provides support methods required by + * the scala runtime. All these methods should be considered + * outside the API and subject to change or removal without notice. + */ +object ScalaRunTime { + def isArray(x: Any, atLevel: Int = 1): Boolean = + x != null && isArrayClass(x.getClass, atLevel) + + private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = + clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) + + // A helper method to make my life in the pattern matcher a lot easier. + def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr = + traversable conversion coll drop num + + /** Return the class object representing an array with element class `clazz`. + */ + def arrayClass(clazz: jClass[_]): jClass[_] = { + // newInstance throws an exception if the erasure is Void.TYPE. see scala/bug#5680 + if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] + else java.lang.reflect.Array.newInstance(clazz, 0).getClass + } + + /** Return the class object representing an unboxed value type, + * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler + * rewrites expressions like 5.getClass to come here. 
+ */ + def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = + classTag[T].runtimeClass.asInstanceOf[jClass[T]] + + /** Retrieve generic array element */ + @inline def array_apply(xs: AnyRef, idx: Int): Any = { + if (xs == null) { + throw new NullPointerException + } else { + xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].apply(idx) + } + } + + /** update generic array element */ + @inline def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { + if (xs == null) { + throw new NullPointerException + } else { + xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].update(idx, value) + } + } + + /** Get generic array length */ + def array_length(xs: AnyRef): Int = { + if (xs == null) { + throw new NullPointerException + } else { + xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].length + } + } + + def array_clone(xs: AnyRef): AnyRef = { + if (xs == null) { + throw new NullPointerException + } else { + xs.asInstanceOf[scala.scalanative.runtime.Array[Any]].clone() + } + } + + /** Convert an array to an object array. + * Needed to deal with vararg arguments of primitive types that are passed + * to a generic Java vararg parameter T ... + */ + def toObjectArray(src: AnyRef): Array[Object] = { + def copy[@specialized T <: AnyVal](src: Array[T]): Array[Object] = { + val length = src.length + if (length == 0) Array.emptyObjectArray + else { + val dest = new Array[Object](length) + var i = 0 + while (i < length) { + dest(i) = src(i).asInstanceOf[AnyRef] + i += 1 + } + dest + } + } + src match { + case x: Array[AnyRef] => x + case x: Array[Int] => copy(x) + case x: Array[Double] => copy(x) + case x: Array[Long] => copy(x) + case x: Array[Float] => copy(x) + case x: Array[Char] => copy(x) + case x: Array[Byte] => copy(x) + case x: Array[Short] => copy(x) + case x: Array[Boolean] => copy(x) + case x: Array[Unit] => copy(x) + case null => throw new NullPointerException + } + } + + def toArray[T](xs: scala.collection.Seq[T]) = { + if (xs.isEmpty) Array.emptyObjectArray + else { + val arr = new Array[AnyRef](xs.length) + val it = xs.iterator + var i = 0 + while (it.hasNext) { + arr(i) = it.next().asInstanceOf[AnyRef] + i += 1 + } + arr + } + } + + // Java bug: https://bugs.java.com/view_bug.do?bug_id=4071957 + // More background at ticket #2318. + def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) + + def _toString(x: Product): String = + x.productIterator.mkString(x.productPrefix + "(", ",", ")") + + def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) + + /** A helper for case classes. */ + def typedProductIterator[T](x: Product): Iterator[T] = { + new AbstractIterator[T] { + private var c: Int = 0 + private val cmax = x.productArity + def hasNext = c < cmax + def next() = { + val result = x.productElement(c) + c += 1 + result.asInstanceOf[T] + } + } + } + + /** Old implementation of `##`. */ + @deprecated("Use scala.runtime.Statics.anyHash instead.", "2.12.0") + def hash(x: Any): Int = Statics.anyHash(x.asInstanceOf[Object]) + + /** Given any Scala value, convert it to a String. + * + * The primary motivation for this method is to provide a means for + * correctly obtaining a String representation of a value, while + * avoiding the pitfalls of naively calling toString on said value. + * In particular, it addresses the fact that (a) toString cannot be + * called on null and (b) depending on the apparent type of an + * array, toString may or may not print it in a human-readable form. 
+ * + * @param arg the value to stringify + * @return a string representation of arg. + */ + def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) + def stringOf(arg: Any, maxElements: Int): String = { + def packageOf(x: AnyRef) = x.getClass.getPackage match { + case null => "" + case p => p.getName + } + def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala." + def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc." + + // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22) + def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple") + + // We use reflection because the scala.xml package might not be available + def isSubClassOf(potentialSubClass: Class[_], ofClass: String) = + try { + val classLoader = potentialSubClass.getClassLoader + val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader) + clazz.isAssignableFrom(potentialSubClass) + } catch { + case cnfe: ClassNotFoundException => false + } + def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node") + def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData") + + // When doing our own iteration is dangerous + def useOwnToString(x: Any) = x match { + // Range/NumericRange have a custom toString to avoid walking a gazillion elements + case _: Range | _: NumericRange[_] => true + // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 + case _: Sorted[_, _] => true + // StringBuilder(a, b, c) and similar not so attractive + case _: StringLike[_] => true + // Don't want to evaluate any elements in a view + case _: TraversableView[_, _] => true + // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] + // -> catch those by isXmlNode and isXmlMetaData. + // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom + // collections which may have useful toString methods - ticket #3710 + // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. + case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) + // Otherwise, nothing could possibly go wrong + case _ => false + } + + // A variation on inner for maps so they print -> instead of bare tuples + def mapInner(arg: Any): String = arg match { + case (k, v) => inner(k) + " -> " + inner(v) + case _ => inner(arg) + } + + // Special casing Unit arrays, the value class which uses a reference array type. + def arrayToString(x: AnyRef) = { + if (x.getClass.getComponentType == classOf[BoxedUnit]) + 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")") + else + WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") + } + + // The recursively applied attempt to prettify Array printing. + // Note that iterator is used if possible and foreach is used as a + // last resort, because the parallel collections "foreach" in a + // random order even on sequences. 
+ def inner(arg: Any): String = arg match { + case null => "null" + case "" => "\"\"" + case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x + case x if useOwnToString(x) => x.toString + case x: AnyRef if isArray(x) => arrayToString(x) + case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") + case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") + case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") + case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma + case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") + case x => x.toString + } + + // The try/catch is defense against iterables which aren't actually designed + // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. + try inner(arg) + catch { + case _: UnsupportedOperationException | _: AssertionError => "" + arg + } + } + + /** stringOf formatted for use in a repl result. */ + def replStringOf(arg: Any, maxElements: Int): String = { + val s = stringOf(arg, maxElements) + val nl = if (s contains "\n") "\n" else "" + + nl + s + "\n" + } +} From 8a7cf756f72a8a2a1eb191933c9bb1915e422c4d Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 19:10:03 +0100 Subject: [PATCH 49/75] Remove scala 2.13.x Symbol overrides --- scalalib/overrides-2.13.0/scala/Symbol.scala | 31 -------------------- scalalib/overrides-2.13.1/scala/Symbol.scala | 31 -------------------- scalalib/overrides-2.13.2/scala/Symbol.scala | 31 -------------------- 3 files changed, 93 deletions(-) delete mode 100644 scalalib/overrides-2.13.0/scala/Symbol.scala delete mode 100644 scalalib/overrides-2.13.1/scala/Symbol.scala delete mode 100644 scalalib/overrides-2.13.2/scala/Symbol.scala diff --git a/scalalib/overrides-2.13.0/scala/Symbol.scala b/scalalib/overrides-2.13.0/scala/Symbol.scala deleted file mode 100644 index 04d24cc921..0000000000 --- a/scalalib/overrides-2.13.0/scala/Symbol.scala +++ /dev/null @@ -1,31 +0,0 @@ -package scala - -// Ported from Scala.js. -// Modified to use collection.mutable.Map instead of java.util.WeakHashMap. - -final class Symbol private (val name: String) extends Serializable { - override def toString(): String = "'" + name - - @throws(classOf[java.io.ObjectStreamException]) - private def readResolve(): Any = Symbol.apply(name) - override def hashCode = name.hashCode() - override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] -} - -object Symbol extends UniquenessCache[Symbol] { - override def apply(name: String): Symbol = super.apply(name) - protected def valueFromKey(name: String): Symbol = new Symbol(name) - protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) -} - -private[scala] abstract class UniquenessCache[V] { - private val cache = collection.mutable.Map.empty[String, V] - - protected def valueFromKey(k: String): V - protected def keyFromValue(v: V): Option[String] - - def apply(name: String): V = - cache.getOrElseUpdate(name, valueFromKey(name)) - - def unapply(other: V): Option[String] = keyFromValue(other) -} diff --git a/scalalib/overrides-2.13.1/scala/Symbol.scala b/scalalib/overrides-2.13.1/scala/Symbol.scala deleted file mode 100644 index 04d24cc921..0000000000 --- a/scalalib/overrides-2.13.1/scala/Symbol.scala +++ /dev/null @@ -1,31 +0,0 @@ -package scala - -// Ported from Scala.js. 
-// Modified to use collection.mutable.Map instead of java.util.WeakHashMap. - -final class Symbol private (val name: String) extends Serializable { - override def toString(): String = "'" + name - - @throws(classOf[java.io.ObjectStreamException]) - private def readResolve(): Any = Symbol.apply(name) - override def hashCode = name.hashCode() - override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] -} - -object Symbol extends UniquenessCache[Symbol] { - override def apply(name: String): Symbol = super.apply(name) - protected def valueFromKey(name: String): Symbol = new Symbol(name) - protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) -} - -private[scala] abstract class UniquenessCache[V] { - private val cache = collection.mutable.Map.empty[String, V] - - protected def valueFromKey(k: String): V - protected def keyFromValue(v: V): Option[String] - - def apply(name: String): V = - cache.getOrElseUpdate(name, valueFromKey(name)) - - def unapply(other: V): Option[String] = keyFromValue(other) -} diff --git a/scalalib/overrides-2.13.2/scala/Symbol.scala b/scalalib/overrides-2.13.2/scala/Symbol.scala deleted file mode 100644 index 04d24cc921..0000000000 --- a/scalalib/overrides-2.13.2/scala/Symbol.scala +++ /dev/null @@ -1,31 +0,0 @@ -package scala - -// Ported from Scala.js. -// Modified to use collection.mutable.Map instead of java.util.WeakHashMap. - -final class Symbol private (val name: String) extends Serializable { - override def toString(): String = "'" + name - - @throws(classOf[java.io.ObjectStreamException]) - private def readResolve(): Any = Symbol.apply(name) - override def hashCode = name.hashCode() - override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] -} - -object Symbol extends UniquenessCache[Symbol] { - override def apply(name: String): Symbol = super.apply(name) - protected def valueFromKey(name: String): Symbol = new Symbol(name) - protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) -} - -private[scala] abstract class UniquenessCache[V] { - private val cache = collection.mutable.Map.empty[String, V] - - protected def valueFromKey(k: String): V - protected def keyFromValue(v: V): Option[String] - - def apply(name: String): V = - cache.getOrElseUpdate(name, valueFromKey(name)) - - def unapply(other: V): Option[String] = keyFromValue(other) -} From 7343b1872ad8cb3229a411b08f31e4a856236971 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 19:12:08 +0100 Subject: [PATCH 50/75] Removes illegal jdk.CollectionConventers in scalalib overrides --- scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala | 5 ----- scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala | 5 ----- 2 files changed, 10 deletions(-) delete mode 100644 scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala delete mode 100644 scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala diff --git a/scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala b/scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala deleted file mode 100644 index abd9071ae1..0000000000 --- a/scalalib/overrides-2.11/scala/jdk/CollectionConventers.scala +++ /dev/null @@ -1,5 +0,0 @@ -package scala.jdk - -import scala.collection.convert.{DecorateAsJava, DecorateAsScala} - -object CollectionConverters extends DecorateAsJava with DecorateAsScala \ No newline at end of file diff --git a/scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala b/scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala deleted file mode 
100644 index abd9071ae1..0000000000 --- a/scalalib/overrides-2.12/scala/jdk/CollectionConventers.scala +++ /dev/null @@ -1,5 +0,0 @@ -package scala.jdk - -import scala.collection.convert.{DecorateAsJava, DecorateAsScala} - -object CollectionConverters extends DecorateAsJava with DecorateAsScala \ No newline at end of file From 695d424ea8be7a97017a1af6cda51da0ef4459be Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 19:14:55 +0100 Subject: [PATCH 51/75] Additional styling fixes --- .../src/main/scala/scalanative/compiler/NIRCompiler.scala | 5 ++--- tools/src/main/scala/scala/scalanative/build/Logger.scala | 1 + .../test/scala/scala/scalanative/regex/ApiTestUtils.scala | 2 +- .../test/scala/scala/scalanative/regex/CharClassSuite.scala | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala b/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala index 3f6c8e40dd..89d4047952 100644 --- a/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala +++ b/testing-compiler/src/main/scala/scalanative/compiler/NIRCompiler.scala @@ -58,9 +58,8 @@ class NIRCompiler(outputDir: Path) extends api.NIRCompiler { extends CompatReporter { override def add(pos: Position, msg: String, severity: Severity): Unit = severity match { - case ERROR => reportError(msg) - case INFO | WARNING => () - case _ => () + case ERROR => reportError(msg) + case _ => () } } diff --git a/tools/src/main/scala/scala/scalanative/build/Logger.scala b/tools/src/main/scala/scala/scalanative/build/Logger.scala index b2201d4fe1..6f6c9d671c 100644 --- a/tools/src/main/scala/scala/scalanative/build/Logger.scala +++ b/tools/src/main/scala/scala/scalanative/build/Logger.scala @@ -1,6 +1,7 @@ package scala.scalanative.build import java.lang.System.{err, out, lineSeparator => nl} + import scala.sys.process.ProcessLogger /** Interface to report and/or collect messages given by the toolchain. */ diff --git a/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala b/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala index 366368d698..7f7213b4e7 100644 --- a/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala +++ b/unit-tests/src/test/scala/scala/scalanative/regex/ApiTestUtils.scala @@ -8,7 +8,7 @@ import ScalaTestCompat.fail object ApiTestUtils extends tests.Suite { def assertArrayEquals[A](arr1: Array[A], arr2: Array[A]) = - assert(arr1 sameElements arr2) + assert(arr1.sameElements(arr2)) /** * Asserts that IllegalArgumentException is thrown from compile with flags. 
diff --git a/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala b/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala index 8e6559dde8..89afce97c2 100644 --- a/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala +++ b/unit-tests/src/test/scala/scala/scalanative/regex/CharClassSuite.scala @@ -10,7 +10,7 @@ object CharClassSuite extends tests.Suite { private def assertClass(cc: CharClass, expected: Array[Int]): Unit = { val actual = cc.toArray - if (!(actual sameElements expected)) + if (!(actual.sameElements(expected))) throw new AssertionError( "Incorrect CharClass value:\n" + "Expected: " + expected .mkString(", ") + "\n" + "Actual: " + actual.mkString(", ")) From 4164b87d308d24d6db70ec25aad6f839a0a92c1d Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 19:17:27 +0100 Subject: [PATCH 52/75] Remove illegal ussages of collectionsCompat lib in build --- build.sbt | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/build.sbt b/build.sbt index 29fe624b95..1a6083794b 100644 --- a/build.sbt +++ b/build.sbt @@ -301,8 +301,7 @@ lazy val nscplugin = ), libraryDependencies ++= Seq( "org.scala-lang" % "scala-compiler" % scalaVersion.value, - "org.scala-lang" % "scala-reflect" % scalaVersion.value, - collectionsCompatLib + "org.scala-lang" % "scala-reflect" % scalaVersion.value ), exportJars := true ) @@ -432,11 +431,6 @@ lazy val scalalib = .in(file("scalalib")) .enablePlugins(MyScalaNativePlugin) .settings( - // This build uses Scala 2.11 to compile - // what appears to be 2.11.0 sources. This yields 114 - // deprecations. Editing those sources is not an option (long story), - // so do not spend compile time looking for the deprecations. - // Keep the log file clean so that real issues stand out. scalacOptions -= "-deprecation", scalacOptions += "-deprecation:false", // The option below is needed since Scala 2.12.12. 
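The compat library can come out of these project definitions once the affected sources stick to collection APIs that are identical on 2.12 and 2.13 — explicit builders and explicit `.toSeq` calls rather than compat imports — which is the approach the later patches in this series take (for example the `Seq.newBuilder` rewrites below). A minimal sketch of that cross-version style, using only the standard library; the method name is illustrative and not taken from this change-set:

import scala.collection.mutable

// Compiles unchanged on 2.12 and 2.13 without scala-collection-compat:
// collects distinct elements while preserving their first-seen order.
def distinctPreservingOrder[A](xs: Seq[A]): Seq[A] = {
  val seen = mutable.Set.empty[A]
  val out  = Seq.newBuilder[A]
  xs.foreach { x => if (seen.add(x)) out += x }
  out.result()
}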
@@ -595,7 +589,6 @@ lazy val tests = ) .settings(noPublishSettings) .settings( - libraryDependencies += collectionsCompatLib, testFrameworks ++= Seq( new TestFramework("tests.NativeFramework"), new TestFramework("com.novocode.junit.JUnitFramework") @@ -736,8 +729,7 @@ lazy val junitTestOutputsNative = Test / scalacOptions ++= { val jar = (junitPlugin / Compile / packageBin).value Seq(s"-Xplugin:$jar") - }, - libraryDependencies += collectionsCompatLib + } ) .dependsOn( nscplugin % "plugin", @@ -753,8 +745,7 @@ lazy val junitTestOutputsJVM = commonJUnitTestOutputsSettings, crossScalaVersions := Seq(sbt10ScalaVersion), libraryDependencies ++= Seq( - "com.novocode" % "junit-interface" % "0.11" % "test", - collectionsCompatLib + "com.novocode" % "junit-interface" % "0.11" % "test" ) ) .dependsOn(junitAsyncJVM % "test") From 3745d4a0ced40462bb20713c5bf9bd1af5140bc9 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 21:34:10 +0100 Subject: [PATCH 53/75] Remove nir.Show collectionsConverter dependency --- .../scala/scala/scalanative/nir/Show.scala | 35 ++++--------------- 1 file changed, 7 insertions(+), 28 deletions(-) diff --git a/nir/src/main/scala/scala/scalanative/nir/Show.scala b/nir/src/main/scala/scala/scalanative/nir/Show.scala index 76500ec81e..77bd97c185 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Show.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Show.scala @@ -5,7 +5,6 @@ import java.nio.charset.StandardCharsets import scala.collection.mutable import scala.scalanative.util.ShowBuilder.InMemoryShowBuilder import scalanative.util.{ShowBuilder, unreachable} -import scala.jdk.CollectionConverters._ import java.util.stream.{Stream => JStream} import java.util.function.{Function => JFunction, Consumer => JConsumer} @@ -39,36 +38,16 @@ object Show { type DefnString = (Global, String) def dump(defns: Seq[Defn], fileName: String): Unit = { - - val collectDefs = new JFunction[(Int, Seq[Defn]), JStream[DefnString]] { - override def apply(t: (Int, Seq[Defn])): JStream[DefnString] = { - val (_, defns) = t - defns - .collect { - case defn if defn != null => - (defn.name, defn.show) - } - .asJavaCollection - .stream() - } - } - val pw = new java.io.PrintWriter(fileName) - val writeToFile = new JConsumer[DefnString] { - override def accept(t: (Global, String)): Unit = { - val (_, shown) = t - pw.write(shown) - pw.write("\n") - } - } try { - val groupedDefns = util.partitionBy(defns.filter(_ != null))(_.name) - util - .parallelStream(groupedDefns) - .flatMap(collectDefs) - .sorted(Ordering.by(_._1)) - .forEach(writeToFile) + defns + .filterNot(_ != null) + .sortBy(_.name) + .foreach { defn => + pw.write(defn.show) + pw.write("\n") + } } finally { pw.close() } From c7ce2a9827f6ca55589dc0999506bc34032e4ae0 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Mon, 9 Nov 2020 21:44:21 +0100 Subject: [PATCH 54/75] Remove util collectionsConverter dependency --- util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala b/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala index b5acef2418..1d99381db7 100644 --- a/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala +++ b/util/src/main/scala/scala/scalanative/io/VirtualDirectory.scala @@ -1,7 +1,6 @@ package scala.scalanative package io -import scala.jdk.CollectionConverters._ import java.io.Writer import java.net.URI import java.nio.ByteBuffer From 
39b2aad06fdaa4da9c9fd134d97ea3b9690a9950 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Tue, 10 Nov 2020 09:11:19 +0100 Subject: [PATCH 55/75] Remove unitTests CollectionConverters dependency --- .../compat/CollectionConverters.scala | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala diff --git a/unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala b/unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala new file mode 100644 index 0000000000..a8eef418de --- /dev/null +++ b/unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala @@ -0,0 +1,61 @@ +package scala.scalanative.compat + +import java.util.{LinkedHashMap, LinkedHashSet, LinkedList} +import scala.collection.mutable +import scala.reflect.ClassTag + +/** Set of helper method replacing problematic Scala collection.JavaConverters as they cause problems + * in cross compile between 2.13+ and older Scala versions */ +object CollectionConverters { + implicit class ScalaToJavaCollections[T: ClassTag]( + private val self: Iterable[T]) { + def toJavaList: LinkedList[T] = { + val list = new LinkedList[T]() + self.foreach(list.add) + list + } + + def toJavaSet: java.util.Set[T] = { + val s = new LinkedHashSet[T]() + self.foreach(s.add) + s + } + + def toJavaMap[K, V](implicit ev: T =:= (K, V)): java.util.Map[K, V] = { + val m = new LinkedHashMap[K, V]() + self.iterator.asInstanceOf[Iterator[(K, V)]].foreach { + case (k, v) => m.put(k, v) + } + m + } + } + + implicit class JavaToScalaCollections[T: ClassTag]( + private val self: java.util.Collection[T]) { + private def buf = self.iterator().toScalaSeq + def toScalaSeq: Seq[T] = buf.toSeq + def toScalaMap[K: ClassTag, V: ClassTag]( + implicit ev: T =:= java.util.Map.Entry[K, V]): mutable.Map[K, V] = { + val map = mutable.Map.empty[K, V] + self + .iterator() + .asInstanceOf[Iterator[java.util.Map.Entry[K, V]]] + .foreach { v => map.put(v.getKey(), v.getValue()) } + map + } + def toScalaSet: Set[T] = self.iterator().toScalaSet + } + + implicit class JavaIteratorToScala[T: ClassTag]( + private val self: java.util.Iterator[T]) { + val toScalaSeq: mutable.UnrolledBuffer[T] = { + val b = new mutable.UnrolledBuffer[T]() + while (self.hasNext) { + b += self.next() + } + b + } + def toScalaSet: Set[T] = toScalaSeq.toSet + } + +} From 3404b243df9b0f6262fafe0962335f23af6dac64 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 14:38:07 +0100 Subject: [PATCH 56/75] Remove parallelStream from scalanative.util package --- .../scala/scala/scalanative/nscplugin/NirGenPhase.scala | 6 ++++-- util/src/main/scala/scala/scalanative/util/package.scala | 4 ---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala index 6045bb5cbe..772d6f1b00 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala @@ -2,6 +2,7 @@ package scala.scalanative package nscplugin import java.nio.file.{Path => JPath} +import java.util.stream.{Stream => JStream} import java.util.function.{Consumer => JConsumer} import scala.collection.mutable import scala.language.implicitConversions @@ -107,8 +108,9 @@ abstract class NirGenPhase[G <: NscGlobal](val global: G) } } - util - .parallelStream(allFiles) + 
JStream + .of(allFiles.toSeq: _*) + .parallel() .forEach(generateIRFile) } } diff --git a/util/src/main/scala/scala/scalanative/util/package.scala b/util/src/main/scala/scala/scalanative/util/package.scala index 9fe9b84e73..53b31af07b 100644 --- a/util/src/main/scala/scala/scalanative/util/package.scala +++ b/util/src/main/scala/scala/scalanative/util/package.scala @@ -54,8 +54,4 @@ package object util { def partitionBy[T](elems: Seq[T], batches: Int)( f: T => Any): Map[Int, Seq[T]] = elems.groupBy { elem => Math.abs(f(elem).##) % batches } - - def parallelStream[T](elems: Iterable[T]): JStream[T] = { - JStream.of(elems.toSeq: _*).parallel() - } } From c9ad9b8c48c45718816725dac92157de1253f1b0 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 14:59:22 +0100 Subject: [PATCH 57/75] Remove implicit conversion for UnrolledBuffer->Seq, and Seq -> immutable.Seq --- nir/src/main/scala/scala/scalanative/nir/Attrs.scala | 9 ++++----- .../main/scala/scala/scalanative/nir/Buffer.scala | 2 +- .../scala/scala/scalanative/nir/ControlFlow.scala | 2 +- .../main/scala/scala/scalanative/nir/Unmangle.scala | 4 ++-- .../nir/serialization/BinaryDeserializer.scala | 2 +- .../nir/serialization/BinarySerializer.scala | 2 +- .../scala/scalanative/nscplugin/NirGenExpr.scala | 4 ++-- .../scala/scalanative/nscplugin/NirGenStat.scala | 2 +- util/src/main/scala/scala/scalanative/package.scala | 12 ------------ 9 files changed, 13 insertions(+), 26 deletions(-) delete mode 100644 util/src/main/scala/scala/scalanative/package.scala diff --git a/nir/src/main/scala/scala/scalanative/nir/Attrs.scala b/nir/src/main/scala/scala/scalanative/nir/Attrs.scala index e3d57fab1d..a276a259f1 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Attrs.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Attrs.scala @@ -41,7 +41,7 @@ final case class Attrs(inlineHint: Inline = MayInline, isAbstract: Boolean = false, links: Seq[Attr.Link] = Seq()) { def toSeq: Seq[Attr] = { - val out = mutable.UnrolledBuffer.empty[Attr] + val out = Seq.newBuilder[Attr] if (inlineHint != MayInline) out += inlineHint if (specialize != MaySpecialize) out += specialize @@ -52,7 +52,7 @@ final case class Attrs(inlineHint: Inline = MayInline, if (isAbstract) out += Abstract out ++= links - out + out.result() } } object Attrs { @@ -66,8 +66,7 @@ object Attrs { var isDyn = false var isStub = false var isAbstract = false - val overrides = mutable.UnrolledBuffer.empty[Global] - val links = mutable.UnrolledBuffer.empty[Attr.Link] + val links = Seq.newBuilder[Attr.Link] attrs.foreach { case attr: Inline => inline = attr @@ -87,6 +86,6 @@ object Attrs { isDyn, isStub, isAbstract, - links) + links.result()) } } diff --git a/nir/src/main/scala/scala/scalanative/nir/Buffer.scala b/nir/src/main/scala/scala/scalanative/nir/Buffer.scala index 966738a670..0f51ed60c6 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Buffer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Buffer.scala @@ -12,7 +12,7 @@ class Buffer(implicit fresh: Fresh) { def ++=(other: Buffer): Unit = buffer ++= other.buffer def toSeq: Seq[Inst] = - buffer + buffer.toSeq def size: Int = buffer.size diff --git a/nir/src/main/scala/scala/scalanative/nir/ControlFlow.scala b/nir/src/main/scala/scala/scalanative/nir/ControlFlow.scala index 2c976927a2..0d91c6128c 100644 --- a/nir/src/main/scala/scala/scalanative/nir/ControlFlow.scala +++ b/nir/src/main/scala/scala/scalanative/nir/ControlFlow.scala @@ -87,7 +87,7 @@ object ControlFlow { body += insts(i) } while 
(!insts(i).isInstanceOf[Inst.Cf]) - val block = new Block(n, params, body, isEntry = k == 0) + val block = Block(n, params, body.toSeq, isEntry = k == 0) blocks(local) = block todo ::= block block diff --git a/nir/src/main/scala/scala/scalanative/nir/Unmangle.scala b/nir/src/main/scala/scala/scalanative/nir/Unmangle.scala index 59c9f3b0b7..820a778256 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Unmangle.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Unmangle.scala @@ -137,12 +137,12 @@ object Unmangle { } def readTypes(): Seq[Type] = { - val buf = collection.mutable.UnrolledBuffer.empty[Type] + val buf = Seq.newBuilder[Type] while (peek() != 'E') { buf += readType() } next() - buf + buf.result() } def readIdent(): String = { diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala index b496ed0e36..f2beba03fb 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala @@ -33,7 +33,7 @@ final class BinaryDeserializer(buffer: ByteBuffer) { buffer.position(offset) allDefns += getDefn() } - allDefns + allDefns.toSeq } private def getSeq[T](getT: => T): Seq[T] = diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala index a2001b3342..45e11a8ab9 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala @@ -581,6 +581,6 @@ final class BinarySerializer { insts.foreach(inst => initFile(inst.pos)) case defn => initFile(defn.pos) } - filesList + filesList.toSeq } } diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index 54704f59f7..f10f8465f3 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -1943,7 +1943,7 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => if (!sym.owner.isExternModule) { genSimpleArgs(argsp) } else { - val res = mutable.UnrolledBuffer.empty[Val] + val res = Seq.newBuilder[Val] argsp.zip(sym.tpe.params).foreach { case (argp, paramSym) => @@ -1951,7 +1951,7 @@ trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => res += toExtern(externType, genExpr(argp))(argp.pos) } - res + res.result() } def genSimpleArgs(argsp: Seq[Tree]): Seq[Val] = { diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala index 1114891354..0a25f28c2d 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala @@ -69,7 +69,7 @@ trait NirGenStat[G <: NscGlobal] { self: NirGenPhase[G] => class StatBuffer { private val buf = mutable.UnrolledBuffer.empty[nir.Defn] - def toSeq: Seq[nir.Defn] = buf + def toSeq: Seq[nir.Defn] = buf.toSeq def +=(defn: nir.Defn): Unit = { buf += defn diff --git a/util/src/main/scala/scala/scalanative/package.scala b/util/src/main/scala/scala/scalanative/package.scala deleted file mode 100644 index 3427252817..0000000000 --- a/util/src/main/scala/scala/scalanative/package.scala +++ /dev/null 
@@ -1,12 +0,0 @@ -package scala -import scala.collection.mutable -import scala.language.implicitConversions - -package object scalanative { - implicit def bufferToSeq[T]( - buf: mutable.UnrolledBuffer[T]): collection.Seq[T] = - buf.toSeq - - implicit def seqToImmutableSeq[T]( - seq: collection.Seq[T]): collection.immutable.Seq[T] = seq.toIndexedSeq -} From 28f8a0357fd4b852066a27d62dd408b86b7e6e7f Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 17:40:08 +0100 Subject: [PATCH 58/75] Revert Scala version dependent HashCode tests after rebase --- .../src/test/scala/scala/HashCodeTest.scala | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/unit-tests/src/test/scala/scala/HashCodeTest.scala b/unit-tests/src/test/scala/scala/HashCodeTest.scala index 5e35526a79..6aaa1b76f5 100644 --- a/unit-tests/src/test/scala/scala/HashCodeTest.scala +++ b/unit-tests/src/test/scala/scala/HashCodeTest.scala @@ -2,15 +2,29 @@ package scala import org.junit.Test import org.junit.Assert._ +import scala.scalanative.buildinfo.ScalaNativeBuildInfo class HashCodeTest { case class MyData(string: String, num: Int) + def scala212orOlder: Boolean = + ScalaNativeBuildInfo.scalaVersion + .split('.') + .take(3) + .map(_.toInt) match { + case Array(2, n, _) if n <= 12 => true + case _ => false + } + @Test def hashCodeOfStringMatchesScalaJVM(): Unit = { assertTrue("hello".hashCode == 99162322) } @Test def hashCodeOfCaseClassMatchesScalaJVM(): Unit = { - assertTrue(MyData("hello", 12345).hashCode == -1824015247) + val expectedHashCode = + if (scala212orOlder) -1824015247 + else -715875225 + assertTrue(MyData("hello", 12345).hashCode == expectedHashCode) } + } From d08010d09df85590851355b4de481b83c4bfc55d Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 17:40:34 +0100 Subject: [PATCH 59/75] After rebase fixes --- javalib/src/main/scala/java/lang/UnixProcess.scala | 1 - javalib/src/main/scala/java/util/Hashtable.scala | 8 +++++--- javalib/src/main/scala/java/util/ScalaOps.scala | 4 ++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/javalib/src/main/scala/java/lang/UnixProcess.scala b/javalib/src/main/scala/java/lang/UnixProcess.scala index 8a237afa5d..a5a8d6627d 100644 --- a/javalib/src/main/scala/java/lang/UnixProcess.scala +++ b/javalib/src/main/scala/java/lang/UnixProcess.scala @@ -146,7 +146,6 @@ object UnixProcess { .scalaOps .toSeq .map(e => s"${e.getKey()}=${e.getValue()}") - } } /* diff --git a/javalib/src/main/scala/java/util/Hashtable.scala b/javalib/src/main/scala/java/util/Hashtable.scala index 0e12abd73c..d6a60009f9 100644 --- a/javalib/src/main/scala/java/util/Hashtable.scala +++ b/javalib/src/main/scala/java/util/Hashtable.scala @@ -24,10 +24,10 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) } def size(): Int = - inner.size() + inner.size def isEmpty(): Boolean = - inner.isEmpty() + inner.isEmpty def keys(): ju.Enumeration[K] = Collections.enumeration(keySet()) @@ -71,7 +71,9 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) } def putAll(m: ju.Map[_ <: K, _ <: V]): Unit = - m.entrySet().scalaOps.foreach { e => inner.put(Box(e.getKey), e.getValue) } + m.entrySet().scalaOps.foreach { e => + inner.put(Box(e.getKey()), e.getValue()) + } def clear(): Unit = inner.clear() diff --git a/javalib/src/main/scala/java/util/ScalaOps.scala b/javalib/src/main/scala/java/util/ScalaOps.scala index 8ffba6928c..efc48048a9 100644 --- a/javalib/src/main/scala/java/util/ScalaOps.scala +++ 
b/javalib/src/main/scala/java/util/ScalaOps.scala @@ -88,8 +88,8 @@ private[java] object ScalaOps { } @inline def map[U](f: A => U): Iterator[U] = new Iterator[U] { - override def hasNext: Boolean = __self.hasNext - override def next(): U = f(__self.next()) + override def hasNext(): Boolean = __self.hasNext() + override def next(): U = f(__self.next()) } @inline def count(f: A => Boolean): Int = From c22e377b0736fcdb1b242bc2da989e921022c24a Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 17:46:11 +0100 Subject: [PATCH 60/75] Remove duplicated CollectionConverters.scala --- .../compat/CollectionConverters.scala | 61 ------------------- 1 file changed, 61 deletions(-) delete mode 100644 unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala diff --git a/unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala b/unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala deleted file mode 100644 index a8eef418de..0000000000 --- a/unit-tests/src/main/scala/scala/scalanative/compat/CollectionConverters.scala +++ /dev/null @@ -1,61 +0,0 @@ -package scala.scalanative.compat - -import java.util.{LinkedHashMap, LinkedHashSet, LinkedList} -import scala.collection.mutable -import scala.reflect.ClassTag - -/** Set of helper method replacing problematic Scala collection.JavaConverters as they cause problems - * in cross compile between 2.13+ and older Scala versions */ -object CollectionConverters { - implicit class ScalaToJavaCollections[T: ClassTag]( - private val self: Iterable[T]) { - def toJavaList: LinkedList[T] = { - val list = new LinkedList[T]() - self.foreach(list.add) - list - } - - def toJavaSet: java.util.Set[T] = { - val s = new LinkedHashSet[T]() - self.foreach(s.add) - s - } - - def toJavaMap[K, V](implicit ev: T =:= (K, V)): java.util.Map[K, V] = { - val m = new LinkedHashMap[K, V]() - self.iterator.asInstanceOf[Iterator[(K, V)]].foreach { - case (k, v) => m.put(k, v) - } - m - } - } - - implicit class JavaToScalaCollections[T: ClassTag]( - private val self: java.util.Collection[T]) { - private def buf = self.iterator().toScalaSeq - def toScalaSeq: Seq[T] = buf.toSeq - def toScalaMap[K: ClassTag, V: ClassTag]( - implicit ev: T =:= java.util.Map.Entry[K, V]): mutable.Map[K, V] = { - val map = mutable.Map.empty[K, V] - self - .iterator() - .asInstanceOf[Iterator[java.util.Map.Entry[K, V]]] - .foreach { v => map.put(v.getKey(), v.getValue()) } - map - } - def toScalaSet: Set[T] = self.iterator().toScalaSet - } - - implicit class JavaIteratorToScala[T: ClassTag]( - private val self: java.util.Iterator[T]) { - val toScalaSeq: mutable.UnrolledBuffer[T] = { - val b = new mutable.UnrolledBuffer[T]() - while (self.hasNext) { - b += self.next() - } - b - } - def toScalaSet: Set[T] = toScalaSeq.toSet - } - -} From 86672f6cdeedcd7f97bf28a8cd5215d66421472e Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 18:01:10 +0100 Subject: [PATCH 61/75] Remove no longer needed collectionsCompat and parallel-collections lib dependencies --- build.sbt | 20 ++----------------- .../scala/scalanative/build/Discover.scala | 2 +- .../scala/scala/scalanative/build/IO.scala | 3 +-- .../scala/scala/scalanative/build/LLVM.scala | 3 --- .../scala/scalanative/util/package.scala | 2 -- 5 files changed, 4 insertions(+), 26 deletions(-) diff --git a/build.sbt b/build.sbt index 1a6083794b..99445502f9 100644 --- a/build.sbt +++ b/build.sbt @@ -96,18 +96,6 @@ addCommandAlias( lazy val publishSnapshot = taskKey[Unit]("Publish 
snapshot to sonatype on every commit to master.") -val collectionsCompatLib = { - "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0" -} - -def parallelCollectionsLib(scalaVersion: String): Seq[ModuleID] = { - CrossVersion.partialVersion(scalaVersion) match { - case Some((2, n)) if n >= 13 => - Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0") - case _ => Nil - } -} - // to publish plugin (we only need to do this once, it's already done!) // follow: https://www.scala-sbt.org/1.x/docs/Bintray-For-Plugins.html // then add a new package @@ -225,9 +213,6 @@ lazy val util = .in(file("util")) .settings(toolSettings) .settings(mavenPublishSettings) - .settings( - libraryDependencies += collectionsCompatLib - ) lazy val nir = project @@ -264,9 +249,8 @@ lazy val tools = .settings( libraryDependencies ++= Seq( scalacheckDep, - scalatestDep, - collectionsCompatLib - ) ++ parallelCollectionsLib(scalaVersion.value), + scalatestDep + ), Test / fork := true, Test / javaOptions ++= { val nscpluginjar = (nscplugin / Compile / Keys.`package`).value diff --git a/tools/src/main/scala/scala/scalanative/build/Discover.scala b/tools/src/main/scala/scala/scalanative/build/Discover.scala index 4b985a5ee0..0ca8cab1a7 100644 --- a/tools/src/main/scala/scala/scalanative/build/Discover.scala +++ b/tools/src/main/scala/scala/scalanative/build/Discover.scala @@ -2,7 +2,7 @@ package scala.scalanative package build import java.nio.file.{Files, Path, Paths} -import scala.jdk.CollectionConverters._ +import scala.collection.JavaConverters._ import scala.util.Try import scala.sys.process._ import scalanative.build.IO.RichPath diff --git a/tools/src/main/scala/scala/scalanative/build/IO.scala b/tools/src/main/scala/scala/scalanative/build/IO.scala index 3848eac5b9..55fb49b4b5 100644 --- a/tools/src/main/scala/scala/scalanative/build/IO.scala +++ b/tools/src/main/scala/scala/scalanative/build/IO.scala @@ -13,7 +13,6 @@ import java.nio.file.{ } import java.nio.file.attribute.BasicFileAttributes import java.util.EnumSet -import java.util.zip.{ZipEntry, ZipInputStream} import java.security.{DigestInputStream, MessageDigest} /** Internal I/O utilities. */ @@ -100,7 +99,7 @@ private[scalanative] object IO { /** Look for a zip entry path string using a matcher function */ def existsInJar(path: Path, matcher: String => Boolean): Boolean = { import java.util.zip.ZipFile - import scala.jdk.CollectionConverters._ + import scala.collection.JavaConverters._ val zf = new ZipFile(path.toFile) val it = zf.entries().asScala it.exists(e => matcher(e.getName)) diff --git a/tools/src/main/scala/scala/scalanative/build/LLVM.scala b/tools/src/main/scala/scala/scalanative/build/LLVM.scala index 1318959d5e..1b6a2ec132 100644 --- a/tools/src/main/scala/scala/scalanative/build/LLVM.scala +++ b/tools/src/main/scala/scala/scalanative/build/LLVM.scala @@ -3,11 +3,8 @@ package build import java.nio.file.{Files, Path, Paths, StandardCopyOption} import java.util.Arrays -import scala.jdk.CollectionConverters._ -import scala.util.Try import scala.sys.process._ import scalanative.build.IO.RichPath -import scalanative.build.Discover._ import scalanative.build.NativeLib._ /** Internal utilities to interact with LLVM command-line tools. 
*/ diff --git a/util/src/main/scala/scala/scalanative/util/package.scala b/util/src/main/scala/scala/scalanative/util/package.scala index 53b31af07b..cd2697497d 100644 --- a/util/src/main/scala/scala/scalanative/util/package.scala +++ b/util/src/main/scala/scala/scalanative/util/package.scala @@ -1,7 +1,5 @@ package scala.scalanative -import java.util.stream.{Stream => JStream} - package object util { /** Marker methods, called whenever a specific control-flow branch From a96d1e48ca39c6ca3c8116f3d46e35bc48d2bc56 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 18:18:32 +0100 Subject: [PATCH 62/75] Add Predef and `scala` package optimization used in previous versions --- scalalib/overrides-2.13/scala/Predef.scala | 586 ++++++++++++++++++++ scalalib/overrides-2.13/scala/package.scala | 147 +++++ 2 files changed, 733 insertions(+) create mode 100644 scalalib/overrides-2.13/scala/Predef.scala create mode 100644 scalalib/overrides-2.13/scala/package.scala diff --git a/scalalib/overrides-2.13/scala/Predef.scala b/scalalib/overrides-2.13/scala/Predef.scala new file mode 100644 index 0000000000..bb2f83e47a --- /dev/null +++ b/scalalib/overrides-2.13/scala/Predef.scala @@ -0,0 +1,586 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +import scala.language.implicitConversions + +import scala.collection.{mutable, immutable, ArrayOps, StringOps}, immutable.WrappedString +import scala.annotation.{elidable, implicitNotFound}, elidable.ASSERTION +import scala.annotation.meta.{ companionClass, companionMethod } + +/** The `Predef` object provides definitions that are accessible in all Scala + * compilation units without explicit qualification. + * + * === Commonly Used Types === + * Predef provides type aliases for types which are commonly used, such as + * the immutable collection types [[scala.collection.immutable.Map]] and + * [[scala.collection.immutable.Set]]. + * + * === Console Output === + * For basic console output, `Predef` provides convenience methods [[print(x:Any* print]] and [[println(x:Any* println]], + * which are aliases of the methods in the object [[scala.Console]]. + * + * === Assertions === + * A set of `assert` functions are provided for use as a way to document + * and dynamically check invariants in code. Invocations of `assert` can be elided + * at compile time by providing the command line option `-Xdisable-assertions`, + * which raises `-Xelide-below` above `elidable.ASSERTION`, to the `scalac` command. + * + * Variants of `assert` intended for use with static analysis tools are also + * provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are + * intended for use as a means of design-by-contract style specification + * of pre- and post-conditions on functions, with the intention that these + * specifications could be consumed by a static analysis tool. For instance, + * + * {{{ + * def addNaturals(nats: List[Int]): Int = { + * require(nats forall (_ >= 0), "List contains negative numbers") + * nats.foldLeft(0)(_ + _) + * } ensuring(_ >= 0) + * }}} + * + * The declaration of `addNaturals` states that the list of integers passed should + * only contain natural numbers (i.e. non-negative), and that the result returned + * will also be natural. 
`require` is distinct from `assert` in that if the + * condition fails, then the caller of the function is to blame rather than a + * logical error having been made within `addNaturals` itself. `ensuring` is a + * form of `assert` that declares the guarantee the function is providing with + * regards to its return value. + * + * === Implicit Conversions === + * A number of commonly applied implicit conversions are also defined here, and + * in the parent type [[scala.LowPriorityImplicits]]. Implicit conversions + * are provided for the "widening" of numeric values, for instance, converting a + * Short value to a Long value as required, and to add additional higher-order + * functions to Array values. These are described in more detail in the documentation of [[scala.Array]]. + * + * @groupname utilities Utility Methods + * @groupprio utilities 10 + * + * @groupname assertions Assertions + * @groupprio assertions 20 + * @groupdesc assertions These methods support program verification and runtime correctness. + * + * @groupname console-output Console Output + * @groupprio console-output 30 + * @groupdesc console-output These methods provide output via the console. + * + * @groupname aliases Aliases + * @groupprio aliases 50 + * @groupdesc aliases These aliases bring selected immutable types into scope without any imports. + * + * @groupname conversions-string String Conversions + * @groupprio conversions-string 60 + * @groupdesc conversions-string Conversions from String to StringOps or WrappedString. + * + * @groupname implicit-classes-any Implicit Classes + * @groupprio implicit-classes-any 70 + * @groupdesc implicit-classes-any These implicit classes add useful extension methods to every type. + * + * @groupname char-sequence-wrappers CharSequence Wrappers + * @groupprio char-sequence-wrappers 80 + * @groupdesc char-sequence-wrappers Wrappers that implements CharSequence and were implicit classes. + * + * @groupname conversions-java-to-anyval Java to Scala + * @groupprio conversions-java-to-anyval 90 + * @groupdesc conversions-java-to-anyval Implicit conversion from Java primitive wrapper types to Scala equivalents. + * + * @groupname conversions-anyval-to-java Scala to Java + * @groupprio conversions-anyval-to-java 100 + * @groupdesc conversions-anyval-to-java Implicit conversion from Scala AnyVals to Java primitive wrapper types equivalents. + * + * @groupname conversions-array-to-wrapped-array Array to ArraySeq + * @groupprio conversions-array-to-wrapped-array 110 + * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to ArraySeqs. + */ +object Predef extends LowPriorityImplicits { + /** + * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to + * the class literal `T.class` in Java. + * + * @example {{{ + * val listClass = classOf[List[_]] + * // listClass is java.lang.Class[List[_]] = class scala.collection.immutable.List + * + * val mapIntString = classOf[Map[Int,String]] + * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map + * }}} + * + * @return The runtime [[Class]] representation of type `T`. + * @group utilities + */ + def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. + + /** + * Retrieve the single value of a type with a unique inhabitant. 
+ * + * @example {{{ + * object Foo + * val foo = valueOf[Foo.type] + * // foo is Foo.type = Foo + * + * val bar = valueOf[23] + * // bar is 23.type = 23 + * }}} + * @group utilities + */ + @inline def valueOf[T](implicit vt: ValueOf[T]): T = vt.value + + /** The `String` type in Scala has all the methods of the underlying + * `java.lang.String`, of which it is just an alias. + * (See the documentation corresponding to your Java version, + * for example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]].) + * In addition, extension methods in [[scala.collection.StringOps]] + * are added implicitly through the conversion [[augmentString]]. + * @group aliases + */ + type String = java.lang.String + /** @group aliases */ + type Class[T] = java.lang.Class[T] + + // miscellaneous ----------------------------------------------------- + scala.`package` // to force scala package object to be seen. + scala.collection.immutable.List // to force Nil, :: to be seen. + + /** @group aliases */ + type Function[-A, +B] = Function1[A, B] + + /** @group aliases */ + type Map[K, +V] = immutable.Map[K, V] + /** @group aliases */ + type Set[A] = immutable.Set[A] + /** @group aliases */ + @inline def Map = immutable.Map + /** @group aliases */ + @inline def Set = immutable.Set + + /** + * Allows destructuring tuples with the same syntax as constructing them. + * + * @example {{{ + * val tup = "foobar" -> 3 + * + * val c = tup match { + * case str -> i => str.charAt(i) + * } + * }}} + * @group aliases + */ + val -> = Tuple2 + + // Manifest types, companions, and incantations for summoning + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + type OptManifest[T] = scala.reflect.OptManifest[T] + @implicitNotFound(msg = "No Manifest available for ${T}.") + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + type Manifest[T] = scala.reflect.Manifest[T] + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + @inline def Manifest = scala.reflect.Manifest + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + @inline def NoManifest = scala.reflect.NoManifest + + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") + def manifest[T](implicit m: Manifest[T]): Manifest[T] = m + // TODO undeprecated until Scala reflection becomes non-experimental + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + def optManifest[T](implicit m: OptManifest[T]): OptManifest[T] = m + + // Minor variations on identity functions + + /** + * A method that returns its input value. + * @tparam A type of the input value x. + * @param x the value of type `A` to be returned. + * @return the value `x`. + * @group utilities */ + @inline def identity[A](x: A): A = x // see `$conforms` for the implicit version + + /** Summon an implicit value of type `T`. Usually, the argument is not passed explicitly. + * + * @tparam T the type of the value to be summoned + * @return the implicit value of type `T` + * @group utilities + */ + @inline def implicitly[T](implicit e: T): T = e // TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` + + /** Used to mark code blocks as being expressions, instead of being taken as part of anonymous classes and the like. + * This is just a different name for [[identity]]. + * + * @example Separating code blocks from `new`: + * {{{ + * val x = new AnyRef + * { + * val y = ... + * println(y) + * } + * // the { ... } block is seen as the body of an anonymous class + * + * val x = new AnyRef + * + * { + * val y = ... + * println(y) + * } + * // an empty line is a brittle "fix" + * + * val x = new AnyRef + * locally { + * val y = ... + * println(y) + * } + * // locally guards the block and helps communicate intent + * }}} + * @group utilities + */ + @inline def locally[T](@deprecatedName("x") x: T): T = x + + // assertions --------------------------------------------------------- + + /** Tests an expression, throwing an `AssertionError` if false. + * Calls to this method will not be generated if `-Xelide-below` + * is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assertion the expression to test + * @group assertions + */ + @elidable(ASSERTION) + def assert(assertion: Boolean): Unit = { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + /** Tests an expression, throwing an `AssertionError` if false. + * Calls to this method will not be generated if `-Xelide-below` + * is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assertion the expression to test + * @param message a String to include in the failure message + * @group assertions + */ + @elidable(ASSERTION) @inline + final def assert(assertion: Boolean, message: => Any): Unit = { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: "+ message) + } + + /** Tests an expression, throwing an `AssertionError` if false. + * This method differs from assert only in the intent expressed: + * assert contains a predicate which needs to be proven, while + * assume contains an axiom for a static checker. Calls to this method + * will not be generated if `-Xelide-below` is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assumption the expression to test + * @group assertions + */ + @elidable(ASSERTION) + def assume(assumption: Boolean): Unit = { + if (!assumption) + throw new java.lang.AssertionError("assumption failed") + } + + /** Tests an expression, throwing an `AssertionError` if false. 
+ * This method differs from assert only in the intent expressed: + * assert contains a predicate which needs to be proven, while + * assume contains an axiom for a static checker. Calls to this method + * will not be generated if `-Xelide-below` is greater than `ASSERTION`. + * + * @see [[scala.annotation.elidable elidable]] + * @param assumption the expression to test + * @param message a String to include in the failure message + * @group assertions + */ + @elidable(ASSERTION) @inline + final def assume(assumption: Boolean, message: => Any): Unit = { + if (!assumption) + throw new java.lang.AssertionError("assumption failed: "+ message) + } + + /** Tests an expression, throwing an `IllegalArgumentException` if false. + * This method is similar to `assert`, but blames the caller of the method + * for violating the condition. + * + * @param requirement the expression to test + * @group assertions + */ + def require(requirement: Boolean): Unit = { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + + /** Tests an expression, throwing an `IllegalArgumentException` if false. + * This method is similar to `assert`, but blames the caller of the method + * for violating the condition. + * + * @param requirement the expression to test + * @param message a String to include in the failure message + * @group assertions + */ + @inline final def require(requirement: Boolean, message: => Any): Unit = { + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ message) + } + + /** `???` can be used for marking methods that remain to be implemented. + * @throws NotImplementedError when `???` is invoked. + * @group utilities + */ + def ??? : Nothing = throw new NotImplementedError + + // implicit classes ----------------------------------------------------- + + /** @group implicit-classes-any */ + implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { + @inline def -> [B](y: B): (A, B) = (self, y) + @deprecated("Use `->` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", "2.13.0") + def →[B](y: B): (A, B) = ->(y) + } + + /** @group implicit-classes-any */ + implicit final class Ensuring[A](private val self: A) extends AnyVal { + def ensuring(cond: Boolean): A = { assert(cond); self } + def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self } + def ensuring(cond: A => Boolean): A = { assert(cond(self)); self } + def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self } + } + + /** @group implicit-classes-any */ + implicit final class StringFormat[A](private val self: A) extends AnyVal { + /** Returns string formatted according to given `format` string. + * Format strings are as for `String.format` + * (@see java.lang.String.format). + */ + @inline def formatted(fmtstr: String): String = fmtstr format self + } + + /** Injects String concatenation operator `+` to any classes. + * @group implicit-classes-any + */ + @(deprecated @companionMethod)("Implicit injection of + is deprecated. Convert to String to call +", "2.13.0") + @(deprecated @companionClass)("Implicit injection of + is deprecated. 
Convert to String to call +", "2.13.0") // for Scaladoc + // scala/bug#8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit + implicit final class any2stringadd[A](private val self: A) extends AnyVal { + def +(other: String): String = String.valueOf(self) + other + } + + /** @group char-sequence-wrappers */ + final class SeqCharSequence(sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { + def length: Int = sequenceOfChars.length + def charAt(index: Int): Char = sequenceOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(sequenceOfChars.slice(start, end)) + override def toString = sequenceOfChars.mkString + } + + /** @group char-sequence-wrappers */ + def SeqCharSequence(sequenceOfChars: scala.collection.IndexedSeq[Char]): SeqCharSequence = new SeqCharSequence(sequenceOfChars) + + /** @group char-sequence-wrappers */ + final class ArrayCharSequence(arrayOfChars: Array[Char]) extends CharSequence { + def length: Int = arrayOfChars.length + def charAt(index: Int): Char = arrayOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(arrayOfChars, start, end) + override def toString = arrayOfChars.mkString + } + + /** @group char-sequence-wrappers */ + def ArrayCharSequence(arrayOfChars: Array[Char]): ArrayCharSequence = new ArrayCharSequence(arrayOfChars) + + /** @group conversions-string */ + @inline implicit def augmentString(x: String): StringOps = new StringOps(x) + + // printing ----------------------------------------------------------- + + /** Prints an object to `out` using its `toString` method. + * + * @param x the object to print; may be null. + * @group console-output + */ + def print(x: Any): Unit = Console.print(x) + + /** Prints a newline character on the default output. + * @group console-output + */ + def println(): Unit = Console.println() + + /** Prints out an object to the default output, followed by a newline character. + * + * @param x the object to print. + * @group console-output + */ + def println(x: Any): Unit = Console.println(x) + + /** Prints its arguments as a formatted string to the default output, + * based on a string pattern (in a fashion similar to printf in C). + * + * The interpretation of the formatting patterns is described in + * [[java.util.Formatter]]. + * + * Consider using the [[scala.StringContext.f f interpolator]] as more type safe and idiomatic. + * + * @param text the pattern for formatting the arguments. + * @param xs the arguments used to instantiate the pattern. + * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments + * + * @see [[scala.StringContext.f StringContext.f]] + * @group console-output + */ + def printf(text: String, xs: Any*): Unit = Console.print(text.format(xs: _*)) + + // views -------------------------------------------------------------- + + // these two are morally deprecated but the @deprecated annotation has been moved to the extension method themselves, + // in order to provide a more specific deprecation method. + implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)): runtime.Tuple2Zipped.Ops[T1, T2] = new runtime.Tuple2Zipped.Ops(x) + implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)): runtime.Tuple3Zipped.Ops[T1, T2, T3] = new runtime.Tuple3Zipped.Ops(x) + + // Not specialized anymore since 2.13 but we still need separate methods + // to avoid https://github.com/scala/bug/issues/10746 + // TODO: should not need @inline. 
add heuristic to inline factories for value classes. + @inline implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = new ArrayOps(xs) + @inline implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps(xs) + @inline implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps(xs) + @inline implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps(xs) + @inline implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps(xs) + @inline implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps(xs) + @inline implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps(xs) + @inline implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps(xs) + @inline implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps(xs) + @inline implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps(xs) + @inline implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps(xs) + + // "Autoboxing" and "Autounboxing" --------------------------------------------------- + + /** @group conversions-anyval-to-java */ + implicit def byte2Byte(x: Byte): java.lang.Byte = x.asInstanceOf[java.lang.Byte] + /** @group conversions-anyval-to-java */ + implicit def short2Short(x: Short): java.lang.Short = x.asInstanceOf[java.lang.Short] + /** @group conversions-anyval-to-java */ + implicit def char2Character(x: Char): java.lang.Character = x.asInstanceOf[java.lang.Character] + /** @group conversions-anyval-to-java */ + implicit def int2Integer(x: Int): java.lang.Integer = x.asInstanceOf[java.lang.Integer] + /** @group conversions-anyval-to-java */ + implicit def long2Long(x: Long): java.lang.Long = x.asInstanceOf[java.lang.Long] + /** @group conversions-anyval-to-java */ + implicit def float2Float(x: Float): java.lang.Float = x.asInstanceOf[java.lang.Float] + /** @group conversions-anyval-to-java */ + implicit def double2Double(x: Double): java.lang.Double = x.asInstanceOf[java.lang.Double] + /** @group conversions-anyval-to-java */ + implicit def boolean2Boolean(x: Boolean): java.lang.Boolean = x.asInstanceOf[java.lang.Boolean] + + /** @group conversions-java-to-anyval */ + implicit def Byte2byte(x: java.lang.Byte): Byte = x.asInstanceOf[Byte] + /** @group conversions-java-to-anyval */ + implicit def Short2short(x: java.lang.Short): Short = x.asInstanceOf[Short] + /** @group conversions-java-to-anyval */ + implicit def Character2char(x: java.lang.Character): Char = x.asInstanceOf[Char] + /** @group conversions-java-to-anyval */ + implicit def Integer2int(x: java.lang.Integer): Int = x.asInstanceOf[Int] + /** @group conversions-java-to-anyval */ + implicit def Long2long(x: java.lang.Long): Long = x.asInstanceOf[Long] + /** @group conversions-java-to-anyval */ + implicit def Float2float(x: java.lang.Float): Float = x.asInstanceOf[Float] + /** @group conversions-java-to-anyval */ + implicit def Double2double(x: java.lang.Double): Double = x.asInstanceOf[Double] + /** @group conversions-java-to-anyval */ + implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.asInstanceOf[Boolean] + + /** An implicit of type `A => A` is available for all `A` because it can always + * be implemented using the identity function. This also means that an + * implicit of type `A => B` is always available when `A <: B`, because + * `(A => A) <: (A => B)`. + */ + // $ to avoid accidental shadowing (e.g. 
scala/bug#7788) + implicit def $conforms[A]: A => A = <:<.refl +} + +/** The `LowPriorityImplicits` class provides implicit values that +* are valid in all Scala compilation units without explicit qualification, +* but that are partially overridden by higher-priority conversions in object +* `Predef`. +*/ +// scala/bug#7335 Parents of Predef are defined in the same compilation unit to avoid +// cyclic reference errors compiling the standard library *without* a previously +// compiled copy on the classpath. +private[scala] abstract class LowPriorityImplicits extends LowPriorityImplicits2 { + import mutable.ArraySeq + + /** We prefer the java.lang.* boxed types to these wrappers in + * any potential conflicts. Conflicts do exist because the wrappers + * need to implement ScalaNumber in order to have a symmetric equals + * method, but that implies implementing java.lang.Number as well. + * + * Note - these are inlined because they are value classes, but + * the call to xxxWrapper is not eliminated even though it does nothing. + * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ + * because maybe loading Predef has side effects! + */ + @inline implicit def byteWrapper(x: Byte): runtime.RichByte = new runtime.RichByte(x) + @inline implicit def shortWrapper(x: Short): runtime.RichShort = new runtime.RichShort(x) + @inline implicit def intWrapper(x: Int): runtime.RichInt = new runtime.RichInt(x) + @inline implicit def charWrapper(c: Char): runtime.RichChar = new runtime.RichChar(c) + @inline implicit def longWrapper(x: Long): runtime.RichLong = new runtime.RichLong(x) + @inline implicit def floatWrapper(x: Float): runtime.RichFloat = new runtime.RichFloat(x) + @inline implicit def doubleWrapper(x: Double): runtime.RichDouble = new runtime.RichDouble(x) + @inline implicit def booleanWrapper(x: Boolean): runtime.RichBoolean = new runtime.RichBoolean(x) + + /** @group conversions-array-to-wrapped-array */ + implicit def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = + if (xs eq null) null + else ArraySeq.make(xs) + + // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] + // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 + // unique ones by way of this implicit, let's share one. 
+ /** @group conversions-array-to-wrapped-array */ + implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq.ofRef[T] = { + if (xs eq null) null + else if (xs.length == 0) ArraySeq.empty[AnyRef].asInstanceOf[ArraySeq.ofRef[T]] + else new ArraySeq.ofRef[T](xs) + } + + /** @group conversions-array-to-wrapped-array */ + implicit def wrapIntArray(xs: Array[Int]): ArraySeq.ofInt = if (xs ne null) new ArraySeq.ofInt(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapDoubleArray(xs: Array[Double]): ArraySeq.ofDouble = if (xs ne null) new ArraySeq.ofDouble(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapLongArray(xs: Array[Long]): ArraySeq.ofLong = if (xs ne null) new ArraySeq.ofLong(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapFloatArray(xs: Array[Float]): ArraySeq.ofFloat = if (xs ne null) new ArraySeq.ofFloat(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapCharArray(xs: Array[Char]): ArraySeq.ofChar = if (xs ne null) new ArraySeq.ofChar(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapByteArray(xs: Array[Byte]): ArraySeq.ofByte = if (xs ne null) new ArraySeq.ofByte(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapShortArray(xs: Array[Short]): ArraySeq.ofShort = if (xs ne null) new ArraySeq.ofShort(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapBooleanArray(xs: Array[Boolean]): ArraySeq.ofBoolean = if (xs ne null) new ArraySeq.ofBoolean(xs) else null + /** @group conversions-array-to-wrapped-array */ + implicit def wrapUnitArray(xs: Array[Unit]): ArraySeq.ofUnit = if (xs ne null) new ArraySeq.ofUnit(xs) else null + + /** @group conversions-string */ + implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null +} + +private[scala] abstract class LowPriorityImplicits2 { + @deprecated("Implicit conversions from Array to immutable.IndexedSeq are implemented by copying; Use the more efficient non-copying ArraySeq.unsafeWrapArray or an explicit toIndexedSeq call", "2.13.0") + implicit def copyArrayToImmutableIndexedSeq[T](xs: Array[T]): IndexedSeq[T] = + if (xs eq null) null + else new ArrayOps(xs).toIndexedSeq +} \ No newline at end of file diff --git a/scalalib/overrides-2.13/scala/package.scala b/scalalib/overrides-2.13/scala/package.scala new file mode 100644 index 0000000000..f86718e02b --- /dev/null +++ b/scalalib/overrides-2.13/scala/package.scala @@ -0,0 +1,147 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +import scala.annotation.migration + +/** + * Core Scala types. They are always available without an explicit import. 
+ * @contentDiagram hideNodes "scala.Serializable" + */ +package object scala { + type Cloneable = java.lang.Cloneable + type Serializable = java.io.Serializable + + type Throwable = java.lang.Throwable + type Exception = java.lang.Exception + type Error = java.lang.Error + + type RuntimeException = java.lang.RuntimeException + type NullPointerException = java.lang.NullPointerException + type ClassCastException = java.lang.ClassCastException + type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException + type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException + type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException + type UnsupportedOperationException = java.lang.UnsupportedOperationException + type IllegalArgumentException = java.lang.IllegalArgumentException + type NoSuchElementException = java.util.NoSuchElementException + type NumberFormatException = java.lang.NumberFormatException + type AbstractMethodError = java.lang.AbstractMethodError + type InterruptedException = java.lang.InterruptedException + + // A dummy used by the specialization annotation. + lazy val AnyRef = new Specializable { + override def toString = "object AnyRef" + } + + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+A] = scala.collection.IterableOnce[A] + + type IterableOnce[+A] = scala.collection.IterableOnce[A] + + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+A] = scala.collection.Iterable[A] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + lazy val Traversable = scala.collection.Iterable + + type Iterable[+A] = scala.collection.Iterable[A] + lazy val Iterable = scala.collection.Iterable + + @migration("scala.Seq is now scala.collection.immutable.Seq instead of scala.collection.Seq", "2.13.0") + type Seq[+A] = scala.collection.immutable.Seq[A] + lazy val Seq = scala.collection.immutable.Seq + + @migration("scala.IndexedSeq is now scala.collection.immutable.IndexedSeq instead of scala.collection.IndexedSeq", "2.13.0") + type IndexedSeq[+A] = scala.collection.immutable.IndexedSeq[A] + lazy val IndexedSeq = scala.collection.immutable.IndexedSeq + + type Iterator[+A] = scala.collection.Iterator[A] + lazy val Iterator = scala.collection.Iterator + + @deprecated("Use scala.collection.BufferedIterator instead of scala.BufferedIterator", "2.13.0") + type BufferedIterator[+A] = scala.collection.BufferedIterator[A] + + type List[+A] = scala.collection.immutable.List[A] + lazy val List = scala.collection.immutable.List + + lazy val Nil = scala.collection.immutable.Nil + + type ::[+A] = scala.collection.immutable.::[A] + lazy val :: = scala.collection.immutable.:: + + lazy val +: = scala.collection.+: + lazy val :+ = scala.collection.:+ + + @deprecated("Use LazyList instead of Stream", "2.13.0") + type Stream[+A] = scala.collection.immutable.Stream[A] + @deprecated("Use LazyList instead of Stream", "2.13.0") + lazy val Stream = scala.collection.immutable.Stream + + type LazyList[+A] = scala.collection.immutable.LazyList[A] + lazy val LazyList = scala.collection.immutable.LazyList + // This should be an alias to LazyList.#:: but we need to support Stream, too + //lazy val #:: = scala.collection.immutable.LazyList.#:: + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (s.nonEmpty) Some((s.head, s.tail)) else None + @deprecated("Prefer LazyList instead", since = "2.13.0") + def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = + if (s.nonEmpty) 
Some((s.head, s.tail)) else None + } + + type Vector[+A] = scala.collection.immutable.Vector[A] + lazy val Vector = scala.collection.immutable.Vector + + type StringBuilder = scala.collection.mutable.StringBuilder + lazy val StringBuilder = scala.collection.mutable.StringBuilder + + type Range = scala.collection.immutable.Range + lazy val Range = scala.collection.immutable.Range + + // Numeric types which were moved into scala.math.* + + type BigDecimal = scala.math.BigDecimal + lazy val BigDecimal = scala.math.BigDecimal + + type BigInt = scala.math.BigInt + lazy val BigInt = scala.math.BigInt + + type Equiv[T] = scala.math.Equiv[T] + lazy val Equiv = scala.math.Equiv + + type Fractional[T] = scala.math.Fractional[T] + lazy val Fractional = scala.math.Fractional + + type Integral[T] = scala.math.Integral[T] + lazy val Integral = scala.math.Integral + + type Numeric[T] = scala.math.Numeric[T] + lazy val Numeric = scala.math.Numeric + + type Ordered[T] = scala.math.Ordered[T] + lazy val Ordered = scala.math.Ordered + + type Ordering[T] = scala.math.Ordering[T] + lazy val Ordering = scala.math.Ordering + + type PartialOrdering[T] = scala.math.PartialOrdering[T] + type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T] + + type Either[+A, +B] = scala.util.Either[A, B] + lazy val Either = scala.util.Either + + type Left[+A, +B] = scala.util.Left[A, B] + lazy val Left = scala.util.Left + + type Right[+A, +B] = scala.util.Right[A, B] + lazy val Right = scala.util.Right + +} From 7c7d87542d4cbadf04cfba32dcf0ec838f7aa594 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 19:01:08 +0100 Subject: [PATCH 63/75] Remove remaining usages of JavaConverters in tools --- .../scala/scala/scalanative/build/Discover.scala | 16 +++++++--------- .../main/scala/scala/scalanative/build/IO.scala | 10 +++++++--- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/tools/src/main/scala/scala/scalanative/build/Discover.scala b/tools/src/main/scala/scala/scalanative/build/Discover.scala index 0ca8cab1a7..7e8bc3da5e 100644 --- a/tools/src/main/scala/scala/scalanative/build/Discover.scala +++ b/tools/src/main/scala/scala/scalanative/build/Discover.scala @@ -2,7 +2,6 @@ package scala.scalanative package build import java.nio.file.{Files, Path, Paths} -import scala.collection.JavaConverters._ import scala.util.Try import scala.sys.process._ import scalanative.build.IO.RichPath @@ -83,14 +82,13 @@ object Discover { if (exit != 0) { fail } else { - Files - .readAllLines(targetll) - .asScala - .collectFirst { - case line if line.startsWith("target triple") => - line.split("\"").apply(1) - } - .getOrElse(fail) + val linesIter = Files.readAllLines(targetll).iterator() + while (linesIter.hasNext()) { + val line = linesIter.next() + if (line.startsWith("target triple")) + return line.split("\"").apply(1) + } + fail } } diff --git a/tools/src/main/scala/scala/scalanative/build/IO.scala b/tools/src/main/scala/scala/scalanative/build/IO.scala index 55fb49b4b5..1961a53ba4 100644 --- a/tools/src/main/scala/scala/scalanative/build/IO.scala +++ b/tools/src/main/scala/scala/scalanative/build/IO.scala @@ -13,6 +13,7 @@ import java.nio.file.{ } import java.nio.file.attribute.BasicFileAttributes import java.util.EnumSet +import java.util.zip.{ZipEntry, ZipInputStream} import java.security.{DigestInputStream, MessageDigest} /** Internal I/O utilities. 
*/ @@ -99,10 +100,13 @@ private[scalanative] object IO { /** Look for a zip entry path string using a matcher function */ def existsInJar(path: Path, matcher: String => Boolean): Boolean = { import java.util.zip.ZipFile - import scala.collection.JavaConverters._ val zf = new ZipFile(path.toFile) - val it = zf.entries().asScala - it.exists(e => matcher(e.getName)) + val it = zf.entries() + while (it.hasMoreElements()) { + if (matcher(it.nextElement().getName())) + return true + } + false } /** Deletes recursively `directory` and all its content. */ From a6c00a51dac8dcf0c68bc4e7cc3e7b20b8fdb42e Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 19:24:57 +0100 Subject: [PATCH 64/75] Remove laziness of Vector if `scala` package --- scalalib/overrides-2.13/scala/Predef.scala | 8 ++++---- scalalib/overrides-2.13/scala/package.scala | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/scalalib/overrides-2.13/scala/Predef.scala b/scalalib/overrides-2.13/scala/Predef.scala index bb2f83e47a..d540f0424b 100644 --- a/scalalib/overrides-2.13/scala/Predef.scala +++ b/scalalib/overrides-2.13/scala/Predef.scala @@ -160,9 +160,9 @@ object Predef extends LowPriorityImplicits { /** @group aliases */ type Set[A] = immutable.Set[A] /** @group aliases */ - @inline def Map = immutable.Map + @inline def Map = immutable.Map /** @group aliases */ - @inline def Set = immutable.Set + @inline def Set = immutable.Set /** * Allows destructuring tuples with the same syntax as constructing them. @@ -188,10 +188,10 @@ object Predef extends LowPriorityImplicits { type Manifest[T] = scala.reflect.Manifest[T] // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - @inline def Manifest = scala.reflect.Manifest + @inline def Manifest = scala.reflect.Manifest // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - @inline def NoManifest = scala.reflect.NoManifest + @inline def NoManifest = scala.reflect.NoManifest // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") diff --git a/scalalib/overrides-2.13/scala/package.scala b/scalalib/overrides-2.13/scala/package.scala index f86718e02b..1f3c633d22 100644 --- a/scalalib/overrides-2.13/scala/package.scala +++ b/scalalib/overrides-2.13/scala/package.scala @@ -97,8 +97,9 @@ package object scala { if (s.nonEmpty) Some((s.head, s.tail)) else None } + // It cannot be lazy, because of Vector <-> System.properties cyclic dependency in SN implementation type Vector[+A] = scala.collection.immutable.Vector[A] - lazy val Vector = scala.collection.immutable.Vector + val Vector = scala.collection.immutable.Vector type StringBuilder = scala.collection.mutable.StringBuilder lazy val StringBuilder = scala.collection.mutable.StringBuilder From 4ad48f5d435e51b52a07c96dd6e625fbca30c36c Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 19:26:08 +0100 Subject: [PATCH 65/75] Remove laziness of System.systemProperties, it was fixed by scalalib 2.13 overrides --- javalib/src/main/scala/java/lang/System.scala | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index 207edb3020..b6f2fb0f69 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,11 +91,7 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) - /* Laziness for this val was enforced due to changes in Scala 2.13 Vector implementation - * Vector uses system property to define some of its default parameters and to allow user to tune it. - * This problem exists because current implementation of java.lang.System depends on Scala collections, - * this problem should be addressed in the future */ - private lazy val systemProperties = loadProperties() + private val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = systemProperties.setProperty(fromCString(key), fromCString(value)) From 3dcbf8ee16fc233181bffba89d3243fc9b2dd7d4 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Wed, 11 Nov 2020 23:58:18 +0100 Subject: [PATCH 66/75] Fix empty code dumps --- nir/src/main/scala/scala/scalanative/nir/Show.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nir/src/main/scala/scala/scalanative/nir/Show.scala b/nir/src/main/scala/scala/scalanative/nir/Show.scala index 77bd97c185..c972164143 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Show.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Show.scala @@ -41,8 +41,8 @@ object Show { val pw = new java.io.PrintWriter(fileName) try { - defns - .filterNot(_ != null) + defns.view + .filter(_ != null) .sortBy(_.name) .foreach { defn => pw.write(defn.show) From 95895520634cad1c06ba8ed83a636465a789ebbb Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 09:48:01 +0100 Subject: [PATCH 67/75] Don't use view in Show.dump. 
It does not contains sortBy method in 2.13 --- nir/src/main/scala/scala/scalanative/nir/Show.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nir/src/main/scala/scala/scalanative/nir/Show.scala b/nir/src/main/scala/scala/scalanative/nir/Show.scala index c972164143..8c27a93d5b 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Show.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Show.scala @@ -41,7 +41,7 @@ object Show { val pw = new java.io.PrintWriter(fileName) try { - defns.view + defns .filter(_ != null) .sortBy(_.name) .foreach { defn => From 7ab5ba49b8f724019edff3be2cf0d0f53601ced5 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 11:51:04 +0100 Subject: [PATCH 68/75] Port 2.13 scala.reflect overrides from scala.js --- .../scala/reflect/ClassTag.scala | 142 ++++++ .../scala/reflect/Manifest.scala | 463 ++++++++++++++++++ 2 files changed, 605 insertions(+) create mode 100644 scalalib/overrides-2.13/scala/reflect/ClassTag.scala create mode 100644 scalalib/overrides-2.13/scala/reflect/Manifest.scala diff --git a/scalalib/overrides-2.13/scala/reflect/ClassTag.scala b/scalalib/overrides-2.13/scala/reflect/ClassTag.scala new file mode 100644 index 0000000000..c43360e8d2 --- /dev/null +++ b/scalalib/overrides-2.13/scala/reflect/ClassTag.scala @@ -0,0 +1,142 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package reflect + +import java.lang.{Class => jClass} + +import scala.annotation.{implicitNotFound, nowarn} + +/** + * + * A `ClassTag[T]` stores the erased class of a given type `T`, accessible via the `runtimeClass` + * field. This is particularly useful for instantiating `Array`s whose element types are unknown + * at compile time. + * + * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags.TypeTag]]s, in that they + * wrap only the runtime class of a given type, whereas a `TypeTag` contains all static type + * information. That is, `ClassTag`s are constructed from knowing only the top-level class of a + * type, without necessarily knowing all of its argument types. This runtime information is enough + * for runtime `Array` creation. + * + * For example: + * {{{ + * scala> def mkArray[T : ClassTag](elems: T*) = Array[T](elems: _*) + * mkArray: [T](elems: T*)(implicit evidence\$1: scala.reflect.ClassTag[T])Array[T] + * + * scala> mkArray(42, 13) + * res0: Array[Int] = Array(42, 13) + * + * scala> mkArray("Japan","Brazil","Germany") + * res1: Array[String] = Array(Japan, Brazil, Germany) + * }}} + * + * See [[scala.reflect.api.TypeTags]] for more examples, or the + * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] + * for more details. + * + */ +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifestDeprecatedApis""") +@implicitNotFound(msg = "No ClassTag available for ${T}") +trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable { + // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder` + // class tags, and all tags in general, should be as minimalistic as possible + + /** A class representing the type `U` to which `T` would be erased. + * Note that there is no subtyping relationship between `T` and `U`. 
+ */ + def runtimeClass: jClass[_] + + /** Produces a `ClassTag` that knows how to instantiate an `Array[Array[T]]` */ + def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) + + /** Produces a new array with element type `T` and length `len` */ + def newArray(len: Int): Array[T] = + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + + /** A ClassTag[T] can serve as an extractor that matches only objects of type T. + * + * The compiler tries to turn unchecked type tests in pattern matches into checked ones + * by wrapping a `(_: T)` type pattern as `ct(_: T)`, where `ct` is the `ClassTag[T]` instance. + * Type tests necessary before calling other extractors are treated similarly. + * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)` + * is uncheckable, but we have an instance of `ClassTag[T]`. + */ + def unapply(x: Any): Option[T] = + if (runtimeClass.isInstance(x)) Some(x.asInstanceOf[T]) + else None + + // case class accessories + override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]] + override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass + override def hashCode = runtimeClass.## + override def toString = { + def prettyprint(clazz: jClass[_]): String = + if (clazz.isArray) s"Array[${prettyprint(clazz.getComponentType)}]" else + clazz.getName + prettyprint(runtimeClass) + } +} + +/** + * Class tags corresponding to primitive types and constructor/extractor for ClassTags. + */ +object ClassTag { + import ManifestFactory._ + + @inline def Byte : ByteManifest = Manifest.Byte + @inline def Short : ShortManifest = Manifest.Short + @inline def Char : CharManifest = Manifest.Char + @inline def Int : IntManifest = Manifest.Int + @inline def Long : LongManifest = Manifest.Long + @inline def Float : FloatManifest = Manifest.Float + @inline def Double : DoubleManifest = Manifest.Double + @inline def Boolean : BooleanManifest = Manifest.Boolean + @inline def Unit : UnitManifest = Manifest.Unit + @inline def Any : ClassTag[scala.Any] = Manifest.Any + @inline def Object : ClassTag[java.lang.Object] = Manifest.Object + @inline def AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal + @inline def AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef + @inline def Nothing : ClassTag[scala.Nothing] = Manifest.Nothing + @inline def Null : ClassTag[scala.Null] = Manifest.Null + + @SerialVersionUID(1L) + private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] { + override def newArray(len: Int): Array[T] = { + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + } + } + + def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = + runtimeClass1 match { + case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]] + case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]] + case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]] + case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]] + case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]] + case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]] + case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]] + case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]] + case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]] + case _ => + if (classOf[java.lang.Object] == runtimeClass1) + 
ClassTag.Object.asInstanceOf[ClassTag[T]] + else if (classOf[scala.runtime.Nothing$] == runtimeClass1) + ClassTag.Nothing.asInstanceOf[ClassTag[T]] + else if (classOf[scala.runtime.Null$] == runtimeClass1) + ClassTag.Null.asInstanceOf[ClassTag[T]] + else new GenericClassTag[T](runtimeClass1) + } + + def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass) +} \ No newline at end of file diff --git a/scalalib/overrides-2.13/scala/reflect/Manifest.scala b/scalalib/overrides-2.13/scala/reflect/Manifest.scala new file mode 100644 index 0000000000..39580741cf --- /dev/null +++ b/scalalib/overrides-2.13/scala/reflect/Manifest.scala @@ -0,0 +1,463 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package reflect + +import scala.collection.mutable.{ArrayBuilder, ArraySeq} + +/** A `Manifest[T]` is an opaque descriptor for type T. Its supported use + * is to give access to the erasure of the type as a `Class` instance, as + * is necessary for the creation of native `Arrays` if the class is not + * known at compile time. + * + * The type-relation operators `<:<` and `=:=` should be considered + * approximations only, as there are numerous aspects of type conformance + * which are not yet adequately represented in manifests. + * + * Example usages: + * {{{ + * def arr[T] = new Array[T](0) // does not compile + * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles + * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding + * + * // Methods manifest and optManifest are in [[scala.Predef]]. + * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] + * isApproxSubType[List[String], List[AnyRef]] // true + * isApproxSubType[List[String], List[Int]] // false + * + * def methods[T: Manifest] = manifest[T].runtimeClass.getMethods + * def retType[T: Manifest](name: String) = + * methods[T] find (_.getName == name) map (_.getGenericReturnType) + * + * retType[Map[_, _]]("values") // Some(scala.collection.Iterable) + * }}} + */ +@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") +trait Manifest[T] extends ClassManifest[T] with Equals { + override def typeArguments: List[Manifest[_]] = Nil + + override def arrayManifest: Manifest[Array[T]] = + Manifest.classType[Array[T]](arrayClass[T](runtimeClass), this) + + override def canEqual(that: Any): Boolean = that match { + case _: Manifest[_] => true + case _ => false + } + /** Note: testing for erasure here is important, as it is many times + * faster than <:< and rules out most comparisons. + */ + override def equals(that: Any): Boolean = that match { + case m: Manifest[_] => (m canEqual this) && (this.runtimeClass == m.runtimeClass) && (this <:< m) && (m <:< this) + case _ => false + } + override def hashCode = this.runtimeClass.## +} + +/** The object `Manifest` defines factory methods for manifests. + * It is intended for use by the compiler and should not be used in client code. 
+ */ +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") +object Manifest { + /* Forward all the public members of ManifestFactory, since this object used + * to be a `private val Manifest = ManifestFactory` in the package object. It + * was moved here because it needs to be in the same file as `trait Manifest` + * defined above. + */ + + def valueManifests: List[AnyValManifest[_]] = + ManifestFactory.valueManifests + + @inline def Byte: ManifestFactory.ByteManifest = ManifestFactory.Byte + @inline def Short: ManifestFactory.ShortManifest = ManifestFactory.Short + @inline def Char: ManifestFactory.CharManifest = ManifestFactory.Char + @inline def Int: ManifestFactory.IntManifest = ManifestFactory.Int + @inline def Long: ManifestFactory.LongManifest = ManifestFactory.Long + @inline def Float: ManifestFactory.FloatManifest = ManifestFactory.Float + @inline def Double: ManifestFactory.DoubleManifest = ManifestFactory.Double + @inline def Boolean: ManifestFactory.BooleanManifest = ManifestFactory.Boolean + @inline def Unit: ManifestFactory.UnitManifest = ManifestFactory.Unit + + @inline def Any: Manifest[scala.Any] = ManifestFactory.Any + @inline def Object: Manifest[java.lang.Object] = ManifestFactory.Object + @inline def AnyRef: Manifest[scala.AnyRef] = ManifestFactory.AnyRef + @inline def AnyVal: Manifest[scala.AnyVal] = ManifestFactory.AnyVal + @inline def Null: Manifest[scala.Null] = ManifestFactory.Null + @inline def Nothing: Manifest[scala.Nothing] = ManifestFactory.Nothing + + /** Manifest for the singleton type `value.type`. */ + def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = + ManifestFactory.singleType[T](value) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. + * @note This no-prefix, no-arguments case is separate because we + * it's called from ScalaRunTime.boxArray itself. If we + * pass varargs as arrays into this, we get an infinitely recursive call + * to boxArray. (Besides, having a separate case is more efficient) + */ + def classType[T](clazz: Predef.Class[_]): Manifest[T] = + ManifestFactory.classType[T](clazz) + + /** Manifest for the class type `clazz`, where `clazz` is + * a top-level or static class and args are its type arguments. */ + def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.classType[T](clazz, arg1, args: _*) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a class with non-package prefix type `prefix` and type arguments `args`. + */ + def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.classType[T](prefix, clazz, args: _*) + + def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = + ManifestFactory.arrayType[T](arg) + + /** Manifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. */ + def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.abstractType[T](prefix, name, upperBound, args: _*) + + /** Manifest for the unknown type `_ >: L <: U` in an existential. 
*/ + def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = + ManifestFactory.wildcardType[T](lowerBound, upperBound) + + /** Manifest for the intersection type `parents_0 with ... with parents_n`. */ + def intersectionType[T](parents: Manifest[_]*): Manifest[T] = + ManifestFactory.intersectionType[T](parents: _*) + +} + +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0") +@SerialVersionUID(1L) +abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { + override def <:<(that: ClassManifest[_]): Boolean = + (that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal) + override def canEqual(other: Any) = other match { + case _: AnyValManifest[_] => true + case _ => false + } + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + @transient + override val hashCode = System.identityHashCode(this) +} + +/** `ManifestFactory` defines factory methods for manifests. + * It is intended for use by the compiler and should not be used in client code. + * + * Unlike `Manifest`, this factory isn't annotated with a deprecation warning. + * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. + * Why so complicated? Read up the comments for `ClassManifestFactory`. + */ +object ManifestFactory { + def valueManifests: List[AnyValManifest[_]] = + List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit) + + @SerialVersionUID(1L) + private[reflect] class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { + def runtimeClass = java.lang.Byte.TYPE + @inline override def newArray(len: Int): Array[Byte] = new Array[Byte](len) + override def newWrappedArray(len: Int): ArraySeq[Byte] = new ArraySeq.ofByte(new Array[Byte](len)) + override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() + override def unapply(x: Any): Option[Byte] = { + x match { + case d: Byte => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Byte + } + private object ByteManifest extends ByteManifest + def Byte: ByteManifest = ByteManifest + + @SerialVersionUID(1L) + private[reflect] class ShortManifest extends AnyValManifest[scala.Short]("Short") { + def runtimeClass = java.lang.Short.TYPE + @inline override def newArray(len: Int): Array[Short] = new Array[Short](len) + override def newWrappedArray(len: Int): ArraySeq[Short] = new ArraySeq.ofShort(new Array[Short](len)) + override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() + override def unapply(x: Any): Option[Short] = { + x match { + case d: Short => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Short + } + private object ShortManifest extends ShortManifest + def Short: ShortManifest = ShortManifest + + @SerialVersionUID(1L) + private[reflect] class CharManifest extends AnyValManifest[scala.Char]("Char") { + def runtimeClass = java.lang.Character.TYPE + @inline override def newArray(len: Int): Array[Char] = new Array[Char](len) + override def newWrappedArray(len: Int): ArraySeq[Char] = new ArraySeq.ofChar(new Array[Char](len)) + override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() + override def unapply(x: Any): Option[Char] = { + x match { + case d: Char => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Char + } + private object 
CharManifest extends CharManifest + def Char: CharManifest = CharManifest + + @SerialVersionUID(1L) + private[reflect] class IntManifest extends AnyValManifest[scala.Int]("Int") { + def runtimeClass = java.lang.Integer.TYPE + @inline override def newArray(len: Int): Array[Int] = new Array[Int](len) + override def newWrappedArray(len: Int): ArraySeq[Int] = new ArraySeq.ofInt(new Array[Int](len)) + override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() + override def unapply(x: Any): Option[Int] = { + x match { + case d: Int => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Int + } + private object IntManifest extends IntManifest + def Int: IntManifest = IntManifest + + @SerialVersionUID(1L) + private[reflect] class LongManifest extends AnyValManifest[scala.Long]("Long") { + def runtimeClass = java.lang.Long.TYPE + @inline override def newArray(len: Int): Array[Long] = new Array[Long](len) + override def newWrappedArray(len: Int): ArraySeq[Long] = new ArraySeq.ofLong(new Array[Long](len)) + override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() + override def unapply(x: Any): Option[Long] = { + x match { + case d: Long => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Long + } + private object LongManifest extends LongManifest + def Long: LongManifest = LongManifest + + @SerialVersionUID(1L) + private[reflect] class FloatManifest extends AnyValManifest[scala.Float]("Float") { + def runtimeClass = java.lang.Float.TYPE + @inline override def newArray(len: Int): Array[Float] = new Array[Float](len) + override def newWrappedArray(len: Int): ArraySeq[Float] = new ArraySeq.ofFloat(new Array[Float](len)) + override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() + override def unapply(x: Any): Option[Float] = { + x match { + case d: Float => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Float + } + private object FloatManifest extends FloatManifest + def Float: FloatManifest = FloatManifest + + @SerialVersionUID(1L) + private[reflect] class DoubleManifest extends AnyValManifest[scala.Double]("Double") { + def runtimeClass = java.lang.Double.TYPE + @inline override def newArray(len: Int): Array[Double] = new Array[Double](len) + override def newWrappedArray(len: Int): ArraySeq[Double] = new ArraySeq.ofDouble(new Array[Double](len)) + override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() + + override def unapply(x: Any): Option[Double] = { + x match { + case d: Double => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Double + } + private object DoubleManifest extends DoubleManifest + def Double: DoubleManifest = DoubleManifest + + @SerialVersionUID(1L) + private[reflect] class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { + def runtimeClass = java.lang.Boolean.TYPE + @inline override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) + override def newWrappedArray(len: Int): ArraySeq[Boolean] = new ArraySeq.ofBoolean(new Array[Boolean](len)) + override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() + override def unapply(x: Any): Option[Boolean] = { + x match { + case d: Boolean => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Boolean + } + private object BooleanManifest extends BooleanManifest + def Boolean: BooleanManifest = BooleanManifest + + @SerialVersionUID(1L) + private[reflect] 
class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { + def runtimeClass = java.lang.Void.TYPE + @inline override def newArray(len: Int): Array[Unit] = new Array[Unit](len) + override def newWrappedArray(len: Int): ArraySeq[Unit] = new ArraySeq.ofUnit(new Array[Unit](len)) + override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit() + override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] = + if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]] + else super.arrayClass(tp) + override def unapply(x: Any): Option[Unit] = { + x match { + case d: Unit => Some(d) + case _ => None + } + } + private def readResolve(): Any = Manifest.Unit + } + private object UnitManifest extends UnitManifest + def Unit: UnitManifest = UnitManifest + + private object AnyManifest extends PhantomManifest[scala.Any](classOf[java.lang.Object], "Any") { + override def runtimeClass = classOf[java.lang.Object] + override def newArray(len: Int) = new Array[scala.Any](len) + override def <:<(that: ClassManifest[_]): Boolean = (that eq this) + private def readResolve(): Any = Manifest.Any + } + def Any: Manifest[scala.Any] = AnyManifest + + private object ObjectManifest extends PhantomManifest[java.lang.Object](classOf[java.lang.Object], "Object") { + override def runtimeClass = classOf[java.lang.Object] + override def newArray(len: Int) = new Array[java.lang.Object](len) + override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) + private def readResolve(): Any = Manifest.Object + } + def Object: Manifest[java.lang.Object] = ObjectManifest + + def AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] + + private object AnyValManifest extends PhantomManifest[scala.AnyVal](classOf[java.lang.Object], "AnyVal") { + override def runtimeClass = classOf[java.lang.Object] + override def newArray(len: Int) = new Array[scala.AnyVal](len) + override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) + private def readResolve(): Any = Manifest.AnyVal + } + def AnyVal: Manifest[scala.AnyVal] = AnyValManifest + + private object NullManifest extends PhantomManifest[scala.Null](classOf[scala.runtime.Null$], "Null") { + override def runtimeClass = classOf[scala.runtime.Null$] + override def newArray(len: Int) = new Array[scala.Null](len) + override def <:<(that: ClassManifest[_]): Boolean = + (that ne null) && (that ne Nothing) && !(that <:< AnyVal) + private def readResolve(): Any = Manifest.Null + } + def Null: Manifest[scala.Null] = NullManifest + + private object NothingManifest extends PhantomManifest[scala.Nothing](classOf[scala.runtime.Nothing$], "Nothing") { + override def runtimeClass = classOf[scala.runtime.Nothing$] + override def newArray(len: Int) = new Array[scala.Nothing](len) + override def <:<(that: ClassManifest[_]): Boolean = (that ne null) + private def readResolve(): Any = Manifest.Nothing + } + def Nothing: Manifest[scala.Nothing] = NothingManifest + + @SerialVersionUID(1L) + private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { + lazy val runtimeClass = value.getClass + override lazy val toString = value.toString + ".type" + } + + /** Manifest for the singleton type `value.type`. */ + def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = + new SingletonTypeManifest[T](value) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. 
+ * @note This no-prefix, no-arguments case is separate because we + * it's called from ScalaRunTime.boxArray itself. If we + * pass varargs as arrays into this, we get an infinitely recursive call + * to boxArray. (Besides, having a separate case is more efficient) + */ + def classType[T](clazz: Predef.Class[_]): Manifest[T] = + new ClassTypeManifest[T](None, clazz, Nil) + + /** Manifest for the class type `clazz`, where `clazz` is + * a top-level or static class and args are its type arguments. */ + def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = + new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a class with non-package prefix type `prefix` and type arguments `args`. + */ + def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + new ClassTypeManifest[T](Some(prefix), clazz, args.toList) + + @SerialVersionUID(1L) + private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_], + override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) { + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + @transient + override val hashCode = System.identityHashCode(this) + } + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. */ + @SerialVersionUID(1L) + private class ClassTypeManifest[T](prefix: Option[Manifest[_]], + runtimeClass1: Predef.Class[_], + override val typeArguments: List[Manifest[_]]) extends Manifest[T] { + def runtimeClass: Predef.Class[_] = runtimeClass1 + override def toString = + (if (prefix.isEmpty) "" else prefix.get.toString+"#") + + (if (runtimeClass.isArray) "Array" else runtimeClass.getName) + + argString + } + + def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = + arg.asInstanceOf[Manifest[T]].arrayManifest + + @SerialVersionUID(1L) + private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: scala.collection.Seq[Manifest[_]]) extends Manifest[T] { + def runtimeClass = upperBound + override val typeArguments = args.toList + override def toString = prefix.toString+"#"+name+argString + } + + /** Manifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. */ + def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + new AbstractTypeManifest[T](prefix, name, upperBound, args) + + @SerialVersionUID(1L) + private class WildcardManifest[T](lowerBound: Manifest[_], upperBound: Manifest[_]) extends Manifest[T] { + def runtimeClass = upperBound.runtimeClass + override def toString = + "_" + + (if (lowerBound eq Nothing) "" else " >: "+lowerBound) + + (if (upperBound eq Nothing) "" else " <: "+upperBound) + } + + /** Manifest for the unknown type `_ >: L <: U` in an existential. + */ + def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = + new WildcardManifest[T](lowerBound, upperBound) + + @SerialVersionUID(1L) + private class IntersectionTypeManifest[T](parents: Array[Manifest[_]]) extends Manifest[T] { + // We use an `Array` instead of a `Seq` for `parents` to avoid cyclic dependencies during deserialization + // which can cause serialization proxies to leak and cause a ClassCastException. 
+ def runtimeClass = parents(0).runtimeClass + override def toString = parents.mkString(" with ") + } + + /** Manifest for the intersection type `parents_0 with ... with parents_n`. */ + def intersectionType[T](parents: Manifest[_]*): Manifest[T] = + new IntersectionTypeManifest[T](parents.toArray) +} From 5d5afa1b25f93bb2c6e8474784c7b3513ae404fc Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 11:51:46 +0100 Subject: [PATCH 69/75] Revert "Remove laziness of System.systemProperties, it was fixed by scalalib 2.13 overrides" This reverts commit 4ad48f5d --- javalib/src/main/scala/java/lang/System.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index b6f2fb0f69..207edb3020 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,7 +91,11 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) - private val systemProperties = loadProperties() + /* Laziness for this val was enforced due to changes in Scala 2.13 Vector implementation + * Vector uses system property to define some of its default parameters and to allow user to tune it. + * This problem exists because current implementation of java.lang.System depends on Scala collections, + * this problem should be addressed in the future */ + private lazy val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = systemProperties.setProperty(fromCString(key), fromCString(value)) From 373b22d7db1c0cbf05e05dd395b24a5ab0729f68 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 14:30:35 +0100 Subject: [PATCH 70/75] Removed no longer need libDependency in project/build.sbt --- project/build.sbt | 2 -- 1 file changed, 2 deletions(-) diff --git a/project/build.sbt b/project/build.sbt index eb617ae792..63edd48246 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -11,8 +11,6 @@ Compile / unmanagedSourceDirectories ++= { ).map(dir => root / s"$dir/src/main/scala") } -libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0" - addSbtPlugin("org.portable-scala" % "sbt-platform-deps" % "1.0.0") addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.4") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") From a8306f4e4cbd8d5b54404f0546335d64a3cf1ce6 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 14:33:17 +0100 Subject: [PATCH 71/75] Styling fixes in javalib --- javalib/src/main/scala/java/io/BufferedOutputStream.scala | 2 +- javalib/src/main/scala/java/math/BigDecimal.scala | 3 +-- javalib/src/main/scala/java/net/PlainSocketImpl.scala | 4 ++-- .../scala/java/nio/file/attribute/PosixFilePermissions.scala | 4 ++-- javalib/src/main/scala/java/util/AbstractCollection.scala | 2 +- javalib/src/main/scala/java/util/AbstractMap.scala | 4 ++-- javalib/src/main/scala/java/util/Arrays.scala | 1 - javalib/src/main/scala/java/util/Hashtable.scala | 5 ++--- .../src/main/scala/java/util/stream/WrappedScalaStream.scala | 2 +- 9 files changed, 12 insertions(+), 15 deletions(-) diff --git a/javalib/src/main/scala/java/io/BufferedOutputStream.scala b/javalib/src/main/scala/java/io/BufferedOutputStream.scala index ad466db49b..9cdfdf1936 100644 --- a/javalib/src/main/scala/java/io/BufferedOutputStream.scala +++ b/javalib/src/main/scala/java/io/BufferedOutputStream.scala @@ 
-73,7 +73,7 @@ class BufferedOutputStream(out: OutputStream, size: Int) count += 1 } - private def flushInternal() = { + private def flushInternal(): Unit = { if (count > 0) { out.write(buf, 0, count) count = 0 diff --git a/javalib/src/main/scala/java/math/BigDecimal.scala b/javalib/src/main/scala/java/math/BigDecimal.scala index e77a23c3da..1c33f0de03 100644 --- a/javalib/src/main/scala/java/math/BigDecimal.scala +++ b/javalib/src/main/scala/java/math/BigDecimal.scala @@ -202,9 +202,8 @@ object BigDecimal { else 0 } - private[math] def newArrayOfPows(len: Int, pow: Int): Array[Long] = { + private[math] def newArrayOfPows(len: Int, pow: Int): Array[Long] = new Array[Long](len - 1).scanLeft(1L)((z, e) => z * pow) - } /** Return an increment that can be -1,0 or 1, depending on {@code roundingMode}. * diff --git a/javalib/src/main/scala/java/net/PlainSocketImpl.scala b/javalib/src/main/scala/java/net/PlainSocketImpl.scala index 289979c7a9..381b8badcc 100644 --- a/javalib/src/main/scala/java/net/PlainSocketImpl.scala +++ b/javalib/src/main/scala/java/net/PlainSocketImpl.scala @@ -102,8 +102,8 @@ private[net] class PlainSocketImpl extends SocketImpl { this.localport = fetchLocalPort(family).getOrElse { throw new BindException( - "Couldn't bind to address: " + addr - .getHostAddress() + " on port: " + port) + "Couldn't bind to address: " + addr.getHostAddress() + + " on port: " + port) } } diff --git a/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala b/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala index 0a9eaa9778..d37bab1f5b 100644 --- a/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala +++ b/javalib/src/main/scala/java/nio/file/attribute/PosixFilePermissions.scala @@ -6,8 +6,8 @@ object PosixFilePermissions { def asFileAttribute(perms: Set[PosixFilePermission]) : FileAttribute[Set[PosixFilePermission]] = new FileAttribute[Set[PosixFilePermission]] { - override def name(): String = "posix:permissions" - override def value: Set[PosixFilePermission] = perms + override def name(): String = "posix:permissions" + override def value(): Set[PosixFilePermission] = perms } def fromString(perms: String): Set[PosixFilePermission] = diff --git a/javalib/src/main/scala/java/util/AbstractCollection.scala b/javalib/src/main/scala/java/util/AbstractCollection.scala index f0978bb5fd..abbb852cde 100644 --- a/javalib/src/main/scala/java/util/AbstractCollection.scala +++ b/javalib/src/main/scala/java/util/AbstractCollection.scala @@ -25,7 +25,7 @@ abstract class AbstractCollection[E] protected () extends Collection[E] { .asInstanceOf[Array[T]] val iter = iterator() - for (i <- Range(0, size())) toFill(i) = iter.next().asInstanceOf[T] + for (i <- 0 until size()) toFill(i) = iter.next().asInstanceOf[T] if (toFill.length > size()) toFill(size()) = null.asInstanceOf[T] toFill diff --git a/javalib/src/main/scala/java/util/AbstractMap.scala b/javalib/src/main/scala/java/util/AbstractMap.scala index 6662f24e3b..1b42e4a958 100644 --- a/javalib/src/main/scala/java/util/AbstractMap.scala +++ b/javalib/src/main/scala/java/util/AbstractMap.scala @@ -49,7 +49,7 @@ object AbstractMap { entryHashCode(this) override def toString(): String = - getKey().toString + "=" + getValue() + "" + getKey() + "=" + getValue() } class SimpleImmutableEntry[K, V](key: K, value: V) @@ -73,7 +73,7 @@ object AbstractMap { entryHashCode(this) override def toString(): String = - getKey().toString + "=" + getValue() + "" + getKey() + "=" + getValue() } } diff --git 
a/javalib/src/main/scala/java/util/Arrays.scala b/javalib/src/main/scala/java/util/Arrays.scala index 786d192a35..9994ae4a5d 100644 --- a/javalib/src/main/scala/java/util/Arrays.scala +++ b/javalib/src/main/scala/java/util/Arrays.scala @@ -972,5 +972,4 @@ object Arrays { } } } - } diff --git a/javalib/src/main/scala/java/util/Hashtable.scala b/javalib/src/main/scala/java/util/Hashtable.scala index d6a60009f9..a10dbf0041 100644 --- a/javalib/src/main/scala/java/util/Hashtable.scala +++ b/javalib/src/main/scala/java/util/Hashtable.scala @@ -29,8 +29,7 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) def isEmpty(): Boolean = inner.isEmpty - def keys(): ju.Enumeration[K] = - Collections.enumeration(keySet()) + def keys(): ju.Enumeration[K] = Collections.enumeration(keySet()) def elements(): ju.Enumeration[V] = Collections.enumeration(values()) @@ -83,7 +82,7 @@ class Hashtable[K, V] private (inner: mutable.HashMap[Box[Any], V]) override def toString(): String = inner.iterator - .map(kv => kv._1.inner.toString + "=" + kv._2) + .map(kv => "" + kv._1.inner + "=" + kv._2) .mkString("{", ", ", "}") def keySet(): ju.Set[K] = { diff --git a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala index 00d90c5681..25b79e76a2 100644 --- a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala +++ b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala @@ -52,7 +52,7 @@ private final class CompositeStream[T](substreams: Seq[Stream[T]], closeHandler.foreach(_.run()) } override def isParallel(): Boolean = false - override def iterator: Iterator[T] = + override def iterator(): Iterator[T] = new Iterator[T] { private val its = substreams.iterator private var currentIt: Iterator[_ <: T] = EmptyIterator From cd40d25028d62073a73b69c22c36d25a83dfc2bb Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 14:39:44 +0100 Subject: [PATCH 72/75] Fix checking for scala version in HashCodeTest --- unit-tests/src/test/scala/scala/HashCodeTest.scala | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/unit-tests/src/test/scala/scala/HashCodeTest.scala b/unit-tests/src/test/scala/scala/HashCodeTest.scala index 6aaa1b76f5..ea098c7614 100644 --- a/unit-tests/src/test/scala/scala/HashCodeTest.scala +++ b/unit-tests/src/test/scala/scala/HashCodeTest.scala @@ -2,19 +2,15 @@ package scala import org.junit.Test import org.junit.Assert._ -import scala.scalanative.buildinfo.ScalaNativeBuildInfo +import scala.scalanative.buildinfo.ScalaNativeBuildInfo.scalaVersion class HashCodeTest { case class MyData(string: String, num: Int) - def scala212orOlder: Boolean = - ScalaNativeBuildInfo.scalaVersion - .split('.') - .take(3) - .map(_.toInt) match { - case Array(2, n, _) if n <= 12 => true - case _ => false - } + def scala212orOlder: Boolean = { + scalaVersion.startsWith("2.11.") || + scalaVersion.startsWith("2.12.") + } @Test def hashCodeOfStringMatchesScalaJVM(): Unit = { assertTrue("hello".hashCode == 99162322) From d0c5cf29d6f212f1556ea0dcb135a0c8aaaca31b Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 14:58:37 +0100 Subject: [PATCH 73/75] Add missing param braces in ju.BaseStream and FileAttribute --- .../src/main/scala/java/nio/file/DirectoryStreamImpl.scala | 2 +- javalib/src/main/scala/java/nio/file/Files.scala | 6 +++--- .../main/scala/java/nio/file/attribute/FileAttribute.scala | 2 +- javalib/src/main/scala/java/util/stream/BaseStream.scala | 2 +- 
.../main/scala/java/util/stream/WrappedScalaStream.scala | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala b/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala index 17fb4f5eaf..54cb942cda 100644 --- a/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala +++ b/javalib/src/main/scala/java/nio/file/DirectoryStreamImpl.scala @@ -14,7 +14,7 @@ class DirectoryStreamImpl[T](stream: Stream[T], val predicate = new Predicate[T] { override def test(t: T): Boolean = filter.accept(t) } - stream.filter(predicate).iterator + stream.filter(predicate).iterator() } override def iterator(): Iterator[T] = diff --git a/javalib/src/main/scala/java/nio/file/Files.scala b/javalib/src/main/scala/java/nio/file/Files.scala index 72291e8592..75376df386 100644 --- a/javalib/src/main/scala/java/nio/file/Files.scala +++ b/javalib/src/main/scala/java/nio/file/Files.scala @@ -133,7 +133,7 @@ object Files { if (exists(dir, Array.empty)) { if (!isDirectory(dir, Array.empty)) { throw new FileAlreadyExistsException(dir.toString) - } else if (list(dir).iterator.hasNext()) { + } else if (list(dir).iterator().hasNext()) { throw new DirectoryNotEmptyException(dir.toString) } dir @@ -466,7 +466,7 @@ object Files { def readAllLines(path: Path, cs: Charset): List[String] = { val list = new LinkedList[String]() val reader = newBufferedReader(path, cs) - val lines = reader.lines().iterator + val lines = reader.lines().iterator() while (lines.hasNext()) { list.add(lines.next()) } @@ -734,7 +734,7 @@ object Files { write(path, lines, StandardCharsets.UTF_8, options) private def setAttributes(path: Path, attrs: Array[FileAttribute[_]]): Unit = - attrs.map(a => (a.name(), a.value)).toMap.foreach { + attrs.map(a => (a.name(), a.value())).toMap.foreach { case (name, value) => setAttribute(path, name, value.asInstanceOf[AnyRef], Array.empty) } diff --git a/javalib/src/main/scala/java/nio/file/attribute/FileAttribute.scala b/javalib/src/main/scala/java/nio/file/attribute/FileAttribute.scala index 86e2555367..95f108bac0 100644 --- a/javalib/src/main/scala/java/nio/file/attribute/FileAttribute.scala +++ b/javalib/src/main/scala/java/nio/file/attribute/FileAttribute.scala @@ -2,5 +2,5 @@ package java.nio.file.attribute trait FileAttribute[T] { def name(): String - def value: T + def value(): T } diff --git a/javalib/src/main/scala/java/util/stream/BaseStream.scala b/javalib/src/main/scala/java/util/stream/BaseStream.scala index 814663a03d..6ac783af38 100644 --- a/javalib/src/main/scala/java/util/stream/BaseStream.scala +++ b/javalib/src/main/scala/java/util/stream/BaseStream.scala @@ -7,7 +7,7 @@ trait BaseStream[+T, +S <: BaseStream[T, S]] extends AutoCloseable { def close(): Unit def isParallel(): Boolean - def iterator: Iterator[_ <: T] + def iterator(): Iterator[_ <: T] def onClose(closeHandler: Runnable): S def parallel(): S def sequential(): S diff --git a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala index 25b79e76a2..98671dffa5 100644 --- a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala +++ b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala @@ -9,7 +9,7 @@ class WrappedScalaStream[T](private val underlying: SStream[T], extends Stream[T] { override def close(): Unit = closeHandler.foreach(_.run()) override def isParallel(): Boolean = false - override def iterator: Iterator[T] = + override def iterator(): Iterator[T] 
= WrappedScalaStream.scala2javaIterator(underlying.iterator) override def parallel(): Stream[T] = this override def sequential(): Stream[T] = this @@ -60,7 +60,7 @@ private final class CompositeStream[T](substreams: Seq[Stream[T]], override def hasNext(): Boolean = if (currentIt.hasNext()) true else if (its.hasNext) { - currentIt = its.next().iterator + currentIt = its.next().iterator() hasNext() } else { false From bbcc16057c75fd401e7a3d93fd43d16da5f3f8cd Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 15:22:00 +0100 Subject: [PATCH 74/75] Remove laziness of ju.System.systemProperties. Set 2.13 scala package Vector override lazy --- javalib/src/main/scala/java/lang/System.scala | 6 +----- scalalib/overrides-2.13/scala/package.scala | 3 +-- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/javalib/src/main/scala/java/lang/System.scala b/javalib/src/main/scala/java/lang/System.scala index 207edb3020..b6f2fb0f69 100644 --- a/javalib/src/main/scala/java/lang/System.scala +++ b/javalib/src/main/scala/java/lang/System.scala @@ -91,11 +91,7 @@ object System { var err: PrintStream = new PrintStream(new FileOutputStream(FileDescriptor.err)) - /* Laziness for this val was enforced due to changes in Scala 2.13 Vector implementation - * Vector uses system property to define some of its default parameters and to allow user to tune it. - * This problem exists because current implementation of java.lang.System depends on Scala collections, - * this problem should be addressed in the future */ - private lazy val systemProperties = loadProperties() + private val systemProperties = loadProperties() Platform.setOSProps(new CFuncPtr2[CString, CString, Unit] { def apply(key: CString, value: CString): Unit = systemProperties.setProperty(fromCString(key), fromCString(value)) diff --git a/scalalib/overrides-2.13/scala/package.scala b/scalalib/overrides-2.13/scala/package.scala index 1f3c633d22..f86718e02b 100644 --- a/scalalib/overrides-2.13/scala/package.scala +++ b/scalalib/overrides-2.13/scala/package.scala @@ -97,9 +97,8 @@ package object scala { if (s.nonEmpty) Some((s.head, s.tail)) else None } - // It cannot be lazy, because of Vector <-> System.properties cyclic dependency in SN implementation type Vector[+A] = scala.collection.immutable.Vector[A] - val Vector = scala.collection.immutable.Vector + lazy val Vector = scala.collection.immutable.Vector type StringBuilder = scala.collection.mutable.StringBuilder lazy val StringBuilder = scala.collection.mutable.StringBuilder From 70eb2bae088688b654060dde17a3beb7cb5085a8 Mon Sep 17 00:00:00 2001 From: wojciechmazur Date: Thu, 12 Nov 2020 15:31:15 +0100 Subject: [PATCH 75/75] Explicitly use `nsc.Global with Singleton` instead of using type alias --- .../main/scala/scala/scalanative/nscplugin/NirCompat.scala | 2 +- .../main/scala/scala/scalanative/nscplugin/NirGenExpr.scala | 3 ++- .../main/scala/scala/scalanative/nscplugin/NirGenFile.scala | 3 ++- .../main/scala/scala/scalanative/nscplugin/NirGenName.scala | 3 ++- .../main/scala/scala/scalanative/nscplugin/NirGenPhase.scala | 4 ++-- .../main/scala/scala/scalanative/nscplugin/NirGenStat.scala | 3 ++- .../main/scala/scala/scalanative/nscplugin/NirGenType.scala | 3 ++- .../main/scala/scala/scalanative/nscplugin/NirGenUtil.scala | 4 ++-- .../main/scala/scala/scalanative/nscplugin/NirPlugin.scala | 3 ++- .../scala/scalanative/nscplugin/PrepNativeInterop.scala | 2 +- .../src/main/scala/scala/scalanative/nscplugin/package.scala | 5 ----- 11 files changed, 18 insertions(+), 17 
deletions(-) delete mode 100644 nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala index d7f70dcbab..cd9dd7ca1e 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirCompat.scala @@ -4,7 +4,7 @@ package nscplugin import scala.reflect.internal.Flags import scala.tools.nsc._ -trait NirCompat[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirCompat[G <: Global with Singleton] { self: NirGenPhase[G] => import NirCompat.{infiniteLoop, noImplClasses} import global._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala index f10f8465f3..5c9d11b6ed 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenExpr.scala @@ -2,12 +2,13 @@ package scala.scalanative package nscplugin import scala.collection.mutable +import scala.tools.nsc import scalanative.nir._ import scalanative.util.{StringUtils, unsupported} import scalanative.util.ScopedVar.scoped import scalanative.nscplugin.NirPrimitives._ -trait NirGenExpr[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenExpr[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => import global._ import definitions._ import treeInfo.hasSynthCaseSymbol diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala index 374de920b6..656bf09bf2 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenFile.scala @@ -4,9 +4,10 @@ package nscplugin import java.io.FileOutputStream import java.nio.file.{Path, Paths} import scala.scalanative.nir.serialization.serializeBinary +import scala.tools.nsc.Global import scala.tools.nsc.io.AbstractFile -trait NirGenFile[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenFile[G <: Global with Singleton] { self: NirGenPhase[G] => import global._ def genPathFor(cunit: CompilationUnit, ownerName: nir.Global): Path = { diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala index 323e2927b0..ba900bc4b0 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenName.scala @@ -1,9 +1,10 @@ package scala.scalanative package nscplugin +import scala.tools.nsc.Global import scalanative.util.unreachable -trait NirGenName[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenName[G <: Global with Singleton] { self: NirGenPhase[G] => import global.{Name => _, _}, definitions._ import nirAddons.nirDefinitions._ import SimpleType.{fromSymbol, fromType} diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala index 772d6f1b00..2d8c59149a 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenPhase.scala @@ -9,9 +9,9 @@ import scala.language.implicitConversions import scala.scalanative.nir._ import 
scala.scalanative.util.ScopedVar.scoped import scala.tools.nsc.plugins._ -import scala.tools.nsc.{util => _, _} +import scala.tools.nsc.{Global, util => _, _} -abstract class NirGenPhase[G <: NscGlobal](val global: G) +abstract class NirGenPhase[G <: Global with Singleton](val global: G) extends PluginComponent with NirGenStat[G] with NirGenExpr[G] diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala index 0a25f28c2d..f006a39939 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenStat.scala @@ -6,9 +6,10 @@ import scala.reflect.internal.Flags._ import scala.scalanative.nir._ import scala.scalanative.util.unsupported import scala.scalanative.util.ScopedVar.scoped +import scala.tools.nsc import scalanative.nir.ControlFlow.removeDeadBlocks -trait NirGenStat[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenStat[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => import global._ import definitions._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala index 9843146fec..a2110d19cd 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenType.scala @@ -1,7 +1,8 @@ package scala.scalanative package nscplugin +import scala.tools.nsc.Global -trait NirGenType[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenType[G <: Global with Singleton] { self: NirGenPhase[G] => import SimpleType.{fromSymbol, fromType} import global._ import definitions._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala index b6588d30d1..d458ea856d 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirGenUtil.scala @@ -1,9 +1,9 @@ package scala.scalanative package nscplugin -import scalanative.util.unsupported +import scala.tools.nsc.Global -trait NirGenUtil[G <: NscGlobal] { self: NirGenPhase[G] => +trait NirGenUtil[G <: Global with Singleton] { self: NirGenPhase[G] => import global._ import definitions._ import nirAddons._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala index 469340df09..15cc821f05 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/NirPlugin.scala @@ -12,7 +12,8 @@ class NirPlugin(val global: Global) extends Plugin { /** A trick to avoid early initializers while still enforcing that `global` * is initialized early. 
*/ - abstract class NirGlobalAddonsEarlyInit[G <: NscGlobal](val global: G) + abstract class NirGlobalAddonsEarlyInit[G <: Global with Singleton]( + val global: G) extends NirGlobalAddons object nirAddons extends NirGlobalAddonsEarlyInit[global.type](global) diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala index b3ae4d94a6..99d5e1464d 100644 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala +++ b/nscplugin/src/main/scala/scala/scalanative/nscplugin/PrepNativeInterop.scala @@ -11,7 +11,7 @@ import scala.tools.nsc._ * - Rewrite the body `scala.util.PropertiesTrait.scalaProps` to * be statically determined at compile-time. */ -abstract class PrepNativeInterop[G <: NscGlobal](val global: G) +abstract class PrepNativeInterop[G <: Global with Singleton](val global: G) extends plugins.PluginComponent with transform.Transform { import PrepNativeInterop._ diff --git a/nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala b/nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala deleted file mode 100644 index bcd839c0d8..0000000000 --- a/nscplugin/src/main/scala/scala/scalanative/nscplugin/package.scala +++ /dev/null @@ -1,5 +0,0 @@ -package scala.scalanative - -package object nscplugin { - type NscGlobal = scala.tools.nsc.Global with Singleton -}
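Background for patch 74 (System properties vs. the Vector alias): on 2.13 the Vector companion reads system properties during its own initialization to pick its tuning defaults, while Scala Native's java.lang.System is itself built on Scala collections, which is where the cycle comes from. Rather than keeping systemProperties lazy, the patch breaks the chain in the scala package override by making the `Vector` value alias lazy, so loading the package object no longer forces the companion. The toy sketch below uses made-up names and is not the Scala Native sources; `Aliases` stands in for the package object override and `Tuning` for a companion that reads configuration while it initializes.

    object InitOrderDemo {
      object Tuning {
        println("Tuning initialized")       // side effect shows when init runs
        val maxLength: Int = 128
      }

      object Aliases {
        println("Aliases initialized")
        // A strict `val Tuning = InitOrderDemo.Tuning` would force Tuning right
        // here, while Aliases itself is still initializing; the lazy alias
        // defers that until Tuning is first dereferenced as a value.
        lazy val Tuning = InitOrderDemo.Tuning
      }

      def main(args: Array[String]): Unit = {
        println(Aliases)                    // forces Aliases; Tuning stays untouched
        println(Aliases.Tuning.maxLength)   // forces the alias: "Tuning initialized", then 128
      }
    }

Shifting the laziness to the alias is what lets java.lang.System go back to a plain val for its properties, which is the trade this commit makes.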
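Background for patch 75: the deleted package object shows that `NscGlobal` was just an alias for `scala.tools.nsc.Global with Singleton`, so spelling the bound out at every use site keeps the Singleton requirement visible where it matters. The Singleton part forces the type parameter to be instantiated with a singleton type such as `global.type`, which makes the path-dependent types (Tree, Symbol, and friends) seen by the different NirGen* traits belong to one and the same compiler instance. Below is a self-contained sketch of the pattern; `Engine`, `Phase` and `ExtraPart` are made-up stand-ins for nsc.Global, NirGenPhase and its mixins, not the real compiler API.

    class Engine {
      class Tree
      def emptyTree: Tree = new Tree
    }

    // The Singleton upper bound only admits singleton types such as `engine.type`,
    // so every component mixed into a Phase talks about the same engine.Tree.
    abstract class Phase[G <: Engine with Singleton](val engine: G) {
      def process(tree: engine.Tree): Unit = println(s"processed $tree")
    }

    trait ExtraPart[G <: Engine with Singleton] { self: Phase[G] =>
      // `engine.Tree` is the same type as in Phase because G is a singleton type.
      def inspect(tree: engine.Tree): String = tree.toString
    }

    object SingletonDemo {
      val engine = new Engine
      object phase extends Phase[engine.type](engine) with ExtraPart[engine.type]

      def main(args: Array[String]): Unit = {
        phase.process(engine.emptyTree)       // engine.Tree lines up across parts
        println(phase.inspect(engine.emptyTree))
      }
    }

Instantiating G with `engine.type` is what makes `engine.Tree` and `phase.engine.Tree` the same type; writing `Global with Singleton` explicitly documents and enforces that intent without the indirection of the alias.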
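The comment retained in NirPlugin ("A trick to avoid early initializers while still enforcing that `global` is initialized early") describes the usual replacement for early definitions, which are deprecated in 2.13 and dropped in Scala 3: route the value through an abstract class constructor parameter, since constructor-parameter fields are assigned before parent initializers run. A minimal sketch with invented names follows; only `NirGlobalAddonsEarlyInit` in the patch is real, while `Addons` and `AddonsEarlyInit` here are illustrative.

    trait Addons {
      val global: AnyRef                     // read by the trait body just below
      val banner: String = s"addons for $global"
    }

    // The constructor parameter is assigned before the Addons initializer runs,
    // so `banner` sees a non-null `global`, the guarantee an early initializer
    // used to provide.
    abstract class AddonsEarlyInit(val global: AnyRef) extends Addons

    object EarlyInitDemo {
      val compilerGlobal = new AnyRef { override def toString = "global" }
      object addons extends AddonsEarlyInit(compilerGlobal)

      def main(args: Array[String]): Unit =
        println(addons.banner)               // "addons for global", not "addons for null"
    }

`object nirAddons extends NirGlobalAddonsEarlyInit[global.type](global)` in the patch is this same shape, with the `Global with Singleton` bound layered on top.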