From 41703dfef181caa7877aec77e90249264fd37e02 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Sun, 24 Feb 2013 23:49:22 +0100
Subject: [PATCH] More explicit empty paren lists in method calls.

---
 .../backend/opt/InlineExceptionHandlers.scala | 2 +-
 .../tools/nsc/backend/opt/Inliners.scala | 2 +-
 .../nsc/symtab/classfile/ICodeReader.scala | 2 +-
 src/library/scala/Array.scala | 10 +-
 src/library/scala/Enumeration.scala | 4 +-
 src/library/scala/beans/ScalaBeanInfo.scala | 2 +-
 src/library/scala/collection/BitSetLike.scala | 2 +-
 src/library/scala/collection/DefaultMap.scala | 4 +-
 .../scala/collection/IndexedSeqLike.scala | 4 +-
 .../collection/IndexedSeqOptimized.scala | 10 +-
 .../scala/collection/IterableLike.scala | 48 ++---
 src/library/scala/collection/Iterator.scala | 18 +-
 .../scala/collection/LinearSeqLike.scala | 4 +-
 .../scala/collection/LinearSeqOptimized.scala | 12 +-
 src/library/scala/collection/MapLike.scala | 4 +-
 .../scala/collection/Parallelizable.scala | 2 +-
 src/library/scala/collection/SeqLike.scala | 38 ++--
 src/library/scala/collection/SetLike.scala | 8 +-
 src/library/scala/collection/SortedMap.scala | 4 +-
 .../scala/collection/TraversableOnce.scala | 6 +-
 .../scala/collection/concurrent/TrieMap.scala | 2 +-
 .../scala/collection/convert/Wrappers.scala | 8 +-
 .../collection/generic/GenMapFactory.scala | 2 +-
 .../generic/GenTraversableFactory.scala | 10 +-
 .../generic/GenericClassTagCompanion.scala | 4 +-
 .../collection/generic/GenericCompanion.scala | 4 +-
 .../generic/GenericOrderedCompanion.scala | 4 +-
 .../generic/GenericTraversableTemplate.scala | 10 +-
 .../scala/collection/generic/Signalling.scala | 2 +-
 .../scala/collection/generic/Sorted.scala | 10 +-
 .../collection/generic/SortedMapFactory.scala | 2 +-
 .../collection/generic/SortedSetFactory.scala | 2 +-
 .../scala/collection/immutable/BitSet.scala | 2 +-
 .../collection/immutable/DefaultMap.scala | 4 +-
 .../scala/collection/immutable/ListSet.scala | 4 +-
 .../scala/collection/immutable/MapLike.scala | 2 +-
 .../scala/collection/immutable/PagedSeq.scala | 4 +-
 .../collection/immutable/RedBlackTree.scala | 2 +-
 .../collection/immutable/SortedMap.scala | 4 +-
 .../scala/collection/immutable/Stream.scala | 2 +-
 .../collection/immutable/StringLike.scala | 4 +-
 .../scala/collection/immutable/TreeMap.scala | 2 +-
 .../scala/collection/immutable/TreeSet.scala | 2 +-
 .../collection/immutable/TrieIterator.scala | 4 +-
 .../scala/collection/immutable/Vector.scala | 22 +--
 .../scala/collection/mutable/AVLTree.scala | 4 +-
 .../scala/collection/mutable/ArrayOps.scala | 4 +-
 .../scala/collection/mutable/ArraySeq.scala | 2 +-
 .../scala/collection/mutable/ArrayStack.scala | 4 +-
 .../scala/collection/mutable/BufferLike.scala | 4 +-
 .../collection/mutable/BufferProxy.scala | 2 +-
 .../scala/collection/mutable/Builder.scala | 2 +-
 .../collection/mutable/DoubleLinkedList.scala | 2 +-
 .../collection/mutable/FlatHashTable.scala | 4 +-
 .../scala/collection/mutable/HashMap.scala | 8 +-
 .../scala/collection/mutable/HashSet.scala | 4 +-
 .../scala/collection/mutable/HashTable.scala | 2 +-
 .../scala/collection/mutable/History.scala | 6 +-
 .../collection/mutable/LinkedHashMap.scala | 6 +-
 .../collection/mutable/LinkedHashSet.scala | 2 +-
 .../collection/mutable/LinkedListLike.scala | 2 +-
 .../collection/mutable/MutableList.scala | 2 +-
 .../collection/mutable/ObservableBuffer.scala | 2 +-
 .../collection/mutable/ObservableMap.scala | 2 +-
 .../collection/mutable/ObservableSet.scala | 2 +-
 .../collection/mutable/OpenHashMap.scala | 2 +-
 .../collection/mutable/PriorityQueue.scala | 4 +-
 .../mutable/PriorityQueueProxy.scala | 4 +-
 .../scala/collection/mutable/Publisher.scala | 2 +-
 .../scala/collection/mutable/Queue.scala | 2 +-
 .../scala/collection/mutable/QueueProxy.scala | 4 +-
 .../mutable/RevertibleHistory.scala | 4 +-
 .../scala/collection/mutable/SetLike.scala | 2 +-
 .../scala/collection/mutable/StackProxy.scala | 4 +-
 .../mutable/SynchronizedBuffer.scala | 2 +-
 .../mutable/SynchronizedPriorityQueue.scala | 4 +-
 .../mutable/SynchronizedQueue.scala | 4 +-
 .../collection/mutable/SynchronizedSet.scala | 2 +-
 .../mutable/SynchronizedStack.scala | 4 +-
 .../collection/mutable/UnrolledBuffer.scala | 4 +-
 .../scala/collection/parallel/Combiner.scala | 2 +-
 .../collection/parallel/ParIterableLike.scala | 28 +--
 .../parallel/ParIterableViewLike.scala | 2 +-
 .../collection/parallel/ParMapLike.scala | 4 +-
 .../collection/parallel/ParSeqLike.scala | 8 +-
 .../collection/parallel/ParSeqViewLike.scala | 2 +-
 .../collection/parallel/RemainsIterator.scala | 84 ++++----
 .../scala/collection/parallel/Splitter.scala | 2 +-
 .../scala/collection/parallel/Tasks.scala | 16 +-
 .../parallel/immutable/ParHashMap.scala | 2 +-
 .../parallel/immutable/ParHashSet.scala | 2 +-
 .../parallel/immutable/ParRange.scala | 2 +-
 .../parallel/mutable/ParArray.scala | 6 +-
 .../parallel/mutable/ParFlatHashTable.scala | 2 +-
 .../mutable/UnrolledParArrayCombiner.scala | 2 +-
 src/library/scala/concurrent/Future.scala | 6 +-
 .../scala/concurrent/duration/Duration.scala | 4 +-
 .../scala/concurrent/impl/Promise.scala | 6 +-
 src/library/scala/io/BufferedSource.scala | 2 +-
 src/library/scala/io/Source.scala | 12 +-
 src/library/scala/math/BigDecimal.scala | 2 +-
 src/library/scala/math/BigInt.scala | 2 +-
 src/library/scala/math/Ordering.scala | 4 +-
 .../scala/math/ScalaNumericConversions.scala | 14 +-
 .../scala/runtime/ScalaNumberProxy.scala | 4 +-
 src/library/scala/runtime/Tuple2Zipped.scala | 26 +--
 src/library/scala/runtime/Tuple3Zipped.scala | 26 +--
 src/library/scala/sys/process/BasicIO.scala | 2 +-
 .../scala/sys/process/ProcessImpl.scala | 10 +-
 src/library/scala/util/Random.scala | 2 +-
 src/library/scala/util/matching/Regex.scala | 6 +-
 src/library/scala/xml/PrettyPrinter.scala | 2 +-
 src/library/scala/xml/Utility.scala | 16 +-
 .../scala/xml/dtd/ContentModelParser.scala | 40 ++--
 src/library/scala/xml/dtd/DocType.scala | 2 +-
 src/library/scala/xml/dtd/Scanner.scala | 28 +--
 .../xml/dtd/impl/SubsetConstruction.scala | 2 +-
 src/library/scala/xml/factory/XMLLoader.scala | 2 +-
 .../scala/xml/parsing/FactoryAdapter.scala | 10 +-
 .../scala/xml/parsing/MarkupParser.scala | 184 +++++++++---------
 .../xml/parsing/MarkupParserCommon.scala | 30 +--
 .../scala/xml/parsing/XhtmlParser.scala | 2 +-
 .../xml/persistent/CachedFileStorage.scala | 10 +-
 .../scala/xml/pull/XMLEventReader.scala | 6 +-
 .../scala/reflect/internal/Printers.scala | 12 +-
 .../scala/reflect/internal/TreeInfo.scala | 8 +-
 .../scala/reflect/internal/Types.scala | 2 +-
 .../reflect/internal/util/Collections.scala | 2 +-
 src/reflect/scala/reflect/io/PlainFile.scala | 2 +-
 src/reflect/scala/reflect/io/Streamable.scala | 6 +-
 .../scala/reflect/io/VirtualDirectory.scala | 6 +-
 .../scala/reflect/io/VirtualFile.scala | 6 +-
 src/reflect/scala/reflect/io/ZipArchive.scala | 14 +-
 133 files changed, 552 insertions(+), 550 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index 7f76839ae5ae..dcf059095172
100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -262,7 +262,7 @@ abstract class InlineExceptionHandlers extends SubComponent { if (analyzedMethod eq NoIMethod) { analyzedMethod = bblock.method tfa.init(bblock.method) - tfa.run + tfa.run() log(" performed tfa on method: " + bblock.method) for (block <- bblock.method.blocks.sortBy(_.label)) diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 010f5b83195d..d183b3a29119 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -232,7 +232,7 @@ abstract class Inliners extends SubComponent { val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) }) var a: analysis.MethodTFA = null - if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run } + if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() } if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 39788ee3e7d2..6e99129ee5c2 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -764,7 +764,7 @@ abstract class ICodeReader extends ClassfileParser { // method.dump tfa.init(method) - tfa.run + tfa.run() for (bb <- linearizer.linearize(method)) { var info = tfa.in(bb) for (i <- bb.toList) { diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index b9f51803ec26..aede6a5d37fa 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -242,7 +242,7 @@ object Array extends FallbackArrayBuilding { val b = newBuilder[T] b.sizeHint(xss.map(_.size).sum) for (xs <- xss) b ++= xs - b.result + b.result() } /** Returns an array that contains the results of some element computation a number @@ -267,7 +267,7 @@ object Array extends FallbackArrayBuilding { b += elem i += 1 } - b.result + b.result() } /** Returns a two-dimensional array that contains the results of some element @@ -331,7 +331,7 @@ object Array extends FallbackArrayBuilding { b += f(i) i += 1 } - b.result + b.result() } /** Returns a two-dimensional array containing values of a given function @@ -406,7 +406,7 @@ object Array extends FallbackArrayBuilding { b += i i += step } - b.result + b.result() } /** Returns an array containing repeated applications of a function to a start value. @@ -431,7 +431,7 @@ object Array extends FallbackArrayBuilding { b += acc } } - b.result + b.result() } /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index d522539e8358..59be0cdfa357 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -95,7 +95,7 @@ abstract class Enumeration (initial: Int) extends Serializable { protected var nextName: Iterator[String] = _ private def nextNameOrNull = - if (nextName != null && nextName.hasNext) nextName.next else null + if (nextName != null && nextName.hasNext) nextName.next() else null /** The highest integer amongst those used to identify values in this * enumeration. 
*/ @@ -277,7 +277,7 @@ abstract class Enumeration (initial: Int) extends Serializable { def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] { private[this] val b = new mutable.BitSet def += (x: Value) = { b += (x.id - bottomId); this } - def clear() = b.clear + def clear() = b.clear() def result() = new ValueSet(b.toImmutable) } /** The implicit builder for value sets */ diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala index c192a990f1b9..ac8fa263d7f5 100644 --- a/src/library/scala/beans/ScalaBeanInfo.scala +++ b/src/library/scala/beans/ScalaBeanInfo.scala @@ -27,7 +27,7 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_], for (m <- clazz.getMethods if methods.exists(_ == m.getName)) yield new MethodDescriptor(m) - init + init() override def getPropertyDescriptors() = pd override def getMethodDescriptors() = md diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index bf05331cb169..72a6713ffd6f 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -109,7 +109,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe } def next(): Int = if (hasNext) { val r = current; current += 1; r } - else Iterator.empty.next + else Iterator.empty.next() } override def foreach[B](f: Int => B) { diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala index cbd7e3f8b9f3..bbd6b2c2fc0f 100644 --- a/src/library/scala/collection/DefaultMap.scala +++ b/src/library/scala/collection/DefaultMap.scala @@ -30,7 +30,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => val b = Map.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } /** A default implementation which creates a new immutable map. 
@@ -38,6 +38,6 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => override def - (key: A): Map[A, B] = { val b = newBuilder b ++= this filter (key != _._1) - b.result + b.result() } } diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index 1d8e2b1583ae..473202a8eb78 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -59,7 +59,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { def next(): A = { if (index >= end) - Iterator.empty.next + Iterator.empty.next() val x = self(index) index += 1 @@ -68,7 +68,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { def head = { if (index >= end) - Iterator.empty.next + Iterator.empty.next() self(index) } diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index 9721a42e9120..ade04e4de884 100755 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -88,7 +88,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { b += ((this(i), that(i).asInstanceOf[B])) i += 1 } - b.result + b.result() case _ => super.zip[A1, B, That](that)(bf) } @@ -103,7 +103,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { b += ((this(i), i)) i += 1 } - b.result + b.result() } override /*IterableLike*/ @@ -119,7 +119,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { b += self(i) i += 1 } - b.result + b.result() } override /*IterableLike*/ @@ -220,7 +220,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { i -= 1 b += this(i) } - b.result + b.result() } override /*SeqLike*/ @@ -231,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { if (0 < i) { i -= 1 self(i) - } else Iterator.empty.next + } else Iterator.empty.next() } override /*SeqLike*/ diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index 540bd84b791b..b043d1f2a6e8 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -88,13 +88,13 @@ self => override /*TraversableLike*/ def toIterator: Iterator[A] = iterator override /*TraversableLike*/ def head: A = - iterator.next + iterator.next() override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = { val lo = math.max(from, 0) val elems = until - lo val b = newBuilder - if (elems <= 0) b.result + if (elems <= 0) b.result() else { b.sizeHintBounded(elems, this) var i = 0 @@ -103,14 +103,14 @@ self => b += it.next i += 1 } - b.result + b.result() } } override /*TraversableLike*/ def take(n: Int): Repr = { val b = newBuilder - if (n <= 0) b.result + if (n <= 0) b.result() else { b.sizeHintBounded(n, this) var i = 0 @@ -119,7 +119,7 @@ self => b += it.next i += 1 } - b.result + b.result() } } @@ -130,21 +130,21 @@ self => var i = 0 val it = iterator while (i < n && it.hasNext) { - it.next + it.next() i += 1 } - (b ++= it).result + (b ++= it).result() } override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = { val b = newBuilder val it = iterator while (it.hasNext) { - val x = it.next - if (!p(x)) return b.result + val x = it.next() + if (!p(x)) return b.result() b += x } - b.result + b.result() } /** Partitions elements in fixed size ${coll}s. 
@@ -158,7 +158,7 @@ self => for (xs <- iterator grouped size) yield { val b = newBuilder b ++= xs - b.result + b.result() } /** Groups elements in fixed size blocks by passing a "sliding window" @@ -187,7 +187,7 @@ self => for (xs <- iterator.sliding(size, step)) yield { val b = newBuilder b ++= xs - b.result + b.result() } /** Selects last ''n'' elements. @@ -203,11 +203,11 @@ self => val lead = this.iterator drop n var go = false for (x <- this.seq) { - if (lead.hasNext) lead.next + if (lead.hasNext) lead.next() else go = true if (go) b += x } - b.result + b.result() } /** Selects all elements except last ''n'' ones. @@ -224,9 +224,9 @@ self => val it = iterator while (lead.hasNext) { b += it.next - lead.next + lead.next() } - b.result + b.result() } override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { @@ -234,7 +234,7 @@ self => val end = (start + len) min xs.length val it = iterator while (i < end && it.hasNext) { - xs(i) = it.next + xs(i) = it.next() i += 1 } } @@ -244,8 +244,8 @@ self => val these = this.iterator val those = that.iterator while (these.hasNext && those.hasNext) - b += ((these.next, those.next)) - b.result + b += ((these.next(), those.next())) + b.result() } def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = { @@ -253,12 +253,12 @@ self => val these = this.iterator val those = that.iterator while (these.hasNext && those.hasNext) - b += ((these.next, those.next)) + b += ((these.next(), those.next())) while (these.hasNext) - b += ((these.next, thatElem)) + b += ((these.next(), thatElem)) while (those.hasNext) - b += ((thisElem, those.next)) - b.result + b += ((thisElem, those.next())) + b.result() } def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = { @@ -268,7 +268,7 @@ self => b += ((x, i)) i +=1 } - b.result + b.result() } def sameElements[B >: A](that: GenIterable[B]): Boolean = { diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index cb7d2095bc6e..77baad71d364 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -368,7 +368,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] { private var cur: Iterator[B] = empty def hasNext: Boolean = - cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext } + cur.hasNext || self.hasNext && { cur = f(self.next()).toIterator; hasNext } def next(): B = (if (hasNext) cur else empty).next() } @@ -408,7 +408,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = { val that0 = that.toIterator while (hasNext && that0.hasNext) - if (!p(next, that0.next)) return false + if (!p(next(), that0.next())) return false hasNext == that0.hasNext } @@ -630,7 +630,7 @@ trait Iterator[+A] extends TraversableOnce[A] { */ def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { def hasNext = self.hasNext && that.hasNext - def next = (self.next, that.next) + def next = (self.next(), that.next()) } /** Appends an element value to this iterator until a given target length is reached. 
@@ -650,9 +650,9 @@ trait Iterator[+A] extends TraversableOnce[A] { def hasNext = self.hasNext || count < len def next = { count += 1 - if (self.hasNext) self.next + if (self.hasNext) self.next() else if (count <= len) elem - else empty.next + else empty.next() } } @@ -667,7 +667,7 @@ trait Iterator[+A] extends TraversableOnce[A] { var idx = 0 def hasNext = self.hasNext def next = { - val ret = (self.next, idx) + val ret = (self.next(), idx) idx += 1 ret } @@ -1052,12 +1052,12 @@ trait Iterator[+A] extends TraversableOnce[A] { val e = self.next() gap enqueue e e - } else gap.dequeue + } else gap.dequeue() } // to verify partnerhood we use reference equality on gap because // type testing does not discriminate based on origin. private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue - override def hashCode = gap.hashCode + override def hashCode = gap.hashCode() override def equals(other: Any) = other match { case x: Partner => x.compareGap(gap) && gap.isEmpty case _ => super.equals(other) @@ -1139,7 +1139,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def toTraversable: Traversable[A] = toStream def toIterator: Iterator[A] = self def toStream: Stream[A] = - if (self.hasNext) Stream.cons(self.next, self.toStream) + if (self.hasNext) Stream.cons(self.next(), self.toStream) else Stream.empty[A] diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index 2a824bcff3c3..a4bb194f8a43 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -55,14 +55,14 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr def next(): A = if (hasNext) { val result = these.head; these = these.tail; result - } else Iterator.empty.next + } else Iterator.empty.next() /** Have to clear `these` so the iterator is exhausted like * it would be without the optimization. 
*/ override def toList: List[A] = { val xs = these.toList - these = newBuilder.result + these = newBuilder.result() xs } } diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index ed5f2406e808..de4d5e2ba28c 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -151,7 +151,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - b.result + b.result() } override /*TraversableLike*/ @@ -186,7 +186,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea these = these.tail lead = lead.tail } - b.result + b.result() } override /*IterableLike*/ @@ -194,7 +194,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea var these: Repr = repr var count = from max 0 if (until <= count) - return newBuilder.result + return newBuilder.result() val b = newBuilder var sliceElems = until - count @@ -207,7 +207,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - b.result + b.result() } override /*IterableLike*/ @@ -218,7 +218,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - b.result + b.result() } override /*TraversableLike*/ @@ -229,7 +229,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - (b.result, these) + (b.result(), these) } override /*IterableLike*/ diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 93d02a435ceb..cc0129202fe2 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -181,7 +181,7 @@ self => def keysIterator: Iterator[A] = new AbstractIterator[A] { val iter = self.iterator def hasNext = iter.hasNext - def next() = iter.next._1 + def next() = iter.next()._1 } /** Collects all keys of this map in an iterable collection. 
@@ -213,7 +213,7 @@ self => def valuesIterator: Iterator[B] = new AbstractIterator[B] { val iter = self.iterator def hasNext = iter.hasNext - def next() = iter.next._2 + def next() = iter.next()._2 } /** Defines the default value computation for the map, diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala index d97c44abc098..626dfa40329e 100644 --- a/src/library/scala/collection/Parallelizable.scala +++ b/src/library/scala/collection/Parallelizable.scala @@ -39,7 +39,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { def par: ParRepr = { val cb = parCombiner for (x <- seq) cb += x - cb.result + cb.result() } /** The default `par` implementation uses the combiner provided by this method diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index 35df68078362..307ee3f2a822 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -127,7 +127,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def lastIndexWhere(p: A => Boolean, end: Int): Int = { var i = length - 1 val it = reverseIterator - while (it.hasNext && { val elem = it.next; (i > end || !p(elem)) }) i -= 1 + while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1 i } @@ -156,10 +156,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def hasNext = _hasNext def next(): Repr = { if (!hasNext) - Iterator.empty.next + Iterator.empty.next() val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms - val result = (self.newBuilder ++= forcedElms).result + val result = (self.newBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) i -= 1 @@ -208,13 +208,13 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def hasNext = _hasNext def next(): Repr = { if (!hasNext) - Iterator.empty.next + Iterator.empty.next() /** Calculate this result. */ val buf = self.newBuilder for(k <- 0 until nums.length; j <- 0 until nums(k)) buf += elms(offs(k)+j) - val res = buf.result + val res = buf.result() /** Prepare for the next call to next. */ var idx = nums.length - 1 @@ -268,7 +268,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b.sizeHint(this) for (x <- xs) b += x - b.result + b.result() } def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -279,7 +279,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ for (x <- xs) b += f(x) - b.result + b.result() } /** An iterator yielding elements in reversed order. @@ -442,7 +442,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ for (x <- this) if (occ(x) == 0) b += x else occ(x) -= 1 - b.result + b.result() } /** Computes the multiset intersection between this $coll and another sequence. 
@@ -473,7 +473,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b += x occ(x) -= 1 } - b.result + b.result() } private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { @@ -496,7 +496,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ seen += x } } - b.result + b.result() } def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -505,7 +505,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b ++= toCollection(prefix) b ++= patch.seq b ++= toCollection(rest).view drop replaced - b.result + b.result() } def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -514,21 +514,21 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b ++= toCollection(prefix) b += elem b ++= toCollection(rest).view.tail - b.result + b.result() } def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { val b = bf(repr) b += elem b ++= thisCollection - b.result + b.result() } def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { val b = bf(repr) b ++= thisCollection b += elem - b.result + b.result() } def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -540,14 +540,14 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b += elem diff -= 1 } - b.result + b.result() } def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { val i = this.iterator val j = that.iterator while (i.hasNext && j.hasNext) - if (!p(i.next, j.next)) + if (!p(i.next(), j.next())) return false !i.hasNext && !j.hasNext @@ -616,7 +616,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ val b = newBuilder b.sizeHint(len) for (x <- arr) b += x - b.result + b.result() } /** Converts this $coll to a sequence. 
@@ -682,7 +682,7 @@ object SeqLike { val wit = W.iterator.drop(n0) var i = if (forward) 0 else (n1-n0-1) while (i != done) { - Warr(i) = wit.next.asInstanceOf[AnyRef] + Warr(i) = wit.next().asInstanceOf[AnyRef] i += delta } @@ -786,7 +786,7 @@ object SeqLike { var answer = -1 while (m+m0+n1-n0 <= m1) { while (i+m >= largest) { - cache(largest%(n1-n0)) = iter.next.asInstanceOf[AnyRef] + cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef] largest += 1 } if (Wopt(i) == cache((i+m)%(n1-n0))) { diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala index a6ebcc0e200d..9fd24317f292 100644 --- a/src/library/scala/collection/SetLike.scala +++ b/src/library/scala/collection/SetLike.scala @@ -180,14 +180,14 @@ self => def hasNext = len <= elms.size || itr.hasNext def next = { if (!itr.hasNext) { - if (len > elms.size) Iterator.empty.next + if (len > elms.size) Iterator.empty.next() else { itr = new SubsetsItr(elms, len) len += 1 } } - itr.next + itr.next() } } @@ -205,11 +205,11 @@ self => def hasNext = _hasNext def next(): This = { - if (!hasNext) Iterator.empty.next + if (!hasNext) Iterator.empty.next() val buf = self.newBuilder idxs.slice(0, len) foreach (idx => buf += elms(idx)) - val result = buf.result + val result = buf.result() var i = len - 1 while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index c81c16e8bbb7..86fcfac94d44 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -40,13 +40,13 @@ object SortedMap extends SortedMapFactory[SortedMap] { val b = SortedMap.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } override def - (key: A): SortedMap[A, B] = { val b = newBuilder for (kv <- this; if kv._1 != key) b += kv - b.result + b.result() } } diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 7345ef8328f4..679e8e3e614f 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -269,7 +269,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { val b = cbf() b ++= seq - b.result + b.result() } def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = { @@ -277,7 +277,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { for (x <- self) b += x - b.result + b.result() } def mkString(start: String, sep: String, end: String): String = @@ -422,7 +422,7 @@ object TraversableOnce { def flatten: Iterator[A] = new AbstractIterator[A] { val its = travs.toIterator private var it: Iterator[A] = Iterator.empty - def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next.toIterator; hasNext } + def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext } def next(): A = if (hasNext) it.next() else Iterator.empty.next() } } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 14b475dd1f42..6bf9c1056a99 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -437,7 +437,7 @@ extends MainNode[K, V] { val updmap = listmap - k if (updmap.size > 1) new LNode(updmap) else { - val (k, v) = updmap.iterator.next + val (k, v) = 
updmap.iterator.next() new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses } } diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 0f4506b5d534..b121f32ba6cf 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -27,9 +27,9 @@ private[collection] trait Wrappers { case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] { def hasNext = underlying.hasNext - def next() = underlying.next + def next() = underlying.next() def hasMoreElements = underlying.hasNext - def nextElement() = underlying.next + def nextElement() = underlying.next() def remove() = throw new UnsupportedOperationException } @@ -108,7 +108,7 @@ private[collection] trait Wrappers { val ui = underlying.iterator var prev: Option[A] = None def hasNext = ui.hasNext - def next = { val e = ui.next; prev = Some(e); e } + def next = { val e = ui.next(); prev = Some(e); e } def remove = prev match { case Some(e) => underlying match { @@ -180,7 +180,7 @@ private[collection] trait Wrappers { def hasNext = ui.hasNext def next() = { - val (k, v) = ui.next + val (k, v) = ui.next() prev = Some(k) new ju.Map.Entry[A, B] { import scala.util.hashing.byteswap32 diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala index e869bba51a42..5a183c307b67 100644 --- a/src/library/scala/collection/generic/GenMapFactory.scala +++ b/src/library/scala/collection/generic/GenMapFactory.scala @@ -44,7 +44,7 @@ abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A * @tparam B the type of the associated values * @return a new $coll consisting key/value pairs given by `elems`. */ - def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result + def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result() /** The default builder for $Coll objects. * @tparam A the type of the keys diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index b36dd3ccaf77..0b8c9835da33 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -73,7 +73,7 @@ extends GenericCompanion[CC] { b.sizeHint(xss.map(_.size).sum) for (xs <- xss.seq) b ++= xs - b.result + b.result() } /** Produces a $coll containing the results of some element computation a number of times. @@ -89,7 +89,7 @@ extends GenericCompanion[CC] { b += elem i += 1 } - b.result + b.result() } /** Produces a two-dimensional $coll containing the results of some element computation a number of times. @@ -147,7 +147,7 @@ extends GenericCompanion[CC] { b += f(i) i += 1 } - b.result + b.result() } /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. @@ -222,7 +222,7 @@ extends GenericCompanion[CC] { b += i i += step } - b.result + b.result() } /** Produces a $coll containing repeated applications of a function to a start value. 
@@ -246,6 +246,6 @@ extends GenericCompanion[CC] { b += acc } } - b.result + b.result() } } diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala index 76c12d118ee7..cdfee5252f7f 100644 --- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala +++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala @@ -23,11 +23,11 @@ abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] { def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]] - def empty[A: ClassTag]: CC[A] = newBuilder[A].result + def empty[A: ClassTag]: CC[A] = newBuilder[A].result() def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = { val b = newBuilder[A] b ++= elems - b.result + b.result() } } diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala index b966ce51dbb1..66052d0e6f46 100644 --- a/src/library/scala/collection/generic/GenericCompanion.scala +++ b/src/library/scala/collection/generic/GenericCompanion.scala @@ -34,7 +34,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] { /** An empty collection of type `$Coll[A]` * @tparam A the type of the ${coll}'s elements */ - def empty[A]: CC[A] = newBuilder[A].result + def empty[A]: CC[A] = newBuilder[A].result() /** Creates a $coll with the specified elements. * @tparam A the type of the ${coll}'s elements @@ -46,7 +46,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] { else { val b = newBuilder[A] b ++= elems - b.result + b.result() } } } diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala index 094912c75af9..7a0c0a63e8ea 100644 --- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala +++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala @@ -23,12 +23,12 @@ abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] { def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] - def empty[A: Ordering]: CC[A] = newBuilder[A].result + def empty[A: Ordering]: CC[A] = newBuilder[A].result() def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = { val b = newBuilder[A] b ++= elems - b.result + b.result() } } diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala index f7a8a9aa88b7..908aa5b12699 100644 --- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala +++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala @@ -88,7 +88,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew b1 += x b2 += y } - (b1.result, b2.result) + (b1.result(), b2.result()) } /** Converts this $coll of triples into three collections of the first, second, @@ -113,7 +113,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew b2 += y b3 += z } - (b1.result, b2.result, b3.result) + (b1.result(), b2.result(), b3.result()) } /** Converts this $coll of traversable collections into @@ -144,7 +144,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew val b = genericBuilder[B] for (xs <- sequential) b ++= asTraversable(xs).seq - b.result + b.result() } /** Transposes this $coll of traversable collections into @@ -161,7 +161,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: 
GenTraversable[X]] extends HasNew @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0") def transpose[B](implicit asTraversable: A => /*<: val b = Map.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } /** A default implementation which creates a new immutable map. @@ -46,7 +46,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => override def - (key: A): Map[A, B] = { val b = newBuilder for (kv <- this.seq ; if kv._1 != key) b += kv - b.result + b.result() } } diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index fd23276c8d6c..def3d7eb230e 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -100,7 +100,7 @@ class ListSet[A] extends AbstractSet[A] */ override def ++(xs: GenTraversableOnce[A]): ListSet[A] = if (xs.isEmpty) this - else (new ListSet.ListSetBuilder(this) ++= xs.seq).result + else (new ListSet.ListSetBuilder(this) ++= xs.seq).result() private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e) private[ListSet] def unchecked_outer: ListSet[A] = @@ -120,7 +120,7 @@ class ListSet[A] extends AbstractSet[A] that = that.tail res } - else Iterator.empty.next + else Iterator.empty.next() } /** diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index 7e60f078474c..1c2ab1c66287 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -123,7 +123,7 @@ self => def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = { val b = bf(repr) for ((key, value) <- this) b += ((key, f(key, value))) - b.result + b.result() } } diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index 952107bf7888..4069f6f0e453 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -30,7 +30,7 @@ object PagedSeq { new PagedSeq[T]((data: Array[T], start: Int, len: Int) => { var i = 0 while (i < len && source.hasNext) { - data(start + i) = source.next + data(start + i) = source.next() i += 1 } if (i == 0) -1 else i @@ -51,7 +51,7 @@ object PagedSeq { if (cnt == len) cnt else (more(data, start + cnt, len - cnt) max 0) + cnt } else if (source.hasNext) { - current = source.next + current = source.next() more(data, start, len) } else -1 new PagedSeq(more(_: Array[Char], _: Int, _: Int)) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index d3ce3ab58c3e..19414f8e106f 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -510,7 +510,7 @@ object RedBlackTree { */ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { @tailrec def find(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext + if (tree eq null) popNext() else find( if (ordering.lteq(key, tree.key)) goLeft(tree) else goRight(tree) diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 5e833f87af78..73cc55df0050 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ 
-112,13 +112,13 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] { val b = SortedMap.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } override def - (key: A): SortedMap[A, B] = { val b = newBuilder for (kv <- this; if kv._1 != key) b += kv - b.result + b.result() } } } diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index e2719df531c4..0770bd317553 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -998,7 +998,7 @@ final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterat def hasNext: Boolean = these.v.nonEmpty def next(): A = - if (isEmpty) Iterator.empty.next + if (isEmpty) Iterator.empty.next() else { val cur = these.v val result = cur.head diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 663318330c52..389e1579f296 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -58,8 +58,8 @@ self => val start = from max 0 val end = until min length - if (start >= end) newBuilder.result - else (newBuilder ++= toString.substring(start, end)).result + if (start >= end) newBuilder.result() + else (newBuilder ++= toString.substring(start, end)).result() } /** Return the current string concatenated `n` times. diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index a6a6b75c3284..109317717220 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -108,7 +108,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi private[this] def countWhile(p: ((A, B)) => Boolean): Int = { var result = 0 val it = iterator - while (it.hasNext && p(it.next)) result += 1 + while (it.hasNext && p(it.next())) result += 1 result } override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p)) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 67668b3bef72..26c3d44bbbb5 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -89,7 +89,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin private[this] def countWhile(p: A => Boolean): Int = { var result = 0 val it = iterator - while (it.hasNext && p(it.next)) result += 1 + while (it.hasNext && p(it.next())) result += 1 result } override def dropWhile(p: A => Boolean) = drop(countWhile(p)) diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala index 550f4cd7e062..dbe013d6e811 100644 --- a/src/library/scala/collection/immutable/TrieIterator.scala +++ b/src/library/scala/collection/immutable/TrieIterator.scala @@ -94,7 +94,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e def hasNext = (subIter ne null) || depth >= 0 def next(): T = { if (subIter ne null) { - val el = subIter.next + val el = subIter.next() if (!subIter.hasNext) subIter = null el @@ -135,7 +135,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e } else { subIter = m.iterator - next + next() } // The much slower version: // diff --git 
a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index abaffd9d6ab3..571e6775c8b8 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -104,7 +104,7 @@ override def companion: GenericCompanion[Vector] = Vector if (0 < i) { i -= 1 self(i) - } else Iterator.empty.next + } else Iterator.empty.next() } // TODO: reverse @@ -261,7 +261,7 @@ override def companion: GenericCompanion[Vector] = Vector //println("----- appendFront " + value + " at " + (startIndex - 1) + " reached block start") if (shift != 0) { // case A: we can shift right on the top level - debug + debug() //println("shifting right by " + shiftBlocks + " at level " + (depth-1) + " (had "+freeSpace+" free space)") if (depth > 1) { @@ -271,7 +271,7 @@ override def companion: GenericCompanion[Vector] = Vector s.initFrom(this) s.dirty = dirty s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks - s.debug + s.debug() s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing s.display0(lo) = value.asInstanceOf[AnyRef] //assert(depth == s.depth) @@ -289,7 +289,7 @@ override def companion: GenericCompanion[Vector] = Vector s.shiftTopLevel(0, shiftBlocks) // shift right by n elements s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing s.display0(shift-1) = value.asInstanceOf[AnyRef] - s.debug + s.debug() s } } else if (blockIndex < 0) { @@ -304,10 +304,10 @@ override def companion: GenericCompanion[Vector] = Vector val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex) s.initFrom(this) s.dirty = dirty - s.debug + s.debug() s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch s.display0(lo) = value.asInstanceOf[AnyRef] - s.debug + s.debug() //assert(s.depth == depth+1) s } else { @@ -357,7 +357,7 @@ override def companion: GenericCompanion[Vector] = Vector //println("----- appendBack " + value + " at " + endIndex + " reached block end") if (shift != 0) { - debug + debug() //println("shifting left by " + shiftBlocks + " at level " + (depth-1) + " (had "+startIndex+" free space)") if (depth > 1) { val newBlockIndex = blockIndex - shift @@ -366,10 +366,10 @@ override def companion: GenericCompanion[Vector] = Vector s.initFrom(this) s.dirty = dirty s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks - s.debug + s.debug() s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) s.display0(lo) = value.asInstanceOf[AnyRef] - s.debug + s.debug() //assert(depth == s.depth) s } else { @@ -385,7 +385,7 @@ override def companion: GenericCompanion[Vector] = Vector s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) s.display0(32 - shift) = value.asInstanceOf[AnyRef] - s.debug + s.debug() s } } else { @@ -400,7 +400,7 @@ override def companion: GenericCompanion[Vector] = Vector //assert(s.depth == depth+1) might or might not create new level! 
if (s.depth == depth+1) { //println("creating new level " + s.depth + " (had "+0+" free space)") - s.debug + s.debug() } s } diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index da63778fcc19..878ea949878a 100644 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -229,11 +229,11 @@ private class AVLIterator[A](root: Node[A]) extends Iterator[A] { private def engageRight(): Unit = { if (Leaf != stack.head.right) { val right: Node[A] = stack.head.right.asInstanceOf[Node[A]] - stack.pop + stack.pop() stack.push(right) diveLeft() } else - stack.pop + stack.pop() } override def hasNext: Boolean = !stack.isEmpty diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index 6b778b26f55b..fcbfd2773869 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -80,7 +80,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum) for (xs <- this) b ++= asTrav(xs) - b.result + b.result() } /** Transposes a two dimensional array. @@ -101,7 +101,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza } val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass)) for (b <- bs) bb += b.result - bb.result + bb.result() } def seq = thisCollection diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 33f6949662d3..334b26ae03da 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -90,7 +90,7 @@ extends AbstractSeq[A] } override def clone(): ArraySeq[A] = { - val cloned = array.clone.asInstanceOf[Array[AnyRef]] + val cloned = array.clone().asInstanceOf[Array[AnyRef]] new ArraySeq[A](length) { override val array = cloned } diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala index 670558ab06e5..e05d668519a9 100644 --- a/src/library/scala/collection/mutable/ArrayStack.scala +++ b/src/library/scala/collection/mutable/ArrayStack.scala @@ -150,7 +150,7 @@ extends AbstractSeq[T] * * @param f The function to drain to. */ - def drain(f: T => Unit) = while (!isEmpty) f(pop) + def drain(f: T => Unit) = while (!isEmpty) f(pop()) /** Pushes all the provided elements in the traversable object onto the stack. * @@ -190,7 +190,7 @@ extends AbstractSeq[T] * * @param f The function to apply to the top two elements. */ - def combine(f: (T, T) => T): Unit = push(f(pop, pop)) + def combine(f: (T, T) => T): Unit = push(f(pop(), pop())) /** Repeatedly combine the top elements of the stack until the stack contains only * one element. 
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index 5935a2858aa0..322522fdd2df 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -198,7 +198,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] case Remove(Index(n), x) => if (this(n) == x) remove(n) case Remove(NoLo, x) => this -= x - case Reset() => clear + case Reset() => clear() case s: Script[_] => s.iterator foreach << case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") } @@ -260,6 +260,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] override def clone(): This = { val bf = newBuilder bf ++= this - bf.result.asInstanceOf[This] + bf.result().asInstanceOf[This] } } diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index ade0b94230a8..d3f96f69ad98 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -124,7 +124,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy { /** Clears the buffer contents. */ - def clear() { self.clear } + def clear() { self.clear() } /** Send a message to this scriptable object. * diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index 5c0681df1d39..75560580ccaa 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -121,7 +121,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] { override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this } override def sizeHint(size: Int) = self.sizeHint(size) override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl) - def result: NewTo = f(self.result) + def result: NewTo = f(self.result()) } } diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 18a1e234f615..a106794912b3 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -68,7 +68,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A] override def clone(): DoubleLinkedList[A] = { val builder = newBuilder builder ++= this - builder.result + builder.result() } } diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 7f4a8d1cbd47..4ffc5be7ad72 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -208,7 +208,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { } def next(): A = if (hasNext) { i += 1; entryToElem(table(i - 1)) } - else Iterator.empty.next + else Iterator.empty.next() } private def growTable() { @@ -358,7 +358,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { seedvalue = c.seedvalue sizemap = c.sizemap } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild + if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() } } diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index 3cd7f07d835f..69439677919c 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ 
b/src/library/scala/collection/mutable/HashMap.scala @@ -111,21 +111,21 @@ extends AbstractMap[A, B] override def keysIterator: Iterator[A] = new AbstractIterator[A] { val iter = entriesIterator def hasNext = iter.hasNext - def next() = iter.next.key + def next() = iter.next().key } /* Override to avoid tuple allocation */ override def valuesIterator: Iterator[B] = new AbstractIterator[B] { val iter = entriesIterator def hasNext = iter.hasNext - def next() = iter.next.value + def next() = iter.next().value } /** Toggles whether a size map is used to track hash map statistics. */ def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild - } else sizeMapDisable + if (!isSizeMapDefined) sizeMapInitAndRebuild() + } else sizeMapDisable() protected def createNewEntry[B1](key: A, value: B1): Entry = { new Entry(key, value.asInstanceOf[B]) diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index c4c68fdb7a61..753f7f8d019f 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -92,8 +92,8 @@ extends AbstractSet[A] /** Toggles whether a size map is used to track hash map statistics. */ def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild - } else sizeMapDisable + if (!isSizeMapDefined) sizeMapInitAndRebuild() + } else sizeMapDisable() } diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index 23b68b796978..83ffc4a03002 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -365,7 +365,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU seedvalue = c.seedvalue sizemap = c.sizemap } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild + if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() } private[collection] def hashTableContents = new HashTable.Contents( diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala index 2b8d1922b8d8..34e8f7d5b89d 100644 --- a/src/library/scala/collection/mutable/History.scala +++ b/src/library/scala/collection/mutable/History.scala @@ -41,7 +41,7 @@ extends AbstractIterable[(Pub, Evt)] */ def notify(pub: Pub, event: Evt) { if (log.length >= maxHistory) - log.dequeue + log.dequeue() log.enqueue((pub, event)) } @@ -50,7 +50,7 @@ extends AbstractIterable[(Pub, Evt)] def iterator: Iterator[(Pub, Evt)] = log.iterator def events: Iterator[Evt] = log.iterator map (_._2) - def clear() { log.clear } + def clear() { log.clear() } /** Checks if two history objects are structurally identical. 
* @@ -60,5 +60,5 @@ extends AbstractIterable[(Pub, Evt)] case that: History[_, _] => this.log equals that.log case _ => false } - override def hashCode = log.hashCode + override def hashCode = log.hashCode() } diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index da2c36ac2d82..14f30d74e8db 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -92,7 +92,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] def hasNext = cur ne null def next = if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) { @@ -118,7 +118,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] def hasNext = cur ne null def next = if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } override def valuesIterator: Iterator[B] = new AbstractIterator[B] { @@ -126,7 +126,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] def hasNext = cur ne null def next = if (hasNext) { val res = cur.value; cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } override def foreach[U](f: ((A, B)) => U) { diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index 172325843391..5641a78d464b 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -78,7 +78,7 @@ class LinkedHashSet[A] extends AbstractSet[A] def hasNext = cur ne null def next = if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } override def foreach[U](f: A => U) { diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index b3470ed3cd45..30030800600f 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -185,6 +185,6 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq override def clone(): This = { val bf = newBuilder bf ++= this - bf.result + bf.result() } } diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index fd92d2e5556e..03110569c406 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -148,7 +148,7 @@ extends AbstractSeq[A] override def clone(): MutableList[A] = { val bf = newBuilder bf ++= seq - bf.result + bf.result() } } diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala index bcaf97772749..7a2fce912882 100644 --- a/src/library/scala/collection/mutable/ObservableBuffer.scala +++ b/src/library/scala/collection/mutable/ObservableBuffer.scala @@ -65,7 +65,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa } abstract override def clear(): Unit = { - super.clear + super.clear() publish(new Reset with Undoable { def undo() { throw new UnsupportedOperationException("cannot undo") } }) diff --git a/src/library/scala/collection/mutable/ObservableMap.scala 
b/src/library/scala/collection/mutable/ObservableMap.scala index d81c90bf4c4d..3544275300f3 100644 --- a/src/library/scala/collection/mutable/ObservableMap.scala +++ b/src/library/scala/collection/mutable/ObservableMap.scala @@ -60,7 +60,7 @@ trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with } abstract override def clear(): Unit = { - super.clear + super.clear() publish(new Reset with Undoable { def undo(): Unit = throw new UnsupportedOperationException("cannot undo") }) diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala index 3e795064133a..81580316ff70 100644 --- a/src/library/scala/collection/mutable/ObservableSet.scala +++ b/src/library/scala/collection/mutable/ObservableSet.scala @@ -44,7 +44,7 @@ trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] } abstract override def clear(): Unit = { - super.clear + super.clear() publish(new Reset with Undoable { def undo(): Unit = throw new UnsupportedOperationException("cannot undo") }) diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index ad001fd79c88..a0aea43121ed 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -124,7 +124,7 @@ extends AbstractMap[Key, Value] put(key, hashOf(key), value) private def put(key: Key, hash: Int, value: Value): Option[Value] = { - if (2 * (size + deleted) > mask) growTable + if (2 * (size + deleted) > mask) growTable() val index = findIndex(key, hash) val entry = table(index) if (entry == null) { diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index f59cbe878c87..4e8b92315559 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -134,11 +134,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) throw new NoSuchElementException("no element to remove from heap") def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = { - val b = bf.apply + val b = bf.apply() while (nonEmpty) { b += dequeue() } - b.result + b.result() } /** Returns the element with the highest priority in the queue, diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala index 52a37550073a..ee5437073105 100644 --- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala +++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala @@ -66,7 +66,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority * * @return the element with the highest priority. */ - override def dequeue(): A = self.dequeue + override def dequeue(): A = self.dequeue() /** Returns the element with the highest priority in the queue, * or throws an error if there is no element contained in the queue. @@ -78,7 +78,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ - override def clear(): Unit = self.clear + override def clear(): Unit = self.clear() /** Returns a regular queue containing the same elements. 
*/ diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala index e31205b477a7..8c2ef0d3a372 100644 --- a/src/library/scala/collection/mutable/Publisher.scala +++ b/src/library/scala/collection/mutable/Publisher.scala @@ -45,7 +45,7 @@ trait Publisher[Evt] { def suspendSubscription(sub: Sub) { suspended += sub } def activateSubscription(sub: Sub) { suspended -= sub } def removeSubscription(sub: Sub) { filters -= sub } - def removeSubscriptions() { filters.clear } + def removeSubscriptions() { filters.clear() } protected def publish(event: Evt) { filters.keys.foreach(sub => diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index b947fa3ccaba..f1a572381865 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -178,7 +178,7 @@ extends MutableList[A] override def clone(): Queue[A] = { val bf = newBuilder bf ++= seq - bf.result + bf.result() } private[this] def decrementLength() { diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala index c286a340e3d4..051b1219cddf 100644 --- a/src/library/scala/collection/mutable/QueueProxy.scala +++ b/src/library/scala/collection/mutable/QueueProxy.scala @@ -67,7 +67,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy { * * @return the first element of the queue. */ - override def dequeue(): A = self.dequeue + override def dequeue(): A = self.dequeue() /** Returns the first element in the queue, or throws an error if there * is no element contained in the queue. @@ -79,7 +79,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy { /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ - override def clear(): Unit = self.clear + override def clear(): Unit = self.clear() /** Returns an iterator over all elements on the queue. 
* diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala index 5544a21a555a..9b8554669bde 100644 --- a/src/library/scala/collection/mutable/RevertibleHistory.scala +++ b/src/library/scala/collection/mutable/RevertibleHistory.scala @@ -30,7 +30,7 @@ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Und */ def undo(): Unit = { val old = log.toList.reverse - clear - old.foreach { case (sub, event) => event.undo } + clear() + old.foreach { case (sub, event) => event.undo() } } } diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index 4a907e7dc40d..8dfcde16ce4a 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -210,7 +210,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]] def <<(cmd: Message[A]): Unit = cmd match { case Include(_, x) => this += x case Remove(_, x) => this -= x - case Reset() => clear + case Reset() => clear() case s: Script[_] => s.iterator foreach << case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") } diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala index 16f13ff42cfa..8792738339d6 100644 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ b/src/library/scala/collection/mutable/StackProxy.scala @@ -69,13 +69,13 @@ trait StackProxy[A] extends Stack[A] with Proxy { /** Removes the top element from the stack. */ - override def pop(): A = self.pop + override def pop(): A = self.pop() /** * Removes all elements from the stack. After this operation completed, * the stack will be empty. */ - override def clear(): Unit = self.clear + override def clear(): Unit = self.clear() /** Returns an iterator over all elements on the stack. This iterator * is stable with respect to state changes in the stack object; i.e. diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala index bf9a70c5b7a8..14ec85b90693 100644 --- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala @@ -157,7 +157,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] { /** Clears the buffer contents. */ abstract override def clear(): Unit = synchronized { - super.clear + super.clear() } override def <<(cmd: Message[A]): Unit = synchronized { diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala index 0065d4c556e4..52e55677bde3 100644 --- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala @@ -64,7 +64,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu * * @return the element with the highest priority. */ - override def dequeue(): A = synchronized { super.dequeue } + override def dequeue(): A = synchronized { super.dequeue() } /** Returns the element with the highest priority in the queue, * or throws an error if there is no element contained in the queue. @@ -76,7 +76,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. 
*/ - override def clear(): Unit = synchronized { super.clear } + override def clear(): Unit = synchronized { super.clear() } /** Returns an iterator which yield all the elements of the priority * queue in descending priority order. diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala index c5f133eec78a..57beab39b699 100644 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala @@ -56,7 +56,7 @@ class SynchronizedQueue[A] extends Queue[A] { * * @return the first element of the queue. */ - override def dequeue(): A = synchronized { super.dequeue } + override def dequeue(): A = synchronized { super.dequeue() } /** Returns the first element in the queue which satisfies the * given predicate, and removes this element from the queue. @@ -85,7 +85,7 @@ class SynchronizedQueue[A] extends Queue[A] { /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ - override def clear(): Unit = synchronized { super.clear } + override def clear(): Unit = synchronized { super.clear() } /** Checks if two queues are structurally identical. * diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index bc9873880c87..27a696895db2 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -69,7 +69,7 @@ trait SynchronizedSet[A] extends Set[A] { } abstract override def clear(): Unit = synchronized { - super.clear + super.clear() } override def subsetOf(that: scala.collection.GenSet[A]) = synchronized { diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala index 5d7c9f60737b..09cdcca99e28 100644 --- a/src/library/scala/collection/mutable/SynchronizedStack.scala +++ b/src/library/scala/collection/mutable/SynchronizedStack.scala @@ -67,13 +67,13 @@ class SynchronizedStack[A] extends Stack[A] { /** Removes the top element from the stack. */ - override def pop(): A = synchronized { super.pop } + override def pop(): A = synchronized { super.pop() } /** * Removes all elements from the stack. After this operation completed, * the stack will be empty. */ - override def clear(): Unit = synchronized { super.clear } + override def clear(): Unit = synchronized { super.clear() } /** Returns an iterator over all elements on the stack. This iterator * is stable with respect to state changes in the stack object; i.e. 
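[Editor's note — illustrative only, not part of this patch] The hunks above and below all apply the same mechanical change: nullary methods that are declared with an empty parameter list and perform side effects (clear, next, dequeue, pop, result, ...) are now invoked with an explicit (). The short Scala sketch below illustrates that convention; Counter, drain and the sample values are hypothetical names invented for this note, not code taken from the commit.

  object EmptyParenConvention {
    import scala.collection.mutable

    final class Counter {
      private var n = 0
      def increment(): Unit = n += 1  // side-effecting: declared and called with ()
      def value: Int = n              // pure accessor: declared and called without parens
    }

    // Drains an iterator into a List; next() and result() both mutate internal
    // state, so the empty argument list is written out at every call site.
    def drain[A](it: Iterator[A]): List[A] = {
      val b = List.newBuilder[A]
      while (it.hasNext) b += it.next()
      b.result()
    }

    def main(args: Array[String]): Unit = {
      val c = new Counter
      c.increment()
      println(c.value)                   // 1
      println(drain(Iterator(1, 2, 3)))  // List(1, 2, 3)

      val buf = mutable.ArrayBuffer(1, 2, 3)
      buf.clear()                        // explicit () keeps the mutation visible, as in the patch
      println(buf.isEmpty)               // true
    }
  }

Calling side-effecting nullary methods with () while leaving pure accessors paren-less is the convention the Scala style guide recommends; spelling the parens out at call sites, as this commit does throughout the library, makes the mutating calls easy to spot in review.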
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala index 9b48c8f24f24..ac634f43aa63 100644 --- a/src/library/scala/collection/mutable/UnrolledBuffer.scala +++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala @@ -87,7 +87,7 @@ extends scala.collection.mutable.AbstractBuffer[T] // `that` is no longer usable, so clear it // here we rely on the fact that `clear` allocates // new nodes instead of modifying the previous ones - that.clear + that.clear() // return a reference to this this @@ -123,7 +123,7 @@ extends scala.collection.mutable.AbstractBuffer[T] val r = node.array(pos) scan() r - } else Iterator.empty.next + } else Iterator.empty.next() } // this should be faster than the iterator diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index 00993c09ff87..00e20e761687 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -86,7 +86,7 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { * if this is applicable. */ def resultWithTaskSupport: To = { - val res = result + val res = result() setTaskSupport(res, combinerTaskSupport) } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 33af99067deb..f0b0fd2aa076 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -214,7 +214,7 @@ self: ParIterableLike[T, Repr, Sequential] => def nonEmpty = size != 0 - def head = iterator.next + def head = iterator.next() def headOption = if (nonEmpty) Some(head) else None @@ -627,7 +627,7 @@ self: ParIterableLike[T, Repr, Sequential] => val b = bf(repr) this.splitter.copy2builder[U, That, Builder[U, That]](b) for (elem <- that.seq) b += elem - setTaskSupport(b.result, tasksupport) + setTaskSupport(b.result(), tasksupport) } } @@ -728,7 +728,7 @@ self: ParIterableLike[T, Repr, Sequential] => tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult { cb => cb.resultWithTaskSupport }) - }) else setTaskSupport((bf(repr) += z).result, tasksupport) + }) else setTaskSupport((bf(repr) += z).result(), tasksupport) } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) @@ -904,7 +904,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel) def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure - private[parallel] override def signalAbort = pit.abort + private[parallel] override def signalAbort = pit.abort() override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" } @@ -921,8 +921,8 @@ self: ParIterableLike[T, Repr, Sequential] => def combineResults(fr: FR, sr: SR): R @volatile var result: R = null.asInstanceOf[R] private[parallel] override def signalAbort() { - ft.signalAbort - st.signalAbort + ft.signalAbort() + st.signalAbort() } protected def mergeSubtasks() { ft mergeThrowables st @@ -938,7 +938,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prevr: Option[R]) = { 
tasksupport.executeAndWaitResult(ft) : Any tasksupport.executeAndWaitResult(st) : Any - mergeSubtasks + mergeSubtasks() } } @@ -950,7 +950,7 @@ self: ParIterableLike[T, Repr, Sequential] => val ftfuture: () => Any = tasksupport.execute(ft) tasksupport.executeAndWaitResult(st) : Any ftfuture() - mergeSubtasks + mergeSubtasks() } } @@ -963,7 +963,7 @@ self: ParIterableLike[T, Repr, Sequential] => result = map(initialResult) } private[parallel] override def signalAbort() { - inner.signalAbort + inner.signalAbort() } override def requiresStrictSplitters = inner.requiresStrictSplitters } @@ -1085,7 +1085,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] { @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort } + def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() } protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p) override def merge(that: Forall) = result = result && that.result } @@ -1093,7 +1093,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] { @volatile var result: Boolean = false - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort } + def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() } protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p) override def merge(that: Exists) = result = result || that.result } @@ -1101,7 +1101,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] { @volatile var result: Option[U] = None - def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort } + def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() } protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p) override def merge(that: Find[U]) = if (this.result == None) result = that.result } @@ -1153,7 +1153,7 @@ self: ParIterableLike[T, Repr, Sequential] => // note: HashMapCombiner doesn't merge same keys until evaluation val cb = mcf() while (pit.hasNext) { - val elem = pit.next + val elem = pit.next() cb += f(elem) -> elem } result = cb @@ -1489,7 +1489,7 @@ self: ParIterableLike[T, Repr, Sequential] => def debugBuffer: ArrayBuffer[String] = null private[parallel] def debugclear() = synchronized { - debugBuffer.clear + debugBuffer.clear() } private[parallel] def debuglog(s: String) = synchronized { diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala index b2105e1e9e5b..aaf83e49af39 100644 --- a/src/library/scala/collection/parallel/ParIterableViewLike.scala +++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala @@ -140,7 +140,7 @@ self => } otherwise { val b = bf(underlying) b ++= this.iterator - b.result + b.result() } /* wrapper virtual ctors */ diff --git a/src/library/scala/collection/parallel/ParMapLike.scala 
b/src/library/scala/collection/parallel/ParMapLike.scala index 56594bec96c6..798ba71b95ad 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -67,7 +67,7 @@ self => i => val iter = s def hasNext = iter.hasNext - def next() = iter.next._1 + def next() = iter.next()._1 def split = { val ss = iter.split.map(keysIterator(_)) ss.foreach { _.signalDelegate = i.signalDelegate } @@ -84,7 +84,7 @@ self => i => val iter = s def hasNext = iter.hasNext - def next() = iter.next._2 + def next() = iter.next()._2 def split = { val ss = iter.split.map(valuesIterator(_)) ss.foreach { _.signalDelegate = i.signalDelegate } diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index 4aaadbaac500..68bc1bc12c84 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -68,7 +68,7 @@ self => val x = self(i) i += 1 x - } else Iterator.empty.next + } else Iterator.empty.next() def head = self(i) @@ -228,7 +228,7 @@ self => b ++= pits(0) b ++= patch b ++= pits(2) - setTaskSupport(b.result, tasksupport) + setTaskSupport(b.result(), tasksupport) } def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { @@ -423,7 +423,7 @@ self => @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { result = pit.sameElements(otherpit) - if (!result) pit.abort + if (!result) pit.abort() } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { @@ -471,7 +471,7 @@ self => @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { result = pit.corresponds(corr)(otherpit) - if (!result) pit.abort + if (!result) pit.abort() } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala index d03b3778603a..22773464ed14 100644 --- a/src/library/scala/collection/parallel/ParSeqViewLike.scala +++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala @@ -173,7 +173,7 @@ self => } otherwise { val b = bf(underlying) b ++= this.iterator - b.result + b.result() } /* tasks */ diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index 726f5a2e93c1..a3a47e2e4006 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -47,47 +47,47 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ override def count(p: T => Boolean): Int = { var i = 0 - while (hasNext) if (p(next)) i += 1 + while (hasNext) if (p(next())) i += 1 i } override def reduce[U >: T](op: (U, U) => U): U = { - var r: U = next - while (hasNext) r = op(r, next) + var r: U = next() + while (hasNext) r = op(r, next()) r } override def fold[U >: T](z: U)(op: (U, U) => U): U = { var r = z - while (hasNext) r = op(r, next) + while (hasNext) r = op(r, next()) r } override def sum[U >: T](implicit num: Numeric[U]): U = { var r: U = num.zero - while (hasNext) r = num.plus(r, next) + while (hasNext) r = num.plus(r, next()) r } override def product[U >: T](implicit num: Numeric[U]): U = { var r: U = num.one - while (hasNext) r = num.times(r, next) + 
while (hasNext) r = num.times(r, next()) r } override def min[U >: T](implicit ord: Ordering[U]): T = { - var r = next + var r = next() while (hasNext) { - val curr = next + val curr = next() if (ord.lteq(curr, r)) r = curr } r } override def max[U >: T](implicit ord: Ordering[U]): T = { - var r = next + var r = next() while (hasNext) { - val curr = next + val curr = next() if (ord.gteq(curr, r)) r = curr } r @@ -97,16 +97,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ var i = from val until = from + len while (i < until && hasNext) { - array(i) = next + array(i) = next() i += 1 } } def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = { var i = howmany - 1 - var u: U = next + var u: U = next() while (i > 0 && hasNext) { - u = op(u, next) + u = op(u, next()) i -= 1 } u @@ -117,7 +117,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) if (isRemainingCheap) cb.sizeHint(remaining) - while (hasNext) cb += f(next) + while (hasNext) cb += f(next()) cb } @@ -125,7 +125,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ //val cb = pbf(repr) val runWith = pf.runWith(cb += _) while (hasNext) { - val curr = next + val curr = next() runWith(curr) } cb @@ -134,7 +134,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) while (hasNext) { - val traversable = f(next).seq + val traversable = f(next()).seq if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator else cb ++= traversable } @@ -149,7 +149,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { while (hasNext) { - val curr = next + val curr = next() if (pred(curr)) cb += curr } cb @@ -157,7 +157,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { while (hasNext) { - val curr = next + val curr = next() if (!pred(curr)) cb += curr } cb @@ -165,7 +165,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { while (hasNext) { - val curr = next + val curr = next() if (pred(curr)) btrue += curr else bfalse += curr } @@ -215,7 +215,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = { var loop = true while (hasNext && loop) { - val curr = next + val curr = next() if (p(curr)) cb += curr else loop = false } @@ -225,7 +225,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = { var isBefore = true while (hasNext && isBefore) { - val curr = next + val curr = next() if (p(curr)) before += curr else { if (isRemainingCheap) after.sizeHint(remaining + 1) @@ -241,7 +241,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ var last = z var i = from while (hasNext) { - 
last = op(last, next) + last = op(last, next()) array(i) = last i += 1 } @@ -250,7 +250,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = { var curr = startValue while (hasNext) { - curr = op(curr, next) + curr = op(curr, next()) cb += curr } cb @@ -260,7 +260,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ var curr = startValue var left = howmany while (left > 0) { - curr = op(curr, next) + curr = op(curr, next()) cb += curr left -= 1 } @@ -270,16 +270,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining) while (hasNext && otherpit.hasNext) { - cb += ((next, otherpit.next)) + cb += ((next(), otherpit.next())) } cb } def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining) - while (this.hasNext && that.hasNext) cb += ((this.next, that.next)) - while (this.hasNext) cb += ((this.next, thatelem)) - while (that.hasNext) cb += ((thiselem, that.next)) + while (this.hasNext && that.hasNext) cb += ((this.next(), that.next())) + while (this.hasNext) cb += ((this.next(), thatelem)) + while (that.hasNext) cb += ((thiselem, that.next())) cb } @@ -299,7 +299,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter var total = 0 var loop = true while (hasNext && loop) { - if (pred(next)) total += 1 + if (pred(next())) total += 1 else loop = false } total @@ -309,7 +309,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter var i = 0 var loop = true while (hasNext && loop) { - if (pred(next)) loop = false + if (pred(next())) loop = false else i += 1 } if (loop) -1 else i @@ -319,7 +319,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter var pos = -1 var i = 0 while (hasNext) { - if (pred(next)) pos = i + if (pred(next())) pos = i i += 1 } pos @@ -327,7 +327,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = { while (hasNext && that.hasNext) { - if (!corr(next, that.next)) return false + if (!corr(next(), that.next())) return false } hasNext == that.hasNext } @@ -349,7 +349,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter //val cb = cbf(repr) if (isRemainingCheap) cb.sizeHint(remaining) var lst = List[S]() - while (hasNext) lst ::= f(next) + while (hasNext) lst ::= f(next()) while (lst != Nil) { cb += lst.head lst = lst.tail @@ -364,7 +364,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter while (hasNext) { if (j == index) { cb += elem - next + next() } else cb += next j += 1 } @@ -439,7 +439,7 @@ self => class Taken(taken: Int) extends IterableSplitter[T] { var remaining = taken min self.remaining def hasNext = remaining > 0 - def next = { remaining -= 1; self.next } + def next = { remaining -= 1; self.next() } def dup: IterableSplitter[T] = self.dup.take(taken) def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) } 
protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = { @@ -467,7 +467,7 @@ self => class Mapped[S](f: T => S) extends IterableSplitter[S] { signalDelegate = self.signalDelegate def hasNext = self.hasNext - def next = f(self.next) + def next = f(self.next()) def remaining = self.remaining def dup: IterableSplitter[S] = self.dup map f def split: Seq[IterableSplitter[S]] = self.split.map { _ map f } @@ -484,8 +484,8 @@ self => } else false def next = if (curr eq self) { hasNext - curr.next - } else curr.next + curr.next() + } else curr.next() def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining protected def firstNonEmpty = (curr eq self) && curr.hasNext def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that) @@ -497,7 +497,7 @@ self => class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] { signalDelegate = self.signalDelegate def hasNext = self.hasNext && that.hasNext - def next = (self.next, that.next) + def next = (self.next(), that.next()) def remaining = self.remaining min that.remaining def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that) def split: Seq[IterableSplitter[(T, S)]] = { @@ -515,9 +515,9 @@ self => signalDelegate = self.signalDelegate def hasNext = self.hasNext || that.hasNext def next = if (self.hasNext) { - if (that.hasNext) (self.next, that.next) - else (self.next, thatelem) - } else (thiselem, that.next) + if (that.hasNext) (self.next(), that.next()) + else (self.next(), thatelem) + } else (thiselem, that.next()) def remaining = self.remaining max that.remaining def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem) diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala index dc49bcf9d7cb..458742df968c 100644 --- a/src/library/scala/collection/parallel/Splitter.scala +++ b/src/library/scala/collection/parallel/Splitter.scala @@ -52,7 +52,7 @@ trait Splitter[+T] extends Iterator[T] { object Splitter { def empty[T]: Splitter[T] = new Splitter[T] { def hasNext = false - def next = Iterator.empty.next + def next = Iterator.empty.next() def split = Seq(this) } } diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index ec1bcbb27ad2..441c4269c367 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -54,13 +54,13 @@ trait Task[R, +Tp] { leaf(lastres) result = result // ensure that effects of `leaf` are visible to readers of `result` } catchBreak { - signalAbort + signalAbort() } } catch { case thr: Exception => result = result // ensure that effects of `leaf` are visible throwable = thr - signalAbort + signalAbort() } } @@ -302,7 +302,7 @@ trait ThreadPoolTasks extends Tasks { () => { t.sync() - t.body.forwardThrowable + t.body.forwardThrowable() t.body.result } } @@ -314,7 +314,7 @@ trait ThreadPoolTasks extends Tasks { t.start() t.sync() - t.body.forwardThrowable + t.body.forwardThrowable() t.body.result } @@ -402,8 +402,8 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool { } () => { - fjtask.sync - fjtask.body.forwardThrowable + fjtask.sync() + fjtask.body.forwardThrowable() fjtask.body.result } } @@ -424,9 +424,9 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool { forkJoinPool.execute(fjtask) } - fjtask.sync + fjtask.sync() // if (fjtask.body.throwable != null) println("throwing: " + 
fjtask.body.throwable + " at " + fjtask.body) - fjtask.body.forwardThrowable + fjtask.body.forwardThrowable() fjtask.body.result } diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index b25230bbeb72..f3be47ea030d 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -109,7 +109,7 @@ self => } def next(): (K, V) = { i += 1 - val r = triter.next + val r = triter.next() r } def hasNext: Boolean = { diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index e7e64eb2adf9..4f34993b85ab 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -106,7 +106,7 @@ self => } def next(): T = { i += 1 - triter.next + triter.next() } def hasNext: Boolean = { i < sz diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index a3f473c6a799..78cde1724b58 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -60,7 +60,7 @@ self => val r = range.apply(ind) ind += 1 r - } else Iterator.empty.next + } else Iterator.empty.next() private def rangeleft = range.drop(ind) diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 0e9eac62e298..68c43e682eb2 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -226,7 +226,7 @@ self => if (all) i = nextuntil else { i = until - abort + abort() } if (isAborted) return false @@ -254,7 +254,7 @@ self => some = exists_quick(p, array, nextuntil, i) if (some) { i = until - abort + abort() } else i = nextuntil if (isAborted) return true @@ -283,7 +283,7 @@ self => if (r != None) { i = until - abort + abort() } else i = nextuntil if (isAborted) return r diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala index b151e45d6596..aa790dd54855 100644 --- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala @@ -48,7 +48,7 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { idx += 1 if (hasNext) scan() r - } else Iterator.empty.next + } else Iterator.empty.next() def dup = newIterator(idx, until, totalsize) def split = if (remaining > 1) { val divpt = (until + idx) / 2 diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index f5c0b10526c6..7766f07e236d 100644 --- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -47,7 +47,7 @@ extends Combiner[T, ParArray[T]] { } def clear() { - buff.clear + buff.clear() } override def sizeHint(sz: Int) = { diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 0670da137cb4..95b393dd0eba 100644 --- 
a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -576,7 +576,7 @@ object Future { def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = { in.foldLeft(Promise.successful(cbf(in)).future) { (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a) - } map (_.result) + } map (_.result()) } /** Returns a `Future` to the result of the first future in the list that is completed. @@ -638,7 +638,7 @@ object Future { * }}} */ def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { - if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")).future + if (futures.isEmpty) Promise[R]().failure(new NoSuchElementException("reduce attempted on empty collection")).future else sequence(futures).map(_ reduceLeft op) } @@ -654,7 +654,7 @@ object Future { in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) => val fb = fn(a.asInstanceOf[A]) for (r <- fr; b <- fb) yield (r += b) - }.map(_.result) + }.map(_.result()) // This is used to run callbacks which are internal // to scala.concurrent; our own callbacks are only diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala index 0353d61b220c..6c6155279da5 100644 --- a/src/library/scala/concurrent/duration/Duration.scala +++ b/src/library/scala/concurrent/duration/Duration.scala @@ -103,7 +103,7 @@ object Duration { * Extract length and time unit out of a duration, if it is finite. */ def unapply(d: Duration): Option[(Long, TimeUnit)] = - if (d.isFinite) Some((d.length, d.unit)) else None + if (d.isFinite()) Some((d.length, d.unit)) else None /** * Construct a possibly infinite or undefined Duration from the given number of nanoseconds. 
@@ -623,7 +623,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio // if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331 private[this] def minusZero = -0d def /(divisor: Duration): Double = - if (divisor.isFinite) toNanos.toDouble / divisor.toNanos + if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos else if (divisor eq Undefined) Double.NaN else if ((length < 0) ^ (divisor > Zero)) 0d else minusZero diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 52f10751370f..7af70400ef3c 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -83,7 +83,7 @@ private[concurrent] object Promise { import Duration.Undefined atMost match { case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") - case Duration.Inf => awaitUnbounded + case Duration.Inf => awaitUnbounded() case Duration.MinusInf => isCompleted case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted } @@ -135,7 +135,7 @@ private[concurrent] object Promise { } def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { - val preparedEC = executor.prepare + val preparedEC = executor.prepare() val runnable = new CallbackRunnable[T](preparedEC, func) @tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed @@ -162,7 +162,7 @@ private[concurrent] object Promise { def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { val completedAs = value.get - val preparedEC = executor.prepare + val preparedEC = executor.prepare() (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs) } diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala index 767f06fd3f56..e250da27c3ca 100644 --- a/src/library/scala/io/BufferedSource.scala +++ b/src/library/scala/io/BufferedSource.scala @@ -73,7 +73,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod if (nextLine == null) lineReader.readLine else try nextLine finally nextLine = null } - if (result == null) Iterator.empty.next + if (result == null) Iterator.empty.next() else result } } diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index b13729aefe40..f976c7eb0ac1 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -194,11 +194,11 @@ abstract class Source extends Iterator[Char] { lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered def isNewline(ch: Char) = ch == '\r' || ch == '\n' def getc() = iter.hasNext && { - val ch = iter.next + val ch = iter.next() if (ch == '\n') false else if (ch == '\r') { if (iter.hasNext && iter.head == '\n') - iter.next + iter.next() false } @@ -209,7 +209,7 @@ abstract class Source extends Iterator[Char] { } def hasNext = iter.hasNext def next = { - sb.clear + sb.clear() while (getc()) { } sb.toString } @@ -227,7 +227,7 @@ abstract class Source extends Iterator[Char] { /** Returns next character. 
*/ - def next(): Char = positioner.next + def next(): Char = positioner.next() class Positioner(encoder: Position) { def this() = this(RelaxedPosition) @@ -245,7 +245,7 @@ abstract class Source extends Iterator[Char] { var tabinc = 4 def next(): Char = { - ch = iter.next + ch = iter.next() pos = encoder.encode(cline, ccol) ch match { case '\n' => @@ -267,7 +267,7 @@ abstract class Source extends Iterator[Char] { } object RelaxedPositioner extends Positioner(RelaxedPosition) { } object NoPositioner extends Positioner(Position) { - override def next(): Char = iter.next + override def next(): Char = iter.next() } def ch = positioner.ch def pos = positioner.pos diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index f3aabc29741c..d8f4337b8f1f 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -171,7 +171,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable { * with unequal hashCodes. */ override def hashCode(): Int = - if (isWhole) unifiedPrimitiveHashcode + if (isWhole()) unifiedPrimitiveHashcode() else doubleValue.## /** Compares this BigDecimal with the specified value for equality. diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index feb538033bcd..719099b405d0 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -112,7 +112,7 @@ object BigInt { class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable { /** Returns the hash code for this BigInt. */ override def hashCode(): Int = - if (isValidLong) unifiedPrimitiveHashcode + if (isValidLong) unifiedPrimitiveHashcode() else bigInteger.## /** Compares this BigInt with the specified value for equality. diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index aea512a54132..d1a4e7c35c91 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -173,7 +173,7 @@ object Ordering extends LowPriorityOrderingImplicits { val ye = y.iterator while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next, ye.next) + val res = ord.compare(xe.next(), ye.next()) if (res != 0) return res } @@ -347,7 +347,7 @@ object Ordering extends LowPriorityOrderingImplicits { val ye = y.iterator while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next, ye.next) + val res = ord.compare(xe.next(), ye.next()) if (res != 0) return res } diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala index 59fc7f27b208..e748841c12a3 100644 --- a/src/library/scala/math/ScalaNumericConversions.scala +++ b/src/library/scala/math/ScalaNumericConversions.scala @@ -32,37 +32,37 @@ trait ScalaNumericAnyConversions extends Any { /** Returns the value of this as a [[scala.Char]]. This may involve * rounding or truncation. */ - def toChar = intValue.toChar + def toChar = intValue().toChar /** Returns the value of this as a [[scala.Byte]]. This may involve * rounding or truncation. */ - def toByte = byteValue + def toByte = byteValue() /** Returns the value of this as a [[scala.Short]]. This may involve * rounding or truncation. */ - def toShort = shortValue + def toShort = shortValue() /** Returns the value of this as an [[scala.Int]]. This may involve * rounding or truncation. */ - def toInt = intValue + def toInt = intValue() /** Returns the value of this as a [[scala.Long]]. 
This may involve * rounding or truncation. */ - def toLong = longValue + def toLong = longValue() /** Returns the value of this as a [[scala.Float]]. This may involve * rounding or truncation. */ - def toFloat = floatValue + def toFloat = floatValue() /** Returns the value of this as a [[scala.Double]]. This may involve * rounding or truncation. */ - def toDouble = doubleValue + def toDouble = doubleValue() /** Returns `true` iff this has a zero fractional part, and is within the * range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`. diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala index 76fc38b26709..e8460a203b32 100644 --- a/src/library/scala/runtime/ScalaNumberProxy.scala +++ b/src/library/scala/runtime/ScalaNumberProxy.scala @@ -28,8 +28,8 @@ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed def floatValue() = num.toFloat(self) def longValue() = num.toLong(self) def intValue() = num.toInt(self) - def byteValue() = intValue.toByte - def shortValue() = intValue.toShort + def byteValue() = intValue().toByte + def shortValue() = intValue().toShort def min(that: T): T = num.min(self, that) def max(that: T): T = num.max(self, that) diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index ef29075ac3fd..bde69a0f549d 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -37,12 +37,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) - b += f(el1, elems2.next) + b += f(el1, elems2.next()) else - return b.result + return b.result() } - b.result + b.result() } def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { @@ -51,12 +51,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) - b ++= f(el1, elems2.next) + b ++= f(el1, elems2.next()) else - return b.result + return b.result() } - b.result + b.result() } def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = { @@ -66,16 +66,16 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) { - val el2 = elems2.next + val el2 = elems2.next() if (f(el1, el2)) { b1 += el1 b2 += el2 } } - else return (b1.result, b2.result) + else return (b1.result(), b2.result()) } - (b1.result, b2.result) + (b1.result(), b2.result()) } def exists(f: (El1, El2) => Boolean): Boolean = { @@ -83,7 +83,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) { - if (f(el1, elems2.next)) + if (f(el1, elems2.next())) return true } else return false @@ -99,7 +99,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) - f(el1, elems2.next) + f(el1, elems2.next()) else return } @@ -117,9 +117,9 @@ object Tuple2Zipped { val it1 = x._1.toIterator val it2 = x._2.toIterator while (it1.hasNext && it2.hasNext) - buf += ((it1.next, it2.next)) + buf += ((it1.next(), it2.next())) - buf.result + buf.result() } def zipped[El1, Repr1, El2, Repr2] diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 3f2afaf77291..34da42462a51 100644 --- 
a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -34,11 +34,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) - b += f(el1, elems2.next, elems3.next) + b += f(el1, elems2.next(), elems3.next()) else - return b.result + return b.result() } - b.result + b.result() } def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { @@ -48,11 +48,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) - b ++= f(el1, elems2.next, elems3.next) + b ++= f(el1, elems2.next(), elems3.next()) else - return b.result + return b.result() } - b.result + b.result() } def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)( @@ -64,12 +64,12 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers val b3 = cbf3(colls._3.repr) val elems2 = colls._2.iterator val elems3 = colls._3.iterator - def result = (b1.result, b2.result, b3.result) + def result = (b1.result(), b2.result(), b3.result()) for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) { - val el2 = elems2.next - val el3 = elems3.next + val el2 = elems2.next() + val el3 = elems3.next() if (f(el1, el2, el3)) { b1 += el1 @@ -89,7 +89,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) { - if (f(el1, elems2.next, elems3.next)) + if (f(el1, elems2.next(), elems3.next())) return true } else return false @@ -106,7 +106,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) - f(el1, elems2.next, elems3.next) + f(el1, elems2.next(), elems3.next()) else return } @@ -126,9 +126,9 @@ object Tuple3Zipped { val it2 = x._2.toIterator val it3 = x._3.toIterator while (it1.hasNext && it2.hasNext && it3.hasNext) - buf += ((it1.next, it2.next, it3.next)) + buf += ((it1.next(), it2.next(), it3.next())) - buf.result + buf.result() } def zipped[El1, Repr1, El2, Repr2, El3, Repr3] diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index 0003df6c52ac..e2c4f138308c 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -46,7 +46,7 @@ object BasicIO { def next(): Stream[T] = q.take match { case Left(0) => Stream.empty case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty - case Right(s) => Stream.cons(s, next) + case Right(s) => Stream.cons(s, next()) } new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next()) } diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index c21c0daa5e60..bfd3551a65f7 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -32,7 +32,7 @@ private[process] trait ProcessImpl { try result set Right(f) catch { case e: Exception => result set Left(e) } - Spawn(run) + Spawn(run()) () => result.get match { case Right(value) => value @@ -68,10 +68,10 @@ private[process] trait ProcessImpl { protected[this] override def runAndExitValue() = { val first = a.run(io) - runInterruptible(first.exitValue)(first.destroy()) flatMap { codeA => + 
runInterruptible(first.exitValue())(first.destroy()) flatMap { codeA => if (evaluateSecondProcess(codeA)) { val second = b.run(io) - runInterruptible(second.exitValue)(second.destroy()) + runInterruptible(second.exitValue())(second.destroy()) } else Some(codeA) } @@ -132,10 +132,10 @@ private[process] trait ProcessImpl { val first = a.run(firstIO) try { runInterruptible { - val exit1 = first.exitValue + val exit1 = first.exitValue() currentSource put None currentSink put None - val exit2 = second.exitValue + val exit2 = second.exitValue() // Since file redirection (e.g. #>) is implemented as a piped process, // we ignore its exit value so cmd #> file doesn't always return 0. if (b.hasExitValue) exit2 else exit1 diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala index 2b11594f6651..b3a8617f156f 100644 --- a/src/library/scala/util/Random.scala +++ b/src/library/scala/util/Random.scala @@ -117,7 +117,7 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { swap(n - 1, k) } - (bf(xs) ++= buf).result + (bf(xs) ++= buf).result() } /** Returns a Stream of pseudorandomly chosen alphanumeric characters, diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 0cd0cfd7f6f1..981d9af02f58 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -233,7 +233,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends new Iterator[Match] { def hasNext = matchIterator.hasNext def next: Match = { - matchIterator.next + matchIterator.next() new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force } } @@ -622,14 +622,14 @@ object Regex { /** Convert to an iterator that yields MatchData elements instead of Strings */ def matchData: Iterator[Match] = new AbstractIterator[Match] { def hasNext = self.hasNext - def next = { self.next; new Match(source, matcher, groupNames).force } + def next = { self.next(); new Match(source, matcher, groupNames).force } } /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */ private[matching] def replacementData = new AbstractIterator[Match] with Replacement { def matcher = self.matcher def hasNext = self.hasNext - def next = { self.next; new Match(source, matcher, groupNames).force } + def next = { self.next(); new Match(source, matcher, groupNames).force } } } diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala index f9157802c6a3..98807a40a4d8 100755 --- a/src/library/scala/xml/PrettyPrinter.scala +++ b/src/library/scala/xml/PrettyPrinter.scala @@ -141,7 +141,7 @@ class PrettyPrinter(width: Int, step: Int) { case Text(s) if s.trim() == "" => ; case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr => - makeBox( ind, node.toString.trim() ) + makeBox( ind, node.toString().trim() ) case g @ Group(xs) => traverse(xs.iterator, pscope, ind) case _ => diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala index f3c162fcc8ba..06fd46701a8a 100755 --- a/src/library/scala/xml/Utility.scala +++ b/src/library/scala/xml/Utility.scala @@ -245,10 +245,10 @@ object Utility extends AnyRef with parsing.TokenTests { if (children.isEmpty) return else if (children forall isAtomAndNotText) { // add space val it = children.iterator - val f = it.next + val f = it.next() serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, 
minimizeTags) while (it.hasNext) { - val x = it.next + val x = it.next() sb.append(' ') serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) } @@ -333,22 +333,22 @@ object Utility extends AnyRef with parsing.TokenTests { val it = value.iterator while (it.hasNext) { - var c = it.next + var c = it.next() // entity! flush buffer into text node if (c == '&') { - c = it.next + c = it.next() if (c == '#') { - c = it.next - val theChar = parseCharRef ({ ()=> c },{ () => c = it.next },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)}) + c = it.next() + val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)}) sb.append(theChar) } else { if (rfb eq null) rfb = new StringBuilder() rfb append c - c = it.next + c = it.next() while (c != ';') { rfb.append(c) - c = it.next + c = it.next() } val ref = rfb.toString() rfb.clear() diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala index 6bc9c0583269..ca84bcad7030 100644 --- a/src/library/scala/xml/dtd/ContentModelParser.scala +++ b/src/library/scala/xml/dtd/ContentModelParser.scala @@ -26,14 +26,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # scala.sys.error("expected "+token2string(tok)+ ", got unexpected token:"+token2string(token)) } - nextToken + nextToken() } // s [ '+' | '*' | '?' ] def maybeSuffix(s: RegExp) = token match { - case STAR => nextToken; Star(s) - case PLUS => nextToken; Sequ(s, Star(s)) - case OPT => nextToken; Alt(Eps, s) + case STAR => nextToken(); Star(s) + case PLUS => nextToken(); Sequ(s, Star(s)) + case OPT => nextToken(); Alt(Eps, s) case _ => s } @@ -48,18 +48,18 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # } case LPAREN => - nextToken - sOpt + nextToken() + sOpt() if (token != TOKEN_PCDATA) ELEMENTS(regexp) else { - nextToken + nextToken() token match { case RPAREN => PCDATA case CHOICE => val res = MIXED(choiceRest(Eps)) - sOpt + sOpt() accept( RPAREN ) accept( STAR ) res @@ -72,7 +72,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # scala.sys.error("unexpected token:" + token2string(token) ) } // sopt ::= S? - def sOpt() = if( token == S ) nextToken + def sOpt() = if( token == S ) nextToken() // (' S? mixed ::= '#PCDATA' S? ')' // | '#PCDATA' (S? '|' S? atom)* S? ')*' @@ -80,9 +80,9 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # // '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' 
] def regexp: RegExp = { val p = particle - sOpt + sOpt() maybeSuffix(token match { - case RPAREN => nextToken; p + case RPAREN => nextToken(); p case CHOICE => val q = choiceRest( p );accept( RPAREN ); q case COMMA => val q = seqRest( p ); accept( RPAREN ); q }) @@ -92,10 +92,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # def seqRest(p: RegExp) = { var k = List(p) while( token == COMMA ) { - nextToken - sOpt + nextToken() + sOpt() k = particle::k - sOpt + sOpt() } Sequ( k.reverse:_* ) } @@ -104,10 +104,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # def choiceRest( p:RegExp ) = { var k = List( p ) while( token == CHOICE ) { - nextToken - sOpt + nextToken() + sOpt() k = particle::k - sOpt + sOpt() } Alt( k.reverse:_* ) } @@ -115,14 +115,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # // particle ::= '(' S? regexp // | name [ '+' | '*' | '?' ] def particle = token match { - case LPAREN => nextToken; sOpt; regexp - case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a) + case LPAREN => nextToken(); sOpt(); regexp + case NAME => val a = Letter(ElemName(value)); nextToken(); maybeSuffix(a) case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token)) } // atom ::= name def atom = token match { - case NAME => val a = Letter(ElemName(value)); nextToken; a + case NAME => val a = Letter(ElemName(value)); nextToken(); a case _ => scala.sys.error("expected Name, got:"+token2string(token)) } } diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala index b2510baa1816..af7e77e76f60 100644 --- a/src/library/scala/xml/dtd/DocType.scala +++ b/src/library/scala/xml/dtd/DocType.scala @@ -28,7 +28,7 @@ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) { if (intSubset.isEmpty) "" else intSubset.mkString("[", "", "]") - """&lt;!DOCTYPE %s %s%s&gt;""".format(name, extID.toString, intString) + """&lt;!DOCTYPE %s %s%s&gt;""".format(name, extID.toString(), intString) } } diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala index d4d648c8dfc0..53404e34a730 100644 --- a/src/library/scala/xml/dtd/Scanner.scala +++ b/src/library/scala/xml/dtd/Scanner.scala @@ -28,8 +28,8 @@ class Scanner extends Tokens with parsing.TokenTests { value = "" it = (s).iterator token = 1+END - next - nextToken + next() + nextToken() } /** scans the next token */ @@ -41,27 +41,27 @@ class Scanner extends Tokens with parsing.TokenTests { final def isIdentChar = ( ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')) - final def next() = if (it.hasNext) c = it.next else c = ENDCH + final def next() = if (it.hasNext) c = it.next() else c = ENDCH final def acc(d: Char) { - if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !") + if (c == d) next() else scala.sys.error("expected '"+d+"' found '"+c+"' !") } final def accS(ds: Seq[Char]) { ds foreach acc } final def readToken: Int = if (isSpace(c)) { - while (isSpace(c)) c = it.next + while (isSpace(c)) c = it.next() S } else c match { - case '(' => next; LPAREN - case ')' => next; RPAREN - case ',' => next; COMMA - case '*' => next; STAR - case '+' => next; PLUS - case '?' => next; OPT - case '|' => next; CHOICE - case '#' => next; accS( "PCDATA" ); TOKEN_PCDATA + case '(' => next(); LPAREN + case ')' => next(); RPAREN + case ',' => next(); COMMA + case '*' => next(); STAR + case '+' => next(); PLUS + case '?'
=> next(); OPT + case '|' => next(); CHOICE + case '#' => next(); accS( "PCDATA" ); TOKEN_PCDATA case ENDCH => END case _ => if (isNameStart(c)) name; // NAME @@ -70,7 +70,7 @@ class Scanner extends Tokens with parsing.TokenTests { final def name = { val sb = new StringBuilder() - do { sb.append(c); next } while (isNameChar(c)) + do { sb.append(c); next() } while (isNameChar(c)) value = sb.toString() NAME } diff --git a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala index 8e4b5cc0f08e..d1ea4b6e9e94 100644 --- a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala +++ b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala @@ -50,7 +50,7 @@ private[dtd] class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) addFinal(q0) // initial state may also be a final state while (!rest.isEmpty) { - val P = rest.pop + val P = rest.pop() // assign a number to this bitset indexMap = indexMap.updated(P, ix) invIndexMap = invIndexMap.updated(ix, P) diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala index efa241e388b0..bd18f2a699b1 100644 --- a/src/library/scala/xml/factory/XMLLoader.scala +++ b/src/library/scala/xml/factory/XMLLoader.scala @@ -38,7 +38,7 @@ trait XMLLoader[T <: Node] newAdapter.scopeStack push TopScope parser.parse(source, newAdapter) - newAdapter.scopeStack.pop + newAdapter.scopeStack.pop() newAdapter.rootElem.asInstanceOf[T] } diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala index 5f776f529965..8659d3f0c4f2 100644 --- a/src/library/scala/xml/parsing/FactoryAdapter.scala +++ b/src/library/scala/xml/parsing/FactoryAdapter.scala @@ -26,7 +26,7 @@ trait ConsoleErrorHandler extends DefaultHandler { val s = "[%s]:%d:%d: %s".format( errtype, ex.getLineNumber, ex.getColumnNumber, ex.getMessage) Console.println(s) - Console.flush + Console.flush() } } @@ -91,7 +91,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node else { var it = ch.slice(offset, offset + length).iterator while (it.hasNext) { - val c = it.next + val c = it.next() val isSpace = c.isWhitespace buffer append (if (isSpace) ' ' else c) if (isSpace) @@ -164,17 +164,17 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node */ override def endElement(uri: String , _localName: String, qname: String): Unit = { captureText() - val metaData = attribStack.pop + val metaData = attribStack.pop() // reverse order to get it right val v = (Iterator continually hStack.pop takeWhile (_ != null)).toList.reverse val (pre, localName) = splitName(qname) - val scp = scopeStack.pop + val scp = scopeStack.pop() // create element rootElem = createNode(pre, localName, metaData, scp, v) hStack push rootElem - curTag = tagStack.pop + curTag = tagStack.pop() capture = curTag != null && nodeContainsText(curTag) // root level } diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index 228043e18364..8129165b1b9a 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -102,7 +102,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def ch: Char = { if (nextChNeeded) { if (curInput.hasNext) { - lastChRead = curInput.next + lastChRead = curInput.next() pos = curInput.pos } else { val ilen = inpStack.length @@ -138,7 +138,7 @@ trait MarkupParser extends 
MarkupParserCommon with TokenTests * }}} */ def xmlProcInstr(): MetaData = { xToken("xml") - xSpace + xSpace() val (md,scp) = xAttributes(TopScope) if (scp != TopScope) reportSyntaxError("no xmlns definitions here, please.") @@ -158,7 +158,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var n = 0 if (isProlog) - xSpaceOpt + xSpaceOpt() m("version") match { case null => @@ -223,10 +223,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests return null } - nextch // is prolog ? + nextch() // is prolog ? var children: NodeSeq = null if ('?' == ch) { - nextch + nextch() info_prolog = prolog() doc.version = info_prolog._1 doc.encoding = info_prolog._2 @@ -272,7 +272,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * after construction, this method formalizes that suboptimal reality. */ def initialize: this.type = { - nextch + nextch() this } @@ -304,7 +304,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var aMap: MetaData = Null while (isNameStart(ch)) { val qname = xName - xEQ // side effect + xEQ() // side effect val value = xAttributeValue() Utility.prefix(qname) match { @@ -324,7 +324,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests } if ((ch != '/') && (ch != '>') && ('?' != ch)) - xSpace + xSpace() } if(!aMap.wellformed(scope)) @@ -341,12 +341,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests */ def xEntityValue(): String = { val endch = ch - nextch + nextch() while (ch != endch && !eof) { putChar(ch) - nextch + nextch() } - nextch + nextch() val str = cbuf.toString() cbuf.length = 0 str @@ -375,13 +375,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val sb: StringBuilder = new StringBuilder() xToken("--") while (true) { - if (ch == '-' && { sb.append(ch); nextch; ch == '-' }) { + if (ch == '-' && { sb.append(ch); nextch(); ch == '-' }) { sb.length = sb.length - 1 - nextch + nextch() xToken('>') return handle.comment(pos, sb.toString()) } else sb.append(ch) - nextch + nextch() } throw FatalError("this cannot happen") } @@ -402,7 +402,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def content1(pscope: NamespaceBinding, ts: NodeBuffer) { ch match { case '!' => - nextch + nextch() if ('[' == ch) // CDATA ts &+ xCharData else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK @@ -410,7 +410,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests else // comment ts &+ xComment case '?' => // PI - nextch + nextch() ts &+ xProcInstr case _ => ts &+ element1(pscope) // child @@ -435,17 +435,17 @@ trait MarkupParser extends MarkupParserCommon with TokenTests ch match { case '<' => // another tag - nextch; ch match { + nextch(); ch match { case '/' => exit = true // end tag case _ => content1(pscope, ts) } // postcond: xEmbeddedBlock == false! 
case '&' => // EntityRef or CharRef - nextch; ch match { + nextch(); ch match { case '#' => // CharacterRef - nextch - val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch)) + nextch() + val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch())) xToken(';') ts &+ theChar case _ => // EntityRef @@ -470,16 +470,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def externalID(): ExternalID = ch match { case 'S' => - nextch + nextch() xToken("YSTEM") - xSpace + xSpace() val sysID = systemLiteral() new SystemID(sysID) case 'P' => - nextch; xToken("UBLIC") - xSpace + nextch(); xToken("UBLIC") + xSpace() val pubID = pubidLiteral() - xSpace + xSpace() val sysID = systemLiteral() new PublicID(pubID, sysID) } @@ -495,13 +495,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests if (this.dtd ne null) reportSyntaxError("unexpected character (DOCTYPE already defined") xToken("DOCTYPE") - xSpace + xSpace() val n = xName - xSpace + xSpace() //external ID if ('S' == ch || 'P' == ch) { extID = externalID() - xSpaceOpt + xSpaceOpt() } /* parse external subset of DTD @@ -518,12 +518,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests } if ('[' == ch) { // internal subset - nextch + nextch() /* TODO */ intSubset() // TODO: do the DTD parsing?? ?!?!?!?!! xToken(']') - xSpaceOpt + xSpaceOpt() } xToken('>') this.dtd = new DTD { @@ -580,7 +580,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var exit = false while (! exit) { putChar(ch) - nextch + nextch() exit = eof || ( ch == '<' ) || ( ch == '&' ) } @@ -598,12 +598,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val endch = ch if (ch != '\'' && ch != '"') reportSyntaxError("quote ' or \" expected") - nextch + nextch() while (ch != endch && !eof) { putChar(ch) - nextch + nextch() } - nextch + nextch() val str = cbuf.toString() cbuf.length = 0 str @@ -616,15 +616,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val endch = ch if (ch!='\'' && ch != '"') reportSyntaxError("quote ' or \" expected") - nextch + nextch() while (ch != endch && !eof) { putChar(ch) //println("hello '"+ch+"'"+isPubIDChar(ch)) if (!isPubIDChar(ch)) reportSyntaxError("char '"+ch+"' is not allowed in public id") - nextch + nextch() } - nextch + nextch() val str = cbuf.toString cbuf.length = 0 str @@ -637,9 +637,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def extSubset(): Unit = { var textdecl: (Option[String],Option[String]) = null if (ch == '<') { - nextch + nextch() if (ch == '?') { - nextch + nextch() textdecl = textDecl() } else markupDecl1() @@ -650,13 +650,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def markupDecl1() = { def doInclude() = { - xToken('['); while(']' != ch) markupDecl(); nextch // ']' + xToken('['); while(']' != ch) markupDecl(); nextch() // ']' } def doIgnore() = { - xToken('['); while(']' != ch) nextch; nextch // ']' + xToken('['); while(']' != ch) nextch(); nextch() // ']' } if ('?' == ch) { - nextch + nextch() xProcInstr // simply ignore processing instructions! 
} else { xToken('!') @@ -665,35 +665,35 @@ trait MarkupParser extends MarkupParserCommon with TokenTests xComment // ignore comments case 'E' => - nextch + nextch() if ('L' == ch) { - nextch + nextch() elementDecl() } else entityDecl() case 'A' => - nextch + nextch() attrDecl() case 'N' => - nextch + nextch() notationDecl() case '[' if inpStack.length >= extIndex => - nextch - xSpaceOpt + nextch() + xSpaceOpt() ch match { case '%' => - nextch + nextch() val ent = xName xToken(';') - xSpaceOpt + xSpaceOpt() push(ent) - xSpaceOpt + xSpaceOpt() val stmt = xName - xSpaceOpt + xSpaceOpt() stmt match { // parameter entity @@ -701,15 +701,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests case "IGNORE" => doIgnore() } case 'I' => - nextch + nextch() ch match { case 'G' => - nextch + nextch() xToken("NORE") - xSpaceOpt + xSpaceOpt() doIgnore() case 'N' => - nextch + nextch() xToken("NCLUDE") doInclude() } @@ -720,14 +720,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests case _ => curInput.reportError(pos, "unexpected character '"+ch+"', expected some markupdecl") while (ch!='>') - nextch + nextch() } } } def markupDecl(): Unit = ch match { case '%' => // parameter entity reference - nextch + nextch() val ent = xName xToken(';') if (!isValidating) @@ -737,20 +737,20 @@ trait MarkupParser extends MarkupParserCommon with TokenTests //peReference case '<' => - nextch + nextch() markupDecl1() case _ if isSpace(ch) => - xSpace + xSpace() case _ => reportSyntaxError("markupdecl: unexpected character '"+ch+"' #" + ch.toInt) - nextch + nextch() } /** "rec-xml/#ExtSubset" pe references may not occur within markup declarations */ def intSubset() { //Console.println("(DEBUG) intSubset()") - xSpace + xSpace() while (']' != ch) markupDecl() } @@ -759,16 +759,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests */ def elementDecl() { xToken("EMENT") - xSpace + xSpace() val n = xName - xSpace + xSpace() while ('>' != ch) { //Console.println("["+ch+"]") putChar(ch) - nextch + nextch() } //Console.println("END["+ch+"]") - nextch + nextch() val cmstr = cbuf.toString() cbuf.length = 0 handle.elemDecl(n, cmstr) @@ -779,20 +779,20 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def attrDecl() = { xToken("TTLIST") - xSpace + xSpace() val n = xName - xSpace + xSpace() var attList: List[AttrDecl] = Nil // later: find the elemDecl for n while ('>' != ch) { val aname = xName - xSpace + xSpace() // could be enumeration (foo,bar) parse this later :-/ while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) { if (!isSpace(ch)) cbuf.append(ch) - nextch + nextch() } val atpe = cbuf.toString cbuf.length = 0 @@ -802,21 +802,21 @@ trait MarkupParser extends MarkupParserCommon with TokenTests DEFAULT(false, xAttributeValue()) case '#' => - nextch + nextch() xName match { - case "FIXED" => xSpace ; DEFAULT(true, xAttributeValue()) + case "FIXED" => xSpace() ; DEFAULT(true, xAttributeValue()) case "IMPLIED" => IMPLIED case "REQUIRED" => REQUIRED } case _ => null } - xSpaceOpt + xSpaceOpt() attList ::= AttrDecl(aname, atpe, defdecl) cbuf.length = 0 } - nextch + nextch() handle.attListDecl(n, attList.reverse) } @@ -826,39 +826,39 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def entityDecl() = { var isParameterEntity = false xToken("NTITY") - xSpace + xSpace() if ('%' == ch) { - nextch + nextch() isParameterEntity = true - xSpace + xSpace() } val n = xName - xSpace + xSpace() ch match { case 'S' | 'P' => //sy val extID = externalID() if 
(isParameterEntity) { - xSpaceOpt + xSpaceOpt() xToken('>') handle.parameterEntityDecl(n, ExtDef(extID)) } else { // notation? - xSpace + xSpace() if ('>' != ch) { xToken("NDATA") - xSpace + xSpace() val notat = xName - xSpaceOpt + xSpaceOpt() xToken('>') handle.unparsedEntityDecl(n, extID, notat) } else { - nextch + nextch() handle.parsedEntityDecl(n, ExtDef(extID)) } } case '"' | '\'' => val av = xEntityValue() - xSpaceOpt + xSpaceOpt() xToken('>') if (isParameterEntity) handle.parameterEntityDecl(n, IntDef(av)) @@ -873,19 +873,19 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def notationDecl() { xToken("OTATION") - xSpace + xSpace() val notat = xName - xSpace + xSpace() val extID = if (ch == 'S') { externalID() } else if (ch == 'P') { /** PublicID (without system, only used in NOTATION) */ - nextch + nextch() xToken("UBLIC") - xSpace + xSpace() val pubID = pubidLiteral() - xSpaceOpt + xSpaceOpt() val sysID = if (ch != '>') systemLiteral() else @@ -895,7 +895,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests reportSyntaxError("PUBLIC or SYSTEM expected") scala.sys.error("died parsing notationdecl") } - xSpaceOpt + xSpaceOpt() xToken('>') handle.notationDecl(notat, extID) } @@ -912,7 +912,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests ch curInput = replacementText(entityName) - nextch + nextch() } def pushExternal(systemId: String) { @@ -923,7 +923,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests ch curInput = externalSource(systemId) - nextch + nextch() } def pop() { diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala index 43ec5399313f..7bfbcc7fffb3 100644 --- a/src/library/scala/xml/parsing/MarkupParserCommon.scala +++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala @@ -38,7 +38,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { */ protected def xTag(pscope: NamespaceType): (String, AttributesType) = { val name = xName - xSpaceOpt + xSpaceOpt() (name, mkAttributes(name, pscope)) } @@ -49,7 +49,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { */ def xProcInstr: ElementType = { val n = xName - xSpaceOpt + xSpaceOpt() xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>") } @@ -77,7 +77,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { private def takeUntilChar(it: Iterator[Char], end: Char): String = { val buf = new StringBuilder - while (it.hasNext) it.next match { + while (it.hasNext) it.next() match { case `end` => return buf.toString case ch => buf append ch } @@ -91,7 +91,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { if (xName != startName) errorNoEnd(startName) - xSpaceOpt + xSpaceOpt() xToken('>') } @@ -138,9 +138,9 @@ private[scala] trait MarkupParserCommon extends TokenTests { val buf = new StringBuilder val it = attval.iterator.buffered - while (it.hasNext) buf append (it.next match { + while (it.hasNext) buf append (it.next() match { case ' ' | '\t' | '\n' | '\r' => " " - case '&' if it.head == '#' => it.next ; xCharRef(it) + case '&' if it.head == '#' => it.next() ; xCharRef(it) case '&' => attr_unescape(takeUntilChar(it, ';')) case c => c }) @@ -157,11 +157,11 @@ private[scala] trait MarkupParserCommon extends TokenTests { Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _) def xCharRef(it: Iterator[Char]): String = { - var c = it.next - Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _) 
+ var c = it.next() + Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _) } - def xCharRef: String = xCharRef(() => ch, () => nextch) + def xCharRef: String = xCharRef(() => ch, () => nextch()) /** Create a lookahead reader which does not influence the input */ def lookahead(): BufferedIterator[Char] @@ -194,20 +194,20 @@ private[scala] trait MarkupParserCommon extends TokenTests { } def xToken(that: Char) { - if (ch == that) nextch + if (ch == that) nextch() else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch)) } def xToken(that: Seq[Char]) { that foreach xToken } /** scan [S] '=' [S]*/ - def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt } + def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() } /** skip optional space S? */ - def xSpaceOpt() = while (isSpace(ch) && !eof) nextch + def xSpaceOpt() = while (isSpace(ch) && !eof) nextch() /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */ def xSpace() = - if (isSpace(ch)) { nextch; xSpaceOpt } + if (isSpace(ch)) { nextch(); xSpaceOpt() } else xHandleError(ch, "whitespace expected") /** Apply a function and return the passed value */ @@ -240,7 +240,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { truncatedError("") // throws TruncatedXMLControl in compiler sb append ch - nextch + nextch() } unreachable } @@ -253,7 +253,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { private def peek(lookingFor: String): Boolean = (lookahead() take lookingFor.length sameElements lookingFor.iterator) && { // drop the chars from the real reader (all lookahead + orig) - (0 to lookingFor.length) foreach (_ => nextch) + (0 to lookingFor.length) foreach (_ => nextch()) true } } diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala index d08cb1fa9c52..33b94c9bd72b 100644 --- a/src/library/scala/xml/parsing/XhtmlParser.scala +++ b/src/library/scala/xml/parsing/XhtmlParser.scala @@ -26,5 +26,5 @@ class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupPars * @author Burak Emir */ object XhtmlParser { - def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document + def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document() } diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala index 916a1a0cf7ca..c0fad30da67a 100644 --- a/src/library/scala/xml/persistent/CachedFileStorage.scala +++ b/src/library/scala/xml/persistent/CachedFileStorage.scala @@ -76,8 +76,8 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo log("[load]\nloading "+theFile) val src = Source.fromFile(theFile) log("parsing "+theFile) - val res = ConstructingParser.fromSource(src,false).document.docElem(0) - switch + val res = ConstructingParser.fromSource(src, false).document().docElem(0) + switch() log("[load done]") res.child.iterator } @@ -102,7 +102,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo c.close fos.close dirty = false - switch + switch() log("[save done]") } @@ -112,7 +112,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo log("[run]\nstarting storage thread, checking every "+interval+" ms") while (true) { Thread.sleep( this.interval ) - save + save() } } @@ -120,6 +120,6 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo * update. 
*/ def flush() = { this.dirty = true - save + save() } } diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala index 428c30505507..3f9584fd04c9 100755 --- a/src/library/scala/xml/pull/XMLEventReader.scala +++ b/src/library/scala/xml/pull/XMLEventReader.scala @@ -139,10 +139,10 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] { def hasNext = !eos && (buffer != null || fillBuffer) def next() = { - if (eos) throw new NoSuchElementException("ProducerConsumerIterator") - if (buffer == null) fillBuffer + if (eos()) throw new NoSuchElementException("ProducerConsumerIterator") + if (buffer == null) fillBuffer() - drainBuffer + drainBuffer() } def available() = isElement(buffer) || isElement(queue.peek) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 9e72fb91459c..55f7704056d3 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -91,8 +91,8 @@ trait Printers extends api.Printers { self: SymbolTable => } def printColumn(ts: List[Tree], start: String, sep: String, end: String) { - print(start); indent; println() - printSeq(ts){print(_)}{print(sep); println()}; undent; println(); print(end) + print(start); indent(); println() + printSeq(ts){print(_)}{print(sep); println()}; undent(); println(); print(end) } def printRow(ts: List[Tree], start: String, sep: String, end: String) { @@ -327,10 +327,10 @@ trait Printers extends api.Printers { self: SymbolTable => print(lhs, " = ", rhs) case If(cond, thenp, elsep) => - print("if (", cond, ")"); indent; println() - print(thenp); undent + print("if (", cond, ")"); indent(); println() + print(thenp); undent() if (!elsep.isEmpty) { - println(); print("else"); indent; println(); print(elsep); undent + println(); print("else"); indent(); println(); print(elsep); undent() } case Return(expr) => @@ -652,7 +652,7 @@ trait Printers extends api.Printers { self: SymbolTable => print("(") val it = iterable.iterator while (it.hasNext) { - body(it.next) + body(it.next()) print(if (it.hasNext) ", " else "") } print(")") diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 1edfa84c044d..c7d2fa42d36b 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -140,7 +140,7 @@ abstract class TreeInfo { def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = { val b = List.newBuilder[R] foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg)) - b.result + b.result() } def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = { val plen = params.length @@ -154,21 +154,21 @@ abstract class TreeInfo { } if (plen == alen) foreach2(params, args)(f) - else if (params.isEmpty) return fail + else if (params.isEmpty) return fail() else if (isVarArgsList(params)) { val plenInit = plen - 1 if (alen == plenInit) { if (alen == 0) Nil // avoid calling mismatched zip else foreach2(params.init, args)(f) } - else if (alen < plenInit) return fail + else if (alen < plenInit) return fail() else { foreach2(params.init, args take plenInit)(f) val remainingArgs = args drop plenInit foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f) } } - else return fail + else return fail() true } diff --git 
a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 09f78d1d5b6c..361c00935026 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2355,7 +2355,7 @@ trait Types extends api.Types { self: SymbolTable => h = mix(h, pre.hashCode) h = mix(h, sym.hashCode) if (hasArgs) - finalizeHash(mix(h, args.hashCode), 3) + finalizeHash(mix(h, args.hashCode()), 3) else finalizeHash(h, 2) } diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 0d644aa73ec0..63b7f73386ad 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -126,7 +126,7 @@ trait Collections { ys1 = ys1.tail ys2 = ys2.tail } - buf.result + buf.result() } final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = { var ys1 = xs1 diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 0d4d55bdec69..31df78f9952c 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -42,7 +42,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { override def sizeOption = Some(givenPath.length.toInt) override def toString = path - override def hashCode(): Int = fpath.hashCode + override def hashCode(): Int = fpath.hashCode() override def equals(that: Any): Boolean = that match { case x: PlainFile => fpath == x.fpath case _ => false diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index b45cffb1507e..6184c6776a63 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -88,7 +88,7 @@ object Streamable { /** Obtains an InputStreamReader wrapped around a FileInputStream. */ - def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet) + def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream(), codec.charSet) /** Wraps a BufferedReader around the result of reader(). */ @@ -115,7 +115,9 @@ object Streamable { finally stream.close() def bytes(is: => InputStream): Array[Byte] = - (new Bytes { def inputStream() = is }).toByteArray + (new Bytes { + def inputStream() = is + }).toByteArray() def slurp(is: => InputStream)(implicit codec: Codec): String = new Chars { def inputStream() = is } slurp codec diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 09b99087e636..ae0dd2032c16 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -34,15 +34,15 @@ extends AbstractFile { override def output = sys.error("directories cannot be written") /** Does this abstract file denote an existing file? */ - def create() { unsupported } + def create() { unsupported() } /** Delete the underlying file or directory (recursively). */ - def delete() { unsupported } + def delete() { unsupported() } /** Returns an abstract file with the given name. It does not * check that it exists. 
*/ - def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported + def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() private val files = mutable.Map.empty[String, AbstractFile] diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 6f98b8385bfa..b28ad9f340c3 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -71,10 +71,10 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF } /** Does this abstract file denote an existing file? */ - def create() { unsupported } + def create() { unsupported() } /** Delete the underlying file or directory (recursively). */ - def delete() { unsupported } + def delete() { unsupported() } /** * Returns the abstract file in this abstract directory with the @@ -90,5 +90,5 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF /** Returns an abstract file with the given name. It does not * check that it exists. */ - def lookupNameUnchecked(name: String, directory: Boolean) = unsupported + def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() } diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 097d3cb71ccc..78fc8d9cc888 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -61,13 +61,13 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq override def underlyingSource = Some(this) def isDirectory = true - def lookupName(name: String, directory: Boolean) = unsupported - def lookupNameUnchecked(name: String, directory: Boolean) = unsupported - def create() = unsupported - def delete() = unsupported - def output = unsupported - def container = unsupported - def absolute = unsupported + def lookupName(name: String, directory: Boolean) = unsupported() + def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() + def create() = unsupported() + def delete() = unsupported() + def output = unsupported() + def container = unsupported() + def absolute = unsupported() private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = { its flatMap { f =>