Merge branch 'master' into attrapi

commit 6803a507fbc096db64fe7301afc66dfa1b86d488 (2 parents: 09b1450 + bf30b77)
Joshua Arnold authored October 01, 2011

Showing 23 changed files with 1,836 additions and 2,377 deletions.

  1. 21  CHANGELOG.rst
  2. 2  build.sbt
  3. 36  src/main/scala/com/codecommit/antixml/CanBuildFromWithDeepZipper.scala
  4. 195  src/main/scala/com/codecommit/antixml/CanBuildFromWithZipper.scala
  5. 49  src/main/scala/com/codecommit/antixml/CanProduceZipper.scala
  6. 776  src/main/scala/com/codecommit/antixml/DeepZipper.scala
  7. 97  src/main/scala/com/codecommit/antixml/Group.scala
  8. 169  src/main/scala/com/codecommit/antixml/PathCreator.scala
  9. 178  src/main/scala/com/codecommit/antixml/Selectable.scala
  10. 28  src/main/scala/com/codecommit/antixml/Selector.scala
  11. 11  src/main/scala/com/codecommit/antixml/XMLSerializer.scala
  12. 437  src/main/scala/com/codecommit/antixml/Zipper.scala
  13. 26  src/main/scala/com/codecommit/antixml/ZipperMergeContext.scala
  14. 133  src/main/scala/com/codecommit/antixml/ZipperMergeStrategy.scala
  15. 13  src/main/scala/com/codecommit/antixml/node.scala
  16. 17  src/main/scala/com/codecommit/antixml/package.scala
  17. 7  src/test/scala/com/codecommit/antixml/ConversionSpecs.scala
  18. 679  src/test/scala/com/codecommit/antixml/DeepZipperSpecs.scala
  19. 107  src/test/scala/com/codecommit/antixml/GroupSpecs.scala
  20. 163  src/test/scala/com/codecommit/antixml/PathCreatorSpecs.scala
  21. 44  src/test/scala/com/codecommit/antixml/SelectorSpecs.scala
  22. 128  src/test/scala/com/codecommit/antixml/ZipperMergeStrategySpecs.scala
  23. 897  src/test/scala/com/codecommit/antixml/ZipperSpecs.scala
21  CHANGELOG.rst
@@ -2,9 +2,21 @@
 CHANGELOG
 =========
 
+0.4
+===
+
 0.3
 ===
 
+* New selection methods
+
+  * ``\\!`` – Performs a deep selection, but stops the recursion once a match is
+    reached (thus, if a parent and its child both match, only the parent will be
+    returned).  The results are guaranteed to have a zipper with a non-conflicting
+    ``unselect`` on all operations
+  * ``select`` – Performs a selection at the *current* level, without descent
+
+* Deep selection (``\\``) is now depth-first, rather than breadth-first
 * Zipper now works for both ``\`` and ``\\`` methods
 * New utility methods supported by Zipper
 
@@ -13,8 +25,6 @@ CHANGELOG
   * ``splitAt``
   * ``take``
   
-* Deep-select now returns a ``Group`` rather than a ``Zipper`` with an invalid
-  context
 * Implicit conversions from ``String`` and ``Symbol`` to ``Selector`` are now
   hidden in the ``Selector`` companion object
 * Explicit converters now use ``convert`` instead of ``anti``
@@ -28,10 +38,9 @@ CHANGELOG
     results have been dropped
   * ``Zipper#unselect`` on empty selection results now functions appropriately
   * `Issue 12`_ – Utility Operations on Group Return an Invalid Zipper
-
-    * Utility methods on ``Group`` now return ``Group`` when possible, rather
-	  than ``Zipper``.  This also changes the ``CanBuildFromWithZipper`` API
-	  by splitting it entirely from ``CanBuildFrom``.
+  * Utility methods on ``Group`` now return ``Group`` when possible, rather
+    than ``Zipper``.  This also changes the ``CanBuildFromWithZipper`` API
+    by splitting it entirely from ``CanBuildFrom``.
 
 
 .. _Issue #40: https://github.com/djspiewak/anti-xml/issues/40
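
As a quick illustration of the selection changes listed above, the following is a minimal Scala sketch against the 0.4 API introduced by this commit (``XML.fromString`` and the element names are illustrative only, and the exact result types depend on the selector used):

    import com.codecommit.antixml._

    val doc = XML.fromString(
      "<books><book><title>A</title></book><book><book/><title>B</title></book></books>")

    val books = doc \ "book"   // shallow selection among children, returns a Zipper
    books select "book"        // selection at the *current* level: both outer <book>s match
    doc \\ "book"              // deep selection, now depth-first: outer and nested <book>s
    doc \\! "book"             // deep selection that stops recursing once a match is found,
                               // so only the outer <book>s are returned
    (doc \ "book").unselect    // zippers produced by ``\`` and ``\\`` unselect back to the parent
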
2  build.sbt
@@ -2,7 +2,7 @@ name := "anti-xml"
 
 organization := "com.codecommit"
 
-version := "0.3-SNAPSHOT"
+version := "0.4-SNAPSHOT"
 
 crossScalaVersions := Seq("2.9.1", "2.9.0-1", "2.9.0")
 
36  src/main/scala/com/codecommit/antixml/CanBuildFromWithDeepZipper.scala
@@ -1,36 +0,0 @@
-package com.codecommit.antixml
-
-import scala.collection.mutable.Builder
-import scala.collection.generic.CanBuildFrom
-import DeepZipper._
-
-/** A factory for [[DeepZipper]] instances. 
- * @tparam N The type of nodes to be contained in the [[DeepZipper]] (if any).
- */
-trait CanBuildFromWithDeepZipper[-From, -Elem, To] {
-    /** Creates a new builder.
-     * 
-     *  @param parent The parent of the zipper
-     *  @param contexts The contexts from which the zipper should be composed.
-     *  The contexts will be merged to the builder's input to produce a zipper.
-     *  @parent emptiesSet A set of empty locations in the zipper. */
-	def apply(parent: Option[From], contexts: Vector[LocationContext], emptiesSet: EmptiesSet): Builder[Elem, To]
-}
-
-/** A marker interface for [[CanBuildFrom]] instances that can be lifted into
- * [[CanBuildFromWithDeepZipper]] instances which operate on [[Node]] types. */
-trait CanProduceDeepZipper[-From, A <: Node, To] { this: CanBuildFrom[From, A, _ >: To] =>
-  def lift: CanBuildFromWithDeepZipper[From, A, To]
-}
-
-/** Different implicit implementations of [[CanBuildFromWithDeepZipper]]. */
-object CanBuildFromWithDeepZipper {
-  
-  /** Implicitly lifts [[CanBuildFrom]] instances into instances of [[CanBuildFromWithDeepZipper]]. */
-  implicit def identityCanBuildFrom[From, Elem, To](implicit cbf: CanBuildFrom[From, Elem, To]) = {
-    new CanBuildFromWithDeepZipper[From, Elem, To] {
-      /** Creates a builder that just ignores anything [[DeepZipper]] related. */
-      def apply(parent: Option[From], contexts: Vector[LocationContext], emptiesSet: EmptiesSet) = cbf()
-    }
-  }
-}
195  src/main/scala/com/codecommit/antixml/CanBuildFromWithZipper.scala
@@ -1,111 +1,84 @@
-/*
- * Copyright (c) 2011, Daniel Spiewak
- * All rights reserved.
- * 
- * Redistribution and use in source and binary forms, with or without modification,
- * are permitted provided that the following conditions are met:
- * 
- * - Redistributions of source code must retain the above copyright notice, this
- *   list of conditions and the following disclaimer. 
- * - Redistributions in binary form must reproduce the above copyright notice, this
- *   list of conditions and the following disclaimer in the documentation and/or
- *   other materials provided with the distribution.
- * - Neither the name of "Anti-XML" nor the names of its contributors may
- *   be used to endorse or promote products derived from this software without
- *   specific prior written permission.
- * 
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
- * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
- * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
- * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package com.codecommit.antixml
-
-import scala.collection.generic.CanBuildFrom
-import scala.collection.immutable.Vector
-import scala.collection.mutable.Builder
-
-/**
- * An implicit factory creator for use in selection in the style
- * of [[scala.collection.generic.CanBuildFrom]] with functionality required to
- * generate zippers.  In addition to providing the standard CanBuildFrom
- * functionality (producing an instanceof [[scala.collection.mutable.Builder]])
- * with respect to a provided zipper context, this typeclass also provides a
- * monoidal append method on the `To` type.   This is required for deep-select on
- * [[com.codecommit.antixml.Group]].
- *
- * A default implicit "lifting" is provided from [[scala.collection.generic.CanBuildFrom]] to
- * an instances of [[com.codecommit.antixml.CanBuildFromWithZipper]] for all target
- * types which are implicitly convertable to [[scala.collection.Traversable]].
- * (the reason for this restriction is to allow a default implementation of the
- * aforementioned monoidal append)  This implicit lifting is defined in the
- * companion object for this trait, giving it lower priority in the implicit
- * resolution, but still accessible without requiring an explicit import.
- */
-trait CanBuildFromWithZipper[-From, -Elem, To] { self =>
-  def apply(parent: From, map: =>Vector[Option[ZContext]]): Builder[Elem, To]
-  def apply(map: =>Vector[Option[ZContext]]): Builder[Elem, To]
-  
-  /**
-   */
-  def append(left: To, right: To): To
-  
-  /**
-   * Equivalent to `(left /: rest)(append)`. Subclasses may provide a more efficient
-   * implementation.
-   */
-  def appendAll(left: To, rest: TraversableOnce[To]): To = {
-    (left /: rest)(append)
-  }
-  
-  def lift[CC >: To]: CanBuildFrom[From, Elem, CC] = new CanBuildFrom[From, Elem, CC] {
-    def apply(from: From) = apply()
-    def apply() = self(Vector())
-  }
-}
-
-/**
- * Serves as a simple container for the implicit lifting
- * of [[scala.collection.generic.CanBuildFrom]] to [[com.codecommit.antixml.CanBuildFromWithZipper]].
- */
-object CanBuildFromWithZipper {
-  
-  /**
-   * Implicitly "lifts" an existing instance of [[scala.collection.generic.CanBuildFrom]] into
-   * an instance of [[com.codecommit.antixml.CanBuildFromWithZipper]], provided
-   * that the result type of the builder is implicitly convertable (potentially
-   * via `identity`) to [[scala.collection.Traversable]].  In practice, this
-   * should be effectively all conceivable instances of [[scala.collection.generic.CanBuildFrom]],
-   * so this should not be a problematic restriction.
-   *
-   * This implicit lifting makes it possible to define instances
-   * of [[com.codecommit.antixml.Selector]] which produce
-   * non-[[com.codecommit.antixml.Node]] result types.  More precisely, it allows
-   * such selectors to be ''used'' with the `\` and `\\` methods
-   * on [[com.codecommit.antixml.Group]].
-   */
-  implicit def identityCanBuildFrom[From, Elem, To](implicit cbf: CanBuildFrom[From, Elem, To], coerce: To => Traversable[Elem]): CanBuildFromWithZipper[From, Elem, To] = new CanBuildFromWithZipper[From, Elem, To] {
-    def apply(parent: From, map: =>Vector[Option[ZContext]]) = cbf()
-    def apply(map: =>Vector[Option[ZContext]]) = cbf()
-    
-    def append(left: To, right: To) = {
-      val builder = cbf()
-      builder ++= left
-      builder ++= right
-      builder.result()
-    }
-    
-    override def appendAll(left: To, rest: TraversableOnce[To]): To = {
-      val builder = cbf() ++= left
-      rest foreach {builder ++= _}
-      builder.result()
-    }
-  }
-}
+package com.codecommit.antixml
+
+import scala.collection.GenTraversableOnce
+import scala.collection.mutable.Builder
+import scala.collection.generic.CanBuildFrom
+
+/** A factory for [[com.codecommit.antixml.Zipper]] instances.
+ * This trait is similar to [[scala.collection.mutable.CanBuildFrom]], however its builders accept instances
+ * of `ElemsWithContext[Elem]` rather than `Elem` instances.  In addition, its `apply`
+ * methods accept an optional reference to the zipper's parent.
+ *
+ * @tparam From The type of collection that is producing the zipper.
+ * @tparam Elem The type of nodes to be contained in the result (if any).
+ * @tparam To the type of collection being produced.  
+ */
+trait CanBuildFromWithZipper[-From, -Elem, To] {
+  import CanBuildFromWithZipper.ElemsWithContext
+  
+    /** Creates a new builder.
+     * 
+     *  @param parent The parent of the zipper.  If `None`, the zipper will 
+     *  still function as an IndexedSeq, but zipper unselection will fail.
+     */
+	def apply(parent: Option[Zipper[Node]]): Builder[ElemsWithContext[Elem], To]
+	
+    /** Creates a new builder.
+     *  @param parent The parent of the zipper.  If `None`, the zipper will 
+     *  still function as an IndexedSeq, but zipper unselection will fail.
+     *  @param from The collection producing the zipper
+     */
+  def apply(parent: Option[Zipper[Node]], from: From): Builder[ElemsWithContext[Elem], To] = this(parent)
+
+}
+
+/** A marker interface for [[scala.collection.mutable.CanBuildFrom]] instances that can be lifted into
+ * [[com.codecommit.antixml.CanBuildFromWithZipper]] instances that operate on [[com.codecommit.antixml.Node]] types. */
+trait CanProduceZipper[-From, A <: Node, To] { this: CanBuildFrom[From, A, _ >: To] =>
+  def lift: CanBuildFromWithZipper[From, A, To]
+}
+
+/** Different implicit implementations of [[com.codecommit.antixml.CanBuildFromWithZipper]]. */
+object CanBuildFromWithZipper {
+  
+  /**
+   * Decorates a sequence of zipper elements with a zipper context and an update time.  This is the
+   * basic unit of information used to construct zippers.  
+   *
+   * @tparam Elem the type of node that will be contained in the zipper.
+   * @param path Identifies a location in the zipper's parent.  The path order is from top to bottom
+   * (the first item specifies the index of a top-level node within the parent).  When building a zipper,
+   * it is legal for multiple ElemsWithContexts to specify the same path;  In such cases, all of the
+   * corresponding Elems will be added to the zipper and they will all be associated with that path.
+   * @param updateTime the update time associated with these elements.  One context is considered to have
+   * been updated later than another if its updateTime is greater.
+   * @param elements the actual elements to be added to the zipper.  Note that this sequence may be
+   * empty.  This would happen, for example, if `flatMap` operation removed all items for a given path. 
+   */
+  case class ElemsWithContext[+Elem](path: Seq[Int], updateTime: Int, elements: GenTraversableOnce[Elem])
+  
+  /** Implicitly lifts [[scala.collection.mutable.CanBuildFrom]] instances into instances of [[com.codecommit.antixml.CanBuildFromWithZipper]]. The resulting builders simply ignore
+    * the extra information in `ElemsWithContext` and produce their collections as usual.
+    */
+  implicit def identityCanBuildFrom[From, Elem, To](implicit cbf: CanBuildFrom[From, Elem, To]): CanBuildFromWithZipper[From, Elem, To] = {
+    new CanBuildFromWithZipper[From, Elem, To] {
+      
+      /** Creates a builder that just ignores anything [[com.codecommit.antixml.Zipper]] related. */
+      override def apply(parent: Option[Zipper[Node]], from: From) = liftBuilder(cbf(from))
+      
+      /** Creates a builder that just ignores anything [[com.codecommit.antixml.Zipper]] related. */
+      override def apply(parent: Option[Zipper[Node]]) = liftBuilder(cbf())
+      
+      private def liftBuilder(b: Builder[Elem,To]) = new Builder[ElemsWithContext[Elem], To]() {
+        override def += (x: ElemsWithContext[Elem]) = {
+          b ++= x.elements.seq
+          this
+        }
+        override def clear() {
+          b.clear()
+        }
+        override def result() = b.result()
+      }
+    }
+  }
+}
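
The contract of the lifted builders added above can be exercised on its own: ``identityCanBuildFrom`` wraps an ordinary ``CanBuildFrom`` so that the path and update-time fields of each ``ElemsWithContext`` are simply discarded and the wrapped elements appended.  A minimal sketch (``List[Int]`` stands in for any ordinary target collection, and the values are made up):

    import com.codecommit.antixml._
    import com.codecommit.antixml.CanBuildFromWithZipper.ElemsWithContext

    // The implicit lifting resolves via the CanBuildFromWithZipper companion object
    val cbfwz = implicitly[CanBuildFromWithZipper[List[Int], Int, List[Int]]]

    // No parent zipper is supplied, so the result is a plain collection
    val builder = cbfwz(None)
    builder += ElemsWithContext(Seq(0), 1, Seq(1, 2))   // path Seq(0), update time 1
    builder += ElemsWithContext(Seq(1), 2, Nil)         // an empty element sequence is legal
    builder.result()                                    // List(1, 2): the zipper context is ignored
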
49  src/main/scala/com/codecommit/antixml/CanProduceZipper.scala
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2011, Daniel Spiewak
- * All rights reserved.
- * 
- * Redistribution and use in source and binary forms, with or without modification,
- * are permitted provided that the following conditions are met:
- * 
- * - Redistributions of source code must retain the above copyright notice, this
- *   list of conditions and the following disclaimer. 
- * - Redistributions in binary form must reproduce the above copyright notice, this
- *   list of conditions and the following disclaimer in the documentation and/or
- *   other materials provided with the distribution.
- * - Neither the name of "Anti-XML" nor the names of its contributors may
- *   be used to endorse or promote products derived from this software without
- *   specific prior written permission.
- * 
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
- * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
- * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
- * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package com.codecommit.antixml
-
-import scala.collection.generic.CanBuildFrom
-
-/**
- * Trait which signifies a special type of [[scala.collection.generic.CanBuildFrom]],
- * capable of lifting itself into an instance of [[com.codecommit.antixml.CanBuildFromWithZipper]].
- * Note that even though all CanBuildFrom instances for Traversable targets are
- * implicitly liftable into [[com.codecommit.antixml.CanBuildFromWithZipper]],
- * this lifting is generic and often different from the results of the `lift`
- * method defined by this trait.
- *
- * In practice, this trait is used as a marker trait by the utility methods
- * on [[com.codecommit.antixml.Zipper]] (particularly `map` and `flatMap`).
- * Without this trait, it would be impossible to distinguish zippable collection
- * types from non-zippable collection types while preserving the override of
- * these methods.
- */
-trait CanProduceZipper[-From, -Elem, To] { this: CanBuildFrom[From, Elem, _ >: To] =>
-  def lift: CanBuildFromWithZipper[From, Elem, To]
-}
776  src/main/scala/com/codecommit/antixml/DeepZipper.scala
@@ -1,776 +0,0 @@
-package com.codecommit.antixml
-
-import DeepZipper._
-import scala.collection.generic.CanBuildFrom
-import com.codecommit.antixml.util.{VectorCase, Vector0, Vector1}
-import scala.collection.IndexedSeqLike
-import scala.collection.GenTraversableOnce
-
-/** A zipper which allows deep selection.
- *
- *  Zipper instances may be created through factory methods on the companion.
- */
-sealed trait DeepZipper[+A <: Node] extends Group[A] with IndexedSeqLike[A, DeepZipper[A]] { self =>
-
-  /*
-   * All the vectors beneath should have the same length.
-   */
-  
-  import DeepZipper.{FullContext => FullContextParam}
-  type FullContext = FullContextParam[Node]
-
-  /** Keeping track of internal time.
-   *
-   *  Should be initialized to 0 at the initial creation of the zipper.
-   */
-  protected def time: Time
-
-  /** The last update time corresponding to each node in the zipper.
-   *
-   *  Should be initialized to 0 values at the initial creation of the zipper.
-   */
-  protected def updateTimes: Vector[Time]
-
-  protected def parent: Option[DeepZipper[Node]]
-  
-  private def getParent = parent getOrElse sys.error("Root has no parent")
-
-  /** The location of each node in the group in its parent's list of children. */
-  protected def locations: Vector[Location]
-
-  /** Each item in the vector is the list of parents of the corresponding
-   *  node in the group.
-   *
-   *  The order of the parents is given in reverse. i.e. the first item in the
-   *  list is the direct parent of the corresponding node in the group,
-   *  and the last one is at the root of the tree.
-   */
-  protected def parentLists: Vector[ParentsList]
-  
-  /** A set of the location that should be empty (removed) in the zipper upon unselection. */
-  protected def emptiesSet: EmptiesSet
-
-  /** The full context list of the zipper. */
-  private lazy val fullContext = {
-    val nodesWithLocs: Vector[NodeLoc[Node]] = (self.toVector zip locations).map(Function.tupled(NodeLoc[A]))
-
-    // constructing a vector of full context objects from the vectorized constituents
-    val nodesWithContext =
-      for (i <- nodesWithLocs.indices; val updateTime = updateTimes(i)) yield {
-        FullContext(nodesWithLocs(i), parentLists(i), updateTime)
-      }
-
-    nodesWithContext
-  }
-
-  override def updated[B >: A <: Node](index: Int, node: B) = {
-    new Group(super.updated(index, node).toVectorCase) with DeepZipper[B] {
-      val parentLists = self.parentLists
-      val emptiesSet = self.emptiesSet
-      val locations = self.locations
-      def parent = self.parent
-      val mergeDuplicates = self.mergeDuplicates
-
-      // setting new time
-      val time = self.time + 1
-      val updateTimes = self.updateTimes.updated(index, this.time)
-    }
-  }
-
-  override protected[this] def newBuilder = DeepZipper.newBuilder[A]
-
-  // TODO copy coded from Zipper
-  override def slice(from: Int, until: Int): DeepZipper[A] = flatMapWithIndex {
-    case (e,i) if i >= from && i < until => Vector1(e)
-    case (e,_) => Vector0
-  }
-  override def drop(n: Int) = slice(n, size)
-  override def take(n: Int) = slice(0, n)
-  override def splitAt(n: Int) = (take(n), drop(n))
-  override def filter(f: A => Boolean): DeepZipper[A] = collect {
-    case e if f(e) => e
-  }
-  override def collect[B, That](pf: PartialFunction[A, B])(implicit cbf: CanBuildFrom[DeepZipper[A], B, That]): That =
-    flatMap(pf.lift andThen { _.toTraversable })
-  // end copy code
-    
-  override def map[B, That](f: A => B)(implicit cbf: CanBuildFrom[DeepZipper[A], B, That]): That = {
-    val liftedF = (a: A) => Seq(f(a))
-    flatMap(liftedF)(cbf)
-  }
-
-  override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit cbf: CanBuildFrom[DeepZipper[A], B, That]): That = {
-    cbf match {
-      // subtypes of this are the only expected types, hence ignoring type erasure
-      case cbf: CanProduceDeepZipper[DeepZipper[Node], B, That] => {
-        val liftedF = (x: (A, Int)) => f(x._1)
-        flatMapWithIndex(liftedF)(cbf.lift)
-      }
-      
-      case _ => super.flatMap(f)(cbf)
-    }
-  }
-
-  
-  /** A specialized flatMap where the mapping function receives the index of the 
-   * current element as an argument. */
-  private def flatMapWithIndex[B, That](f: ((A, Int)) => GenTraversableOnce[B])(implicit cbfwdz: CanBuildFromWithDeepZipper[DeepZipper[Node], B, That]): That = {
-    val result = toVector.zipWithIndex.map(f)
-    val indices = result.indices
-    
-    val emptyContext = Vector[LocationContext]()
-
-    /* This will hold the true for locations preserved by flatMapping and false
-       for the ones that were removed. */
-    val initLocMap = Map[FullLoc, Boolean]() withDefaultValue false
-
-    val initData = (initLocMap, emptyContext, time)
-
-    val (locMap, contexts, newTime) =
-      indices.foldLeft(initData) { (x, localIndex) =>
-        val (locMap, contexts, time) = x
-        val res = result(localIndex)
-
-        val parent = parentLists(localIndex)
-
-        /* Assuming here that duplicate location come only from flatMapping, 
-	       otherwise the results of unselection will be undefined. */
-        val location = locations(localIndex)
-
-        // each flatMapped segment gets its own time, this way the merging order can be properly defined
-        val newTime = time + 1
-
-        val (newContexts, resSize) =
-          res.foldLeft((emptyContext, 0)) { (ci, n) =>
-            val (contexts, i) = ci
-
-            val context = LocationContext(location, parent, newTime)
-            (contexts :+ context, i + 1)
-          }
-
-        val loc = FullLoc(parent, location)
-        val resEmpty = resSize == 0
-        // if at least one non empty result is present for this loc, we get a true
-        val locEmpty = locMap(loc) || resEmpty
-        val newLocMap = locMap.updated(loc, locEmpty)
-
-        (newLocMap, contexts ++ newContexts, newTime)
-      }
-
-    // assigning the empties the latest time in this flatMap action
-    val newEmpties = locMap.filter(_._2).keySet.map((_, newTime)) // holding on to locations flatMapped to oblivion
-    val builder = cbfwdz(self.parent, contexts, emptiesSet ++ newEmpties)
-    result foreach (builder ++= _.toList)
-    builder.result
-  }
-  
-  /** Transforming a node with its update time into a sequence of nodes with an overall update time. */
-  private type NodeTransform = Node => (Seq[Node], Time)
-  
-  /** Preparing the context of the zipper for unselection.
-   * 
-   *  Given that all duplicate locations in the zipper were created by applications of flatMap:
-   *  * We separate a single entry from each duplicates list which will remain in the full context
-   *  * The leftovers are converted into node transformation functions.
-   *  * The node transforms should be applied at the location to which they were mapped to, to replace the values at these locations (appending the duplicates to the location).
-   *  * In addition to the above transforms, transforms for locations that were removed from the zipper are also provided (to remove the nodes from these locations). 
-   *  
-   *  The unselection data is composed from the non duplicate contexts and the transformation functions. */
-  private def unselectionData: (Vector[FullContext], Map[FullLoc, NodeTransform]) = {
-    val (contexts, transforms) = contextsWithTransforms
-    val allTransforms = transforms ++ emptyTransforms
-    (contexts, allTransforms)
-  }
-
-  /** The contexts objects from the zipper after removing duplicates.
-   *  The removed duplicates are returned as transforms which append the duplicates to a given node mapped to the appropriate location.*/
-  private def contextsWithTransforms: (Vector[FullContext], Map[FullLoc, NodeTransform]) = {
-    val byLoc = fullContext.groupBy(fc => FullLoc(fc.parentsList, fc.nodeLoc.loc))
-
-    val initContexts = Vector[FullContext]()
-    val initTransforms = Map[FullLoc, NodeTransform]()
-
-    val (contexts, transforms) =
-      byLoc.foldLeft((initContexts, initTransforms)) { (ct, le) =>
-        val (cont, trans) = ct
-        val (loc, entry) = le
-
-        val (h, t) = (entry.head, entry.tail) // entries cannot be empty as they were obtained by groupBy
-
-        val newContexts = cont :+ h
-        val newTransforms =
-          if (t.isEmpty) trans
-          else {
-            val transFunc = (n: Node) => {
-              val nodes = t.map(_.nodeLoc.node)
-              val times = t.map(_.updateTime)
-              (n +: nodes, times.max) // appending extras
-            }
-            trans + ((loc, transFunc))
-          }
-
-        (newContexts, newTransforms)
-      }
-    
-    (contexts, transforms)
-  }
-  
-  /** The node transforms that should be applied at locations that were removed from the zipper. */
-  private def emptyTransforms: Set[(FullLoc, NodeTransform)] = {
-    def toEmpty(time: Time) = (_: Node) => (Seq[Node](), time) // removing the node for an empty location
-    val res = emptiesSet.map { lt =>
-      val (loc, time) = lt
-      (loc, toEmpty(time))
-    }
-    res
-  }
-  
-  /** Applying the node updates. */
-  lazy val unselect: DeepZipper[Node] = {
-    val (fullContext, transforms) = unselectionData
-    
-    if (fullContext.isEmpty && transforms.isEmpty) getParent // no updates
-    else {
-      // grouping the nodes by their depth in the tree
-      val byDepthContexts = fullContext.groupBy(_.parentsList.length) withDefaultValue Vector()
-      val byDepthTransforms = transforms.groupBy(_._1.parentsList.length) withDefaultValue Map()
-
-      val newZipper = mergeContext(byDepthContexts, byDepthTransforms)
-
-      newZipper
-    }
-  }
-  
-  /** Converting anything that may be empty into an optional value. */
-  private def toOpt[A <: {def isEmpty: Boolean}](s: A) = if (s.isEmpty) None else Some(s) 
-  
-  /** The zipper context grouped by depth in the tree. */
-  private type DepthContext = Map[Int, Seq[FullContext]]
-  /** Node transforms grouped by depth in the tree. */
-  private type DepthTransforms = Map[Int, Map[FullLoc, NodeTransform]]
-
-  /** Merging all the nodes that were updated in the zipper to provide the new
-   *  values after unselection.
-   *
-   *  Contexts and transforms cannot be both empty simultaneously.
-   */
-  private def mergeContext(context: DepthContext, transforms: DepthTransforms): DeepZipper[Node] = {
-    assert(!(context.isEmpty && transforms.isEmpty), "Cannot merge an empty context") 
-    
-    val optContext = toOpt(context)
-    val optTransforms = toOpt(transforms)
-
-    val maxFunc = (_: Map[Int, _]).maxBy(_._1)._1 // taking the maximal key from a map
-    val contDepth = optContext map maxFunc
-    val transDepth = optTransforms map maxFunc
-    val depths = contDepth ++ transDepth // not empty as per above assertion
-    
-    val maxDepth = depths.max
-    
-    // having only a single depth, the context is fully merged
-    if (maxDepth == 0) mergeRoot(context, transforms) 
-    else {
-      val (deepestContexts, deepestTransforms) =  (context(maxDepth), transforms(maxDepth))
-      val newDepth = maxDepth - 1 // merging a single level
-      val newDepthSeq = (newDepth, context(newDepth) ++ mergeDepth(deepestContexts, deepestTransforms)) // merging with entries at the new depth
-      
-      // removing old depth, setting the new one
-      val newContext = (context - maxDepth) + newDepthSeq 
-      val newTransforms = transforms - maxDepth
-      mergeContext(newContext, newTransforms)
-    }
-  }
-
-  /** Taking contexts and transforms at a single depth and merging them into a list
-   *  of contexts at depth -1 from the original.
-   */
-  private def mergeDepth(singleDepthContexts: Seq[FullContext], singleDepthTransforms: Map[FullLoc, NodeTransform]) = {
-    val contexts = singleDepthContexts.groupBy(_.parentsList) withDefaultValue Seq()
-    val transforms = singleDepthTransforms.groupBy(_._1.parentsList) withDefaultValue Map()
-    
-    val allParents = contexts.keySet ++ transforms.keySet
-    
-    allParents.map(p => mergeParent(p, contexts(p), transforms(p)))
-  }
-
-  /** Taking contexts and transforms under a single parent's list and merging them
-   *  into a single context at the same depth as the lowest parent in the parent's list.
-   */
-  private def mergeParent(parents: ParentsList, contexts: Seq[FullContext], transforms: Map[FullLoc, NodeTransform]) = {
-
-    // The parent list is never empty because the merging stops at the lowest depth.
-    assert(!parents.isEmpty, "Cannot merge under an empty parent.")
-
-    val directParentLoc = parents.head
-    val directParent = directParentLoc.elem
-    val grandParents = parents.tail
-
-    val uniques = uniqueLocations(directParent, parents, contexts)
-    val oldChildren = directParent.children
-    
-    val mergedChildren = mergeOriginalWithContext(oldChildren, uniques)
-    
-    
-    val defaultTransform: NodeTransform = n => (Seq(n), initTime)
-
-    // incorporating the transform into the merged children
-    val transformed =
-      for {
-        i <- mergedChildren.indices
-        loc = FullLoc(parents, i)
-        transform <- transforms.get(loc) orElse Some(defaultTransform) // keeping original node
-        node = mergedChildren(i)
-      } yield transform(node)
-    
-    val (nodes, times) = transformed.unzip
-    
-    // the maximal update time inferred from the context object
-    val maxTimeByContext = toOpt(uniques) map (_.maxBy(_.updateTime).updateTime)
-    
-    // the update time of the parent is the maximal time inferred from both contexts and transforms
-    val newUpdateTime = (times ++ maxTimeByContext).max // both cannot be empty otherwise we wouldn't be merging 
-    val newChildren = Group.fromSeq(nodes.flatten)
-
-    val newParent = directParent.copy(children = newChildren)
-    val newParentLoc = NodeLoc(newParent, directParentLoc.loc)
-
-    FullContext(newParentLoc, grandParents, newUpdateTime)
-  }
-
-  /** Taking a sequence of contexts under the given direct parent and the given
-   *  parent's list and merging nodes which represent the same location under the parent.
-   *  @return A list contexts which are all unique under the given parent.
-   */
-  private def uniqueLocations(directParent: Elem, parents: ParentsList, contexts: Seq[FullContext]) = {
-
-    val byLocation = contexts.groupBy(_.nodeLoc.loc) // grouping by nodes by the location in the parent
-    val locationUnique = byLocation.map { entry =>
-      val (loc, context) = entry
-      val origNode = directParent.children(loc) // the node before updates on the zipper
-      val modNodes = context.map(c => (c.nodeLoc.node, c.updateTime))
-
-      val (newNode, updateTime) = mergeDuplicates(origNode, modNodes)
-      FullContext(NodeLoc(newNode, loc), parents, updateTime)
-    }
-    locationUnique
-  }
-
-  /** Merging a depth context at the root level into the original parent. */
-  private def mergeRoot(rootContext: DepthContext, rootTransforms: DepthTransforms): DeepZipper[Node] = {
-    // the identity function for the different mappings
-    val idTransformMap = (n: Node) => Seq(n)
-    val idContextMap = (n: Node) => n
-
-    // mapping functions obtained from contexts	
-    val contextMaps = contextMapFuncs(rootContext: DepthContext) withDefaultValue idContextMap
-    // mapping functions obtained from transforms
-    val transformMaps = transformMapFuncs(rootTransforms) withDefaultValue idTransformMap
-
-    // this function will merge all the zipper's changes into the parent
-    val flatMapFunc = (ni: (Node, Location)) => {
-      val (n, i) = ni
-      val contextMap = contextMaps(i)
-      val transformMap = transformMaps(i)
-      val fullMap = contextMap andThen transformMap
-
-      fullMap(n)
-    }
-
-    getParent.flatMapWithIndex(flatMapFunc)
-  }
-  
-  /** Creating the mapping functions that should be applied at the root level implied by the transform functions. */
-  private def transformMapFuncs(rootTransforms: DepthTransforms) = {
-    def liftTransform(tr: NodeTransform) = (n: Node) => tr(n)._1 // removing the time stamp from the transform
-    val mapFuncs = // merging all transform under the same root location and lifting transforms
-      rootTransforms.foldLeft(Map[Location, Node => Seq[Node]]()) { (byLoc, e) =>
-        val (_, fullLocMap) = e
-        val newByLoc =
-          fullLocMap.foldLeft(byLoc) { (transformMap, lt) =>
-            val (FullLoc(_, loc), tr) = lt
-            transformMap.updated(loc, liftTransform(tr))
-          }
-        newByLoc
-      }
-    
-    mapFuncs
-  }
-  
-  /** Creating the mapping functions that should be applied at the root level implied by the context objects. */
-  private def contextMapFuncs(rootContext: DepthContext) = {
-    val flat = rootContext.values.flatten
-    val contextByLoc = flat.groupBy(_.nodeLoc.loc)
-    val uniques = mergeRootDuplicates(contextByLoc).toSeq
-    
-    val mergedSeqByLoc = uniques.groupBy(_.loc)
-    
-    // we are at the root, each location has a single context derived entry 
-    val mergedByLoc = mergedSeqByLoc.mapValues(_.head) // can't fail because of groupBy
-    // mapping each location to the nodes acquired from merging
-    val mergedTransforms = mergedByLoc.mapValues { n =>
-      (_: Node) => n.node
-    }
-    
-    mergedTransforms
-  }
-
-  /** Applying the merging strategy to full contexts at duplicate locations at the root level. */
-  private def mergeRootDuplicates(root: Map[Location, Iterable[FullContext]]) = {
-    val uniques = // merging duplicates
-      root.map { lc =>
-        val (l, c) = lc
-        val orig = getParent(l)
-        val alternatives = c.map(fc => (fc.nodeLoc.node, fc.updateTime)).toSeq
-        val (merged, _) = mergeDuplicates(orig, alternatives)
-        NodeLoc(merged, l)
-      }
-    uniques
-  }
-
-  /** Merges an iterable of context objects into the original group representing them. */
-  private def mergeOriginalWithContext(originals: Group[Node], contexts: Iterable[FullContext]) = {
-    mergeWithOriginal(originals, contexts.map(_.nodeLoc))
-  }
-
-  /** Merges an iterable of node locations into the original group representing them. */
-  private def mergeWithOriginal[B >: A <: Node](originals: Group[Node], nodeLocs: Iterable[NodeLoc[B]]) = {
-    val newNodes = nodeLocs.foldLeft(originals) { (oldNodes, nl) =>
-      val NodeLoc(node, loc) = nl
-      oldNodes.updated(loc, node)
-    }
-
-    newNodes
-  }
-
-  /** The merge strategy for the duplicate nodes in the zipper.
-   *
-   *  TODO consider passing it as an implicit to the unselect function, or maybe
-   *  just simple currying
-   */
-  protected val mergeDuplicates: NodeMergeStrategy
-
-}
-
-/** A factory for [[DeepZipper]] instances.
- *  Zippers may be created directly from groups through [[DeepZipper.groupToZipper]] or
- *  through selection using a [[PathFunction]] with [[DeepZipper.fromPath]]
- *
- *  By importing the implicits in this object any [[Selectable]] can be pimped with
- *  shallow/deep selection methods, which directly take selectors as input.
- *  TODO examples
- */
-object DeepZipper {
-
-  /** The number represents the number of the node in its parent's children list.
-   *  In case the node is root, the number is its position in the group to which it belongs.
-   */
-  private[antixml]type Location = Int
-
-  /** A location of a node within its parent. */
-  private[antixml] case class NodeLoc[+A <: Node](node: A, loc: Location) 
-  /** Parents can only be [[Elem]]s. */
-  private[antixml] case class ParentLoc(elem: Elem, loc: Location)
-  /** Containing any data. */
-  private[antixml] case class WithLoc[+A](content: A, loc: Location)
-  
-  /** A location of node under its list of parents. */
-  private[antixml] case class FullLoc(parentsList: ParentsList, loc: Location)
-  
-  /** A set of locations nested in parents lists that are empty locations in a zipper 
-   * coupled with their las update time. */
-  private[antixml] type EmptiesSet = Set[(FullLoc, Time)]
-  
-  /** A default empties set. */
-  private val defaultEmptiesSet: EmptiesSet = Set[(FullLoc, Time)]()
-
-  /** Represents a list of a node's parents, where the first item is the direct
-   *  parent of the node, and the last is the root of the tree.
-   */
-  private[antixml] type ParentsList = List[ParentLoc]
-
-  /** The units in which time is measured in the zipper. Assumed non negative. */
-  private[antixml] type Time = Int
-  
-  /** The initial time of the zipper. */
-  private[antixml] val initTime: Time = 0
-
-  /** A wrapper for the full context of a [[DeepZipper]] location. */
-  private[antixml] case class FullContext[+A <: Node](
-    nodeLoc: NodeLoc[A], 
-    parentsList: ParentsList,
-    updateTime: Time)
-
-  /** A [[DeepZipper]] context for a location, fully describes its surrounding without specifying the content. */
-  private[antixml] case class LocationContext(
-    /** The location of the context beneath its parent. */
-    loc: Location,
-    parentsList: ParentsList,
-    updateTime: Time)
-    
-  private[antixml] case class LocContext(loc: Location, parentsList: ParentsList, updateTime: Time)
-
-  /** A merging function which takes a node, which represents the node before any modifications
-   *  and a sequence of nodes with their corresponding update times,
-   *  which are versions of the same node after some modifications.
-   *
-   *  The function decides how to merge the nodes to produce a single node with corresponding time stamp.
-   *
-   *  It should be taken into account that nodes may be modified directly by the user
-   *  or through mergings from deeper levels.
-   */
-  private[antixml] type NodeMergeStrategy = (Node, Seq[(Node, Time)]) => (Node, Time)
-  
-  /** The values from a path function in raw form. */
-  private[antixml] type PathVals[+A] = Seq[(WithLoc[A], ParentsList)]
-
-  /** A wrapper for path function values, cannot contain duplicate locations. */
-  private[antixml] class Path[+A](vals: PathVals[A]) {
-    private val contexts =
-      vals.map { wp =>
-        val (withLoc, parents) = wp
-        (LocationContext(withLoc.loc, parents, initTime), withLoc.content)
-      }
-    
-    /** The location contexts and the corresponding contents. */
-    val (locs, contents) = contexts.unzip
-    require(locs.toSet.size == locs.size, "Cannot have duplicate locations in path") // enforcing no duplicates policy 
-  }
-  
-  /** A function that creates paths on group, to be used when constructing zippers. */
-  private[antixml] type PathFunction[+A] = Group[Node] => PathVals[A]
-
-  /** Pimping selectables with [[DeepZipper]] methods. */
-  implicit def groupableToSelectable[A <: Node](g: Selectable[A]) = {
-    import PathCreator._
-    new {
-      //TODO using strange names to avoid conflicts
-
-      private def zipper[B, That](path: PathFunction[B])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], B, That]): That = {
-        fromPathFunc(g.toGroup, path)
-      }
-
-      /** Searching at the current level. */
-      
-      def ~\[B, That](s: Selector[B])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], B, That]): That = {
-        zipper(fromNodes(s))
-      }
-
-      /** Searching on all levels (breadth first). */
-      def ~\\[B, That](s: Selector[B])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], B, That]): That = {
-        zipper(all(s))
-      }
-
-      /** Searching one level below. */
-      def >[B, That](s: Selector[B])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], B, That]): That = {
-        zipper(directChildren(s))
-      }
-
-      /** Searching one level below and beyond (breadth first). */
-      def ~[B, That](s: Selector[B])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], B, That]): That = {
-        zipper(allChildren(s))
-      }
-
-    }
-  }
-  
-  def newBuilder[A <: Node] = VectorCase.newBuilder[A] mapResult { vec =>
-    groupToZipper(new Group(vec))
-  }
-
-  /** Converts a group into a zipper. */
-  def groupToZipper[A <: Node](group: Group[A]): DeepZipper[A] = {
-    group match {
-      case zipper: DeepZipper[_] => zipper
-      case _ => {
-        val emptyParent: ParentsList = List[ParentLoc]()
-        val locs = Vector(group.indices: _*)
-
-        new Group[A](group.toVectorCase) with DeepZipper[A] {
-          val parentLists = locs.map(_ => emptyParent)
-          val emptiesSet = defaultEmptiesSet
-          val locations = locs
-          def parent = None
-          val mergeDuplicates = BasicNodeMergeStrategy // TODO this should be pluggable
-
-          val time = initTime
-          val updateTimes = locs.map(_ => time)
-        }
-      }
-    }
-  }
-  
-  /** Converts a contexts into zipper instances.
-   * @param parentGroup The parent of the newly created zipper.
-   * @param contexts The contents of the zipper.
-   * @param empties The set of empty locations in the zipper. */
-  def fromContexts[A <: Node](parentGroup: Option[Group[Node]], contexts: Vector[FullContext[A]], empties: EmptiesSet): DeepZipper[A] = {
-    val vals = VectorCase.fromSeq(contexts map (_.nodeLoc.node))
-    val locs = contexts map (_.nodeLoc.loc)
-    val parents = contexts map (_.parentsList)
-    val newUpdateTimes = contexts map (_.updateTime)
-
-    new Group[A](vals) with DeepZipper[A] {
-      val parentLists = parents
-      val emptiesSet = empties
-      val locations = locs
-      def parent = parentGroup map groupToZipper
-      val mergeDuplicates = parent map (_.mergeDuplicates) getOrElse BasicNodeMergeStrategy
-
-      val time = if (newUpdateTimes.isEmpty) initTime else newUpdateTimes.max
-      val updateTimes = newUpdateTimes
-    }
-  }
-  
-  /** Converts a path with parent into a zipper.
-   *  @param parentGroup The parent from which the path was created.
-   *  @param path Cannot contain duplicate locations.
-   */
-  def fromPath[A, That](parent: Group[Node], path: Path[A])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], A, That]): That = {
-	 import path._
-	 
-	// this is valid only if the path has no duplicate locations
-    val emptiesSet = defaultEmptiesSet 
-    
-    val builder = cbfwdz(Some(parent), Vector(locs: _*), emptiesSet)
-    builder ++= contents
-    
-    builder.result
-  }
-  
-  /** Converts the nodes gathered from applying the path function to the given group into a `That`. */
-  def fromPathFunc[A, That](parent: Group[Node], path: PathFunction[A])(implicit cbfwdz: CanBuildFromWithDeepZipper[Group[Node], A, That]): That = {
-    fromPath(parent, new Path(path(parent)))
-  }
-
-  /** A factory for [[PathFunction]]s  */
-  object PathCreator {
-
-    /*
-     * First applying the paths using the overloads without the selector,
-     * then applying the selector.
-     * This way the traversal is not modified during selection.
-     */
-
-    /** A path function that selects on nodes in the given group. */
-    def fromNodes[A](selector: Selector[A])(nodes: Group[Node]): PathVals[A] = {
-      applySelector(selector)(fromNodesWithParent(Nil, nodes))
-    }
-
-    /** A path function that selects on the given nodes and recursively on the children (breadth first). */
-    def all[A](selector: Selector[A])(nodes: Group[Node]): PathVals[A] = {
-      fromNodes(selector)(nodes) ++ allChildren(selector)(nodes)
-    }
-
-    /** A path function that selects on the children of the given group. */
-    def directChildren[A](selector: Selector[A])(nodes: Group[Node]): PathVals[A] = {
-      applySelector(selector)(directChildren(nodes))
-    }
-
-    /** A path function that selects on the recursively on all the children of the given group (breadth first). */
-    def allChildren[A](selector: Selector[A])(nodes: Group[Node]): PathVals[A] = {
-      applySelector(selector)(allChildren(nodes))
-    }
-
-    /** Lifting the selector so that it can operate on path entries. */
-    private def liftSelector[A](s: Selector[A]): PartialFunction[(WithLoc[Node], ParentsList), (WithLoc[A], ParentsList)] = {
-      case (WithLoc(n, i), p) if s.isDefinedAt(n) => (WithLoc(s(n), i), p)
-    }
-
-    /** Applies the selector to the given path. */
-    private def applySelector[A](s: Selector[A])(path: PathVals[Node]): PathVals[A] = {
-      path.collect(liftSelector(s))
-    }
-
-    /** Converting a group of nodes to the corresponding node locations. */
-    private def nodesToLocs[A <: Node](nodes: Group[Node]) = {
-      nodes.zipWithIndex.map(Function.tupled(WithLoc[Node]))
-    }
-
-    /** Creating a path from this group of nodes. */
-    private def fromNodesWithParent(p: ParentsList, n: Group[Node]) = {
-      nodesToLocs(n) map ((_, p))
-    }
-
-    private def directChildren(nodes: Group[Node]): PathVals[Node] = collectChild(nodes, Nil)
-
-    private def allChildren(nodes: Group[Node]): PathVals[Node] = {
-      allChildren(directChildren(nodes))
-    }
-