Skip to content

Commit

Permalink
[anchor] Simplify dfs logic
Browse files Browse the repository at this point in the history
  • Loading branch information
tanghaibao committed Jun 26, 2018
1 parent 1c635b9 commit 8347afa
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 20 deletions.
10 changes: 5 additions & 5 deletions anchor.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ const iterations = 2
func (r *Anchorer) Run() {
var G Graph
r.ExtractInterContigLinks()
paths := r.makeInitialPath()
paths := r.makeTrivialPaths(r.contigs)
for i := 0; i < iterations; i++ {
log.Noticef("Starting iteration %d with %d paths", i, len(paths))
G = r.makeGraph(paths)
Expand All @@ -59,13 +59,13 @@ func (r *Anchorer) Run() {
log.Notice("Success")
}

// makeInitialPath starts the initial construction of Path object, with one
// makeTrivialPaths starts the initial construction of Path object, with one
// contig per Path (trivial Path)
func (r *Anchorer) makeInitialPath() []Path {
func (r *Anchorer) makeTrivialPaths(contigs []*Contig) []Path {
// Initially make every contig a single Path object
paths := make([]Path, len(r.contigs))
paths := make([]Path, len(contigs))
r.registry = make(Registry)
for i, contig := range r.contigs {
for i, contig := range contigs {
paths[i] = Path{
contigs: []*Contig{contig},
orientations: []int{1},
Expand Down
38 changes: 23 additions & 15 deletions graph.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
package allhic

import (
"fmt"
"math"
"sort"
)
Expand Down Expand Up @@ -110,35 +111,43 @@ func getSecondLargest(a, b []float64) float64 {
}

// generatePathAndCycle makes new paths by merging the unique extensions
// in the graph. The search first extends upstream from each unvisited node
// (including the sister edge) and then walks downstream until it hits
// something seen before; the two halves are stitched into one path.
func (r *Anchorer) generatePathAndCycle(G Graph) []Path {
	visited := map[*Node]bool{}
	var isCycle bool
	paths := []Path{}
	contigsUsed := make(map[*Contig]bool)
	var path Path

	// NOTE(review): ranging over the map G visits nodes in random order,
	// so the order of the returned paths is nondeterministic — confirm
	// callers do not depend on a stable ordering.
	for a := range G {
		if _, ok := visited[a]; ok {
			continue
		}
		path1, path2 := []Edge{}, []Edge{}
		// Upstream walk from a; isCycle reports that the walk closed
		// back on itself instead of reaching a dead end.
		path1, isCycle = dfs(G, a, path1, visited, true)

		if isCycle {
			// A cycle is linearized by breaking it at its weakest link.
			path1 = breakCycle(path1)
		} else { // upstream search returns a path, we'll stitch
			// Re-allow the start node so the downstream walk can pass
			// through it, then join the reversed upstream half with
			// the downstream half.
			delete(visited, a)
			path2, _ = dfs(G, a, path2, visited, false)
			path1 = append(reversePath(path1), path2...)
		}

		path = mergePath(path1)
		fmt.Println("path", path) // debug output; consider removing in production
		paths = append(paths, path)
		// Track every contig placed so we can report overall coverage.
		for _, contig := range path.contigs {
			contigsUsed[contig] = true
		}
	}
	log.Noticef("A total of %d contigs mapped to %d paths", len(contigsUsed), len(paths))
	return paths
}

// mergePath converts a single edge path into a node path
func mergePath(path []Edge, nodeToPath map[*Node]*Path) Path {
func mergePath(path []Edge) Path {
p := []string{}
s := Path{}
for _, edge := range path {
Expand All @@ -154,18 +163,16 @@ func mergePath(path []Edge, nodeToPath map[*Node]*Path) Path {
// TODO: take orientations into account
s.contigs = append(s.contigs, ep.contigs...)
s.orientations = append(s.orientations, ep.orientations...)
nodeToPath[edge.a] = &s
nodeToPath[edge.b] = &s

// Special care needed for reverse orientation
for _, contig := range ep.contigs {
p = append(p, tag+contig.name)
}
}
s.Length()
// fmt.Println(path)
// fmt.Println(s)
// fmt.Println(p)
fmt.Println(path)
fmt.Println(s)
fmt.Println(p)
return s
}

Expand All @@ -181,6 +188,7 @@ func reversePath(path []Edge) []Edge {
}

// breakCycle breaks a single edge path into two edge paths
// breakage occurs at the weakest link
func breakCycle(path []Edge) []Edge {
minI, minWeight := 0, math.MaxFloat64
for i, edge := range path {
Expand Down

0 comments on commit 8347afa

Please sign in to comment.