Skip to content
23 changes: 13 additions & 10 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,18 +47,21 @@ before_install:
else
sudo apt-get install -qq mono-devel;
pushd ..;
curl -L -o /tmp/IronPython-2.7.5b2.zip "http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=ironpython&DownloadId=815751&FileTime=130455203824130000&Build=20919";
unzip /tmp/IronPython-2.7.5b2.zip;
curl -L -o /tmp/decorator-3.4.0.tar.gz https://pypi.python.org/packages/source/d/decorator/decorator-3.4.0.tar.gz;
pushd /tmp;
curl -L -o IronPython-2.7.5b2.zip "http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=ironpython&DownloadId=815751&FileTime=130455203824130000&Build=20919";
unzip IronPython-2.7.5b2.zip;

curl -L -o decorator-3.4.0.tar.gz https://pypi.python.org/packages/source/d/decorator/decorator-3.4.0.tar.gz;
tar xf decorator-3.4.0.tar.gz;
popd;
cp /tmp/decorator-3.4.0/src/decorator.py IronPython-2.7.5b2/Lib;
curl -L -o /tmp/nose-1.3.3.tar.gz https://pypi.python.org/packages/source/n/nose/nose-1.3.3.tar.gz;
pushd /tmp;
cp decorator-3.4.0/src/decorator.py IronPython-2.7.5b2/Lib;

curl -L -o enum34-1.0.tar.gz https://pypi.python.org/packages/source/e/enum34/enum34-1.0.tar.gz;
tar xf enum34-1.0.tar.gz;
cp enum34-1.0/enum/enum.py IronPython-2.7.5b2/Lib;

curl -L -o nose-1.3.3.tar.gz https://pypi.python.org/packages/source/n/nose/nose-1.3.3.tar.gz;
tar xf nose-1.3.3.tar.gz;
popd;
cp -R /tmp/nose-1.3.3/nose IronPython-2.7.5b2/Lib;
cp -R nose-1.3.3/nose IronPython-2.7.5b2/Lib;

popd;
fi

Expand Down
4 changes: 2 additions & 2 deletions networkx/algorithms/centrality/katz.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def katz_centrality(G, alpha=0.1, beta=1.0,

try:
b = dict.fromkeys(G,float(beta))
except (TypeError,ValueError):
except (TypeError,ValueError,AttributeError):
b = beta
if set(beta) != set(G):
raise nx.NetworkXError('beta dictionary '
Expand Down Expand Up @@ -292,7 +292,7 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True,
nodelist = G.nodes()
try:
b = np.ones((len(nodelist),1))*float(beta)
except (TypeError,ValueError):
except (TypeError,ValueError,AttributeError):
raise nx.NetworkXError('beta must be a number')

A = nx.adj_matrix(G, nodelist=nodelist, weight=weight).todense().T
Expand Down
195 changes: 61 additions & 134 deletions networkx/algorithms/clique.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ def find_cliques(G):
To obtain a list of cliques, use list(find_cliques(G)).

Based on the algorithm published by Bron & Kerbosch (1973) [1]_
as adapated by Tomita, Tanaka and Takahashi (2006) [2]_
as adapted by Tomita, Tanaka and Takahashi (2006) [2]_
and discussed in Cazals and Karande (2008) [3]_.
The method essentially unrolls the recursion used in
the references to avoid issues of recursion stack depth.
Expand Down Expand Up @@ -158,89 +158,42 @@ def find_cliques(G):
Volume 407, Issues 1-3, 6 November 2008, Pages 564-568,
http://dx.doi.org/10.1016/j.tcs.2008.05.010
"""
# Cache nbrs and find first pivot (highest degree)
maxconn=-1
nnbrs={}
pivotnbrs=set() # handle empty graph
for n,nbrs in G.adjacency_iter():
nbrs=set(nbrs)
nbrs.discard(n)
conn = len(nbrs)
if conn > maxconn:
nnbrs[n] = pivotnbrs = nbrs
maxconn = conn
else:
nnbrs[n] = nbrs
# Initial setup
cand=set(nnbrs)
smallcand = set(cand - pivotnbrs)
done=set()
stack=[]
clique_so_far=[]
# Start main loop
while smallcand or stack:
try:
# Any nodes left to check?
n=smallcand.pop()
except KeyError:
# back out clique_so_far
cand,done,smallcand = stack.pop()
clique_so_far.pop()
continue
# Add next node to clique
clique_so_far.append(n)
cand.remove(n)
done.add(n)
nn=nnbrs[n]
new_cand = cand & nn
new_done = done & nn
# check if we have more to search
if not new_cand:
if not new_done:
# Found a clique!
yield clique_so_far[:]
clique_so_far.pop()
continue
# Shortcut--only one node left!
if not new_done and len(new_cand)==1:
yield clique_so_far + list(new_cand)
clique_so_far.pop()
continue
# find pivot node (max connected in cand)
# look in done nodes first
numb_cand=len(new_cand)
maxconndone=-1
for n in new_done:
cn = new_cand & nnbrs[n]
conn=len(cn)
if conn > maxconndone:
pivotdonenbrs=cn
maxconndone=conn
if maxconndone==numb_cand:
break
# Shortcut--this part of tree already searched
if maxconndone == numb_cand:
clique_so_far.pop()
continue
# still finding pivot node
# look in cand nodes second
maxconn=-1
for n in new_cand:
cn = new_cand & nnbrs[n]
conn=len(cn)
if conn > maxconn:
pivotnbrs=cn
maxconn=conn
if maxconn == numb_cand-1:
break
# pivot node is max connected in cand from done or cand
if maxconndone > maxconn:
pivotnbrs = pivotdonenbrs
# save search status for later backout
stack.append( (cand, done, smallcand) )
cand=new_cand
done=new_done
smallcand = cand - pivotnbrs
if len(G) == 0:
return

adj = {u: {v for v in G[u] if v != u} for u in G}
Q = [None]

subg = set(G)
cand = set(G)
u = max(subg, key=lambda u: len(cand & adj[u]))
ext_u = cand - adj[u]
stack = []

try:
while True:
if ext_u:
q = ext_u.pop()
cand.remove(q)
Q[-1] = q
adj_q = adj[q]
subg_q = subg & adj_q
if not subg_q:
yield Q[:]
else:
cand_q = cand & adj_q
if cand_q:
stack.append((subg, cand, ext_u))
Q.append(None)
subg = subg_q
cand = cand_q
u = max(subg, key=lambda u: len(cand & adj[u]))
ext_u = cand - adj[u]
else:
Q.pop()
subg, cand, ext_u = stack.pop()
except IndexError:
pass


def find_cliques_recursive(G):
Expand All @@ -261,7 +214,7 @@ def find_cliques_recursive(G):
Notes
-----
Based on the algorithm published by Bron & Kerbosch (1973) [1]_
as adapated by Tomita, Tanaka and Takahashi (2006) [2]_
as adapted by Tomita, Tanaka and Takahashi (2006) [2]_
and discussed in Cazals and Karande (2008) [3]_.

This implementation returns a list of lists each of
Expand Down Expand Up @@ -292,55 +245,29 @@ def find_cliques_recursive(G):
Volume 407, Issues 1-3, 6 November 2008, Pages 564-568,
http://dx.doi.org/10.1016/j.tcs.2008.05.010
"""
nnbrs={}
for n,nbrs in G.adjacency_iter():
nbrs=set(nbrs)
nbrs.discard(n)
nnbrs[n]=nbrs
if not nnbrs: return [] # empty graph
cand=set(nnbrs)
done=set()
clique_so_far=[]
cliques=[]
_extend(nnbrs,cand,done,clique_so_far,cliques)
return cliques

def _extend(nnbrs,cand,done,so_far,cliques):
# find pivot node (max connections in cand)
maxconn=-1
numb_cand=len(cand)
for n in done:
cn = cand & nnbrs[n]
conn=len(cn)
if conn > maxconn:
pivotnbrs=cn
maxconn=conn
if conn==numb_cand:
# All possible cliques already found
return
for n in cand:
cn = cand & nnbrs[n]
conn=len(cn)
if conn > maxconn:
pivotnbrs=cn
maxconn=conn
# Use pivot to reduce number of nodes to examine
smallercand = set(cand - pivotnbrs)
for n in smallercand:
cand.remove(n)
so_far.append(n)
nn=nnbrs[n]
new_cand=cand & nn
new_done=done & nn
if not new_cand and not new_done:
# Found the clique
cliques.append(so_far[:])
elif not new_done and len(new_cand) is 1:
# shortcut if only one node left
cliques.append(so_far+list(new_cand))
else:
_extend(nnbrs, new_cand, new_done, so_far, cliques)
done.add(so_far.pop())
if len(G) == 0:
return iter([])

adj = {u: {v for v in G[u] if v != u} for u in G}
Q = []

def expand(subg, cand):
u = max(subg, key=lambda u: len(cand & adj[u]))
for q in cand - adj[u]:
cand.remove(q)
Q.append(q)
adj_q = adj[q]
subg_q = subg & adj_q
if not subg_q:
yield Q[:]
else:
cand_q = cand & adj_q
if cand_q:
for clique in expand(subg_q, cand_q):
yield clique
Q.pop()

return expand(set(G), set(G))


def make_max_clique_graph(G,create_using=None,name=None):
Expand Down
15 changes: 8 additions & 7 deletions networkx/algorithms/operators/tests/test_product.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def test_tensor_product_size():
P5 = nx.path_graph(5)
K3 = nx.complete_graph(3)
K5 = nx.complete_graph(5)

G=tensor_product(P5,K3)
assert_equal(nx.number_of_nodes(G),5*3)
G=tensor_product(K3,K5)
Expand Down Expand Up @@ -101,12 +101,13 @@ def test_cartesian_product_multigraph():
H.add_edge(3,4,key=0)
H.add_edge(3,4,key=1)
GH=cartesian_product(G,H)
assert_equal( set(GH) , set([(1, 3), (2, 3), (2, 4), (1, 4)]))
assert_equal( set(GH.edges(keys=True)) ,
set([((1, 3), (2, 3), 0), ((1, 3), (2, 3), 1),
((1, 3), (1, 4), 0), ((1, 3), (1, 4), 1),
((2, 3), (2, 4), 0), ((2, 3), (2, 4), 1),
((2, 4), (1, 4), 0), ((2, 4), (1, 4), 1)]))
assert_equal(set(GH), {(1, 3), (2, 3), (2, 4), (1, 4)})
assert_equal({(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)},
{(frozenset([u, v]), k) for u, v, k in
[((1, 3), (2, 3), 0), ((1, 3), (2, 3), 1),
((1, 3), (1, 4), 0), ((1, 3), (1, 4), 1),
((2, 3), (2, 4), 0), ((2, 3), (2, 4), 1),
((2, 4), (1, 4), 0), ((2, 4), (1, 4), 1)]})

@raises(nx.NetworkXError)
def test_cartesian_product_raises():
Expand Down
26 changes: 20 additions & 6 deletions networkx/algorithms/shortest_paths/tests/test_generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,20 @@
from nose.tools import *
import networkx as nx

def validate_grid_path(r, c, s, t, p):
    """Assert that *p* is a shortest path from node *s* to node *t* in an
    r-by-c grid graph.

    Nodes are labeled 1..r*c in row-major order. The path must be a list
    that starts at *s*, ends at *t*, has Manhattan-distance length, stays
    inside the grid, and moves exactly one row or one column per step.
    """
    ok_(isinstance(p, list))
    assert_equal(p[0], s)
    assert_equal(p[-1], t)

    def to_cell(node):
        # Convert a 1-based row-major node label to (row, col) coordinates.
        return (node - 1) // c, (node - 1) % c

    start = to_cell(s)
    end = to_cell(t)
    # A shortest grid path visits Manhattan-distance + 1 nodes.
    assert_equal(len(p), abs(end[0] - start[0]) + abs(end[1] - start[1]) + 1)
    cells = [to_cell(node) for node in p]
    for row, col in cells:
        ok_(0 <= row < r)
        ok_(0 <= col < c)
    for (r0, c0), (r1, c1) in zip(cells, cells[1:]):
        # Consecutive cells differ by exactly one step in one dimension.
        ok_((abs(r1 - r0), abs(c1 - c0)) in [(0, 1), (1, 0)])

class TestGenericPath:

def setUp(self):
Expand All @@ -14,12 +28,12 @@ def setUp(self):
def test_shortest_path(self):
assert_equal(nx.shortest_path(self.cycle,0,3),[0, 1, 2, 3])
assert_equal(nx.shortest_path(self.cycle,0,4),[0, 6, 5, 4])
assert_equal(nx.shortest_path(self.grid,1,12),[1, 2, 3, 4, 8, 12])
validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid,1,12))
assert_equal(nx.shortest_path(self.directed_cycle,0,3),[0, 1, 2, 3])
# now with weights
assert_equal(nx.shortest_path(self.cycle,0,3,weight='weight'),[0, 1, 2, 3])
assert_equal(nx.shortest_path(self.cycle,0,4,weight='weight'),[0, 6, 5, 4])
assert_equal(nx.shortest_path(self.grid,1,12,weight='weight'),[1, 2, 3, 4, 8, 12])
validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid,1,12,weight='weight'))
assert_equal(nx.shortest_path(self.directed_cycle,0,3,weight='weight'),
[0, 1, 2, 3])

Expand Down Expand Up @@ -47,13 +61,13 @@ def test_single_source_shortest_path(self):
assert_equal(p[3],[0,1,2,3])
assert_equal(p,nx.single_source_shortest_path(self.cycle,0))
p=nx.shortest_path(self.grid,1)
assert_equal(p[12],[1, 2, 3, 4, 8, 12])
validate_grid_path(4, 4, 1, 12, p[12])
# now with weights
p=nx.shortest_path(self.cycle,0,weight='weight')
assert_equal(p[3],[0,1,2,3])
assert_equal(p,nx.single_source_dijkstra_path(self.cycle,0))
p=nx.shortest_path(self.grid,1,weight='weight')
assert_equal(p[12],[1, 2, 3, 4, 8, 12])
validate_grid_path(4, 4, 1, 12, p[12])


def test_single_source_shortest_path_length(self):
Expand All @@ -75,13 +89,13 @@ def test_all_pairs_shortest_path(self):
assert_equal(p[0][3],[0,1,2,3])
assert_equal(p,nx.all_pairs_shortest_path(self.cycle))
p=nx.shortest_path(self.grid)
assert_equal(p[1][12],[1, 2, 3, 4, 8, 12])
validate_grid_path(4, 4, 1, 12, p[1][12])
# now with weights
p=nx.shortest_path(self.cycle,weight='weight')
assert_equal(p[0][3],[0,1,2,3])
assert_equal(p,nx.all_pairs_dijkstra_path(self.cycle))
p=nx.shortest_path(self.grid,weight='weight')
assert_equal(p[1][12],[1, 2, 3, 4, 8, 12])
validate_grid_path(4, 4, 1, 12, p[1][12])


def test_all_pairs_shortest_path_length(self):
Expand Down
Loading