Commit

Merge remote-tracking branch 'origin/master'
letiziam committed Jun 7, 2019
2 parents b7d822b + f610b43 commit 62c0a5d
Showing 11 changed files with 142 additions and 52 deletions.
20 changes: 10 additions & 10 deletions cdlib/algorithms/crisp_partition.py
@@ -88,7 +88,7 @@ def em(g, k):
communities.append([maps[n] for n in c])
nx.relabel_nodes(g, maps, False)
else:
communities = coms
communities = [list(c) for c in coms]

return NodeClustering(communities, g, "EM", method_parameters={"k": k})
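The change in this first hunk — returning `[list(c) for c in coms]` instead of the raw detector output — recurs throughout the commit: every community container is normalised from tuples (or other iterables) to lists. The commit does not state why, but a plausible reading (an assumption on my part) is JSON friendliness, since clusterings are serialised via `to_json` further down and tuples do not survive a JSON round-trip as tuples. A minimal sketch of the idea; `raw_communities` is a hypothetical detector output, not code from this repository:

```python
import json

# Hypothetical detector output: an iterable of node sets.
raw_communities = [frozenset({1, 2, 3}), frozenset({4, 5})]

# The normalisation applied throughout this commit.
communities = [list(c) for c in raw_communities]

# Lists survive a JSON round-trip unchanged, so the in-memory and the
# serialised representations can be compared with a plain ==.
assert json.loads(json.dumps(communities)) == communities
```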

@@ -301,7 +301,7 @@ def louvain(g, weight='weight', resolution=1., randomize=False):
for n, c in coms.items():
coms_to_node[c].append(n)

coms_louvain = [tuple(c) for c in coms_to_node.values()]
coms_louvain = [list(c) for c in coms_to_node.values()]
return NodeClustering(coms_louvain, g, "Louvain", method_parameters={"weight": weight, "resolution": resolution,
"randomize": randomize})

@@ -595,8 +595,8 @@ def greedy_modularity(g, weight=None):
"""
g = convert_graph_formats(g, nx.Graph)

gc = nx.algorithms.community.greedy_modularity_communities(g, weight)
coms = [tuple(x) for x in gc]
coms = nx.algorithms.community.greedy_modularity_communities(g, weight)
coms = [list(x) for x in coms]
return NodeClustering(coms, g, "Greedy Modularity", method_parameters={"weight": weight})


@@ -641,7 +641,7 @@ def infomap(g):
nm = name_map[nid]
coms_to_node[module].append(nm)

coms_infomap = [tuple(c) for c in coms_to_node.values()]
coms_infomap = [list(c) for c in coms_to_node.values()]
return NodeClustering(coms_infomap, g, "Infomap")


@@ -701,8 +701,8 @@ def label_propagation(g):

g = convert_graph_formats(g, nx.Graph)

lp = list(nx.algorithms.community.label_propagation_communities(g))
coms = [tuple(x) for x in lp]
coms = list(nx.algorithms.community.label_propagation_communities(g))
coms = [list(x) for x in coms]

return NodeClustering(coms, g, "Label Propagation")

@@ -732,8 +732,8 @@ def async_fluid(g, k):

g = convert_graph_formats(g, nx.Graph)

fluid = nx.algorithms.community.asyn_fluidc(g, k)
coms = [tuple(x) for x in fluid]
coms = nx.algorithms.community.asyn_fluidc(g, k)
coms = [list(x) for x in coms]
return NodeClustering(coms, g, "Fluid")


@@ -821,7 +821,7 @@ def frc_fgsn(graph, theta, eps, r):
nx.relabel_nodes(g, maps, False)
fuzz_assoc = {maps[nid]: v for nid, v in fuzz_assoc.items()}
else:
coms = [tuple(c) for c in communities]
coms = [list(c) for c in communities]

return FuzzyNodeClustering(coms, fuzz_assoc, graph, "FuzzyComm", method_parameters={"theta": theta,
"eps": eps, "r": r})
4 changes: 2 additions & 2 deletions cdlib/algorithms/edge_clustering.py
@@ -40,7 +40,7 @@ def hierarchical_link_community(g):
for e, com in edge2cid.items():
coms[com].append(e)

coms = [tuple(c) for c in coms.values()]
coms = [list(c) for c in coms.values()]
return EdgeClustering(coms, g, "HLC", method_parameters={})


@@ -83,6 +83,6 @@ def markov_clustering(g, max_loop=1000):

nx.relabel_nodes(g, maps, False)
else:
communities = [tuple(c) for c in coms]
communities = [list(c) for c in coms]

return EdgeClustering(communities, g, "Markov Clustering", method_parameters={"max_loop": max_loop})
30 changes: 18 additions & 12 deletions cdlib/algorithms/overlapping_partition.py
@@ -79,6 +79,7 @@ def demon(g, epsilon, min_com_size=3):
with suppress_stdout():
dm = Demon(graph=g, epsilon=epsilon, min_community_size=min_com_size)
coms = dm.execute()
coms = [list(c) for c in coms]

return NodeClustering(coms, g, "DEMON", method_parameters={"epsilon": epsilon, "min_com_size": min_com_size},
overlap=True)
@@ -110,9 +111,9 @@ def angel(g, threshold, min_community_size=3):
"""

g = convert_graph_formats(g, ig.Graph)

a = Angel(graph=g, min_comsize=min_community_size, threshold=threshold, save=False)
coms = a.execute()
with suppress_stdout():
a = Angel(graph=g, min_comsize=min_community_size, threshold=threshold, save=False)
coms = a.execute()

return NodeClustering(list(coms.values()), g, "ANGEL", method_parameters={"threshold": threshold,
"min_community_size": min_community_size},
@@ -145,11 +146,18 @@ def node_perception(g, threshold, overlap_threshold, min_comm_size=3):
"""
g = convert_graph_formats(g, nx.Graph)
tp = type(list(g.nodes())[0])

with suppress_stdout():
np = NodePerception(g, sim_threshold=threshold, overlap_threshold=overlap_threshold,
min_comm_size=min_comm_size)
coms = np.execute()
if tp != str:
communities = []
for c in coms:
c = list(map(tp, c))
communities.append(c)
coms = communities

return NodeClustering(coms, g, "Node Perception", method_parameters={"threshold": threshold,
"overlap_threshold": overlap_threshold,
@@ -245,8 +253,8 @@ def kclique(g, k):

g = convert_graph_formats(g, nx.Graph)

kc = list(nx.algorithms.community.k_clique_communities(g, k))
coms = [tuple(x) for x in kc]
coms = list(nx.algorithms.community.k_clique_communities(g, k))
coms = [list(x) for x in coms]
return NodeClustering(coms, g, "Klique", method_parameters={"k": k}, overlap=True)


@@ -438,12 +446,10 @@ def lemon(graph, seeds, min_com_size=20, max_com_size=50, expand_step=6, subspac
subspace_dim=subspace_dim, walk_steps=walk_steps, biased=biased)

return NodeClustering([[pos_to_node[n] for n in community]], graph,
"LEMON", method_parameters={"seeds": seeds, "min_com_size": min_com_size,
"max_com_size": max_com_size,
"expand_step": expand_step,
"subspace_dim": subspace_dim,
"walk_steps": walk_steps,
"biased": biased}, overlap=True)
"LEMON", method_parameters=dict(seeds=str(list(seeds)), min_com_size=min_com_size,
max_com_size=max_com_size, expand_step=expand_step,
subspace_dim=subspace_dim, walk_steps=walk_steps,
biased=biased), overlap=True)


def slpa(g, t=21, r=0.1):
@@ -520,7 +526,7 @@ def multicom(g, seed_node):
communities.append([maps[n] for n in c])
nx.relabel_nodes(g, maps, False)
else:
communities = coms
communities = [list(c) for c in coms]

return NodeClustering(communities, g, "Multicom", method_parameters={"seeds": seed_node}, overlap=True)
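The hunks above for `angel` and `node_perception` wrap the algorithm execution in `with suppress_stdout():`, matching what `demon` already did, so that chatty third-party code does not write to the console. cdlib imports this helper from its utilities; a typical implementation of such a context manager (a sketch, not necessarily cdlib's exact code) looks like this:

```python
import os
import sys
from contextlib import contextmanager


@contextmanager
def suppress_stdout():
    """Temporarily redirect sys.stdout to os.devnull for the duration of the block."""
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout  # always restore, even if the block raises
```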

1 change: 0 additions & 1 deletion cdlib/classes/clustering.py
@@ -24,7 +24,6 @@ def to_json(self):
:return: a JSON formatted string representing the object
"""

partition = {"communities": self.communities, "algorithm": self.method_name,
"params": self.method_parameters, "overlap": self.overlap, "coverage": self.node_coverage}

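For context, the `to_json` body shown above builds the `partition` dictionary and then, presumably, hands it to `json.dumps`; a minimal sketch under that assumption (not a verbatim copy of cdlib's method):

```python
import json


def to_json(self):
    """Return a JSON-formatted string representing the clustering (sketch, assuming json.dumps)."""
    partition = {"communities": self.communities, "algorithm": self.method_name,
                 "params": self.method_parameters, "overlap": self.overlap,
                 "coverage": self.node_coverage}
    return json.dumps(partition)
```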
2 changes: 1 addition & 1 deletion cdlib/classes/node_clustering.py
@@ -158,7 +158,7 @@ def fraction_over_median_degree(self, **kwargs):
"""
if self.__check_graph():
return evaluation.fraction_over_median_degree(self.graph, self,**kwargs)
return evaluation.fraction_over_median_degree(self.graph, self, **kwargs)
else:
raise ValueError("Graph instance not specified")

38 changes: 27 additions & 11 deletions cdlib/evaluation/fitness.py
@@ -483,7 +483,10 @@ def modularity_density(graph, communities):
dint.append(c.degree(node))
dext.append(graph.degree(node) - c.degree(node))

q += (1 / nc) * (np.mean(dint) - np.mean(dext))
try:
q += (1 / nc) * (np.mean(dint) - np.mean(dext))
except ZeroDivisionError:
pass

return q

@@ -526,7 +529,13 @@ def z_modularity(graph, communities):
mmc += (mc/m)
dc2m += (dc/(2*m))**2

return (mmc - dc2m) / np.sqrt(dc2m * (1 - dc2m))
res = 0
try:
res = (mmc - dc2m) / np.sqrt(dc2m * (1 - dc2m))
except ZeroDivisionError:
pass

return res


def surprise(graph, communities):
@@ -556,6 +565,7 @@ def surprise(graph, communities):

q = 0
qa = 0
sp = 0

for community in communities.communities:
c = nx.subgraph(graph, community)
@@ -564,11 +574,14 @@

q += mc
qa += scipy.special.comb(nc, 2, exact=True)
try:
q = q/m
qa = qa/scipy.special.comb(n, 2, exact=True)

q = q/m
qa = qa/scipy.special.comb(n, 2, exact=True)
sp = m*(q*np.log(q/qa) + (1-q)*np.log2((1-q)/(1-qa)))
except ZeroDivisionError:
pass

sp = m*(q*np.log(q/qa) + (1-q)*np.log2((1-q)/(1-qa)))
return sp


@@ -600,12 +613,15 @@ def significance(graph, communities):
q = 0

for community in communities.communities:
c = nx.subgraph(graph, community)
nc = c.number_of_nodes()
mc = c.number_of_edges()
try:
c = nx.subgraph(graph, community)
nc = c.number_of_nodes()
mc = c.number_of_edges()

binom_c = scipy.special.comb(nc, 2, exact=True)
pc = mc / binom_c
binom_c = scipy.special.comb(nc, 2, exact=True)
pc = mc / binom_c

q += binom_c * (pc * np.log(pc/p) + (1-pc)*np.log((1-pc)/(1-p)))
q += binom_c * (pc * np.log(pc/p) + (1-pc)*np.log((1-pc)/(1-p)))
except ZeroDivisionError:
pass
return q
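The fitness hunks above all apply the same defensive pattern: wrap the per-community arithmetic in `try/except ZeroDivisionError` so that degenerate communities (for example a single node, where `scipy.special.comb(nc, 2, exact=True)` is 0) are skipped instead of crashing the whole score. A self-contained sketch of the pattern; `internal_density` is a hypothetical helper used only for illustration, not part of cdlib:

```python
import networkx as nx
import scipy.special


def internal_density(graph, communities):
    """Mean internal edge density over communities, skipping single-node communities."""
    scores = []
    for community in communities:
        c = nx.subgraph(graph, community)
        pairs = scipy.special.comb(c.number_of_nodes(), 2, exact=True)
        try:
            scores.append(c.number_of_edges() / pairs)
        except ZeroDivisionError:
            pass  # a singleton community has no node pairs: skip its contribution
    return sum(scores) / len(scores) if scores else 0.0
```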
8 changes: 4 additions & 4 deletions cdlib/readwrite/io.py
@@ -51,7 +51,7 @@ def read_community_csv(path, delimiter=",", nodetype=str):
with open(path) as f:
for row in f:
community = list(map(nodetype, row.rstrip().split(delimiter)))
communities.append(tuple(community))
communities.append(list(community))

return NodeClustering(communities, None, "")
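A short usage sketch for the CSV reader above; the file name and contents are illustrative, and the import path assumes `read_community_csv` is re-exported by `cdlib.readwrite`:

```python
from cdlib.readwrite import read_community_csv

# communities.csv (illustrative contents):
# 0,1,2
# 3,4
coms = read_community_csv("communities.csv", delimiter=",", nodetype=int)
print(coms.communities)  # [[0, 1, 2], [3, 4]] -- lists, matching this change
```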

@@ -106,7 +106,7 @@ def read_community_json(path):
with open(path, "r") as f:
coms = json.load(f)

nc = NodeClustering([tuple(c) for c in coms['communities']], None, coms['algorithm'],
nc = NodeClustering([list(c) for c in coms['communities']], None, coms['algorithm'],
coms['params'], coms['overlap'])
nc.node_coverage = coms['coverage']

@@ -120,7 +120,7 @@ def read_community_from_json_string(json_repr):
cm = []
for e in c:
cm.append(tuple(e))
cms.append(tuple(cm))
cms.append(list(cm))
nc.communities = cms
nc.__class__ = EdgeClustering

@@ -148,7 +148,7 @@ def read_community_from_json_string(json_repr):

coms = json.loads(json_repr)

nc = NodeClustering([tuple(c) for c in coms['communities']], None, coms['algorithm'],
nc = NodeClustering([list(c) for c in coms['communities']], None, coms['algorithm'],
coms['params'], coms['overlap'])
nc.node_coverage = coms['coverage']
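Similarly, a usage sketch for the JSON-string reader; the payload mirrors the keys read above (`communities`, `algorithm`, `params`, `overlap`, `coverage`) with illustrative values, and the import path again assumes re-export from `cdlib.readwrite`:

```python
from cdlib.readwrite import read_community_from_json_string

json_repr = ('{"communities": [[0, 1, 2], [3, 4]], "algorithm": "Louvain", '
             '"params": {"weight": "weight"}, "overlap": false, "coverage": 1.0}')

nc = read_community_from_json_string(json_repr)
print(nc.communities)  # [[0, 1, 2], [3, 4]]
```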
