Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
0ff4057
commit 7c18790
Showing
9 changed files
with
15,495 additions
and
0 deletions.
There are no files selected for viewing
187 changes: 187 additions & 0 deletions
187
Python/Networks/ExplodedNetworkView/exploded_view_3d.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,187 @@ | ||
#!/usr/bin/env python | ||
# encoding: utf-8 | ||
""" | ||
exploded_view_3d.py | ||
The purpose of this script is to create an 'exploded view' | ||
of a network in 3D using hierarchical clustering of geodesic | ||
distances in NetworkX and UbiGraph. | ||
This script is intended as an illustrative proof of concept | ||
for the exploded view visualization. | ||
Created by Drew Conway on 2009-04-24. | ||
Copyright (c) 2009. All rights reserved. | ||
""" | ||
|
||
import sys | ||
import os | ||
import time | ||
# Three NumPy functions needed for data manipulation | ||
from numpy import array,unique1d,zeros | ||
import networkx as NX # Building and manipulating network data | ||
import Pycluster as PC # Calculating heierarchical clustering | ||
import pylab as P # Display chart of E-I ratio | ||
import xmlrpclib # Open connection to UbiGraph server | ||
|
||
|
||
def exploded_view(G,ubi_server,edge_ref,partition,repulsion=0.3):
    """Render an 'exploded view' of G for the given partition.

    Edges that cross between partition groupings are hidden and given a
    repulsive layout strength, so the groupings drift apart on screen.
    Returns (edge_ref, list of external edges) so the view can later be
    restored with rebuild().
    """
    groupings=get_partition_groupings(partition)
    # Collect every edge that falls INSIDE some grouping, via the
    # subgraph induced by each grouping's nodelist
    internal_edges=[]
    for members in groupings.values():
        internal_edges.extend(NX.subgraph(G,members).edges())
    internal_set=set(internal_edges)
    # Every remaining edge of G crosses a grouping boundary
    external_edges=[e for e in G.edges() if e not in internal_set]
    # Explode the external edges in UbiGraph
    explode(ubi_server,edge_ref,external_edges,repulsion)
    return edge_ref,external_edges
|
||
def rebuild(ubi_server,ubi_edges,edgelist,repulsion=1.0):
    """Restore the given edges to the normal (visible) UbiGraph style,
    undoing a previous explode()."""
    style=ubi_server.new_edge_style(0)
    ubi_server.set_edge_style_attribute(style,"visible","true")
    ubi_server.set_edge_style_attribute(style,"strength",str(repulsion))
    for edge in edgelist:
        ubi_server.change_edge_style(ubi_edges[edge],style)
|
||
|
||
def explode(ubi_server,ubi_edges,edgelist,repulsion=0.3):
    """Hide the given edges and weaken their layout strength so the
    connected groups repel one another ('explode')."""
    style=ubi_server.new_edge_style(0)
    ubi_server.set_edge_style_attribute(style,"visible","false")
    ubi_server.set_edge_style_attribute(style,"strength",str(repulsion))
    for edge in edgelist:
        ubi_server.change_edge_style(ubi_edges[edge],style)
        time.sleep(0.05)  # stagger updates so the animation is visible
|
||
|
||
def build_ubigraph(G,ubi_server):
    """Mirror G into UbiGraph; return {edge tuple: ubigraph edge id}.

    The built-in NX.UbiGraph class is deliberately not used: talking to
    the XML-RPC server directly shows what happens under the hood and
    makes the exploded-view mechanics more transparent.
    """
    ubi_server.clear()
    edge_ids={}  # records the UbiGraph id assigned to each edge
    # Vertices first, keyed by node label...
    for node in G.nodes():
        ubi_server.new_vertex_w_id(node)
    # ...then the edges between them
    for u,v in G.edges():
        edge_ids[(u,v)]=ubi_server.new_edge(u,v)
    return edge_ids
|
||
def open_ubigraph_server(url='http://127.0.0.1:20738/RPC2'):
    """Return the 'ubigraph' API handle of the UbiGraph XML-RPC server
    listening at url."""
    return xmlrpclib.Server(url).ubigraph
|
||
def get_partition_groupings(partition):
    """Return {group label: ascending list of node indices} for a partition.

    `partition` is a sequence where partition[i] is the group label
    assigned to node i (e.g. one cut of a Pycluster tree).
    """
    groupings={}  # dictionary of nodelists, one per group
    # sorted(set(...)) replaces the long-removed numpy.unique1d; both
    # yield the sorted unique labels, so behavior is unchanged.
    for label in sorted(set(partition)):
        groupings[label]=[i for i,g in enumerate(partition) if g==label]
    return groupings
|
||
def external_internal_ties(G,partitions):
    """Compute tie counts for every candidate partitioning of G.

    Returns three dicts keyed by partition id:
      E - ties external to the groupings,
      I - ties internal to the groupings,
      R - the E-I ratio (E-I)/total edges, used to spot interesting cuts.
    """
    total_edges=G.number_of_edges()
    E={}
    I={}
    R={}
    for p,partition in partitions.items():
        groupings=get_partition_groupings(partition)
        # Sum the edges internal to each grouping via its induced
        # subgraph; the float literal keeps the ratio division from
        # truncating under Python 2 integer division.
        internal=0.
        for members in groupings.values():
            internal+=NX.subgraph(G,members).number_of_edges()
        I[p]=internal
        E[p]=total_edges-internal
        R[p]=(E[p]-I[p])/total_edges
    return E,I,R
|
||
def dict_of_dicts_to_matrx(dict_of_dicts):
    """Return an NxN numpy array built from a dict-of-dicts.

    N is max(keys)+1; rows/columns below min(keys) stay zero (preserving
    the original behavior for node labels that do not start at 0).
    """
    index=dict_of_dicts.keys()
    i_0=min(index)
    i_k=max(index)
    # Bug fix: the matrix must be (i_k+1) x (i_k+1) and the loops must
    # run through i_k inclusive -- the old code silently dropped the
    # last node's entire row and column.
    M=zeros((i_k+1,i_k+1))
    for i in range(i_0,i_k+1):
        vals=dict_of_dicts[i]
        for j in range(i_0,i_k+1):
            M[i,j]=vals[j]
    return array(M)
|
||
def get_dist_matrix(G):
    """Return the symmetric NxN geodesic (shortest-path) distance matrix of G."""
    return dict_of_dicts_to_matrx(NX.path.all_pairs_shortest_path_length(G))
|
||
def generate_network_clusters(G):
    """Hierarchically cluster G's nodes on geodesic distances.

    Returns {cut level: partition list} for every cut of the cluster
    tree. Raises NetworkXError if G is not a single connected component.
    """
    # The clustering is only meaningful on one connected component
    if len(NX.component.connected_component_subgraphs(G))>1:
        raise NX.NetworkXError('G must be single component! Extract main component...')
    dist_matrix=get_dist_matrix(G)
    # Default hierarchical clustering ('m' = pairwise maximum linkage)
    hclus=PC.treecluster(data=None,distancematrix=dist_matrix,method='m')
    # treecluster cut levels are numbered from 1
    partitions={}
    for cut in range(1,len(hclus)+1):
        partitions[cut]=hclus.cut(cut).tolist()
    return partitions
|
||
|
||
def main():
    # First half of the script calculates the partitions, and the metric used
    # to identify good candidate partitions for use in the exploded view:
    # the ratio (external ties - internal ties) / total network edges

    # G=NX.generators.barabasi_albert_graph(550,2)
    G=NX.read_edgelist('test_network.edgelist',create_using=NX.Graph())
    G=NX.convert_node_labels_to_integers(G,first_label=0) # for consistent record keeping
    partitions=generate_network_clusters(G)
    external,internal,ei_ratio=external_internal_ties(G,partitions)
    # Plot E-I ratio and save
    P.plot(ei_ratio.values(),ls='-',marker='.',color='r')
    P.savefig('ei_plot.png')
    #P.show()
    #P.savefig('ei_plot.png',dpi=100)
    # Look for large jumps in the chart; those mark candidate partitions
    time.sleep(10)
    # Once the candidate partitions have been identified, we use UbiGraph to
    # display the exploded view
    S=open_ubigraph_server() # Open connection to XML-RPC UbiGraph server
    edges=build_ubigraph(G,S) # Build network in UbiGraph
    time.sleep(20)  # pause so the initial layout can settle
    edge_ref,external_edges=exploded_view(G,S,edges,partitions[20],repulsion=0.20738) # Choose partition and display 'exploded view'
    time.sleep(20)  # hold the exploded view on screen
    rebuild(S,edge_ref,external_edges)  # restore the normal view
|
||
|
||
|
||
# Script entry point
if __name__ == '__main__':
    main()
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,85 @@ | ||
#!/usr/bin/env python | ||
# encoding: utf-8 | ||
""" | ||
Experiments for growing fractal networks | ||
Created by Drew Conway on 2009-05-19. | ||
Copyright (c) 2009. All rights reserved. | ||
""" | ||
|
||
import sys | ||
import os | ||
import networkx as NX | ||
from numpy import random | ||
import time | ||
import xmlrpclib | ||
|
||
def fractal_build(base_graph,iterations,server):
    # Grow a network by repeatedly stamping relabeled copies of
    # base_graph onto G and wiring each new copy back into the existing
    # graph. Note: if you are not visualizing these networks with
    # UbiGraph, comment out the ubigraph calls and the time.sleep
    # commands (they will annoyingly slow things down).
    # NOTE(review): ubigraph_add_edges is defined elsewhere in this
    # project, not in this file's visible code -- verify its signature.
    G=NX.Graph() # Start with an empty graph
    server.clear()
    for i in range(0,iterations):
        node_index=G.number_of_nodes() # Keeping track of number of nodes
        # Relabel a copy of the base graph starting at the next free index
        T=NX.operators.convert_node_labels_to_integers(base_graph,first_label=node_index)
        N=T.nodes() # Get the nodes from the base graph copy
        G=NX.operators.compose(G,T) # Add the base graph copy to G
        ubigraph_add_edges(T.edges(),server)
        time.sleep(0.05)
        # Drew's fractal growth rules...
        if node_index>0:
            if node_index%2>0:
                # If the number of nodes in G is odd (node_index % 2 > 0)...
                for n in N:
                    if n%2>0:
                        # ...take each odd-labeled node of the new copy...
                        connect_to=n
                        while n==connect_to:
                            # ...and connect it to a random node from the top
                            # half (by label) of all nodes in G, resampling
                            # until the target differs from n itself
                            connect_to=random.random_integers(node_index/2,node_index)
                        G.add_edge(n,connect_to)
                        time.sleep(0.05)
                        ubigraph_add_edges([(n,connect_to)],server)
            else:
                # If the number of nodes in G is even...
                for n in N:
                    if n%2<1:
                        # ...take each even-labeled node of the new copy...
                        connect_to=n
                        while n==connect_to:
                            # ...and connect it to a random node from the
                            # bottom half (by label) of all nodes in G
                            connect_to=random.random_integers(0,node_index/2)
                        G.add_edge(n,connect_to)
                        time.sleep(0.05)
                        ubigraph_add_edges([(n,connect_to)],server)
    return G
|
||
|
||
def open_ubigraph_server(url='http://127.0.0.1:20738/RPC2'):
    """Connect to the UbiGraph XML-RPC server at url and return its
    'ubigraph' API namespace."""
    proxy=xmlrpclib.Server(url)
    return proxy.ubigraph
|
||
def main():
    # Pick a base motif, grow a 100-iteration fractal network from it,
    # and animate the growth in UbiGraph.
    ubi_server=open_ubigraph_server()
    #base=NX.Graph(data=[(0,1)]) # Dyad
    #base=NX.Graph(data=[(0,1),(1,2),(1,3)]) # Disconnected triple
    #base=NX.Graph(data=[(0,1),(1,2),(2,0)]) # Triangle
    #base=NX.Graph(data=[(0,1),(1,2),(1,3)]) # Three point star
    base=NX.generators.petersen_graph() # Petersen Graph
    #base=NX.generators.heawood_graph() # Heawood Graph
    #base=NX.Graph(data=[(0,1),(2,3),(3,4),(4,2)]) # Dyad and triangle
    #base=NX.Graph(data=[(0,1),(2,3),(3,4),(3,5)]) # Dyad and three point star
    G=fractal_build(base,100,ubi_server)
    #G=NX.barabasi_albert_graph(300,2)
    #H=NX.UbiGraph(G)
    #print(compact_box_burning(G))
    time.sleep(15)  # leave the finished network on screen for a while
    ubi_server.clear()
|
||
# Script entry point
if __name__ == '__main__':
    main()
|
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,102 @@ | ||
#!/usr/bin/env python | ||
# encoding: utf-8 | ||
""" | ||
MP_Networks.py | ||
Generate a weighted 2-mode network of British MP's | ||
to various lunch and dinner engagements | ||
Created by Drew Conway on 2010-02-05. | ||
Copyright (c) 2010. All rights reserved. | ||
""" | ||
|
||
import sys | ||
import os | ||
import networkx | ||
import csv | ||
|
||
def get_bipartite_proj(G,proj1_name=None,proj2_name=None):
    """Return the two one-mode projections of the bipartite graph G.

    Each projection is named with the corresponding proj*_name.
    Raises NetworkXError when G is not bipartite.
    """
    # Guard clause: projections are only defined for bipartite graphs
    if not networkx.is_bipartite(G):
        raise networkx.NetworkXError("Network is not bipartite")
    set1,set2=networkx.bipartite_sets(G)
    proj1=networkx.project(G,set1)
    proj1.name=proj1_name
    proj2=networkx.project(G,set2)
    proj2.name=proj2_name
    return proj1,proj2
|
||
|
||
def dichotomize(G,thresh,remove_isolates=True):
    """Binarize G's edge weights in place and return G.

    Every edge with weight <= thresh is removed; surviving edges get
    weight 1. When remove_isolates is true, nodes left with no edges
    are dropped as well.
    """
    for u,v,data in G.edges(data=True):
        if data["weight"]<=thresh:
            G.remove_edge(u,v)
        else:
            G.add_edge(u,v,weight=1)
    G.name=G.name+" Dichotomized < "+str(thresh)
    if remove_isolates:
        degrees=G.degree(with_labels=True)
        isolates=[node for node,k in degrees.items() if k<1]
        G.remove_nodes_from(isolates)
    return G
|
||
def create_network(data_dict):
    """Build the weighted two-mode (MP x Event) graph from parsed data.

    data_dict maps (mp_name, event_name) pairs to attendance counts;
    each pair becomes an edge whose weight is that count.
    """
    net=networkx.Graph(name="MP Network")
    for mp_name,event_name in data_dict.keys():
        mp_node=mp_name.replace(", ","_")
        event_node=event_name.replace(" ","_")
        # Edge weight = number of times this Member attended this event
        net.add_edge(mp_node,event_node,weight=data_dict[(mp_name,event_name)])
    return net
|
||
def parse_data(csv_obj):
    """Tally (MP name, event) attendance pairs from a csv.DictReader.

    The first row is skipped: the reader is constructed with explicit
    fieldnames, so the file's header line arrives as a data row.
    Returns {(mp_name, event): attendance count}. An empty input yields
    an empty dict (the old code raised NameError in that case).
    """
    mp_data=dict()
    first=True
    for line in csv_obj:
        if first:
            first=False
            continue
        data_pair=(line["MP.Name"],line["Event"])
        # dict.get replaces the old mp_data.keys().count(...) test,
        # which was O(n) per row and breaks on Python 3 key views
        mp_data[data_pair]=mp_data.get(data_pair,0)+1
    return mp_data
|
||
def main():
    # Pipeline: raw attendance CSV -> two-mode weighted network ->
    # dichotomized network -> one-mode projections, writing a Pajek
    # file at each stage.
    csv_file="MP_DATA_CLEANDATE.csv"
    reader=csv.DictReader(open(csv_file,"rU"),fieldnames=["MP.Name","Date","Event","Type","Numbers","Venue.s."])
    parsed_mp=parse_data(reader)
    # Create base graph from raw data, and save to file
    G=create_network(parsed_mp)
    networkx.write_pajek(G,"MP_events.net")
    # Dichotomize data to remove all edges with a weight < 2 (i.e. <= 1)
    G_dichot=dichotomize(G,thresh=1)
    # NOTE(review): presumably "Association" is a spurious event node
    # from dirty source data -- verify against the raw CSV
    G_dichot.remove_node("Association")
    networkx.info(G_dichot)
    # Generate MP-by-MP and Event-by-Event projections of the bipartite base graph
    mp_net,event_net=get_bipartite_proj(G_dichot,proj1_name="MP-by-MP",proj2_name="Event-by-Event")
    networkx.write_pajek(mp_net,"mp_net.net")
    networkx.write_pajek(event_net,"event_net.net")
    # Perform some network cleaning for
|
||
# Script entry point
if __name__ == '__main__':
    main()
|
Oops, something went wrong.