forked from muthukrishna/Muthukrishna_Schaller_2019_PSPR
-
Notifications
You must be signed in to change notification settings - Fork 1
/
simulation-consolidation-homophily34.py
168 lines (146 loc) · 6.48 KB
/
simulation-consolidation-homophily34.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
#!/usr/bin/env python3
"""
Created on Apr 5, 2018
Converting code to Python 3.4 so it can run on the Harvard server.
This is code for consolidation.
@author: Michael Muthukrishna
"""
import argparse
from human_social_network_generator import human_social_network_iterations, \
human_social_network
from numpy import random
import csv
import copy as C
from MyNetworkFunctions import *
# When True: print progress messages and relax the required CLI arguments so
# the script can run on the defaults below.
debug_mode = False
# When True: periodically dump graph snapshots as JSON during the influence loop.
output_json_graphs = False
# Create the folder for storing the output files (if it doesn't exist)
data_folder = "./data_consol_homo/"
# pathlib.Path(data_folder).mkdir(exist_ok=True)
# Beta-distribution parameters [alpha, beta] used for the extraversion and
# conformity draws.  Selected as beta_params[int(arg)] with arg in {-1, 0, 1},
# so Python's negative indexing gives:
#    0 -> [4, 4]      symmetric (approximately normal on [0, 1])
#    1 -> [2.5, 3.5]  mean < 0.5, positive skew
#   -1 -> [3.5, 2.5]  mean > 0.5, negative skew
beta_params = [[4, 4], [2.5, 3.5], [3.5, 2.5]]
parser = argparse.ArgumentParser(description="Run DSIT simulation over Muthukrishna-Schaller network")
# NOTE: in debug mode the defaults are ints (0, 10, -1), while parsed CLI
# values arrive as strings; downstream code converts with int() where needed.
parser.add_argument('-e', '--extraversion', help='-1=negative skew, 0=approximate normal, 1=positive skew',
                    required=(not debug_mode), default=0)
parser.add_argument('-c', '--conformity', help='-1=negative skew, 0=approximate normal, 1=positive skew',
                    required=(not debug_mode), default=0)
parser.add_argument('-i', '--iterations', help='int - number of iterations', required=(not debug_mode), default=10)
parser.add_argument('-n', '--sim_num', help='int - number of simulation', required=(not debug_mode), default=-1)
#############################################################################
#### Helper functions #######################################################
#############################################################################
def shouldIChange(graph, nodeNum):
    """Decide stochastically whether node ``nodeNum`` should flip its opinion.

    The probability of conforming is the node's 'conformity' attribute scaled
    by the fraction of neighbours that disagree with the node's current
    binary 'value' attribute (Dynamic Social Impact Theory style rule).

    Args:
        graph: graph exposing the pre-2.0 networkx API used throughout this
            file: ``graph.node[n]`` attribute dicts and ``graph.neighbors(n)``.
        nodeNum: the node whose opinion is being reconsidered.

    Returns:
        True if the node should flip its 'value', False otherwise.  An
        isolated node (no neighbours) never changes — the original code
        raised ZeroDivisionError in that case.
    """
    conformity = graph.node[nodeNum]['conformity']
    myValue = graph.node[nodeNum]['value']
    sameTally = 0
    diffTally = 0
    for n in graph.neighbors(nodeNum):
        if graph.node[n]['value'] == myValue:
            sameTally += 1
        else:
            diffTally += 1
    total = sameTally + diffTally
    if total == 0:
        # No neighbours -> no social pressure; guard the division below.
        return False
    prob_of_conforming = conformity * diffTally / total
    if random.random() < prob_of_conforming:
        return True
    return False
def simulate(graph, fileName, iterations=1):
    """Run ``iterations`` independent consolidation simulations on ``graph``.

    Each iteration: (1) deep-copies the input graph, (2) seeds a binary
    opinion ('value') by picking random nodes and infecting them plus all
    their friends until at least 50% of nodes hold value 1, then (3) runs the
    DSIT influence loop — pick a random node, flip its value with the
    probability from shouldIChange() — until 2*N consecutive picks cause no
    change.  A summary row is written to ``fileName + '.csv'`` at the start
    and end of each iteration; graph snapshots go to JSON via
    save_to_jsonfile (from MyNetworkFunctions — TODO confirm its signature).

    Args:
        graph: networkx graph (pre-2.0 API) whose nodes carry a 'conformity'
            attribute; 'value' attributes are (re)assigned here.
        fileName: output path prefix, no extension.
        iterations: number of independent runs (int).
    """
    # Re-seed from OS entropy so parallel server jobs don't share a stream.
    random.seed()
    graphSummaryDataFileName = fileName + '.csv'
    # NOTE(review): file is closed manually at the end; a `with` block would
    # be safer but the code is left as-is here.
    f = open(graphSummaryDataFileName, 'w')
    fields = ['iteration', 'gen', 'influenceMoveCount', '0:1 Distribution']
    csvwr = csv.DictWriter(f, fieldnames=fields, delimiter=',')
    csvwr.writeheader()
    for i in range(0, iterations):
        if debug_mode:
            print("Iteration:" + str(i))
        # Work on a copy so each iteration starts from the pristine input graph.
        g = C.deepcopy(graph)
        # Randomly pick individual and give opinion to friends until 50% reached.
        # Note that since we pick all friends this value can exceed 50%
        for node in g.nodes():
            # add_node on an existing node only updates its attributes (nx<2.0).
            g.add_node(node, value=0)
        counter = 0
        # True division: this is a float in Python 3, but the < / >= tests
        # below still behave as intended for odd node counts.
        nodes_50pc = len(g.nodes()) / 2
        while counter < nodes_50pc:
            node = random.choice(g.nodes())
            if g.node[node]["value"] == 0:
                g.add_node(node, value=1)
                counter = counter + 1
            friends = g.neighbors(node)
            for friend in friends:
                if g.node[friend]["value"] == 0:
                    g.add_node(friend, value=1)
                    counter = counter + 1
                if counter >= nodes_50pc:
                    break
        # Record the initial (generation 0) state of this iteration.
        data = {}
        data['iteration'] = i
        data['gen'] = 0
        data['influenceMoveCount'] = 0
        # =======================================================================
        # data['meanSimilar'] = meanSimilarityCoefficient(g)
        # muthClump = muthukrishnaClumpiness(g)
        # data['meanClumpSize'] = N.mean(muthClump)
        # data['numClumps'] = len(muthClump)
        # valComm = valueCommunities(g)
        # data['meanCommunitySize'] = N.mean(map(len, valComm))
        # data['numCommunities'] = len(valComm)
        # data['influenceMoveCount'] = 0
        # =======================================================================
        # zeroToOne comes from MyNetworkFunctions — presumably the ratio of
        # value-0 to value-1 nodes; verify against that module.
        data['0:1 Distribution'] = zeroToOne(g)
        csvwr.writerow(data)
        # Save graph
        # NOTE(review): this initial snapshot is written unconditionally,
        # while the final one below is gated on output_json_graphs — confirm
        # that asymmetry is intentional.
        save_to_jsonfile(fileName + '_iter_' + str(i) + '_gen_' + str(0) + '.json', g)
        # Select random node and apply social influence rules until nNodes generations of no change
        nStayedSame = 0
        count = 0
        numNodes = len(g.nodes())
        # Convergence: stop after 2*N consecutive draws with no opinion flip.
        while (nStayedSame < 2 * numNodes):
            if debug_mode:
                print("Count:" + str(count))
            count = count + 1
            randNode = random.choice(g.nodes())
            # calculate if value should change and change if necessary
            if (shouldIChange(g, randNode)):
                # Flip the binary opinion: 0 -> 1, 1 -> 0.
                newValue = (g.node[randNode]['value'] + 1) % 2
                g.add_node(randNode, value=newValue)
                nStayedSame = 0
            else:
                nStayedSame = nStayedSame + 1
        # If you want to write every generation, indent this under the while loop.
        # Here I'm just outputting at the beginning and end to save space
        data = {}
        data['iteration'] = i
        data['gen'] = count
        data['influenceMoveCount'] = count
        # ===================================================================
        # data['meanSimilar'] = meanSimilarityCoefficient(g)
        # muthClump = muthukrishnaClumpiness(g)
        # data['meanClumpSize'] = N.mean(muthClump)
        # data['numClumps'] = len(muthClump)
        # valComm = valueCommunities(g)
        # data['meanCommunitySize'] = N.mean(map(len, valComm))
        # data['numCommunities'] = len(valComm)
        # data['influenceMoveCount'] = count
        # ===================================================================
        data['0:1 Distribution'] = zeroToOne(g)
        csvwr.writerow(data)
        # Save graph
        # NOTE(review): `count % numNodes == 0` rarely holds after the loop
        # exits; per the comment above this guard may have been meant to sit
        # inside the while loop — left byte-identical here.
        if output_json_graphs and count % numNodes == 0:
            save_to_jsonfile(fileName + '_iter_' + str(i) + '_gen_' + str(count) + '.json', g)
    f.close()
if __name__ == '__main__':
    import os
    args = parser.parse_args()
    if debug_mode:
        print("Create network")
    # Build the base social network on a 30x30 grid; the extraversion skew
    # selects the Beta parameters via (possibly negative) list indexing.
    G = human_social_network_iterations((30, 30), 50, False, random.beta, *beta_params[int(args.extraversion)])
    if debug_mode:
        print("Assign conformity values")
    # Draw each node's conformity from the skew-selected Beta distribution.
    for node in G.nodes():
        G.add_node(node, conformity=random.beta(*beta_params[int(args.conformity)]))
    if debug_mode:
        print("Run DSIT")
    # Make sure the output folder exists before simulate() opens its CSV
    # there (the pathlib mkdir near the top of the file is commented out).
    os.makedirs(data_folder, exist_ok=True)
    # str() guards against the int defaults used in debug mode; CLI-parsed
    # values are already strings, so this is a no-op for normal runs.
    out_prefix = (data_folder + 'graph_ext_' + str(args.extraversion) +
                  '_conf_' + str(args.conformity) + '_simnum_' + str(args.sim_num))
    simulate(G, out_prefix, int(args.iterations))