# pathway_analysis_score_nodes.py
# forked from Thakar-Lab/BONITA-Python3
# import necessary modules
import argparse
import pickle
from ctypes import cdll
import networkx as nx
from simulation import paramClass, modelClass, NPsync
from utils import genInitValueList, setupEmptyKOKI, writeModel
from GA import GAsearchModel, localSearch
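# Typical invocation (the pathway file name below is only illustrative; the script
# expects a compiled ./simulator.so and a matching <graph>_sss.pickle, presumably
# produced by the earlier BONITA setup step, in the working directory):
#   python pathway_analysis_score_nodes.py hsa04670.gpickle 1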
# calculate importance scores
def calcImportance(individual, params, model, sss, knockoutLists, knockinLists, boolC):
    importanceScores = []  # holder for impact scores
    # loop over all nodes
    for node in range(len(model.nodeList)):
        SSEs = []  # add SSE across samples
        nodeValues = [sss[j][model.nodeList[node]] for j in range(0, len(sss))]
        # knock each node out and in to observe differences
        for j in range(0, len(sss)):
            ss = sss[j]
            initValues = list(model.initValueList[j])
            knockerOuter = list(knockoutLists[j])
            knockerOuter.append(node)  # knock out node
            boolValues1 = NPsync(
                individual,
                model,
                params.cells,
                initValues,
                params,
                knockerOuter,
                knockinLists[j],
                boolC,
            )
            knockerInner = list(knockinLists[j])
            knockerInner.append(node)  # knock in node
            boolValues2 = NPsync(
                individual,
                model,
                params.cells,
                initValues,
                params,
                knockoutLists[j],
                knockerInner,
                boolC,
            )
            # find difference between knockout and knockin
            SSE = 0
            for i in range(0, len(model.nodeList)):
                SSE += (boolValues1[i] - boolValues2[i]) ** 2
            SSEs.append(SSE)
        importanceScores.append(sum(SSEs))
    return importanceScores
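# The importance score accumulated above for node n is
#   sum over samples j of sum over nodes i of (boolValues1[i] - boolValues2[i])**2,
# i.e. the squared distance between the simulated network states (as returned by
# NPsync) with node n knocked out versus knocked in; nodes whose perturbation
# changes the rest of the network the most receive the highest scores.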
if __name__ == "__main__":
    import time

    start_time = time.time()
    # read in arguments from shell scripts
    parser = argparse.ArgumentParser()
    parser.add_argument("graph")
    parser.add_argument("iterNum")
    results = parser.parse_args()
    graphName = results.graph
    iterNum = int(results.iterNum)
    name = graphName[:-8] + "_" + results.iterNum  # strip the ".gpickle" extension
    graph = nx.read_gpickle(graphName)
    # read in C function to run simulations
    updateBooler = cdll.LoadLibrary("./simulator.so")
    boolC = updateBooler.syncBool
    # load data
    sampleList = pickle.Unpickler(open(graphName[:-8] + "_sss.pickle", "rb")).load()
    # set up parameters of run, model
    params = paramClass()
    model = modelClass(graph, sampleList, False)
    model.updateCpointers()
    storeModel = [
        (model.size),
        list(model.nodeList),
        list(model.individualParse),
        list(model.andNodeList),
        list(model.andNodeInvertList),
        list(model.andLenList),
        list(model.nodeList),
        dict(model.nodeDict),
        list(model.initValueList),
    ]
    # put lack of KOs, initial values into correct format
    knockoutLists, knockinLists = setupEmptyKOKI(len(sampleList))
    newInitValueList = genInitValueList(sampleList, model)
    model.initValueList = newInitValueList
    # find rules by doing GA then local search
    model1, dev, bruteOut = GAsearchModel(
        model, sampleList, params, knockoutLists, knockinLists, name, boolC
    )  # run GA
    bruteOut1, equivalents, dev2 = localSearch(
        model1, bruteOut, sampleList, params, knockoutLists, knockinLists, boolC
    )  # run local search
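    # Files written below for this pathway/iteration (name = <graph basename>_<iterNum>):
    #   <name>_local1.pickle  - fitted rules, model snapshots, equivalent rule sets, and fit deviations
    #   <name>_scores1.pickle - per-node importance scores from calcImportance
    #   <name>_rules.txt      - human-readable Boolean rules from writeModel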
    # output results
    storeModel3 = [
        (model.size),
        list(model.nodeList),
        list(model.individualParse),
        list(model.andNodeList),
        list(model.andNodeInvertList),
        list(model.andLenList),
        list(model.nodeList),
        dict(model.nodeDict),
        list(model.initValueList),
    ]
    outputList = [bruteOut1, dev, storeModel, storeModel3, equivalents, dev2]
    pickle.dump(outputList, open(name + "_local1.pickle", "wb"))  # output rules
    # calculate importance scores and output
    scores1 = calcImportance(
        bruteOut1, params, model1, sampleList, knockoutLists, knockinLists, boolC
    )
    pickle.dump(scores1, open(name + "_scores1.pickle", "wb"))
    # write rules
    with open(name + "_rules.txt", "w") as text_file:
        text_file.write(writeModel(bruteOut1, model1))
    print("--- %s seconds ---" % (time.time() - start_time))