# testing_dnd_Q.py
import psyneulink as pnl
import numpy as np
print(pnl.__version__)
# network params
n_input = 2
n_hidden = 5
n_output = 1
max_entries = 7
# training params
num_epochs = 3
learning_rate = .1
wts_init_scale = .1
# layers
input = pnl.TransferMechanism(
    name='input',
    default_variable=np.zeros(n_input)
)
hidden = pnl.TransferMechanism(
    name='hidden',
    default_variable=np.zeros(n_hidden),
    function=pnl.Logistic()
)
output = pnl.TransferMechanism(
    name='output',
    default_variable=np.zeros(n_output),
    function=pnl.Logistic()
)
# weights
w_ih = pnl.MappingProjection(
    name='input_to_hidden',
    matrix=np.random.randn(n_input, n_hidden) * wts_init_scale,
    sender=input,
    receiver=hidden
)
w_ho = pnl.MappingProjection(
    name='hidden_to_output',
    matrix=np.random.randn(n_hidden, n_output) * wts_init_scale,
    sender=hidden,
    receiver=output
)
# episodic memory mechanism (content-addressable, DND-style: cue + assoc inputs)
ContentAddressableMemory = pnl.EpisodicMemoryMechanism(
    cue_size=n_hidden, assoc_size=n_hidden,
    name='ContentAddressableMemory'
)
w_hdc = pnl.MappingProjection(
    name='hidden_to_cue',
    matrix=np.random.randn(n_hidden, n_hidden) * wts_init_scale,
    sender=hidden,
    receiver=ContentAddressableMemory.input_ports[pnl.CUE_INPUT]
)
w_hda = pnl.MappingProjection(
    name='hidden_to_assoc',
    matrix=np.random.randn(n_hidden, n_hidden) * wts_init_scale,
    sender=hidden,
    receiver=ContentAddressableMemory.input_ports[pnl.ASSOC_INPUT]
)
w_dh = pnl.MappingProjection(
    name='em_to_hidden',
    matrix=np.random.randn(n_hidden, n_hidden) * wts_init_scale,
    sender=ContentAddressableMemory,
    receiver=hidden
)
comp = pnl.Composition(name='xor')
# add all nodes
all_nodes = [input, hidden, output, ContentAddressableMemory]
for node in all_nodes:
    comp.add_node(node)
# input-hidden-output pathway
comp.add_projection(sender=input, projection=w_ih, receiver=hidden)
comp.add_projection(sender=hidden, projection=w_ho, receiver=output)
# connections to and from ContentAddressableMemory
comp.add_projection(sender=ContentAddressableMemory, projection=w_dh, receiver=hidden)
comp.add_projection(
    sender=hidden,
    projection=w_hdc,
    receiver=ContentAddressableMemory.input_ports[pnl.CUE_INPUT]
)
comp.add_projection(
    sender=hidden,
    projection=w_hda,
    receiver=ContentAddressableMemory.input_ports[pnl.ASSOC_INPUT]
)
# show graph
comp.show_graph()
# # comp.show()
# # the required input ports for ContentAddressableMemory
# print('ContentAddressableMemory input_ports: ', ContentAddressableMemory.input_ports.names)
#
# # currently, ContentAddressableMemory receives info from the following nodes
# print('ContentAddressableMemory receives: ')
# for ContentAddressableMemory_input in ContentAddressableMemory.input_ports.names:
#     afferents = ContentAddressableMemory.input_ports[ContentAddressableMemory_input].path_afferents
#     if len(afferents) == 0:
#         print(f'- {ContentAddressableMemory_input}: NA')
#     else:
#         sending_node_name = afferents[0].sender.owner.name
#         print(f'- {ContentAddressableMemory_input}: {sending_node_name}')
#
# print('ContentAddressableMemory cue input: ', ContentAddressableMemory.input_ports.names)
#
# print('hidden receives: ')
# for hidden_afferent in hidden.input_ports[0].path_afferents:
#     print('- ', hidden_afferent.sender.owner.name)
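#
# # Minimal sketch (not in the original script) of how the composition might be
# # run on the four XOR input patterns; input trials are keyed by the `input`
# # mechanism, and the recurrent hidden <-> ContentAddressableMemory loop starts
# # from the mechanisms' default initial values on the first pass.
# xor_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
# comp.run(inputs={input: xor_inputs})
# print('results: ', comp.results)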