
Commit

Merge pull request #71 from Hananel-Hazan/hananel
 fix initialization value of self.a_pre
Hananel-Hazan committed Jun 5, 2018
2 parents 6c1433c + f319d29 commit 90bc9c7
Showing 2 changed files with 66 additions and 65 deletions.
bindsnet/models/__init__.py: 28 changes (14 additions, 14 deletions)
@@ -11,23 +11,23 @@ class TwoLayerNetwork(Network):
    def __init__(self, n_inpt, n_neurons=100, dt=1.0, nu_pre=1e-4, nu_post=1e-2, wmin=0, wmax=1, norm=78.4):
        '''
        Inputs:
            | :code:`n_inpt` (:code:`int`): Number of input neurons. Matches the 1D size of the input data.
            | :code:`n_neurons` (:code:`int`): Number of neurons in the :code:`LIF` layer.
            | :code:`dt` (:code:`float`): Simulation time step.
            | :code:`nu_pre` (:code:`float`): Pre-synaptic learning rate.
            | :code:`nu_post` (:code:`float`): Post-synaptic learning rate.
            | :code:`wmin` (:code:`float`): Minimum allowed weight on the input to :code:`LIF` connection.
            | :code:`wmax` (:code:`float`): Maximum allowed weight on the input to :code:`LIF` connection.
            | :code:`norm` (:code:`float`): Input to :code:`LIF` layer connection weights norm.
        '''
        super().__init__(dt=dt)

        self.n_inpt = n_inpt
        self.n_neurons = n_neurons
        self.dt = dt

        self.add_layer(Input(n=self.n_inpt,
                             traces=True,
                             trace_tc=5e-2),
                       name='X')

        self.add_layer(LIFNodes(n=self.n_neurons,
                                traces=True,
                                rest=-65.0,
@@ -37,7 +37,7 @@ def __init__(self, n_inpt, n_neurons=100, dt=1.0, nu_pre=1e-4, nu_post=1e-2, wmi
                                decay=1e-2,
                                trace_tc=5e-2),
                       name='Y')

        self.add_connection(Connection(source=self.layers['X'],
                                       target=self.layers['Y'],
                                       w=0.3 * torch.rand(self.n_inpt, self.n_neurons),
@@ -49,7 +49,7 @@ def __init__(self, n_inpt, n_neurons=100, dt=1.0, nu_pre=1e-4, nu_post=1e-2, wmi
                                       norm=norm),
                            source='X',
                            target='Y')


class DiehlAndCook2015(Network):
    '''
@@ -60,7 +60,7 @@ def __init__(self, n_inpt, n_neurons=100, exc=22.5, inh=17.5, dt=1.0, nu_pre=1e-
                 X_Ae_decay=None, Ae_Ai_decay=None, Ai_Ae_decay=None):
        '''
        Inputs:
            | :code:`n_inpt` (:code:`int`): Number of input neurons. Matches the 1D size of the input data.
            | :code:`n_neurons` (:code:`int`): Number of neurons in each of the excitatory and inhibitory layers.
            | :code:`exc` (:code:`float`): Strength of synapse weights from excitatory to inhibitory layer.
@@ -69,18 +69,18 @@ def __init__(self, n_inpt, n_neurons=100, exc=22.5, inh=17.5, dt=1.0, nu_pre=1e-
            | :code:`norm` (:code:`float`): Input to excitatory layer connection weights norm.
        '''
        super().__init__(dt=dt)

        self.n_inpt = n_inpt
        self.n_neurons = n_neurons
        self.exc = exc
        self.inh = inh
        self.dt = dt

        self.add_layer(Input(n=self.n_inpt,
                             traces=True,
                             trace_tc=5e-2),
                       name='X')

        self.add_layer(DiehlAndCookNodes(n=self.n_neurons,
                                         traces=True,
                                         rest=-65.0,
@@ -92,7 +92,7 @@ def __init__(self, n_inpt, n_neurons=100, exc=22.5, inh=17.5, dt=1.0, nu_pre=1e-
                                         theta_plus=theta_plus,
                                         theta_decay=theta_decay),
                       name='Ae')

        self.add_layer(LIFNodes(n=self.n_neurons,
                                traces=True,
                                rest=-60.0,
@@ -102,7 +102,7 @@ def __init__(self, n_inpt, n_neurons=100, exc=22.5, inh=17.5, dt=1.0, nu_pre=1e-
                                refrac=2,
                                trace_tc=5e-2),
                       name='Ai')

        self.add_connection(Connection(source=self.layers['X'],
                                       target=self.layers['Ae'],
                                       w=0.3 * torch.rand(self.n_inpt, self.n_neurons),
@@ -115,7 +115,7 @@ def __init__(self, n_inpt, n_neurons=100, exc=22.5, inh=17.5, dt=1.0, nu_pre=1e-
                                       decay=X_Ae_decay),
                            source='X',
                            target='Ae')

        self.add_connection(Connection(source=self.layers['Ae'],
                                       target=self.layers['Ai'],
                                       w=self.exc * torch.diag(torch.ones(self.n_neurons)),
@@ -124,7 +124,7 @@ def __init__(self, n_inpt, n_neurons=100, exc=22.5, inh=17.5, dt=1.0, nu_pre=1e-
                                       decay=Ae_Ai_decay),
                            source='Ae',
                            target='Ai')
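For context, here is a minimal usage sketch of the two models defined in this file. This is a hedged illustration, not part of the commit: the import path follows this commit's layout (bindsnet/models), n_inpt=784 is a hypothetical MNIST-sized input, and the layer names follow the add_layer calls shown in the diff above.

import torch

from bindsnet.models import TwoLayerNetwork, DiehlAndCook2015

# Input -> LIF network; arguments mirror the constructor signature above.
two_layer = TwoLayerNetwork(n_inpt=784, n_neurons=100, dt=1.0,
                            nu_pre=1e-4, nu_post=1e-2,
                            wmin=0, wmax=1, norm=78.4)

# Diehl & Cook (2015) network: input 'X', excitatory 'Ae', inhibitory 'Ai'.
diehl_cook = DiehlAndCook2015(n_inpt=784, n_neurons=100,
                              exc=22.5, inh=17.5, dt=1.0)

print(list(two_layer.layers.keys()))   # ['X', 'Y']
print(list(diehl_cook.layers.keys()))  # ['X', 'Ae', 'Ai']

# The Ai -> Ae weights in the last visible hunk implement lateral inhibition:
# every off-diagonal entry is -inh and the diagonal is zero, so no neuron
# inhibits itself.
n, inh = 3, 17.5
w = -inh * (torch.ones(n, n) - torch.diag(torch.ones(n)))

The Ae -> Ai weights use the complementary pattern, exc * torch.diag(torch.ones(n)), so each excitatory neuron drives only its matching inhibitory neuron, one-to-one.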

0 comments on commit 90bc9c7
