9 changes: 2 additions & 7 deletions bindsnet/network/__init__.py
@@ -98,12 +98,6 @@ def run(self, inpts, time):
for monitor in self.monitors:
self.monitors[monitor].record()

-# if self.train:
-# # Normalize synapse weights.
-# for synapse in self.connections:
-# if type(self.connections[synapse]) == connections.STDPconnections:
-# self.connections[synapse].normalize()

return spikes

def reset(self):
@@ -133,7 +127,8 @@ def get(self, var):

def record(self):
for var in self.state_vars:
-self.recording[var] = torch.cat([self.recording[var], self.obj.__dict__[var]])
+data = self.obj.__dict__[var].view(-1, 1)
+self.recording[var] = torch.cat([self.recording[var], data], 1)

def reset(self):
self.recording = {var : torch.Tensor() for var in self.state_vars}
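
The record() change above stops flattening every recorded variable into one long 1-D tensor and instead appends each state variable as a column, so the recording for an n-neuron variable grows to shape (n, timesteps). Below is a minimal standalone sketch of that behavior, not the library class: ToyNodes and ToyMonitor are illustrative names, and the empty-tensor guard is added for robustness across PyTorch versions.

import torch

class ToyNodes:
    def __init__(self, n):
        self.v = torch.zeros(n)  # One state variable to record.

class ToyMonitor:
    def __init__(self, obj, state_vars):
        self.obj = obj
        self.state_vars = state_vars
        self.recording = {var: torch.Tensor() for var in self.state_vars}

    def record(self):
        for var in self.state_vars:
            data = getattr(self.obj, var).view(-1, 1)  # (n, 1) column for this step.
            if self.recording[var].numel() == 0:
                self.recording[var] = data             # First step seeds the recording.
            else:
                self.recording[var] = torch.cat([self.recording[var], data], 1)

nodes = ToyNodes(n=5)
monitor = ToyMonitor(nodes, state_vars=['v'])
for _ in range(10):
    monitor.record()
print(monitor.recording['v'].shape)  # torch.Size([5, 10]) -- neurons x timesteps.

Indexing recording['v'][:, t] then returns the whole variable at timestep t, which the old flat concatenation made awkward.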
5 changes: 3 additions & 2 deletions bindsnet/network/connections.py
@@ -1,16 +1,17 @@
import torch


-def ETH_STDP(conn, nu_pre=1e-4, nu_post=1e-2):
+def ETH_STDP(conn, nu_pre=1e-4, nu_post=1e-2, norm=78.0):
# Post-synaptic.
conn.w += nu_post * (conn.source.x.view(conn.source.n,
1) * conn.target.s.float().view(1, conn.target.n))
# Pre-synaptic.
conn.w -= nu_pre * (conn.source.s.float().view(conn.source.n,
1) * conn.target.x.view(1, conn.target.n))

-# Ensure that weights are within [0, self.wmax].
+# Bound and re-normalize weights.
conn.w = torch.clamp(conn.w, 0, conn.wmax)
+conn.w *= norm / conn.w.sum(0).view(1, -1)


class Connection:
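
The weight normalization that was previously only sketched (and commented out) in Network.run is now folded directly into ETH_STDP: after clamping to [0, wmax], each column of conn.w, i.e. the incoming weights of one target neuron, is rescaled so that it sums to norm (78.0 by default). A minimal sketch of that column-wise rescaling under assumed shapes; renormalize is an illustrative name and w is assumed to be (n_source, n_target), not the library's Connection object.

import torch

def renormalize(w, wmax=1.0, norm=78.0):
    # Clamp weights into [0, wmax], then rescale each column (the weights
    # onto one post-synaptic neuron) so that it sums to `norm`.
    w = torch.clamp(w, 0, wmax)
    w = w * (norm / w.sum(0).view(1, -1))
    return w

w = torch.rand(100, 25)   # 100 pre-synaptic x 25 post-synaptic weights.
w = renormalize(w)
print(w.sum(0))           # Every column now sums to ~78.0.

Note that the learning terms themselves are outer products: the (n_source, 1) trace or spike vector broadcasts against the (1, n_target) vector, giving the full (n_source, n_target) update in one expression. If a column ended up all zero after clamping, the division would blow up, so the rule as written assumes at least one non-zero incoming weight per target neuron.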
4 changes: 3 additions & 1 deletion bindsnet/network/nodes.py
@@ -93,6 +93,7 @@ def __init__(self, n, traces=False, threshold=1.0, trace_tc=5e-2):
self.n = n # No. of neurons.
self.traces = traces # Whether to record synaptic traces.
self.threshold = threshold # Spike threshold voltage.
+self.v = torch.zeros_like(torch.Tensor(n)) # Neuron voltages.
self.s = torch.zeros_like(torch.Tensor(n)) # Spike occurrences.

if self.traces:
@@ -108,7 +109,8 @@ def step(self, inpts, dt):
of inputs to the layer, with size equal to self.n.
dt (float): Simulation time step.
'''
-self.s = inpts >= self.threshold # Check for spiking neurons.
+self.v = inpts
+self.s = self.v >= self.threshold # Check for spiking neurons.

if self.traces:
# Decay and set spike traces.
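
The nodes.py change keeps the raw input around as self.v before thresholding, so a Monitor can record a voltage-like trace from this layer instead of only spikes. A toy version of the updated constructor and step() is sketched below; ToyInputNodes is an illustrative name, and the trace update at the end is an assumption based on the trace_tc argument, since the trace code itself lies outside the hunk shown above.

import torch

class ToyInputNodes:
    def __init__(self, n, traces=False, threshold=1.0, trace_tc=5e-2):
        self.n = n
        self.traces = traces
        self.threshold = threshold
        self.trace_tc = trace_tc
        self.v = torch.zeros(n)                    # Neuron voltages (last input seen).
        self.s = torch.zeros(n, dtype=torch.bool)  # Spike occurrences.
        if self.traces:
            self.x = torch.zeros(n)                # Synaptic traces.

    def step(self, inpts, dt):
        self.v = inpts                             # Keep the input as recordable state.
        self.s = self.v >= self.threshold          # Spike where input crosses threshold.
        if self.traces:
            self.x -= dt * self.trace_tc * self.x  # Exponential decay of traces (assumed form).
            self.x[self.s] = 1.0                   # Traces of spiking neurons set to 1 (assumed).

layer = ToyInputNodes(n=5, traces=True)
layer.step(torch.tensor([0.2, 1.5, 0.9, 2.0, 0.0]), dt=1.0)
print(layer.v)  # tensor([0.2000, 1.5000, 0.9000, 2.0000, 0.0000])
print(layer.s)  # tensor([False,  True, False,  True, False])

Recording self.v with a Monitor as in the sketch after the __init__.py diff then yields an (n, timesteps) voltage trace alongside the spike record.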