From 820794e14e00054c047580614f6099a992ce5e04 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Wed, 3 Aug 2022 17:47:28 +0200 Subject: [PATCH 01/14] Added regression test for issue 2437 --- testsuite/regressiontests/issue-2437.py | 231 ++++++++++++++++++++++++ 1 file changed, 231 insertions(+) create mode 100644 testsuite/regressiontests/issue-2437.py diff --git a/testsuite/regressiontests/issue-2437.py b/testsuite/regressiontests/issue-2437.py new file mode 100644 index 0000000000..9c1ff45603 --- /dev/null +++ b/testsuite/regressiontests/issue-2437.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +# +# issue-2437.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +""" +This script ensures that the STDP synapses work correctly even for some edge cases. + +This is a regression test for GitHub issue 2437. +""" + +import nest +from math import exp +import numpy as np + + +class TestSTDPPlSynapse: + """ + Compare the STDP power-law synaptic plasticity model against a self-contained Python reference. + + Random pre and post spike times are generated according to a Poisson distribution; some hard-coded spike times are + added to make sure to test for edge cases such as simultaneous pre and post spike. + """ + + def __init__(self): + self.resolution = 0.1 # [ms] + self.simulation_duration = 1E2 # [ms] + self.synapse_model = "stdp_pl_synapse_hom" + self.nest_neuron_model = "iaf_psc_delta" + self.tau_pre = 20.0 + self.tau_post = 33.7 + self.init_weight = .5 + self.dendritic_delay = 1.0 + self.synapse_common_properties = { + "lambda": 0.1, + "alpha": 1.0, + "mu": 0.4, + "tau_plus": self.tau_pre, + } + self.synapse_parameters = { + "synapse_model": self.synapse_model, + "receptor_type": 0, + "delay": self.dendritic_delay, + "weight": self.init_weight + } + self.neuron_parameters = { + "tau_minus": self.tau_post, + "t_ref": 1.0 + } + + self.hardcoded_pre_times = np.array([1.5, 3.1], dtype=float) + self.hardcoded_post_times = np.array([0.2, 2.1, 3.4], dtype=float) + + def do_nest_simulation_and_compare_to_reproduced_weight(self): + pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest = self.do_the_nest_simulation() + + weight_reproduced = self.reproduce_weight_drift(pre_spikes, post_spikes, self.init_weight) + np.testing.assert_allclose(weight_by_nest, weight_reproduced) + + def do_the_nest_simulation(self): + """ + This function is where calls to NEST reside. Returns the generated pre- and post spike sequences and the + resulting weight established by STDP. 
+ """ + nest.set_verbosity('M_WARNING') + nest.ResetKernel() + nest.SetKernelStatus({'resolution': self.resolution}) + + # presynaptic_neuron = nest.Create(self.nest_neuron_model, 1, params=self.neuron_parameters) + presynaptic_neuron = nest.Create("parrot_neuron", 1) + postsynaptic_neuron = nest.Create(self.nest_neuron_model, 1, params=self.neuron_parameters) + + wr = nest.Create('weight_recorder') + nest.CopyModel(self.synapse_model, self.synapse_model + "_rec", {"weight_recorder": wr}) + + spike_senders = nest.Create("spike_generator", 2, params=({"spike_times": self.hardcoded_pre_times}, {"spike_times": self.hardcoded_post_times}) ) + pre_spike_generator = spike_senders[0] + post_spike_generator = spike_senders[1] + + # The recorder is to save the randomly generated spike trains. + spike_recorder = nest.Create("spike_recorder") + + nest.Connect(pre_spike_generator, presynaptic_neuron, syn_spec={"synapse_model": "static_synapse", "weight": 9999.}) + nest.Connect(post_spike_generator, postsynaptic_neuron, syn_spec={"synapse_model": "static_synapse", "weight": 9999.}) + nest.Connect(presynaptic_neuron + postsynaptic_neuron, spike_recorder, syn_spec={"synapse_model": "static_synapse"}) + + nest.SetDefaults(self.synapse_model + "_rec", self.synapse_common_properties) + + # The synapse of interest itself + self.synapse_parameters["synapse_model"] += "_rec" + nest.Connect(presynaptic_neuron, postsynaptic_neuron, syn_spec=self.synapse_parameters) + self.synapse_parameters["synapse_model"] = self.synapse_model + + nest.Simulate(self.simulation_duration) + + all_spikes = nest.GetStatus(spike_recorder, keys='events')[0] + pre_spikes = all_spikes['times'][all_spikes['senders'] == presynaptic_neuron.tolist()[0]] + post_spikes = all_spikes['times'][all_spikes['senders'] == postsynaptic_neuron.tolist()[0]] + + t_hist = nest.GetStatus(wr, "events")[0]["times"] + weight = nest.GetStatus(wr, "events")[0]["weights"] + + return pre_spikes, post_spikes, t_hist, weight + + def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight): + """Independent, self-contained model of STDP with power-law""" + + def facilitate(w, Kpre): + return w + self.synapse_common_properties["lambda"] * pow(w, self.synapse_common_properties["mu"]) * Kpre + + def depress(w, Kpost): + new_weight = w - self.synapse_common_properties["alpha"] * self.synapse_common_properties["lambda"] * w * Kpost + return new_weight if new_weight > 0.0 else 0.0 + + def Kpost_at_time(t, spikes, inclusive=True): + t_curr = 0. + Kpost = 0. + for spike_idx, t_sp in enumerate(spikes): + if t < t_sp: + # integrate to t + Kpost *= exp(-(t - t_curr) / self.tau_post) + return Kpost + # integrate to t_sp + Kpost *= exp(-(t_sp - t_curr) / self.tau_post) + if inclusive: + Kpost += 1. + if t == t_sp: + return Kpost + if not inclusive: + Kpost += 1. + t_curr = t_sp + # if we get here, t > t_last_spike + # integrate to t + Kpost *= exp(-(t - t_curr) / self.tau_post) + return Kpost + + eps = 1e-6 + t = 0. + idx_next_pre_spike = 0 + idx_next_post_spike = 0 + t_last_pre_spike = -1 + t_last_post_spike = -1 + Kpre = 0. 
+ weight = initial_weight + + w_log = [] + + post_spikes_delayed = post_spikes + self.dendritic_delay + + while t < self.simulation_duration: + if idx_next_pre_spike >= pre_spikes.size: + t_next_pre_spike = -1 + else: + t_next_pre_spike = pre_spikes[idx_next_pre_spike] + + if idx_next_post_spike >= post_spikes.size: + t_next_post_spike = -1 + else: + t_next_post_spike = post_spikes_delayed[idx_next_post_spike] + + if t_next_post_spike == -1: + a = 1 + + if t_next_post_spike >= 0 and (t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): + handle_pre_spike = False + handle_post_spike = True + idx_next_post_spike += 1 + elif t_next_pre_spike >= 0 and (t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): + handle_pre_spike = True + handle_post_spike = False + idx_next_pre_spike += 1 + else: + # simultaneous spikes (both true) or no more spikes to process (both false) + handle_pre_spike = t_next_pre_spike >= 0 + handle_post_spike = t_next_post_spike >= 0 + idx_next_pre_spike += 1 + idx_next_post_spike += 1 + + # integrate to min(t_next_pre_spike, t_next_post_spike) + t_next = t + if handle_pre_spike: + t_next = max(t, t_next_pre_spike) + if handle_post_spike: + t_next = max(t, t_next_post_spike) + + if t_next == t: + # no more spikes to process + t_next = self.simulation_duration + + h = t_next - t + Kpre *= exp(-h / self.tau_pre) + t = t_next + + if handle_post_spike: + if not handle_pre_spike or abs(t_next_post_spike - t_last_post_spike) > eps: + if abs(t_next_post_spike - t_last_pre_spike) > eps: + weight = facilitate(weight, Kpre) + + if handle_pre_spike: + Kpre += 1. + if not handle_post_spike or abs(t_next_pre_spike - t_last_pre_spike) > eps: + if abs(t_next_pre_spike - t_last_post_spike) > eps: + _Kpost = Kpost_at_time(t - self.dendritic_delay, post_spikes, inclusive=False) + weight = depress(weight, _Kpost) + t_last_pre_spike = t_next_pre_spike + w_log.append(weight) + + if handle_post_spike: + t_last_post_spike = t_next_post_spike + + return w_log + + +if __name__ == "__main__": + TestSTDPPlSynapse().do_nest_simulation_and_compare_to_reproduced_weight() From f97e2bf319460fbb3c922f866f9c8d89a7cfa15c Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Wed, 3 Aug 2022 17:48:56 +0200 Subject: [PATCH 02/14] Added min_delay property to archiving node and adding it in condition in set_spiketime to fix regression test for issue 2437 --- nestkernel/archiving_node.cpp | 7 +++++-- nestkernel/archiving_node.h | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index 609c4530c8..e748a77795 100644 --- a/nestkernel/archiving_node.cpp +++ b/nestkernel/archiving_node.cpp @@ -41,6 +41,7 @@ nest::ArchivingNode::ArchivingNode() , tau_minus_inv_( 1. / tau_minus_ ) , tau_minus_triplet_( 110.0 ) , tau_minus_triplet_inv_( 1. 
/ tau_minus_triplet_ ) + , min_delay_ ( std::numeric_limits::max() ) , max_delay_( 0 ) , trace_( 0.0 ) , last_spike_( -1.0 ) @@ -56,6 +57,7 @@ nest::ArchivingNode::ArchivingNode( const ArchivingNode& n ) , tau_minus_inv_( n.tau_minus_inv_ ) , tau_minus_triplet_( n.tau_minus_triplet_ ) , tau_minus_triplet_inv_( n.tau_minus_triplet_inv_ ) + , min_delay_( n.min_delay_ ) , max_delay_( n.max_delay_ ) , trace_( n.trace_ ) , last_spike_( n.last_spike_ ) @@ -66,7 +68,7 @@ void ArchivingNode::register_stdp_connection( double t_first_read, double delay ) { // Mark all entries in the deque, which we will not read in future as read by - // this input input, so that we savely increment the incoming number of + // this input, so that we savely increment the incoming number of // connections afterwards without leaving spikes in the history. // For details see bug #218. MH 08-04-22 @@ -79,6 +81,7 @@ ArchivingNode::register_stdp_connection( double t_first_read, double delay ) n_incoming_++; + min_delay_ = std::min( delay, min_delay_ ); max_delay_ = std::max( delay, max_delay_ ); } @@ -196,7 +199,7 @@ nest::ArchivingNode::set_spiketime( Time const& t_sp, double offset ) { const double next_t_sp = history_[ 1 ].t_; if ( history_.front().access_counter_ >= n_incoming_ - and t_sp_ms - next_t_sp > max_delay_ + kernel().connection_manager.get_stdp_eps() ) + and t_sp_ms - next_t_sp > min_delay_ + max_delay_ + kernel().connection_manager.get_stdp_eps() ) { history_.pop_front(); } diff --git a/nestkernel/archiving_node.h b/nestkernel/archiving_node.h index 5da56af8d7..dcfaee8d7d 100644 --- a/nestkernel/archiving_node.h +++ b/nestkernel/archiving_node.h @@ -166,6 +166,7 @@ class ArchivingNode : public StructuralPlasticityNode double tau_minus_triplet_; double tau_minus_triplet_inv_; + double min_delay_; double max_delay_; double trace_; From 1a7ac1332bc0f9aea57d292879d5116bd280a388 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Mon, 8 Aug 2022 12:45:41 +0200 Subject: [PATCH 03/14] Fixed comment in regression test 2437 --- testsuite/regressiontests/issue-2437.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/testsuite/regressiontests/issue-2437.py b/testsuite/regressiontests/issue-2437.py index 9c1ff45603..d2545c33ce 100644 --- a/testsuite/regressiontests/issue-2437.py +++ b/testsuite/regressiontests/issue-2437.py @@ -34,8 +34,7 @@ class TestSTDPPlSynapse: """ Compare the STDP power-law synaptic plasticity model against a self-contained Python reference. - Random pre and post spike times are generated according to a Poisson distribution; some hard-coded spike times are - added to make sure to test for edge cases such as simultaneous pre and post spike. + Pre and post spike times are hard-coded to make sure to test for the edge case. """ def __init__(self): From 47c1045d5f77657cd018226119f6acdd7d737881 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Tue, 4 Oct 2022 23:16:25 +0200 Subject: [PATCH 04/14] Revert "Added min_delay property to archiving node and adding it in condition in set_spiketime to fix regression test for issue 2437" This reverts commit f97e2bf319460fbb3c922f866f9c8d89a7cfa15c. --- nestkernel/archiving_node.cpp | 7 ++----- nestkernel/archiving_node.h | 1 - 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index e748a77795..609c4530c8 100644 --- a/nestkernel/archiving_node.cpp +++ b/nestkernel/archiving_node.cpp @@ -41,7 +41,6 @@ nest::ArchivingNode::ArchivingNode() , tau_minus_inv_( 1. 
/ tau_minus_ ) , tau_minus_triplet_( 110.0 ) , tau_minus_triplet_inv_( 1. / tau_minus_triplet_ ) - , min_delay_ ( std::numeric_limits::max() ) , max_delay_( 0 ) , trace_( 0.0 ) , last_spike_( -1.0 ) @@ -57,7 +56,6 @@ nest::ArchivingNode::ArchivingNode( const ArchivingNode& n ) , tau_minus_inv_( n.tau_minus_inv_ ) , tau_minus_triplet_( n.tau_minus_triplet_ ) , tau_minus_triplet_inv_( n.tau_minus_triplet_inv_ ) - , min_delay_( n.min_delay_ ) , max_delay_( n.max_delay_ ) , trace_( n.trace_ ) , last_spike_( n.last_spike_ ) @@ -68,7 +66,7 @@ void ArchivingNode::register_stdp_connection( double t_first_read, double delay ) { // Mark all entries in the deque, which we will not read in future as read by - // this input, so that we savely increment the incoming number of + // this input input, so that we savely increment the incoming number of // connections afterwards without leaving spikes in the history. // For details see bug #218. MH 08-04-22 @@ -81,7 +79,6 @@ ArchivingNode::register_stdp_connection( double t_first_read, double delay ) n_incoming_++; - min_delay_ = std::min( delay, min_delay_ ); max_delay_ = std::max( delay, max_delay_ ); } @@ -199,7 +196,7 @@ nest::ArchivingNode::set_spiketime( Time const& t_sp, double offset ) { const double next_t_sp = history_[ 1 ].t_; if ( history_.front().access_counter_ >= n_incoming_ - and t_sp_ms - next_t_sp > min_delay_ + max_delay_ + kernel().connection_manager.get_stdp_eps() ) + and t_sp_ms - next_t_sp > max_delay_ + kernel().connection_manager.get_stdp_eps() ) { history_.pop_front(); } diff --git a/nestkernel/archiving_node.h b/nestkernel/archiving_node.h index dcfaee8d7d..5da56af8d7 100644 --- a/nestkernel/archiving_node.h +++ b/nestkernel/archiving_node.h @@ -166,7 +166,6 @@ class ArchivingNode : public StructuralPlasticityNode double tau_minus_triplet_; double tau_minus_triplet_inv_; - double min_delay_; double max_delay_; double trace_; From 696592c2a6a20203ec8cd5dccf809f28a09e1e20 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Tue, 4 Oct 2022 23:22:20 +0200 Subject: [PATCH 05/14] Added min_delay in set_spiketime --- nestkernel/archiving_node.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index 609c4530c8..177f6b6c6f 100644 --- a/nestkernel/archiving_node.cpp +++ b/nestkernel/archiving_node.cpp @@ -196,7 +196,8 @@ nest::ArchivingNode::set_spiketime( Time const& t_sp, double offset ) { const double next_t_sp = history_[ 1 ].t_; if ( history_.front().access_counter_ >= n_incoming_ - and t_sp_ms - next_t_sp > max_delay_ + kernel().connection_manager.get_stdp_eps() ) + and t_sp_ms - next_t_sp + > max_delay_ + kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_stdp_eps() ) { history_.pop_front(); } From 51c4da47baa455910385ddf73928801ca598a381 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Fri, 4 Nov 2022 15:50:52 +0100 Subject: [PATCH 06/14] Merged regression test and existing stdp test --- nestkernel/archiving_node.cpp | 2 +- testsuite/pytests/test_stdp_synapse.py | 116 ++++++------ testsuite/regressiontests/issue-2437.py | 230 ------------------------ 3 files changed, 60 insertions(+), 288 deletions(-) delete mode 100644 testsuite/regressiontests/issue-2437.py diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index 177f6b6c6f..14b58eb44e 100644 --- a/nestkernel/archiving_node.cpp +++ b/nestkernel/archiving_node.cpp @@ -191,7 +191,7 @@ nest::ArchivingNode::set_spiketime( Time const& t_sp, double offset 
) // - its access counter indicates it has been read out by all connected // STDP synapses, and // - there is another, later spike, that is strictly more than - // (max_delay_ + eps) away from the new spike (at t_sp_ms) + // (min_global_delay + max_local_delay + eps) away from the new spike (at t_sp_ms) while ( history_.size() > 1 ) { const double next_t_sp = history_[ 1 ].t_; diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py index ac26431027..b7250d4f75 100644 --- a/testsuite/pytests/test_stdp_synapse.py +++ b/testsuite/pytests/test_stdp_synapse.py @@ -65,6 +65,7 @@ def init_params(self): } self.neuron_parameters = { "tau_minus": self.tau_post, + "t_ref": 1.0 } # While the random sequences, fairly long, would supposedly @@ -75,34 +76,27 @@ def init_params(self): # append some hardcoded spike sequences: # pre: 1 5 6 7 9 11 12 13 # post: 2 3 4 8 9 10 12 - self.hardcoded_pre_times = np.array([1, 5, 6, 7, 9, 11, 12, 13], dtype=float) - self.hardcoded_post_times = np.array([2, 3, 4, 8, 9, 10, 12], dtype=float) + self.hardcoded_pre_times = np.array([1, 5, 6, 7, 9, 11, 12, 13, 14.5, 16.1], dtype=float) + self.hardcoded_post_times = np.array([2, 3, 4, 8, 9, 10, 12, 13.2, 15.1, 16.4], dtype=float) self.hardcoded_trains_length = 2. + max(np.amax(self.hardcoded_pre_times), np.amax(self.hardcoded_post_times)) def do_nest_simulation_and_compare_to_reproduced_weight(self, fname_snip): pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest = self.do_the_nest_simulation() if DEBUG_PLOTS: self.plot_weight_evolution(pre_spikes, post_spikes, - t_weight_by_nest, - weight_by_nest, + t_weight_by_nest, weight_by_nest, fname_snip=fname_snip, title_snip=self.nest_neuron_model + " (NEST)") t_weight_reproduced_independently, weight_reproduced_independently = self.reproduce_weight_drift( - pre_spikes, post_spikes, - self.init_weight, - fname_snip=fname_snip) + pre_spikes, post_spikes, self.init_weight, fname_snip=fname_snip) # ``weight_by_nest`` containts only weight values at pre spike times, ``weight_reproduced_independently`` - # contains the weight at pre *and* post times: check that weights are equal only for pre spike times + # contains the weight at pre *and* post times: check that weights are equal for pre spike times assert len(weight_by_nest) > 0 - for idx_pre_spike_nest, t_pre_spike_nest in enumerate(t_weight_by_nest): - idx_pre_spike_reproduced_independently = \ - np.argmin((t_pre_spike_nest - t_weight_reproduced_independently)**2) - np.testing.assert_allclose(t_pre_spike_nest, - t_weight_reproduced_independently[idx_pre_spike_reproduced_independently]) - np.testing.assert_allclose(weight_by_nest[idx_pre_spike_nest], - weight_reproduced_independently[idx_pre_spike_reproduced_independently]) + np.testing.assert_allclose(t_weight_by_nest, t_weight_reproduced_independently) + np.testing.assert_allclose(weight_by_nest, weight_reproduced_independently) + def do_the_nest_simulation(self): """ @@ -135,9 +129,9 @@ def do_the_nest_simulation(self): "spike_generator", 2, params=({"spike_times": self.hardcoded_pre_times - + self.simulation_duration - self.hardcoded_trains_length}, + + self.simulation_duration - self.hardcoded_trains_length}, {"spike_times": self.hardcoded_post_times - + self.simulation_duration - self.hardcoded_trains_length}) + + self.simulation_duration - self.hardcoded_trains_length}) ) pre_spike_generator = spike_senders[0] post_spike_generator = spike_senders[1] @@ -171,8 +165,8 @@ def reproduce_weight_drift(self, pre_spikes, post_spikes, 
initial_weight, fname_ """Independent, self-contained model of STDP""" def facilitate(w, Kpre, Wmax_=1.): norm_w = (w / self.synapse_parameters["Wmax"]) + ( - self.synapse_parameters["lambda"] * pow( - 1 - (w / self.synapse_parameters["Wmax"]), self.synapse_parameters["mu_plus"]) * Kpre) + self.synapse_parameters["lambda"] * pow( + 1 - (w / self.synapse_parameters["Wmax"]), self.synapse_parameters["mu_plus"]) * Kpre) if norm_w < 1.0: return norm_w * self.synapse_parameters["Wmax"] else: @@ -180,8 +174,8 @@ def facilitate(w, Kpre, Wmax_=1.): def depress(w, Kpost): norm_w = (w / self.synapse_parameters["Wmax"]) - ( - self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow( - w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost) + self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow( + w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost) if norm_w > 0.0: return norm_w * self.synapse_parameters["Wmax"] else: @@ -209,42 +203,50 @@ def Kpost_at_time(t, spikes, inclusive=True): Kpost *= exp(-(t - t_curr) / self.tau_post) return Kpost + eps = 1e-6 t = 0. + idx_next_pre_spike = 0 + idx_next_post_spike = 0 + t_last_pre_spike = -1 + t_last_post_spike = -1 Kpre = 0. weight = initial_weight - t_log = [] - w_log = [] - Kpre_log = [] - - # logging - t_log.append(t) - w_log.append(weight) - Kpre_log.append(Kpre) + t_log = list() + w_log = dict() + Kpre_log = list() + pre_spike_times = list() post_spikes_delayed = post_spikes + self.dendritic_delay while t < self.simulation_duration: - idx_next_pre_spike = -1 - if np.where((pre_spikes - t) > 0)[0].size > 0: - idx_next_pre_spike = np.where((pre_spikes - t) > 0)[0][0] + if idx_next_pre_spike >= pre_spikes.size: + t_next_pre_spike = -1 + else: t_next_pre_spike = pre_spikes[idx_next_pre_spike] - idx_next_post_spike = -1 - if np.where((post_spikes_delayed - t) > 0)[0].size > 0: - idx_next_post_spike = np.where((post_spikes_delayed - t) > 0)[0][0] + if idx_next_post_spike >= post_spikes.size: + t_next_post_spike = -1 + else: t_next_post_spike = post_spikes_delayed[idx_next_post_spike] - if idx_next_pre_spike >= 0 and idx_next_post_spike >= 0 and t_next_post_spike < t_next_pre_spike: - handle_post_spike = True + if t_next_post_spike == -1: + a = 1 + + if t_next_post_spike >= 0 and (t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): handle_pre_spike = False - elif idx_next_pre_spike >= 0 and idx_next_post_spike >= 0 and t_next_post_spike > t_next_pre_spike: - handle_post_spike = False + handle_post_spike = True + idx_next_post_spike += 1 + elif t_next_pre_spike >= 0 and (t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): handle_pre_spike = True + handle_post_spike = False + idx_next_pre_spike += 1 else: # simultaneous spikes (both true) or no more spikes to process (both false) - handle_post_spike = idx_next_post_spike >= 0 - handle_pre_spike = idx_next_pre_spike >= 0 + handle_pre_spike = t_next_pre_spike >= 0 + handle_post_spike = t_next_post_spike >= 0 + idx_next_pre_spike += 1 + idx_next_post_spike += 1 # integrate to min(t_next_pre_spike, t_next_post_spike) t_next = t @@ -257,37 +259,37 @@ def Kpost_at_time(t, spikes, inclusive=True): # no more spikes to process t_next = self.simulation_duration - '''# max timestep - t_next_ = min(t_next, t + 1E-3) - if t_next != t_next_: - t_next = t_next_ - handle_pre_spike = False - handle_post_spike = False''' - h = t_next - t Kpre *= exp(-h / self.tau_pre) t = t_next if 
handle_post_spike: - # Kpost += 1. <-- not necessary, will call Kpost_at_time(t) later to compute Kpost for any value t - weight = facilitate(weight, Kpre) + if not handle_pre_spike or abs(t_next_post_spike - t_last_post_spike) > eps: + if abs(t_next_post_spike - t_last_pre_spike) > eps: + weight = facilitate(weight, Kpre) if handle_pre_spike: Kpre += 1. - _Kpost = Kpost_at_time(t - self.dendritic_delay, post_spikes, inclusive=False) - weight = depress(weight, _Kpost) + if not handle_post_spike or abs(t_next_pre_spike - t_last_pre_spike) > eps: + if abs(t_next_pre_spike - t_last_post_spike) > eps: + _Kpost = Kpost_at_time(t - self.dendritic_delay, post_spikes, inclusive=False) + weight = depress(weight, _Kpost) + t_last_pre_spike = t_next_pre_spike + pre_spike_times.append(t) + + if handle_post_spike: + t_last_post_spike = t_next_post_spike - # logging - t_log.append(t) - w_log.append(weight) Kpre_log.append(Kpre) + w_log[t] = weight + t_log.append(t) Kpost_log = [Kpost_at_time(t - self.dendritic_delay, post_spikes) for t in t_log] if DEBUG_PLOTS: - self.plot_weight_evolution(pre_spikes, post_spikes, t_log, w_log, Kpre_log, Kpost_log, + self.plot_weight_evolution(pre_spikes, post_spikes, t_log, w_log.values(), Kpre_log, Kpost_log, fname_snip=fname_snip + "_ref", title_snip="Reference") - return t_log, w_log + return pre_spike_times, [w for t, w in w_log.items() if t in pre_spike_times] def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, Kpre_log=None, Kpost_log=None, fname_snip="", title_snip=""): diff --git a/testsuite/regressiontests/issue-2437.py b/testsuite/regressiontests/issue-2437.py deleted file mode 100644 index d2545c33ce..0000000000 --- a/testsuite/regressiontests/issue-2437.py +++ /dev/null @@ -1,230 +0,0 @@ -# -*- coding: utf-8 -*- -# -# issue-2437.py -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -""" -This script ensures that the STDP synapses work correctly even for some edge cases. - -This is a regression test for GitHub issue 2437. -""" - -import nest -from math import exp -import numpy as np - - -class TestSTDPPlSynapse: - """ - Compare the STDP power-law synaptic plasticity model against a self-contained Python reference. - - Pre and post spike times are hard-coded to make sure to test for the edge case. 
- """ - - def __init__(self): - self.resolution = 0.1 # [ms] - self.simulation_duration = 1E2 # [ms] - self.synapse_model = "stdp_pl_synapse_hom" - self.nest_neuron_model = "iaf_psc_delta" - self.tau_pre = 20.0 - self.tau_post = 33.7 - self.init_weight = .5 - self.dendritic_delay = 1.0 - self.synapse_common_properties = { - "lambda": 0.1, - "alpha": 1.0, - "mu": 0.4, - "tau_plus": self.tau_pre, - } - self.synapse_parameters = { - "synapse_model": self.synapse_model, - "receptor_type": 0, - "delay": self.dendritic_delay, - "weight": self.init_weight - } - self.neuron_parameters = { - "tau_minus": self.tau_post, - "t_ref": 1.0 - } - - self.hardcoded_pre_times = np.array([1.5, 3.1], dtype=float) - self.hardcoded_post_times = np.array([0.2, 2.1, 3.4], dtype=float) - - def do_nest_simulation_and_compare_to_reproduced_weight(self): - pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest = self.do_the_nest_simulation() - - weight_reproduced = self.reproduce_weight_drift(pre_spikes, post_spikes, self.init_weight) - np.testing.assert_allclose(weight_by_nest, weight_reproduced) - - def do_the_nest_simulation(self): - """ - This function is where calls to NEST reside. Returns the generated pre- and post spike sequences and the - resulting weight established by STDP. - """ - nest.set_verbosity('M_WARNING') - nest.ResetKernel() - nest.SetKernelStatus({'resolution': self.resolution}) - - # presynaptic_neuron = nest.Create(self.nest_neuron_model, 1, params=self.neuron_parameters) - presynaptic_neuron = nest.Create("parrot_neuron", 1) - postsynaptic_neuron = nest.Create(self.nest_neuron_model, 1, params=self.neuron_parameters) - - wr = nest.Create('weight_recorder') - nest.CopyModel(self.synapse_model, self.synapse_model + "_rec", {"weight_recorder": wr}) - - spike_senders = nest.Create("spike_generator", 2, params=({"spike_times": self.hardcoded_pre_times}, {"spike_times": self.hardcoded_post_times}) ) - pre_spike_generator = spike_senders[0] - post_spike_generator = spike_senders[1] - - # The recorder is to save the randomly generated spike trains. 
- spike_recorder = nest.Create("spike_recorder") - - nest.Connect(pre_spike_generator, presynaptic_neuron, syn_spec={"synapse_model": "static_synapse", "weight": 9999.}) - nest.Connect(post_spike_generator, postsynaptic_neuron, syn_spec={"synapse_model": "static_synapse", "weight": 9999.}) - nest.Connect(presynaptic_neuron + postsynaptic_neuron, spike_recorder, syn_spec={"synapse_model": "static_synapse"}) - - nest.SetDefaults(self.synapse_model + "_rec", self.synapse_common_properties) - - # The synapse of interest itself - self.synapse_parameters["synapse_model"] += "_rec" - nest.Connect(presynaptic_neuron, postsynaptic_neuron, syn_spec=self.synapse_parameters) - self.synapse_parameters["synapse_model"] = self.synapse_model - - nest.Simulate(self.simulation_duration) - - all_spikes = nest.GetStatus(spike_recorder, keys='events')[0] - pre_spikes = all_spikes['times'][all_spikes['senders'] == presynaptic_neuron.tolist()[0]] - post_spikes = all_spikes['times'][all_spikes['senders'] == postsynaptic_neuron.tolist()[0]] - - t_hist = nest.GetStatus(wr, "events")[0]["times"] - weight = nest.GetStatus(wr, "events")[0]["weights"] - - return pre_spikes, post_spikes, t_hist, weight - - def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight): - """Independent, self-contained model of STDP with power-law""" - - def facilitate(w, Kpre): - return w + self.synapse_common_properties["lambda"] * pow(w, self.synapse_common_properties["mu"]) * Kpre - - def depress(w, Kpost): - new_weight = w - self.synapse_common_properties["alpha"] * self.synapse_common_properties["lambda"] * w * Kpost - return new_weight if new_weight > 0.0 else 0.0 - - def Kpost_at_time(t, spikes, inclusive=True): - t_curr = 0. - Kpost = 0. - for spike_idx, t_sp in enumerate(spikes): - if t < t_sp: - # integrate to t - Kpost *= exp(-(t - t_curr) / self.tau_post) - return Kpost - # integrate to t_sp - Kpost *= exp(-(t_sp - t_curr) / self.tau_post) - if inclusive: - Kpost += 1. - if t == t_sp: - return Kpost - if not inclusive: - Kpost += 1. - t_curr = t_sp - # if we get here, t > t_last_spike - # integrate to t - Kpost *= exp(-(t - t_curr) / self.tau_post) - return Kpost - - eps = 1e-6 - t = 0. - idx_next_pre_spike = 0 - idx_next_post_spike = 0 - t_last_pre_spike = -1 - t_last_post_spike = -1 - Kpre = 0. 
- weight = initial_weight - - w_log = [] - - post_spikes_delayed = post_spikes + self.dendritic_delay - - while t < self.simulation_duration: - if idx_next_pre_spike >= pre_spikes.size: - t_next_pre_spike = -1 - else: - t_next_pre_spike = pre_spikes[idx_next_pre_spike] - - if idx_next_post_spike >= post_spikes.size: - t_next_post_spike = -1 - else: - t_next_post_spike = post_spikes_delayed[idx_next_post_spike] - - if t_next_post_spike == -1: - a = 1 - - if t_next_post_spike >= 0 and (t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): - handle_pre_spike = False - handle_post_spike = True - idx_next_post_spike += 1 - elif t_next_pre_spike >= 0 and (t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): - handle_pre_spike = True - handle_post_spike = False - idx_next_pre_spike += 1 - else: - # simultaneous spikes (both true) or no more spikes to process (both false) - handle_pre_spike = t_next_pre_spike >= 0 - handle_post_spike = t_next_post_spike >= 0 - idx_next_pre_spike += 1 - idx_next_post_spike += 1 - - # integrate to min(t_next_pre_spike, t_next_post_spike) - t_next = t - if handle_pre_spike: - t_next = max(t, t_next_pre_spike) - if handle_post_spike: - t_next = max(t, t_next_post_spike) - - if t_next == t: - # no more spikes to process - t_next = self.simulation_duration - - h = t_next - t - Kpre *= exp(-h / self.tau_pre) - t = t_next - - if handle_post_spike: - if not handle_pre_spike or abs(t_next_post_spike - t_last_post_spike) > eps: - if abs(t_next_post_spike - t_last_pre_spike) > eps: - weight = facilitate(weight, Kpre) - - if handle_pre_spike: - Kpre += 1. - if not handle_post_spike or abs(t_next_pre_spike - t_last_pre_spike) > eps: - if abs(t_next_pre_spike - t_last_post_spike) > eps: - _Kpost = Kpost_at_time(t - self.dendritic_delay, post_spikes, inclusive=False) - weight = depress(weight, _Kpost) - t_last_pre_spike = t_next_pre_spike - w_log.append(weight) - - if handle_post_spike: - t_last_post_spike = t_next_post_spike - - return w_log - - -if __name__ == "__main__": - TestSTDPPlSynapse().do_nest_simulation_and_compare_to_reproduced_weight() From dd239c5e4ce2d75a90d743a56b814a1ae33f34a3 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Mon, 14 Nov 2022 12:38:02 +0100 Subject: [PATCH 07/14] Fixed formatting for python test --- testsuite/pytests/test_stdp_synapse.py | 111 ++++++++++++++++--------- 1 file changed, 73 insertions(+), 38 deletions(-) diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py index b7250d4f75..438a9a75a1 100644 --- a/testsuite/pytests/test_stdp_synapse.py +++ b/testsuite/pytests/test_stdp_synapse.py @@ -27,6 +27,7 @@ try: import matplotlib as mpl import matplotlib.pyplot as plt + DEBUG_PLOTS = True except Exception: DEBUG_PLOTS = False @@ -42,11 +43,11 @@ class TestSTDPSynapse: """ def init_params(self): - self.resolution = 0.1 # [ms] - self.simulation_duration = 1E3 # [ms] + self.resolution = 0.1 # [ms] + self.simulation_duration = 1E3 # [ms] self.synapse_model = "stdp_synapse" - self.presynaptic_firing_rate = 20. # [ms^-1] - self.postsynaptic_firing_rate = 20. # [ms^-1] + self.presynaptic_firing_rate = 20. # [ms^-1] + self.postsynaptic_firing_rate = 20. 
# [ms^-1] self.tau_pre = 16.8 self.tau_post = 33.7 self.init_weight = .5 @@ -76,9 +77,13 @@ def init_params(self): # append some hardcoded spike sequences: # pre: 1 5 6 7 9 11 12 13 # post: 2 3 4 8 9 10 12 - self.hardcoded_pre_times = np.array([1, 5, 6, 7, 9, 11, 12, 13, 14.5, 16.1], dtype=float) - self.hardcoded_post_times = np.array([2, 3, 4, 8, 9, 10, 12, 13.2, 15.1, 16.4], dtype=float) - self.hardcoded_trains_length = 2. + max(np.amax(self.hardcoded_pre_times), np.amax(self.hardcoded_post_times)) + self.hardcoded_pre_times = np.array( + [1, 5, 6, 7, 9, 11, 12, 13, 14.5, 16.1], dtype=float) + self.hardcoded_post_times = np.array( + [2, 3, 4, 8, 9, 10, 12, 13.2, 15.1, 16.4], dtype=float) + self.hardcoded_trains_length = 2. + max( + np.amax(self.hardcoded_pre_times), + np.amax(self.hardcoded_post_times)) def do_nest_simulation_and_compare_to_reproduced_weight(self, fname_snip): pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest = self.do_the_nest_simulation() @@ -94,9 +99,10 @@ def do_nest_simulation_and_compare_to_reproduced_weight(self, fname_snip): # ``weight_by_nest`` containts only weight values at pre spike times, ``weight_reproduced_independently`` # contains the weight at pre *and* post times: check that weights are equal for pre spike times assert len(weight_by_nest) > 0 - np.testing.assert_allclose(t_weight_by_nest, t_weight_reproduced_independently) - np.testing.assert_allclose(weight_by_nest, weight_reproduced_independently) - + np.testing.assert_allclose(t_weight_by_nest, + t_weight_reproduced_independently) + np.testing.assert_allclose(weight_by_nest, + weight_reproduced_independently) def do_the_nest_simulation(self): """ @@ -116,14 +122,17 @@ def do_the_nest_simulation(self): "poisson_generator", 2, params=({"rate": self.presynaptic_firing_rate, - "stop": (self.simulation_duration - self.hardcoded_trains_length)}, + "stop": ( + self.simulation_duration - self.hardcoded_trains_length)}, {"rate": self.postsynaptic_firing_rate, - "stop": (self.simulation_duration - self.hardcoded_trains_length)})) + "stop": ( + self.simulation_duration - self.hardcoded_trains_length)})) presynaptic_generator = generators[0] postsynaptic_generator = generators[1] wr = nest.Create('weight_recorder') - nest.CopyModel(self.synapse_model, self.synapse_model + "_rec", {"weight_recorder": wr}) + nest.CopyModel(self.synapse_model, self.synapse_model + "_rec", + {"weight_recorder": wr}) spike_senders = nest.Create( "spike_generator", @@ -139,34 +148,44 @@ def do_the_nest_simulation(self): # The recorder is to save the randomly generated spike trains. 
spike_recorder = nest.Create("spike_recorder") - nest.Connect(presynaptic_generator + pre_spike_generator, presynaptic_neuron, - syn_spec={"synapse_model": "static_synapse", "weight": 9999.}) - nest.Connect(postsynaptic_generator + post_spike_generator, postsynaptic_neuron, - syn_spec={"synapse_model": "static_synapse", "weight": 9999.}) + nest.Connect(presynaptic_generator + pre_spike_generator, + presynaptic_neuron, + syn_spec={"synapse_model": "static_synapse", + "weight": 9999.}) + nest.Connect(postsynaptic_generator + post_spike_generator, + postsynaptic_neuron, + syn_spec={"synapse_model": "static_synapse", + "weight": 9999.}) nest.Connect(presynaptic_neuron + postsynaptic_neuron, spike_recorder, syn_spec={"synapse_model": "static_synapse"}) # The synapse of interest itself self.synapse_parameters["synapse_model"] += "_rec" - nest.Connect(presynaptic_neuron, postsynaptic_neuron, syn_spec=self.synapse_parameters) + nest.Connect(presynaptic_neuron, postsynaptic_neuron, + syn_spec=self.synapse_parameters) self.synapse_parameters["synapse_model"] = self.synapse_model nest.Simulate(self.simulation_duration) all_spikes = nest.GetStatus(spike_recorder, keys='events')[0] - pre_spikes = all_spikes['times'][all_spikes['senders'] == presynaptic_neuron.tolist()[0]] - post_spikes = all_spikes['times'][all_spikes['senders'] == postsynaptic_neuron.tolist()[0]] + pre_spikes = all_spikes['times'][ + all_spikes['senders'] == presynaptic_neuron.tolist()[0]] + post_spikes = all_spikes['times'][ + all_spikes['senders'] == postsynaptic_neuron.tolist()[0]] t_hist = nest.GetStatus(wr, "events")[0]["times"] weight = nest.GetStatus(wr, "events")[0]["weights"] return pre_spikes, post_spikes, t_hist, weight - def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight, fname_snip=""): + def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight, + fname_snip=""): """Independent, self-contained model of STDP""" + def facilitate(w, Kpre, Wmax_=1.): norm_w = (w / self.synapse_parameters["Wmax"]) + ( self.synapse_parameters["lambda"] * pow( - 1 - (w / self.synapse_parameters["Wmax"]), self.synapse_parameters["mu_plus"]) * Kpre) + 1 - (w / self.synapse_parameters["Wmax"]), + self.synapse_parameters["mu_plus"]) * Kpre) if norm_w < 1.0: return norm_w * self.synapse_parameters["Wmax"] else: @@ -174,8 +193,10 @@ def facilitate(w, Kpre, Wmax_=1.): def depress(w, Kpost): norm_w = (w / self.synapse_parameters["Wmax"]) - ( - self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow( - w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost) + self.synapse_parameters["alpha"] * self.synapse_parameters[ + "lambda"] * pow( + w / self.synapse_parameters["Wmax"], + self.synapse_parameters["mu_minus"]) * Kpost) if norm_w > 0.0: return norm_w * self.synapse_parameters["Wmax"] else: @@ -233,11 +254,13 @@ def Kpost_at_time(t, spikes, inclusive=True): if t_next_post_spike == -1: a = 1 - if t_next_post_spike >= 0 and (t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): + if t_next_post_spike >= 0 and ( + t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): handle_pre_spike = False handle_post_spike = True idx_next_post_spike += 1 - elif t_next_pre_spike >= 0 and (t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): + elif t_next_pre_spike >= 0 and ( + t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): handle_pre_spike = True handle_post_spike = False idx_next_pre_spike += 1 @@ -264,15 +287,18 @@ 
def Kpost_at_time(t, spikes, inclusive=True): t = t_next if handle_post_spike: - if not handle_pre_spike or abs(t_next_post_spike - t_last_post_spike) > eps: + if not handle_pre_spike or abs( + t_next_post_spike - t_last_post_spike) > eps: if abs(t_next_post_spike - t_last_pre_spike) > eps: weight = facilitate(weight, Kpre) if handle_pre_spike: Kpre += 1. - if not handle_post_spike or abs(t_next_pre_spike - t_last_pre_spike) > eps: + if not handle_post_spike or abs( + t_next_pre_spike - t_last_pre_spike) > eps: if abs(t_next_pre_spike - t_last_post_spike) > eps: - _Kpost = Kpost_at_time(t - self.dendritic_delay, post_spikes, inclusive=False) + _Kpost = Kpost_at_time(t - self.dendritic_delay, + post_spikes, inclusive=False) weight = depress(weight, _Kpost) t_last_pre_spike = t_next_pre_spike pre_spike_times.append(t) @@ -284,20 +310,26 @@ def Kpost_at_time(t, spikes, inclusive=True): w_log[t] = weight t_log.append(t) - Kpost_log = [Kpost_at_time(t - self.dendritic_delay, post_spikes) for t in t_log] + Kpost_log = [Kpost_at_time(t - self.dendritic_delay, post_spikes) for t + in t_log] if DEBUG_PLOTS: - self.plot_weight_evolution(pre_spikes, post_spikes, t_log, w_log.values(), Kpre_log, Kpost_log, - fname_snip=fname_snip + "_ref", title_snip="Reference") + self.plot_weight_evolution(pre_spikes, post_spikes, t_log, + w_log.values(), Kpre_log, Kpost_log, + fname_snip=fname_snip + "_ref", + title_snip="Reference") - return pre_spike_times, [w for t, w in w_log.items() if t in pre_spike_times] + return pre_spike_times, [w for t, w in w_log.items() if + t in pre_spike_times] - def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, Kpre_log=None, Kpost_log=None, + def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, + Kpre_log=None, Kpost_log=None, fname_snip="", title_snip=""): fig, ax = plt.subplots(nrows=3) n_spikes = len(pre_spikes) for i in range(n_spikes): - ax[0].plot(2 * [pre_spikes[i]], [0, 1], linewidth=2, color="blue", alpha=.4) + ax[0].plot(2 * [pre_spikes[i]], [0, 1], linewidth=2, color="blue", + alpha=.4) ax[0].set_ylabel("Pre spikes") ax0_ = ax[0].twinx() if Kpre_log: @@ -305,7 +337,8 @@ def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, Kpre_log= n_spikes = len(post_spikes) for i in range(n_spikes): - ax[1].plot(2 * [post_spikes[i]], [0, 1], linewidth=2, color="red", alpha=.4) + ax[1].plot(2 * [post_spikes[i]], [0, 1], linewidth=2, color="red", + alpha=.4) ax1_ = ax[1].twinx() ax[1].set_ylabel("Post spikes") if Kpost_log: @@ -322,7 +355,8 @@ def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, Kpre_log= _ax.set_xlim(0., self.simulation_duration) fig.suptitle(title_snip) - fig.savefig("/tmp/nest_stdp_synapse_test" + fname_snip + ".png", dpi=300) + fig.savefig("/tmp/nest_stdp_synapse_test" + fname_snip + ".png", + dpi=300) plt.close(fig) def test_stdp_synapse(self): @@ -333,4 +367,5 @@ def test_stdp_synapse(self): for self.nest_neuron_model in ["iaf_psc_exp", "iaf_cond_exp"]: fname_snip = "_[nest_neuron_mdl=" + self.nest_neuron_model + "]" fname_snip += "_[dend_delay=" + str(self.dendritic_delay) + "]" - self.do_nest_simulation_and_compare_to_reproduced_weight(fname_snip=fname_snip) + self.do_nest_simulation_and_compare_to_reproduced_weight( + fname_snip=fname_snip) From 31f2209d1656e195038bca14c1bbf78ef68f84d1 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Mon, 14 Nov 2022 12:52:42 +0100 Subject: [PATCH 08/14] Fixed formatting for python test --- testsuite/pytests/test_stdp_synapse.py | 22 
+++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py index 438a9a75a1..f9725f9e1d 100644 --- a/testsuite/pytests/test_stdp_synapse.py +++ b/testsuite/pytests/test_stdp_synapse.py @@ -123,10 +123,10 @@ def do_the_nest_simulation(self): 2, params=({"rate": self.presynaptic_firing_rate, "stop": ( - self.simulation_duration - self.hardcoded_trains_length)}, + self.simulation_duration - self.hardcoded_trains_length)}, {"rate": self.postsynaptic_firing_rate, "stop": ( - self.simulation_duration - self.hardcoded_trains_length)})) + self.simulation_duration - self.hardcoded_trains_length)})) presynaptic_generator = generators[0] postsynaptic_generator = generators[1] @@ -182,21 +182,21 @@ def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight, """Independent, self-contained model of STDP""" def facilitate(w, Kpre, Wmax_=1.): - norm_w = (w / self.synapse_parameters["Wmax"]) + ( - self.synapse_parameters["lambda"] * pow( - 1 - (w / self.synapse_parameters["Wmax"]), - self.synapse_parameters["mu_plus"]) * Kpre) + norm_w = (w / self.synapse_parameters["Wmax"]) + \ + (self.synapse_parameters["lambda"] * + pow(1 - (w / self.synapse_parameters["Wmax"]), + self.synapse_parameters["mu_plus"]) * Kpre) if norm_w < 1.0: return norm_w * self.synapse_parameters["Wmax"] else: return self.synapse_parameters["Wmax"] def depress(w, Kpost): - norm_w = (w / self.synapse_parameters["Wmax"]) - ( - self.synapse_parameters["alpha"] * self.synapse_parameters[ - "lambda"] * pow( - w / self.synapse_parameters["Wmax"], - self.synapse_parameters["mu_minus"]) * Kpost) + norm_w = (w / self.synapse_parameters["Wmax"]) - \ + (self.synapse_parameters["alpha"] * + self.synapse_parameters["lambda"] * + pow(w / self.synapse_parameters["Wmax"], + self.synapse_parameters["mu_minus"]) * Kpost) if norm_w > 0.0: return norm_w * self.synapse_parameters["Wmax"] else: From 3e65abcafa1c2decdc404e660987a0159110f6f4 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Mon, 14 Nov 2022 12:58:17 +0100 Subject: [PATCH 09/14] Fixed formatting for python test --- testsuite/pytests/test_stdp_synapse.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py index f9725f9e1d..b5eb9b4b41 100644 --- a/testsuite/pytests/test_stdp_synapse.py +++ b/testsuite/pytests/test_stdp_synapse.py @@ -138,9 +138,9 @@ def do_the_nest_simulation(self): "spike_generator", 2, params=({"spike_times": self.hardcoded_pre_times - + self.simulation_duration - self.hardcoded_trains_length}, + + self.simulation_duration - self.hardcoded_trains_length}, {"spike_times": self.hardcoded_post_times - + self.simulation_duration - self.hardcoded_trains_length}) + + self.simulation_duration - self.hardcoded_trains_length}) ) pre_spike_generator = spike_senders[0] post_spike_generator = spike_senders[1] From d57a0ef9e7113582f1227d6146b09c32a55f93a0 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Tue, 6 Dec 2022 11:38:23 +0100 Subject: [PATCH 10/14] Fixed formatting and modified issue-77.sli to work with updated STDP code --- nestkernel/archiving_node.cpp | 5 ++--- testsuite/pytests/test_stdp_synapse.py | 7 ++----- testsuite/regressiontests/issue-77.sli | 5 +++-- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index 14b58eb44e..b53cea4283 100644 --- a/nestkernel/archiving_node.cpp +++ 
b/nestkernel/archiving_node.cpp @@ -195,9 +195,8 @@ nest::ArchivingNode::set_spiketime( Time const& t_sp, double offset ) while ( history_.size() > 1 ) { const double next_t_sp = history_[ 1 ].t_; - if ( history_.front().access_counter_ >= n_incoming_ - and t_sp_ms - next_t_sp - > max_delay_ + kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_stdp_eps() ) + if ( history_.front().access_counter_ >= n_incoming_ and t_sp_ms - next_t_sp > max_delay_ + + kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_stdp_eps() ) { history_.pop_front(); } diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py index b5eb9b4b41..df853e74d7 100644 --- a/testsuite/pytests/test_stdp_synapse.py +++ b/testsuite/pytests/test_stdp_synapse.py @@ -251,9 +251,6 @@ def Kpost_at_time(t, spikes, inclusive=True): else: t_next_post_spike = post_spikes_delayed[idx_next_post_spike] - if t_next_post_spike == -1: - a = 1 - if t_next_post_spike >= 0 and ( t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): handle_pre_spike = False @@ -322,8 +319,8 @@ def Kpost_at_time(t, spikes, inclusive=True): t in pre_spike_times] def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, - Kpre_log=None, Kpost_log=None, - fname_snip="", title_snip=""): + Kpre_log=None, Kpost_log=None, fname_snip="", + title_snip=""): fig, ax = plt.subplots(nrows=3) n_spikes = len(pre_spikes) diff --git a/testsuite/regressiontests/issue-77.sli b/testsuite/regressiontests/issue-77.sli index 519fbfd94c..2acf4442ca 100644 --- a/testsuite/regressiontests/issue-77.sli +++ b/testsuite/regressiontests/issue-77.sli @@ -103,9 +103,10 @@ M_ERROR setverbosity /receptor_type 1 >> /iaf_psc_exp_multisynapse << /params << /tau_syn [ 1.0 ] >> /receptor_type 1 >> - /aeif_cond_alpha_multisynapse << /params << /tau_syn [ 2.0 ] >> + /aeif_cond_alpha_multisynapse << /params << /E_rev [ -20.0 ] + /tau_syn [ 2.0 ] >> /receptor_type 1 >> - /aeif_cond_beta_multisynapse << /params << /E_rev [ 0.0 ] + /aeif_cond_beta_multisynapse << /params << /E_rev [ -20.0 ] /tau_rise [ 1.0 ] /tau_decay [ 2.0 ] >> /receptor_type 1 >> From 66080b2a28831235e394944485190dc032323b81 Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Mon, 16 Jan 2023 23:46:00 +0100 Subject: [PATCH 11/14] Applied suggestions --- testsuite/pytests/test_stdp_synapse.py | 106 +++++++++---------------- 1 file changed, 39 insertions(+), 67 deletions(-) diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py index df853e74d7..df7006efeb 100644 --- a/testsuite/pytests/test_stdp_synapse.py +++ b/testsuite/pytests/test_stdp_synapse.py @@ -74,9 +74,7 @@ def init_params(self): # and ours, some low-probability events (say, coinciding # spikes) can well not have occured. 
To generate and # test every possible combination of pre/post order, we - # append some hardcoded spike sequences: - # pre: 1 5 6 7 9 11 12 13 - # post: 2 3 4 8 9 10 12 + # append some hardcoded spike sequences self.hardcoded_pre_times = np.array( [1, 5, 6, 7, 9, 11, 12, 13, 14.5, 16.1], dtype=float) self.hardcoded_post_times = np.array( @@ -88,21 +86,19 @@ def init_params(self): def do_nest_simulation_and_compare_to_reproduced_weight(self, fname_snip): pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest = self.do_the_nest_simulation() if DEBUG_PLOTS: - self.plot_weight_evolution(pre_spikes, post_spikes, - t_weight_by_nest, weight_by_nest, - fname_snip=fname_snip, + self.plot_weight_evolution(pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest, fname_snip=fname_snip, title_snip=self.nest_neuron_model + " (NEST)") - t_weight_reproduced_independently, weight_reproduced_independently = self.reproduce_weight_drift( - pre_spikes, post_spikes, self.init_weight, fname_snip=fname_snip) + t_weight_reproduced_independently, weight_reproduced_independently = self.reproduce_weight_drift(pre_spikes, + post_spikes, + self.init_weight, + fname_snip=fname_snip) # ``weight_by_nest`` containts only weight values at pre spike times, ``weight_reproduced_independently`` # contains the weight at pre *and* post times: check that weights are equal for pre spike times assert len(weight_by_nest) > 0 - np.testing.assert_allclose(t_weight_by_nest, - t_weight_reproduced_independently) - np.testing.assert_allclose(weight_by_nest, - weight_reproduced_independently) + np.testing.assert_allclose(t_weight_by_nest, t_weight_reproduced_independently) + np.testing.assert_allclose(weight_by_nest, weight_reproduced_independently) def do_the_nest_simulation(self): """ @@ -122,25 +118,22 @@ def do_the_nest_simulation(self): "poisson_generator", 2, params=({"rate": self.presynaptic_firing_rate, - "stop": ( - self.simulation_duration - self.hardcoded_trains_length)}, + "stop": (self.simulation_duration - self.hardcoded_trains_length)}, {"rate": self.postsynaptic_firing_rate, - "stop": ( - self.simulation_duration - self.hardcoded_trains_length)})) + "stop": (self.simulation_duration - self.hardcoded_trains_length)})) presynaptic_generator = generators[0] postsynaptic_generator = generators[1] wr = nest.Create('weight_recorder') - nest.CopyModel(self.synapse_model, self.synapse_model + "_rec", - {"weight_recorder": wr}) + nest.CopyModel(self.synapse_model, self.synapse_model + "_rec", {"weight_recorder": wr}) spike_senders = nest.Create( "spike_generator", 2, params=({"spike_times": self.hardcoded_pre_times - + self.simulation_duration - self.hardcoded_trains_length}, + + self.simulation_duration - self.hardcoded_trains_length}, {"spike_times": self.hardcoded_post_times - + self.simulation_duration - self.hardcoded_trains_length}) + + self.simulation_duration - self.hardcoded_trains_length}) ) pre_spike_generator = spike_senders[0] post_spike_generator = spike_senders[1] @@ -160,43 +153,36 @@ def do_the_nest_simulation(self): syn_spec={"synapse_model": "static_synapse"}) # The synapse of interest itself self.synapse_parameters["synapse_model"] += "_rec" - nest.Connect(presynaptic_neuron, postsynaptic_neuron, - syn_spec=self.synapse_parameters) + nest.Connect(presynaptic_neuron, postsynaptic_neuron, syn_spec=self.synapse_parameters) self.synapse_parameters["synapse_model"] = self.synapse_model nest.Simulate(self.simulation_duration) all_spikes = nest.GetStatus(spike_recorder, keys='events')[0] - pre_spikes = 
all_spikes['times'][ - all_spikes['senders'] == presynaptic_neuron.tolist()[0]] - post_spikes = all_spikes['times'][ - all_spikes['senders'] == postsynaptic_neuron.tolist()[0]] + pre_spikes = all_spikes['times'][all_spikes['senders'] == presynaptic_neuron.tolist()[0]] + post_spikes = all_spikes['times'][all_spikes['senders'] == postsynaptic_neuron.tolist()[0]] t_hist = nest.GetStatus(wr, "events")[0]["times"] weight = nest.GetStatus(wr, "events")[0]["weights"] return pre_spikes, post_spikes, t_hist, weight - def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight, - fname_snip=""): + def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight, fname_snip=""): """Independent, self-contained model of STDP""" def facilitate(w, Kpre, Wmax_=1.): - norm_w = (w / self.synapse_parameters["Wmax"]) + \ - (self.synapse_parameters["lambda"] * - pow(1 - (w / self.synapse_parameters["Wmax"]), - self.synapse_parameters["mu_plus"]) * Kpre) + norm_w = (w / self.synapse_parameters["Wmax"]) + ( + self.synapse_parameters["lambda"] * pow(1 - (w / self.synapse_parameters["Wmax"]), + self.synapse_parameters["mu_plus"]) * Kpre) if norm_w < 1.0: return norm_w * self.synapse_parameters["Wmax"] else: return self.synapse_parameters["Wmax"] def depress(w, Kpost): - norm_w = (w / self.synapse_parameters["Wmax"]) - \ - (self.synapse_parameters["alpha"] * - self.synapse_parameters["lambda"] * - pow(w / self.synapse_parameters["Wmax"], - self.synapse_parameters["mu_minus"]) * Kpost) + norm_w = (w / self.synapse_parameters["Wmax"]) - ( + self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow( + w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost) if norm_w > 0.0: return norm_w * self.synapse_parameters["Wmax"] else: @@ -234,7 +220,7 @@ def Kpost_at_time(t, spikes, inclusive=True): weight = initial_weight t_log = list() - w_log = dict() + w_log = list() Kpre_log = list() pre_spike_times = list() @@ -251,13 +237,11 @@ def Kpost_at_time(t, spikes, inclusive=True): else: t_next_post_spike = post_spikes_delayed[idx_next_post_spike] - if t_next_post_spike >= 0 and ( - t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): + if t_next_post_spike >= 0 and (t_next_post_spike + eps < t_next_pre_spike or t_next_pre_spike < 0): handle_pre_spike = False handle_post_spike = True idx_next_post_spike += 1 - elif t_next_pre_spike >= 0 and ( - t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): + elif t_next_pre_spike >= 0 and (t_next_post_spike > t_next_pre_spike + eps or t_next_post_spike < 0): handle_pre_spike = True handle_post_spike = False idx_next_pre_spike += 1 @@ -284,18 +268,15 @@ def Kpost_at_time(t, spikes, inclusive=True): t = t_next if handle_post_spike: - if not handle_pre_spike or abs( - t_next_post_spike - t_last_post_spike) > eps: + if not handle_pre_spike or abs(t_next_post_spike - t_last_post_spike) > eps: if abs(t_next_post_spike - t_last_pre_spike) > eps: weight = facilitate(weight, Kpre) if handle_pre_spike: Kpre += 1. 
- if not handle_post_spike or abs( - t_next_pre_spike - t_last_pre_spike) > eps: + if not handle_post_spike or abs(t_next_pre_spike - t_last_pre_spike) > eps: if abs(t_next_pre_spike - t_last_post_spike) > eps: - _Kpost = Kpost_at_time(t - self.dendritic_delay, - post_spikes, inclusive=False) + _Kpost = Kpost_at_time(t - self.dendritic_delay, post_spikes, inclusive=False) weight = depress(weight, _Kpost) t_last_pre_spike = t_next_pre_spike pre_spike_times.append(t) @@ -304,29 +285,23 @@ def Kpost_at_time(t, spikes, inclusive=True): t_last_post_spike = t_next_post_spike Kpre_log.append(Kpre) - w_log[t] = weight + w_log.append(weight) t_log.append(t) - Kpost_log = [Kpost_at_time(t - self.dendritic_delay, post_spikes) for t - in t_log] + Kpost_log = [Kpost_at_time(t - self.dendritic_delay, post_spikes) for t in t_log] if DEBUG_PLOTS: - self.plot_weight_evolution(pre_spikes, post_spikes, t_log, - w_log.values(), Kpre_log, Kpost_log, - fname_snip=fname_snip + "_ref", - title_snip="Reference") + self.plot_weight_evolution(pre_spikes, post_spikes, t_log, w_log, Kpre_log, Kpost_log, + fname_snip=fname_snip + "_ref", title_snip="Reference") - return pre_spike_times, [w for t, w in w_log.items() if - t in pre_spike_times] + return pre_spike_times, [w_log[i] for i, t in enumerate(t_log) if t in pre_spike_times] - def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, - Kpre_log=None, Kpost_log=None, fname_snip="", + def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, Kpre_log=None, Kpost_log=None, fname_snip="", title_snip=""): fig, ax = plt.subplots(nrows=3) n_spikes = len(pre_spikes) for i in range(n_spikes): - ax[0].plot(2 * [pre_spikes[i]], [0, 1], linewidth=2, color="blue", - alpha=.4) + ax[0].plot(2 * [pre_spikes[i]], [0, 1], linewidth=2, color="blue", alpha=.4) ax[0].set_ylabel("Pre spikes") ax0_ = ax[0].twinx() if Kpre_log: @@ -334,8 +309,7 @@ def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, n_spikes = len(post_spikes) for i in range(n_spikes): - ax[1].plot(2 * [post_spikes[i]], [0, 1], linewidth=2, color="red", - alpha=.4) + ax[1].plot(2 * [post_spikes[i]], [0, 1], linewidth=2, color="red", alpha=.4) ax1_ = ax[1].twinx() ax[1].set_ylabel("Post spikes") if Kpost_log: @@ -352,8 +326,7 @@ def plot_weight_evolution(self, pre_spikes, post_spikes, t_log, w_log, _ax.set_xlim(0., self.simulation_duration) fig.suptitle(title_snip) - fig.savefig("/tmp/nest_stdp_synapse_test" + fname_snip + ".png", - dpi=300) + fig.savefig("/tmp/nest_stdp_synapse_test" + fname_snip + ".png", dpi=300) plt.close(fig) def test_stdp_synapse(self): @@ -364,5 +337,4 @@ def test_stdp_synapse(self): for self.nest_neuron_model in ["iaf_psc_exp", "iaf_cond_exp"]: fname_snip = "_[nest_neuron_mdl=" + self.nest_neuron_model + "]" fname_snip += "_[dend_delay=" + str(self.dendritic_delay) + "]" - self.do_nest_simulation_and_compare_to_reproduced_weight( - fname_snip=fname_snip) + self.do_nest_simulation_and_compare_to_reproduced_weight(fname_snip=fname_snip) From 0d3ff00ebbac27d2ef2760afca302a5431ebc7aa Mon Sep 17 00:00:00 2001 From: Jan Vogelsang Date: Tue, 4 Apr 2023 09:55:02 +0200 Subject: [PATCH 12/14] Fixed formatting --- nestkernel/archiving_node.cpp | 5 +++-- testsuite/pytests/test_stdp_synapse.py | 8 ++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index 00190be138..4bbe09396a 100644 --- a/nestkernel/archiving_node.cpp +++ b/nestkernel/archiving_node.cpp @@ -195,8 +195,9 @@ 
   while ( history_.size() > 1 )
   {
     const double next_t_sp = history_[ 1 ].t_;
-    if ( history_.front().access_counter_ >= n_incoming_ and t_sp_ms - next_t_sp > max_delay_ +
-      kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_stdp_eps() )
+    if ( history_.front().access_counter_ >= n_incoming_
+      and t_sp_ms - next_t_sp
+        > max_delay_ + kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_stdp_eps() )
     {
       history_.pop_front();
     }
diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py
index 8964ae9b9c..54c2a02840 100644
--- a/testsuite/pytests/test_stdp_synapse.py
+++ b/testsuite/pytests/test_stdp_synapse.py
@@ -172,8 +172,8 @@ def reproduce_weight_drift(self, pre_spikes, post_spikes, initial_weight, fname_

         def facilitate(w, Kpre, Wmax_=1.):
             norm_w = (w / self.synapse_parameters["Wmax"]) + (
-                self.synapse_parameters["lambda"] * pow(1 - (w / self.synapse_parameters["Wmax"]),
-                                                        self.synapse_parameters["mu_plus"]) * Kpre)
+                self.synapse_parameters["lambda"] * pow(1 - (w / self.synapse_parameters["Wmax"]),
+                                                            self.synapse_parameters["mu_plus"]) * Kpre)
             if norm_w < 1.0:
                 return norm_w * self.synapse_parameters["Wmax"]
             else:
@@ -181,8 +181,8 @@ def facilitate(w, Kpre, Wmax_=1.):

         def depress(w, Kpost):
             norm_w = (w / self.synapse_parameters["Wmax"]) - (
-                self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow(
-                    w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost)
+                self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow(
+                        w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost)
             if norm_w > 0.0:
                 return norm_w * self.synapse_parameters["Wmax"]
             else:

From e58832e6a80343e2f1d84cec6773709d4d130059 Mon Sep 17 00:00:00 2001
From: Jan Vogelsang
Date: Tue, 4 Apr 2023 10:25:40 +0200
Subject: [PATCH 13/14] Fixed formatting

---
 testsuite/pytests/test_stdp_synapse.py | 20 ++++-----
 testsuite/test_debug.py                | 60 ++++++++++++++++++++++++++
 2 files changed, 69 insertions(+), 11 deletions(-)
 create mode 100644 testsuite/test_debug.py

diff --git a/testsuite/pytests/test_stdp_synapse.py b/testsuite/pytests/test_stdp_synapse.py
index 54c2a02840..8ab4d18a4a 100644
--- a/testsuite/pytests/test_stdp_synapse.py
+++ b/testsuite/pytests/test_stdp_synapse.py
@@ -89,10 +89,8 @@ def do_nest_simulation_and_compare_to_reproduced_weight(self, fname_snip):
         self.plot_weight_evolution(pre_spikes, post_spikes, t_weight_by_nest, weight_by_nest, fname_snip=fname_snip,
                                    title_snip=self.nest_neuron_model + " (NEST)")

-        t_weight_reproduced_independently, weight_reproduced_independently = self.reproduce_weight_drift(pre_spikes,
-                                                                                                         post_spikes,
-                                                                                                         self.init_weight,
-                                                                                                         fname_snip=fname_snip)
+        t_weight_reproduced_independently, weight_reproduced_independently = \
+            self.reproduce_weight_drift(pre_spikes, post_spikes, self.init_weight, fname_snip=fname_snip)

         # ``weight_by_nest`` contains only weight values at pre spike times, ``weight_reproduced_independently``
         # contains the weight at pre *and* post times: check that weights are equal for pre spike times
@@ -130,10 +128,10 @@ def do_the_nest_simulation(self):
         spike_senders = nest.Create(
             "spike_generator", 2,
             params=({"spike_times": self.hardcoded_pre_times
+                     + self.simulation_duration - self.hardcoded_trains_length},
                     {"spike_times": self.hardcoded_post_times
+                     + self.simulation_duration - self.hardcoded_trains_length})
         )
         pre_spike_generator = spike_senders[0]
         post_spike_generator = spike_senders[1]
@@ -180,9 +178,9 @@ def facilitate(w, Kpre, Wmax_=1.):
             return self.synapse_parameters["Wmax"]

         def depress(w, Kpost):
-            norm_w = (w / self.synapse_parameters["Wmax"]) - (
-                self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] * pow(
-                    w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost)
+            norm_w = (w / self.synapse_parameters["Wmax"]) - \
+                (self.synapse_parameters["alpha"] * self.synapse_parameters["lambda"] *
+                 pow(w / self.synapse_parameters["Wmax"], self.synapse_parameters["mu_minus"]) * Kpost)
             if norm_w > 0.0:
                 return norm_w * self.synapse_parameters["Wmax"]
             else:
diff --git a/testsuite/test_debug.py b/testsuite/test_debug.py
new file mode 100644
index 0000000000..a066f4c16b
--- /dev/null
+++ b/testsuite/test_debug.py
@@ -0,0 +1,60 @@
+import nest
+
+nest.rng_seed = 42
+
+model_params = {
+    'E_L': 0.0,           # Resting membrane potential(mV)
+    'C_m': 250.0,         # Capacity of the membrane(pF)
+    'tau_m': 10.0,        # Membrane time constant(ms)
+    't_ref': 0.5,         # Duration of refractory period(ms)
+    'V_th': 20.0,         # Threshold(mV)
+    'V_reset': 0.0,       # Reset Potential(mV)
+    # time const. postsynaptic excitatory currents(ms)
+    'tau_syn_ex': 0.326,
+    # time const. postsynaptic inhibitory currents(ms)
+    'tau_syn_in': 0.326,
+    'tau_minus': 30.0,    # time constant for STDP(depression)
+    # V can be randomly initialized see below
+    'V_m': 5.7            # mean value of membrane potential
+}
+
+E_neurons = nest.Create('iaf_psc_alpha', 1000, params=model_params)
+
+I_neurons = nest.Create('iaf_psc_alpha', 200, params=model_params)
+
+stimulus = nest.Create('poisson_generator', 1, {'rate': 300})
+
+recorder = nest.Create('spike_recorder')
+
+nest.Connect(stimulus, E_neurons, {'rule': 'all_to_all'},
+             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
+nest.Connect(stimulus, I_neurons, {'rule': 'all_to_all'},
+             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
+
+nest.Connect(E_neurons, E_neurons,
+             {'rule': 'fixed_indegree', 'indegree': 1000,
+              'allow_autapses': False, 'allow_multapses': True},
+             {'synapse_model': 'stdp_pl_synapse_hom_hpc', 'weight': 10})
+
+nest.Connect(I_neurons, E_neurons,
+             {'rule': 'fixed_indegree', 'indegree': 200,
+              'allow_autapses': False, 'allow_multapses': True},
+             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
+
+nest.Connect(E_neurons, I_neurons,
+             {'rule': 'fixed_indegree', 'indegree': 1000,
+              'allow_autapses': False, 'allow_multapses': True},
+             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
+
+nest.Connect(I_neurons, I_neurons,
+             {'rule': 'fixed_indegree', 'indegree': 200,
+              'allow_autapses': False, 'allow_multapses': True},
+             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
+
+nest.Connect(E_neurons + I_neurons, recorder, 'all_to_all', 'static_synapse_hpc')
+# nest.Connect(stimulus, recorder, 'all_to_all', 'static_synapse_hpc')
+
+nest.Simulate(100)
+
+all_spikes = nest.GetStatus(recorder, keys='events')[0]
+print(f"Num: {len(all_spikes['times'])}, Times: {all_spikes['times']}, Senders: {all_spikes['senders']}")

From d77aa835d576a34139afc70085f864f937b6d8b9 Mon Sep 17 00:00:00 2001
From: Jan Vogelsang
Date: Tue, 4 Apr 2023 10:31:45 +0200
Subject: [PATCH 14/14] Removing accidentally added file

---
 testsuite/test_debug.py | 60 -----------------------------------------
 1 file changed, 60 deletions(-)
 delete mode 100644 testsuite/test_debug.py

diff --git a/testsuite/test_debug.py b/testsuite/test_debug.py
deleted file mode 100644
index a066f4c16b..0000000000
--- a/testsuite/test_debug.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import nest
-
-nest.rng_seed = 42
-
-model_params = {
-    'E_L': 0.0,           # Resting membrane potential(mV)
-    'C_m': 250.0,         # Capacity of the membrane(pF)
-    'tau_m': 10.0,        # Membrane time constant(ms)
-    't_ref': 0.5,         # Duration of refractory period(ms)
-    'V_th': 20.0,         # Threshold(mV)
-    'V_reset': 0.0,       # Reset Potential(mV)
-    # time const. postsynaptic excitatory currents(ms)
-    'tau_syn_ex': 0.326,
-    # time const. postsynaptic inhibitory currents(ms)
-    'tau_syn_in': 0.326,
-    'tau_minus': 30.0,    # time constant for STDP(depression)
-    # V can be randomly initialized see below
-    'V_m': 5.7            # mean value of membrane potential
-}
-
-E_neurons = nest.Create('iaf_psc_alpha', 1000, params=model_params)
-
-I_neurons = nest.Create('iaf_psc_alpha', 200, params=model_params)
-
-stimulus = nest.Create('poisson_generator', 1, {'rate': 300})
-
-recorder = nest.Create('spike_recorder')
-
-nest.Connect(stimulus, E_neurons, {'rule': 'all_to_all'},
-             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
-nest.Connect(stimulus, I_neurons, {'rule': 'all_to_all'},
-             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
-
-nest.Connect(E_neurons, E_neurons,
-             {'rule': 'fixed_indegree', 'indegree': 1000,
-              'allow_autapses': False, 'allow_multapses': True},
-             {'synapse_model': 'stdp_pl_synapse_hom_hpc', 'weight': 10})
-
-nest.Connect(I_neurons, E_neurons,
-             {'rule': 'fixed_indegree', 'indegree': 200,
-              'allow_autapses': False, 'allow_multapses': True},
-             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
-
-nest.Connect(E_neurons, I_neurons,
-             {'rule': 'fixed_indegree', 'indegree': 1000,
-              'allow_autapses': False, 'allow_multapses': True},
-             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
-
-nest.Connect(I_neurons, I_neurons,
-             {'rule': 'fixed_indegree', 'indegree': 200,
-              'allow_autapses': False, 'allow_multapses': True},
-             {'synapse_model': 'static_synapse_hpc', 'weight': 1000})
-
-nest.Connect(E_neurons + I_neurons, recorder, 'all_to_all', 'static_synapse_hpc')
-# nest.Connect(stimulus, recorder, 'all_to_all', 'static_synapse_hpc')
-
-nest.Simulate(100)
-
-all_spikes = nest.GetStatus(recorder, keys='events')[0]
-print(f"Num: {len(all_spikes['times'])}, Times: {all_spikes['times']}, Senders: {all_spikes['senders']}")