From 21b30fff0db2c574814dccd56848fbdae9e0f3a3 Mon Sep 17 00:00:00 2001
From: yunhui <38786521+CloudyDory@users.noreply.github.com>
Date: Thu, 22 Feb 2024 22:15:44 +0800
Subject: [PATCH 1/2] Fix delayvar not correct in concat mode

---
 brainpy/_src/math/delayvars.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/brainpy/_src/math/delayvars.py b/brainpy/_src/math/delayvars.py
index 390e04dd7..676e4286b 100644
--- a/brainpy/_src/math/delayvars.py
+++ b/brainpy/_src/math/delayvars.py
@@ -473,7 +473,7 @@ def update(self, value: Union[numbers.Number, Array, jax.Array] = None):
 
     elif self.update_method == CONCAT_UPDATE:
       if self.num_delay_step >= 2:
-        self.data.value = concatenate([expand_dims(value, 0), self.data[1:]], axis=0)
+        self.data.value = concatenate([expand_dims(value, 0), self.data[:-1]], axis=0)
       else:
         self.data[:] = value
 

From e5b66abb581ea495cbd6834a0e4544e2abdc1e86 Mon Sep 17 00:00:00 2001
From: yunhui <38786521+CloudyDory@users.noreply.github.com>
Date: Fri, 29 Mar 2024 13:00:43 +0800
Subject: [PATCH 2/2] Update AdamW in optimizer.py

Fixed "amsgrad" is used before being defined.
---
 brainpy/_src/optimizers/optimizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/brainpy/_src/optimizers/optimizer.py b/brainpy/_src/optimizers/optimizer.py
index c2aec25a0..75dfef124 100644
--- a/brainpy/_src/optimizers/optimizer.py
+++ b/brainpy/_src/optimizers/optimizer.py
@@ -901,6 +901,7 @@ def __init__(
       amsgrad: bool = False,
       name: Optional[str] = None,
   ):
+    self.amsgrad = amsgrad
     super(AdamW, self).__init__(lr=lr,
                                 train_vars=train_vars,
                                 weight_decay=weight_decay,
@@ -919,7 +920,6 @@ def __init__(
     self.beta2 = beta2
     self.eps = eps
     self.weight_decay = weight_decay
-    self.amsgrad = amsgrad
 
   def __repr__(self):
     return (f"{self.__class__.__name__}(lr={self.lr}, "