
Commit

Fix
Routhleck committed May 9, 2024
1 parent 69812a2 commit d30a552
Showing 13 changed files with 14 additions and 15 deletions.
2 changes: 1 addition & 1 deletion brainpy/_src/analysis/highdim/slow_points.py
@@ -329,7 +329,7 @@ def find_fps_with_gd_method(
"""
# optimization settings
if optimizer is None:
- optimizer = optim.Adam(lr=optim.ExponentialDecay(0.2, 1, 0.9999),
+ optimizer = optim.Adam(lr=optim.ExponentialDecayLR(0.2, 1, 0.9999),
beta1=0.9, beta2=0.999, eps=1e-8)
else:
if not isinstance(optimizer, optim.Optimizer):
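Note: only the scheduler's class name changes here (ExponentialDecay becomes ExponentialDecayLR); the positional arguments are passed through unchanged. A minimal usage sketch, with argument values copied from the hunks in this commit rather than recommended settings:

  import brainpy as bp

  # learning-rate scheduler under its new name, wrapped in Adam
  lr = bp.optim.ExponentialDecayLR(lr=0.025, decay_steps=1, decay_rate=0.99975)
  optimizer = bp.optim.Adam(lr=lr, beta1=0.9, beta2=0.999, eps=1e-8)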
2 changes: 1 addition & 1 deletion brainpy/_src/analysis/lowdim/tests/test_bifurcation.py
@@ -39,7 +39,7 @@ def dw(w, t, V, a=0.7, b=0.8):
self.int_w = bp.odeint(dw, method=method)

def update(self, tdi):
- t, dt = tdi['t'], tdi['dt']
+ t, dt = bp.share['t'], bp.share['dt']
self.V.value = self.int_V(self.V, t, self.w, self.Iext, dt)
self.w.value = self.int_w(self.w, t, self.V, self.a, self.b, dt)
self.Iext[:] = 0.
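The same replacement recurs in several hunks below: the shared time t and step dt are read from the global bp.share container instead of the tdi argument. A minimal sketch of an update method written this way; the class and its state variable are hypothetical, only the bp.share access mirrors the hunks:

  import brainpy as bp
  import brainpy.math as bm

  class Clock(bp.DynamicalSystem):
    def __init__(self):
      super().__init__()
      self.t_last = bm.Variable(bm.zeros(1))  # hypothetical state, records the last time seen

    def update(self, tdi=None):
      # timing comes from the shared container, not from the (now unused) tdi argument
      t, dt = bp.share['t'], bp.share['dt']
      self.t_last.value = bm.ones(1) * t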
2 changes: 1 addition & 1 deletion brainpy/_src/dyn/rates/tests/test_nvar.py
@@ -11,7 +11,7 @@ class Test_NVAR(parameterized.TestCase):
def test_NVAR(self,mode):
bm.random.seed()
input=bm.random.randn(1,5)
- layer=bp.dnn.NVAR(num_in=5,
+ layer=bp.dyn.NVAR(num_in=5,
delay=10,
mode=mode)
if mode in [bm.NonBatchingMode()]:
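Here the layer only moves namespaces (bp.dnn.NVAR becomes bp.dyn.NVAR); the constructor arguments are unchanged. A minimal construction sketch using only the arguments and mode that appear in this test:

  import brainpy as bp
  import brainpy.math as bm

  bm.random.seed()
  layer = bp.dyn.NVAR(num_in=5, delay=10, mode=bm.NonBatchingMode())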
4 changes: 2 additions & 2 deletions brainpy/_src/initialize/tests/test_decay_inits.py
@@ -14,8 +14,8 @@
# visualization
def mat_visualize(matrix, cmap=None):
if cmap is None:
- cmap = plt.cm.get_cmap('coolwarm')
- plt.cm.get_cmap('coolwarm')
+ cmap = plt.colormaps.get_cmap('coolwarm')
+ plt.colormaps.get_cmap('coolwarm')
im = plt.matshow(matrix, cmap=cmap)
plt.colorbar(mappable=im, shrink=0.8, aspect=15)
plt.show()
@@ -95,7 +95,7 @@ def dV(self, V, t, h, n, Iext):
return dVdt

def update(self, tdi):
- t, dt = tdi.t, tdi.dt
+ t, dt = bp.share['t'], bp.share['dt']
V, h, n = self.integral(self.V, self.h, self.n, t, self.input, dt=dt)
self.spike.value = bm.logical_and(self.V < self.V_th, V >= self.V_th)
self.V.value = V
@@ -65,7 +65,7 @@ def test_nodes():
A.pre = B
B.pre = A

- net = bp.dyn.Network(A, B)
+ net = bp.Network(A, B)
abs_nodes = net.nodes(method='absolute')
rel_nodes = net.nodes(method='relative')
print()
4 changes: 2 additions & 2 deletions brainpy/_src/math/object_transform/tests/test_collector.py
@@ -7,7 +7,7 @@
import brainpy as bp


- class GABAa_without_Variable(bp.TwoEndConn):
+ class GABAa_without_Variable(bp.synapses.TwoEndConn):
def __init__(self, pre, post, conn, delay=0., g_max=0.1, E=-75.,
alpha=12., beta=0.1, T=1.0, T_duration=1.0, **kwargs):
super(GABAa_without_Variable, self).__init__(pre=pre, post=post, **kwargs)
@@ -192,7 +192,7 @@ def test_neu_nodes_1():
assert len(neu.nodes(method='relative', include_self=False)) == 1


- class GABAa_with_Variable(bp.TwoEndConn):
+ class GABAa_with_Variable(bp.synapses.TwoEndConn):
def __init__(self, pre, post, conn, delay=0., g_max=0.1, E=-75.,
alpha=12., beta=0.1, T=1.0, T_duration=1.0, **kwargs):
super(GABAa_with_Variable, self).__init__(pre=pre, post=post, **kwargs)
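Both test classes now reach the synapse base class through the bp.synapses namespace. A bare-bones skeleton of such a subclass, assuming the same constructor shape as the tests; the bodies are elided in the diff, so everything past the super() call is a placeholder:

  import brainpy as bp

  class DummySynapse(bp.synapses.TwoEndConn):
    def __init__(self, pre, post, conn, **kwargs):
      super().__init__(pre=pre, post=post, **kwargs)
      self.conn = conn  # placeholder; the real tests build gating variables and connectivity here

    def update(self):
      pass  # placeholder update step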
1 change: 0 additions & 1 deletion brainpy/_src/math/object_transform/tests/test_controls.py
@@ -234,7 +234,6 @@ def f1():
branches=[f1,
lambda: 2, lambda: 3,
lambda: 4, lambda: 5],
- dyn_vars=var_a,
show_code=True)

self.assertTrue(f(11) == 1)
2 changes: 1 addition & 1 deletion brainpy/_src/optimizers/tests/test_ModifyLr.py
@@ -28,7 +28,7 @@ def train_data():
class RNN(bp.DynamicalSystem):
def __init__(self, num_in, num_hidden):
super(RNN, self).__init__()
- self.rnn = bp.dnn.RNNCell(num_in, num_hidden, train_state=True)
+ self.rnn = bp.dyn.RNNCell(num_in, num_hidden, train_state=True)
self.out = bp.dnn.Dense(num_hidden, 1)

def update(self, x):
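The recurrent cell likewise moves from bp.dnn to bp.dyn. A sketch of the wrapper class from this test; the update body is not shown in the hunk, so the chained cell-then-readout call is an assumption:

  import brainpy as bp

  class RNN(bp.DynamicalSystem):
    def __init__(self, num_in, num_hidden):
      super().__init__()
      self.rnn = bp.dyn.RNNCell(num_in, num_hidden, train_state=True)
      self.out = bp.dnn.Dense(num_hidden, 1)

    def update(self, x):
      # assumed body: run the recurrent cell, then the dense readout
      return self.out(self.rnn(x))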
2 changes: 1 addition & 1 deletion brainpy/_src/train/back_propagation.py
@@ -111,7 +111,7 @@ def __init__(

# optimizer
if optimizer is None:
- lr = optim.ExponentialDecay(lr=0.025, decay_steps=1, decay_rate=0.99975)
+ lr = optim.ExponentialDecayLR(lr=0.025, decay_steps=1, decay_rate=0.99975)
optimizer = optim.Adam(lr=lr)
self.optimizer: optim.Optimizer = optimizer
if len(self.optimizer.vars_to_train) == 0:
2 changes: 1 addition & 1 deletion examples/dynamics_analysis/3d_reduced_trn_model.py
@@ -192,7 +192,7 @@ def derivative(self, V, y, z, t, Isyn):
return dvdt, dydt, dzdt

def update(self, tdi):
- t, dt = tdi['t'], tdi['dt']
+ t, dt = bp.share['t'], bp.share['dt']
if isinstance(self.int_V, bp.ode.ExponentialEuler):
V = self.int_V(self.V, t, self.y, self.z, self.input, dt)
self.y.value = self.int_y(self.y, t, self.V, dt)
2 changes: 1 addition & 1 deletion examples/dynamics_analysis/highdim_RNN_Analysis.py
@@ -151,7 +151,7 @@ def loss(predictions, targets):
tolerance=1e-5,
num_batch=200,
num_opt=int(2e4),
- optimizer=bp.optim.Adam(lr=bp.optim.ExponentialDecay(0.01, 2, 0.9999)),
+ optimizer=bp.optim.Adam(lr=bp.optim.ExponentialDecayLR(0.01, 2, 0.9999)),
)
finder.filter_loss(tolerance=1e-5)
finder.keep_unique(tolerance=0.005)
2 changes: 1 addition & 1 deletion examples/training_ann_models/mnist_ResNet.py
@@ -222,7 +222,7 @@ def loss_fun(X, Y, fit=True):
grad_fun = bm.grad(loss_fun, grad_vars=net.train_vars().unique(), has_aux=True, return_value=True)

# optimizer
- optimizer = bp.optim.Adam(bp.optim.ExponentialDecay(args.lr, 1, 0.9999),
+ optimizer = bp.optim.Adam(bp.optim.ExponentialDecayLR(args.lr, 1, 0.9999),
train_vars=net.train_vars().unique())

@bm.jit

0 comments on commit d30a552
