From 51d82a7812e0be7cfda59a724a8d9dad0958cce3 Mon Sep 17 00:00:00 2001
From: RaulMurillo
Date: Thu, 4 Mar 2021 14:43:08 +0100
Subject: [PATCH 1/2] Fixed bug in layers.set_params()

---
 numpy_ml/neural_nets/layers/layers.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/numpy_ml/neural_nets/layers/layers.py b/numpy_ml/neural_nets/layers/layers.py
index 39aa9fa..44e1a3f 100644
--- a/numpy_ml/neural_nets/layers/layers.py
+++ b/numpy_ml/neural_nets/layers/layers.py
@@ -119,12 +119,12 @@ def set_params(self, summary_dict):
             if k in self.hyperparameters:
                 if k == "act_fn":
                     layer.act_fn = ActivationInitializer(v)()
-                if k == "optimizer":
+                elif k == "optimizer":
                     layer.optimizer = OptimizerInitializer(sd[k])()
-                if k not in ["wrappers", "optimizer"]:
-                    setattr(layer, k, v)
-                if k == "wrappers":
+                elif k == "wrappers":
                     layer = init_wrappers(layer, sd[k])
+                elif k not in ["wrappers", "optimizer"]:
+                    setattr(layer, k, v)
         return layer
 
     def summary(self):

From 3db6423e9d7f37cc99fb1c35932a7d4d48093fec Mon Sep 17 00:00:00 2001
From: RaulMurillo
Date: Thu, 4 Mar 2021 16:12:09 +0100
Subject: [PATCH 2/2] Fixed bug in NN initializers

---
 .../neural_nets/initializers/initializers.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/numpy_ml/neural_nets/initializers/initializers.py b/numpy_ml/neural_nets/initializers/initializers.py
index 5e4b59e..03d37ef 100644
--- a/numpy_ml/neural_nets/initializers/initializers.py
+++ b/numpy_ml/neural_nets/initializers/initializers.py
@@ -122,13 +122,14 @@ def init_from_dict(self):
             raise ValueError("Must have `hyperparameters` key: {}".format(S))
 
         if sc and sc["id"] == "ConstantScheduler":
-            scheduler = ConstantScheduler().set_params(sc)
+            scheduler = ConstantScheduler()
         elif sc and sc["id"] == "ExponentialScheduler":
-            scheduler = ExponentialScheduler().set_params(sc)
+            scheduler = ExponentialScheduler()
         elif sc and sc["id"] == "NoamScheduler":
-            scheduler = NoamScheduler().set_params(sc)
+            scheduler = NoamScheduler()
         elif sc:
             raise NotImplementedError("{}".format(sc["id"]))
+        scheduler.set_params(sc)
         return scheduler
 
@@ -182,15 +183,16 @@ def init_from_dict(self):
             raise ValueError("Must have `hyperparemeters` key: {}".format(O))
 
         if op and op["id"] == "SGD":
-            optimizer = SGD().set_params(op, cc)
+            optimizer = SGD()
         elif op and op["id"] == "RMSProp":
-            optimizer = RMSProp().set_params(op, cc)
+            optimizer = RMSProp()
         elif op and op["id"] == "AdaGrad":
-            optimizer = AdaGrad().set_params(op, cc)
+            optimizer = AdaGrad()
         elif op and op["id"] == "Adam":
-            optimizer = Adam().set_params(op, cc)
+            optimizer = Adam()
         elif op:
             raise NotImplementedError("{}".format(op["id"]))
+        optimizer.set_params(op, cc)
         return optimizer