diff --git a/numpy_ml/neural_nets/initializers/initializers.py b/numpy_ml/neural_nets/initializers/initializers.py
index 5e4b59e..03d37ef 100644
--- a/numpy_ml/neural_nets/initializers/initializers.py
+++ b/numpy_ml/neural_nets/initializers/initializers.py
@@ -122,13 +122,14 @@ def init_from_dict(self):
             raise ValueError("Must have `hyperparameters` key: {}".format(S))
 
         if sc and sc["id"] == "ConstantScheduler":
-            scheduler = ConstantScheduler().set_params(sc)
+            scheduler = ConstantScheduler()
         elif sc and sc["id"] == "ExponentialScheduler":
-            scheduler = ExponentialScheduler().set_params(sc)
+            scheduler = ExponentialScheduler()
         elif sc and sc["id"] == "NoamScheduler":
-            scheduler = NoamScheduler().set_params(sc)
+            scheduler = NoamScheduler()
         elif sc:
             raise NotImplementedError("{}".format(sc["id"]))
+        scheduler.set_params(sc)
         return scheduler
@@ -182,15 +183,16 @@ def init_from_dict(self):
             raise ValueError("Must have `hyperparemeters` key: {}".format(O))
 
         if op and op["id"] == "SGD":
-            optimizer = SGD().set_params(op, cc)
+            optimizer = SGD()
         elif op and op["id"] == "RMSProp":
-            optimizer = RMSProp().set_params(op, cc)
+            optimizer = RMSProp()
         elif op and op["id"] == "AdaGrad":
-            optimizer = AdaGrad().set_params(op, cc)
+            optimizer = AdaGrad()
         elif op and op["id"] == "Adam":
-            optimizer = Adam().set_params(op, cc)
+            optimizer = Adam()
         elif op:
             raise NotImplementedError("{}".format(op["id"]))
+        optimizer.set_params(op, cc)
         return optimizer
diff --git a/numpy_ml/neural_nets/layers/layers.py b/numpy_ml/neural_nets/layers/layers.py
index 39aa9fa..44e1a3f 100644
--- a/numpy_ml/neural_nets/layers/layers.py
+++ b/numpy_ml/neural_nets/layers/layers.py
@@ -119,12 +119,12 @@ def set_params(self, summary_dict):
             if k in self.hyperparameters:
                 if k == "act_fn":
                     layer.act_fn = ActivationInitializer(v)()
-                if k == "optimizer":
+                elif k == "optimizer":
                     layer.optimizer = OptimizerInitializer(sd[k])()
-                if k not in ["wrappers", "optimizer"]:
-                    setattr(layer, k, v)
-                if k == "wrappers":
+                elif k == "wrappers":
                     layer = init_wrappers(layer, sd[k])
+                elif k not in ["wrappers", "optimizer"]:
+                    setattr(layer, k, v)
         return layer
 
     def summary(self):
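Not part of the patch: a minimal, illustrative sketch of the dispatch-then-configure pattern the first two hunks move to, in which `set_params` runs once after the `if`/`elif` chain instead of being chained onto each constructor. The helper name `_make_scheduler` and the import path are assumptions for illustration, not code taken from the repository.

    # Hypothetical helper mirroring SchedulerInitializer.init_from_dict after this patch.
    # Assumes the scheduler classes are importable from numpy_ml.neural_nets.schedulers.
    from numpy_ml.neural_nets.schedulers import (
        ConstantScheduler,
        ExponentialScheduler,
        NoamScheduler,
    )

    def _make_scheduler(sc):
        # Dispatch on the "id" field only; no per-branch configuration.
        if sc["id"] == "ConstantScheduler":
            scheduler = ConstantScheduler()
        elif sc["id"] == "ExponentialScheduler":
            scheduler = ExponentialScheduler()
        elif sc["id"] == "NoamScheduler":
            scheduler = NoamScheduler()
        else:
            raise NotImplementedError("{}".format(sc["id"]))
        scheduler.set_params(sc)  # configure exactly once, outside the if/elif chain
        return scheduler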