Revert "Merge pull request chainer#3488 from Crissman/opt-setup"
This reverts commit 016cffa, reversing changes made to a054cd6.
toslunar committed Dec 22, 2017
1 parent 7d0d6e7 commit 84e6762
Showing 10 changed files with 19 additions and 27 deletions.
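In effect, this revert removes the optional model/link argument from GradientMethod and all of its subclasses, so an optimizer must again be attached to a link with an explicit setup() call. A minimal usage sketch of the difference, using a hypothetical chainer.links.Linear link purely for illustration:

    import chainer
    from chainer import optimizers

    # Hypothetical link, used only for illustration.
    model = chainer.links.Linear(3, 2)

    # Before this revert (behaviour introduced by chainer#3488), the link
    # could be passed directly to the optimizer constructor:
    #     optimizer = optimizers.SGD(lr=0.01, model=model)

    # After this revert, the constructor takes only hyperparameters and the
    # link is registered with an explicit setup() call, as in earlier releases.
    optimizer = optimizers.SGD(lr=0.01)
    optimizer.setup(model)
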
4 changes: 1 addition & 3 deletions chainer/optimizer.py
@@ -525,12 +525,10 @@ class GradientMethod(Optimizer):
     """

-    def __init__(self, link=None):
+    def __init__(self):
         super(GradientMethod, self).__init__()
         self.hyperparam = Hyperparameter()
         self._use_fp32_update = False
-        if isinstance(link, link_module.Link):
-            self.setup(link)

     def setup(self, link):
         super(GradientMethod, self).setup(link)

4 changes: 2 additions & 2 deletions chainer/optimizers/ada_delta.py
@@ -84,8 +84,8 @@ class AdaDelta(optimizer.GradientMethod):
     """

     def __init__(self, rho=_default_hyperparam.rho,
-                 eps=_default_hyperparam.eps, model=None):
-        super(AdaDelta, self).__init__(model)
+                 eps=_default_hyperparam.eps):
+        super(AdaDelta, self).__init__()
         self.hyperparam.rho = rho
         self.hyperparam.eps = eps

5 changes: 2 additions & 3 deletions chainer/optimizers/ada_grad.py
@@ -74,9 +74,8 @@ class AdaGrad(optimizer.GradientMethod):
     """

-    def __init__(self, lr=_default_hyperparam.lr,
-                 eps=_default_hyperparam.eps, model=None):
-        super(AdaGrad, self).__init__(model)
+    def __init__(self, lr=_default_hyperparam.lr, eps=_default_hyperparam.eps):
+        super(AdaGrad, self).__init__()
         self.hyperparam.lr = lr
         self.hyperparam.eps = eps

6 changes: 2 additions & 4 deletions chainer/optimizers/adam.py
@@ -151,10 +151,8 @@ def __init__(self,
                  beta2=_default_hyperparam.beta2,
                  eps=_default_hyperparam.eps,
                  eta=_default_hyperparam.eta,
-                 weight_decay_rate=_default_hyperparam.weight_decay_rate,
-                 model=None):
-        super(Adam, self).__init__(model)
-
+                 weight_decay_rate=_default_hyperparam.weight_decay_rate):
+        super(Adam, self).__init__()
         self.hyperparam.alpha = alpha
         self.hyperparam.beta1 = beta1
         self.hyperparam.beta2 = beta2

4 changes: 2 additions & 2 deletions chainer/optimizers/momentum_sgd.py
@@ -69,8 +69,8 @@ class MomentumSGD(optimizer.GradientMethod):
     """

     def __init__(self, lr=_default_hyperparam.lr,
-                 momentum=_default_hyperparam.momentum, model=None):
-        super(MomentumSGD, self).__init__(model)
+                 momentum=_default_hyperparam.momentum):
+        super(MomentumSGD, self).__init__()
         self.hyperparam.lr = lr
         self.hyperparam.momentum = momentum

4 changes: 2 additions & 2 deletions chainer/optimizers/nesterov_ag.py
@@ -76,8 +76,8 @@ class NesterovAG(optimizer.GradientMethod):
     """

     def __init__(self, lr=_default_hyperparam.lr,
-                 momentum=_default_hyperparam.momentum, model=None):
-        super(NesterovAG, self).__init__(model)
+                 momentum=_default_hyperparam.momentum):
+        super(NesterovAG, self).__init__()
         self.hyperparam.lr = lr
         self.hyperparam.momentum = momentum

5 changes: 2 additions & 3 deletions chainer/optimizers/rmsprop.py
@@ -91,9 +91,8 @@ class RMSprop(optimizer.GradientMethod):
     """

     def __init__(self, lr=_default_hyperparam.lr,
-                 alpha=_default_hyperparam.alpha, eps=_default_hyperparam.eps,
-                 model=None):
-        super(RMSprop, self).__init__(model)
+                 alpha=_default_hyperparam.alpha, eps=_default_hyperparam.eps):
+        super(RMSprop, self).__init__()
         self.hyperparam.lr = lr
         self.hyperparam.alpha = alpha
         self.hyperparam.eps = eps

5 changes: 2 additions & 3 deletions chainer/optimizers/rmsprop_graves.py
@@ -102,9 +102,8 @@ class RMSpropGraves(optimizer.GradientMethod):
     def __init__(self, lr=_default_hyperparam.lr,
                  alpha=_default_hyperparam.alpha,
                  momentum=_default_hyperparam.momentum,
-                 eps=_default_hyperparam.eps,
-                 model=None):
-        super(RMSpropGraves, self).__init__(model)
+                 eps=_default_hyperparam.eps):
+        super(RMSpropGraves, self).__init__()
         self.hyperparam.lr = lr
         self.hyperparam.alpha = alpha
         self.hyperparam.momentum = momentum

4 changes: 2 additions & 2 deletions chainer/optimizers/sgd.py
@@ -50,8 +50,8 @@ class SGD(optimizer.GradientMethod):
     """

-    def __init__(self, lr=_default_hyperparam.lr, model=None):
-        super(SGD, self).__init__(model)
+    def __init__(self, lr=_default_hyperparam.lr):
+        super(SGD, self).__init__()
         self.hyperparam.lr = lr

     lr = optimizer.HyperparameterProxy('lr')

5 changes: 2 additions & 3 deletions chainer/optimizers/smorms3.py
@@ -87,9 +87,8 @@ class SMORMS3(optimizer.GradientMethod):
     """

-    def __init__(self, lr=_default_hyperparam.lr,
-                 eps=_default_hyperparam.eps, model=None):
-        super(SMORMS3, self).__init__(model)
+    def __init__(self, lr=_default_hyperparam.lr, eps=_default_hyperparam.eps):
+        super(SMORMS3, self).__init__()
         self.hyperparam.lr = lr
         self.hyperparam.eps = eps

