Skip to content

Commit

Permalink
Fix default value for decay parameter in cycl-SGD (#1361)
Browse files Browse the repository at this point in the history
  • Loading branch information
pmichel31415 authored and neubig committed Apr 25, 2018
1 parent 4f46736 commit f129862
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion dynet/training.h
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ struct CyclicalSGDTrainer : public Trainer {
 * \param gamma Learning rate upper bound decay parameter (1.0 = no decay)
* \param edecay Learning rate decay parameter. Ideally you shouldn't use this with cyclical learning rate since decay is already handled by \f$\gamma\f$
*/
explicit CyclicalSGDTrainer(ParameterCollection& m, float learning_rate_min = 0.01, float learning_rate_max = 0.1, float step_size = 2000, float gamma = 0.0, float edecay = 0.0) : Trainer(m, learning_rate_min), e_min(learning_rate_min), e_max(learning_rate_max), step_size(step_size), gamma(gamma), it(0) {}
explicit CyclicalSGDTrainer(ParameterCollection& m, float learning_rate_min = 0.01, float learning_rate_max = 0.1, float step_size = 2000, float gamma = 1.0, float edecay = 0.0) : Trainer(m, learning_rate_min), e_min(learning_rate_min), e_max(learning_rate_max), step_size(step_size), gamma(gamma), it(0) {}
void restart() override {};
using Trainer::restart;
void update() override {
Expand Down
4 changes: 2 additions & 2 deletions python/_dynet.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -6061,10 +6061,10 @@ cdef class CyclicalSGDTrainer(Trainer):
learning_rate_min (number): Lower learning rate (default: {0.01})
learning_rate_max (number): Upper learning rate (default: {0.1})
step_size (number): Period of the triangular function in number of iterations (__not__ epochs). According to the original paper, this should be set around (2-8) x (training iterations in epoch) (default: {2000})
gamma (number): Learning rate upper bound decay parameter (default: {0.0})
gamma (number): Learning rate upper bound decay parameter (1.0 = no decay) (default: {1.0})
"""
cdef CCyclicalSGDTrainer *thischildptr
def __cinit__(self, ParameterCollection m, float learning_rate_min = 0.01, float learning_rate_max = 0.1, float step_size = 2000, float gamma = 0.0):
def __cinit__(self, ParameterCollection m, float learning_rate_min = 0.01, float learning_rate_max = 0.1, float step_size = 2000, float gamma = 1.0):
self.thischildptr = self.thisptr = new CCyclicalSGDTrainer(m.thisptr, learning_rate_min, learning_rate_max, step_size, gamma)
cpdef update(self):
self.thischildptr.update()
Expand Down

0 comments on commit f129862

Please sign in to comment.