Skip to content
This repository has been archived by the owner on Nov 22, 2022. It is now read-only.

Commit

Permalink
(Easy) Add eps to Adam
Browse files Browse the repository at this point in the history
Summary: The Adam optimizer has a parameter called epsilon, which is useful for numeric stability. It is useful to set epsilon explicitly, especially for unit tests. This diff adds epsilon to Adam's config.

Differential Revision: D16573250

fbshipit-source-id: c371dd209c0909e72c9b69e714dcaaf809fc5f15
  • Loading branch information
Kshitiz Malik authored and facebook-github-bot committed Jul 30, 2019
1 parent 5a07441 commit 0554bc6
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions pytext/optimizer/optimizers.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,14 @@ class Adam(torch.optim.Adam, Optimizer):
class Config(Optimizer.Config):
    """Tunable hyper-parameters for the Adam optimizer."""

    # Learning rate for parameter updates.
    lr: float = 0.001
    # L2 penalty coefficient, forwarded to torch.optim.Adam as weight_decay.
    weight_decay: float = 0.00001
    # Term added to the denominator for numerical stability; exposed in the
    # config so it can be set explicitly (e.g. pinned in unit tests).
    eps: float = 1e-8

def __init__(self, parameters, lr, weight_decay, eps):
    """Build the Adam optimizer.

    Args:
        parameters: iterable of model parameters (or param groups) to optimize.
        lr: learning rate.
        weight_decay: L2 penalty coefficient.
        eps: term added to the denominator for numerical stability.
    """
    # The diff residue kept both the old 3-argument definition and the new
    # eps-aware one; only the eps-aware version is live, so the stale
    # duplicate is dropped here.
    super().__init__(parameters, lr=lr, weight_decay=weight_decay, eps=eps)

@classmethod
def from_config(cls, config: Config, model: torch.nn.Module):
    """Construct an Adam instance from its Config and a model.

    Args:
        config: the Adam.Config carrying lr, weight_decay, and eps.
        model: the model whose parameters will be optimized.

    Returns:
        A new instance of this optimizer class.
    """
    # The stale pre-change return (without config.eps) executed first and
    # made the eps-forwarding return unreachable; keep only the version
    # that forwards eps, which is the whole point of the change.
    return cls(model.parameters(), config.lr, config.weight_decay, config.eps)


class SGD(torch.optim.SGD, Optimizer):
Expand Down

0 comments on commit 0554bc6

Please sign in to comment.