From 0554bc6d80f46cdde931ad761ab6865fdee4ab93 Mon Sep 17 00:00:00 2001
From: Kshitiz Malik
Date: Tue, 30 Jul 2019 16:34:50 -0700
Subject: [PATCH] (Easy) Add eps to Adam

Summary:
The Adam optimizer has a parameter called epsilon which is useful for numeric
stability. It's useful to explicitly set epsilon, especially for unit tests.
This diff adds epsilon to Adam's config.

Differential Revision: D16573250

fbshipit-source-id: c371dd209c0909e72c9b69e714dcaaf809fc5f15
---
 pytext/optimizer/optimizers.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/pytext/optimizer/optimizers.py b/pytext/optimizer/optimizers.py
index d1ed3a3c4..58682eeb6 100644
--- a/pytext/optimizer/optimizers.py
+++ b/pytext/optimizer/optimizers.py
@@ -34,13 +34,14 @@ class Adam(torch.optim.Adam, Optimizer):
     class Config(Optimizer.Config):
         lr: float = 0.001
         weight_decay: float = 0.00001
+        eps: float = 1e-8
 
-    def __init__(self, parameters, lr, weight_decay):
-        super().__init__(parameters, lr=lr, weight_decay=weight_decay)
+    def __init__(self, parameters, lr, weight_decay, eps):
+        super().__init__(parameters, lr=lr, weight_decay=weight_decay, eps=eps)
 
     @classmethod
     def from_config(cls, config: Config, model: torch.nn.Module):
-        return cls(model.parameters(), config.lr, config.weight_decay)
+        return cls(model.parameters(), config.lr, config.weight_decay, config.eps)
 
 
 class SGD(torch.optim.SGD, Optimizer):
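
Illustrative sketch (not part of the patch): how the new eps field is expected to flow
from the Config through from_config into torch.optim.Adam. SimpleModel is a hypothetical
stand-in for any torch.nn.Module, and it is assumed here that Adam.Config can be built
with keyword arguments, as PyText ConfigBase configs typically can; the values mirror the
defaults added above.

    # Sketch only; SimpleModel is a hypothetical placeholder module.
    import torch
    from pytext.optimizer.optimizers import Adam

    class SimpleModel(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(4, 2)

    # Config values mirror the defaults added in this diff.
    config = Adam.Config(lr=0.001, weight_decay=0.00001, eps=1e-8)
    optimizer = Adam.from_config(config, SimpleModel())

    # torch.optim.Adam records eps in its defaults, so the value can be asserted
    # directly in a unit test, which is the motivation given in the summary.
    assert optimizer.defaults["eps"] == 1e-8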