Commit

update examples from master
thomwolf committed Nov 17, 2018
2 parents c8cba67 + 02173a1 · commit 47a7d4e
Showing 3 changed files with 11 additions and 6 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -395,7 +395,7 @@ python run_squad.py \
   --num_train_epochs 2.0 \
   --max_seq_length 384 \
   --doc_stride 128 \
-  --output_dir ../debug_squad/
+  --output_dir /tmp/debug_squad/
 ```
 
 Training with the previous hyper-parameters gave us the following results:
13 changes: 8 additions & 5 deletions examples/run_classifier.py
@@ -327,11 +327,14 @@ def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):
         if name_opti != name_model:
             logger.error("name_opti != name_model: {} {}".format(name_opti, name_model))
             raise ValueError
-        if test_nan and torch.isnan(param_model.grad).sum() > 0:
-            is_nan = True
-        if param_opti.grad is None:
-            param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))
-        param_opti.grad.data.copy_(param_model.grad.data)
+        if param_model.grad is not None:
+            if test_nan and torch.isnan(param_model.grad).sum() > 0:
+                is_nan = True
+            if param_opti.grad is None:
+                param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))
+            param_opti.grad.data.copy_(param_model.grad.data)
+        else:
+            param_opti.grad = None
     return is_nan
 
 def main():
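For context, the helper being patched keeps an fp32 copy of the parameters for the optimizer when training with `--fp16` or `--optimize_on_cpu`, and copies each gradient from the model's parameters to that copy. Parameters that take no part in a given backward pass have `grad is None`, so the old code crashed on `torch.isnan(param_model.grad)` and `param_model.grad.data` in that case; the new guard copies only when a gradient exists and mirrors the `None` otherwise. Below is a sketch of the full function as it reads after this change; the signature, docstring, and loop header are reconstructed from the surrounding file, so treat it as a sketch rather than verbatim source:

```python
import logging
import torch

logger = logging.getLogger(__name__)

def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):
    """ Utility function for optimize_on_cpu and 16-bits training.
        Copy the gradients of the model parameters to the CPU/fp32 copy
        kept for the optimizer. Returns True if a NaN gradient was seen.
    """
    is_nan = False
    for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model):
        if name_opti != name_model:
            logger.error("name_opti != name_model: {} {}".format(name_opti, name_model))
            raise ValueError
        if param_model.grad is not None:
            if test_nan and torch.isnan(param_model.grad).sum() > 0:
                is_nan = True
            if param_opti.grad is None:
                # lazily allocate a gradient buffer of the right shape on first use
                param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))
            param_opti.grad.data.copy_(param_model.grad.data)
        else:
            # this parameter received no gradient this step; mirror that on the copy
            param_opti.grad = None
    return is_nan
```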
2 changes: 2 additions & 0 deletions examples/run_squad.py
@@ -693,6 +693,8 @@ def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):
             if param_opti.grad is None:
                 param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))
             param_opti.grad.data.copy_(param_model.grad.data)
+        else:
+            param_opti.grad = None
     return is_nan
 
 def main():
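For a sense of where this helper sits, the sketch below shows roughly how the examples' training loops call it after `loss.backward()`. This is an illustrative reconstruction of the call site, not verbatim example code; `param_optimizer`, `args.loss_scale`, and `copy_optimizer_params_to_model` are assumed to be the names these examples use for the fp32 parameter copy, the static loss scale, and the reverse weight copy:

```python
# After loss.backward() on the (possibly fp16) model:
if args.fp16 and args.loss_scale != 1.0:
    # undo static loss scaling before copying gradients to the fp32 copy
    for param in model.parameters():
        if param.grad is not None:
            param.grad.data = param.grad.data / args.loss_scale

is_nan = set_optimizer_params_grad(param_optimizer, model.named_parameters(), test_nan=True)
if is_nan:
    # NaN in the gradients: shrink the loss scale and skip this update
    logger.info("FP16 TRAINING: Nan in gradients, reducing loss scaling")
    args.loss_scale = args.loss_scale / 2
    model.zero_grad()
else:
    optimizer.step()
    # write the updated fp32 weights back into the model
    copy_optimizer_params_to_model(model.named_parameters(), param_optimizer)
    model.zero_grad()
```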
