From 3202896ec9d87089d21707a069bca63976713d98 Mon Sep 17 00:00:00 2001
From: Anton Lozhkov
Date: Wed, 6 Oct 2021 19:40:51 +0300
Subject: [PATCH] Fix nan-loss condition (#13911)

---
 src/transformers/trainer.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index 68788091f39f8..ff8201b50003c 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -1315,10 +1315,13 @@ def train(
                 else:
                     tr_loss_step = self.training_step(model, inputs)
 
-                if args.logging_nan_inf_filter and not is_torch_tpu_available():
-                    if torch.isnan(tr_loss_step) or torch.isinf(tr_loss_step):
-                        # if loss is nan or inf simply add the average of previous logged losses
-                        tr_loss += tr_loss / (1 + self.state.global_step - self._globalstep_last_logged)
+                if (
+                    args.logging_nan_inf_filter
+                    and not is_torch_tpu_available()
+                    and (torch.isnan(tr_loss_step) or torch.isinf(tr_loss_step))
+                ):
+                    # if loss is nan or inf simply add the average of previous logged losses
+                    tr_loss += tr_loss / (1 + self.state.global_step - self._globalstep_last_logged)
                 else:
                     tr_loss += tr_loss_step
 
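
Why the restructuring matters: with the old nesting, enabling logging_nan_inf_filter meant a finite step loss matched neither the inner nan/inf branch nor the outer else, so it was silently dropped from the running tr_loss. Below is a minimal, self-contained sketch of the two control flows, not the Trainer code itself: plain Python, with math.isnan/math.isinf standing in for the torch checks, the TPU check folded into a single filter_enabled flag, and made-up values for the loss and logging state.

import math


def accumulate_old(tr_loss, tr_loss_step, filter_enabled, steps_since_log):
    # Pre-fix nesting: when the filter is enabled and the step loss is finite,
    # neither the inner "if" nor the outer "else" runs, so the step loss is
    # silently dropped from the running total.
    if filter_enabled:
        if math.isnan(tr_loss_step) or math.isinf(tr_loss_step):
            tr_loss += tr_loss / (1 + steps_since_log)
    else:
        tr_loss += tr_loss_step
    return tr_loss


def accumulate_new(tr_loss, tr_loss_step, filter_enabled, steps_since_log):
    # Post-fix flattened condition: only a nan/inf step loss is replaced by the
    # average of previously logged losses; finite losses fall through to "else"
    # and are accumulated as before.
    if filter_enabled and (math.isnan(tr_loss_step) or math.isinf(tr_loss_step)):
        tr_loss += tr_loss / (1 + steps_since_log)
    else:
        tr_loss += tr_loss_step
    return tr_loss


# With the filter on and a finite step loss, the old flow loses the step:
print(accumulate_old(10.0, 2.0, filter_enabled=True, steps_since_log=4))  # 10.0
print(accumulate_new(10.0, 2.0, filter_enabled=True, steps_since_log=4))  # 12.0
# A nan step loss is handled the same way by both versions:
print(accumulate_old(10.0, float("nan"), filter_enabled=True, steps_since_log=4))  # 12.0
print(accumulate_new(10.0, float("nan"), filter_enabled=True, steps_since_log=4))  # 12.0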