Do not affect loss_dict values used for logging
Coobiw committed Oct 28, 2023
1 parent d485c10 commit 4ad76d1
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions lavis/tasks/base_task.py
@@ -65,7 +65,7 @@ def train_step(self, model, samples):
         loss_dict = {}
         for k,v in output.items():
             if "loss" in k:
-                loss_dict[k] = v
+                loss_dict[k] = v.detach().clone()  # do not affect loss_dict values used for logging
         return output["loss"], loss_dict
 
     def valid_step(self, model, samples):
@@ -223,8 +223,7 @@ def _train_inner_loop(
 
             with torch.cuda.amp.autocast(enabled=use_amp):
                 loss, loss_dict = self.train_step(model=model, samples=samples)
-                loss /= accum_grad_iters #TODO: not affect loss_dict values for logging
-
+                loss /= accum_grad_iters  # do not affect loss_dict values used for logging
             # if not (torch.isnan(loss) or torch.isinf(loss)):
             #     print(f"Valid loss in process {torch.distributed.get_rank()}: {loss.item()}")
             # else:
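Why the change matters: before this commit, `loss_dict` held the very same tensor objects returned in `output`, so the in-place `loss /= accum_grad_iters` used for gradient accumulation also rescaled the values later read out for logging. Storing `detach().clone()` copies keeps the logged numbers at their original scale. Below is a minimal sketch of the aliasing effect, with made-up toy tensors standing in for the model output (not the actual LAVIS code):

```python
import torch

accum_grad_iters = 4  # accumulation factor, value made up for the demo

# Stand-in for a model forward pass: total loss plus one per-term loss.
w = torch.nn.Parameter(torch.tensor(2.0))
output = {"loss": w * 3.0, "loss_itm": w * 1.0}

# Old behaviour: loss_dict aliases the very same tensors as output.
loss_dict_aliased = {k: v for k, v in output.items() if "loss" in k}

# New behaviour (this commit): detached copies are stored for logging only.
loss_dict_detached = {k: v.detach().clone() for k, v in output.items() if "loss" in k}

loss = output["loss"]
loss /= accum_grad_iters  # in-place scaling for gradient accumulation

print(loss_dict_aliased["loss"].item())   # 1.5 -> the logged value was rescaled too
print(loss_dict_detached["loss"].item())  # 6.0 -> the logged value is unaffected
```

`detach()` drops the autograd history so the logging copy holds no graph references, and `clone()` breaks the storage aliasing so later in-place operations on the training loss leave it untouched.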
