[Fix] fix loss smooth when loss name doesn't start with loss (open-mmlab#539)

* fix loss mean

* update
liuyanyi authored and C1rN09 committed Nov 1, 2022
1 parent 1471aee commit ca3ced4
Showing 1 changed file with 2 additions and 3 deletions.
mmengine/runner/log_processor.py (2 additions, 3 deletions)
@@ -153,7 +153,7 @@ def get_log_after_iter(self, runner, batch_idx: int,
                 log_str = (f'Iter({mode}) '
                            f'[{cur_iter}/{runner.max_iters}] ')
             else:
-                log_str = (f'Iter({mode}) [{batch_idx+1}'
+                log_str = (f'Iter({mode}) [{batch_idx + 1}'
                            f'/{len(current_loop.dataloader)}] ')
         # Concatenate lr, momentum string with log header.
         log_str += f'{lr_str} '
@@ -266,8 +266,7 @@ def _collect_scalars(self, custom_cfg: List[dict], runner,
                 mode_history_scalars[key] = log_buffer
         for key in mode_history_scalars:
             # Update the latest learning rate and smoothed time logs.
-            if key.startswith('loss') or key in ('time', 'data_time',
-                                                 'grad_norm'):
+            if 'loss' in key or key in ('time', 'data_time', 'grad_norm'):
                 tag[key] = mode_history_scalars[key].mean(self.window_size)
             else:
                 # Default statistic method is current.
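The substantive change is the condition in _collect_scalars: a key that merely contains 'loss' (for example a per-head loss logged by a downstream model) previously fell through to the "current value" branch because it does not start with the literal prefix, so it was never smoothed over the window. The minimal sketch below, which is not part of the commit and uses hypothetical key names, shows how the old and new checks classify such keys.

# Standalone sketch (not part of the commit); the key names below are
# hypothetical examples of scalars a runner might log.
keys = ['loss', 'loss_cls', 'decode.loss_ce', 'd0.loss_bbox', 'time', 'acc']

for key in keys:
    old_rule = key.startswith('loss')  # condition before this commit
    new_rule = 'loss' in key           # condition after this commit
    print(f'{key:16s} old={old_rule!s:5s} new={new_rule!s:5s}')

# Keys like 'decode.loss_ce' and 'd0.loss_bbox' fail the old check, so they
# were logged with their latest value only; the new check treats them as
# losses and they are smoothed with mean(self.window_size) like 'loss' itself.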
