Commit 889768b

Slight improvements to logging.
PiperOrigin-RevId: 359763463
Lukasz Kaiser authored and Copybara-Service committed Feb 26, 2021
1 parent 78643ea commit 889768b
Showing 2 changed files with 4 additions and 1 deletion.
2 changes: 1 addition & 1 deletion trax/rl/training.py
@@ -259,7 +259,7 @@ def run(self, n_epochs=1, n_epochs_is_total_epochs=False):
       for steps in self._eval_steps:
         for eval_t in self._eval_temperatures:
           sw.scalar(
-              'rl/avg_return_temperature%d_steps%d' % (eval_t, steps),
+              'rl/avg_return_temperature%.2f_steps%d' % (eval_t, steps),
               self._avg_returns_temperatures[eval_t][steps][-1],
               step=self._epoch)
       sw.scalar('rl/n_interactions', self.task.n_interactions(),
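The trax/rl/training.py change only touches the scalar tag: eval temperatures are presumably floats, and '%d' collapses them to integers, so different temperatures could end up under the same metric name. A minimal standalone sketch of the effect (not Trax code; the temperature and step values are made up for illustration):

# Why the tag format changed: '%d' truncates float temperatures,
# so e.g. 0.0 and 0.5 would produce the same tag; '%.2f' keeps them apart.
eval_temperatures = [0.0, 0.5, 1.0]  # hypothetical values
for t in eval_temperatures:
    old_tag = 'rl/avg_return_temperature%d_steps%d' % (t, 200)
    new_tag = 'rl/avg_return_temperature%.2f_steps%d' % (t, 200)
    print(old_tag, '->', new_tag)
# old: 'rl/avg_return_temperature0_steps200' for both 0.0 and 0.5
# new: '...temperature0.00_steps200' vs '...temperature0.50_steps200'
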
3 changes: 3 additions & 0 deletions trax/supervised/training.py
@@ -402,6 +402,9 @@ def run(self, n_steps=1):
                          (optimizer_metrics, loss))

       loss_acc += loss
+      # Log loss every 50 steps, every step in memory-efficient trainer.
+      if self._step % 50 == 0 or self._use_memory_efficient_trainer:
+        self._log_step('Loss: %.4f' % loss, stdout=False)
       step_acc += 1
       for metric_name, value in optimizer_metrics.items():
         optimizer_metrics_acc[metric_name] += value
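The trax/supervised/training.py change adds a periodic loss log: every 50 steps with the regular trainer, and on every step when the memory-efficient trainer is in use. A minimal sketch of that cadence check, using a hypothetical helper (should_log_loss is not a Trax function):

# Hypothetical helper mirroring the condition added above:
# log every `period` steps, or on every step for the memory-efficient trainer.
def should_log_loss(step, use_memory_efficient_trainer, period=50):
    return step % period == 0 or use_memory_efficient_trainer

assert should_log_loss(100, False)       # regular trainer, step 100: logs
assert not should_log_loss(101, False)   # regular trainer, step 101: skips
assert should_log_loss(101, True)        # memory-efficient trainer: always logs
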
