Fix LR schedulers when using optimizers with frequencies
milesial committed Jan 9, 2023
1 parent e2d309a commit 2539dba
Showing 2 changed files with 2 additions and 2 deletions.
src/pytorch_lightning/loops/epoch/training_epoch_loop.py (2 changes: 1 addition & 1 deletion)
@@ -390,7 +390,7 @@ def update_lr_schedulers(self, interval: str, update_plateau_schedulers: bool) -
         if interval == "step" and self._should_accumulate():
             return
         active_optimizers = _get_active_optimizers(
-            self.trainer.optimizers, self.trainer.optimizer_frequencies, self.total_batch_idx
+            self.trainer.optimizers, self.trainer.optimizer_frequencies, self.batch_idx
         )
         self._update_learning_rates(
             interval=interval,
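
For context, the change affects runs that return multiple optimizers with per-optimizer "frequency" values from configure_optimizers, each paired with its own LR scheduler. Below is a hypothetical sketch of such a setup (the module, layer, and optimizer names are invented, and training_step and data are omitted for brevity); the scheduler arguments mirror the test parametrization further down.

# Hypothetical LightningModule (names made up) showing the kind of setup the
# fix concerns: two optimizers alternated by "frequency", each with its own
# LR scheduler configured in configure_optimizers.
import torch
from torch import nn
from pytorch_lightning import LightningModule

class TwoOptimizerModule(LightningModule):
    def __init__(self):
        super().__init__()
        self.net_a = nn.Linear(32, 32)
        self.net_b = nn.Linear(32, 1)

    def configure_optimizers(self):
        opt_a = torch.optim.SGD(self.net_a.parameters(), lr=0.1)
        opt_b = torch.optim.SGD(self.net_b.parameters(), lr=0.1)
        sched_a = torch.optim.lr_scheduler.StepLR(opt_a, step_size=5)
        sched_b = torch.optim.lr_scheduler.CosineAnnealingLR(opt_b, T_max=2)
        return (
            # opt_a handles 4 consecutive batches, then opt_b handles 1.
            {"optimizer": opt_a, "frequency": 4, "lr_scheduler": sched_a},
            {"optimizer": opt_b, "frequency": 1, "lr_scheduler": sched_b},
        )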
@@ -286,7 +286,7 @@ def configure_optimizers(self):
             (dict(step_size=5), dict(T_max=2)),
             ("epoch", "epoch"),
             (5, 10),
-            (2, 3),
+            (4, 1),
             3,
         ),
     ],
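
The updated test swaps the optimizer frequencies from (2, 3) to (4, 1). The loop fix itself passes batch_idx (the index within the current epoch) rather than total_batch_idx (the running count across the whole run) to _get_active_optimizers, which reads as aligning the scheduler update with the optimizer actually active for that batch. The following is a minimal, hypothetical sketch of frequency-based selection, assuming a made-up helper pick_active_optimizer_index rather than Lightning's real _get_active_optimizers:

# Illustrative sketch only (assumed helper, not Lightning's implementation):
# pick the optimizer active for a given batch index when optimizers take
# turns in a repeating cycle defined by their frequencies.
from itertools import accumulate
from typing import List

def pick_active_optimizer_index(frequencies: List[int], batch_idx: int) -> int:
    # The cycle length is the sum of all frequencies.
    position = batch_idx % sum(frequencies)
    for opt_idx, upper_bound in enumerate(accumulate(frequencies)):
        if position < upper_bound:
            return opt_idx
    raise RuntimeError("unreachable: position is always below the last bound")

# With frequencies (4, 1), as in the updated test parametrization, batches
# 0-3 use optimizer 0, batch 4 uses optimizer 1, and the cycle repeats.
assert [pick_active_optimizer_index([4, 1], i) for i in range(6)] == [0, 0, 0, 0, 1, 0]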
