
Commit c275e1f

swaps lr sched order (Lightning-AI#2356)

* swaps lr sched order
* Update optimizers.py
* added amdim encoder choice
1 parent b6ab7ca commit c275e1f

1 file changed: +13 −3

pytorch_lightning/trainer/optimizers.py

Lines changed: 13 additions & 3 deletions
@@ -111,15 +111,25 @@ def configure_schedulers(self, schedulers: list):
     def reinit_scheduler_properties(self, optimizers: list, schedulers: list):
         # Reinitialize optimizer.step properties added by schedulers
         for scheduler in schedulers:
+            scheduler = scheduler['scheduler']
+
             for optimizer in optimizers:
-                scheduler = scheduler['scheduler']
                 # check that we dont mix users optimizers and schedulers
                 if scheduler.optimizer == optimizer:
                     # Find the mro belonging to the base lr scheduler class
                     for i, mro in enumerate(scheduler.__class__.__mro__):
-                        if mro == optim.lr_scheduler._LRScheduler:
+                        if (
+                            mro == optim.lr_scheduler._LRScheduler
+                            or mro == optim.lr_scheduler.ReduceLROnPlateau
+                        ):
                             idx = i
-                    scheduler.__class__.__mro__[idx].__init__(scheduler, optimizer)
+                            state = scheduler.state_dict()
+                        else:
+                            state = None
+
+                    scheduler.__class__.__mro__[idx].__init__(scheduler, optimizer)
+                    if state is not None:
+                        scheduler.load_state_dict(state)
 
 
 class _MockOptimizer(Optimizer):
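For context, here is a minimal sketch of the two failure modes this patch appears to address; the toy model, optimizers, and scheduler below are illustrative assumptions, not code from the repository. Unwrapping `scheduler['scheduler']` inside the optimizer loop breaks as soon as a second optimizer is configured, because the second iteration indexes the already-unwrapped scheduler object. Separately, re-running the base-class `__init__` resets scheduler progress such as `last_epoch`, and since `ReduceLROnPlateau` did not inherit from `_LRScheduler` at the time, the old mro scan never set `idx` for it at all; the added branch plus the `state_dict()` save/restore guard both cases.

# Illustrative sketch (assumptions, not repository code).
import torch
from torch import optim

model = torch.nn.Linear(2, 2)
opt = optim.SGD(model.parameters(), lr=0.1)
sched = optim.lr_scheduler.StepLR(opt, step_size=1)

# Failure 1: unwrapping inside the optimizer loop. With two optimizers,
# the second pass indexes the StepLR object itself and raises TypeError.
wrapped = {'scheduler': sched}
for scheduler in [wrapped]:
    for optimizer in [opt, opt]:  # stand-in for two configured optimizers
        try:
            scheduler = scheduler['scheduler']
        except TypeError as err:
            print('pre-patch order fails:', err)

# Failure 2: calling the base-class __init__ again wipes scheduler progress.
# Saving state_dict() first and restoring it afterwards, as the patch does,
# keeps counters such as last_epoch intact.
for _ in range(3):
    opt.step()
    sched.step()
state = sched.state_dict()                            # last_epoch == 3 here
optim.lr_scheduler._LRScheduler.__init__(sched, opt)  # re-init resets last_epoch
sched.load_state_dict(state)                          # progress restored
assert sched.last_epoch == 3

After the patch, the unwrap happens exactly once per scheduler, and the restored state also carries `ReduceLROnPlateau`'s tracked quantities (best metric, cooldown counters) across the re-initialization.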
