swaps lr sched order (#2356)
* swaps lr sched order

* Update optimizers.py

* added amdim encoder choice
williamFalcon committed Jun 25, 2020
1 parent b6ab7ca commit c275e1f
Showing 1 changed file with 13 additions and 3 deletions.
16 changes: 13 additions & 3 deletions pytorch_lightning/trainer/optimizers.py
@@ -111,15 +111,25 @@ def configure_schedulers(self, schedulers: list):
     def reinit_scheduler_properties(self, optimizers: list, schedulers: list):
         # Reinitialize optimizer.step properties added by schedulers
         for scheduler in schedulers:
+            scheduler = scheduler['scheduler']
+
             for optimizer in optimizers:
-                scheduler = scheduler['scheduler']
                 # check that we dont mix users optimizers and schedulers
                 if scheduler.optimizer == optimizer:
                     # Find the mro belonging to the base lr scheduler class
                     for i, mro in enumerate(scheduler.__class__.__mro__):
-                        if mro == optim.lr_scheduler._LRScheduler:
+                        if (
+                            mro == optim.lr_scheduler._LRScheduler
+                            or mro == optim.lr_scheduler.ReduceLROnPlateau
+                        ):
                             idx = i
-                    scheduler.__class__.__mro__[idx].__init__(scheduler, optimizer)
+                            state = scheduler.state_dict()
+                        else:
+                            state = None
+
+                    scheduler.__class__.__mro__[idx].__init__(scheduler, optimizer)
+                    if state is not None:
+                        scheduler.load_state_dict(state)
 
 
 class _MockOptimizer(Optimizer):
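For context, the sketch below (not part of the commit) illustrates the technique the patched reinit_scheduler_properties relies on: walk the scheduler's MRO to find the base scheduler class (_LRScheduler or, after this change, ReduceLROnPlateau), snapshot the scheduler state, re-run the base-class __init__ against the matching optimizer, then restore the saved state. The StepLR setup and the idx guard are illustrative assumptions, not code from the repository.

# Minimal, self-contained sketch of the rebinding technique used above.
# Assumes a PyTorch version where _LRScheduler appears in StepLR's MRO,
# as it did around the time of this commit.
import torch
from torch import optim

model = torch.nn.Linear(4, 2)
optimizer = optim.SGD(model.parameters(), lr=0.1)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10)

idx, state = None, None
for i, mro in enumerate(scheduler.__class__.__mro__):
    if mro in (optim.lr_scheduler._LRScheduler, optim.lr_scheduler.ReduceLROnPlateau):
        idx = i
        state = scheduler.state_dict()  # snapshot last_epoch, step_size, gamma, ...

if idx is not None:
    # Re-run the base-class constructor so the scheduler is bound to the
    # optimizer again (this re-adds the optimizer.step properties) ...
    scheduler.__class__.__mro__[idx].__init__(scheduler, optimizer)
    # ... then restore the state captured before reinitialization.
    if state is not None:
        scheduler.load_state_dict(state)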