[skip ci] Add doctest for LR Schedulers (#2384)
* [skip ci] doctest for CosineAnnealingScheduler

* doctest for ConcatScheduler

Co-authored-by: Desroziers <sylvain.desroziers@michelin.com>
sdesrozis and Desroziers authored Dec 24, 2021
1 parent f8c2094 commit 075da54
Showing 1 changed file with 85 additions and 37 deletions.
122 changes: 85 additions & 37 deletions ignite/handlers/param_scheduler.py
@@ -365,8 +365,6 @@ class LinearCyclicalScheduler(CyclicalScheduler):
.. testcode:: 1
from ignite.handlers.param_scheduler import LinearCyclicalScheduler
# Linearly increases the learning rate from 0.0 to 1.0 and back to 0.0
# over a cycle of 4 iterations
scheduler = LinearCyclicalScheduler(default_optimizer, "lr", 0.0, 1.0, 4)
@@ -389,8 +387,6 @@ def print_lr():
.. testcode:: 2
from ignite.handlers.param_scheduler import LinearCyclicalScheduler
optimizer = torch.optim.SGD(
[
{"params": default_model.base.parameters(), "lr": 0.001},
@@ -400,19 +396,19 @@ def print_lr():
# Linearly increases the learning rate from 0.0 to 1.0 and back to 0.0
# over a cycle of 4 iterations
scheduler1 = LinearCyclicalScheduler(optimizer, "lr", 0.0, 1.0, 4, param_group_index=0)
scheduler1 = LinearCyclicalScheduler(optimizer, "lr (base)", 0.0, 1.0, 4, param_group_index=0)
# Linearly increases the learning rate from 1.0 to 0.0 and back to 0.1
# Linearly increases the learning rate from 0.0 to 0.1 and back to 0.0
# over a cycle of 4 iterations
scheduler2 = LinearCyclicalScheduler(optimizer, "lr", 0.0, 0.1, 4, param_group_index=1)
scheduler2 = LinearCyclicalScheduler(optimizer, "lr (fc)", 0.0, 0.1, 4, param_group_index=1)
default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler1)
default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler2)
@default_trainer.on(Events.ITERATION_COMPLETED)
def print_lr():
print(optimizer.param_groups[0]["lr"],
optimizer.param_groups[1]["lr"])
print(optimizer.param_groups[0]["lr (base)"],
optimizer.param_groups[1]["lr (fc)"])
default_trainer.run([0] * 9, max_epochs=1)
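
Note: the doctests in this file refer to default_trainer, default_optimizer and default_model, which come from the documentation's doctest setup rather than from this commit. A minimal, hypothetical sketch of what such fixtures could look like (the actual definitions used by the ignite docs may differ):

import torch
import torch.nn as nn
from ignite.engine import Engine

# Hypothetical stand-ins for the doctest fixtures referenced in the examples;
# the real definitions live in the documentation's doctest setup, not here.
class DummyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.base = nn.Linear(4, 2)
        self.fc = nn.Linear(2, 1)

    def forward(self, x):
        return self.fc(self.base(x))

default_model = DummyModel()
default_optimizer = torch.optim.SGD(default_model.parameters(), lr=0.1)

def get_default_trainer():
    # The update step is a no-op; only the event flow matters for the
    # scheduler examples.
    def train_step(engine, batch):
        return batch
    return Engine(train_step)

default_trainer = get_default_trainer()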
@@ -460,33 +456,67 @@ class CosineAnnealingScheduler(CyclicalScheduler):
usually be the number of batches in an epoch.
Examples:
.. code-block:: python
from ignite.handlers.param_scheduler import CosineAnnealingScheduler
.. testsetup:: *
scheduler = CosineAnnealingScheduler(optimizer, 'lr', 1e-1, 1e-3, len(train_loader))
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
#
# Anneals the learning rate from 1e-1 to 1e-3 over the course of 1 epoch.
#
default_trainer = get_default_trainer()
.. code-block:: python
.. testcode:: 1
# CosineAnnealing increases the learning rate from 0.0 to 1.0
# over a cycle of 4 iterations
scheduler = CosineAnnealingScheduler(default_optimizer, "lr", 0.0, 1.0, 4)
default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
@default_trainer.on(Events.ITERATION_COMPLETED)
def print_lr():
print(default_optimizer.param_groups[0]["lr"])
default_trainer.run([0] * 9, max_epochs=1)
from ignite.handlers.param_scheduler import CosineAnnealingScheduler
from ignite.handlers.param_scheduler import LinearCyclicalScheduler
.. testoutput:: 1
optimizer = SGD(
0.0
0.1464...
0.4999...
0.8535...
...
.. testcode:: 2
optimizer = torch.optim.SGD(
[
{"params": model.base.parameters(), 'lr': 0.001},
{"params": model.fc.parameters(), 'lr': 0.01},
{"params": default_model.base.parameters(), "lr": 0.001},
{"params": default_model.fc.parameters(), "lr": 0.01},
]
)
scheduler1 = LinearCyclicalScheduler(optimizer, 'lr', 1e-7, 1e-5, len(train_loader), param_group_index=0)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler1, "lr (base)")
# CosineAnnealing increases the learning rate from 0.0 to 1.0
# over a cycle of 4 iterations
scheduler1 = CosineAnnealingScheduler(optimizer, "lr (base)", 0.0, 1.0, 4, param_group_index=0)
scheduler2 = CosineAnnealingScheduler(optimizer, 'lr', 1e-5, 1e-3, len(train_loader), param_group_index=1)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler2, "lr (fc)")
# CosineAnnealing increases the learning rate from 0.0 to 0.1
# over a cycle of 4 iterations
scheduler2 = CosineAnnealingScheduler(optimizer, "lr (fc)", 0.0, 0.1, 4, param_group_index=1)
default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler1)
default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler2)
@default_trainer.on(Events.ITERATION_COMPLETED)
def print_lr():
print(optimizer.param_groups[0]["lr (base)"],
optimizer.param_groups[1]["lr (fc)"])
default_trainer.run([0] * 9, max_epochs=1)
.. testoutput:: 2
0.0 0.0
0.1464... 0.01464...
0.4999... 0.04999...
0.8535... 0.08535...
...
.. [Smith17] Smith, Leslie N. "Cyclical learning rates for training neural networks."
Applications of Computer Vision (WACV), 2017 IEEE Winter Conference on. IEEE, 2017
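
The values printed in the testoutputs above are consistent with the usual cosine annealing interpolation, value = start_value + (end_value - start_value) / 2 * (1 - cos(pi * t / cycle_size)). A small check, assuming that formula (it reproduces the expected output of ".. testoutput:: 1"; it is not the scheduler's source code):

import math

# Reproduce the first cycle of ".. testoutput:: 1" above
# (start_value=0.0, end_value=1.0, cycle_size=4).
start_value, end_value, cycle_size = 0.0, 1.0, 4
for t in range(cycle_size):
    value = start_value + (end_value - start_value) / 2 * (1 - math.cos(math.pi * t / cycle_size))
    print(value)

# Prints:
# 0.0
# 0.14644660940672622
# 0.49999999999999994
# 0.8535533905932737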
@@ -513,21 +543,39 @@ class ConcatScheduler(ParamScheduler):
`engine.state.param_history`, (default=False).
Examples:
.. code-block:: python
from ignite.handlers.param_scheduler import ConcatScheduler
from ignite.handlers.param_scheduler import LinearCyclicalScheduler
from ignite.handlers.param_scheduler import CosineAnnealingScheduler
.. testsetup::
scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=0.1, end_value=0.5, cycle_size=60)
scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.5, end_value=0.01, cycle_size=60)
default_trainer = get_default_trainer()
.. testcode::
combined_scheduler = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[30, ])
trainer.add_event_handler(Events.ITERATION_STARTED, combined_scheduler)
#
# Sets the Learning rate linearly from 0.1 to 0.5 over 30 iterations. Then
# starts an annealing schedule from 0.5 to 0.01 over 60 iterations.
scheduler_1 = LinearCyclicalScheduler(default_optimizer, "lr", 0.0, 1.0, 8)
scheduler_2 = CosineAnnealingScheduler(default_optimizer, "lr", 1.0, 0.2, 4)
# Sets the Learning rate linearly from 0.0 to 1.0 over 4 iterations. Then
# starts an annealing schedule from 1.0 to 0.2 over the next 4 iterations.
# The annealing cycles are repeated indefinitely.
combined_scheduler = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[4, ])
default_trainer.add_event_handler(Events.ITERATION_STARTED, combined_scheduler)
@default_trainer.on(Events.ITERATION_COMPLETED)
def print_lr():
print(default_optimizer.param_groups[0]["lr"])
default_trainer.run([0] * 8, max_epochs=1)
.. testoutput::
0.0
0.25
0.5
0.75
1.0
0.8828...
0.6000...
0.3171...
.. versionadded:: 0.4.5
"""
