Skip to content

Commit

Permalink
Fixed docs issue in ReduceLROnPlateauScheduler (#2976)
Browse files Browse the repository at this point in the history
* Update param_scheduler.py

* Update param_scheduler.py

* Update param_scheduler.py
  • Loading branch information
vfdev-5 authored Jun 29, 2023
1 parent b41c627 commit eba5aae
Showing 1 changed file with 7 additions and 8 deletions.
15 changes: 7 additions & 8 deletions ignite/handlers/param_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -1508,20 +1508,19 @@ class ReduceLROnPlateauScheduler(ParamScheduler):
Default: False.
param_group_index: `optimizer`'s parameters group
to use. Default: None. Use all `optimizer`'s parameter groups.
**scheduler_kwargs: Keyword arguments to be passed to the wrapped
`ReduceLROnPlateau`.
scheduler_kwargs: Keyword arguments to be passed to the wrapped ``ReduceLROnPlateau``.
Examples:
.. code-block python
.. code-block:: python
# Metric 'metric-name' should surpass its best value by
# Metric "accuracy" should increase the best value by
# more than 1 unit after at most 2 epochs, otherwise LR
# would get multiplied by 0.5 .
scheduler = ReduceLROnPlateauScheduler(
default_optimizer,
metric_name="metric-name", mode="max",
metric_name="accuracy", mode="max",
factor=0.5, patience=1, threshold_mode='abs',
threshold=1, trainer=trainer
)
Expand All @@ -1538,10 +1537,10 @@ class ReduceLROnPlateauScheduler(ParamScheduler):
default_trainer = get_default_trainer()
# Metric `loss` should decrease more than
# a tenth of best loss after at most
# Metric "loss" should decrease more than
# 0.1 of best loss after at most
# three iterations. Then best loss would get
# updated, otherwise lr is multiplied by 2
# updated, otherwise lr is multiplied by 0.5
scheduler = ReduceLROnPlateauScheduler(
default_optimizer, "loss",
Expand Down

0 comments on commit eba5aae

Please sign in to comment.