
Commit

default sched (#6062)
rohitgr7 committed Feb 18, 2021
1 parent 8f82823 commit 5d6a091
Showing 1 changed file with 3 additions and 10 deletions.

pytorch_lightning/plugins/training_type/deepspeed.py
@@ -26,6 +26,7 @@
 from pytorch_lightning.overrides.base import _LightningModuleWrapperBase
 from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
+from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
@@ -240,16 +241,8 @@ def _initialize_deepspeed_inference(self, model):
         )

     def configure_scheduler(self, lr_scheduler):
-        # this duplicates the defaults from init_optimizers
-        scheduler = {
-            'scheduler': lr_scheduler,
-            'name': None,  # no custom name
-            'interval': 'epoch',  # after epoch is over
-            'frequency': 1,  # every epoch/batch
-            'reduce_on_plateau': False,  # most often not ReduceLROnPlateau scheduler
-            'monitor': None,  # value to monitor for ReduceLROnPlateau
-            'strict': True,  # enforce that the monitor exists for ReduceLROnPlateau
-        }
+        scheduler = _get_default_scheduler_config()
+        scheduler["scheduler"] = lr_scheduler
         return [scheduler]

     @property
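For context, the deleted inline dict doubles as documentation of what the shared defaults look like. Below is a minimal sketch of the refactored behavior, assuming _get_default_scheduler_config() (imported above from pytorch_lightning.trainer.optimizers) returns the same defaults the deleted dict carried; the helper's actual body is not shown in this diff:

# Sketch only, not the library source verbatim.
# Assumption: _get_default_scheduler_config() mirrors the deleted inline dict.

def _get_default_scheduler_config() -> dict:
    # Shared defaults, taken from the dict removed by this commit.
    return {
        'scheduler': None,           # filled in by the caller
        'name': None,                # no custom name
        'interval': 'epoch',         # step after each epoch
        'frequency': 1,              # every epoch/batch
        'reduce_on_plateau': False,  # most schedulers are not ReduceLROnPlateau
        'monitor': None,             # metric to monitor for ReduceLROnPlateau
        'strict': True,              # require the monitored metric to exist
    }

def configure_scheduler(lr_scheduler):
    # Start from the shared defaults instead of duplicating them,
    # then attach the actual scheduler instance.
    scheduler = _get_default_scheduler_config()
    scheduler["scheduler"] = lr_scheduler
    return [scheduler]

As the deleted comment notes, the old code duplicated the defaults from init_optimizers; routing both paths through one helper keeps the DeepSpeed plugin from drifting out of sync with the trainer's scheduler defaults.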
