From 7952b797a582918e9bae729870a1a5355c206c7c Mon Sep 17 00:00:00 2001
From: Noam
Date: Fri, 15 Jan 2021 18:32:38 +0200
Subject: [PATCH 01/22] started to write failing test. just getting into the
 framework...

---
 tests/trainer/test_lr_finder.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index 401584d920c9b..3ebb6f84cd956 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -20,6 +20,7 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.base import EvalModelTemplate
 from tests.base.datamodules import TrialMNISTDataModule
+from tests.base import BoringModel
 
 
 def test_error_on_more_than_1_optimizer(tmpdir):
@@ -262,3 +263,30 @@ def test_suggestion_with_non_finite_values(tmpdir):
 
     assert before_lr == after_lr, \
         'Learning rate was altered because of non-finite loss values'
+
+
+@pytest.mark.parametrize('use_hparams', [False, True])
+def test_lr_finder_fails_fast_on_bad_config(tmpdir, use_hparams):
+    """ Test that setting trainer arg to bool works """
+    hparams = EvalModelTemplate.get_default_hparams()
+    model = EvalModelTemplate(**hparams)
+    before_lr = hparams.get('learning_rate')
+    if use_hparams:
+        del model.learning_rate
+        # model.configure_optimizers = model.configure_optimizers__lr_from_hparams
+
+    # logger file to get meta
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        max_epochs=2,
+        auto_lr_find=True,
+    )
+
+    trainer.tune(model)
+    if use_hparams:
+        after_lr = model.hparams.learning_rate
+    else:
+        after_lr = model.learning_rate
+
+    assert before_lr != after_lr, \
+        'Learning rate was not altered after running learning rate finder'
\ No newline at end of file
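
A minimal sketch of the attribute lookup this series converges on -- not part
of any commit here, and assuming the PL 1.1-era helpers in
`pytorch_lightning.utilities.parsing` (`lightning_hasattr` checks the model
itself, then `model.hparams`), which is what makes the hand-rolled
`del`/`AttributeError` probing of patches 01-02 unnecessary:

    # Sketch only: resolve which attribute auto_lr_find should write to.
    from pytorch_lightning.utilities.parsing import lightning_hasattr

    def find_lr_field(model):
        # a field on `model` or inside `model.hparams` both qualify
        for attr in ('lr', 'learning_rate'):
            if lightning_hasattr(model, attr):
                return attr
        return None  # tune() should fail fast in this case
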
From 7e2fa622417c772dd875f2175a78c0c1164c4b15 Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 17 Jan 2021 11:20:39 +0200
Subject: [PATCH 02/22] started to write failing test. just getting into the
 framework...

---
 tests/trainer/test_lr_finder.py | 26 ++++++++++++++++----------
 1 file changed, 16 insertions(+), 10 deletions(-)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index 3ebb6f84cd956..c5310391b9a30 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -265,15 +265,11 @@ def test_suggestion_with_non_finite_values(tmpdir):
         'Learning rate was altered because of non-finite loss values'
 
 
-@pytest.mark.parametrize('use_hparams', [False, True])
-def test_lr_finder_fails_fast_on_bad_config(tmpdir, use_hparams):
+def test_lr_finder_fails_fast_on_bad_config(tmpdir):
     """ Test that setting trainer arg to bool works """
     hparams = EvalModelTemplate.get_default_hparams()
     model = EvalModelTemplate(**hparams)
     before_lr = hparams.get('learning_rate')
-    if use_hparams:
-        del model.learning_rate
-        # model.configure_optimizers = model.configure_optimizers__lr_from_hparams
 
     # logger file to get meta
     trainer = Trainer(
@@ -282,11 +278,21 @@ def test_lr_finder_fails_fast_on_bad_config(tmpdir):
         auto_lr_find=True,
     )
 
+    deleted_lr_existance = False
+    try:
+        del model.lr
+        deleted_lr_existance = True
+    except AttributeError:
+        pass
+
+    try:
+        del model.learning_rate
+        deleted_lr_existance = True
+    except AttributeError:
+        pass
+
     trainer.tune(model)
-    if use_hparams:
-        after_lr = model.hparams.learning_rate
-    else:
-        after_lr = model.learning_rate
+    after_lr = model.learning_rate
 
     assert before_lr != after_lr, \
-        'Learning rate was not altered after running learning rate finder'
\ No newline at end of file
+        'Learning rate was not altered after running learning rate finder'

From edc9ed9e821266cf5f8cd49761311d2aa732acbc Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 01:10:35 +0200
Subject: [PATCH 03/22] added failing test for misconfiguration of lr finder

---
 tests/trainer/test_lr_finder.py | 27 ++++++---------------------
 1 file changed, 6 insertions(+), 21 deletions(-)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index c5310391b9a30..bfb9ffcfbdfea 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -265,34 +265,19 @@ def test_suggestion_with_non_finite_values(tmpdir):
         'Learning rate was altered because of non-finite loss values'
 
 
+@pytest.mark.timeout(1)
 def test_lr_finder_fails_fast_on_bad_config(tmpdir):
-    """ Test that setting trainer arg to bool works """
-    hparams = EvalModelTemplate.get_default_hparams()
-    model = EvalModelTemplate(**hparams)
-    before_lr = hparams.get('learning_rate')
+    """ Test that misconfiguration of learning_rate or lr in model fails BEFORE lr optimization and not after it. """
+    import time
+    model = BoringModel()
+    dm = TrialMNISTDataModule(tmpdir)
 
-    # logger file to get meta
     trainer = Trainer(
         default_root_dir=tmpdir,
         max_epochs=2,
         auto_lr_find=True,
     )
 
-    deleted_lr_existance = False
-    try:
-        del model.lr
-        deleted_lr_existance = True
-    except AttributeError:
-        pass
-
-    try:
-        del model.learning_rate
-        deleted_lr_existance = True
-    except AttributeError:
-        pass
-
+    lr_finder = trainer.tuner.lr_find(model, datamodule=dm)
     trainer.tune(model)
-    after_lr = model.learning_rate
 
-    assert before_lr != after_lr, \
-        'Learning rate was not altered after running learning rate finder'

From d24b797767085aa437ec81098b7d076989f8c850 Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 13:32:28 +0200
Subject: [PATCH 04/22] made test startup quickly. making sure without the fix
 it also fails slowly

---
 pytorch_lightning/tuner/lr_finder.py | 40 ++++++++++++++++------------
 tests/trainer/test_dataloaders.py    |  2 +-
 tests/trainer/test_lr_finder.py      | 24 +++++++++++++++----
 3 files changed, 43 insertions(+), 23 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 2982454d02f70..5a9a36cc0b60d 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 import importlib
 import os
-from typing import List, Optional, Sequence, Union, Callable
+from typing import List, Optional, Sequence, Union, Callable, Any
 from functools import wraps
 
 import numpy as np
@@ -40,34 +40,40 @@
     from tqdm import tqdm
 
 
-def _run_lr_finder_internally(trainer, model: LightningModule):
-    """ Call lr finder internally during Trainer.fit() """
-    lr_finder = lr_find(trainer, model)
-
-    if lr_finder is None:
-        return
-
-    lr = lr_finder.suggestion()
-
-    # TODO: log lr.results to self.logger
+def __choose_lr_assigner(trainer, model: LightningModule) -> Callable[[Any], None]:
     if isinstance(trainer.auto_lr_find, str):
-        # Try to find requested field, may be nested
-        if lightning_hasattr(model, trainer.auto_lr_find):
-            lightning_setattr(model, trainer.auto_lr_find, lr)
-        else:
+        if not lightning_hasattr(model, trainer.auto_lr_find):
             raise MisconfigurationException(
                 f'`auto_lr_find` was set to {trainer.auto_lr_find}, however'
                 ' could not find this as a field in `model` or `model.hparams`.')
+        else:
+            return lambda val: lightning_setattr(model, trainer.auto_lr_find, val)
     else:
         if lightning_hasattr(model, 'lr'):
-            lightning_setattr(model, 'lr', lr)
+            return lambda val: lightning_setattr(model, 'lr', val)
         elif lightning_hasattr(model, 'learning_rate'):
-            lightning_setattr(model, 'learning_rate', lr)
+            return lambda val: lightning_setattr(model, 'learning_rate', val)
         else:
             raise MisconfigurationException(
                 'When auto_lr_find is set to True, expects that `model` or'
                 ' `model.hparams` either has field `lr` or `learning_rate`'
                 ' that can overridden')
+
+
+def _run_lr_finder_internally(trainer, model: LightningModule):
+    """ Call lr finder internally during Trainer.fit() """
+    lr_assigner = __choose_lr_assigner(trainer, model)
+
+    lr_finder = lr_find(trainer, model)
+
+    if lr_finder is None:
+        return
+
+    lr = lr_finder.suggestion()
+
+    # TODO: log lr.results to self.logger
+    lr_assigner(lr)
+
     log.info(f'Learning rate set to {lr}')
diff --git a/tests/trainer/test_dataloaders.py b/tests/trainer/test_dataloaders.py
index ef8a39c5d8abf..c95361d16193b 100644
--- a/tests/trainer/test_dataloaders.py
+++ b/tests/trainer/test_dataloaders.py
@@ -1148,7 +1148,6 @@ def test_replace_sampler_with_multiprocessing_context(tmpdir):
     train = DataLoader(train, batch_size=32, num_workers=2, multiprocessing_context=context, shuffle=True)
 
     class ExtendedBoringModel(BoringModel):
-
         def train_dataloader(self):
             return train
 
@@ -1158,5 +1157,6 @@ def train_dataloader(self):
         overfit_batches=5,
     )
 
+
     new_data_loader = trainer.replace_sampler(train, SequentialSampler(train.dataset))
     assert (new_data_loader.multiprocessing_context == train.multiprocessing_context)
diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index bfb9ffcfbdfea..6babf91c53651 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -18,9 +18,12 @@
 
 from pytorch_lightning import Trainer
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
+from pytorch_lightning import seed_everything
 from tests.base import EvalModelTemplate
 from tests.base.datamodules import TrialMNISTDataModule
-from tests.base import BoringModel
+from tests.base import BoringModel, RandomDataset
+from torch.utils.data.sampler import SequentialSampler
+from torch.utils.data.dataloader import DataLoader
 
 
 def test_error_on_more_than_1_optimizer(tmpdir):
@@ -269,15 +272,26 @@ def test_suggestion_with_non_finite_values(tmpdir):
 def test_lr_finder_fails_fast_on_bad_config(tmpdir):
     """ Test that misconfiguration of learning_rate or lr in model fails BEFORE lr optimization and not after it. """
     import time
-    model = BoringModel()
-    dm = TrialMNISTDataModule(tmpdir)
+    train = RandomDataset(32, 64)
+    context = 'spawn'
+    train = DataLoader(train, batch_size=32, num_workers=1, multiprocessing_context=context, shuffle=True)
+
+    class ExtendedBoringModel(BoringModel):
+        def train_dataloader(self):
+            return train
 
+    model = ExtendedBoringModel()
     trainer = Trainer(
         default_root_dir=tmpdir,
         max_epochs=2,
         auto_lr_find=True,
+        deterministic=True,
+        overfit_batches=1,
     )
+    train_data_loader = trainer.replace_sampler(train, SequentialSampler(train.dataset))
 
-    lr_finder = trainer.tuner.lr_find(model, datamodule=dm)
-    trainer.tune(model)
+    try:
+        trainer.tune(model, train_dataloader=train_data_loader)
+    except MisconfigurationException as ex:
+        print('fine')
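
A usage-level sketch of what patch 04 buys -- not from the PR itself, and the
model and field names are hypothetical. `auto_lr_find` also accepts a string
naming a custom attribute, and resolving the assigner before `lr_find` runs
is what makes a bad name fail up front instead of after a full learning-rate
sweep:

    # Sketch: the attribute named by auto_lr_find is validated before any
    # learning-rate search starts.
    from pytorch_lightning import Trainer

    model = MyModel()                        # hypothetical model exposing `self.my_lr`
    trainer = Trainer(auto_lr_find='my_lr')
    trainer.tune(model)                      # raises MisconfigurationException
                                             # immediately if neither `model` nor
                                             # `model.hparams` has `my_lr`
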
From b51a754d6f00421a8bd0c0f4e5c7e8e0098af817 Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 16:38:42 +0200
Subject: [PATCH 05/22] improved test

---
 tests/trainer/test_lr_finder.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index 6babf91c53651..3345eebcfa933 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -290,8 +290,5 @@ def train_dataloader(self):
     )
     train_data_loader = trainer.replace_sampler(train, SequentialSampler(train.dataset))
 
-    try:
-        trainer.tune(model, train_dataloader=train_data_loader)
-    except MisconfigurationException as ex:
-        print('fine')
-
+    with pytest.raises(MisconfigurationException):
+        trainer.tune(model, train_dataloader=train_data_loader)
\ No newline at end of file

From c34042cee6b8875549e88e637bf155b1681ed33c Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 16:58:32 +0200
Subject: [PATCH 06/22] fixed for linter

---
 pytorch_lightning/tuner/lr_finder.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 5a9a36cc0b60d..f817fb88f19bf 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -51,13 +51,12 @@ def __choose_lr_assigner(trainer, model: LightningModule) -> Callable[[Any], Non
     else:
         if lightning_hasattr(model, 'lr'):
             return lambda val: lightning_setattr(model, 'lr', val)
-        elif lightning_hasattr(model, 'learning_rate'):
+        if lightning_hasattr(model, 'learning_rate'):
             return lambda val: lightning_setattr(model, 'learning_rate', val)
-        else:
-            raise MisconfigurationException(
-                'When auto_lr_find is set to True, expects that `model` or'
-                ' `model.hparams` either has field `lr` or `learning_rate`'
-                ' that can overridden')
+        raise MisconfigurationException(
+            'When auto_lr_find is set to True, expects that `model` or'
+            ' `model.hparams` either has field `lr` or `learning_rate`'
+            ' that can overridden')

From e7a6c369e881221044cccd59437a50bd615fb88d Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 17:03:32 +0200
Subject: [PATCH 07/22] fixed for linter

---
 pytorch_lightning/tuner/lr_finder.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index f817fb88f19bf..625594618d3db 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -46,8 +46,7 @@ def __choose_lr_assigner(trainer, model: LightningModule) -> Callable[[Any], Non
             raise MisconfigurationException(
                 f'`auto_lr_find` was set to {trainer.auto_lr_find}, however'
                 ' could not find this as a field in `model` or `model.hparams`.')
-        else:
-            return lambda val: lightning_setattr(model, trainer.auto_lr_find, val)
+        return lambda val: lightning_setattr(model, trainer.auto_lr_find, val)
     else:
         if lightning_hasattr(model, 'lr'):

From 874178cd10da63f7d62a77290c8284f3100f8315 Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 17:07:42 +0200
Subject: [PATCH 08/22] yet another fix for the linter

---
 pytorch_lightning/tuner/lr_finder.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 625594618d3db..e3dbada744bd9 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -47,15 +47,15 @@ def __choose_lr_assigner(trainer, model: LightningModule) -> Callable[[Any], Non
             f'`auto_lr_find` was set to {trainer.auto_lr_find}, however'
             ' could not find this as a field in `model` or `model.hparams`.')
         return lambda val: lightning_setattr(model, trainer.auto_lr_find, val)
-    else:
-        if lightning_hasattr(model, 'lr'):
-            return lambda val: lightning_setattr(model, 'lr', val)
-        if lightning_hasattr(model, 'learning_rate'):
-            return lambda val: lightning_setattr(model, 'learning_rate', val)
-        raise MisconfigurationException(
-            'When auto_lr_find is set to True, expects that `model` or'
-            ' `model.hparams` either has field `lr` or `learning_rate`'
-            ' that can overridden')
+
+    if lightning_hasattr(model, 'lr'):
+        return lambda val: lightning_setattr(model, 'lr', val)
+    if lightning_hasattr(model, 'learning_rate'):
+        return lambda val: lightning_setattr(model, 'learning_rate', val)
+    raise MisconfigurationException(
+        'When auto_lr_find is set to True, expects that `model` or'
+        ' `model.hparams` either has field `lr` or `learning_rate`'
+        ' that can overridden')

From 404bc9ce41823ee6960021e9d4b0d2e449ed14ee Mon Sep 17 00:00:00 2001
From: Noam
Date: Sun, 24 Jan 2021 17:11:18 +0200
Subject: [PATCH 09/22] yet another fix for the linter

---
 tests/trainer/test_dataloaders.py | 1 -
 tests/trainer/test_lr_finder.py   | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/tests/trainer/test_dataloaders.py b/tests/trainer/test_dataloaders.py
index c95361d16193b..e7dde65fecedd 100644
--- a/tests/trainer/test_dataloaders.py
+++ b/tests/trainer/test_dataloaders.py
@@ -1157,6 +1157,5 @@ def train_dataloader(self):
         overfit_batches=5,
     )
 
-
     new_data_loader = trainer.replace_sampler(train, SequentialSampler(train.dataset))
     assert (new_data_loader.multiprocessing_context == train.multiprocessing_context)
diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index 3345eebcfa933..e88050040095b 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -291,4 +291,4 @@ def train_dataloader(self):
     train_data_loader = trainer.replace_sampler(train, SequentialSampler(train.dataset))
 
     with pytest.raises(MisconfigurationException):
-        trainer.tune(model, train_dataloader=train_data_loader)
\ No newline at end of file
+        trainer.tune(model, train_dataloader=train_data_loader)

From c6dedc8be07a65296b56e527bd90ce83e111c3f5 Mon Sep 17 00:00:00 2001
From: Noam
Date: Mon, 25 Jan 2021 03:04:45 +0200
Subject: [PATCH 10/22] fixed comment by @carmocca

---
 pytorch_lightning/tuner/lr_finder.py | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index e3dbada744bd9..9da6e61b8baa8 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -40,18 +40,18 @@
     from tqdm import tqdm
 
 
-def __choose_lr_assigner(trainer, model: LightningModule) -> Callable[[Any], None]:
+def __choose_lr_assigner(trainer, model: LightningModule) -> str:
     if isinstance(trainer.auto_lr_find, str):
         if not lightning_hasattr(model, trainer.auto_lr_find):
             raise MisconfigurationException(
                 f'`auto_lr_find` was set to {trainer.auto_lr_find}, however'
                 ' could not find this as a field in `model` or `model.hparams`.')
-        return lambda val: lightning_setattr(model, trainer.auto_lr_find, val)
+        return trainer.auto_lr_find
 
     if lightning_hasattr(model, 'lr'):
-        return lambda val: lightning_setattr(model, 'lr', val)
+        return 'lr'
     if lightning_hasattr(model, 'learning_rate'):
-        return lambda val: lightning_setattr(model, 'learning_rate', val)
+        return 'learning_rate'
     raise MisconfigurationException(
         'When auto_lr_find is set to True, expects that `model` or'
         ' `model.hparams` either has field `lr` or `learning_rate`'
@@ -60,17 +60,15 @@ def __choose_lr_assigner(trainer, model: LightningModule) -> Callable[[Any], Non
 
 def _run_lr_finder_internally(trainer, model: LightningModule):
     """ Call lr finder internally during Trainer.fit() """
-    lr_assigner = __choose_lr_assigner(trainer, model)
-
+    lr_attr_name = __choose_lr_assigner(trainer, model)
     lr_finder = lr_find(trainer, model)
-
     if lr_finder is None:
         return
 
     lr = lr_finder.suggestion()
 
     # TODO: log lr.results to self.logger
-    lr_assigner(lr)
+    lightning_setattr(model, lr_attr_name, lr)
 
     log.info(f'Learning rate set to {lr}')

From 03756f2dffaba2992f3a264178a86ef6b84ec6ce Mon Sep 17 00:00:00 2001
From: Noam
Date: Mon, 25 Jan 2021 03:09:45 +0200
Subject: [PATCH 11/22] fixed comment by @carmocca

---
 pytorch_lightning/tuner/lr_finder.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 9da6e61b8baa8..1dac5b3253141 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -40,7 +40,7 @@
     from tqdm import tqdm
 
 
-def __choose_lr_assigner(trainer, model: LightningModule) -> str:
+def __determine_lr_attr_name(trainer, model: LightningModule) -> str:
     if isinstance(trainer.auto_lr_find, str):
         if not lightning_hasattr(model, trainer.auto_lr_find):
             raise MisconfigurationException(
@@ -60,7 +60,7 @@ def __choose_lr_assigner(trainer, model: LightningModule) -> str:
 
 def _run_lr_finder_internally(trainer, model: LightningModule):
     """ Call lr finder internally during Trainer.fit() """
-    lr_attr_name = __choose_lr_assigner(trainer, model)
+    lr_attr_name = __determine_lr_attr_name(trainer, model)
     lr_finder = lr_find(trainer, model)
     if lr_finder is None:
         return
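
After patch 11 the flow is: resolve the attribute name, run the search, then
write the suggestion back with `lightning_setattr`. A sketch of the setter
semantics the fix relies on -- assuming the PL 1.1-era helper in
`pytorch_lightning.utilities.parsing`:

    # Sketch: lightning_setattr writes wherever the field already exists,
    # so the same call covers both storage conventions.
    from pytorch_lightning.utilities.parsing import lightning_setattr

    lightning_setattr(model, 'learning_rate', 0.01)
    # roughly `model.learning_rate = 0.01` when the field lives on the model,
    # or `model.hparams.learning_rate = 0.01` when it lives in hparams
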
From 8e6862dcd27e8afeca765b2179b7c8652a2c0a84 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Thu, 28 Jan 2021 20:50:50 +0100
Subject: [PATCH 12/22] Fix test

---
 tests/trainer/test_lr_finder.py | 35 ++++++++-------------------------
 1 file changed, 8 insertions(+), 27 deletions(-)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index e88050040095b..7f6016a46ca67 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -13,17 +13,15 @@
 # limitations under the License.
 import os
 from copy import deepcopy
+
 import pytest
 import torch
 
 from pytorch_lightning import Trainer
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning import seed_everything
+from tests.base import BoringModel
 from tests.base import EvalModelTemplate
 from tests.base.datamodules import TrialMNISTDataModule
-from tests.base import BoringModel, RandomDataset
-from torch.utils.data.sampler import SequentialSampler
-from torch.utils.data.dataloader import DataLoader
 
 
 def test_error_on_more_than_1_optimizer(tmpdir):
@@ -268,27 +266,10 @@ def test_suggestion_with_non_finite_values(tmpdir):
         'Learning rate was altered because of non-finite loss values'
 
 
-@pytest.mark.timeout(1)
 def test_lr_finder_fails_fast_on_bad_config(tmpdir):
-    """ Test that misconfiguration of learning_rate or lr in model fails BEFORE lr optimization and not after it. """
-    import time
-    train = RandomDataset(32, 64)
-    context = 'spawn'
-    train = DataLoader(train, batch_size=32, num_workers=1, multiprocessing_context=context, shuffle=True)
-
-    class ExtendedBoringModel(BoringModel):
-        def train_dataloader(self):
-            return train
-
-    model = ExtendedBoringModel()
-    trainer = Trainer(
-        default_root_dir=tmpdir,
-        max_epochs=2,
-        auto_lr_find=True,
-        deterministic=True,
-        overfit_batches=1,
-    )
-    train_data_loader = trainer.replace_sampler(train, SequentialSampler(train.dataset))
-
-    with pytest.raises(MisconfigurationException):
-        trainer.tune(model, train_dataloader=train_data_loader)
+    """ Test that tune fails if the model does not have a lr BEFORE running lr find """
+    # note: this did not raise an exception before #5648 because lr_find is skipped
+    # during fast_dev_run and the lr attribute check was done after lr_find
+    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, auto_lr_find=True)
+    with pytest.raises(MisconfigurationException, match='either has field `lr`'):
+        trainer.tune(BoringModel())

From 78500a49ac4a5decde787b15964d09dfdda04a44 Mon Sep 17 00:00:00 2001
From: Jirka Borovec
Date: Fri, 29 Jan 2021 10:33:37 +0100
Subject: [PATCH 13/22] chlog

---
 CHANGELOG.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f27368191f1f1..062026a1ce999 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -30,6 +30,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Fixed a race condition in `ModelCheckpoint` when checking if a checkpoint file exists ([#5144](https://github.com/PyTorchLightning/pytorch-lightning/pull/5144))
 
+- Fixed auto learning rate ordering ([#5638](https://github.com/PyTorchLightning/pytorch-lightning/pull/5638))
+
+
 ## [1.1.6] - 2021-01-26
 
 ### Changed

From 6e9382a82a0cecc857ae9d4a7d7c6bed6b5df039 Mon Sep 17 00:00:00 2001
From: Jirka Borovec
Date: Fri, 29 Jan 2021 10:38:40 +0100
Subject: [PATCH 14/22] Apply suggestions from code review

---
 pytorch_lightning/tuner/lr_finder.py | 11 ++++++-----
 tests/trainer/test_lr_finder.py      |  2 +-
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index a5310fb242de5..c8142f88f6391 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -50,13 +50,14 @@ def __determine_lr_attr_name(trainer, model: LightningModule) -> str:
             ' could not find this as a field in `model` or `model.hparams`.')
         return trainer.auto_lr_find
 
-    if lightning_hasattr(model, 'lr'):
-        return 'lr'
-    if lightning_hasattr(model, 'learning_rate'):
-        return 'learning_rate'
+    attr_options = ('lr', 'learning_rate')
+    for attr in attr_options:
+        if lightning_hasattr(model, attr):
+            return attr
+
     raise MisconfigurationException(
         'When auto_lr_find is set to True, expects that `model` or'
-        ' `model.hparams` either has field `lr` or `learning_rate`'
+        f' `model.hparams` either has one of these fields {attr_options}'
         ' that can overridden')
diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index 7f6016a46ca67..37706a81234f0 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -271,5 +271,5 @@ def test_lr_finder_fails_fast_on_bad_config(tmpdir):
     # note: this did not raise an exception before #5648 because lr_find is skipped
     # during fast_dev_run and the lr attribute check was done after lr_find
     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, auto_lr_find=True)
-    with pytest.raises(MisconfigurationException, match='either has field `lr`'):
+    with pytest.raises(MisconfigurationException, match='either has one of these fields'):
         trainer.tune(BoringModel())

From fa7255eccdb39919733f34e534af7d7015757b30 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Fri, 29 Jan 2021 14:12:58 +0100
Subject: [PATCH 15/22] Fix test

---
 tests/trainer/flags/test_fast_dev_run.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/trainer/flags/test_fast_dev_run.py b/tests/trainer/flags/test_fast_dev_run.py
index 2eaa6fd7f888d..e94c607866ecf 100644
--- a/tests/trainer/flags/test_fast_dev_run.py
+++ b/tests/trainer/flags/test_fast_dev_run.py
@@ -15,6 +15,7 @@ def test_skip_on_fast_dev_run_tuner(tmpdir, tuner_alg):
     """ Test that tuner algorithms are skipped if fast dev run is enabled """
 
     model = BoringModel()
+    model.lr = 0.1  # avoid no-lr-found exception
     trainer = Trainer(
         default_root_dir=tmpdir,
         max_epochs=2,

From 15ec3cd65e1271ad66f809c3296d3b1ce7d30ff8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 01:55:20 +0100
Subject: [PATCH 16/22] Update pytorch_lightning/tuner/lr_finder.py

Co-authored-by: Rohit Gupta
---
 pytorch_lightning/tuner/lr_finder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index c8142f88f6391..6c390fc2a271a 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -42,7 +42,7 @@
 log = logging.getLogger(__name__)
 
 
-def __determine_lr_attr_name(trainer, model: LightningModule) -> str:
+def _determine_lr_attr_name(trainer, model: LightningModule) -> str:
     if isinstance(trainer.auto_lr_find, str):
         if not lightning_hasattr(model, trainer.auto_lr_find):
             raise MisconfigurationException(

From 1008706090ca1423888c5bb8a268fb464f9db984 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 01:55:28 +0100
Subject: [PATCH 17/22] Update pytorch_lightning/tuner/lr_finder.py

Co-authored-by: Rohit Gupta
---
 pytorch_lightning/tuner/lr_finder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 6c390fc2a271a..173a235f481ef 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -63,7 +63,7 @@ def _determine_lr_attr_name(trainer, model: LightningModule) -> str:
 
 def _run_lr_finder_internally(trainer, model: LightningModule):
     """ Call lr finder internally during Trainer.fit() """
-    lr_attr_name = __determine_lr_attr_name(trainer, model)
+    lr_attr_name = _determine_lr_attr_name(trainer, model)
     lr_finder = lr_find(trainer, model)
     if lr_finder is None:
         return

From 030d6e83773cc0e58714fe4c2630252b6cfb21d1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 01:56:06 +0100
Subject: [PATCH 18/22] Update tests/trainer/test_lr_finder.py

---
 tests/trainer/test_lr_finder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index 37706a81234f0..c53ff5df7f4f8 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -268,7 +268,7 @@ def test_suggestion_with_non_finite_values(tmpdir):
 
 def test_lr_finder_fails_fast_on_bad_config(tmpdir):
     """ Test that tune fails if the model does not have a lr BEFORE running lr find """
-    # note: this did not raise an exception before #5648 because lr_find is skipped
+    # note: this did not raise an exception before #5638 because lr_find is skipped
     # during fast_dev_run and the lr attribute check was done after lr_find
     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, auto_lr_find=True)
     with pytest.raises(MisconfigurationException, match='either has one of these fields'):
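
The tests in patches 12-22 pin the failure with `pytest.raises(match=...)`.
As a reminder (standard pytest behaviour, not specific to this PR), `match`
is applied with `re.search` against the exception text, which is why a stable
substring of the message is enough and why patch 22 updates it alongside the
reworded error:

    # Sketch: `match` only needs a substring (regex) of the message.
    import pytest

    def test_match_is_a_regex_search():
        with pytest.raises(ValueError, match='one of these fields'):
            raise ValueError("model should have one of these fields: lr, learning_rate")
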
From 70359a335448dd9175b6676d7ed44d2fcbf62b2c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 01:57:30 +0100
Subject: [PATCH 19/22] Update pytorch_lightning/tuner/lr_finder.py

Co-authored-by: Rohit Gupta
---
 pytorch_lightning/tuner/lr_finder.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 173a235f481ef..d2975b189e845 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -47,7 +47,8 @@ def _determine_lr_attr_name(trainer, model: LightningModule) -> str:
         if not lightning_hasattr(model, trainer.auto_lr_find):
             raise MisconfigurationException(
                 f'`auto_lr_find` was set to {trainer.auto_lr_find}, however'
-                ' could not find this as a field in `model` or `model.hparams`.')
+                ' could not find this as a field in `model` or `model.hparams`.'
+            )
         return trainer.auto_lr_find

From 99f09f75feb28988cc3abeee4dda6416961284fa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 01:58:00 +0100
Subject: [PATCH 20/22] Update pytorch_lightning/tuner/lr_finder.py

Co-authored-by: Rohit Gupta
---
 pytorch_lightning/tuner/lr_finder.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index d2975b189e845..058cf26989814 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -57,9 +57,9 @@ def _determine_lr_attr_name(trainer, model: LightningModule) -> str:
             return attr
 
     raise MisconfigurationException(
-        'When auto_lr_find is set to True, expects that `model` or'
-        f' `model.hparams` either has one of these fields {attr_options}'
-        ' that can overridden')
+        'When `auto_lr_find=True`, either `model` or `model.hparams` should'
+        f' have one of these fields: {attr_options}', that can be overridden.'
+    )

From 035b72f7739ebd35c9d0302aae2728166d270251 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 12:32:05 +0100
Subject: [PATCH 21/22] Update pytorch_lightning/tuner/lr_finder.py

---
 pytorch_lightning/tuner/lr_finder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py
index 058cf26989814..cf0adc5a83653 100644
--- a/pytorch_lightning/tuner/lr_finder.py
+++ b/pytorch_lightning/tuner/lr_finder.py
@@ -58,7 +58,7 @@ def _determine_lr_attr_name(trainer, model: LightningModule) -> str:
 
     raise MisconfigurationException(
         'When `auto_lr_find=True`, either `model` or `model.hparams` should'
-        f' have one of these fields: {attr_options}', that can be overridden.'
+        f' have one of these fields: {attr_options} overridden.'
     )

From 71b36d226e7aa6ccde8f1693abcbeb253ab4f4cc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 1 Feb 2021 12:52:14 +0100
Subject: [PATCH 22/22] Update tests/trainer/test_lr_finder.py

---
 tests/trainer/test_lr_finder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/trainer/test_lr_finder.py b/tests/trainer/test_lr_finder.py
index c53ff5df7f4f8..f39bd17090901 100755
--- a/tests/trainer/test_lr_finder.py
+++ b/tests/trainer/test_lr_finder.py
@@ -271,5 +271,5 @@ def test_lr_finder_fails_fast_on_bad_config(tmpdir):
     # note: this did not raise an exception before #5638 because lr_find is skipped
     # during fast_dev_run and the lr attribute check was done after lr_find
     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, auto_lr_find=True)
-    with pytest.raises(MisconfigurationException, match='either has one of these fields'):
+    with pytest.raises(MisconfigurationException, match='should have one of these fields'):
        trainer.tune(BoringModel())
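
For readers reproducing the end state: after patch 22, `tune()` with
`auto_lr_find=True` requires the model to expose `lr` or `learning_rate`
(directly or via `hparams`) before any search runs. A minimal sketch of a
conforming model -- illustrative only, built on standard PL 1.1-era APIs:

    import torch
    from pytorch_lightning import LightningModule

    class LitModel(LightningModule):
        def __init__(self, learning_rate: float = 1e-3):
            super().__init__()
            # save_hyperparameters() stores `learning_rate` on self.hparams,
            # which lightning_hasattr/lightning_setattr can reach, so the
            # pre-check added by this PR passes and lr_find's suggestion is
            # written back here after tuning.
            self.save_hyperparameters()
            self.layer = torch.nn.Linear(32, 2)

        def forward(self, x):
            return self.layer(x)

        def configure_optimizers(self):
            return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate)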