Skip to content

Commit

Permalink
Revert "Merge branch 'feature/resume-6-1' into feature/resume-6-2"
Browse files Browse the repository at this point in the history
This reverts commit d68cbb8.
  • Loading branch information
awaelchli committed Jun 10, 2021
1 parent d68cbb8 commit 0711eed
Show file tree
Hide file tree
Showing 25 changed files with 979 additions and 698 deletions.
9 changes: 0 additions & 9 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
* `trainer.{logged,progress_bar,callback}_metrics` are now updated on-demand ([#7882](https://github.com/PyTorchLightning/pytorch-lightning/pull/7882))
* Completely overhaul the `Result` object in favor of `ResultMetric` ([#7882](https://github.com/PyTorchLightning/pytorch-lightning/pull/7882))
* Improve epoch-level reduction time and overall memory usage ([#7882](https://github.com/PyTorchLightning/pytorch-lightning/pull/7882))
* Allow passing `self.log(batch_size=...)` ([#7891](https://github.com/PyTorchLightning/pytorch-lightning/pull/7891))
* Each of the training loops now keeps its own results collection ([#7891](https://github.com/PyTorchLightning/pytorch-lightning/pull/7891))


- Moved `ignore_scalar_return_in_dp` warning suppression to the DataParallelPlugin class ([#7421](https://github.com/PyTorchLightning/pytorch-lightning/pull/7421/))

Expand Down Expand Up @@ -158,18 +155,12 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
### Deprecated


- Deprecated `DataModule` properties: `has_prepared_data`, `has_setup_fit`, `has_setup_validate`, `has_setup_test`, `has_setup_predict`, `has_teardown_fit`, `has_teardown_validate`, `has_teardown_test`, `has_teardown_predict` ([#7657](https://github.com/PyTorchLightning/pytorch-lightning/pull/7657/))


- Deprecated `TrainerModelHooksMixin` in favor of `pytorch_lightning.utilities.signature_utils` ([#7422](https://github.com/PyTorchLightning/pytorch-lightning/pull/7422))


- Deprecated `num_nodes` and `sync_batchnorm` arguments in `DDPPlugin` and `DDPSpawnPlugin` ([#7026](https://github.com/PyTorchLightning/pytorch-lightning/pull/7026))


- Deprecated `self.log(sync_dist_op)` in favor of `self.log(reduce_fx)`. ([#7891](https://github.com/PyTorchLightning/pytorch-lightning/pull/7891))


### Removed

- Removed `ProfilerConnector` ([#7654](https://github.com/PyTorchLightning/pytorch-lightning/pull/7654))
Expand Down
4 changes: 0 additions & 4 deletions docs/source/extensions/logging.rst
Original file line number Diff line number Diff line change
Expand Up @@ -68,10 +68,6 @@ except functions with `batch_start` in their names.
def training_step(self, batch, batch_idx):
self.log('my_metric', x)
# or a dict
def training_step(self, batch, batch_idx):
self.log('performance', {'acc': acc, 'recall': recall})
Depending on where log is called from, Lightning auto-determines the correct logging mode for you. \
But of course you can override the default behavior by manually setting the :func:`~pytorch_lightning.core.lightning.LightningModule.log` parameters.

Expand Down
63 changes: 3 additions & 60 deletions pytorch_lightning/core/datamodule.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
from torch.utils.data import DataLoader, Dataset, IterableDataset

from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks
from pytorch_lightning.utilities import rank_zero_only
from pytorch_lightning.utilities.argparse import add_argparse_args, from_argparse_args, get_init_arguments_and_types
from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_only


class LightningDataModule(CheckpointHooks, DataHooks):
Expand Down Expand Up @@ -160,13 +160,7 @@ def has_prepared_data(self) -> bool:
Returns:
bool: True if ``datamodule.prepare_data()`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_prepared_data` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_prepared_data

@property
Expand All @@ -175,11 +169,7 @@ def has_setup_fit(self) -> bool:
Returns:
bool: True if ``datamodule.setup(stage='fit')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation('DataModule property `has_setup_fit` was deprecated in v1.4 and will be removed in v1.6.')
return self._has_setup_fit

@property
Expand All @@ -188,13 +178,7 @@ def has_setup_validate(self) -> bool:
Returns:
bool: True if ``datamodule.setup(stage='validate')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_setup_validate` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_setup_validate

@property
Expand All @@ -203,13 +187,7 @@ def has_setup_test(self) -> bool:
Returns:
bool: True if ``datamodule.setup(stage='test')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_setup_test` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_setup_test

@property
Expand All @@ -218,13 +196,7 @@ def has_setup_predict(self) -> bool:
Returns:
bool: True if ``datamodule.setup(stage='predict')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_setup_predict` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_setup_predict

@property
Expand All @@ -233,13 +205,7 @@ def has_teardown_fit(self) -> bool:
Returns:
bool: True if ``datamodule.teardown(stage='fit')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_teardown_fit` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_teardown_fit

@property
Expand All @@ -248,13 +214,7 @@ def has_teardown_validate(self) -> bool:
Returns:
bool: True if ``datamodule.teardown(stage='validate')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_teardown_validate` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_teardown_validate

@property
Expand All @@ -263,13 +223,7 @@ def has_teardown_test(self) -> bool:
Returns:
bool: True if ``datamodule.teardown(stage='test')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_teardown_test` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_teardown_test

@property
Expand All @@ -278,13 +232,7 @@ def has_teardown_predict(self) -> bool:
Returns:
bool: True if ``datamodule.teardown(stage='predict')`` has been called. False by default.
.. deprecated:: v1.4
Will be removed in v1.6.0.
"""
rank_zero_deprecation(
'DataModule property `has_teardown_predict` was deprecated in v1.4 and will be removed in v1.6.'
)
return self._has_teardown_predict

@classmethod
Expand Down Expand Up @@ -433,13 +381,8 @@ def wrapped_fn(*args: str, **kwargs: Optional[str]) -> Any:
has_run = obj._has_prepared_data
obj._has_prepared_data = True

if has_run:
rank_zero_deprecation(
f"DataModule.{name} has already been called, so it will not be called again. "
f"In v1.6 this behavior will change to always call DataModule.{name}."
)
else:
fn(*args, **kwargs)
if not has_run:
return fn(*args, **kwargs)

return wrapped_fn

Expand Down
Loading

0 comments on commit 0711eed

Please sign in to comment.