From 0346b63c605feb045d1484da33fedb5e03523974 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 25 Jan 2021 06:31:45 +0100 Subject: [PATCH] fix duplicate console logging bug / properly configure logging (#5509) * duplicate logs * remove unused * configure basic logging * missing import * update docs for logging * import order * flake8 * test * fix test * flake8 * import warning * add changelog * isort * stderr Co-authored-by: chaton --- CHANGELOG.md | 3 +++ docs/source/logging.rst | 12 +++++++++--- .../computer_vision_fine_tuning.py | 3 ++- pytorch_lightning/__init__.py | 18 +++++++++++------- .../accelerators/accelerator_connector.py | 5 ++++- .../accelerators/ddp2_accelerator.py | 3 ++- .../accelerators/ddp_accelerator.py | 3 ++- .../accelerators/ddp_cpu_hpc_accelerator.py | 1 - .../accelerators/ddp_cpu_spawn_accelerator.py | 3 ++- .../accelerators/ddp_hpc_accelerator.py | 3 ++- .../accelerators/ddp_spawn_accelerator.py | 3 ++- .../accelerators/dp_accelerator.py | 1 - .../accelerators/gpu_accelerator.py | 1 - .../accelerators/horovod_accelerator.py | 1 - .../accelerators/tpu_accelerator.py | 3 ++- pytorch_lightning/callbacks/early_stopping.py | 1 - .../callbacks/model_checkpoint.py | 5 +++-- .../cluster_environments/slurm_environment.py | 6 ++++-- .../torchelastic_environment.py | 6 ++++-- pytorch_lightning/core/lightning.py | 3 ++- pytorch_lightning/core/saving.py | 3 ++- pytorch_lightning/loggers/comet.py | 3 ++- pytorch_lightning/loggers/csv_logs.py | 4 +++- pytorch_lightning/loggers/mlflow.py | 7 ++----- pytorch_lightning/loggers/neptune.py | 3 ++- pytorch_lightning/loggers/tensorboard.py | 4 +++- pytorch_lightning/plugins/ddp_plugin.py | 4 +++- .../plugins/ddp_sequential_plugin.py | 4 +++- pytorch_lightning/profiler/profilers.py | 4 +++- .../trainer/configuration_validator.py | 1 - .../trainer/connectors/checkpoint_connector.py | 1 - .../trainer/connectors/precision_connector.py | 5 ++++- .../trainer/connectors/slurm_connector.py | 4 +++- pytorch_lightning/trainer/trainer.py | 3 ++- pytorch_lightning/trainer/training_tricks.py | 3 ++- pytorch_lightning/tuner/batch_size_scaling.py | 4 +++- pytorch_lightning/tuner/lr_finder.py | 4 +++- pytorch_lightning/utilities/distributed.py | 3 ++- pytorch_lightning/utilities/seed.py | 5 ++++- .../utilities/upgrade_checkpoint.py | 4 +++- tests/__init__.py | 4 ++++ tests/callbacks/test_early_stopping.py | 5 ++++- tests/checkpointing/test_model_checkpoint.py | 4 +++- tests/test_profiler.py | 5 +++-- 44 files changed, 117 insertions(+), 58 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index edbcd8d46aefe..c4b54b232c60c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,6 +36,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed `val_check_interval` with `fast_dev_run` ([#5540](https://github.com/PyTorchLightning/pytorch-lightning/pull/5540)) +- Fixed duplicate logs appearing in console when using the python logging module ([#5509](https://github.com/PyTorchLightning/pytorch-lightning/pull/5509)) + + ## [1.1.4] - 2021-01-12 ### Added diff --git a/docs/source/logging.rst b/docs/source/logging.rst index 79452b0ca8788..8f32d72f0dbda 100644 --- a/docs/source/logging.rst +++ b/docs/source/logging.rst @@ -259,13 +259,19 @@ Configure console logging ************************* Lightning logs useful information about the training process and user warnings to the console. -You can retrieve the Lightning logger and change it to your liking. 
For example, increase the logging level -to see fewer messages like so: +You can retrieve the Lightning logger and change it to your liking. For example, adjust the logging level +or redirect output for certain modules to log files: .. code-block:: python import logging - logging.getLogger("lightning").setLevel(logging.ERROR) + + # configure logging at the root level of lightning + logging.getLogger("pytorch_lightning").setLevel(logging.ERROR) + + # configure logging on module level, redirect to file + logger = logging.getLogger("pytorch_lightning.core") + logger.addHandler(logging.FileHandler("core.log")) Read more about custom Python logging `here `_. diff --git a/pl_examples/domain_templates/computer_vision_fine_tuning.py b/pl_examples/domain_templates/computer_vision_fine_tuning.py index 733fd8646142e..41e014c644756 100644 --- a/pl_examples/domain_templates/computer_vision_fine_tuning.py +++ b/pl_examples/domain_templates/computer_vision_fine_tuning.py @@ -40,6 +40,7 @@ """ import argparse +import logging from collections import OrderedDict from pathlib import Path from tempfile import TemporaryDirectory @@ -58,8 +59,8 @@ import pytorch_lightning as pl from pl_examples import cli_lightning_logo -from pytorch_lightning import _logger as log +log = logging.getLogger(__name__) BN_TYPES = (torch.nn.BatchNorm1d, torch.nn.BatchNorm2d, torch.nn.BatchNorm3d) DATA_URL = "https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip" diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 8b8e3328375bb..7bf73f8091765 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -1,7 +1,8 @@ """Root package info.""" -import logging as python_logging +import logging import os +import sys import time _this_year = time.strftime("%Y") @@ -37,10 +38,15 @@ - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ +_root_logger = logging.getLogger() +_logger = logging.getLogger(__name__) +_logger.setLevel(logging.INFO) + +# if root logger has handlers, propagate messages up and let root logger process them +if not _root_logger.hasHandlers(): + _logger.addHandler(logging.StreamHandler()) + _logger.propagate = False -_logger = python_logging.getLogger("lightning") -_logger.addHandler(python_logging.StreamHandler()) -_logger.setLevel(python_logging.INFO) PACKAGE_ROOT = os.path.dirname(__file__) PROJECT_ROOT = os.path.dirname(PACKAGE_ROOT) @@ -53,9 +59,7 @@ except NameError: __LIGHTNING_SETUP__: bool = False -if __LIGHTNING_SETUP__: - import sys # pragma: no-cover - +if __LIGHTNING_SETUP__: # pragma: no-cover sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover # We are not importing the rest of the lightning during the build process, as it may not be compiled yet else: diff --git a/pytorch_lightning/accelerators/accelerator_connector.py b/pytorch_lightning/accelerators/accelerator_connector.py index c911225d0b29f..e376be44cf58e 100644 --- a/pytorch_lightning/accelerators/accelerator_connector.py +++ b/pytorch_lightning/accelerators/accelerator_connector.py @@ -11,12 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging import os import torch from pytorch_lightning.utilities import HOROVOD_AVAILABLE -from pytorch_lightning import _logger as log + from pytorch_lightning import accelerators from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment @@ -28,6 +29,8 @@ if HOROVOD_AVAILABLE: import horovod.torch as hvd +log = logging.getLogger(__name__) + class AcceleratorConnector: diff --git a/pytorch_lightning/accelerators/ddp2_accelerator.py b/pytorch_lightning/accelerators/ddp2_accelerator.py index 373406589d855..ce0db96e9f737 100644 --- a/pytorch_lightning/accelerators/ddp2_accelerator.py +++ b/pytorch_lightning/accelerators/ddp2_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional, Union @@ -18,7 +19,6 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -29,6 +29,7 @@ from pytorch_lightning.utilities import AMPType, HYDRA_AVAILABLE from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_accelerator.py b/pytorch_lightning/accelerators/ddp_accelerator.py index 0fde9da158c94..24b7c295eb0df 100644 --- a/pytorch_lightning/accelerators/ddp_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License +import logging import os from os.path import abspath import subprocess @@ -23,7 +24,6 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -40,6 +40,7 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.seed import seed_everything +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py b/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py index 4694a31438ca6..9b7c5c5613b5c 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py @@ -13,7 +13,6 @@ # limitations under the License from typing import Optional -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.plugins.ddp_plugin import DDPPlugin diff --git a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py b/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py index f9ccaa200bbf4..dd3ebafc66a33 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional, Union @@ -19,7 +20,6 @@ import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -35,6 +35,7 @@ sync_ddp_if_available, ) +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py b/pytorch_lightning/accelerators/ddp_hpc_accelerator.py index bdc4631b5d017..e58820550ee50 100644 --- a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_hpc_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional, Union @@ -19,7 +20,6 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -29,6 +29,7 @@ from pytorch_lightning.utilities import AMPType, HYDRA_AVAILABLE from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py b/pytorch_lightning/accelerators/ddp_spawn_accelerator.py index eb4ff24e39dd4..a273579cb14f9 100644 --- a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_spawn_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os import re from typing import Any, List, Optional, Union @@ -20,7 +21,6 @@ import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -39,6 +39,7 @@ ) from pytorch_lightning.utilities.seed import seed_everything +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/dp_accelerator.py b/pytorch_lightning/accelerators/dp_accelerator.py index 03c9ebb442fb2..4eb35c898e50f 100644 --- a/pytorch_lightning/accelerators/dp_accelerator.py +++ b/pytorch_lightning/accelerators/dp_accelerator.py @@ -16,7 +16,6 @@ import torch from torch import optim -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/gpu_accelerator.py b/pytorch_lightning/accelerators/gpu_accelerator.py index d65b19bbd9bb1..51272d50707a4 100644 --- a/pytorch_lightning/accelerators/gpu_accelerator.py +++ b/pytorch_lightning/accelerators/gpu_accelerator.py @@ -15,7 +15,6 @@ import torch -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.distributed.dist import LightningDistributed diff --git a/pytorch_lightning/accelerators/horovod_accelerator.py b/pytorch_lightning/accelerators/horovod_accelerator.py index 6e11a13064513..fbcf7d621343a 100644 --- a/pytorch_lightning/accelerators/horovod_accelerator.py +++ b/pytorch_lightning/accelerators/horovod_accelerator.py @@ -17,7 +17,6 @@ import torch from torch.optim.lr_scheduler 
import _LRScheduler -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.utilities import AMPType, HOROVOD_AVAILABLE diff --git a/pytorch_lightning/accelerators/tpu_accelerator.py b/pytorch_lightning/accelerators/tpu_accelerator.py index 286004bc0976e..1c3ebf1f76289 100644 --- a/pytorch_lightning/accelerators/tpu_accelerator.py +++ b/pytorch_lightning/accelerators/tpu_accelerator.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import io +import logging import os import re from typing import Any, Callable, Optional, Union @@ -20,7 +21,6 @@ import torch.multiprocessing as mp from torch.optim import Optimizer -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core import LightningModule @@ -35,6 +35,7 @@ from pytorch_lightning.utilities.cloud_io import atomic_save from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) if TPU_AVAILABLE: import torch_xla import torch_xla.core.xla_model as xm diff --git a/pytorch_lightning/callbacks/early_stopping.py b/pytorch_lightning/callbacks/early_stopping.py index 3e15d8462350c..72e7a7944cfcf 100644 --- a/pytorch_lightning/callbacks/early_stopping.py +++ b/pytorch_lightning/callbacks/early_stopping.py @@ -25,7 +25,6 @@ import numpy as np import torch -from pytorch_lightning import _logger as log from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.metrics.metric import Metric from pytorch_lightning.utilities import rank_zero_info, rank_zero_warn, TPU_AVAILABLE diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py index e5c960b3c002b..6f10bc8bb63b8 100644 --- a/pytorch_lightning/callbacks/model_checkpoint.py +++ b/pytorch_lightning/callbacks/model_checkpoint.py @@ -19,7 +19,7 @@ Automatically save model checkpoints during training. """ - +import logging import numbers import os import re @@ -31,13 +31,14 @@ import torch import yaml -from pytorch_lightning import _logger as log from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.metrics.metric import Metric from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn from pytorch_lightning.utilities.cloud_io import get_filesystem from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) + class ModelCheckpoint(Callback): r""" diff --git a/pytorch_lightning/cluster_environments/slurm_environment.py b/pytorch_lightning/cluster_environments/slurm_environment.py index cb8db4d440178..ce22127a8d636 100644 --- a/pytorch_lightning/cluster_environments/slurm_environment.py +++ b/pytorch_lightning/cluster_environments/slurm_environment.py @@ -11,12 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import logging import os import re -from pytorch_lightning import _logger as log + from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment +log = logging.getLogger(__name__) + class SLURMEnvironment(ClusterEnvironment): diff --git a/pytorch_lightning/cluster_environments/torchelastic_environment.py b/pytorch_lightning/cluster_environments/torchelastic_environment.py index a4d769518d252..712c6c3e36661 100644 --- a/pytorch_lightning/cluster_environments/torchelastic_environment.py +++ b/pytorch_lightning/cluster_environments/torchelastic_environment.py @@ -11,12 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import logging import os -from pytorch_lightning import _logger as log + from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment +log = logging.getLogger(__name__) + class TorchElasticEnvironment(ClusterEnvironment): diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 76d6a9d6f9227..02015f6dc6229 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -17,6 +17,7 @@ import collections import copy import inspect +import logging import os import re import tempfile @@ -30,7 +31,6 @@ from torch.nn import Module from torch.optim.optimizer import Optimizer -from pytorch_lightning import _logger as log from pytorch_lightning.core.grads import GradInformation from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks from pytorch_lightning.core.memory import ModelSummary @@ -42,6 +42,7 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args +log = logging.getLogger(__name__) if TPU_AVAILABLE: import torch_xla.core.xla_model as xm diff --git a/pytorch_lightning/core/saving.py b/pytorch_lightning/core/saving.py index 12a29246888f7..3a2f58d1ccf10 100644 --- a/pytorch_lightning/core/saving.py +++ b/pytorch_lightning/core/saving.py @@ -15,6 +15,7 @@ import ast import csv import inspect +import logging import os from argparse import Namespace from copy import deepcopy @@ -25,13 +26,13 @@ import torch import yaml -from pytorch_lightning import _logger as log from pytorch_lightning.utilities import AttributeDict, OMEGACONF_AVAILABLE, rank_zero_warn from pytorch_lightning.utilities.apply_func import apply_to_collection from pytorch_lightning.utilities.cloud_io import get_filesystem from pytorch_lightning.utilities.cloud_io import load as pl_load from pytorch_lightning.utilities.parsing import parse_class_init_keys +log = logging.getLogger(__name__) PRIMITIVE_TYPES = (bool, int, float, str) ALLOWED_CONFIG_TYPES = (AttributeDict, MutableMapping, Namespace) diff --git a/pytorch_lightning/loggers/comet.py b/pytorch_lightning/loggers/comet.py index 869bce831f0c2..2d23293e881b5 100644 --- a/pytorch_lightning/loggers/comet.py +++ b/pytorch_lightning/loggers/comet.py @@ -17,6 +17,7 @@ ------------ """ +import logging import os from argparse import Namespace from typing import Any, Dict, Optional, Union @@ -24,11 +25,11 @@ import torch from torch import is_tensor -from pytorch_lightning import _logger as log from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import 
rank_zero_only, _module_available from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) _COMET_AVAILABLE = _module_available("comet_ml") if _COMET_AVAILABLE: diff --git a/pytorch_lightning/loggers/csv_logs.py b/pytorch_lightning/loggers/csv_logs.py index d47cff1db0e1b..92190161dbf6a 100644 --- a/pytorch_lightning/loggers/csv_logs.py +++ b/pytorch_lightning/loggers/csv_logs.py @@ -21,17 +21,19 @@ """ import csv import io +import logging import os from argparse import Namespace from typing import Any, Dict, Optional, Union import torch -from pytorch_lightning import _logger as log from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn +log = logging.getLogger(__name__) + class ExperimentWriter(object): r""" diff --git a/pytorch_lightning/loggers/mlflow.py b/pytorch_lightning/loggers/mlflow.py index 4987d050c925d..c53ed2c9b3ccc 100644 --- a/pytorch_lightning/loggers/mlflow.py +++ b/pytorch_lightning/loggers/mlflow.py @@ -16,20 +16,17 @@ MLflow Logger ------------- """ +import logging import re from argparse import Namespace from time import time from typing import Any, Dict, Optional, Union - -from pytorch_lightning import _logger as log from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn, _module_available - +log = logging.getLogger(__name__) LOCAL_FILE_URI_PREFIX = "file:" - - _MLFLOW_AVAILABLE = _module_available("mlflow") try: import mlflow diff --git a/pytorch_lightning/loggers/neptune.py b/pytorch_lightning/loggers/neptune.py index 9ee0443965ba2..b7077b7d3e6fe 100644 --- a/pytorch_lightning/loggers/neptune.py +++ b/pytorch_lightning/loggers/neptune.py @@ -17,15 +17,16 @@ -------------- """ from argparse import Namespace +import logging from typing import Any, Dict, Iterable, Optional, Union import torch from torch import is_tensor -from pytorch_lightning import _logger as log from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, _module_available +log = logging.getLogger(__name__) _NEPTUNE_AVAILABLE = _module_available("neptune") if _NEPTUNE_AVAILABLE: diff --git a/pytorch_lightning/loggers/tensorboard.py b/pytorch_lightning/loggers/tensorboard.py index f8e984c6ff5bc..fe55b92567a68 100644 --- a/pytorch_lightning/loggers/tensorboard.py +++ b/pytorch_lightning/loggers/tensorboard.py @@ -17,6 +17,7 @@ ------------------ """ +import logging import os from argparse import Namespace from typing import Any, Dict, Optional, Union @@ -25,13 +26,14 @@ from torch.utils.tensorboard import SummaryWriter from torch.utils.tensorboard.summary import hparams -from pytorch_lightning import _logger as log from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn, OMEGACONF_AVAILABLE from pytorch_lightning.utilities.cloud_io import get_filesystem +log = logging.getLogger(__name__) + if OMEGACONF_AVAILABLE: from omegaconf import Container, OmegaConf diff --git a/pytorch_lightning/plugins/ddp_plugin.py b/pytorch_lightning/plugins/ddp_plugin.py index 
6d5ad1e9e2119..57671a0c57d37 100644 --- a/pytorch_lightning/plugins/ddp_plugin.py +++ b/pytorch_lightning/plugins/ddp_plugin.py @@ -1,3 +1,4 @@ +import logging import os from contextlib import contextmanager from typing import Any, Dict, List, Union @@ -6,11 +7,12 @@ import torch.distributed as torch_distrib from torch.optim import Optimizer -from pytorch_lightning import _logger as log from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel from pytorch_lightning.plugins.plugin import LightningPlugin +log = logging.getLogger(__name__) + class DDPPlugin(LightningPlugin): """ diff --git a/pytorch_lightning/plugins/ddp_sequential_plugin.py b/pytorch_lightning/plugins/ddp_sequential_plugin.py index 069b1754fbce0..39fcce9cb44cd 100644 --- a/pytorch_lightning/plugins/ddp_sequential_plugin.py +++ b/pytorch_lightning/plugins/ddp_sequential_plugin.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional @@ -19,13 +20,14 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log from pytorch_lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel from pytorch_lightning.plugins.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import FAIRSCALE_PIPE_AVAILABLE, rank_zero_only from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) + if FAIRSCALE_PIPE_AVAILABLE: from fairscale.nn import PipeRPCWrapper import fairscale.nn.model_parallel as mpu diff --git a/pytorch_lightning/profiler/profilers.py b/pytorch_lightning/profiler/profilers.py index ac410b0231453..fc3afa8c0a993 100644 --- a/pytorch_lightning/profiler/profilers.py +++ b/pytorch_lightning/profiler/profilers.py @@ -16,6 +16,7 @@ import cProfile import io +import logging import os import pstats import time @@ -27,9 +28,10 @@ import fsspec import numpy as np -from pytorch_lightning import _logger as log from pytorch_lightning.utilities.cloud_io import get_filesystem +log = logging.getLogger(__name__) + class BaseProfiler(ABC): """ diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py index 20992255ba29e..d82454f697d02 100644 --- a/pytorch_lightning/trainer/configuration_validator.py +++ b/pytorch_lightning/trainer/configuration_validator.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from pytorch_lightning import _logger as log from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/trainer/connectors/checkpoint_connector.py b/pytorch_lightning/trainer/connectors/checkpoint_connector.py index 03d46132fb177..34d572de84c51 100644 --- a/pytorch_lightning/trainer/connectors/checkpoint_connector.py +++ b/pytorch_lightning/trainer/connectors/checkpoint_connector.py @@ -20,7 +20,6 @@ import torch import pytorch_lightning -from pytorch_lightning import _logger as log from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.utilities import AMPType, APEX_AVAILABLE, OMEGACONF_AVAILABLE, rank_zero_info, rank_zero_warn diff --git a/pytorch_lightning/trainer/connectors/precision_connector.py b/pytorch_lightning/trainer/connectors/precision_connector.py index 822c3ef634fdc..6ec4b01fd8f10 100644 --- a/pytorch_lightning/trainer/connectors/precision_connector.py +++ b/pytorch_lightning/trainer/connectors/precision_connector.py @@ -12,11 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pytorch_lightning import _logger as log +import logging + from pytorch_lightning.plugins.apex import ApexPlugin from pytorch_lightning.plugins.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import AMPType, APEX_AVAILABLE, NATIVE_AMP_AVAILABLE, rank_zero_warn +log = logging.getLogger(__name__) + class PrecisionConnector: diff --git a/pytorch_lightning/trainer/connectors/slurm_connector.py b/pytorch_lightning/trainer/connectors/slurm_connector.py index 9c04c10559b6d..f1905043cc72d 100644 --- a/pytorch_lightning/trainer/connectors/slurm_connector.py +++ b/pytorch_lightning/trainer/connectors/slurm_connector.py @@ -1,12 +1,14 @@ +import logging import os import re import signal from subprocess import call -from pytorch_lightning import _logger as log from pytorch_lightning.utilities.distributed import rank_zero_info import torch.distributed as torch_distrib import torch +log = logging.getLogger(__name__) + class SLURMConnector: diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index c3ef0e507789e..bbfc3fd3202f2 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -14,6 +14,7 @@ """Trainer to automate the training.""" +import logging import os import warnings from pathlib import Path @@ -22,7 +23,6 @@ import torch from torch.utils.data import DataLoader -from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.accelerators.accelerator_connector import AcceleratorConnector from pytorch_lightning.callbacks import Callback @@ -64,6 +64,7 @@ from pytorch_lightning.utilities.memory import recursive_detach from pytorch_lightning.utilities.model_utils import is_overridden +log = logging.getLogger(__name__) # warnings to ignore in trainer warnings.filterwarnings( 'ignore', message='torch.distributed.reduce_op is deprecated, ' 'please use torch.distributed.ReduceOp instead' diff --git a/pytorch_lightning/trainer/training_tricks.py b/pytorch_lightning/trainer/training_tricks.py index be9793e9e5cdb..5129e1a5a0b7c 100644 --- a/pytorch_lightning/trainer/training_tricks.py +++ b/pytorch_lightning/trainer/training_tricks.py @@ 
-13,15 +13,16 @@ # limitations under the License. from abc import ABC, abstractmethod +import logging import torch from torch import Tensor -from pytorch_lightning import _logger as log from pytorch_lightning.core.lightning import LightningModule EPSILON = 1e-6 EPSILON_FP16 = 1e-5 +log = logging.getLogger(__name__) class TrainerTrainingTricksMixin(ABC): diff --git a/pytorch_lightning/tuner/batch_size_scaling.py b/pytorch_lightning/tuner/batch_size_scaling.py index 52662f6172d8d..03f95cd66891b 100644 --- a/pytorch_lightning/tuner/batch_size_scaling.py +++ b/pytorch_lightning/tuner/batch_size_scaling.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Optional, Tuple @@ -21,9 +22,10 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.memory import is_oom_error, garbage_collection_cuda from pytorch_lightning.loggers.base import DummyLogger -from pytorch_lightning import _logger as log from pytorch_lightning.utilities.cloud_io import get_filesystem +log = logging.getLogger(__name__) + def scale_batch_size(trainer, model: LightningModule, diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py index 2982454d02f70..c037a51f88d38 100644 --- a/pytorch_lightning/tuner/lr_finder.py +++ b/pytorch_lightning/tuner/lr_finder.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import importlib +import logging import os from typing import List, Optional, Sequence, Union, Callable from functools import wraps @@ -22,7 +23,6 @@ from torch.optim.lr_scheduler import _LRScheduler from torch.utils.data import DataLoader -from pytorch_lightning import _logger as log from pytorch_lightning.callbacks import Callback from pytorch_lightning.core.datamodule import LightningDataModule from pytorch_lightning.core.lightning import LightningModule @@ -39,6 +39,8 @@ else: from tqdm import tqdm +log = logging.getLogger(__name__) + def _run_lr_finder_internally(trainer, model: LightningModule): """ Call lr finder internally during Trainer.fit() """ diff --git a/pytorch_lightning/utilities/distributed.py b/pytorch_lightning/utilities/distributed.py index 2a0b989e9b9cd..33ffcbbb9b4d5 100644 --- a/pytorch_lightning/utilities/distributed.py +++ b/pytorch_lightning/utilities/distributed.py @@ -19,7 +19,8 @@ import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) if torch.distributed.is_available(): from torch.distributed import ReduceOp, group diff --git a/pytorch_lightning/utilities/seed.py b/pytorch_lightning/utilities/seed.py index 16bc39bd7f142..b235e9420d59c 100644 --- a/pytorch_lightning/utilities/seed.py +++ b/pytorch_lightning/utilities/seed.py @@ -14,15 +14,18 @@ """Helper functions to help with reproducibility of models. 
""" +import logging import os import random from typing import Optional import numpy as np import torch -from pytorch_lightning import _logger as log + from pytorch_lightning.utilities import rank_zero_warn +log = logging.getLogger(__name__) + def seed_everything(seed: Optional[int] = None) -> int: """ diff --git a/pytorch_lightning/utilities/upgrade_checkpoint.py b/pytorch_lightning/utilities/upgrade_checkpoint.py index 2e767542cd9bd..4896845f10263 100644 --- a/pytorch_lightning/utilities/upgrade_checkpoint.py +++ b/pytorch_lightning/utilities/upgrade_checkpoint.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. import argparse +import logging from shutil import copyfile import torch -from pytorch_lightning import _logger as log from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint KEYS_MAPPING = { @@ -27,6 +27,8 @@ "early_stop_callback_patience": (EarlyStopping, "patience"), } +log = logging.getLogger(__name__) + def upgrade_checkpoint(filepath): checkpoint = torch.load(filepath) diff --git a/tests/__init__.py b/tests/__init__.py index b4a7291dfec66..e035d738ce2be 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging import os import numpy as np @@ -31,3 +32,6 @@ if not os.path.isdir(TEMP_PATH): os.mkdir(TEMP_PATH) + + +logging.basicConfig(level=logging.ERROR) diff --git a/tests/callbacks/test_early_stopping.py b/tests/callbacks/test_early_stopping.py index 5c54f6a84805d..6fa7e5f3567b6 100644 --- a/tests/callbacks/test_early_stopping.py +++ b/tests/callbacks/test_early_stopping.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging import os import pickle from unittest import mock @@ -20,11 +21,13 @@ import pytest import torch -from pytorch_lightning import _logger, seed_everything, Trainer +from pytorch_lightning import seed_everything, Trainer from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import BoringModel, EvalModelTemplate +_logger = logging.getLogger(__name__) + class EarlyStoppingTestRestore(EarlyStopping): # this class has to be defined outside the test function, otherwise we get pickle error diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py index 3de26ef1a6fb6..f2a01ee8e8b71 100644 --- a/tests/checkpointing/test_model_checkpoint.py +++ b/tests/checkpointing/test_model_checkpoint.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging import os import pickle import platform @@ -570,7 +571,8 @@ def test_model_checkpoint_save_last_warning(tmpdir, caplog, max_epochs, should_v save_top_k=0, save_last=save_last)], max_epochs=max_epochs, ) - trainer.fit(model) + with caplog.at_level(logging.INFO): + trainer.fit(model) assert caplog.messages.count('Saving latest checkpoint...') == save_last diff --git a/tests/test_profiler.py b/tests/test_profiler.py index 4728b11582dfc..ec9834458ae60 100644 --- a/tests/test_profiler.py +++ b/tests/test_profiler.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import logging import os import time from pathlib import Path @@ -99,7 +99,8 @@ def test_simple_profiler_overhead(simple_profiler, n_iter=5): def test_simple_profiler_describe(caplog, simple_profiler): """Ensure the profiler won't fail when reporting the summary.""" - simple_profiler.describe() + with caplog.at_level(logging.INFO): + simple_profiler.describe() assert "Profiler Report" in caplog.text
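
The core of the fix is the new handler/propagation logic in `pytorch_lightning/__init__.py`: a `StreamHandler` is attached to the package logger only when the application has not configured the root logger; otherwise records simply propagate up and are emitted once by the root handlers. Below is a minimal, self-contained sketch of that behaviour — the logger names are illustrative stand-ins, not part of the patch:

```python
import logging
import sys

def emit(name: str) -> None:
    logging.getLogger(name).info("GPU available: True, used: False")

# The application configures the root logger, as many training scripts do.
logging.basicConfig(level=logging.INFO)

# Old behaviour (before this patch): the package logger always received its own
# StreamHandler, so every record was printed twice -- once by that handler and
# once more after propagating to the root handler installed by basicConfig().
old = logging.getLogger("old_lightning")  # illustrative stand-in for the package logger
old.setLevel(logging.INFO)
old.addHandler(logging.StreamHandler(sys.stderr))
emit("old_lightning")  # -> the message appears twice on the console

# New behaviour (this patch): attach a handler only if the root logger has no
# handlers, and in that case stop propagation; otherwise defer to the root handlers.
root = logging.getLogger()
new = logging.getLogger("new_lightning")  # illustrative stand-in
new.setLevel(logging.INFO)
if not root.hasHandlers():
    new.addHandler(logging.StreamHandler())
    new.propagate = False
emit("new_lightning")  # -> the message appears exactly once
```

Because this check runs at import time, which branch is taken depends on whether `import pytorch_lightning` happens before or after the application configures the root logger. User-facing configuration now goes through the standard logging hierarchy, as shown in the updated `docs/source/logging.rst` above.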
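
The test changes follow the same logic: `tests/__init__.py` now configures the root logger via `logging.basicConfig(level=logging.ERROR)`, so tests that assert on console output wrap the call under test in `caplog.at_level(logging.INFO)` to capture records routed through the standard hierarchy. A minimal sketch of the pattern (the message string mirrors the one asserted in `test_model_checkpoint.py`; this is an illustration, not a test from the patch):

```python
import logging

def test_info_messages_are_captured(caplog):
    # pytest's caplog fixture captures via a handler on the root logger;
    # at_level() temporarily lowers the threshold so that INFO records
    # propagated from pytorch_lightning.* loggers are recorded.
    with caplog.at_level(logging.INFO):
        logging.getLogger("pytorch_lightning.callbacks.model_checkpoint").info(
            "Saving latest checkpoint..."
        )
    assert "Saving latest checkpoint..." in caplog.text
```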