fix duplicate console logging bug v2 (#6275)
Co-authored-by: chaton <thomas@grid.ai>
Co-authored-by: Jirka Borovec <Borda@users.noreply.github.com>
3 people committed Mar 9, 2021
1 parent a17a29c commit e8be96c
Showing 34 changed files with 104 additions and 314 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -51,6 +51,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed error thrown when using valid distributed mode in multi node ([#6297](https://github.com/PyTorchLightning/pytorch-lightning/pull/6297)


- Fixed duplicate logs appearing in console when using the python logging module ([#5509](https://github.com/PyTorchLightning/pytorch-lightning/pull/5509), [#6275](https://github.com/PyTorchLightning/pytorch-lightning/pull/6275))


## [1.2.1] - 2021-02-23

### Fixed
14 changes: 10 additions & 4 deletions docs/source/extensions/logging.rst
@@ -259,13 +259,19 @@ Configure console logging
*************************

Lightning logs useful information about the training process and user warnings to the console.
You can retrieve the Lightning logger and change it to your liking. For example, increase the logging level
to see fewer messages like so:
You can retrieve the Lightning logger and change it to your liking. For example, adjust the logging level
or redirect output for certain modules to log files:

.. code-block:: python
.. testcode::

import logging
logging.getLogger("lightning").setLevel(logging.ERROR)

# configure logging at the root level of lightning
logging.getLogger("pytorch_lightning").setLevel(logging.ERROR)

# configure logging on module level, redirect to file
logger = logging.getLogger("pytorch_lightning.core")
logger.addHandler(logging.FileHandler("core.log"))

Read more about custom Python logging `here <https://docs.python.org/3/library/logging.html>`_.
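
As a rough usage sketch (not part of the documentation change above), the per-module loggers also cooperate with a root-level configuration: because the package logger now propagates to the root logger whenever root handlers exist (see the `pytorch_lightning/__init__.py` change below), configuring the root logger before importing Lightning yields a single, root-formatted copy of each message:

import logging

# Assumed workflow, not from the committed docs: configure the root logger first,
# then import Lightning. At import time Lightning detects the existing root handler,
# skips attaching its own StreamHandler, and lets records propagate upward.
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)s: %(message)s")

import pytorch_lightning as pl  # noqa: E402 -- imported after logging is configured

# messages from any "pytorch_lightning.*" logger now appear exactly once,
# formatted by the root handler configured above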

3 changes: 2 additions & 1 deletion pl_examples/domain_templates/computer_vision_fine_tuning.py
@@ -38,6 +38,7 @@
See: https://pytorch.org/tutorials/beginner/transfer_learning_tutorial.html
"""
import argparse
import logging
import os
from pathlib import Path
from typing import Union
@@ -54,11 +55,11 @@

import pytorch_lightning as pl
from pl_examples import cli_lightning_logo
from pytorch_lightning import _logger as log
from pytorch_lightning import LightningDataModule
from pytorch_lightning.callbacks.finetuning import BaseFinetuning
from pytorch_lightning.utilities import rank_zero_info

log = logging.getLogger(__name__)
DATA_URL = "https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip"

# --- Finetuning Callback ---
17 changes: 10 additions & 7 deletions pytorch_lightning/__init__.py
@@ -1,7 +1,8 @@
"""Root package info."""

import logging as python_logging
import logging
import os
import sys
import time

_this_year = time.strftime("%Y")
@@ -37,10 +38,14 @@
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
_root_logger = logging.getLogger()
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)

_logger = python_logging.getLogger("lightning")
_logger.addHandler(python_logging.StreamHandler())
_logger.setLevel(python_logging.INFO)
# if root logger has handlers, propagate messages up and let root logger process them
if not _root_logger.hasHandlers():
_logger.addHandler(logging.StreamHandler())
_logger.propagate = False

_PACKAGE_ROOT = os.path.dirname(__file__)
_PROJECT_ROOT = os.path.dirname(_PACKAGE_ROOT)
@@ -53,9 +58,7 @@
except NameError:
__LIGHTNING_SETUP__: bool = False

if __LIGHTNING_SETUP__:
import sys # pragma: no-cover

if __LIGHTNING_SETUP__: # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
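The handler guard above is the heart of the fix: the `pytorch_lightning` logger only attaches its own `StreamHandler` (and stops propagation) when the root logger is unconfigured; otherwise records flow up to the user's root handlers and are printed once. A standalone sketch of the same pattern, with illustrative names that are not part of this commit:

import logging

def _configure_package_logger(name: str = "mypackage") -> logging.Logger:
    # Hypothetical helper mirroring the logic added to pytorch_lightning/__init__.py:
    # add a console handler only if the root logger has no handlers of its own,
    # otherwise rely on propagation so the root configuration emits each record once.
    root_logger = logging.getLogger()
    package_logger = logging.getLogger(name)
    package_logger.setLevel(logging.INFO)
    if not root_logger.hasHandlers():
        package_logger.addHandler(logging.StreamHandler())
        package_logger.propagate = False
    return package_logger

log = _configure_package_logger()
log.info("emitted once, with or without a user-configured root logger")
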
4 changes: 3 additions & 1 deletion pytorch_lightning/callbacks/finetuning.py
@@ -16,6 +16,7 @@
^^^^^^^^^^^^^^^^^^^^
Freeze and unfreeze models for finetuning purposes
"""
import logging
from typing import Callable, Generator, Iterable, List, Optional, Union

import torch
@@ -24,12 +25,13 @@
from torch.nn.modules.container import Container, ModuleDict, ModuleList, Sequential
from torch.optim.optimizer import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException

log = logging.getLogger(__name__)


def multiplicative(epoch):
return 2
4 changes: 2 additions & 2 deletions pytorch_lightning/callbacks/model_checkpoint.py
@@ -18,7 +18,7 @@
Automatically save model checkpoints during training.
"""

import logging
import os
import re
from copy import deepcopy
@@ -29,13 +29,13 @@
import torch
import yaml

from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.warnings import WarningCache

log = logging.getLogger(__name__)
warning_cache = WarningCache()


4 changes: 3 additions & 1 deletion pytorch_lightning/callbacks/pruning.py
@@ -16,6 +16,7 @@
^^^^^^^^^^^^
"""
import inspect
import logging
from copy import deepcopy
from functools import partial
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
@@ -24,12 +25,13 @@
import torch.nn.utils.prune as pytorch_prune
from torch import nn

from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.utilities.distributed import rank_zero_debug, rank_zero_only
from pytorch_lightning.utilities.exceptions import MisconfigurationException

log = logging.getLogger(__name__)

_PYTORCH_PRUNING_FUNCTIONS = {
"ln_structured": pytorch_prune.ln_structured,
"l1_unstructured": pytorch_prune.l1_unstructured,
4 changes: 3 additions & 1 deletion pytorch_lightning/core/lightning.py
@@ -16,6 +16,7 @@
import collections
import copy
import inspect
import logging
import os
import re
import tempfile
@@ -31,7 +32,6 @@
from torch.nn import Module
from torch.optim.optimizer import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.core.grads import GradInformation
from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks
from pytorch_lightning.core.memory import ModelSummary
@@ -44,6 +44,8 @@
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args

log = logging.getLogger(__name__)


class LightningModule(
ABC,
3 changes: 2 additions & 1 deletion pytorch_lightning/core/saving.py
@@ -15,6 +15,7 @@
import ast
import csv
import inspect
import logging
import os
from argparse import Namespace
from copy import deepcopy
@@ -25,13 +26,13 @@
import torch
import yaml

from pytorch_lightning import _logger as log
from pytorch_lightning.utilities import _OMEGACONF_AVAILABLE, AttributeDict, rank_zero_warn
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.cloud_io import load as pl_load
from pytorch_lightning.utilities.parsing import parse_class_init_keys

log = logging.getLogger(__name__)
PRIMITIVE_TYPES = (bool, int, float, str)
ALLOWED_CONFIG_TYPES = (AttributeDict, MutableMapping, Namespace)

3 changes: 2 additions & 1 deletion pytorch_lightning/loggers/comet.py
@@ -16,19 +16,20 @@
------------
"""

import logging
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Union

import torch
from torch import is_tensor

from pytorch_lightning import _logger as log
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only
from pytorch_lightning.utilities.exceptions import MisconfigurationException

log = logging.getLogger(__name__)
_COMET_AVAILABLE = _module_available("comet_ml")

if _COMET_AVAILABLE:
4 changes: 3 additions & 1 deletion pytorch_lightning/loggers/csv_logs.py
@@ -20,17 +20,19 @@
"""
import csv
import io
import logging
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Union

import torch

from pytorch_lightning import _logger as log
from pytorch_lightning.core.saving import save_hparams_to_yaml
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn

log = logging.getLogger(__name__)


class ExperimentWriter(object):
r"""
4 changes: 2 additions & 2 deletions pytorch_lightning/loggers/mlflow.py
@@ -15,17 +15,17 @@
MLflow Logger
-------------
"""
import logging
import re
from argparse import Namespace
from time import time
from typing import Any, Dict, Optional, Union

from pytorch_lightning import _logger as log
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only, rank_zero_warn

log = logging.getLogger(__name__)
LOCAL_FILE_URI_PREFIX = "file:"

_MLFLOW_AVAILABLE = _module_available("mlflow")
try:
import mlflow
3 changes: 2 additions & 1 deletion pytorch_lightning/loggers/neptune.py
@@ -15,16 +15,17 @@
Neptune Logger
--------------
"""
import logging
from argparse import Namespace
from typing import Any, Dict, Iterable, Optional, Union

import torch
from torch import is_tensor

from pytorch_lightning import _logger as log
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only

log = logging.getLogger(__name__)
_NEPTUNE_AVAILABLE = _module_available("neptune")

if _NEPTUNE_AVAILABLE:
4 changes: 3 additions & 1 deletion pytorch_lightning/loggers/tensorboard.py
@@ -16,6 +16,7 @@
------------------
"""

import logging
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Union
@@ -24,13 +25,14 @@
from torch.utils.tensorboard import SummaryWriter
from torch.utils.tensorboard.summary import hparams

from pytorch_lightning import _logger as log
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.core.saving import save_hparams_to_yaml
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _OMEGACONF_AVAILABLE, rank_zero_only, rank_zero_warn
from pytorch_lightning.utilities.cloud_io import get_filesystem

log = logging.getLogger(__name__)

if _OMEGACONF_AVAILABLE:
from omegaconf import Container, OmegaConf

4 changes: 3 additions & 1 deletion pytorch_lightning/plugins/environments/slurm_environment.py
@@ -12,12 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
import re

from pytorch_lightning import _logger as log
from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment

log = logging.getLogger(__name__)


class SLURMEnvironment(ClusterEnvironment):

pytorch_lightning/plugins/environments/torchelastic_environment.py
@@ -12,12 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os

from pytorch_lightning import _logger as log
from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
from pytorch_lightning.utilities import rank_zero_warn

log = logging.getLogger(__name__)


class TorchElasticEnvironment(ClusterEnvironment):

4 changes: 3 additions & 1 deletion pytorch_lightning/plugins/training_type/ddp.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import subprocess
import sys
@@ -23,7 +24,6 @@
from torch.nn.parallel.distributed import DistributedDataParallel
from torch.optim import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.distributed import LightningDistributed
from pytorch_lightning.overrides import LightningDistributedModule
from pytorch_lightning.overrides.distributed import prepare_for_backward
@@ -43,6 +43,8 @@
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path

log = logging.getLogger(__name__)


class DDPPlugin(ParallelPlugin):
"""
4 changes: 3 additions & 1 deletion pytorch_lightning/plugins/training_type/ddp_spawn.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import re
from typing import Any, Dict, List, Optional, Union
@@ -21,7 +22,6 @@
from torch.nn.parallel.distributed import DistributedDataParallel
from torch.optim import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.distributed.dist import LightningDistributed
from pytorch_lightning.overrides import LightningDistributedModule
from pytorch_lightning.overrides.distributed import prepare_for_backward
@@ -39,6 +39,8 @@
)
from pytorch_lightning.utilities.seed import seed_everything

log = logging.getLogger(__name__)


class DDPSpawnPlugin(ParallelPlugin):
