fix duplicate console logging bug / properly configure logging #5509

Merged: 24 commits, Jan 25, 2021
Changes from 13 commits
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -27,6 +27,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed `reinit_scheduler_properties` with correct optimizer ([#5519](https://github.com/PyTorchLightning/pytorch-lightning/pull/5519))


- Fixed duplicate logs appearing in console when using the python logging module ([#5509](https://github.com/PyTorchLightning/pytorch-lightning/pull/5509))


## [1.1.4] - 2021-01-12

### Added
12 changes: 9 additions & 3 deletions docs/source/logging.rst
@@ -259,13 +259,19 @@ Configure console logging
*************************

Lightning logs useful information about the training process and user warnings to the console.
You can retrieve the Lightning logger and change it to your liking. For example, increase the logging level
to see fewer messages like so:
You can retrieve the Lightning logger and change it to your liking. For example, adjust the logging level
or redirect output for certain modules to log files:

.. code-block:: python

import logging
logging.getLogger("lightning").setLevel(logging.ERROR)

# configure logging at the root level of lightning
logging.getLogger("pytorch_lightning").setLevel(logging.ERROR)

# configure logging on module level, redirect to file
logger = logging.getLogger("pytorch_lightning.core")
logger.addHandler(logging.FileHandler("core.log"))

Read more about custom Python logging `here <https://docs.python.org/3/library/logging.html>`_.
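
For context, duplicate console output of the kind this PR fixes typically arises from standard Python ``logging`` behaviour: a record sent to a logger that owns its own handler also propagates to any handlers on the root logger, so the same message is printed twice. A minimal sketch of the pre-PR situation (not part of the diff; the message text is illustrative):

.. code-block:: python

    import logging
    import sys

    # root handler, e.g. set up by the user's own application code
    logging.basicConfig(stream=sys.stdout, format="root | %(message)s", level=logging.INFO)

    # pre-PR pattern: a package logger with its own StreamHandler attached
    lib_log = logging.getLogger("lightning")
    lib_log.addHandler(logging.StreamHandler(sys.stdout))
    lib_log.setLevel(logging.INFO)

    # emitted once by the "lightning" handler and once more by the root handler
    lib_log.info("GPU available: True")

Dropping the extra handler and configuring logging once at the root, as done in ``pytorch_lightning/__init__.py`` below, means each record reaches exactly one handler.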

@@ -40,6 +40,7 @@
"""

import argparse
import logging
from collections import OrderedDict
from pathlib import Path
from tempfile import TemporaryDirectory
@@ -58,8 +59,8 @@

import pytorch_lightning as pl
from pl_examples import cli_lightning_logo
from pytorch_lightning import _logger as log

log = logging.getLogger(__name__)
BN_TYPES = (torch.nn.BatchNorm1d, torch.nn.BatchNorm2d, torch.nn.BatchNorm3d)
DATA_URL = "https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip"

14 changes: 7 additions & 7 deletions pytorch_lightning/__init__.py
@@ -2,6 +2,7 @@

import logging as python_logging
import os
import sys
import time

_this_year = time.strftime("%Y")
@@ -37,10 +38,11 @@
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""

_logger = python_logging.getLogger("lightning")
_logger.addHandler(python_logging.StreamHandler())
_logger.setLevel(python_logging.INFO)
python_logging.basicConfig(
stream=sys.stdout,
format="%(message)s",
level=python_logging.INFO,
)

PACKAGE_ROOT = os.path.dirname(__file__)
PROJECT_ROOT = os.path.dirname(PACKAGE_ROOT)
@@ -53,9 +55,7 @@
except NameError:
__LIGHTNING_SETUP__: bool = False

if __LIGHTNING_SETUP__:
import sys # pragma: no-cover

if __LIGHTNING_SETUP__: # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
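
The pattern repeated across the remaining files pairs this single root-level ``basicConfig`` call with a per-module ``logging.getLogger(__name__)``. A sketch of how the two pieces interact (the logger name and messages below are illustrative, not taken from the diff):

.. code-block:: python

    import logging
    import sys

    # done once at package import time (mirrors the basicConfig call above)
    logging.basicConfig(stream=sys.stdout, format="%(message)s", level=logging.INFO)

    # done in each module; __name__ resolves to the dotted module path,
    # e.g. "pytorch_lightning.core.lightning"
    log = logging.getLogger("pytorch_lightning.core.lightning")

    log.info("emitted once, through the root handler")

    # users can tune output for a whole subtree without touching handlers
    logging.getLogger("pytorch_lightning.core").setLevel(logging.ERROR)
    log.info("filtered out: the effective level for this logger is now ERROR")

Because no module attaches its own handler, every record flows only through the root configuration, which is what removes the duplication.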
5 changes: 4 additions & 1 deletion pytorch_lightning/accelerators/accelerator_connector.py
@@ -11,12 +11,13 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os

import torch

from pytorch_lightning.utilities import HOROVOD_AVAILABLE
from pytorch_lightning import _logger as log

from pytorch_lightning import accelerators
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment
@@ -28,6 +29,8 @@
if HOROVOD_AVAILABLE:
import horovod.torch as hvd

log = logging.getLogger(__name__)
Review comment (Member): let's be consistent and use the same name as you defined in __init__:

_logger = python_logging.getLogger(__name__)

class AcceleratorConnector:

3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/ddp2_accelerator.py
@@ -11,14 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import logging
import os
from typing import Any, List, Optional, Union

import torch
import torch.distributed as torch_distrib
from torch.nn.parallel import DistributedDataParallel

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core.lightning import LightningModule
@@ -29,6 +29,7 @@
from pytorch_lightning.utilities import AMPType, HYDRA_AVAILABLE
from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available

log = logging.getLogger(__name__)
if HYDRA_AVAILABLE:
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path
3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/ddp_accelerator.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import logging
import os
from os.path import abspath
import subprocess
@@ -23,7 +24,6 @@
import torch.distributed as torch_distrib
from torch.nn.parallel import DistributedDataParallel

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core.lightning import LightningModule
@@ -40,6 +40,7 @@
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.seed import seed_everything

log = logging.getLogger(__name__)
if HYDRA_AVAILABLE:
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path
@@ -13,7 +13,6 @@
# limitations under the License
from typing import Optional

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.plugins.ddp_plugin import DDPPlugin
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import logging
import os
from typing import Any, List, Optional, Union

@@ -19,7 +20,6 @@
import torch.multiprocessing as mp
from torch.nn.parallel import DistributedDataParallel

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core.lightning import LightningModule
@@ -35,6 +35,7 @@
sync_ddp_if_available,
)

log = logging.getLogger(__name__)
if HYDRA_AVAILABLE:
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path
3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/ddp_hpc_accelerator.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import logging
import os
from typing import Any, List, Optional, Union

@@ -19,7 +20,6 @@
import torch.distributed as torch_distrib
from torch.nn.parallel import DistributedDataParallel

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core.lightning import LightningModule
@@ -29,6 +29,7 @@
from pytorch_lightning.utilities import AMPType, HYDRA_AVAILABLE
from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available

log = logging.getLogger(__name__)
if HYDRA_AVAILABLE:
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path
3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/ddp_spawn_accelerator.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import logging
import os
import re
from typing import Any, List, Optional, Union
@@ -20,7 +21,6 @@
import torch.multiprocessing as mp
from torch.nn.parallel import DistributedDataParallel

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core.lightning import LightningModule
@@ -39,6 +39,7 @@
)
from pytorch_lightning.utilities.seed import seed_everything

log = logging.getLogger(__name__)
if HYDRA_AVAILABLE:
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path
1 change: 0 additions & 1 deletion pytorch_lightning/accelerators/dp_accelerator.py
@@ -16,7 +16,6 @@
import torch
from torch import optim

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core.lightning import LightningModule
1 change: 0 additions & 1 deletion pytorch_lightning/accelerators/gpu_accelerator.py
@@ -15,7 +15,6 @@

import torch

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.distributed.dist import LightningDistributed
1 change: 0 additions & 1 deletion pytorch_lightning/accelerators/horovod_accelerator.py
@@ -17,7 +17,6 @@
import torch
from torch.optim.lr_scheduler import _LRScheduler

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.utilities import AMPType, HOROVOD_AVAILABLE
3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/tpu_accelerator.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import logging
import os
import re
from typing import Any, Callable, Optional, Union
@@ -20,7 +21,6 @@
import torch.multiprocessing as mp
from torch.optim import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.cluster_environments import ClusterEnvironment
from pytorch_lightning.core import LightningModule
@@ -35,6 +35,7 @@
from pytorch_lightning.utilities.cloud_io import atomic_save
from pytorch_lightning.utilities.exceptions import MisconfigurationException

log = logging.getLogger(__name__)
if TPU_AVAILABLE:
import torch_xla
import torch_xla.core.xla_model as xm
1 change: 0 additions & 1 deletion pytorch_lightning/callbacks/early_stopping.py
@@ -25,7 +25,6 @@
import numpy as np
import torch

from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.metrics.metric import Metric
from pytorch_lightning.utilities import rank_zero_info, rank_zero_warn, TPU_AVAILABLE
5 changes: 3 additions & 2 deletions pytorch_lightning/callbacks/model_checkpoint.py
@@ -19,7 +19,7 @@
Automatically save model checkpoints during training.

"""

import logging
import numbers
import os
import re
@@ -31,13 +31,14 @@
import torch
import yaml

from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.metrics.metric import Metric
from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.exceptions import MisconfigurationException

log = logging.getLogger(__name__)


class ModelCheckpoint(Callback):
r"""
6 changes: 4 additions & 2 deletions pytorch_lightning/cluster_environments/slurm_environment.py
@@ -11,12 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
import re
from pytorch_lightning import _logger as log

from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment

log = logging.getLogger(__name__)


class SLURMEnvironment(ClusterEnvironment):

@@ -11,12 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
from pytorch_lightning import _logger as log

from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment

log = logging.getLogger(__name__)


class TorchElasticEnvironment(ClusterEnvironment):

3 changes: 2 additions & 1 deletion pytorch_lightning/core/lightning.py
@@ -17,6 +17,7 @@
import collections
import copy
import inspect
import logging
import os
import re
import tempfile
@@ -30,7 +31,6 @@
from torch.nn import Module
from torch.optim.optimizer import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.core.grads import GradInformation
from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks
from pytorch_lightning.core.memory import ModelSummary
@@ -42,6 +42,7 @@
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args

log = logging.getLogger(__name__)
if TPU_AVAILABLE:
import torch_xla.core.xla_model as xm

3 changes: 2 additions & 1 deletion pytorch_lightning/core/saving.py
@@ -15,6 +15,7 @@
import ast
import csv
import inspect
import logging
import os
from argparse import Namespace
from copy import deepcopy
@@ -25,13 +26,13 @@
import torch
import yaml

from pytorch_lightning import _logger as log
from pytorch_lightning.utilities import AttributeDict, OMEGACONF_AVAILABLE, rank_zero_warn
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.cloud_io import load as pl_load
from pytorch_lightning.utilities.parsing import parse_class_init_keys

log = logging.getLogger(__name__)
PRIMITIVE_TYPES = (bool, int, float, str)
ALLOWED_CONFIG_TYPES = (AttributeDict, MutableMapping, Namespace)
