update logging docs and decorators (#4431)
* update logging docs

* experiment

* add decorators to base and csv logger methods

* fix

* doc fix

* update docs

* update docs

* Update pytorch_lightning/loggers/base.py

Co-authored-by: chaton <thomas@grid.ai>
rohitgr7 and tchaton authored Dec 1, 2020
1 parent c2e6e68 commit ef762a0
Showing 4 changed files with 25 additions and 16 deletions.
13 changes: 9 additions & 4 deletions docs/source/logging.rst
@@ -124,24 +124,28 @@ Once your training starts, you can view the logs by using your favorite logger o
Make a custom logger
********************

-You can implement your own logger by writing a class that inherits from
-:class:`LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only`
-decorator to make sure that only the first process in DDP training logs data.
+You can implement your own logger by writing a class that inherits from :class:`~pytorch_lightning.loggers.base.LightningLoggerBase`.
+Use the :func:`~pytorch_lightning.loggers.base.rank_zero_experiment` and :func:`~pytorch_lightning.utilities.distributed.rank_zero_only` decorators to make sure that only the first process in DDP training creates the experiment and logs the data respectively.

.. testcode::

    from pytorch_lightning.utilities import rank_zero_only
    from pytorch_lightning.loggers import LightningLoggerBase
+    from pytorch_lightning.loggers.base import rank_zero_experiment

    class MyLogger(LightningLoggerBase):

        @property
        def name(self):
            return 'MyLogger'

        @property
+        @rank_zero_experiment
        def experiment(self):
            # Return the experiment object associated with this logger.
            pass

        @property
        def version(self):
            # Return the experiment version, int or str.
            return '0.1'
@@ -158,6 +162,7 @@ decorator to make sure that only the first process in DDP training logs data.
            # your code to record metrics goes here
            pass

+        @rank_zero_only
        def save(self):
            # Optional. Any code necessary to save logger data goes here
            # If you implement this, remember to call `super().save()`
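For reference, a minimal runnable sketch of the pattern the updated docs describe, assuming the PyTorch Lightning API at this commit; the `DictLogger` name and its dict-backed experiment are invented for illustration:

    from pytorch_lightning.loggers import LightningLoggerBase
    from pytorch_lightning.loggers.base import rank_zero_experiment
    from pytorch_lightning.utilities import rank_zero_only


    class DictLogger(LightningLoggerBase):
        """Toy logger that keeps all records in memory (illustrative only)."""

        def __init__(self):
            super().__init__()
            self._experiment = None
            self.metrics = []

        @property
        def name(self):
            return 'DictLogger'

        @property
        @rank_zero_experiment
        def experiment(self):
            # Built lazily; the decorator guarantees only rank 0 creates the
            # real object, while other ranks receive a DummyExperiment.
            if self._experiment is None:
                self._experiment = {}  # stand-in for a real experiment handle
            return self._experiment

        @property
        def version(self):
            return '0.1'

        @rank_zero_only
        def log_hyperparams(self, params):
            self.experiment['hparams'] = params

        @rank_zero_only
        def log_metrics(self, metrics, step):
            self.metrics.append((step, metrics))

An instance can then be passed straight to the trainer, e.g. `Trainer(logger=DictLogger())`.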
24 changes: 13 additions & 11 deletions pytorch_lightning/loggers/base.py
@@ -29,6 +29,17 @@
from pytorch_lightning.utilities import rank_zero_only


+def rank_zero_experiment(fn: Callable) -> Callable:
+    """ Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
+    @wraps(fn)
+    def experiment(self):
+        @rank_zero_only
+        def get_experiment():
+            return fn(self)
+        return get_experiment() or DummyExperiment()
+    return experiment


class LightningLoggerBase(ABC):
"""
Base class for experiment loggers.
@@ -410,9 +421,11 @@ def __init__(self):
    def experiment(self):
        return self._experiment

+    @rank_zero_only
    def log_metrics(self, metrics, step):
        pass

+    @rank_zero_only
    def log_hyperparams(self, params):
        pass

@@ -477,14 +490,3 @@ def merge_dicts(
        d_out[k] = (fn or default_func)(values_to_agg)

    return d_out


-def rank_zero_experiment(fn: Callable) -> Callable:
-    """ Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
-    @wraps(fn)
-    def experiment(self):
-        @rank_zero_only
-        def get_experiment():
-            return fn(self)
-        return get_experiment() or DummyExperiment()
-    return experiment
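The `get_experiment() or DummyExperiment()` fallback works because `rank_zero_only` returns the wrapped function's result on rank 0 and `None` on every other rank. A small sketch of the mechanics, setting `rank_zero_only.rank` by hand purely to simulate different ranks within one process:

    from pytorch_lightning.loggers.base import DummyExperiment, rank_zero_experiment
    from pytorch_lightning.utilities import rank_zero_only


    class Demo:
        @property
        @rank_zero_experiment
        def experiment(self):
            return 'real-experiment-handle'  # stand-in for a real experiment


    demo = Demo()

    rank_zero_only.rank = 0   # pretend this is the main process
    assert demo.experiment == 'real-experiment-handle'

    rank_zero_only.rank = 1   # pretend this is a non-zero DDP worker
    assert isinstance(demo.experiment, DummyExperiment)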
3 changes: 2 additions & 1 deletion pytorch_lightning/loggers/csv_logs.py
@@ -29,7 +29,7 @@

from pytorch_lightning import _logger as log
from pytorch_lightning.core.saving import save_hparams_to_yaml
-from pytorch_lightning.loggers.base import LightningLoggerBase
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn


@@ -162,6 +162,7 @@ def save_dir(self) -> Optional[str]:
        return self._save_dir

    @property
+    @rank_zero_experiment
    def experiment(self) -> ExperimentWriter:
        r"""
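With the decorator applied, only rank 0 ever instantiates the `ExperimentWriter`; every other rank receives a `DummyExperiment` whose methods are no-ops, so the CSV files are written exactly once. A hedged usage sketch (directory and experiment names are arbitrary):

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import CSVLogger

    logger = CSVLogger(save_dir='logs', name='my_exp')
    trainer = Trainer(logger=logger, max_epochs=1)
    # Under DDP, rank 0 writes logs/my_exp/version_0/metrics.csv;
    # the remaining ranks hit DummyExperiment no-ops instead.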
1 change: 1 addition & 0 deletions pytorch_lightning/loggers/wandb.py
@@ -170,6 +170,7 @@ def version(self) -> Optional[str]:
        # don't create an experiment if we don't have one
        return self._experiment.id if self._experiment else self._id

+    @rank_zero_only
    def finalize(self, status: str) -> None:
        # offset future training logged on same W&B run
        if self._experiment is not None:
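The same pattern covers `WandbLogger.finalize`: with `@rank_zero_only`, the call becomes a no-op on non-zero ranks, so only the main process records the step offset for future training on the same W&B run. A brief illustrative sketch (the project name is arbitrary; `offline=True` merely avoids network access):

    from pytorch_lightning.loggers import WandbLogger

    logger = WandbLogger(project='demo', offline=True)
    # ... train with Trainer(logger=logger) ...
    logger.finalize(status='success')  # no-op on every rank except 0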
