update logging docs and decorators (#4431)

* update logging docs

* experiment

* add decorators to base and csv logger methods

* fix

* doc fix

* update docs

* update docs

* Update pytorch_lightning/loggers/base.py

Co-authored-by: chaton <thomas@grid.ai>
Rohit Gupta 2020-12-01 11:35:00 +05:30 committed by GitHub
parent c2e6e68c7e
commit ef762a0d2a
4 changed files with 25 additions and 16 deletions

docs/source/logging.rst

@@ -124,24 +124,28 @@ Once your training starts, you can view the logs by using your favorite logger o
 Make a custom logger
 ********************
 
-You can implement your own logger by writing a class that inherits from
-:class:`LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only`
-decorator to make sure that only the first process in DDP training logs data.
+You can implement your own logger by writing a class that inherits from :class:`~pytorch_lightning.loggers.base.LightningLoggerBase`.
+Use the :func:`~pytorch_lightning.loggers.base.rank_zero_experiment` and :func:`~pytorch_lightning.utilities.distributed.rank_zero_only` decorators to make sure that only the first process in DDP training creates the experiment and logs the data respectively.
 
 .. testcode::
 
     from pytorch_lightning.utilities import rank_zero_only
     from pytorch_lightning.loggers import LightningLoggerBase
+    from pytorch_lightning.loggers.base import rank_zero_experiment
 
     class MyLogger(LightningLoggerBase):
 
         @property
         def name(self):
             return 'MyLogger'
 
         @property
+        @rank_zero_experiment
         def experiment(self):
             # Return the experiment object associated with this logger.
             pass
 
         @property
         def version(self):
             # Return the experiment version, int or str.
             return '0.1'
@@ -158,6 +162,7 @@ decorator to make sure that only the first process in DDP training logs data.
             # your code to record metrics goes here
             pass
 
+        @rank_zero_only
         def save(self):
             # Optional. Any code necessary to save logger data goes here
             # If you implement this, remember to call `super().save()`
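
To see the two decorators from this docs change working together end to end, here is a minimal, self-contained sketch. The names HistoryLogger and _InMemoryExperiment are hypothetical, not part of the diff: experiment creation is guarded by rank_zero_experiment, metric writes by rank_zero_only.

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import LightningLoggerBase
    from pytorch_lightning.loggers.base import rank_zero_experiment
    from pytorch_lightning.utilities import rank_zero_only


    class _InMemoryExperiment:
        """Hypothetical experiment object: just collects (step, metrics) pairs."""

        def __init__(self):
            self.records = []

        def log(self, metrics, step):
            self.records.append((step, dict(metrics)))


    class HistoryLogger(LightningLoggerBase):
        """Hypothetical logger following the documented pattern."""

        def __init__(self):
            super().__init__()
            self._experiment = None

        @property
        def name(self):
            return "history"

        @property
        def version(self):
            return "0.1"

        @property
        @rank_zero_experiment
        def experiment(self):
            # Only rank 0 ever runs this body; other ranks receive
            # a DummyExperiment from the decorator instead.
            if self._experiment is None:
                self._experiment = _InMemoryExperiment()
            return self._experiment

        @rank_zero_only
        def log_hyperparams(self, params):
            pass

        @rank_zero_only
        def log_metrics(self, metrics, step):
            self.experiment.log(metrics, step)


    # trainer = Trainer(logger=HistoryLogger())  # then trainer.fit(model)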

pytorch_lightning/loggers/base.py

@@ -29,6 +29,17 @@ from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.utilities import rank_zero_only
 
 
+def rank_zero_experiment(fn: Callable) -> Callable:
+    """ Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
+    @wraps(fn)
+    def experiment(self):
+        @rank_zero_only
+        def get_experiment():
+            return fn(self)
+        return get_experiment() or DummyExperiment()
+    return experiment
+
+
 class LightningLoggerBase(ABC):
     """
     Base class for experiment loggers.
@@ -410,9 +421,11 @@ class DummyLogger(LightningLoggerBase):
     def experiment(self):
         return self._experiment
 
+    @rank_zero_only
     def log_metrics(self, metrics, step):
         pass
 
+    @rank_zero_only
     def log_hyperparams(self, params):
         pass
@@ -477,14 +490,3 @@ def merge_dicts(
             d_out[k] = (fn or default_func)(values_to_agg)
 
     return d_out
-
-
-def rank_zero_experiment(fn: Callable) -> Callable:
-    """ Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
-    @wraps(fn)
-    def experiment(self):
-        @rank_zero_only
-        def get_experiment():
-            return fn(self)
-        return get_experiment() or DummyExperiment()
-    return experiment
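
Worth noting how the moved rank_zero_experiment helper works: rank_zero_only makes get_experiment return None on every rank except 0, so the `or` falls back to a DummyExperiment whose methods are all no-ops. A side effect of the `or` is that a falsy experiment object (say, an empty list) would be replaced by the dummy even on rank 0. Below is a simplified, self-contained sketch of the mechanism; the LOCAL_RANK environment check and the DemoLogger name are illustrative stand-ins, not the actual Lightning internals.

    import os
    from functools import wraps


    class DummyExperiment:
        """No-op stand-in: absorbs any method call on non-zero ranks."""

        def __getattr__(self, _):
            return lambda *args, **kwargs: None


    def rank_zero_only(fn):
        @wraps(fn)
        def wrapped(*args, **kwargs):
            # Illustrative rank check; Lightning resolves the global rank itself.
            if int(os.environ.get("LOCAL_RANK", 0)) == 0:
                return fn(*args, **kwargs)
            return None  # non-zero ranks fall through to the `or` fallback

        return wrapped


    def rank_zero_experiment(fn):
        @wraps(fn)
        def experiment(self):
            @rank_zero_only
            def get_experiment():
                return fn(self)

            # Real experiment on rank 0; every other rank gets the dummy.
            return get_experiment() or DummyExperiment()

        return experiment


    class DemoLogger:
        @property
        @rank_zero_experiment
        def experiment(self):
            return {"run_id": 1}  # pretend this is a real client object


    print(DemoLogger().experiment)  # dict on rank 0, DummyExperiment otherwise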

pytorch_lightning/loggers/csv_logs.py

@@ -29,7 +29,7 @@ import torch
 
 from pytorch_lightning import _logger as log
 from pytorch_lightning.core.saving import save_hparams_to_yaml
-from pytorch_lightning.loggers.base import LightningLoggerBase
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
 from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
@@ -162,6 +162,7 @@ class CSVLogger(LightningLoggerBase):
         return self._save_dir
 
     @property
+    @rank_zero_experiment
     def experiment(self) -> ExperimentWriter:
         r"""

pytorch_lightning/loggers/wandb.py

@@ -170,6 +170,7 @@ class WandbLogger(LightningLoggerBase):
         # don't create an experiment if we don't have one
         return self._experiment.id if self._experiment else self._id
 
+    @rank_zero_only
     def finalize(self, status: str) -> None:
         # offset future training logged on same W&B run
         if self._experiment is not None:
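
Here finalize() gets the same guard, so only rank 0 touches the shared W&B run at teardown. A short usage sketch; the project name is illustrative:

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import WandbLogger

    wandb_logger = WandbLogger(project="my-project")  # hypothetical project
    trainer = Trainer(logger=wandb_logger)
    # trainer.fit(model); at teardown, finalize() is a no-op on non-zero ranks.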