From ef762a0d2a42c65f1f0d6a154ad995b7332f2153 Mon Sep 17 00:00:00 2001
From: Rohit Gupta
Date: Tue, 1 Dec 2020 11:35:00 +0530
Subject: [PATCH] update logging docs and decorators (#4431)

* update logging docs

* experiment

* add decorators to base and csv logger methods

* fix

* doc fix

* update docs

* update docs

* Update pytorch_lightning/loggers/base.py

Co-authored-by: chaton
---
 docs/source/logging.rst               | 13 +++++++++----
 pytorch_lightning/loggers/base.py     | 24 +++++++++++++-----------
 pytorch_lightning/loggers/csv_logs.py |  3 ++-
 pytorch_lightning/loggers/wandb.py    |  1 +
 4 files changed, 25 insertions(+), 16 deletions(-)

diff --git a/docs/source/logging.rst b/docs/source/logging.rst
index 3d6ae87d01..906240ce6e 100644
--- a/docs/source/logging.rst
+++ b/docs/source/logging.rst
@@ -124,24 +124,28 @@ Once your training starts, you can view the logs by using your favorite logger o
 Make a custom logger
 ********************
 
-You can implement your own logger by writing a class that inherits from
-:class:`LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only`
-decorator to make sure that only the first process in DDP training logs data.
+You can implement your own logger by writing a class that inherits from :class:`~pytorch_lightning.loggers.base.LightningLoggerBase`.
+Use the :func:`~pytorch_lightning.loggers.base.rank_zero_experiment` and :func:`~pytorch_lightning.utilities.distributed.rank_zero_only` decorators to make sure that only the first process in DDP training creates the experiment and logs the data, respectively.
 
 .. testcode::
 
     from pytorch_lightning.utilities import rank_zero_only
     from pytorch_lightning.loggers import LightningLoggerBase
+    from pytorch_lightning.loggers.base import rank_zero_experiment
 
     class MyLogger(LightningLoggerBase):
 
+        @property
         def name(self):
             return 'MyLogger'
 
+        @property
+        @rank_zero_experiment
         def experiment(self):
             # Return the experiment object associated with this logger.
             pass
-
+
+        @property
         def version(self):
             # Return the experiment version, int or str.
             return '0.1'
@@ -158,6 +162,7 @@ decorator to make sure that only the first process in DDP training logs data.
             # your code to record metrics goes here
             pass
 
+        @rank_zero_only
         def save(self):
             # Optional. Any code necessary to save logger data goes here
             # If you implement this, remember to call `super().save()`
diff --git a/pytorch_lightning/loggers/base.py b/pytorch_lightning/loggers/base.py
index d225db665b..fc40db4e69 100644
--- a/pytorch_lightning/loggers/base.py
+++ b/pytorch_lightning/loggers/base.py
@@ -29,6 +29,17 @@ from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.utilities import rank_zero_only
 
 
+def rank_zero_experiment(fn: Callable) -> Callable:
+    """ Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
+    @wraps(fn)
+    def experiment(self):
+        @rank_zero_only
+        def get_experiment():
+            return fn(self)
+        return get_experiment() or DummyExperiment()
+    return experiment
+
+
 class LightningLoggerBase(ABC):
     """
     Base class for experiment loggers.
@@ -410,9 +421,11 @@ class DummyLogger(LightningLoggerBase):
     def experiment(self):
         return self._experiment
 
+    @rank_zero_only
     def log_metrics(self, metrics, step):
         pass
 
+    @rank_zero_only
     def log_hyperparams(self, params):
         pass
 
@@ -477,14 +490,3 @@ def merge_dicts(
         d_out[k] = (fn or default_func)(values_to_agg)
 
     return d_out
-
-
-def rank_zero_experiment(fn: Callable) -> Callable:
-    """ Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
-    @wraps(fn)
-    def experiment(self):
-        @rank_zero_only
-        def get_experiment():
-            return fn(self)
-        return get_experiment() or DummyExperiment()
-    return experiment
diff --git a/pytorch_lightning/loggers/csv_logs.py b/pytorch_lightning/loggers/csv_logs.py
index 8dde1e4b88..d47cff1db0 100644
--- a/pytorch_lightning/loggers/csv_logs.py
+++ b/pytorch_lightning/loggers/csv_logs.py
@@ -29,7 +29,7 @@ import torch
 
 from pytorch_lightning import _logger as log
 from pytorch_lightning.core.saving import save_hparams_to_yaml
-from pytorch_lightning.loggers.base import LightningLoggerBase
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
 from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
 
 
@@ -162,6 +162,7 @@ class CSVLogger(LightningLoggerBase):
         return self._save_dir
 
     @property
+    @rank_zero_experiment
     def experiment(self) -> ExperimentWriter:
         r"""
diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index 20ecb8fe40..24007c3a04 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -170,6 +170,7 @@ class WandbLogger(LightningLoggerBase):
         # don't create an experiment if we don't have one
         return self._experiment.id if self._experiment else self._id
 
+    @rank_zero_only
     def finalize(self, status: str) -> None:
         # offset future training logged on same W&B run
         if self._experiment is not None: