From 1e2c9eaf8965550ba55e7b3032b483eeb168943e Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Sat, 25 Apr 2020 13:04:34 -0400
Subject: [PATCH] updated docs

---
 pytorch_lightning/loggers/__init__.py | 100 ++++++++++++++++-------------
 1 file changed, 55 insertions(+), 45 deletions(-)

diff --git a/pytorch_lightning/loggers/__init__.py b/pytorch_lightning/loggers/__init__.py
index 24769c7953..c71a9aec0f 100644
--- a/pytorch_lightning/loggers/__init__.py
+++ b/pytorch_lightning/loggers/__init__.py
@@ -3,21 +3,27 @@ Lightning supports the most popular logging frameworks (TensorBoard, Comet, Weig
 To use a logger, simply pass it into the :class:`~pytorch_lightning.trainer.trainer.Trainer`.
 Lightning uses TensorBoard by default.
 
->>> from pytorch_lightning import Trainer
->>> from pytorch_lightning import loggers
->>> tb_logger = loggers.TensorBoardLogger('logs/')
->>> trainer = Trainer(logger=tb_logger)
+.. code-block:: python
+
+    from pytorch_lightning import Trainer
+    from pytorch_lightning import loggers
+    tb_logger = loggers.TensorBoardLogger('logs/')
+    trainer = Trainer(logger=tb_logger)
 
 Choose from any of the others such as MLflow, Comet, Neptune, WandB, ...
 
->>> comet_logger = loggers.CometLogger(save_dir='logs/')
->>> trainer = Trainer(logger=comet_logger)
+.. code-block:: python
+
+    comet_logger = loggers.CometLogger(save_dir='logs/')
+    trainer = Trainer(logger=comet_logger)
 
 To use multiple loggers, simply pass in a ``list`` or ``tuple`` of loggers ...
 
->>> tb_logger = loggers.TensorBoardLogger('logs/')
->>> comet_logger = loggers.CometLogger(save_dir='logs/')
->>> trainer = Trainer(logger=[tb_logger, comet_logger])
+.. code-block:: python
+
+    tb_logger = loggers.TensorBoardLogger('logs/')
+    comet_logger = loggers.CometLogger(save_dir='logs/')
+    trainer = Trainer(logger=[tb_logger, comet_logger])
 
 Note:
     All loggers log by default to ``os.getcwd()``. To change the path without creating a logger set
@@ -30,31 +36,33 @@ You can implement your own logger by writing a class that inherits from
 :class:`LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only`
 decorator to make sure that only the first process in DDP training logs data.
 
->>> from pytorch_lightning.utilities import rank_zero_only
->>> from pytorch_lightning.loggers import LightningLoggerBase
->>> class MyLogger(LightningLoggerBase):
-...
-...     @rank_zero_only
-...     def log_hyperparams(self, params):
-...         # params is an argparse.Namespace
-...         # your code to record hyperparameters goes here
-...         pass
-...
-...     @rank_zero_only
-...     def log_metrics(self, metrics, step):
-...         # metrics is a dictionary of metric names and values
-...         # your code to record metrics goes here
-...         pass
-...
-...     def save(self):
-...         # Optional. Any code necessary to save logger data goes here
-...         pass
-...
-...     @rank_zero_only
-...     def finalize(self, status):
-...         # Optional. Any code that needs to be run after training
-...         # finishes goes here
-...         pass
+.. code-block:: python
+
+    from pytorch_lightning.utilities import rank_zero_only
+    from pytorch_lightning.loggers import LightningLoggerBase
+    class MyLogger(LightningLoggerBase):
+
+        @rank_zero_only
+        def log_hyperparams(self, params):
+            # params is an argparse.Namespace
+            # your code to record hyperparameters goes here
+            pass
+
+        @rank_zero_only
+        def log_metrics(self, metrics, step):
+            # metrics is a dictionary of metric names and values
+            # your code to record metrics goes here
+            pass
+
+        def save(self):
+            # Optional. Any code necessary to save logger data goes here
+            pass
+
+        @rank_zero_only
+        def finalize(self, status):
+            # Optional. Any code that needs to be run after training
+            # finishes goes here
+            pass
 
-If you write a logger that may be useful to others, please send a pull request to add it to Lighting!
+If you write a logger that may be useful to others, please send a pull request to add it to Lightning!
 
@@ -65,18 +73,20 @@ Using loggers
 -------------
 
 Call the logger anywhere except ``__init__`` in your :class:`~pytorch_lightning.core.lightning.LightningModule` by doing:
 
->>> from pytorch_lightning import LightningModule
->>> class LitModel(LightningModule):
-...     def training_step(self, batch, batch_idx):
-...         # example
-...         self.logger.experiment.whatever_method_summary_writer_supports(...)
+.. code-block:: python
+
+    from pytorch_lightning import LightningModule
+    class LitModel(LightningModule):
+        def training_step(self, batch, batch_idx):
+            # example
+            self.logger.experiment.whatever_method_summary_writer_supports(...)
 
-# example if logger is a tensorboard logger
-self.logger.experiment.add_image('images', grid, 0)
-self.logger.experiment.add_graph(model, images)
+            # example if logger is a tensorboard logger
+            self.logger.experiment.add_image('images', grid, 0)
+            self.logger.experiment.add_graph(model, images)
 
-...     def any_lightning_module_function_or_hook(self):
-...         self.logger.experiment.add_histogram(...)
+        def any_lightning_module_function_or_hook(self):
+            self.logger.experiment.add_histogram(...)
 
 Read more in the `Experiment Logging use case <./experiment_logging.html>`_.
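
A quick usage sketch to round out the patch (not part of the diff itself): once a custom logger such as the ``MyLogger`` class above is defined, it is passed to the :class:`~pytorch_lightning.trainer.trainer.Trainer` exactly like the built-in loggers, alone or combined with them in a list. ``MyLogger`` here is the hypothetical class from the docstring; depending on the Lightning version, :class:`LightningLoggerBase` may also require ``name``, ``version`` and ``experiment`` to be implemented before it can be instantiated.

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning import loggers

    # MyLogger is the custom logger class sketched in the docstring above;
    # this assumes it also satisfies any remaining abstract members
    # (e.g. name/version/experiment) of LightningLoggerBase
    my_logger = MyLogger()

    # a custom logger composes with the built-in ones the same way:
    # pass a list and every log call fans out to each logger
    tb_logger = loggers.TensorBoardLogger('logs/')
    trainer = Trainer(logger=[my_logger, tb_logger])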