Fix loggers and update docs (#964)
* Fix loggers and update docs
* Update trainer.py
parent 27a3be0287
commit f5e0df390c
@@ -22,13 +22,13 @@ To use CometLogger as your logger do the following.
     )
     trainer = Trainer(logger=comet_logger)

-The CometLogger is available anywhere in your LightningModule
+The CometLogger is available anywhere except ``__init__`` in your LightningModule

 .. code-block:: python

     class MyModule(pl.LightningModule):

-        def __init__(self, ...):
+        def any_lightning_module_function_or_hook(self, ...):
             some_img = fake_image()
             self.logger.experiment.add_image('generated_images', some_img, 0)

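Putting this hunk's pieces together, a minimal sketch of the documented Comet setup might look like the following; the bare ``CometLogger()`` call assumes credentials are supplied via its arguments or environment variables, and a random tensor stands in for the ``fake_image()`` placeholder, so treat this as an illustration rather than the repository's own example.

    import torch
    import pytorch_lightning as pl
    from pytorch_lightning import Trainer, loggers

    comet_logger = loggers.CometLogger()  # credentials/project settings omitted here

    class MyModule(pl.LightningModule):

        def any_lightning_module_function_or_hook(self):
            # self.logger is attached by the Trainer, so this works in any hook but not in __init__
            some_img = torch.rand(3, 64, 64)  # stand-in for fake_image()
            self.logger.experiment.add_image('generated_images', some_img, 0)

    trainer = Trainer(logger=comet_logger)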
@@ -52,13 +52,13 @@ To use Neptune.ai as your logger do the following.
     )
     trainer = Trainer(logger=neptune_logger)

-The Neptune.ai is available anywhere in your LightningModule
+The Neptune.ai is available anywhere except ``__init__`` in your LightningModule

 .. code-block:: python

     class MyModule(pl.LightningModule):

-        def __init__(self, ...):
+        def any_lightning_module_function_or_hook(self, ...):
             some_img = fake_image()
             self.logger.experiment.add_image('generated_images', some_img, 0)

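The Neptune section follows the same pattern; below is a sketch under the assumption that the ``NeptuneLogger`` constructor accepts ``api_key`` and ``project_name`` (illustrative values) and that its ``experiment`` object exposes the Neptune client's own logging methods.

    import pytorch_lightning as pl
    from pytorch_lightning import Trainer, loggers

    neptune_logger = loggers.NeptuneLogger(
        api_key="ANONYMOUS",                                  # assumed keyword argument
        project_name="shared/pytorch-lightning-integration",  # assumed keyword argument
    )

    class MyModule(pl.LightningModule):

        def any_lightning_module_function_or_hook(self):
            # log through the native Neptune experiment object; method name assumed
            self.logger.experiment.log_metric('val_acc', 0.9)

    trainer = Trainer(logger=neptune_logger)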
@@ -76,13 +76,13 @@ To use `Tensorboard <https://pytorch.org/docs/stable/tensorboard.html>`_ as your
     logger = TensorBoardLogger("tb_logs", name="my_model")
     trainer = Trainer(logger=logger)

-The TensorBoardLogger is available anywhere in your LightningModule
+The TensorBoardLogger is available anywhere except ``__init__`` in your LightningModule

 .. code-block:: python

     class MyModule(pl.LightningModule):

-        def __init__(self, ...):
+        def any_lightning_module_function_or_hook(self, ...):
             some_img = fake_image()
             self.logger.experiment.add_image('generated_images', some_img, 0)

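For TensorBoard, ``self.logger.experiment`` is the underlying ``SummaryWriter`` (to the best of my reading of ``TensorBoardLogger``), so the standard writer methods apply; a small sketch:

    import pytorch_lightning as pl
    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import TensorBoardLogger

    logger = TensorBoardLogger("tb_logs", name="my_model")

    class MyModule(pl.LightningModule):

        def any_lightning_module_function_or_hook(self):
            # the usual SummaryWriter API: add_scalar, add_histogram, add_image, ...
            self.logger.experiment.add_scalar('my_metric', 0.5, 0)

    trainer = Trainer(logger=logger)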
@@ -102,13 +102,13 @@ To use TestTube as your logger do the following.
     logger = TestTubeLogger("tb_logs", name="my_model")
     trainer = Trainer(logger=logger)

-The TestTubeLogger is available anywhere in your LightningModule
+The TestTubeLogger is available anywhere except ``__init__`` in your LightningModule

 .. code-block:: python

     class MyModule(pl.LightningModule):

-        def __init__(self, ...):
+        def any_lightning_module_function_or_hook(self, ...):
             some_img = fake_image()
             self.logger.experiment.add_image('generated_images', some_img, 0)

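The TestTube snippet has the same shape; the sketch below assumes, as the docs imply, that the TestTube experiment object mirrors the TensorBoard ``SummaryWriter`` API so the documented ``add_image`` call works, and that ``TestTubeLogger`` is reachable from the ``loggers`` namespace as in the other examples.

    import torch
    import pytorch_lightning as pl
    from pytorch_lightning import Trainer, loggers

    logger = loggers.TestTubeLogger("tb_logs", name="my_model")

    class MyModule(pl.LightningModule):

        def any_lightning_module_function_or_hook(self):
            some_img = torch.rand(3, 64, 64)  # stand-in for fake_image()
            self.logger.experiment.add_image('generated_images', some_img, 0)

    trainer = Trainer(logger=logger)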
@@ -127,13 +127,13 @@ To use Wandb as your logger do the following.
     wandb_logger = WandbLogger()
     trainer = Trainer(logger=wandb_logger)

-The Wandb logger is available anywhere in your LightningModule
+The Wandb logger is available anywhere except ``__init__`` in your LightningModule

 .. code-block:: python

     class MyModule(pl.LightningModule):

-        def __init__(self, ...):
+        def any_lightning_module_function_or_hook(self, ...):
             some_img = fake_image()
             self.logger.experiment.add_image('generated_images', some_img, 0)

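For Weights & Biases, here is a sketch assuming ``self.logger.experiment`` is the underlying wandb run, whose ``log`` call records a metrics dictionary:

    import pytorch_lightning as pl
    from pytorch_lightning import Trainer, loggers

    wandb_logger = loggers.WandbLogger()

    class MyModule(pl.LightningModule):

        def any_lightning_module_function_or_hook(self):
            # wandb's run.log takes a dict of metric names to values
            self.logger.experiment.log({'my_metric': 0.5})

    trainer = Trainer(logger=wandb_logger)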
@@ -151,12 +151,17 @@ PyTorch-Lightning supports use of multiple loggers, just pass a list to the `Tra
     logger2 = TestTubeLogger("tt_logs", name="my_model")
     trainer = Trainer(logger=[logger1, logger2])

-The loggers are available as a list anywhere in your LightningModule
+The loggers are available as a list anywhere except ``__init__`` in your LightningModule

 .. code-block:: python

     class MyModule(pl.LightningModule):

-        def __init__(self, ...):
+        def any_lightning_module_function_or_hook(self, ...):
             some_img = fake_image()
+
+            # Option 1
             self.logger.experiment[0].add_image('generated_images', some_img, 0)
+
+            # Option 2
+            self.logger[0].experiment.add_image('generated_images', some_img, 0)
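Stitching this hunk's before/after into one skeleton, with an extra iteration variant for the case where every backend happens to support the same call (an assumption, since each ``experiment`` object exposes its own API):

    import torch
    import pytorch_lightning as pl
    from pytorch_lightning import Trainer, loggers

    logger1 = loggers.TensorBoardLogger("tb_logs", name="my_model")
    logger2 = loggers.TestTubeLogger("tt_logs", name="my_model")
    trainer = Trainer(logger=[logger1, logger2])

    class MyModule(pl.LightningModule):

        def any_lightning_module_function_or_hook(self):
            some_img = torch.rand(3, 64, 64)

            # Option 1: experiment returns a list of the underlying experiment objects
            self.logger.experiment[0].add_image('generated_images', some_img, 0)

            # Option 2: index the logger collection itself (the new __getitem__), then its experiment
            self.logger[0].experiment.add_image('generated_images', some_img, 0)

            # Or loop over all experiments, assuming they share the same logging method
            for experiment in self.logger.experiment:
                experiment.add_image('generated_images', some_img, 0)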
@@ -1,6 +1,7 @@
 """
 Lightning supports most popular logging frameworks (Tensorboard, comet, weights and biases, etc...).
-To use a logger, simply pass it into the trainer.
+To use a logger, simply pass it into the trainer. To use multiple loggers, simply pass in a ``list``
+or ``tuple`` of loggers.

 .. code-block:: python

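Since the updated docstring says a ``tuple`` of loggers works as well as a ``list``, the same idea with a tuple would read as below (a save directory is passed to ``TensorBoardLogger`` here, as it normally expects one):

    from pytorch_lightning import Trainer, loggers

    tb_logger = loggers.TensorBoardLogger("tb_logs")
    comet_logger = loggers.CometLogger()

    # a tuple is accepted just like a list, per the updated docstring
    trainer = Trainer(logger=(tb_logger, comet_logger))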
@@ -14,14 +15,19 @@ To use a logger, simply pass it into the trainer.
     comet_logger = loggers.CometLogger()
     trainer = Trainer(logger=comet_logger)

-.. note:: All loggers log by default to `os.getcwd()`. To change the path without creating a logger set
-    Trainer(default_save_path='/your/path/to/save/checkpoints')
+    # or pass a list
+    tb_logger = loggers.TensorBoardLogger()
+    comet_logger = loggers.CometLogger()
+    trainer = Trainer(logger=[tb_logger, comet_logger])
+
+.. note:: All loggers log by default to ``os.getcwd()``. To change the path without creating a logger set
+    ``Trainer(default_save_path='/your/path/to/save/checkpoints')``

 Custom logger
 -------------

 You can implement your own logger by writing a class that inherits from
-`LightningLoggerBase`. Use the `rank_zero_only` decorator to make sure that
+``LightningLoggerBase``. Use the ``rank_zero_only`` decorator to make sure that
 only the first process in DDP training logs data.

 .. code-block:: python
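The concrete custom-logger example sits between this hunk and the next and is elided from this view; purely as a sketch of what the docstring describes, a class inheriting ``LightningLoggerBase`` with ``rank_zero_only``-guarded methods might look roughly like this. The method names reflect the logger interface as I understand it for this version and are assumptions, not the file's actual contents.

    from pytorch_lightning.loggers import LightningLoggerBase, rank_zero_only

    class MyLogger(LightningLoggerBase):
        """Sketch only; not the example from the real documentation."""

        @rank_zero_only
        def log_hyperparams(self, params):
            # params holds the run's hyperparameters (e.g. an argparse.Namespace)
            pass

        @rank_zero_only
        def log_metrics(self, metrics, step):
            # metrics is a dict of metric names to values for this step
            pass

        def save(self):
            # optional: flush anything buffered so far
            pass

        @rank_zero_only
        def finalize(self, status):
            # status such as 'success'; whatever finishes the run goes here
            pass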
@@ -52,13 +58,13 @@ only the first process in DDP training logs data.
         # finishes goes here


-If you write a logger than may be useful to others, please send
+If you write a logger that may be useful to others, please send
 a pull request to add it to Lighting!

 Using loggers
 -------------

-Call the logger anywhere from your LightningModule by doing:
+Call the logger anywhere except ``__init__`` in your LightningModule by doing:

 .. code-block:: python

@@ -69,6 +75,8 @@ Call the logger anywhere from your LightningModule by doing:
     def any_lightning_module_function_or_hook(...):
         self.logger.experiment.add_histogram(...)

+Read more in the `Experiment Logging use case <./experiment_logging.html>`_.
+
 Supported Loggers
 -----------------
 """
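As a concrete instance of "anywhere except ``__init__``", here is a hedged sketch of logging weight histograms from a hook, assuming a TensorBoard-backed logger (so ``experiment`` is a ``SummaryWriter``) and assuming ``current_epoch`` is available on the module at that point:

    import pytorch_lightning as pl

    class LitModel(pl.LightningModule):  # hypothetical module for illustration

        def on_epoch_end(self):
            # valid here because the Trainer has already attached self.logger
            for name, param in self.named_parameters():
                self.logger.experiment.add_histogram(name, param, self.current_epoch)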
@@ -77,7 +85,7 @@ from os import environ
 from .base import LightningLoggerBase, LoggerCollection, rank_zero_only
 from .tensorboard import TensorBoardLogger

-__all__ = ['TensorBoardLogger', 'LoggerCollection']
+__all__ = ['TensorBoardLogger']

 try:
     # needed to prevent ImportError and duplicated logs.
@@ -100,6 +100,9 @@ class LoggerCollection(LightningLoggerBase):
         super().__init__()
         self._logger_iterable = logger_iterable

+    def __getitem__(self, index: int) -> LightningLoggerBase:
+        return [logger for logger in self._logger_iterable][index]
+
     @property
     def experiment(self) -> List[Any]:
         return [logger.experiment() for logger in self._logger_iterable]
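The new ``__getitem__`` is what makes the ``self.logger[0].experiment`` pattern in the docs work; here is a small sketch using ``LoggerCollection`` directly (the Trainer normally builds it for you when given a list of loggers):

    from pytorch_lightning.loggers import LoggerCollection, TensorBoardLogger

    tb_logger = TensorBoardLogger("tb_logs", name="my_model")
    collection = LoggerCollection([tb_logger])

    # indexing now returns the wrapped logger at that position...
    assert collection[0] is tb_logger
    # ...so its experiment can be reached the same way the docs show
    writer = collection[0].experiment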
@@ -937,6 +937,9 @@ class Trainer(TrainerIOMixin,
             # feed to .fit()

        """
+        # bind logger
+        model.logger = self.logger
+
         # Fit begin callbacks
         self.on_fit_start()

@@ -1065,10 +1068,8 @@ class Trainer(TrainerIOMixin,
         # set local properties on the model
         self.copy_trainer_model_properties(ref_model)

-        # link up experiment object
+        # log hyper-parameters
         if self.logger is not None:
-            ref_model.logger = self.logger
-
             # save exp to get started
             if hasattr(ref_model, "hparams"):
                 self.logger.log_hyperparams(ref_model.hparams)
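The retained branch only logs hyperparameters when the model defines ``hparams``; below is a sketch of the conventional pattern that makes this branch fire (the model name and hyperparameter values are illustrative):

    from argparse import Namespace

    import pytorch_lightning as pl

    class LitModel(pl.LightningModule):  # hypothetical model

        def __init__(self, hparams: Namespace):
            super().__init__()
            # storing the Namespace as self.hparams is what makes hasattr(ref_model, "hparams")
            # true, so the logger records these values when training starts
            self.hparams = hparams

    model = LitModel(Namespace(learning_rate=0.02, batch_size=32))
    # trainer.fit(model) would then reach self.logger.log_hyperparams(model.hparams)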