import atexit
import inspect
import os
import pickle
import platform
from unittest import mock

import pytest

import tests.base.develop_utils as tutils
from pytorch_lightning import Trainer, Callback
from pytorch_lightning.loggers import (
    TensorBoardLogger,
    MLFlowLogger,
    NeptuneLogger,
    TestTubeLogger,
    CometLogger,
    WandbLogger,
)
from pytorch_lightning.loggers.base import DummyExperiment
from tests.base import EvalModelTemplate


def _get_logger_args(logger_class, save_dir):
    logger_args = {}
    if 'save_dir' in inspect.getfullargspec(logger_class).args:
        logger_args.update(save_dir=str(save_dir))
    if 'offline_mode' in inspect.getfullargspec(logger_class).args:
        logger_args.update(offline_mode=True)
    if 'offline' in inspect.getfullargspec(logger_class).args:
        logger_args.update(offline=True)
    return logger_args
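
# Illustration (not executed): because the helper inspects each constructor's signature,
# `_get_logger_args(TensorBoardLogger, tmpdir)` yields only `{'save_dir': str(tmpdir)}`,
# while loggers whose __init__ accepts `offline` (e.g. CometLogger at the time of writing)
# additionally get `offline=True` so the tests never hit the network.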


@pytest.mark.parametrize("logger_class", [
    TensorBoardLogger,
    CometLogger,
    MLFlowLogger,
    NeptuneLogger,
    TestTubeLogger,
    WandbLogger,
])
@mock.patch('pytorch_lightning.loggers.wandb.wandb')
def test_loggers_fit_test(wandb, tmpdir, monkeypatch, logger_class):
    """Verify the basic functionality of all loggers."""
    os.environ['PL_DEV_DEBUG'] = '0'

    if logger_class == CometLogger:
        # prevent comet logger from trying to print at exit, since
        # pytest's stdout/stderr redirection breaks it
        monkeypatch.setattr(atexit, 'register', lambda _: None)

    model = EvalModelTemplate()

    class StoreHistoryLogger(logger_class):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.history = []

        def log_metrics(self, metrics, step):
            super().log_metrics(metrics, step)
            self.history.append((step, metrics))

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = StoreHistoryLogger(**logger_args)

    if logger_class == WandbLogger:
        # required mocks for Trainer
        logger.experiment.id = 'foo'
        logger.experiment.project_name.return_value = 'bar'

    trainer = Trainer(
        max_epochs=1,
        logger=logger,
        limit_train_batches=0.2,
        limit_val_batches=0.5,
        fast_dev_run=True,
        default_root_dir=tmpdir,
    )
    trainer.fit(model)
    trainer.test()

    log_metric_names = [(s, sorted(m.keys())) for s, m in logger.history]
    if logger_class == TensorBoardLogger:
        assert log_metric_names == [(0, ['hp_metric']),
                                    (0, ['epoch', 'val_acc', 'val_loss']),
                                    (0, ['epoch', 'train_some_val']),
                                    (0, ['hp_metric']),
                                    (1, ['epoch', 'test_acc', 'test_loss'])]
    else:
        assert log_metric_names == [(0, ['epoch', 'val_acc', 'val_loss']),
                                    (0, ['epoch', 'train_some_val']),
                                    (1, ['epoch', 'test_acc', 'test_loss'])]


@pytest.mark.parametrize("logger_class", [
    TensorBoardLogger,
    CometLogger,
    MLFlowLogger,
    TestTubeLogger,
    WandbLogger,
])
@mock.patch('pytorch_lightning.loggers.wandb.wandb')
def test_loggers_save_dir_and_weights_save_path(wandb, tmpdir, monkeypatch, logger_class):
    """ Test the combinations of save_dir, weights_save_path and default_root_dir. """

    if logger_class == CometLogger:
        # prevent comet logger from trying to print at exit, since
        # pytest's stdout/stderr redirection breaks it
        monkeypatch.setattr(atexit, 'register', lambda _: None)

    class TestLogger(logger_class):
        # for this test it does not matter what these attributes are
        # so we standardize them to make testing easier
        @property
        def version(self):
            return 'version'

        @property
        def name(self):
            return 'name'

    model = EvalModelTemplate()
    trainer_args = dict(
        default_root_dir=tmpdir,
        max_steps=1,
    )

    # no weights_save_path given
    save_dir = tmpdir / 'logs'
    weights_save_path = None
    logger = TestLogger(**_get_logger_args(TestLogger, save_dir))
    trainer = Trainer(**trainer_args, logger=logger, weights_save_path=weights_save_path)
    trainer.fit(model)
    assert trainer.weights_save_path == trainer.default_root_dir
    assert trainer.checkpoint_callback.dirpath == os.path.join(logger.save_dir, 'name', 'version', 'checkpoints')
    assert trainer.default_root_dir == tmpdir

    # with weights_save_path given, the logger path and checkpoint path should be different
    save_dir = tmpdir / 'logs'
    weights_save_path = tmpdir / 'weights'
    logger = TestLogger(**_get_logger_args(TestLogger, save_dir))
    trainer = Trainer(**trainer_args, logger=logger, weights_save_path=weights_save_path)
    trainer.fit(model)
    assert trainer.weights_save_path == weights_save_path
    assert trainer.logger.save_dir == save_dir
    assert trainer.checkpoint_callback.dirpath == weights_save_path / 'name' / 'version' / 'checkpoints'
    assert trainer.default_root_dir == tmpdir

    # no logger given
    weights_save_path = tmpdir / 'weights'
    trainer = Trainer(**trainer_args, logger=False, weights_save_path=weights_save_path)
    trainer.fit(model)
    assert trainer.weights_save_path == weights_save_path
    assert trainer.checkpoint_callback.dirpath == weights_save_path / 'checkpoints'
    assert trainer.default_root_dir == tmpdir


@pytest.mark.parametrize("logger_class", [
    TensorBoardLogger,
    CometLogger,
    MLFlowLogger,
    NeptuneLogger,
    TestTubeLogger,
    # The WandbLogger gets tested for pickling in its own test.
])
def test_loggers_pickle(tmpdir, monkeypatch, logger_class):
    """Verify that pickling trainer with logger works."""
    if logger_class == CometLogger:
        # prevent comet logger from trying to print at exit, since
        # pytest's stdout/stderr redirection breaks it
        monkeypatch.setattr(atexit, 'register', lambda _: None)

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = logger_class(**logger_args)

    # this can cause a pickle error if the experiment object is not picklable;
    # the logger needs to remove it from the state before pickling
    _ = logger.experiment

    # test pickling loggers
    pickle.dumps(logger)

    trainer = Trainer(
        max_epochs=1,
        logger=logger,
    )
    pkl_bytes = pickle.dumps(trainer)

    trainer2 = pickle.loads(pkl_bytes)
    trainer2.logger.log_metrics({'acc': 1.0})

    # make sure we restored properly
    assert trainer2.logger.name == logger.name
    assert trainer2.logger.save_dir == logger.save_dir
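
# A minimal sketch of the mechanism exercised above (an assumption for illustration, not the
# actual pytorch_lightning implementation): a logger can stay picklable by dropping the live
# experiment handle from its pickled state, e.g.
#
#     def __getstate__(self):
#         state = self.__dict__.copy()
#         state['_experiment'] = None  # hypothetical attribute name; it varies per logger
#         return state
#
# so that `pickle.dumps(logger)` succeeds even after `logger.experiment` has been touched.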


@pytest.mark.parametrize("extra_params", [
    pytest.param(dict(max_epochs=1, auto_scale_batch_size=True), id='Batch-size-Finder'),
    pytest.param(dict(max_epochs=3, auto_lr_find=True), id='LR-Finder'),
])
def test_logger_reset_correctly(tmpdir, extra_params):
    """ Test that the tuners do not alter the logger reference """
    tutils.reset_seed()

    model = EvalModelTemplate()

    trainer = Trainer(
        default_root_dir=tmpdir,
        **extra_params,
    )
    logger1 = trainer.logger
    trainer.tune(model)
    logger2 = trainer.logger
    logger3 = model.logger

    assert logger1 == logger2, \
        'Finder altered the logger of trainer'
    assert logger2 == logger3, \
        'Finder altered the logger of model'


class RankZeroLoggerCheck(Callback):
    # this class has to be defined outside the test function, otherwise we get pickle error
    # due to the way ddp process is launched

    def on_train_batch_start(self, trainer, pl_module, batch, batch_idx, dataloader_idx):
        is_dummy = isinstance(trainer.logger.experiment, DummyExperiment)
        if trainer.is_global_zero:
            assert not is_dummy
        else:
            assert is_dummy
            assert pl_module.logger.experiment.something(foo="bar") is None
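
# Note on the rank check above: on non-zero ranks the logger's experiment is replaced by a
# `DummyExperiment`, whose attribute lookups resolve to a no-op callable, so arbitrary calls
# such as `something(foo="bar")` are expected to return None.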


@pytest.mark.skipif(platform.system() == "Windows", reason="Distributed training is not supported on Windows")
@pytest.mark.parametrize("logger_class", [
    TensorBoardLogger,
    MLFlowLogger,
    NeptuneLogger,
    TestTubeLogger,
])
def test_logger_created_on_rank_zero_only(tmpdir, monkeypatch, logger_class):
    """ Test that loggers get replaced by dummy loggers on global rank > 0 """
    if logger_class == CometLogger:
        # prevent comet logger from trying to print at exit, since
        # pytest's stdout/stderr redirection breaks it
        monkeypatch.setattr(atexit, 'register', lambda _: None)

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = logger_class(**logger_args)
    model = EvalModelTemplate()
    trainer = Trainer(
        logger=logger,
        default_root_dir=tmpdir,
        distributed_backend='ddp_cpu',
        num_processes=2,
        max_steps=1,
        checkpoint_callback=True,
        callbacks=[RankZeroLoggerCheck()],
    )
    result = trainer.fit(model)
    assert result == 1