From 1feff5d7749ccb2e81e1157a947482a7bc9ff242 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Wed, 27 Jan 2021 09:52:14 +0100
Subject: [PATCH] move progress bar test to correct test folder (#5667)

---
 CHANGELOG.md                               |  2 +-
 .../logging_/test_progress_bar_logging.py  | 22 +++++++++++++++++++
 2 files changed, 23 insertions(+), 1 deletion(-)
 create mode 100644 tests/trainer/logging_/test_progress_bar_logging.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9328e4ab02..1db4b45a75 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -206,7 +206,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed FileNotFoundError for best checkpoint when using DDP with Hydra ([#5629](https://github.com/PyTorchLightning/pytorch-lightning/pull/5629))
 - Fixed an error when logging a progress bar metric with a reserved name ([#5620](https://github.com/PyTorchLightning/pytorch-lightning/pull/5620))
 - Fixed `Metric`'s `state_dict` not included when child modules ([#5614](https://github.com/PyTorchLightning/pytorch-lightning/pull/5614))
-- Fixed Neptune logger creating multiple experiments when GPUs > 1 ([#3256](https://github.com/PyTorchLightning/pytorch-lightning/pull/3256))
+- Fixed Neptune logger creating multiple experiments when GPUs > 1 ([#3256](https://github.com/PyTorchLightning/pytorch-lightning/pull/3256))
 - Fixed duplicate logs appearing in console when using the python logging module ([#5509](https://github.com/PyTorchLightning/pytorch-lightning/pull/5509))
 - Fixed tensor printing in `trainer.test()` ([#5138](https://github.com/PyTorchLightning/pytorch-lightning/pull/5138))
 - Fixed not using dataloader when `hparams` present ([#4559](https://github.com/PyTorchLightning/pytorch-lightning/pull/4559))
diff --git a/tests/trainer/logging_/test_progress_bar_logging.py b/tests/trainer/logging_/test_progress_bar_logging.py
new file mode 100644
index 0000000000..b7705dfd79
--- /dev/null
+++ b/tests/trainer/logging_/test_progress_bar_logging.py
@@ -0,0 +1,22 @@
+import pytest
+
+from pytorch_lightning import Trainer
+from tests.base import BoringModel
+
+
+def test_logging_to_progress_bar_with_reserved_key(tmpdir):
+    """ Test that logging a metric with a reserved name to the progress bar raises a warning. """
+    class TestModel(BoringModel):
+
+        def training_step(self, *args, **kwargs):
+            output = super().training_step(*args, **kwargs)
+            self.log("loss", output["loss"], prog_bar=True)
+            return output
+
+    model = TestModel()
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        max_steps=2,
+    )
+    with pytest.warns(UserWarning, match="The progress bar already tracks a metric with the .* 'loss'"):
+        trainer.fit(model)