diff --git a/CHANGELOG.md b/CHANGELOG.md
index 17d292636f..43ab319ef5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -49,6 +49,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

 ### Changed

+- Consistently use `step=trainer.global_step` in `LearningRateMonitor` independently of `logging_interval` ([#4376](https://github.com/PyTorchLightning/pytorch-lightning/pull/4376))
+
+
 - Tuner algorithms will be skipped if `fast_dev_run=True` ([#3903](https://github.com/PyTorchLightning/pytorch-lightning/pull/3903))

diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py
index 7502829044..f5cf3dc792 100755
--- a/pytorch_lightning/callbacks/lr_monitor.py
+++ b/pytorch_lightning/callbacks/lr_monitor.py
@@ -114,7 +114,7 @@ class LearningRateMonitor(Callback):
         latest_stat = self._extract_stats(trainer, interval)

         if trainer.logger is not None and latest_stat:
-            trainer.logger.log_metrics(latest_stat, step=trainer.current_epoch)
+            trainer.logger.log_metrics(latest_stat, step=trainer.global_step)

     def _extract_stats(self, trainer, interval: str) -> Dict[str, float]:
         latest_stat = {}
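A minimal sketch of the behavior this patch changes, using the public PyTorch Lightning API of that era (the `BoringModel` class and the SGD/StepLR choices below are illustrative assumptions, not part of the patch): with this change, `LearningRateMonitor` keys every logged learning-rate stat to `trainer.global_step`, even with `logging_interval="epoch"`, so the LR curve shares an x-axis with other step-indexed metrics instead of being plotted against the epoch counter.

```python
# Illustrative usage sketch, assuming PyTorch Lightning ~1.0.x.
# After this patch, the "lr-SGD" metric emitted by LearningRateMonitor is
# logged with step=trainer.global_step regardless of logging_interval.
import torch
from torch.utils.data import DataLoader, TensorDataset

import pytorch_lightning as pl
from pytorch_lightning.callbacks import LearningRateMonitor


class BoringModel(pl.LightningModule):
    """A tiny classifier, only here to drive the Trainer (hypothetical example model)."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.cross_entropy(self(x), y)

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.parameters(), lr=0.1)
        # An epoch-interval scheduler, so the LR actually changes between epochs.
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
        return [optimizer], [scheduler]


if __name__ == "__main__":
    dataset = TensorDataset(torch.randn(64, 32), torch.randint(0, 2, (64,)))
    train_loader = DataLoader(dataset, batch_size=8)

    # Even with logging_interval="epoch", the per-epoch LR values are now
    # recorded against the optimizer-step count, not trainer.current_epoch.
    trainer = pl.Trainer(
        max_epochs=2,
        callbacks=[LearningRateMonitor(logging_interval="epoch")],
    )
    trainer.fit(BoringModel(), train_loader)
```

Previously, epoch-interval stats were logged with `step=trainer.current_epoch`, which put them on a different, much smaller x-axis than step-logged metrics; using `global_step` everywhere makes the curves line up in logger dashboards.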