From 8601268c70649f49767001098adbf665a93843df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon-Martin=20Schr=C3=B6der?= Date: Sun, 22 Nov 2020 13:02:06 +0100 Subject: [PATCH] Fix #4375: Always use trainer.global_step for step (#4376) * Fix #4375: Always use trainer.global_step for step * Changelog * Remove superfluous use "epoch" * Update Changelog Co-authored-by: Nicki Skafte --- CHANGELOG.md | 3 +++ pytorch_lightning/callbacks/lr_monitor.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 17d292636f..43ab319ef5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -49,6 +49,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). ### Changed +- Consistently use `step=trainer.global_step` in `LearningRateMonitor` independently of `logging_interval` ([#4376](https://github.com/PyTorchLightning/pytorch-lightning/pull/4376)) + + - Tuner algorithms will be skipped if `fast_dev_run=True` ([#3903](https://github.com/PyTorchLightning/pytorch-lightning/pull/3903)) diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index 7502829044..f5cf3dc792 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -114,7 +114,7 @@ class LearningRateMonitor(Callback): latest_stat = self._extract_stats(trainer, interval) if trainer.logger is not None and latest_stat: - trainer.logger.log_metrics(latest_stat, step=trainer.current_epoch) + trainer.logger.log_metrics(latest_stat, step=trainer.global_step) def _extract_stats(self, trainer, interval: str) -> Dict[str, float]: latest_stat = {}