* Fix #4375: Always use trainer.global_step for step
* Changelog
* Remove superfluous use of "epoch"
* Update Changelog

Co-authored-by: Nicki Skafte <skaftenicki@gmail.com>
parent 299de5dc62
commit 8601268c70
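The practical effect of the fix: with `logging_interval="epoch"`, the learning-rate curve is now keyed by the same x-axis as per-step metrics such as the training loss, instead of by the epoch index. A minimal sketch of a setup where the change is visible (the toy module and random dataset below are illustrative, not part of this commit):

# Minimal sketch (illustrative; TinyModule and the random dataset are not
# part of this commit). With logging_interval="epoch", LearningRateMonitor
# now logs the LR at trainer.global_step, so it lines up with per-step
# metrics like the training loss.
import torch
from torch.utils.data import DataLoader, TensorDataset

import pytorch_lightning as pl
from pytorch_lightning.callbacks import LearningRateMonitor


class TinyModule(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        loss = torch.nn.functional.mse_loss(self.layer(x), y)
        self.log("train_loss", loss)  # logged against global_step
        return loss

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.parameters(), lr=0.1)
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.5)
        return [optimizer], [scheduler]


data = DataLoader(TensorDataset(torch.randn(64, 4), torch.randn(64, 1)), batch_size=8)
trainer = pl.Trainer(
    max_epochs=3,
    callbacks=[LearningRateMonitor(logging_interval="epoch")],
)
trainer.fit(TinyModule(), data)
# With this fix the LR arrives at the trainer's global step (multiples of the
# 8 batches per epoch here) instead of the raw epoch indices 0, 1, 2.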
@@ -49,6 +49,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ### Changed
 
+- Consistently use `step=trainer.global_step` in `LearningRateMonitor` independently of `logging_interval` ([#4376](https://github.com/PyTorchLightning/pytorch-lightning/pull/4376))
+
+
 - Tuner algorithms will be skipped if `fast_dev_run=True` ([#3903](https://github.com/PyTorchLightning/pytorch-lightning/pull/3903))
 
 
@@ -114,7 +114,7 @@ class LearningRateMonitor(Callback):
         latest_stat = self._extract_stats(trainer, interval)
 
         if trainer.logger is not None and latest_stat:
-            trainer.logger.log_metrics(latest_stat, step=trainer.current_epoch)
+            trainer.logger.log_metrics(latest_stat, step=trainer.global_step)
 
     def _extract_stats(self, trainer, interval: str) -> Dict[str, float]:
         latest_stat = {}
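To observe the new behavior at the logger boundary, one can record the `step` value that `log_metrics` receives. A sketch against the 1.0-era logger API; the stub logger is hypothetical and reuses `TinyModule` and `data` from the sketch above:

# Hypothetical stub logger (not part of this commit) that records the `step`
# passed to log_metrics; reuses TinyModule and data from the sketch above.
from pytorch_lightning.loggers import LightningLoggerBase
from pytorch_lightning.utilities import rank_zero_only


class StepRecordingLogger(LightningLoggerBase):
    def __init__(self):
        super().__init__()
        self.recorded = []

    @property
    def experiment(self):
        return None

    @property
    def name(self):
        return "step-recorder"

    @property
    def version(self):
        return "0"

    @rank_zero_only
    def log_hyperparams(self, params):
        pass

    @rank_zero_only
    def log_metrics(self, metrics, step=None):
        self.recorded.append((step, dict(metrics)))


logger = StepRecordingLogger()
trainer = pl.Trainer(
    max_epochs=2,
    logger=logger,
    callbacks=[LearningRateMonitor(logging_interval="epoch")],
)
trainer.fit(TinyModule(), data)
lr_steps = [s for s, m in logger.recorded if any(k.startswith("lr-") for k in m)]
# With this commit, lr_steps holds global step counts; previously (with
# step=trainer.current_epoch) it held the epoch indices 0 and 1.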