Move plateau schedulers epoch update to the training epoch loop (#8424)
parent 7c07452615
commit 7d1f4ce718
@@ -221,6 +221,8 @@ class TrainingEpochLoop(loops.Loop):
         self.trainer.call_hook('on_epoch_end')
         self.trainer.logger_connector.on_epoch_end()
 
+        self.update_lr_schedulers('epoch', update_plateau_schedulers=True)
+
         epoch_output = self._epoch_output
         # free memory
         self._epoch_output = None
@@ -233,8 +233,6 @@ class FitLoop(Loop):
         if self.epoch_loop.batches_seen == 0:
             return
 
-        self.epoch_loop.update_lr_schedulers('epoch', update_plateau_schedulers=True)
-
         did_train_only = not self.trainer.enable_validation or self.epoch_loop.val_loop.skip
         if did_train_only:
             self.global_step -= 1
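For context, here is a minimal sketch (not part of this diff) of the user-facing configuration this change affects: a plateau scheduler such as ReduceLROnPlateau is stepped once per training epoch against a monitored metric, and update_lr_schedulers('epoch', update_plateau_schedulers=True) is the call that performs that step, now issued from TrainingEpochLoop instead of FitLoop. The module, metric name, and hyperparameters below are illustrative assumptions, not code from this commit.

import torch
from torch.optim.lr_scheduler import ReduceLROnPlateau
import pytorch_lightning as pl


class LitModel(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        x, y = batch
        loss = torch.nn.functional.cross_entropy(self.layer(x), y)
        # Log the metric that the plateau scheduler monitors.
        self.log("train_loss", loss)
        return loss

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.parameters(), lr=0.1)
        scheduler = ReduceLROnPlateau(optimizer, mode="min", patience=3)
        return {
            "optimizer": optimizer,
            "lr_scheduler": {
                "scheduler": scheduler,
                # Plateau schedulers require a monitored metric and are stepped
                # at the end of each training epoch ("epoch" interval); that
                # per-epoch step is what this commit relocates.
                "monitor": "train_loss",
                "interval": "epoch",
            },
        }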