Remove legacy teardown check in train loop (#7917)

Carlos Mocholí authored on 2021-06-10 15:02:14 +02:00 (committed by GitHub)
parent b45a89a256
commit 839019a3a7
1 changed file with 0 additions and 5 deletions

pytorch_lightning/trainer/training_loop.py

@@ -50,7 +50,6 @@ class TrainLoop:
         self.trainer = trainer
         self.accumulated_loss = None
         self.warning_cache = WarningCache()
-        self._teardown_already_run = False
         self.running_loss = TensorRunningAccum(window_length=20)
         self._skip_backward = False
         self._optimizer_freq_cumsum = None
@@ -105,10 +104,6 @@ class TrainLoop:
         self.trainer.call_hook("on_train_start")

     def on_train_end(self):
-        if self._teardown_already_run:
-            return
-        self._teardown_already_run = True
-
         # trigger checkpoint check. need to temporarily decrease the global step to avoid saving duplicates
         # when a checkpoint was saved at the last step
         self.global_step -= 1
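
The deleted lines implemented a run-once guard: a boolean flag that made the teardown hook idempotent if it happened to be triggered more than once. A minimal standalone sketch of that pattern follows (the Loop class and its print statement are hypothetical stand-ins for illustration, not Lightning's actual API); removing the guard assumes the trainer now calls on_train_end exactly once.

class Loop:
    """Hypothetical stand-in for the TrainLoop, for illustration only."""

    def __init__(self) -> None:
        # The flag the commit deletes: starts False, flips to True on the
        # first teardown so any later call becomes a no-op.
        self._teardown_already_run = False
        self.global_step = 10

    def on_train_end(self) -> None:
        if self._teardown_already_run:
            return
        self._teardown_already_run = True
        # Stand-in for the real teardown work (final checkpoint check, hooks).
        self.global_step -= 1
        print(f"teardown ran, global_step={self.global_step}")


loop = Loop()
loop.on_train_end()  # prints: teardown ran, global_step=9
loop.on_train_end()  # no output: the guard short-circuits the second call

With the guard gone, a second invocation of on_train_end would re-run the teardown body (including the global_step decrement), so the change relies on the trainer guaranteeing a single call.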