Delete `on_after_backward` unused argument (#7925)

This commit is contained in:
Carlos Mocholí 2021-06-11 02:38:30 +02:00 committed by GitHub
parent 8b73869369
commit 9e932f4dfd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 2 additions and 2 deletions

View File

@@ -246,7 +246,7 @@ class TrainLoop:
opt_idx = int(np.argmax(self.optimizer_freq_cumsum > current_place_in_loop))
return [(opt_idx, self.trainer.optimizers[opt_idx])]
def on_after_backward(self, training_step_output, batch_idx, untouched_loss):
def on_after_backward(self, batch_idx, untouched_loss):
# insert after step hook
self.trainer.call_hook("on_after_backward")
@@ -760,7 +760,7 @@ class TrainLoop:
# hook - call this hook only
# when gradients have finished to accumulate
if not self.should_accumulate():
self.on_after_backward(result.training_step_output, batch_idx, result.loss)
self.on_after_backward(batch_idx, result.loss)
# check if loss or model weights are nan
if self.trainer.terminate_on_nan: