From c84060bcf3de8c0d65e7ab5f99269b68be69a097 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Tue, 25 Aug 2020 22:57:23 -0400
Subject: [PATCH] remove on_perf check hooks (#3178)

---
 pytorch_lightning/core/hooks.py              | 12 ------------
 pytorch_lightning/trainer/evaluation_loop.py |  6 ------
 2 files changed, 18 deletions(-)

diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py
index c2ea544dd0..7056da598f 100644
--- a/pytorch_lightning/core/hooks.py
+++ b/pytorch_lightning/core/hooks.py
@@ -250,18 +250,6 @@ class ModelHooks(Module):
         """
         # do something when the epoch ends
 
-    def on_pre_performance_check(self) -> None:
-        """
-        Called at the very beginning of the validation loop.
-        """
-        # do something before validation starts
-
-    def on_post_performance_check(self) -> None:
-        """
-        Called at the very end of the validation loop.
-        """
-        # do something before validation end
-
     def on_before_zero_grad(self, optimizer: Optimizer) -> None:
         """
         Called after optimizer.step() and before optimizer.zero_grad().
diff --git a/pytorch_lightning/trainer/evaluation_loop.py b/pytorch_lightning/trainer/evaluation_loop.py
index 650d55f9a4..6d18222e71 100644
--- a/pytorch_lightning/trainer/evaluation_loop.py
+++ b/pytorch_lightning/trainer/evaluation_loop.py
@@ -312,9 +312,6 @@ class TrainerEvaluationLoopMixin(ABC):
         # enable eval mode + no grads
         model = self.get_model()
 
-        # TODO: deprecate
-        model.on_pre_performance_check()
-
         # select dataloaders
         dataloaders, max_batches = self.evaluation_loop.get_evaluation_dataloaders()
 
@@ -389,9 +386,6 @@ class TrainerEvaluationLoopMixin(ABC):
         # log the final eval loop metrics
         eval_loop_results = self.__log_evaluation_epoch_metrics(eval_results, test_mode)
 
-        # hook
-        model.on_post_performance_check()
-
         # user may want to reload every epoch
         if self.reload_dataloaders_every_epoch:
             self.evaluation_loop.reload_evaluation_dataloaders()
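
Migration note (a minimal sketch, not part of the patch): a LightningModule that overrides
the removed hooks still imports and runs as Python code, but after this change the trainer
no longer invokes those overrides during the validation loop. The sketch below shows what
such overrides looked like and one assumed migration target; the replacement hook names
on_validation_epoch_start / on_validation_epoch_end are an assumption on my part and should
be checked against the hooks actually available in the installed pytorch_lightning release.

    import pytorch_lightning as pl

    class MyModel(pl.LightningModule):
        # before this patch: called at the very beginning of the validation loop;
        # after this patch the trainer never calls this override
        def on_pre_performance_check(self) -> None:
            print("validation is about to start")

        # before this patch: called at the very end of the validation loop;
        # after this patch the trainer never calls this override
        def on_post_performance_check(self) -> None:
            print("validation just finished")

        # assumed replacements -- verify these hooks exist in your installed version
        def on_validation_epoch_start(self) -> None:
            print("validation is about to start")

        def on_validation_epoch_end(self) -> None:
            print("validation just finished")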