From 8a931732ae5135e3e55d9c7b7031d81837e5798a Mon Sep 17 00:00:00 2001
From: ananthsub
Date: Sun, 22 Aug 2021 11:50:10 -0700
Subject: [PATCH] Remove unused `on_train_epoch_end` hook in accelerator
 (#9035)

---
 CHANGELOG.md                                  | 3 +++
 pytorch_lightning/accelerators/accelerator.py | 4 ----
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 869249d4cf..c90af8b9c9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -166,6 +166,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed deprecated `connect_precision_plugin` and `connect_training_type_plugin` from `Accelerator` ([#9019](https://github.com/PyTorchLightning/pytorch-lightning/pull/9019))
 
+- Removed `on_train_epoch_end` from `Accelerator` ([#9035](https://github.com/PyTorchLightning/pytorch-lightning/pull/9035))
+
+
 ### Fixed
 
 - Ensure the existence of `DDPPlugin._sync_dir` in `reconciliate_processes` ([#8939](https://github.com/PyTorchLightning/pytorch-lightning/pull/8939))
diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/accelerator.py
index 54c354f9f1..4276ab4e3f 100644
--- a/pytorch_lightning/accelerators/accelerator.py
+++ b/pytorch_lightning/accelerators/accelerator.py
@@ -479,10 +479,6 @@ class Accelerator:
     def update_global_step(self, total_batch_idx: int, current_global_step: int) -> int:
         return self.training_type_plugin.update_global_step(total_batch_idx, current_global_step)
 
-    def on_train_epoch_end(self) -> None:
-        """Hook to do something on the end of an training epoch."""
-        pass
-
     def on_train_start(self) -> None:
         """Called when train begins."""
         return self.training_type_plugin.on_train_start()