From 44cd412e91d8a4de76d1c6a0944b3f964f18f2de Mon Sep 17 00:00:00 2001 From: four4fish <88516121+four4fish@users.noreply.github.com> Date: Wed, 1 Dec 2021 18:42:17 -0800 Subject: [PATCH] Remove precision_plugin pre_dispatch() method (#10887) * Remove precision_plugin pre_dispatch() method * update changelog --- CHANGELOG.md | 3 +++ pytorch_lightning/accelerators/accelerator.py | 2 -- pytorch_lightning/plugins/precision/precision_plugin.py | 3 --- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 629b28e392..e4c03b70cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -188,6 +188,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Removed argument `return_result` from the `DDPSpawnPlugin.spawn()` method ([#10867](https://github.com/PyTorchLightning/pytorch-lightning/pull/10867)) +- Removed method `pre_dispatch` from the `PrecisionPlugin` class ([#10887](https://github.com/PyTorchLightning/pytorch-lightning/pull/10887)) + + ### Fixed - Fixed an issue with `SignalConnector` not restoring the default signal handlers on teardown when running on SLURM or with fault-tolerant training enabled ([#10611](https://github.com/PyTorchLightning/pytorch-lightning/pull/10611)) diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/accelerator.py index b50a192213..502bed9870 100644 --- a/pytorch_lightning/accelerators/accelerator.py +++ b/pytorch_lightning/accelerators/accelerator.py @@ -79,8 +79,6 @@ class Accelerator: if self.training_type_plugin.setup_optimizers_in_pre_dispatch: self.training_type_plugin.setup_optimizers(trainer) - self.training_type_plugin.precision_plugin.pre_dispatch() - def dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something before the training/evaluation/prediction starts.""" self.training_type_plugin.dispatch(trainer) diff --git a/pytorch_lightning/plugins/precision/precision_plugin.py 
b/pytorch_lightning/plugins/precision/precision_plugin.py index 3c02d198ab..1f2a97ad28 100644 --- a/pytorch_lightning/plugins/precision/precision_plugin.py +++ b/pytorch_lightning/plugins/precision/precision_plugin.py @@ -201,9 +201,6 @@ class PrecisionPlugin(CheckpointHooks): parameters = self.main_params(optimizer) torch.nn.utils.clip_grad_norm_(parameters, clip_val) - def pre_dispatch(self) -> None: - """Hook to do something before the training/evaluation/prediction starts.""" - def dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something when ``Accelerator.dispatch()`` gets called."""