Remove precision_plugin pre_dispatch() method (#10887)

* Remove precision_plugin pre_dispatch() method

* update changelog
Author: four4fish, 2021-12-01 18:42:17 -08:00 (committed by GitHub)
commit 44cd412e91 (parent 26977043bf)
3 changed files with 3 additions and 5 deletions

CHANGELOG.md

@@ -188,6 +188,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed argument `return_result` from the `DDPSpawnPlugin.spawn()` method ([#10867](https://github.com/PyTorchLightning/pytorch-lightning/pull/10867))
+
+- Removed method `pre_dispatch` from the `PrecisionPlugin` class ([#10887](https://github.com/PyTorchLightning/pytorch-lightning/pull/10887))
+
 ### Fixed
 - Fixed an issue with `SignalConnector` not restoring the default signal handlers on teardown when running on SLURM or with fault-tolerant training enabled ([#10611](https://github.com/PyTorchLightning/pytorch-lightning/pull/10611))
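
Note: downstream projects that subclass `PrecisionPlugin` and override the removed hook will silently lose that behavior, since nothing calls it anymore. A minimal migration sketch follows; `MyPrecisionPlugin` and `_one_time_setup` are hypothetical names for illustration, not part of Lightning:

import pytorch_lightning as pl
from pytorch_lightning.plugins import PrecisionPlugin

class MyPrecisionPlugin(PrecisionPlugin):
    # Before #10887, one-time setup could be placed in ``pre_dispatch``:
    #
    #     def pre_dispatch(self) -> None:
    #         self._one_time_setup()
    #
    # After #10887 that hook is never invoked; ``dispatch`` (kept, see the
    # hunks below) still is, so the logic can move there.
    def dispatch(self, trainer: "pl.Trainer") -> None:
        self._one_time_setup()

    def _one_time_setup(self) -> None:
        """Hypothetical one-time setup, e.g. configuring a gradient scaler."""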

pytorch_lightning/accelerators/accelerator.py

@@ -79,8 +79,6 @@ class Accelerator:
         if self.training_type_plugin.setup_optimizers_in_pre_dispatch:
             self.training_type_plugin.setup_optimizers(trainer)
-
-        self.training_type_plugin.precision_plugin.pre_dispatch()
 
     def dispatch(self, trainer: "pl.Trainer") -> None:
         """Hook to do something before the training/evaluation/prediction starts."""
         self.training_type_plugin.dispatch(trainer)
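
For orientation, a self-contained toy sketch of the resulting hook flow. The classes below only mimic the wiring suggested by this hunk and the docstrings; in particular, the assumption that the precision plugin's `dispatch` still fires during `Accelerator.dispatch()` comes from the docstring in the next file, not from this diff:

class ToyPrecisionPlugin:
    # ``pre_dispatch`` used to exist here; after this commit only
    # ``dispatch`` remains as a precision-plugin hook.
    def dispatch(self, trainer: object) -> None:
        print("precision plugin: dispatch")

class ToyTrainingTypePlugin:
    setup_optimizers_in_pre_dispatch = True

    def __init__(self) -> None:
        self.precision_plugin = ToyPrecisionPlugin()

    def setup_optimizers(self, trainer: object) -> None:
        print("training type plugin: setup_optimizers")

    def dispatch(self, trainer: object) -> None:
        print("training type plugin: dispatch")

class ToyAccelerator:
    def __init__(self) -> None:
        self.training_type_plugin = ToyTrainingTypePlugin()

    def pre_dispatch(self, trainer: object) -> None:
        if self.training_type_plugin.setup_optimizers_in_pre_dispatch:
            self.training_type_plugin.setup_optimizers(trainer)
        # Before this commit, the removed call sat here:
        # self.training_type_plugin.precision_plugin.pre_dispatch()

    def dispatch(self, trainer: object) -> None:
        self.training_type_plugin.dispatch(trainer)
        # Assumed wiring: the precision plugin's dispatch is still reached
        # when Accelerator.dispatch() runs (per its docstring below).
        self.training_type_plugin.precision_plugin.dispatch(trainer)

acc = ToyAccelerator()
acc.pre_dispatch(trainer=None)  # no precision-plugin hook fires anymore
acc.dispatch(trainer=None)      # both dispatch hooks still fire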

pytorch_lightning/plugins/precision/precision_plugin.py

@@ -201,9 +201,6 @@ class PrecisionPlugin(CheckpointHooks):
         parameters = self.main_params(optimizer)
         torch.nn.utils.clip_grad_norm_(parameters, clip_val)
 
-    def pre_dispatch(self) -> None:
-        """Hook to do something before the training/evaluation/prediction starts."""
-
     def dispatch(self, trainer: "pl.Trainer") -> None:
         """Hook to do something when ``Accelerator.dispatch()`` gets called."""