Remove precision_plugin pre_dispatch() method (#10887)
* Remove precision_plugin pre_dispatch() method
* Update changelog
parent 26977043bf
commit 44cd412e91
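For downstream code, the practical effect is that a custom precision plugin overriding the removed hook silently stops being called. A minimal migration sketch, assuming a hypothetical user-defined subclass (`MyPrecision` and its setup logic are illustrative, not part of this commit):

```python
import pytorch_lightning as pl
from pytorch_lightning.plugins import PrecisionPlugin


class MyPrecision(PrecisionPlugin):
    """Hypothetical plugin that used to rely on the removed hook."""

    # Before this commit, an override like
    #
    #     def pre_dispatch(self) -> None:
    #         self._allocate_scaler_buffers()  # illustrative setup logic
    #
    # was invoked by the Accelerator right before training started. The hook
    # is gone now, so equivalent setup has to move to a hook that is still
    # called, e.g. `dispatch` (see the Accelerator hunk below):

    def dispatch(self, trainer: "pl.Trainer") -> None:
        ...  # setup logic formerly done in pre_dispatch
```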
@@ -188,6 +188,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed argument `return_result` from the `DDPSpawnPlugin.spawn()` method ([#10867](https://github.com/PyTorchLightning/pytorch-lightning/pull/10867))
 
+- Removed method `pre_dispatch` from the `PrecisionPlugin` class ([#10887](https://github.com/PyTorchLightning/pytorch-lightning/pull/10887))
+
 ### Fixed
 
 - Fixed an issue with `SignalConnector` not restoring the default signal handlers on teardown when running on SLURM or with fault-tolerant training enabled ([#10611](https://github.com/PyTorchLightning/pytorch-lightning/pull/10611))
@@ -79,8 +79,6 @@ class Accelerator:
         if self.training_type_plugin.setup_optimizers_in_pre_dispatch:
             self.training_type_plugin.setup_optimizers(trainer)
 
-        self.training_type_plugin.precision_plugin.pre_dispatch()
-
     def dispatch(self, trainer: "pl.Trainer") -> None:
         """Hook to do something before the training/evaluation/prediction starts."""
         self.training_type_plugin.dispatch(trainer)
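Read on its own, this hunk leaves the Accelerator's pre-dispatch phase touching only the training type plugin. A sketch of the resulting control flow, reconstructed from the context lines above (the enclosing method's name and signature are assumptions; only the body lines appear in the hunk):

```python
# Sketch only: reconstructed from the hunk's context lines; the enclosing
# method signature is an assumption based on the surrounding code.
def pre_dispatch(self, trainer: "pl.Trainer") -> None:
    if self.training_type_plugin.setup_optimizers_in_pre_dispatch:
        self.training_type_plugin.setup_optimizers(trainer)
    # The call to self.training_type_plugin.precision_plugin.pre_dispatch()
    # that used to follow here is removed by this commit.
```

The precision plugin's `dispatch` hook remains; its docstring in the hunk below states it is invoked when `Accelerator.dispatch()` gets called, which is why setup logic can move there.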
@@ -201,9 +201,6 @@ class PrecisionPlugin(CheckpointHooks):
         parameters = self.main_params(optimizer)
         torch.nn.utils.clip_grad_norm_(parameters, clip_val)
 
-    def pre_dispatch(self) -> None:
-        """Hook to do something before the training/evaluation/prediction starts."""
-
     def dispatch(self, trainer: "pl.Trainer") -> None:
         """Hook to do something when ``Accelerator.dispatch()`` gets called."""
 
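After this hunk, `PrecisionPlugin` no longer defines `pre_dispatch` at all, so stale overrides in subclasses become dead code. A small sketch of a check for such overrides (illustrative only; `MyPrecision` is the hypothetical plugin from the first sketch):

```python
from pytorch_lightning.plugins import PrecisionPlugin

# On a version containing this commit, the attribute is gone entirely:
assert not hasattr(PrecisionPlugin, "pre_dispatch")

# Any subclass still defining pre_dispatch now carries dead code.
# __subclasses__() lists direct subclasses that have been imported;
# vars(cls) only sees attributes defined on the class itself:
stale = [cls.__name__ for cls in PrecisionPlugin.__subclasses__()
         if "pre_dispatch" in vars(cls)]
print(stale)  # e.g. ["MyPrecision"] if the old override was left in place
```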