Remove deprecated `Strategy.post_dispatch` (#13461)

* Remove deprecated Strategy.post_dispatch

* changelog

* remove unused imports
Adrian Wälchli 2022-07-15 19:18:55 +02:00 committed by GitHub
parent 4ec6f85b33
commit d42711f22f
4 changed files with 3 additions and 45 deletions


@@ -281,6 +281,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed the need to explicitly load habana module ([#13338](https://github.com/PyTorchLightning/pytorch-lightning/pull/13338))
+- Removed the deprecated `Strategy.post_dispatch()` hook ([#13461](https://github.com/PyTorchLightning/pytorch-lightning/pull/13461))
 - Removed deprecated `pytorch_lightning.callbacks.lr_monitor.LearningRateMonitor.lr_sch_names` ([#13353](https://github.com/Lightning-AI/lightning/pull/13353))


@@ -30,10 +30,8 @@ from pytorch_lightning.plugins.io.checkpoint_plugin import CheckpointIO
 from pytorch_lightning.plugins.precision import PrecisionPlugin
 from pytorch_lightning.strategies.launchers.base import _Launcher
 from pytorch_lightning.trainer.states import TrainerFn
-from pytorch_lightning.utilities import rank_zero_deprecation
 from pytorch_lightning.utilities.apply_func import move_data_to_device
 from pytorch_lightning.utilities.distributed import ReduceOp
-from pytorch_lightning.utilities.model_helpers import is_overridden
 from pytorch_lightning.utilities.optimizer import optimizer_to_device, optimizers_to_device
 from pytorch_lightning.utilities.types import _PATH, LRSchedulerConfig, STEP_OUTPUT
@@ -61,11 +59,6 @@ class Strategy(ABC):
         self._lightning_optimizers: Dict[int, LightningOptimizer] = {}
         self.lr_scheduler_configs: List[LRSchedulerConfig] = []
         self.optimizer_frequencies: List[int] = []
-        if is_overridden("post_dispatch", self, parent=Strategy):
-            rank_zero_deprecation(
-                f"`{self.__class__.__name__}.post_dispatch()` has been deprecated in v1.6 and will be removed in v1.7."
-                f" Move your implementation to `{self.__class__.__name__}.teardown()` instead."
-            )

     @property
     def launcher(self) -> Optional[_Launcher]:
@@ -506,11 +499,3 @@ class Strategy(ABC):
     def __setstate__(self, state: Dict) -> None:
         self.__dict__ = state
         self.optimizers = self.optimizers  # re-create the `_lightning_optimizers`
-
-    def post_dispatch(self, trainer: "pl.Trainer") -> None:
-        r"""
-        .. deprecated::
-            v1.6 This method has been deprecated in v1.6 and will be removed in v1.7. Use :meth:`teardown` instead.
-
-        Hook to do something after the training/evaluation/prediction finishes.
-        """


@@ -1225,7 +1225,6 @@ class Trainer(
     def _teardown(self):
         """This is the Trainer's internal teardown, unrelated to the `teardown` hooks in LightningModule and
         Callback; those are handled by :meth:`_call_teardown_hook`."""
-        self.strategy.post_dispatch(self)
         self.strategy.teardown()
         loop = self._active_loop
         # loop should never be `None` here but it can because we don't know the trainer stage with `ddp_spawn`


@@ -1,29 +0,0 @@
-# Copyright The PyTorch Lightning team.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test deprecated functionality which will be removed in v1.7.0."""
-from re import escape
-
-import pytest
-import torch
-
-from pytorch_lightning.strategies import SingleDeviceStrategy
-
-
-def test_v1_7_0_post_dispatch_hook():
-    class CustomPlugin(SingleDeviceStrategy):
-        def post_dispatch(self, trainer):
-            pass
-
-    with pytest.deprecated_call(match=escape("`CustomPlugin.post_dispatch()` has been deprecated in v1.6")):
-        CustomPlugin(torch.device("cpu"))
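With the hook gone, the deleted deprecation test no longer applies. A hypothetical follow-up check, not part of this commit, could assert that constructing a strategy which still defines `post_dispatch` emits no warning at all:

import warnings

import torch
from pytorch_lightning.strategies import SingleDeviceStrategy


def test_post_dispatch_override_no_longer_warns():
    class CustomPlugin(SingleDeviceStrategy):
        def post_dispatch(self, trainer):
            pass

    # Escalate warnings to errors so any leftover deprecation message would fail the test.
    with warnings.catch_warnings():
        warnings.simplefilter("error")
        CustomPlugin(torch.device("cpu"))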