From b29b07e9788311326bca4779d70e89eb36bfc57f Mon Sep 17 00:00:00 2001
From: DuYicong515
Date: Sun, 27 Feb 2022 07:03:35 -0800
Subject: [PATCH] Remove AcceleratorConnector.use_dp (#12112)

---
 CHANGELOG.md                                          | 4 ++++
 pytorch_lightning/trainer/configuration_validator.py  | 3 ++-
 .../trainer/connectors/accelerator_connector.py       | 5 -----
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c0a1be3540..0e13686f90 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -607,6 +607,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed `AcceleratorConnector.has_tpu` property ([#12109](https://github.com/PyTorchLightning/pytorch-lightning/pull/12109))
 
+- Removed `AcceleratorConnector.use_dp` property ([#12112](https://github.com/PyTorchLightning/pytorch-lightning/pull/12112))
+
+
+
 
 ### Fixed
 
 - Fixed an issue where `HorovodStrategy.teardown()` did not complete gracefully if an exception was thrown during callback setup [#11752](https://github.com/PyTorchLightning/pytorch-lightning/pull/11752)
diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py
index ff910865b7..7e03d98f7d 100644
--- a/pytorch_lightning/trainer/configuration_validator.py
+++ b/pytorch_lightning/trainer/configuration_validator.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import pytorch_lightning as pl
+from pytorch_lightning.strategies import DataParallelStrategy
 from pytorch_lightning.trainer.states import TrainerFn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
@@ -208,7 +209,7 @@ def __verify_dp_batch_transfer_support(trainer: "pl.Trainer", model: "pl.Lightni
     batch_transfer_hooks = ("on_before_batch_transfer", "transfer_batch_to_device", "on_after_batch_transfer")
     datahook_selector = trainer._data_connector._datahook_selector
     for hook in batch_transfer_hooks:
-        if trainer._accelerator_connector.use_dp and (
+        if isinstance(trainer.strategy, DataParallelStrategy) and (
             is_overridden(hook, datahook_selector.model) or is_overridden(hook, datahook_selector.datamodule)
         ):
             raise MisconfigurationException(f"Overriding `{hook}` is not supported in DP mode.")
diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 1347aa7e68..6b296ffe00 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -47,7 +47,6 @@ from pytorch_lightning.plugins.environments import (
     TorchElasticEnvironment,
 )
 from pytorch_lightning.strategies import (
-    DataParallelStrategy,
     DDP2Strategy,
     DDPFullyShardedStrategy,
     DDPShardedStrategy,
@@ -847,7 +846,3 @@ class AcceleratorConnector:
         if isinstance(self.accelerator, TPUAccelerator):
             is_distributed |= self.strategy.is_distributed
         return is_distributed
-
-    @property
-    def use_dp(self) -> bool:
-        return isinstance(self.strategy, DataParallelStrategy)
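
Reviewer note on migration: the removed `use_dp` property was only an `isinstance` check on the active strategy, so code that relied on it can test the trainer's public `strategy` attribute directly, mirroring the validator change above. A minimal sketch of that pattern; the `Trainer` arguments below are illustrative only and assume a machine where the DP strategy can actually run:

    from pytorch_lightning import Trainer
    from pytorch_lightning.strategies import DataParallelStrategy

    # Illustrative configuration; DP requires multiple GPU devices.
    trainer = Trainer(accelerator="gpu", devices=2, strategy="dp")

    # Before #12112: trainer._accelerator_connector.use_dp
    # After #12112: inspect the strategy instance directly.
    uses_dp = isinstance(trainer.strategy, DataParallelStrategy)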