Remove AcceleratorConnector.use_dp (#12112)

Authored by DuYicong515 on 2022-02-27 07:03:35 -08:00; committed by GitHub
parent 0b677ecf2b
commit b29b07e978
3 changed files with 6 additions and 6 deletions

CHANGELOG.md

@@ -607,6 +607,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed `AcceleratorConnector.has_tpu` property ([#12109](https://github.com/PyTorchLightning/pytorch-lightning/pull/12109))
+- Removed `AcceleratorConnector.use_dp` property ([#12112](https://github.com/PyTorchLightning/pytorch-lightning/pull/12112))
 ### Fixed
 - Fixed an issue where `HorovodStrategy.teardown()` did not complete gracefully if an exception was thrown during callback setup ([#11752](https://github.com/PyTorchLightning/pytorch-lightning/pull/11752))
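
For user code that read the removed `trainer._accelerator_connector.use_dp` property, the equivalent check after this commit is an isinstance test against the active strategy, which is what the internal call site below switches to. A minimal sketch of the replacement (the Trainer arguments are illustrative, not part of this commit):

from pytorch_lightning import Trainer
from pytorch_lightning.strategies import DataParallelStrategy

# Any Trainer works; strategy="dp" selects DataParallelStrategy on a multi-GPU machine.
trainer = Trainer(accelerator="gpu", devices=2, strategy="dp")

# Replacement for the removed `use_dp` property: inspect the strategy type directly.
uses_dp = isinstance(trainer.strategy, DataParallelStrategy)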

pytorch_lightning/trainer/configuration_validator.py

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import pytorch_lightning as pl
+from pytorch_lightning.strategies import DataParallelStrategy
 from pytorch_lightning.trainer.states import TrainerFn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
@@ -208,7 +209,7 @@ def __verify_dp_batch_transfer_support(trainer: "pl.Trainer", model: "pl.LightningModule") -> None:
     batch_transfer_hooks = ("on_before_batch_transfer", "transfer_batch_to_device", "on_after_batch_transfer")
     datahook_selector = trainer._data_connector._datahook_selector
     for hook in batch_transfer_hooks:
-        if trainer._accelerator_connector.use_dp and (
+        if isinstance(trainer.strategy, DataParallelStrategy) and (
             is_overridden(hook, datahook_selector.model) or is_overridden(hook, datahook_selector.datamodule)
         ):
             raise MisconfigurationException(f"Overriding `{hook}` is not supported in DP mode.")
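
For context on what this check guards: under the DP strategy the batch-transfer hooks cannot be honored, since DataParallel splits and moves the batch across devices itself, so any override of those hooks is rejected at setup time. A rough sketch of a configuration that would hit this error (module name and Trainer arguments are illustrative):

import pytorch_lightning as pl

class HookOverridingModule(pl.LightningModule):
    # Overriding any of the three batch-transfer hooks trips the check above when
    # the selected strategy is DataParallelStrategy.
    def transfer_batch_to_device(self, batch, device, dataloader_idx):
        return super().transfer_batch_to_device(batch, device, dataloader_idx)

# pl.Trainer(accelerator="gpu", devices=2, strategy="dp") combined with this module raises
# MisconfigurationException: Overriding `transfer_batch_to_device` is not supported in DP mode.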

pytorch_lightning/trainer/connectors/accelerator_connector.py

@@ -47,7 +47,6 @@ from pytorch_lightning.plugins.environments import (
     TorchElasticEnvironment,
 )
 from pytorch_lightning.strategies import (
-    DataParallelStrategy,
     DDP2Strategy,
     DDPFullyShardedStrategy,
     DDPShardedStrategy,
@@ -847,7 +846,3 @@ class AcceleratorConnector:
         if isinstance(self.accelerator, TPUAccelerator):
             is_distributed |= self.strategy.is_distributed
         return is_distributed
-
-    @property
-    def use_dp(self) -> bool:
-        return isinstance(self.strategy, DataParallelStrategy)