Remove `AcceleratorConnector.has_tpu` (#12109)

DuYicong515 2022-02-27 06:16:03 -08:00 committed by GitHub
parent b2932337bc
commit 0b677ecf2b
3 changed files with 2 additions and 6 deletions

CHANGELOG.md

```diff
@@ -604,6 +604,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed `AcceleratorConnector.use_ipu` property ([#12110](https://github.com/PyTorchLightning/pytorch-lightning/pull/12110))
+- Removed `AcceleratorConnector.has_tpu` property ([#12109](https://github.com/PyTorchLightning/pytorch-lightning/pull/12109))
 
 ### Fixed
```

pytorch_lightning/trainer/connectors/accelerator_connector.py

```diff
@@ -848,10 +848,6 @@ class AcceleratorConnector:
         is_distributed |= self.strategy.is_distributed
         return is_distributed
 
-    @property
-    def has_tpu(self) -> bool:
-        return isinstance(self.accelerator, TPUAccelerator)
-
     @property
     def use_dp(self) -> bool:
         return isinstance(self.strategy, DataParallelStrategy)
```

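The diff above shows that `has_tpu` was only a thin wrapper around an `isinstance` check, so any downstream code that still relied on it can run the same check directly against the trainer's accelerator. A minimal sketch of that migration (the `Trainer(accelerator="cpu")` setup is illustrative, not part of this commit):

```python
from pytorch_lightning import Trainer
from pytorch_lightning.accelerators import TPUAccelerator

# Illustrative Trainer; any configured Trainer instance works the same way.
trainer = Trainer(accelerator="cpu")

# Equivalent to the removed `AcceleratorConnector.has_tpu` property,
# whose body was simply isinstance(self.accelerator, TPUAccelerator).
has_tpu = isinstance(trainer.accelerator, TPUAccelerator)
print(has_tpu)  # False on a CPU-only setup
```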
tests/accelerators/test_accelerator_connector.py

```diff
@@ -885,10 +885,8 @@ def test_strategy_choice_ddp_cpu_slurm(device_count_mock, setup_distributed_mock
 def test_unsupported_tpu_choice(monkeypatch):
     import pytorch_lightning.utilities.imports as imports
-    from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 
     monkeypatch.setattr(imports, "_XLA_AVAILABLE", True)
-    monkeypatch.setattr(AcceleratorConnector, "has_tpu", True)
     with pytest.raises(MisconfigurationException, match=r"accelerator='tpu', precision=64\)` is not implemented"):
         Trainer(accelerator="tpu", precision=64)
```