Remove `AcceleratorConnector.has_tpu` (#12109)
commit 0b677ecf2b (parent b2932337bc)
```diff
@@ -604,6 +604,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed `AcceleratorConnector.use_ipu` property ([#12110](https://github.com/PyTorchLightning/pytorch-lightning/pull/12110))
 
+- Removed `AcceleratorConnector.has_tpu` property ([#12109](https://github.com/PyTorchLightning/pytorch-lightning/pull/12109))
+
 ### Fixed
```
```diff
@@ -848,10 +848,6 @@ class AcceleratorConnector:
         is_distributed |= self.strategy.is_distributed
         return is_distributed
 
-    @property
-    def has_tpu(self) -> bool:
-        return isinstance(self.accelerator, TPUAccelerator)
-
     @property
     def use_dp(self) -> bool:
         return isinstance(self.strategy, DataParallelStrategy)
```
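The removed property was a thin `isinstance` wrapper, so any caller can inline the check against the resolved accelerator. A minimal sketch of the equivalent pattern, assuming `connector` is an `AcceleratorConnector` instance (the `running_on_tpu` helper name is hypothetical, not Lightning API):

```python
# Hypothetical helper showing the replacement for the removed `has_tpu`
# property; the `accelerator` attribute name comes from the property's
# body in the hunk above.
from pytorch_lightning.accelerators import TPUAccelerator


def running_on_tpu(connector) -> bool:
    # Equivalent to the removed property: a direct isinstance check
    # against the accelerator the connector resolved.
    return isinstance(connector.accelerator, TPUAccelerator)
```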
```diff
@@ -885,10 +885,8 @@ def test_strategy_choice_ddp_cpu_slurm(device_count_mock, setup_distributed_mock
 
 def test_unsupported_tpu_choice(monkeypatch):
     import pytorch_lightning.utilities.imports as imports
-    from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 
     monkeypatch.setattr(imports, "_XLA_AVAILABLE", True)
-    monkeypatch.setattr(AcceleratorConnector, "has_tpu", True)
     with pytest.raises(MisconfigurationException, match=r"accelerator='tpu', precision=64\)` is not implemented"):
         Trainer(accelerator="tpu", precision=64)
 
```
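Dropping the `monkeypatch.setattr(AcceleratorConnector, "has_tpu", True)` line is required, not just cosmetic: with its default `raising=True`, pytest's `monkeypatch.setattr` only patches attributes that already exist, so patching the now-deleted `has_tpu` would itself fail. A self-contained sketch of that behavior (the `Dummy` class is a stand-in, not Lightning code):

```python
import pytest


class Dummy:
    """Stand-in class with no `has_tpu` attribute."""


def test_setattr_on_missing_attribute_fails(monkeypatch):
    # By default (raising=True), monkeypatch.setattr raises AttributeError
    # rather than silently creating a new attribute on the target.
    with pytest.raises(AttributeError):
        monkeypatch.setattr(Dummy, "has_tpu", True)
```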