From 0b677ecf2bd93640efe6f2361c3576e61f9c9724 Mon Sep 17 00:00:00 2001
From: DuYicong515
Date: Sun, 27 Feb 2022 06:16:03 -0800
Subject: [PATCH] Remove `AcceleratorConnector.has_tpu` (#12109)

---
 CHANGELOG.md                                                   | 2 ++
 pytorch_lightning/trainer/connectors/accelerator_connector.py | 4 ----
 tests/accelerators/test_accelerator_connector.py              | 2 --
 3 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2c935414eb..c0a1be3540 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -604,6 +604,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed `AcceleratorConnector.use_ipu` property ([#12110](https://github.com/PyTorchLightning/pytorch-lightning/pull/12110))
 
+- Removed `AcceleratorConnector.has_tpu` property ([#12109](https://github.com/PyTorchLightning/pytorch-lightning/pull/12109))
+
 ### Fixed
 
 
diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index e3601b6acf..1347aa7e68 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -848,10 +848,6 @@ class AcceleratorConnector:
         is_distributed |= self.strategy.is_distributed
         return is_distributed
 
-    @property
-    def has_tpu(self) -> bool:
-        return isinstance(self.accelerator, TPUAccelerator)
-
     @property
     def use_dp(self) -> bool:
         return isinstance(self.strategy, DataParallelStrategy)
diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py
index b1468f13f4..cf745fbad4 100644
--- a/tests/accelerators/test_accelerator_connector.py
+++ b/tests/accelerators/test_accelerator_connector.py
@@ -885,10 +885,8 @@ def test_strategy_choice_ddp_cpu_slurm(device_count_mock, setup_distributed_mock
 
 def test_unsupported_tpu_choice(monkeypatch):
     import pytorch_lightning.utilities.imports as imports
-    from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 
     monkeypatch.setattr(imports, "_XLA_AVAILABLE", True)
-    monkeypatch.setattr(AcceleratorConnector, "has_tpu", True)
     with pytest.raises(MisconfigurationException, match=r"accelerator='tpu', precision=64\)` is not implemented"):
         Trainer(accelerator="tpu", precision=64)
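
Note for downstream code (not part of the patch): the removed property was a one-line wrapper, so any caller that relied on `has_tpu` can inline the same `isinstance` check against the trainer's accelerator. A minimal sketch of that migration, assuming PyTorch Lightning at this commit; the `trainer` construction below is illustrative only:

    from pytorch_lightning import Trainer
    from pytorch_lightning.accelerators import TPUAccelerator

    trainer = Trainer(accelerator="cpu")

    # Before this patch: trainer._accelerator_connector.has_tpu
    # After: replicate the removed property's body directly.
    has_tpu = isinstance(trainer.accelerator, TPUAccelerator)
    print(has_tpu)  # False here, since no TPU accelerator is configured

This mirrors the direction of the related #12110 removal: trivial type-check properties on `AcceleratorConnector` are dropped in favor of explicit `isinstance` checks at the call site.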