From b5b951b05af8811f3e78636f17285f60a2a22e4f Mon Sep 17 00:00:00 2001
From: DuYicong515
Date: Thu, 24 Mar 2022 17:35:46 -0700
Subject: [PATCH] Remove AcceleratorConnector.devices (#12435)

---
 CHANGELOG.md                                     | 3 +++
 .../trainer/connectors/accelerator_connector.py  | 9 ---------
 2 files changed, 3 insertions(+), 9 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b265dc0999..1f50b405ff 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -792,6 +792,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed `AcceleratorConnector.parallel_device_ids` property ([#12072](https://github.com/PyTorchLightning/pytorch-lightning/pull/12072))
 
+- Removed `AcceleratorConnector.devices` property ([#12435](https://github.com/PyTorchLightning/pytorch-lightning/pull/12435))
+
+
 ### Fixed
 
 - Fixed an issue where `ModelCheckpoint` could delete older checkpoints when `dirpath` has changed during resumed training ([#12045](https://github.com/PyTorchLightning/pytorch-lightning/pull/12045))

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index e1cf4c6232..7f22c458c2 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -59,7 +59,6 @@ from pytorch_lightning.strategies import (
     DeepSpeedStrategy,
     HorovodStrategy,
     IPUStrategy,
-    ParallelStrategy,
     SingleDeviceStrategy,
     SingleTPUStrategy,
     Strategy,
@@ -780,14 +779,6 @@ class AcceleratorConnector:
     def parallel_devices(self) -> List[Union[torch.device, int]]:
         return self._parallel_devices
 
-    @property
-    def devices(self) -> int:
-        if isinstance(self.strategy, SingleDeviceStrategy):
-            return 1
-        elif isinstance(self.strategy, ParallelStrategy):
-            return len(self.strategy.parallel_devices)
-        return 0
-
     @property
     def tpu_cores(self) -> Optional[Union[List[int], int]]:
         if isinstance(self.accelerator, TPUAccelerator):
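
For readers who still need the count that the removed property reported, below is a minimal standalone sketch of the same logic, using only the `pytorch_lightning.strategies` classes that appear in the diff above. `device_count_for` is a hypothetical helper name, not a library API; on releases after this change, reading `trainer.num_devices` is the more typical route (an assumption about the replacement API, not confirmed by this patch).

```python
from pytorch_lightning.strategies import ParallelStrategy, SingleDeviceStrategy, Strategy


def device_count_for(strategy: Strategy) -> int:
    """Hypothetical helper mirroring the removed ``AcceleratorConnector.devices`` property."""
    # A single-device strategy always runs on exactly one device.
    if isinstance(strategy, SingleDeviceStrategy):
        return 1
    # A parallel strategy runs on however many devices it was configured with.
    if isinstance(strategy, ParallelStrategy):
        return len(strategy.parallel_devices)
    # Anything else has no resolved devices.
    return 0
```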