diff --git a/pytorch_lightning/accelerators/gpu.py b/pytorch_lightning/accelerators/gpu.py
index 6a38cd2cf5..7672f2edea 100644
--- a/pytorch_lightning/accelerators/gpu.py
+++ b/pytorch_lightning/accelerators/gpu.py
@@ -27,17 +27,17 @@ class GPUAccelerator(Accelerator):
     """Accelerator for GPU devices."""
 
     def setup_environment(self) -> None:
+        """
+        Raises:
+            MisconfigurationException:
+                If the selected device is not GPU.
+        """
         super().setup_environment()
         if "cuda" not in str(self.root_device):
             raise MisconfigurationException(f"Device should be GPU, got {self.root_device} instead")
         torch.cuda.set_device(self.root_device)
 
     def setup(self, trainer: "pl.Trainer") -> None:
-        """
-        Raises:
-            MisconfigurationException:
-                If the selected device is not GPU.
-        """
         self.set_nvidia_flags(trainer.local_rank)
         return super().setup(trainer)
 
diff --git a/pytorch_lightning/accelerators/ipu.py b/pytorch_lightning/accelerators/ipu.py
index 4de644b15e..1456847a6a 100644
--- a/pytorch_lightning/accelerators/ipu.py
+++ b/pytorch_lightning/accelerators/ipu.py
@@ -24,6 +24,11 @@ class IPUAccelerator(Accelerator):
     """Accelerator for IPUs."""
 
     def setup_optimizers(self, trainer: "pl.Trainer") -> None:
+        """
+        Raises:
+            MisconfigurationException:
+                If multiple optimizers are provided.
+        """
         super().setup_optimizers(trainer)
 
         if len(self.optimizers) > 1:
diff --git a/pytorch_lightning/accelerators/tpu.py b/pytorch_lightning/accelerators/tpu.py
index 954bed3dbc..1424bf2157 100644
--- a/pytorch_lightning/accelerators/tpu.py
+++ b/pytorch_lightning/accelerators/tpu.py
@@ -36,7 +36,9 @@ class TPUAccelerator(Accelerator):
         """
         Raises:
             MisconfigurationException:
-                If AMP is used with TPU, or if TPUs are not using a single TPU core or TPU spawn training.
+                If AMP is used with TPU.
+            MisconfigurationException:
+                If TPUs are not using a single TPU core or TPU spawn training.
         """
         if isinstance(self.precision_plugin, MixedPrecisionPlugin):
             raise MisconfigurationException(