Document exceptions in accelerators (#9558)

* Document exceptions in ipu.py

* Document exceptions in tpu.py

* Document exceptions in gpu.py
Aki Nitta 2021-09-18 15:14:08 +09:00 committed by GitHub
parent c7451b3ccf
commit f5608e90d6
3 changed files with 13 additions and 6 deletions

pytorch_lightning/accelerators/gpu.py

@@ -27,17 +27,17 @@ class GPUAccelerator(Accelerator):
"""Accelerator for GPU devices."""
def setup_environment(self) -> None:
"""
Raises:
MisconfigurationException:
If the selected device is not GPU.
"""
super().setup_environment()
if "cuda" not in str(self.root_device):
raise MisconfigurationException(f"Device should be GPU, got {self.root_device} instead")
torch.cuda.set_device(self.root_device)
def setup(self, trainer: "pl.Trainer") -> None:
"""
Raises:
MisconfigurationException:
If the selected device is not GPU.
"""
self.set_nvidia_flags(trainer.local_rank)
return super().setup(trainer)
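
The guard documented above is easy to exercise in isolation. Below is a minimal, self-contained sketch of the same behaviour; the helper name check_gpu_root_device is ours, not Lightning's, while MisconfigurationException is imported from pytorch_lightning.utilities.exceptions as in the 1.4-era codebase.

import torch
from pytorch_lightning.utilities.exceptions import MisconfigurationException

def check_gpu_root_device(root_device: torch.device) -> None:
    # Same check as GPUAccelerator.setup_environment above: reject
    # anything that is not a CUDA device before binding to it.
    if "cuda" not in str(root_device):
        raise MisconfigurationException(f"Device should be GPU, got {root_device} instead")
    torch.cuda.set_device(root_device)

try:
    check_gpu_root_device(torch.device("cpu"))
except MisconfigurationException as err:
    print(err)  # Device should be GPU, got cpu instead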

pytorch_lightning/accelerators/ipu.py

@@ -24,6 +24,11 @@ class IPUAccelerator(Accelerator):
"""Accelerator for IPUs."""
def setup_optimizers(self, trainer: "pl.Trainer") -> None:
"""
Raises:
MisconfigurationException:
If multiple optimizers are provided.
"""
super().setup_optimizers(trainer)
if len(self.optimizers) > 1:
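
For context, a hedged sketch of the constraint this hunk documents. The diff truncates the raise, so the message string below is a placeholder, not the real one; only the length check mirrors the code above.

import torch
from pytorch_lightning.utilities.exceptions import MisconfigurationException

def check_single_optimizer(optimizers: list) -> None:
    # Mirrors the documented IPU constraint: more than one optimizer is a
    # misconfiguration. Placeholder message; the real string is cut off
    # in the diff above.
    if len(optimizers) > 1:
        raise MisconfigurationException("IPUs support a single optimizer only")

params = [torch.nn.Parameter(torch.zeros(1))]
check_single_optimizer([torch.optim.SGD(params, lr=0.1), torch.optim.Adam(params, lr=0.1)])  # raises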

pytorch_lightning/accelerators/tpu.py

@@ -36,7 +36,9 @@ class TPUAccelerator(Accelerator):
"""
Raises:
MisconfigurationException:
If AMP is used with TPU, or if TPUs are not using a single TPU core or TPU spawn training.
If AMP is used with TPU.
MisconfigurationException:
If TPUs are not using a single TPU core or TPU spawn training.
"""
if isinstance(self.precision_plugin, MixedPrecisionPlugin):
raise MisconfigurationException(
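
A similar standalone sketch of the first documented condition (AMP on TPU). The import path for MixedPrecisionPlugin assumes the 1.4-era pytorch_lightning.plugins namespace, and the message is illustrative since the diff truncates the real one.

from pytorch_lightning.plugins import MixedPrecisionPlugin
from pytorch_lightning.utilities.exceptions import MisconfigurationException

def check_tpu_precision(precision_plugin) -> None:
    # First documented condition: a mixed-precision (AMP) plugin is
    # rejected on TPUs. Placeholder message; the diff cuts off the real one.
    if isinstance(precision_plugin, MixedPrecisionPlugin):
        raise MisconfigurationException("AMP is not supported on TPUs")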