Document exceptions in accelerators (#9558)
* Document exceptions in ipu.py
* Document exceptions in tpu.py
* Document exceptions in gpu.py
This commit is contained in:
parent c7451b3ccf
commit f5608e90d6
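All three files use the Google-style "Raises:" docstring section, which Sphinx's napoleon extension renders as an exceptions list in the generated API docs. A minimal sketch of the pattern (the function and device names below are hypothetical, not part of Lightning):

    def select_device(name: str) -> str:
        """Return the validated device name.

        Raises:
            ValueError:
                If the requested device name is not recognized.
        """
        # Hypothetical validation, only to illustrate the docstring style.
        if name not in ("cpu", "cuda", "tpu", "ipu"):
            raise ValueError(f"Unknown device: {name}")
        return name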
@@ -27,17 +27,17 @@ class GPUAccelerator(Accelerator):
     """Accelerator for GPU devices."""
 
     def setup_environment(self) -> None:
+        """
+        Raises:
+            MisconfigurationException:
+                If the selected device is not GPU.
+        """
         super().setup_environment()
         if "cuda" not in str(self.root_device):
             raise MisconfigurationException(f"Device should be GPU, got {self.root_device} instead")
         torch.cuda.set_device(self.root_device)
 
     def setup(self, trainer: "pl.Trainer") -> None:
+        """
+        Raises:
+            MisconfigurationException:
+                If the selected device is not GPU.
+        """
         self.set_nvidia_flags(trainer.local_rank)
         return super().setup(trainer)
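For context, here is a standalone sketch of the check that the new setup_environment docstring describes. The helper name and stand-in exception class are assumptions for illustration; the real exception lives in pytorch_lightning.utilities.exceptions:

    import torch

    class MisconfigurationException(Exception):
        """Stand-in for pytorch_lightning.utilities.exceptions.MisconfigurationException."""

    def check_root_device(root_device: torch.device) -> None:
        # Mirrors the guard above: any root device whose string form does not
        # mention "cuda" is rejected before training starts.
        if "cuda" not in str(root_device):
            raise MisconfigurationException(f"Device should be GPU, got {root_device} instead")

    check_root_device(torch.device("cuda", 0))  # ok: str() gives "cuda:0"
    check_root_device(torch.device("cpu"))      # raises MisconfigurationException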
@@ -24,6 +24,11 @@ class IPUAccelerator(Accelerator):
     """Accelerator for IPUs."""
 
     def setup_optimizers(self, trainer: "pl.Trainer") -> None:
+        """
+        Raises:
+            MisconfigurationException:
+                If multiple optimizers are provided.
+        """
         super().setup_optimizers(trainer)
 
         if len(self.optimizers) > 1:
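The hunk truncates the raise itself, so here is a hedged sketch of the documented restriction (the helper name and error message are illustrative, not Lightning's exact code):

    class MisconfigurationException(Exception):
        pass

    def validate_optimizers(optimizers: list) -> None:
        # Mirrors the condition above: IPU training accepts a single optimizer only.
        if len(optimizers) > 1:
            raise MisconfigurationException("IPUs currently only support one optimizer.")

    validate_optimizers(["sgd"])          # ok
    validate_optimizers(["sgd", "adam"])  # raises MisconfigurationException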
@@ -36,7 +36,9 @@ class TPUAccelerator(Accelerator):
         """
         Raises:
             MisconfigurationException:
-                If AMP is used with TPU, or if TPUs are not using a single TPU core or TPU spawn training.
+                If AMP is used with TPU.
+            MisconfigurationException:
+                If TPUs are not using a single TPU core or TPU spawn training.
         """
         if isinstance(self.precision_plugin, MixedPrecisionPlugin):
             raise MisconfigurationException(
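The split docstring now pairs each MisconfigurationException entry with one of two separate checks; the hunk shows the first of them, the isinstance guard against AMP. A standalone sketch under the assumption that MixedPrecisionPlugin identifies AMP (the stub classes and message below are illustrative):

    class MixedPrecisionPlugin:
        pass

    class MisconfigurationException(Exception):
        pass

    def check_tpu_precision(precision_plugin: object) -> None:
        # First documented check: AMP-style mixed precision is rejected on TPU.
        if isinstance(precision_plugin, MixedPrecisionPlugin):
            raise MisconfigurationException("AMP is not supported with TPU.")

    check_tpu_precision(object())                # ok
    check_tpu_precision(MixedPrecisionPlugin())  # raises MisconfigurationException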