From b0d19969200f87a14cd86a2e29fe8e0611518cb2 Mon Sep 17 00:00:00 2001
From: edenlightning <66261195+edenlightning@users.noreply.github.com>
Date: Wed, 24 Feb 2021 15:13:48 -0500
Subject: [PATCH] Update gpu warning (#6181)

Co-authored-by: Jirka Borovec
Co-authored-by: Kaushik Bokka
---
 .../trainer/connectors/accelerator_connector.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 1b2eed56b8..f63fdc6944 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -547,7 +547,10 @@ class AcceleratorConnector(object):
         rank_zero_info(f'TPU available: {_TPU_AVAILABLE}, using: {num_cores} TPU cores')
 
         if torch.cuda.is_available() and self._device_type != DeviceType.GPU:
-            rank_zero_warn("GPU available but not used. Set the --gpus flag when calling the script.")
+            rank_zero_warn(
+                "GPU available but not used. Set the gpus flag in your trainer"
+                " `Trainer(gpus=1)` or script `--gpus=1`."
+            )
 
     def _set_horovod_backend(self):
         self.check_horovod()
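
Not part of the patch itself: a minimal, hypothetical sketch of how a user would act on the updated warning, assuming the PyTorch Lightning 1.x Trainer API where `gpus=1` requests a single GPU:

    import pytorch_lightning as pl

    # Requesting one GPU via the Trainer (or launching the script with
    # --gpus=1) switches the device type to GPU, so the rank-zero warning
    # updated in this patch is not emitted when CUDA is available.
    trainer = pl.Trainer(gpus=1)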