docstring changes in tuner (#6264)

* docstring changes in tuner

* added full stop
Kunal Mundada authored 2021-03-02 06:52:44 +05:30, committed by GitHub
parent 6788dbabff
commit 3371d32664
3 changed files with 21 additions and 0 deletions


@@ -17,6 +17,11 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException
 def pick_multiple_gpus(nb):
+    '''
+    Raises:
+        MisconfigurationException:
+            If ``gpus`` is set to 0 when ``auto_select_gpus=True``.
+    '''
     if nb == 0:
         raise MisconfigurationException(
             r"auto_select_gpus=True, gpus=0 is not a valid configuration.\
@@ -33,6 +38,11 @@ def pick_multiple_gpus(nb):
 def pick_single_gpu(exclude_gpus: list):
+    '''
+    Raises:
+        RuntimeError:
+            If you try to allocate a GPU when no GPUs are available.
+    '''
     for i in range(torch.cuda.device_count()):
         if i in exclude_gpus:
             continue
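
For context, a minimal sketch of how the two documented exceptions surface. It assumes PyTorch Lightning of this release (~1.2) and that the functions live in ``pytorch_lightning.tuner.auto_gpu_select``; the module path is an assumption, since file names are not visible in this view.

import torch

from pytorch_lightning.tuner.auto_gpu_select import pick_multiple_gpus, pick_single_gpu
from pytorch_lightning.utilities.exceptions import MisconfigurationException

# gpus=0 together with auto_select_gpus=True is the invalid configuration
# documented in pick_multiple_gpus().
try:
    pick_multiple_gpus(0)
except MisconfigurationException as err:
    print(f"misconfiguration: {err}")

# Excluding every visible device (or having no CUDA devices at all) leaves
# nothing to allocate, which triggers the documented RuntimeError.
try:
    pick_single_gpu(exclude_gpus=list(range(torch.cuda.device_count())))
except RuntimeError as err:
    print(f"no GPU available: {err}")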


@@ -70,6 +70,13 @@ def scale_batch_size(
         **fit_kwargs: remaining arguments to be passed to .fit(), e.g., dataloader
             or datamodule.
+
+    Raises:
+        MisconfigurationException:
+            If field ``batch_arg_name`` is not found in ``model`` or ``model.hparams``, or
+            if the batch scaling feature is used with dataloaders passed directly to ``.fit()``.
+        ValueError:
+            If ``mode`` in method ``scale_batch_size`` is neither ``power`` nor ``binsearch``.
     """
     if trainer.fast_dev_run:
         rank_zero_warn('Skipping batch size scaler since fast_dev_run is enabled.', UserWarning)
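
As a hedged sketch of the call these docstrings cover, assuming the ``trainer.tuner.scale_batch_size`` API of this release; ``LitModel`` below is a hypothetical minimal LightningModule, not part of this commit.

import torch
from torch.utils.data import DataLoader, TensorDataset
from pytorch_lightning import LightningModule, Trainer

class LitModel(LightningModule):
    # Hypothetical module: `batch_size` must exist on the model (or in its
    # hparams) so the scaler can find it via `batch_arg_name`.
    def __init__(self, batch_size=2):
        super().__init__()
        self.batch_size = batch_size
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.cross_entropy(self.layer(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)

    def train_dataloader(self):
        ds = TensorDataset(torch.randn(256, 32), torch.randint(0, 2, (256,)))
        return DataLoader(ds, batch_size=self.batch_size)

trainer = Trainer(max_epochs=1)
# 'power' doubles batch_size until OOM; 'binsearch' then bisects between the
# last successful and first failing size. Any other string -> ValueError.
new_size = trainer.tuner.scale_batch_size(LitModel(), mode='binsearch')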


@@ -106,6 +106,10 @@ def lr_find(
         update_attr: Whether to update the learning rate attribute or not.
+    Raises:
+        MisconfigurationException:
+            If ``lr``/``learning_rate`` in ``model`` or ``model.hparams`` isn't overridden when ``auto_lr_find=True``, or
+            if you are using more than one optimizer with the learning rate finder.
 
     Example::
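
The ``Example::`` block is truncated in this view. For completeness, a hedged sketch of how ``lr_find`` is typically invoked in this release (``trainer.tuner.lr_find`` and ``suggestion()``; ``LitModel`` is the hypothetical module from the previous sketch, assumed to read ``self.lr`` in ``configure_optimizers``):

from pytorch_lightning import Trainer

model = LitModel()
model.lr = 1e-3  # attribute the finder can inspect and update

trainer = Trainer()
lr_finder = trainer.tuner.lr_find(model)  # runs the LR range test
model.lr = lr_finder.suggestion()         # LR at the steepest loss descent

# A configure_optimizers() that returns more than one optimizer makes
# lr_find raise the MisconfigurationException documented above.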