Document exceptions in callbacks (#5541)

* Add Raises: section to docstring

* Add Raises section to the docs

* Add raises section to the docs

* Apply suggestions from code review

Co-authored-by: Jirka Borovec <Borda@users.noreply.github.com>

* fix

* Remove unnecessary instance check

Co-authored-by: Jirka Borovec <Borda@users.noreply.github.com>
Akihiro Nitta, 2021-02-15 19:24:36 +09:00, committed by GitHub
parent 52c07f2f03
commit 0a2fb05aac
7 changed files with 52 additions and 2 deletions


@@ -54,6 +54,12 @@ class EarlyStopping(Callback):
        strict: whether to crash the training if ``monitor`` is
            not found in the validation metrics. Default: ``True``.

    Raises:
        MisconfigurationException:
            If ``mode`` is not one of ``"min"``, ``"max"``, or ``"auto"``.
        RuntimeError:
            If the metric ``monitor`` is not available.

    Example::

        >>> from pytorch_lightning import Trainer
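
A minimal sketch (not part of the diff) of the first condition above; it assumes, per the docstring, that the ``mode`` value is validated when the callback is constructed::

    from pytorch_lightning.callbacks import EarlyStopping
    from pytorch_lightning.utilities.exceptions import MisconfigurationException

    try:
        # "maximum" is not one of "min", "max", or "auto"
        EarlyStopping(monitor="val_loss", mode="maximum")
    except MisconfigurationException as err:
        print(err)

The ``RuntimeError`` path, by contrast, only fires during training, when ``strict=True`` and the monitored metric is missing from the validation metrics.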


@@ -318,8 +318,12 @@ class BackboneFinetuning(BaseFinetuning):
         self.verbose = verbose

     def on_fit_start(self, trainer, pl_module):
-        if hasattr(pl_module, "backbone") and \
-                (isinstance(pl_module.backbone, Module) or isinstance(pl_module.backbone, Sequential)):
+        """
+        Raises:
+            MisconfigurationException:
+                If LightningModule has no ``nn.Module`` ``backbone`` attribute.
+        """
+        if hasattr(pl_module, "backbone") and isinstance(pl_module.backbone, Module):
             return
         raise MisconfigurationException("The LightningModule should have a nn.Module `backbone` attribute")
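
Since ``nn.Sequential`` is itself a subclass of ``nn.Module``, the single ``isinstance`` check covers both cases, which is why the second check could be dropped (the "Remove unnecessary instance check" commit). A made-up model that satisfies the check::

    import torch.nn as nn
    from pytorch_lightning import LightningModule

    class FinetunableModel(LightningModule):  # hypothetical example model
        def __init__(self):
            super().__init__()
            # any nn.Module works here, including nn.Sequential
            self.backbone = nn.Sequential(nn.Linear(32, 16), nn.ReLU())
            self.head = nn.Linear(16, 2)

        def forward(self, x):
            return self.head(self.backbone(x))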


@@ -48,6 +48,10 @@ class GPUStatsMonitor(Callback):
        temperature: Set to ``True`` to monitor the memory and GPU temperature in degrees Celsius.
            Default: ``False``.

    Raises:
        MisconfigurationException:
            If the NVIDIA driver is not installed, if training is not running on GPUs,
            or if the ``Trainer`` has no logger.

    Example::

        >>> from pytorch_lightning import Trainer
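
A hedged usage sketch; per the Raises entry above, the callback fails with ``MisconfigurationException`` when ``nvidia-smi`` is unavailable, when training is not on GPUs, or when the ``Trainer`` was built without a logger::

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import GPUStatsMonitor

    # assumes a machine with the NVIDIA driver installed
    gpu_stats = GPUStatsMonitor(temperature=True)
    trainer = Trainer(callbacks=[gpu_stats], gpus=1)  # needs GPUs and a logger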


@@ -32,6 +32,13 @@ class GradientAccumulationScheduler(Callback):
    Args:
        scheduling: scheduling dictionary in the format ``{epoch: accumulation_factor}``

    Raises:
        TypeError:
            If ``scheduling`` is an empty ``dict``,
            or if not all keys and values of ``scheduling`` are integers.
        IndexError:
            If ``minimal_epoch`` is less than 0.

    Example::

        >>> from pytorch_lightning import Trainer
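
A sketch of the documented contract, assuming the scheduling dict maps epoch numbers to accumulation factors as described in Args::

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import GradientAccumulationScheduler

    # accumulate 4 batches from epoch 5 on, 8 from epoch 10 on
    accumulator = GradientAccumulationScheduler(scheduling={5: 4, 10: 8})
    trainer = Trainer(callbacks=[accumulator])

    # per the Raises section, both of these would fail with TypeError:
    # GradientAccumulationScheduler(scheduling={})        # empty dict
    # GradientAccumulationScheduler(scheduling={1: 2.5})  # non-integer factor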


@@ -38,6 +38,10 @@ class LearningRateMonitor(Callback):
        log_momentum: option to also log the momentum values of the optimizer, if the optimizer
            has the ``momentum`` or ``betas`` attribute. Defaults to ``False``.

    Raises:
        MisconfigurationException:
            If ``logging_interval`` is not one of ``"step"``, ``"epoch"``, or ``None``.

    Example::

        >>> from pytorch_lightning import Trainer
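
A minimal sketch of the ``logging_interval`` validation, assuming (per the docstring above) that the check happens at construction::

    from pytorch_lightning.callbacks import LearningRateMonitor
    from pytorch_lightning.utilities.exceptions import MisconfigurationException

    LearningRateMonitor(logging_interval="step")  # ok: "step", "epoch", or None

    try:
        LearningRateMonitor(logging_interval="batch")  # not an accepted value
    except MisconfigurationException as err:
        print(err)
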
@@ -77,6 +81,10 @@ class LearningRateMonitor(Callback):
        Called before training; determines unique names for all lr
        schedulers in the case of multiple schedulers of the same type
        or in the case of multiple parameter groups.

        Raises:
            MisconfigurationException:
                If ``Trainer`` has no ``logger``.
        """
        if not trainer.logger:
            raise MisconfigurationException(
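
The logger check above means a ``Trainer`` created with ``logger=False`` cannot use this callback; a hedged sketch of the failure mode::

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import LearningRateMonitor

    trainer = Trainer(logger=False, callbacks=[LearningRateMonitor()])
    # trainer.fit(model) would now raise MisconfigurationException,
    # since there is no logger to record the learning rates to.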


@@ -115,6 +115,14 @@ class ModelCheckpoint(Callback):
    For example, you can change the default last checkpoint name by doing
    ``checkpoint_callback.CHECKPOINT_NAME_LAST = "{epoch}-last"``

    Raises:
        MisconfigurationException:
            If ``save_top_k`` is neither ``None`` nor greater than or equal to ``-1``,
            if ``monitor`` is ``None`` and ``save_top_k`` is not one of ``None``, ``-1``, or ``0``, or
            if ``mode`` is not one of ``"min"``, ``"max"``, or ``"auto"``.
        ValueError:
            If ``trainer.save_checkpoint`` is ``None``.

    Example::

        >>> from pytorch_lightning import Trainer
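
A sketch of the ``save_top_k`` rules above (the monitored metric name is made up)::

    from pytorch_lightning.callbacks import ModelCheckpoint
    from pytorch_lightning.utilities.exceptions import MisconfigurationException

    # valid: keep the three best checkpoints by "val_loss"
    ModelCheckpoint(monitor="val_loss", save_top_k=3, mode="min")

    try:
        ModelCheckpoint(save_top_k=-2)  # must be None or >= -1
    except MisconfigurationException as err:
        print(err)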


@@ -135,6 +135,14 @@ class ModelPruning(Callback):
            verbose: Verbosity level. 0 to disable, 1 to log overall sparsity, 2 to log per-layer sparsity

        Raises:
            MisconfigurationException:
                If ``parameter_names`` is neither ``"weight"`` nor ``"bias"``,
                if the provided ``pruning_fn`` is not supported,
                if ``pruning_dim`` is not provided when using a structured ``pruning_fn``,
                if ``pruning_norm`` is not provided when using ``"ln_structured"``,
                if ``pruning_fn`` is neither ``str`` nor :class:`torch.nn.utils.prune.BasePruningMethod`, or
                if ``amount`` is not one of ``int``, ``float``, or ``Callable``.
        """
        self._use_global_unstructured = use_global_unstructured
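
A hedged sketch of the ``pruning_fn`` rules: unstructured functions need no ``pruning_dim``, while structured ones do::

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import ModelPruning

    # unstructured pruning: no pruning_dim required
    pruning = ModelPruning("l1_unstructured", amount=0.5)
    trainer = Trainer(callbacks=[pruning])

    # a structured function without pruning_dim, e.g.
    # ModelPruning("ln_structured", pruning_norm=1, amount=0.5),
    # raises MisconfigurationException per the docstring above.
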
@@ -382,6 +390,11 @@ class ModelPruning(Callback):
        """
        This function is responsible for sanitizing ``parameters_to_prune`` and ``parameter_names``.
        If ``parameters_to_prune is None``, it will be generated with all parameters of the model.

        Raises:
            MisconfigurationException:
                If ``parameters_to_prune`` doesn't exist in the model, or
                if ``parameters_to_prune`` is neither a list of tuples nor ``None``.
        """
        parameters = parameter_names or ModelPruning.PARAMETER_NAMES
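
For reference, a valid ``parameters_to_prune`` is a list of ``(module, parameter_name)`` tuples, consistent with the type check documented above; a small made-up example::

    import torch.nn as nn

    model = nn.Sequential(nn.Linear(4, 4), nn.Linear(4, 2))
    # prune only the weight tensors of both Linear layers
    parameters_to_prune = [(model[0], "weight"), (model[1], "weight")]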