diff --git a/pytorch_lightning/pt_overrides/override_data_parallel.py b/pytorch_lightning/pt_overrides/override_data_parallel.py
index 522955e86d..89b550fdbb 100644
--- a/pytorch_lightning/pt_overrides/override_data_parallel.py
+++ b/pytorch_lightning/pt_overrides/override_data_parallel.py
@@ -112,7 +112,7 @@ class LightningDistributedDataParallel(DistributedDataParallel):
         return output
 
 
-def parallel_apply(modules, inputs, kwargs_tup=None, devices=None):
+def parallel_apply(modules, inputs, kwargs_tup=None, devices=None):  # pragma: no cover
     r"""Applies each `module` in :attr:`modules` in parallel on arguments
     contained in :attr:`inputs` (positional) and :attr:`kwargs_tup` (keyword)
     on each of :attr:`devices`.
diff --git a/setup.cfg b/setup.cfg
index 01c0b04981..f090474258 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -33,6 +33,7 @@ exclude_lines =
     raise Exception
     warnings
     print
+    raise RuntimeError
 
 omit =
     pytorch_lightning/callbacks/pt_callbacks.py
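Both hunks serve the same purpose: keeping hard-to-exercise code out of the coverage report. The `# pragma: no cover` comment tells coverage.py to skip the entire `parallel_apply` function, while the new `raise RuntimeError` entry under `exclude_lines` in setup.cfg excludes any individual source line matching that pattern. A minimal sketch of the second mechanism, using a hypothetical function that is not part of this PR:

    # Hypothetical example (not from the PR) of the exclude_lines effect:
    # with "raise RuntimeError" listed in setup.cfg, coverage.py treats any
    # line matching that pattern as excluded, so this untested error branch
    # no longer appears as a missed line in the coverage report.
    def get_gpu_count(requested):
        if requested < 0:
            raise RuntimeError('requested GPU count must be non-negative')
        return requested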