pt dpp some ignores

William Falcon 2019-07-24 19:29:51 -04:00
parent db9a8cfe78
commit 6fb27c4526
2 changed files with 2 additions and 1 deletion

View File

@@ -112,7 +112,7 @@ class LightningDistributedDataParallel(DistributedDataParallel):
         return output
-def parallel_apply(modules, inputs, kwargs_tup=None, devices=None):
+def parallel_apply(modules, inputs, kwargs_tup=None, devices=None): # pragma: no cover
     r"""Applies each `module` in :attr:`modules` in parallel on arguments
     contained in :attr:`inputs` (positional) and :attr:`kwargs_tup` (keyword)
     on each of :attr:`devices`.
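
The only change in this hunk is the trailing `# pragma: no cover` comment, which is coverage.py's default marker for excluding a line (and, when placed on a `def` or `class` line, the entire body) from the coverage report. A minimal sketch of the effect, using hypothetical function names not taken from this repo:

    def add(a, b):
        # Measured by coverage.py as usual.
        return a + b

    def debug_dump(state):  # pragma: no cover
        # The pragma on the def line excludes this whole function from the
        # report, which is useful for code paths the CI suite cannot
        # exercise (e.g. multi-GPU-only code like parallel_apply above).
        print(state)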

View File

@@ -33,6 +33,7 @@ exclude_lines =
     raise Exception
     warnings
     print
+    raise RuntimeError
 omit =
     pytorch_lightning/callbacks/pt_callbacks.py
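
The second hunk adds `raise RuntimeError` to the `exclude_lines` list in the coverage.py configuration. Each entry in that list is treated as a regular expression matched against source lines, so any line containing `raise RuntimeError` is omitted from the coverage report without needing a per-line pragma. A small illustration under that assumption (the function below is hypothetical, not from the changed files):

    def load_checkpoint(path):
        if path is None:
            # With 'raise RuntimeError' listed in exclude_lines, this line
            # no longer counts as a miss even if no test hits the error branch.
            raise RuntimeError('no checkpoint path given')
        return path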