Update to Mypy>0.9 (#8386)

Carlos Mocholí 2021-07-13 08:23:36 +02:00 committed by GitHub
parent 733cdbb9ad
commit c5a120ed9d
6 changed files with 56 additions and 109 deletions


@@ -17,6 +17,6 @@ jobs:
          python-version: 3.8
      - name: Install mypy
        run: |
          pip install mypy==0.790
          grep mypy requirements/test.txt | xargs -0 pip install
          pip list
      - run: mypy


@@ -4,6 +4,7 @@ requires = [
"wheel",
]
[tool.isort]
known_first_party = [
"benchmarks",
@@ -16,3 +17,51 @@ profile = "black"
line_length = 120
force_sort_within_sections = "False"
order_by_type = "False"
[tool.mypy]
files = ["pytorch_lightning", "pl_examples", "benchmarks"]
disallow_untyped_defs = "True"
ignore_missing_imports = "True"
show_error_codes = "True"
warn_redundant_casts = "True"
warn_unused_configs = "True"
warn_unused_ignores = "True"
allow_redefinition = "True"
# disable this rule as the Trainer attributes are defined in the connectors, not in its __init__
disable_error_code = "attr-defined"
# TODO: Fix typing for these modules
[[tool.mypy.overrides]]
module = [
"pytorch_lightning.callbacks.*",
"pytorch_lightning.core.*",
"pytorch_lightning.loggers.*",
"pytorch_lightning.loops.*",
"pytorch_lightning.metrics.*",
"pytorch_lightning.overrides.*",
"pytorch_lightning.plugins.environments.*",
"pytorch_lightning.plugins.training_type.*",
"pytorch_lightning.profiler.*",
"pytorch_lightning.trainer.*",
"pytorch_lightning.distributed.*",
"pytorch_lightning.tuner.*",
"pytorch_lightning.utilities.*",
"pl_examples.*",
"benchmarks.*",
"tests.helpers.*"
]
ignore_errors = "True"
[[tool.mypy.overrides]]
module = [
"pytorch_lightning.callbacks.pruning",
"pytorch_lightning.trainer.evaluation_loop",
"pytorch_lightning.trainer.connectors.logger_connector",
"pytorch_lightning.utilities.cli",
"pytorch_lightning.utilities.device_dtype_mixin",
"pytorch_lightning.utilities.device_parser",
"pytorch_lightning.utilities.parsing",
]
ignore_errors = "False"


@@ -174,6 +174,8 @@ class Accelerator:
            dataloader_idx: The index of the dataloader to which the batch belongs.
        """
        model = self.lightning_module
        device = device or self.root_device
        if model is not None and not isinstance(self.training_type_plugin, DataParallelPlugin):
            # no need to transfer batch to device in DP mode
            return model._apply_batch_transfer_handler(batch, device, dataloader_idx)
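
For context (not part of this diff): the added line gives the batch-transfer call a default, falling back to the accelerator's root device when the caller passes device=None. A minimal, self-contained sketch of that pattern; the function name, the root_device argument, and the tensor fallback are assumptions for illustration, not code from this commit:

    from typing import Any, Optional

    import torch

    def batch_to_device(batch: Any, device: Optional[torch.device], root_device: torch.device) -> Any:
        # keep an explicitly requested device, otherwise default to the root device
        device = device or root_device
        if isinstance(batch, torch.Tensor):
            return batch.to(device)
        return batch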


@@ -121,10 +121,9 @@ class DeviceDtypeModuleMixin(Module):
        Returns:
            Module: self
        """
        property_device = (
            device if isinstance(device, torch.device) else torch.device('cuda', index=device)  # type: ignore
        )  # mypy expects `device` for `index` to be int, while `Optional[int]` is okay => ignore typing for now
        self.__update_properties(device=property_device)
        if device is None or isinstance(device, int):
            device = torch.device('cuda', index=device)
        self.__update_properties(device=device)
        return super().cuda(device=device)

    def cpu(self) -> 'DeviceDtypeModuleMixin':
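
For context (not part of this diff): the rewritten branch turns an int index, or None, into a concrete torch.device before storing it and delegating to the parent cuda() call; the removed comment notes that index=None is accepted at runtime even though mypy expects an int. A standalone sketch of that normalization, with a hypothetical helper name:

    from typing import Optional, Union

    import torch

    def normalize_cuda_device(device: Optional[Union[int, torch.device]]) -> torch.device:
        # an int index or None becomes a concrete torch.device so the mixin can
        # track a consistent device property; an existing torch.device passes through
        if device is None or isinstance(device, int):
            device = torch.device('cuda', index=device)
        return device

    assert normalize_cuda_device(1) == torch.device('cuda', 1)
    assert isinstance(normalize_cuda_device(None), torch.device)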


@@ -1,13 +1,11 @@
coverage>5.2.0
codecov>=2.1
pytest>=6.0
#pytest-cov>2.10
#pytest-xdist
flake8>=3.6
check-manifest
twine==3.2
isort>=5.6.4
mypy>=0.720, <0.800
mypy>=0.900
pre-commit>=1.0
cloudpickle>=1.3

setup.cfg

@@ -100,104 +100,3 @@ DEDENT_CLOSING_BRACKETS = true
ALLOW_SPLIT_BEFORE_DICT_VALUE = false
BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF = true
NO_SPACES_AROUND_SELECTED_BINARY_OPERATORS = false
[mypy]
files = pytorch_lightning, pl_examples, benchmarks, tests
disallow_untyped_defs = True
ignore_missing_imports = True
show_error_codes = True
warn_redundant_casts = True
warn_unused_configs = True
warn_unused_ignores = True
allow_redefinition = True
# disable this rule as the Trainer attributes are defined in the connectors, not in its __init__
disable_error_code = attr-defined
# todo: add proper typing to this module...
[mypy-pytorch_lightning.callbacks.*]
ignore_errors = True
# whitelist
[mypy-pytorch_lightning.callbacks.pruning]
ignore_errors = False
# todo: add proper typing to this module...
[mypy-pytorch_lightning.core.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.loggers.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.loops.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.metrics.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.overrides.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.plugins.environments.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.plugins.training_type.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.profiler.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.pt_overrides.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.root_module.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.trainer.*]
ignore_errors = True
# whitelist
[mypy-pytorch_lightning.trainer.evaluation_loop]
ignore_errors = False
[mypy-pytorch_lightning.trainer.connectors.logger_connector]
ignore_errors = False
# todo: add proper typing to this module...
[mypy-pytorch_lightning.distributed.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.tuner.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-pytorch_lightning.utilities.*]
ignore_errors = True
[mypy-pytorch_lightning.utilities.cli]
ignore_errors = False
[mypy-pytorch_lightning.utilities.device_dtype_mixin]
ignore_errors = False
[mypy-pytorch_lightning.utilities.device_parser]
ignore_errors = False
[mypy-pytorch_lightning.utilities.parsing]
ignore_errors = False
# todo: add proper typing to this module...
[mypy-pl_examples.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-benchmarks.*]
ignore_errors = True
# todo: add proper typing to this module...
[mypy-tests.*]
ignore_errors = True