Be explicit with mypy ignores (#10751)
* Ignore mypy only for failing files * Comment
This commit is contained in:
parent
85d7c4dce4
commit
b57feccbff
138
pyproject.toml
138
pyproject.toml
|
@ -37,43 +37,109 @@ disable_error_code = "attr-defined"
|
|||
warn_no_return = "False"
|
||||
|
||||
# Changes mypy default to ignore all errors
|
||||
# TODO: the goal is for this to be empty
|
||||
[[tool.mypy.overrides]]
|
||||
# the list can be generated with:
|
||||
# mypy | tr ':' ' ' | awk '{print $1}' | sort | uniq | sed 's/\.py//g' | sed 's|\/|\.|g' | xargs -I {} echo '"{}",'
|
||||
module = [
|
||||
"pytorch_lightning.*",
|
||||
"pytorch_lightning.accelerators.accelerator",
|
||||
"pytorch_lightning.accelerators.gpu",
|
||||
"pytorch_lightning.callbacks.finetuning",
|
||||
"pytorch_lightning.callbacks.lr_monitor",
|
||||
"pytorch_lightning.callbacks.model_checkpoint",
|
||||
"pytorch_lightning.callbacks.prediction_writer",
|
||||
"pytorch_lightning.callbacks.progress.base",
|
||||
"pytorch_lightning.callbacks.progress.progress",
|
||||
"pytorch_lightning.callbacks.progress.rich_progress",
|
||||
"pytorch_lightning.callbacks.progress.tqdm_progress",
|
||||
"pytorch_lightning.callbacks.quantization",
|
||||
"pytorch_lightning.callbacks.stochastic_weight_avg",
|
||||
"pytorch_lightning.callbacks.timer",
|
||||
"pytorch_lightning.callbacks.xla_stats_monitor",
|
||||
"pytorch_lightning.core.datamodule",
|
||||
"pytorch_lightning.core.decorators",
|
||||
"pytorch_lightning.core.lightning",
|
||||
"pytorch_lightning.core.mixins.device_dtype_mixin",
|
||||
"pytorch_lightning.core.mixins.hparams_mixin",
|
||||
"pytorch_lightning.core.saving",
|
||||
"pytorch_lightning.distributed.dist",
|
||||
"pytorch_lightning.lite.lite",
|
||||
"pytorch_lightning.lite.wrappers",
|
||||
"pytorch_lightning.loggers.base",
|
||||
"pytorch_lightning.loggers.comet",
|
||||
"pytorch_lightning.loggers.csv_logs",
|
||||
"pytorch_lightning.loggers.mlflow",
|
||||
"pytorch_lightning.loggers.neptune",
|
||||
"pytorch_lightning.loggers.tensorboard",
|
||||
"pytorch_lightning.loggers.test_tube",
|
||||
"pytorch_lightning.loggers.wandb",
|
||||
"pytorch_lightning.loops.base",
|
||||
"pytorch_lightning.loops.batch.training_batch_loop",
|
||||
"pytorch_lightning.loops.dataloader.dataloader_loop",
|
||||
"pytorch_lightning.loops.dataloader.evaluation_loop",
|
||||
"pytorch_lightning.loops.dataloader.prediction_loop",
|
||||
"pytorch_lightning.loops.epoch.evaluation_epoch_loop",
|
||||
"pytorch_lightning.loops.epoch.prediction_epoch_loop",
|
||||
"pytorch_lightning.loops.epoch.training_epoch_loop",
|
||||
"pytorch_lightning.loops.fit_loop",
|
||||
"pytorch_lightning.loops.optimization.optimizer_loop",
|
||||
"pytorch_lightning.loops.utilities",
|
||||
"pytorch_lightning.overrides.base",
|
||||
"pytorch_lightning.overrides.data_parallel",
|
||||
"pytorch_lightning.overrides.distributed",
|
||||
"pytorch_lightning.overrides.fairscale",
|
||||
"pytorch_lightning.plugins.environments.lightning_environment",
|
||||
"pytorch_lightning.plugins.environments.lsf_environment",
|
||||
"pytorch_lightning.plugins.environments.slurm_environment",
|
||||
"pytorch_lightning.plugins.environments.torchelastic_environment",
|
||||
"pytorch_lightning.plugins.precision.deepspeed",
|
||||
"pytorch_lightning.plugins.precision.native_amp",
|
||||
"pytorch_lightning.plugins.precision.precision_plugin",
|
||||
"pytorch_lightning.plugins.training_type.ddp",
|
||||
"pytorch_lightning.plugins.training_type.ddp2",
|
||||
"pytorch_lightning.plugins.training_type.ddp_spawn",
|
||||
"pytorch_lightning.plugins.training_type.deepspeed",
|
||||
"pytorch_lightning.plugins.training_type.dp",
|
||||
"pytorch_lightning.plugins.training_type.fully_sharded",
|
||||
"pytorch_lightning.plugins.training_type.horovod",
|
||||
"pytorch_lightning.plugins.training_type.ipu",
|
||||
"pytorch_lightning.plugins.training_type.parallel",
|
||||
"pytorch_lightning.plugins.training_type.sharded",
|
||||
"pytorch_lightning.plugins.training_type.sharded_spawn",
|
||||
"pytorch_lightning.plugins.training_type.single_device",
|
||||
"pytorch_lightning.plugins.training_type.single_tpu",
|
||||
"pytorch_lightning.plugins.training_type.tpu_spawn",
|
||||
"pytorch_lightning.plugins.training_type.training_type_plugin",
|
||||
"pytorch_lightning.profiler.advanced",
|
||||
"pytorch_lightning.profiler.base",
|
||||
"pytorch_lightning.profiler.pytorch",
|
||||
"pytorch_lightning.profiler.simple",
|
||||
"pytorch_lightning.trainer.callback_hook",
|
||||
"pytorch_lightning.trainer.configuration_validator",
|
||||
"pytorch_lightning.trainer.connectors.accelerator_connector",
|
||||
"pytorch_lightning.trainer.connectors.callback_connector",
|
||||
"pytorch_lightning.trainer.connectors.checkpoint_connector",
|
||||
"pytorch_lightning.trainer.connectors.data_connector",
|
||||
"pytorch_lightning.trainer.connectors.logger_connector.result",
|
||||
"pytorch_lightning.trainer.data_loading",
|
||||
"pytorch_lightning.trainer.optimizers",
|
||||
"pytorch_lightning.trainer.supporters",
|
||||
"pytorch_lightning.trainer.trainer",
|
||||
"pytorch_lightning.tuner.batch_size_scaling",
|
||||
"pytorch_lightning.tuner.lr_finder",
|
||||
"pytorch_lightning.tuner.tuning",
|
||||
"pytorch_lightning.utilities.auto_restart",
|
||||
"pytorch_lightning.utilities.data",
|
||||
"pytorch_lightning.utilities.deepspeed",
|
||||
"pytorch_lightning.utilities.distributed",
|
||||
"pytorch_lightning.utilities.enums",
|
||||
"pytorch_lightning.utilities.fetching",
|
||||
"pytorch_lightning.utilities.imports",
|
||||
"pytorch_lightning.utilities.memory",
|
||||
"pytorch_lightning.utilities.meta",
|
||||
"pytorch_lightning.utilities.metrics",
|
||||
"pytorch_lightning.utilities.migration",
|
||||
"pytorch_lightning.utilities.upgrade_checkpoint",
|
||||
"pytorch_lightning.utilities.warnings",
|
||||
]
|
||||
ignore_errors = "True"
|
||||
|
||||
# Override the default for files where we would like to enable type checking
|
||||
# TODO: Bring more files into this section
|
||||
[[tool.mypy.overrides]]
|
||||
module = [
|
||||
"pytorch_lightning.callbacks.device_stats_monitor",
|
||||
"pytorch_lightning.callbacks.early_stopping",
|
||||
"pytorch_lightning.callbacks.gpu_stats_monitor",
|
||||
"pytorch_lightning.callbacks.gradient_accumulation_scheduler",
|
||||
"pytorch_lightning.callbacks.model_summary",
|
||||
"pytorch_lightning.callbacks.progress",
|
||||
"pytorch_lightning.callbacks.pruning",
|
||||
"pytorch_lightning.callbacks.rich_model_summary",
|
||||
"pytorch_lightning.core.optimizer",
|
||||
"pytorch_lightning.loops.optimization.closure",
|
||||
"pytorch_lightning.loops.optimization.manual_loop",
|
||||
"pytorch_lightning.loops.evaluation_loop",
|
||||
"pytorch_lightning.trainer.connectors.logger_connector",
|
||||
"pytorch_lightning.trainer.connectors.logger_connector.fx_validator",
|
||||
"pytorch_lightning.trainer.connectors.signal_connector",
|
||||
"pytorch_lightning.trainer.progress.*",
|
||||
"pytorch_lightning.tuner.auto_gpu_select",
|
||||
"pytorch_lightning.utilities.apply_func",
|
||||
"pytorch_lightning.utilities.argparse",
|
||||
"pytorch_lightning.utilities.cli",
|
||||
"pytorch_lightning.utilities.cloud_io",
|
||||
"pytorch_lightning.utilities.device_dtype_mixin",
|
||||
"pytorch_lightning.utilities.device_parser",
|
||||
"pytorch_lightning.utilities.model_summary",
|
||||
"pytorch_lightning.utilities.parameter_tying",
|
||||
"pytorch_lightning.utilities.parsing",
|
||||
"pytorch_lightning.utilities.seed",
|
||||
"pytorch_lightning.utilities.xla_device",
|
||||
]
|
||||
ignore_errors = "False"
|
||||
|
|
Loading…
Reference in New Issue