lightning/pyproject.toml

[build-system]
requires = [
"setuptools",
"wheel",
]

[tool.isort]
known_first_party = [
"docs",
"pl_examples",
"pytorch_lightning",
"tests",
]
profile = "black"
line_length = 120
force_sort_within_sections = false
order_by_type = false

[tool.black]
line-length = 120

[tool.mypy]
files = ["pytorch_lightning"]
disallow_untyped_defs = true
ignore_missing_imports = true
show_error_codes = true
warn_redundant_casts = true
warn_unused_configs = true
warn_unused_ignores = true
allow_redefinition = true
# disable this rule as the Trainer attributes are defined in the connectors, not in its __init__
disable_error_code = "attr-defined"
# style choices
warn_no_return = false

# Changes mypy's default behavior so that all errors in the modules listed below are ignored
# TODO: the goal is for this list to be empty
[[tool.mypy.overrides]]
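# `[[tool.mypy.overrides]]` defines one entry in mypy's array of per-module overrides;
# further override blocks of the same form could be added for other module sets.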
# the list can be generated with:
# mypy | tr ':' ' ' | awk '{print $1}' | sort | uniq | sed 's/\.py//g' | sed 's|\/|\.|g' | xargs -I {} echo '"{}",'
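# (i.e. take the file paths from mypy's error output, de-duplicate them, strip the ".py" suffix,
# and rewrite the paths as quoted, dotted module names)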
module = [
"pytorch_lightning.accelerators.accelerator",
"pytorch_lightning.accelerators.gpu",
"pytorch_lightning.callbacks.finetuning",
"pytorch_lightning.callbacks.lr_monitor",
"pytorch_lightning.callbacks.model_checkpoint",
"pytorch_lightning.callbacks.prediction_writer",
"pytorch_lightning.callbacks.progress.base",
"pytorch_lightning.callbacks.progress.progress",
"pytorch_lightning.callbacks.progress.rich_progress",
"pytorch_lightning.callbacks.progress.tqdm_progress",
"pytorch_lightning.callbacks.quantization",
"pytorch_lightning.callbacks.stochastic_weight_avg",
"pytorch_lightning.callbacks.timer",
"pytorch_lightning.callbacks.xla_stats_monitor",
"pytorch_lightning.core.datamodule",
"pytorch_lightning.core.decorators",
"pytorch_lightning.core.lightning",
"pytorch_lightning.core.mixins.device_dtype_mixin",
"pytorch_lightning.core.mixins.hparams_mixin",
"pytorch_lightning.core.saving",
"pytorch_lightning.distributed.dist",
"pytorch_lightning.lite.lite",
"pytorch_lightning.lite.wrappers",
"pytorch_lightning.loggers.base",
"pytorch_lightning.loggers.comet",
"pytorch_lightning.loggers.csv_logs",
"pytorch_lightning.loggers.mlflow",
"pytorch_lightning.loggers.neptune",
"pytorch_lightning.loggers.tensorboard",
"pytorch_lightning.loggers.test_tube",
"pytorch_lightning.loggers.wandb",
"pytorch_lightning.loops.base",
"pytorch_lightning.loops.batch.training_batch_loop",
"pytorch_lightning.loops.epoch.evaluation_epoch_loop",
"pytorch_lightning.loops.epoch.prediction_epoch_loop",
"pytorch_lightning.loops.epoch.training_epoch_loop",
"pytorch_lightning.loops.fit_loop",
"pytorch_lightning.loops.utilities",
"pytorch_lightning.overrides.base",
"pytorch_lightning.overrides.data_parallel",
"pytorch_lightning.overrides.distributed",
"pytorch_lightning.overrides.fairscale",
"pytorch_lightning.plugins.environments.lightning_environment",
"pytorch_lightning.plugins.environments.lsf_environment",
"pytorch_lightning.plugins.environments.slurm_environment",
"pytorch_lightning.plugins.environments.torchelastic_environment",
"pytorch_lightning.plugins.precision.deepspeed",
"pytorch_lightning.plugins.precision.native_amp",
"pytorch_lightning.plugins.precision.precision_plugin",
"pytorch_lightning.plugins.training_type.ddp",
"pytorch_lightning.plugins.training_type.ddp2",
"pytorch_lightning.plugins.training_type.ddp_spawn",
"pytorch_lightning.plugins.training_type.deepspeed",
"pytorch_lightning.plugins.training_type.dp",
"pytorch_lightning.plugins.training_type.fully_sharded",
"pytorch_lightning.plugins.training_type.horovod",
"pytorch_lightning.plugins.training_type.ipu",
"pytorch_lightning.plugins.training_type.parallel",
"pytorch_lightning.plugins.training_type.sharded",
"pytorch_lightning.plugins.training_type.sharded_spawn",
"pytorch_lightning.plugins.training_type.single_device",
"pytorch_lightning.plugins.training_type.single_tpu",
"pytorch_lightning.plugins.training_type.tpu_spawn",
"pytorch_lightning.plugins.training_type.training_type_plugin",
"pytorch_lightning.profiler.advanced",
"pytorch_lightning.profiler.base",
"pytorch_lightning.profiler.pytorch",
"pytorch_lightning.profiler.simple",
"pytorch_lightning.trainer.callback_hook",
"pytorch_lightning.trainer.configuration_validator",
"pytorch_lightning.trainer.connectors.accelerator_connector",
"pytorch_lightning.trainer.connectors.callback_connector",
"pytorch_lightning.trainer.connectors.checkpoint_connector",
"pytorch_lightning.trainer.connectors.data_connector",
"pytorch_lightning.trainer.data_loading",
"pytorch_lightning.trainer.optimizers",
"pytorch_lightning.trainer.supporters",
"pytorch_lightning.trainer.trainer",
"pytorch_lightning.tuner.batch_size_scaling",
"pytorch_lightning.tuner.lr_finder",
"pytorch_lightning.tuner.tuning",
"pytorch_lightning.utilities.auto_restart",
"pytorch_lightning.utilities.data",
"pytorch_lightning.utilities.deepspeed",
"pytorch_lightning.utilities.distributed",
"pytorch_lightning.utilities.enums",
"pytorch_lightning.utilities.fetching",
"pytorch_lightning.utilities.imports",
"pytorch_lightning.utilities.memory",
"pytorch_lightning.utilities.meta",
"pytorch_lightning.utilities.metrics",
"pytorch_lightning.utilities.migration",
"pytorch_lightning.utilities.upgrade_checkpoint",
"pytorch_lightning.utilities.warnings",
]
ignore_errors = true
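# Note: with the settings above, mypy type-checks the `pytorch_lightning` package using the strict
# options from [tool.mypy], while errors in the modules listed in this override are ignored entirely.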