[build-system]
requires = [
    "setuptools",
    "wheel",
]
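# note: no build-backend is declared here, so PEP 517 front ends fall back
# to setuptools' legacy backend ("setuptools.build_meta:__legacy__");
# adding build-backend = "setuptools.build_meta" would make the choice explicit.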

[tool.isort]
known_first_party = [
    "pl_examples",
    "pytorch_lightning",
    "tests_pytorch",
]
profile = "black"
line_length = 120
force_sort_within_sections = false
order_by_type = false
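# profile = "black" defers import-formatting decisions to Black's style,
# so isort and Black do not rewrite the same imports back and forth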

[tool.black]
line-length = 120
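# kept in sync with isort's line_length above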

[tool.mypy]
files = ["pytorch_lightning"]
disallow_untyped_defs = true
ignore_missing_imports = true
show_error_codes = true
warn_redundant_casts = true
warn_unused_configs = true
warn_unused_ignores = true
allow_redefinition = true
# disable this rule as the Trainer attributes are defined in the connectors, not in its __init__
disable_error_code = "attr-defined"
# style choices
warn_no_return = false
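# running `mypy` with no arguments picks up this section automatically
# and checks the paths listed in `files`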

# Ignore mypy errors for these files
# TODO: the goal is for this to be empty
[[tool.mypy.overrides]]
# the list can be generated with:
# mypy | tr ':' ' ' | awk '{print $1}' | sort | uniq | sed 's/\.py//g' | sed 's|\/|\.|g' | xargs -I {} echo '"{}",'
module = [
    "pytorch_lightning.callbacks.finetuning",
    "pytorch_lightning.callbacks.model_checkpoint",
    "pytorch_lightning.callbacks.progress.rich_progress",
    "pytorch_lightning.callbacks.quantization",
    "pytorch_lightning.callbacks.stochastic_weight_avg",
    "pytorch_lightning.core.datamodule",
    "pytorch_lightning.core.decorators",
    "pytorch_lightning.core.module",
    "pytorch_lightning.core.mixins.device_dtype_mixin",
    "pytorch_lightning.core.saving",
    "pytorch_lightning.demos.boring_classes",
    "pytorch_lightning.demos.mnist_datamodule",
    "pytorch_lightning.distributed.dist",
    "pytorch_lightning.loggers.base",
    "pytorch_lightning.loggers.logger",
    "pytorch_lightning.loggers.comet",
    "pytorch_lightning.loggers.csv_logs",
    "pytorch_lightning.loggers.mlflow",
    "pytorch_lightning.loggers.neptune",
    "pytorch_lightning.loggers.tensorboard",
    "pytorch_lightning.loggers.wandb",
    "pytorch_lightning.loops.epoch.training_epoch_loop",
    "pytorch_lightning.strategies.ddp",
    "pytorch_lightning.strategies.ddp2",
    "pytorch_lightning.strategies.ddp_spawn",
    "pytorch_lightning.strategies.deepspeed",
    "pytorch_lightning.strategies.dp",
    "pytorch_lightning.strategies.fully_sharded",
    "pytorch_lightning.strategies.horovod",
    "pytorch_lightning.strategies.ipu",
    "pytorch_lightning.strategies.parallel",
    "pytorch_lightning.strategies.sharded",
    "pytorch_lightning.strategies.sharded_spawn",
    "pytorch_lightning.strategies.single_device",
    "pytorch_lightning.strategies.single_tpu",
    "pytorch_lightning.strategies.tpu_spawn",
    "pytorch_lightning.strategies.strategy",
    "pytorch_lightning.profiler.advanced",
    "pytorch_lightning.profiler.base",
    "pytorch_lightning.profiler.pytorch",
    "pytorch_lightning.profiler.simple",
    "pytorch_lightning.trainer.callback_hook",
    "pytorch_lightning.trainer.connectors.callback_connector",
    "pytorch_lightning.trainer.connectors.data_connector",
    "pytorch_lightning.trainer.data_loading",
    "pytorch_lightning.trainer.optimizers",
    "pytorch_lightning.trainer.supporters",
    "pytorch_lightning.trainer.trainer",
    "pytorch_lightning.tuner.batch_size_scaling",
    "pytorch_lightning.tuner.lr_finder",
    "pytorch_lightning.tuner.tuning",
    "pytorch_lightning.utilities.auto_restart",
    "pytorch_lightning.utilities.data",
    "pytorch_lightning.utilities.distributed",
    "pytorch_lightning.utilities.meta",
]
ignore_errors = true
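# ignore_errors = true silences every mypy error in the modules listed above;
# a module can be dropped from the list once it type-checks cleanly.
# Illustrative sketch (not part of the original config): finer-grained
# overrides can be appended as additional tables, e.g.
# [[tool.mypy.overrides]]
# module = "pytorch_lightning.utilities.data"
# warn_return_any = true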