Remove the deprecated `LightningCLI` arguments (#16380)

Authored by Carlos Mocholí on 2023-01-16 18:22:55 +01:00, committed by Luca Antiga
parent bf75a1758c
commit 67eb931cdf
4 changed files with 10 additions and 46 deletions


@@ -178,9 +178,9 @@ def cli_main():
         LitAutoEncoder,
         MyDataModule,
         seed_everything_default=1234,
-        save_config_overwrite=True,
         run=False,  # used to de-activate automatic fitting.
         trainer_defaults={"callbacks": ImageSampler(), "max_epochs": 10},
+        save_config_kwargs={"overwrite": True},
     )
     cli.trainer.fit(cli.model, datamodule=cli.datamodule)
     cli.trainer.test(ckpt_path="best", datamodule=cli.datamodule)
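A minimal sketch of the migration this hunk performs, assuming the `LitAutoEncoder` and `MyDataModule` classes from the example are importable:

from pytorch_lightning.cli import LightningCLI

# Before (argument removed in this commit):
#   cli = LightningCLI(LitAutoEncoder, MyDataModule, save_config_overwrite=True, run=False)
# After: the overwrite behaviour is passed as a dict that LightningCLI forwards to the
# config-saving callback.
cli = LightningCLI(
    LitAutoEncoder,
    MyDataModule,
    save_config_kwargs={"overwrite": True},
    run=False,
)
cli.trainer.fit(cli.model, datamodule=cli.datamodule)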


@@ -56,6 +56,14 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed the deprecated `pytorch_lightning.profiler` module ([#16359](https://github.com/Lightning-AI/lightning/pull/16359))
+- Removed the deprecated `LightningCLI` arguments ([#16380](https://github.com/Lightning-AI/lightning/pull/16380))
+    * save_config_filename
+    * save_config_overwrite
+    * save_config_multifile
+    * description
+    * env_prefix
+    * env_parse
 - Removed the deprecated automatic GPU selection ([#16184](https://github.com/Lightning-AI/lightning/pull/16184))
     * Removed the `Trainer(auto_select_gpus=...)` argument
     * Removed the `pytorch_lightning.tuner.auto_gpu_select.{pick_single_gpu,pick_multiple_gpus}` functions
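A short migration sketch for the parser-related arguments in the entry above. The new keys follow the mapping applied by the shim removed below (`env_parse` becomes `default_env`); `MyModel` is a placeholder `LightningModule` used purely for illustration:

from pytorch_lightning.cli import LightningCLI

# Before (no longer accepted):
#   cli = LightningCLI(MyModel, description="My CLI", env_prefix="PL", env_parse=True, run=False)
# After: pass the same settings through `parser_kwargs`, which LightningCLI forwards
# to the underlying jsonargparse parser.
cli = LightningCLI(
    MyModel,
    parser_kwargs={"description": "My CLI", "env_prefix": "PL", "default_env": True},
    run=False,
)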


@@ -28,7 +28,7 @@ from lightning_fabric.utilities.types import _TORCH_LRSCHEDULER
 from pytorch_lightning import Callback, LightningDataModule, LightningModule, seed_everything, Trainer
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn
 
 _JSONARGPARSE_SIGNATURES_AVAILABLE = RequirementCache("jsonargparse[signatures]>=4.17.0")
@@ -279,7 +279,6 @@ class LightningCLI:
         args: ArgsType = None,
         run: bool = True,
         auto_configure_optimizers: bool = True,
-        **kwargs: Any,  # Remove with deprecations of v2.0.0
     ) -> None:
         """Receives as input pytorch-lightning classes (or callables which return pytorch-lightning classes), which
         are called / instantiated using a parsed configuration file and / or command line args.
@@ -331,8 +330,6 @@
         self.parser_kwargs = parser_kwargs or {}  # type: ignore[var-annotated] # github.com/python/mypy/issues/6463
         self.auto_configure_optimizers = auto_configure_optimizers
-        self._handle_deprecated_params(kwargs)
         self.model_class = model_class
         # used to differentiate between the original value and the processed value
         self._model_class = model_class or LightningModule
@@ -357,28 +354,6 @@
         if self.subcommand is not None:
             self._run_subcommand(self.subcommand)
 
-    def _handle_deprecated_params(self, kwargs: dict) -> None:
-        for name in kwargs.keys() & ["save_config_filename", "save_config_overwrite", "save_config_multifile"]:
-            value = kwargs.pop(name)
-            key = name.replace("save_config_", "").replace("filename", "config_filename")
-            self.save_config_kwargs[key] = value
-            rank_zero_deprecation(
-                f"LightningCLI's {name!r} init parameter is deprecated from v1.8 and will "
-                f"be removed in v2.0.0. Use `save_config_kwargs={{'{key}': ...}}` instead."
-            )
-
-        for name in kwargs.keys() & ["description", "env_prefix", "env_parse"]:
-            value = kwargs.pop(name)
-            key = name.replace("env_parse", "default_env")
-            self.parser_kwargs[key] = value
-            rank_zero_deprecation(
-                f"LightningCLI's {name!r} init parameter is deprecated from v1.9 and will "
-                f"be removed in v2.0. Use `parser_kwargs={{'{key}': ...}}` instead."
-            )
-
-        if kwargs:
-            raise ValueError(f"Unexpected keyword parameters: {kwargs}")
-
     def _setup_parser_kwargs(self, parser_kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
         subcommand_names = self.subcommands().keys()
         main_kwargs = {k: v for k, v in parser_kwargs.items() if k not in subcommand_names}
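Spelled out, the string replaces in the deleted shim amount to the following old-name to new-location mapping; `_MIGRATION` is only an illustrative reference for updating old call sites, not an API:

_MIGRATION = {
    # old LightningCLI argument -> (new LightningCLI argument, key inside that dict)
    "save_config_filename": ("save_config_kwargs", "config_filename"),
    "save_config_overwrite": ("save_config_kwargs", "overwrite"),
    "save_config_multifile": ("save_config_kwargs", "multifile"),
    "description": ("parser_kwargs", "description"),
    "env_prefix": ("parser_kwargs", "env_prefix"),
    "env_parse": ("parser_kwargs", "default_env"),
}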


@@ -22,7 +22,6 @@ from torch.utils.data import DataLoader
 from pytorch_lightning import Trainer
 from pytorch_lightning.accelerators.cpu import CPUAccelerator
-from pytorch_lightning.cli import LightningCLI
 from pytorch_lightning.core.mixins.device_dtype_mixin import DeviceDtypeModuleMixin
 from pytorch_lightning.demos.boring_classes import BoringModel, RandomDataset
 from pytorch_lightning.overrides import LightningDistributedModule, LightningParallelModule
@@ -68,15 +67,6 @@ from pytorch_lightning.utilities.xla_device import inner_f, pl_multi_process, XL
 from tests_pytorch.helpers.runif import RunIf
 
 
-@pytest.mark.parametrize(
-    ["name", "value"],
-    [("description", "description"), ("env_prefix", "PL"), ("env_parse", False)],
-)
-def test_lightningCLI_parser_init_params_deprecation_warning(name, value):
-    with mock.patch("sys.argv", ["any.py"]), pytest.deprecated_call(match=f".*{name!r} init parameter is deprecated.*"):
-        LightningCLI(BoringModel, run=False, **{name: value})
-
-
 @pytest.mark.parametrize(
     "wrapper_class",
     [
@@ -281,15 +271,6 @@ def test_v1_10_deprecated_accelerator_setup_environment_method():
     CPUAccelerator().setup_environment(torch.device("cpu"))
 
 
-@pytest.mark.parametrize(
-    ["name", "value"],
-    [("save_config_filename", "config.yaml"), ("save_config_overwrite", False), ("save_config_multifile", False)],
-)
-def test_lightningCLI_save_config_init_params_deprecation_warning(name, value):
-    with mock.patch("sys.argv", ["any.py"]), pytest.deprecated_call(match=f".*{name!r} init parameter is deprecated.*"):
-        LightningCLI(BoringModel, run=False, **{name: value})
-
-
 def test_tuning_enum():
     with pytest.deprecated_call(
         match="`TrainerFn.TUNING` has been deprecated in v1.8.0 and will be removed in v2.0.0."
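With the deprecation tests above deleted, the old keyword names are no longer intercepted at all: the `**kwargs` catch-all is gone from `LightningCLI.__init__`, so passing them fails with a plain `TypeError` instead of a deprecation warning. A hedged sketch of a check one could write for that, mirroring the style of the deleted tests; the test name is hypothetical and it is not part of this commit:

from unittest import mock

import pytest

from pytorch_lightning.cli import LightningCLI
from pytorch_lightning.demos.boring_classes import BoringModel


def test_removed_cli_arguments_are_rejected():
    # Python rejects the unknown keyword argument before any CLI parsing happens,
    # and the TypeError message names the offending parameter.
    with mock.patch("sys.argv", ["any.py"]), pytest.raises(TypeError, match="save_config_overwrite"):
        LightningCLI(BoringModel, run=False, save_config_overwrite=True)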