Remove deprecated `max_steps=None` (#13591)
* Remove max_steps=None
* Update changelog
* Update docs
* Unused import

Co-authored-by: Carlos Mocholí <carlossmocholi@gmail.com>
This commit is contained in:
parent
9098514ea0
commit
7ba0270552
|
@@ -954,7 +954,7 @@ Training will stop if max_steps or max_epochs have reached (earliest).
|
|||
.. testcode::
|
||||
|
||||
# Default (disabled)
|
||||
trainer = Trainer(max_steps=None)
|
||||
trainer = Trainer(max_steps=-1)
|
||||
|
||||
# Stop after 100 steps
|
||||
trainer = Trainer(max_steps=100)
|
||||
|
|
|
@@ -264,6 +264,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
|
|||
- Removed the deprecated `test_transforms` argument from the `LightningDataModule` constructor ([#12773](https://github.com/PyTorchLightning/pytorch-lightning/pull/12773))
|
||||
|
||||
|
||||
- Removed deprecated `Trainer(max_steps=None)` ([#13591](https://github.com/Lightning-AI/lightning/pull/13591))
|
||||
|
||||
|
||||
- Removed deprecated `dataloader_idx` argument from `on_train_batch_start/end` hooks `Callback` and `LightningModule` ([#12769](https://github.com/PyTorchLightning/pytorch-lightning/pull/12769), [#12977](https://github.com/PyTorchLightning/pytorch-lightning/pull/12977))
|
||||
|
||||
|
||||
|
|
|
@@ -48,13 +48,7 @@ class TrainingEpochLoop(loops.Loop[_OUTPUTS_TYPE]):
|
|||
|
||||
def __init__(self, min_steps: Optional[int] = None, max_steps: int = -1) -> None:
|
||||
super().__init__()
|
||||
if max_steps is None:
|
||||
rank_zero_deprecation(
|
||||
"Setting `max_steps = None` is deprecated in v1.5 and will no longer be supported in v1.7."
|
||||
" Use `max_steps = -1` instead."
|
||||
)
|
||||
max_steps = -1
|
||||
elif max_steps < -1:
|
||||
if max_steps < -1:
|
||||
raise MisconfigurationException(
|
||||
f"`max_steps` must be a non-negative integer or -1 (infinite steps). You passed in {max_steps}."
|
||||
)
|
||||
|
|
|
@@ -33,7 +33,7 @@ from pytorch_lightning.utilities.fetching import (
|
|||
InterBatchParallelDataFetcher,
|
||||
)
|
||||
from pytorch_lightning.utilities.model_helpers import is_overridden
|
||||
from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
|
||||
from pytorch_lightning.utilities.rank_zero import rank_zero_warn
|
||||
from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@@ -104,13 +104,7 @@ class FitLoop(Loop[None]):
|
|||
def max_steps(self, value: int) -> None:
|
||||
"""Sets the maximum number of steps (forwards to epoch_loop)"""
|
||||
# TODO(@awaelchli): This setter is required by debugging connector (fast dev run), should be avoided
|
||||
if value is None:
|
||||
rank_zero_deprecation(
|
||||
"Setting `max_steps = None` is deprecated in v1.5 and will no longer be supported in v1.7."
|
||||
" Use `max_steps = -1` instead."
|
||||
)
|
||||
value = -1
|
||||
elif value < -1:
|
||||
if value < -1:
|
||||
raise MisconfigurationException(
|
||||
f"`max_steps` must be a non-negative integer or -1 (infinite steps). You passed in {value}."
|
||||
)
|
||||
|
|
|
@@ -17,19 +17,9 @@ from re import escape
|
|||
import pytest
|
||||
import torch
|
||||
|
||||
from pytorch_lightning import Trainer
|
||||
from pytorch_lightning.strategies import SingleDeviceStrategy
|
||||
|
||||
|
||||
def test_v1_7_0_deprecated_max_steps_none(tmpdir):
|
||||
with pytest.deprecated_call(match="`max_steps = None` is deprecated in v1.5"):
|
||||
_ = Trainer(max_steps=None)
|
||||
|
||||
trainer = Trainer()
|
||||
with pytest.deprecated_call(match="`max_steps = None` is deprecated in v1.5"):
|
||||
trainer.fit_loop.max_steps = None
|
||||
|
||||
|
||||
def test_v1_7_0_post_dispatch_hook():
|
||||
class CustomPlugin(SingleDeviceStrategy):
|
||||
def post_dispatch(self, trainer):
|
||||
|
|
Loading…
Reference in New Issue