Prune deprecated Trainer(checkpoint_callback=ModelCheckpoint()) (#6166)

This commit is contained in:
Carlos Mocholí 2021-02-25 21:42:23 +01:00 committed by GitHub
parent 4d96f19493
commit ddf55a2f6a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 4 additions and 23 deletions

View File

@@ -26,6 +26,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed support for passing a bool value to `profiler` argument of Trainer ([#6164](https://github.com/PyTorchLightning/pytorch-lightning/pull/6164))
- Removed passing a `ModelCheckpoint` instance to `Trainer(checkpoint_callback)` ([#6166](https://github.com/PyTorchLightning/pytorch-lightning/pull/6166))
- Removed deprecated Trainer arguments `enable_pl_optimizer` and `automatic_optimization` ([#6163](https://github.com/PyTorchLightning/pytorch-lightning/pull/6163))

View File

@@ -16,7 +16,7 @@ from typing import List, Union
from pytorch_lightning.callbacks import Callback, ModelCheckpoint, ProgressBar, ProgressBarBase
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.utilities import rank_zero_info, rank_zero_warn
from pytorch_lightning.utilities import rank_zero_info
from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -63,15 +63,6 @@ class CallbackConnector:
self.trainer.callbacks = self._reorder_callbacks(self.trainer.callbacks)
def configure_checkpoint_callbacks(self, checkpoint_callback: Union[ModelCheckpoint, bool]):
if isinstance(checkpoint_callback, ModelCheckpoint):
# TODO: deprecated, remove this block in v1.3.0
rank_zero_warn(
"Passing a ModelCheckpoint instance to Trainer(checkpoint_callbacks=...)"
" is deprecated since v1.1 and will no longer be supported in v1.3."
" Use `callbacks` argument instead.", DeprecationWarning
)
self.trainer.callbacks.append(checkpoint_callback)
if self._trainer_has_checkpoint_callbacks() and checkpoint_callback is False:
raise MisconfigurationException(
"Trainer was configured with checkpoint_callback=False but found ModelCheckpoint"

View File

@@ -177,9 +177,6 @@ class Trainer(
It will configure a default ModelCheckpoint callback if there is no user-defined ModelCheckpoint in
:paramref:`~pytorch_lightning.trainer.trainer.Trainer.callbacks`.
.. warning:: Passing a ModelCheckpoint instance to this argument is deprecated since
v1.1 and will be unsupported from v1.3. Use `callbacks` argument instead.
check_val_every_n_epoch: Check val every n train epochs.
default_root_dir: Default path for logs and weights when no logger/ckpt_callback passed.

View File

@@ -899,16 +899,6 @@ def test_configure_model_checkpoint(tmpdir):
assert trainer.checkpoint_callback == callback1
assert trainer.checkpoint_callbacks == [callback1, callback2]
with pytest.warns(DeprecationWarning, match='will no longer be supported in v1.3'):
trainer = Trainer(checkpoint_callback=callback1, **kwargs)
assert [c for c in trainer.callbacks if isinstance(c, ModelCheckpoint)] == [callback1]
assert trainer.checkpoint_callback == callback1
with pytest.warns(DeprecationWarning, match="will no longer be supported in v1.3"):
trainer = Trainer(checkpoint_callback=callback1, callbacks=[callback2], **kwargs)
assert trainer.checkpoint_callback == callback2
assert trainer.checkpoint_callbacks == [callback2, callback1]
with pytest.raises(MisconfigurationException, match="checkpoint_callback=False but found ModelCheckpoint"):
Trainer(checkpoint_callback=False, callbacks=[callback1], **kwargs)