From ddf55a2f6a61c28c7d86c1144652c5e7c3811a3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Thu, 25 Feb 2021 21:42:23 +0100
Subject: [PATCH] Prune deprecated Trainer(checkpoint_callback=ModelCheckpoint()) (#6166)

---
 CHANGELOG.md                                 |  3 +++
 .../trainer/connectors/callback_connector.py | 11 +----------
 pytorch_lightning/trainer/trainer.py         |  3 ---
 tests/checkpointing/test_model_checkpoint.py | 10 ----------
 4 files changed, 4 insertions(+), 23 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f3884c8dc2..6e1d4b1d6c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -26,6 +26,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed support for passing a bool value to `profiler` argument of Trainer ([#6164](https://github.com/PyTorchLightning/pytorch-lightning/pull/6164))
 
+- Removed passing a `ModelCheckpoint` instance to `Trainer(checkpoint_callback)` ([#6166](https://github.com/PyTorchLightning/pytorch-lightning/pull/6166))
+
+
 - Removed deprecated Trainer argument `enable_pl_optimizer` and `automatic_optimization` ([#6163](https://github.com/PyTorchLightning/pytorch-lightning/pull/6163))
 
 
diff --git a/pytorch_lightning/trainer/connectors/callback_connector.py b/pytorch_lightning/trainer/connectors/callback_connector.py
index 4d8fe9b7b2..40ac8f3e69 100644
--- a/pytorch_lightning/trainer/connectors/callback_connector.py
+++ b/pytorch_lightning/trainer/connectors/callback_connector.py
@@ -16,7 +16,7 @@ from typing import List, Union
 
 from pytorch_lightning.callbacks import Callback, ModelCheckpoint, ProgressBar, ProgressBarBase
 from pytorch_lightning.core.lightning import LightningModule
-from pytorch_lightning.utilities import rank_zero_info, rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_info
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 
 
@@ -63,15 +63,6 @@ class CallbackConnector:
         self.trainer.callbacks = self._reorder_callbacks(self.trainer.callbacks)
 
     def configure_checkpoint_callbacks(self, checkpoint_callback: Union[ModelCheckpoint, bool]):
-        if isinstance(checkpoint_callback, ModelCheckpoint):
-            # TODO: deprecated, remove this block in v1.3.0
-            rank_zero_warn(
-                "Passing a ModelCheckpoint instance to Trainer(checkpoint_callbacks=...)"
-                " is deprecated since v1.1 and will no longer be supported in v1.3."
-                " Use `callbacks` argument instead.", DeprecationWarning
-            )
-            self.trainer.callbacks.append(checkpoint_callback)
-
         if self._trainer_has_checkpoint_callbacks() and checkpoint_callback is False:
             raise MisconfigurationException(
                 "Trainer was configured with checkpoint_callback=False but found ModelCheckpoint"
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 3c564542f6..530001e0be 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -177,9 +177,6 @@ class Trainer(
             It will configure a default ModelCheckpoint callback if there is no user-defined ModelCheckpoint in
             :paramref:`~pytorch_lightning.trainer.trainer.Trainer.callbacks`.
 
-            .. warning:: Passing a ModelCheckpoint instance to this argument is deprecated since
-               v1.1 and will be unsupported from v1.3. Use `callbacks` argument instead.
-
         check_val_every_n_epoch: Check val every n train epochs.
 
         default_root_dir: Default path for logs and weights when no logger/ckpt_callback passed.
diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py
index 8ea6f8a600..a6ec2bc566 100644
--- a/tests/checkpointing/test_model_checkpoint.py
+++ b/tests/checkpointing/test_model_checkpoint.py
@@ -899,16 +899,6 @@ def test_configure_model_checkpoint(tmpdir):
     assert trainer.checkpoint_callback == callback1
     assert trainer.checkpoint_callbacks == [callback1, callback2]
 
-    with pytest.warns(DeprecationWarning, match='will no longer be supported in v1.3'):
-        trainer = Trainer(checkpoint_callback=callback1, **kwargs)
-    assert [c for c in trainer.callbacks if isinstance(c, ModelCheckpoint)] == [callback1]
-    assert trainer.checkpoint_callback == callback1
-
-    with pytest.warns(DeprecationWarning, match="will no longer be supported in v1.3"):
-        trainer = Trainer(checkpoint_callback=callback1, callbacks=[callback2], **kwargs)
-    assert trainer.checkpoint_callback == callback2
-    assert trainer.checkpoint_callbacks == [callback2, callback1]
-
     with pytest.raises(MisconfigurationException, match="checkpoint_callback=False but found ModelCheckpoint"):
         Trainer(checkpoint_callback=False, callbacks=[callback1], **kwargs)
 
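
For reference, a minimal migration sketch for user code affected by this removal; the variable names and the `monitor` key below are illustrative and not taken from the patch:

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import ModelCheckpoint

    checkpoint = ModelCheckpoint(monitor="val_loss")  # illustrative monitor key

    # Previously deprecated (since v1.1), removed by this patch:
    # trainer = Trainer(checkpoint_callback=checkpoint)

    # Supported usage: register the instance through `callbacks` instead.
    trainer = Trainer(callbacks=[checkpoint])

    # `checkpoint_callback` remains a bool toggle; setting it to False while a
    # ModelCheckpoint is present in `callbacks` raises MisconfigurationException,
    # as exercised in test_configure_model_checkpoint above.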