Remove `add_to_queue` and `get_from_queue` from LightningModule (#13600)

Co-authored-by: Carlos Mocholí <carlossmocholi@gmail.com>
Nikhil Shenoy 2022-07-12 20:17:51 +05:30 committed by GitHub
parent 94688665ca
commit e034cd31d3
8 changed files with 6 additions and 112 deletions

View File

@@ -1626,15 +1626,3 @@ on_after_batch_transfer
.. automethod:: pytorch_lightning.core.module.LightningModule.on_after_batch_transfer
:noindex:
add_to_queue
~~~~~~~~~~~~
.. automethod:: pytorch_lightning.core.module.LightningModule.add_to_queue
:noindex:
get_from_queue
~~~~~~~~~~~~~~
.. automethod:: pytorch_lightning.core.module.LightningModule.get_from_queue
:noindex:

View File

@@ -174,6 +174,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
### Removed
- Removed the deprecated `LightningModule.add_to_queue` and `LightningModule.get_from_queue` methods ([#13600](https://github.com/PyTorchLightning/pytorch-lightning/pull/13600))
- Removed deprecated `pytorch_lightning.core.decorators.parameter_validation` from `decorators` ([#13514](https://github.com/Lightning-AI/lightning/pull/13514))
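
Migration note: after this change the customization point lives on the spawn launcher rather than on the LightningModule, and the launcher hooks receive the trainer explicitly (the updated test_ddp_spawn_add_get_queue test at the end of this diff exercises exactly this pattern). A minimal sketch of the new approach; the subclass name and the enqueued value are illustrative only, not part of this diff:

from pytorch_lightning.strategies.launchers.spawn import _FakeQueue, _SpawnLauncher


class MySpawnLauncher(_SpawnLauncher):
    def add_to_queue(self, trainer, queue: _FakeQueue) -> None:
        # Runs in the spawned worker: put extra, pickle-friendly state on the queue.
        queue.put("my_extra_value")
        super().add_to_queue(trainer, queue)

    def get_from_queue(self, trainer, queue: _FakeQueue) -> None:
        # Runs in the main process after spawn returns: read the state back,
        # e.g. stash it on the strategy for later inspection.
        trainer.strategy.my_extra_value = queue.get()
        super().get_from_queue(trainer, queue)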

View File

@@ -1955,28 +1955,6 @@ class LightningModule(
)
self._use_amp = use_amp
def add_to_queue(self, queue: pl.strategies.launchers.spawn._FakeQueue) -> None:
"""Appends the :attr:`trainer.callback_metrics` dictionary to the given queue. To avoid issues with memory
sharing, we cast the data to numpy.
Args:
queue: the instance of the queue to append the data.
.. deprecated:: v1.5
This method was deprecated in v1.5 and will be removed in v1.7.
"""
def get_from_queue(self, queue: pl.strategies.launchers.spawn._FakeQueue) -> None:
"""Retrieve the :attr:`trainer.callback_metrics` dictionary from the given queue. To preserve consistency,
we cast back the data to ``torch.Tensor``.
Args:
queue: the instance of the queue from where to get the data.
.. deprecated:: v1.5
This method was deprecated in v1.5 and will be removed in v1.7.
"""
@contextmanager
def _prevent_trainer_and_dataloaders_deepcopy(self) -> None:
self._should_prevent_trainer_and_dataloaders_deepcopy = True
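
For context, the removed docstrings above describe what these hooks did: ship trainer.callback_metrics through the queue as NumPy data (to avoid tensor memory-sharing issues across processes) and cast the values back to torch.Tensor on the receiving side. A rough, self-contained sketch of that round trip; the helper names are illustrative and the real launcher-level implementation may differ:

import numpy as np
import torch


def metrics_to_queue(callback_metrics: dict, queue) -> None:
    # Cast tensors to NumPy before enqueueing so no shared-memory/CUDA tensors
    # cross the process boundary.
    payload = {
        k: v.cpu().numpy() if isinstance(v, torch.Tensor) else v
        for k, v in callback_metrics.items()
    }
    queue.put(payload)


def metrics_from_queue(callback_metrics: dict, queue) -> None:
    # Cast NumPy values back to torch.Tensor so downstream code sees the usual types.
    received = queue.get()
    callback_metrics.update(
        {k: torch.tensor(v) if isinstance(v, np.ndarray) else v for k, v in received.items()}
    )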

View File

@@ -26,7 +26,6 @@ from pytorch_lightning.strategies.launchers.base import _Launcher
from pytorch_lightning.strategies.strategy import Strategy
from pytorch_lightning.trainer.states import TrainerFn, TrainerState
from pytorch_lightning.utilities.apply_func import apply_to_collection, move_data_to_device
from pytorch_lightning.utilities.model_helpers import is_overridden
from pytorch_lightning.utilities.rank_zero import rank_zero_debug
from pytorch_lightning.utilities.types import _PATH
@@ -122,10 +121,6 @@ class _SpawnLauncher(_Launcher):
trainer.state = spawn_output.trainer_state
# get the `callback_metrics` and set it to the trainer
if is_overridden("get_from_queue", trainer.lightning_module):
# only in case the user does not override it.
# TODO: Remove the if in v1.7
trainer.lightning_module.get_from_queue(spawn_output.extra)
self.get_from_queue(trainer, spawn_output.extra)
def _collect_rank_zero_results(self, trainer: "pl.Trainer", results: Any) -> Optional["_SpawnOutput"]:
@@ -151,9 +146,6 @@ class _SpawnLauncher(_Launcher):
# adds the `callback_metrics` to the queue
extra = _FakeQueue()
if is_overridden("add_to_queue", trainer.lightning_module):
# TODO: Remove the if in v1.7
trainer.lightning_module.add_to_queue(extra)
self.add_to_queue(trainer, extra)
return _SpawnOutput(best_model_path, weights_path, trainer.state, results, extra)

View File

@@ -23,7 +23,6 @@ from pytorch_lightning.strategies.launchers.spawn import _FakeQueue, _SpawnLauncher
from pytorch_lightning.trainer.states import TrainerFn
from pytorch_lightning.utilities import _TPU_AVAILABLE
from pytorch_lightning.utilities.apply_func import move_data_to_device
from pytorch_lightning.utilities.model_helpers import is_overridden
from pytorch_lightning.utilities.rank_zero import rank_zero_debug
if _TPU_AVAILABLE:
@@ -136,9 +135,6 @@ class _XLASpawnLauncher(_SpawnLauncher):
# adds the `callback_metrics` to the queue
extra = _FakeQueue()
if is_overridden("add_to_queue", trainer.lightning_module):
# TODO: Remove the if in v1.7
trainer.lightning_module.add_to_queue(extra)
self.add_to_queue(trainer, extra)
return _SpawnOutput(best_model_path, weights_path, trainer.state, results, extra)

View File

@@ -46,7 +46,6 @@ def verify_loop_configurations(trainer: "pl.Trainer") -> None:
__verify_eval_loop_configuration(trainer, model, "predict")
__verify_dp_batch_transfer_support(trainer, model)
_check_add_get_queue(model)
# TODO: Delete _check_on_post_move_to_device in v1.7
_check_on_post_move_to_device(model)
_check_deprecated_callback_hooks(trainer)
@@ -218,23 +217,6 @@ def __check_training_step_requires_dataloader_iter(model: "pl.LightningModule")
)
def _check_add_get_queue(model: "pl.LightningModule") -> None:
r"""
Checks if add_to_queue or get_from_queue is overridden and sends a deprecation warning.
Args:
model: The lightning module
"""
if is_overridden("add_to_queue", model):
rank_zero_deprecation(
"The `LightningModule.add_to_queue` method was deprecated in v1.5 and will be removed in v1.7."
)
if is_overridden("get_from_queue", model):
rank_zero_deprecation(
"The `LightningModule.get_from_queue` method was deprecated in v1.5 and will be removed in v1.7."
)
# TODO: Delete _check_on_hpc_hooks in v1.8
def _check_on_hpc_hooks(model: "pl.LightningModule") -> None:
if is_overridden("on_hpc_save", model):

View File

@@ -34,25 +34,6 @@ from pytorch_lightning.strategies import SingleDeviceStrategy
from tests_pytorch.plugins.environments.test_lsf_environment import _make_rankfile
class BoringCallbackDDPSpawnModel(BoringModel):
def add_to_queue(self, queue):
...
def get_from_queue(self, queue):
...
def test_v1_7_0_deprecate_add_get_queue(tmpdir):
model = BoringCallbackDDPSpawnModel()
trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
with pytest.deprecated_call(match=r"`LightningModule.add_to_queue` method was deprecated in v1.5"):
trainer.fit(model)
with pytest.deprecated_call(match=r"`LightningModule.get_from_queue` method was deprecated in v1.5"):
trainer.fit(model)
def test_v1_7_0_deprecate_lightning_distributed(tmpdir):
with pytest.deprecated_call(match="LightningDistributed is deprecated in v1.5 and will be removed in v1.7."):
from pytorch_lightning.distributed.dist import LightningDistributed

View File

@@ -44,14 +44,6 @@ class BoringCallbackDDPSpawnModel(BoringModel):
self.log(self.name, self.val)
return super().validation_step(batch, batch_idx)
def add_to_queue(self, queue) -> None:
queue.put("test_val")
return super().add_to_queue(queue)
def get_from_queue(self, queue) -> None:
self.test_val = queue.get()
return super().get_from_queue(queue)
@RunIf(skip_windows=True)
def test_ddp_cpu():
@@ -67,31 +59,13 @@ def test_ddp_cpu():
trainer.fit(model)
@RunIf(min_cuda_gpus=2)
def test_ddp_spawn_extra_parameters(tmpdir):
"""Tests if device is set correctly when training for DDPSpawnStrategy and tests add_to_queue/get_from_queue
with Lightning Module (deprecated way)."""
trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, accelerator="gpu", devices=2, strategy="ddp_spawn")
assert isinstance(trainer.strategy, DDPSpawnStrategy)
assert trainer.strategy.root_device == torch.device("cuda:0")
val: float = 1.0
val_name: str = "val_acc"
model = BoringCallbackDDPSpawnModel(val_name, val)
dm = BoringDataModule()
trainer.fit(model, datamodule=dm)
assert trainer.callback_metrics[val_name] == torch.tensor(val)
assert model.test_val == "test_val"
class CustomSpawnLauncher(_SpawnLauncher):
def add_to_queue(self, trainer, queue) -> None:
queue.put("new_test_val")
queue.put("test_val")
return super().add_to_queue(trainer, queue)
def get_from_queue(self, trainer: Trainer, queue) -> None:
trainer.strategy.new_test_val = queue.get()
trainer.strategy.test_val = queue.get()
return super().get_from_queue(trainer, queue)
@@ -115,7 +89,7 @@ def test_ddp_spawn_add_get_queue(tmpdir):
dm = BoringDataModule()
trainer.fit(model, datamodule=dm)
assert trainer.callback_metrics[val_name] == torch.tensor(val)
assert ddp_spawn_strategy.new_test_val == "new_test_val"
assert ddp_spawn_strategy.test_val == "test_val"
class BoringModelDDP(BoringModel):