From 9eda520bee3552591b732eadcae2a36de82225de Mon Sep 17 00:00:00 2001
From: Adrian Wälchli
Date: Tue, 6 Jul 2021 10:13:09 +0200
Subject: [PATCH] clean up unused attributes in LightningModule (#8259)

---
 pytorch_lightning/core/lightning.py                | 11 ++++-------
 .../trainer/connectors/model_connector.py          |  2 --
 2 files changed, 4 insertions(+), 9 deletions(-)

diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 6697181bb9..66b2bbc39d 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -91,8 +91,6 @@ class LightningModule(
         # torch/nn/modules/module.py#L227)
         torch._C._log_api_usage_once(f"lightning.module.{self.__class__.__name__}")
 
-        self._loaded_optimizer_states_dict = {}
-
         # pointer to the trainer object
         self.trainer = None
 
@@ -109,13 +107,15 @@ class LightningModule(
         self._example_input_array = None
         self._datamodule = None
         self._current_fx_name: Optional[str] = None
-        self._running_manual_backward: bool = False
         self._current_dataloader_idx: Optional[int] = None
         self._automatic_optimization: bool = True
         self._truncated_bptt_steps: int = 0
         self._param_requires_grad_state = dict()
         self._metric_attributes: Optional[Dict[int, str]] = None
 
+        # deprecated, will be removed in 1.6
+        self._loaded_optimizer_states_dict = {}
+
     def optimizers(self, use_pl_optimizer: bool = True) -> Union[Optimizer, List[Optimizer], List[LightningOptimizer]]:
         """
         Returns the optimizer(s) that are being used during training. Useful for manual optimization.
@@ -1450,9 +1450,7 @@ class LightningModule(
         self._verify_is_manual_optimization('manual_backward')
 
         # backward
-        self._running_manual_backward = True
         self.trainer.fit_loop.epoch_loop.batch_loop.backward(loss, optimizer=None, opt_idx=None, *args, **kwargs)
-        self._running_manual_backward = False
 
     def backward(self, loss: Tensor, optimizer: Optimizer, optimizer_idx: int, *args, **kwargs) -> None:
         """
@@ -1470,8 +1468,7 @@ class LightningModule(
             def backward(self, loss, optimizer, optimizer_idx):
                 loss.backward()
         """
-        if self.automatic_optimization or self._running_manual_backward:
-            loss.backward(*args, **kwargs)
+        loss.backward(*args, **kwargs)
 
     def toggle_optimizer(self, optimizer: Optimizer, optimizer_idx: int):
         """
diff --git a/pytorch_lightning/trainer/connectors/model_connector.py b/pytorch_lightning/trainer/connectors/model_connector.py
index d4bdedd31e..760dc57cfe 100644
--- a/pytorch_lightning/trainer/connectors/model_connector.py
+++ b/pytorch_lightning/trainer/connectors/model_connector.py
@@ -24,7 +24,5 @@ class ModelConnector:
 
         for m in [model, ref_model]:
             m.trainer = proxy(self.trainer)
-            m._device_type = str(self.trainer._device_type)
-            m._distrib_type = str(self.trainer._distrib_type)
             m.use_amp = self.trainer.amp_backend is not None
             m.precision = self.trainer.precision
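
For context, manual optimization is the code path the `manual_backward` and `backward` hooks above serve: `manual_backward` routes the backward call through the trainer's batch loop so that precision plugins can scale the loss before the module-level `backward` hook runs. Below is a minimal sketch of a LightningModule using manual optimization against this API; the class name, model, loss, and hyperparameters are illustrative only and are not part of the patch.

    import torch
    from torch import nn
    import pytorch_lightning as pl


    class ManualOptModel(pl.LightningModule):
        def __init__(self):
            super().__init__()
            # opt out of Lightning's automatic optimization loop
            self.automatic_optimization = False
            self.layer = nn.Linear(32, 1)

        def training_step(self, batch, batch_idx):
            x, y = batch
            # LightningOptimizer wrapper around the optimizer
            # returned by configure_optimizers()
            opt = self.optimizers()
            loss = nn.functional.mse_loss(self.layer(x), y)
            opt.zero_grad()
            # manual_backward() instead of a raw loss.backward(), so the
            # trainer's batch loop can apply AMP/precision handling first
            self.manual_backward(loss)
            opt.step()
            return loss

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.1)

With this patch, a user override of `backward` behaves the same under automatic and manual optimization: the hook always backpropagates, and the `_running_manual_backward` bookkeeping flag that previously gated `loss.backward()` is no longer needed.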