Remove GradInformation module, including from LightningModule hierarchy (#8831)
* Remove GradInformation module from LightningModule hierarchy
parent 6de66eb110
commit f87b2ef21f
@@ -95,6 +95,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

- `Trainer.request_dataloader` now takes a `RunningStage` enum instance ([#8858](https://github.com/PyTorchLightning/pytorch-lightning/pull/8858))

### Deprecated

- Deprecated `LightningModule.summarize()` in favor of `pytorch_lightning.utilities.model_summary.summarize()`
@@ -143,12 +144,14 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

- Removed the deprecated `Trainer.truncated_bptt_steps` in favor of `LightningModule.truncated_bptt_steps` ([#8826](https://github.com/PyTorchLightning/pytorch-lightning/pull/8826))

- Removed `LightningModule.write_predictions` and `LightningModule.write_predictions_dict` ([#](https://github.com/PyTorchLightning/pytorch-lightning/pull/8850))
- Removed `LightningModule.write_predictions` and `LightningModule.write_predictions_dict` ([#8850](https://github.com/PyTorchLightning/pytorch-lightning/pull/8850))

- Removed reset dataloader hooks to Training Plugins and Accelerators ([#8858](https://github.com/PyTorchLightning/pytorch-lightning/pull/8858))

- Removed deprecated `GradInformation` module in favor of `pytorch_lightning.utilities.grads` ([#8831](https://github.com/PyTorchLightning/pytorch-lightning/pull/8831/))

### Fixed
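Not part of the diff: a minimal sketch of the replacement named in the deprecation entry above, calling `summarize` from `pytorch_lightning.utilities.model_summary` on a module instance instead of the deprecated `LightningModule.summarize()`. `DemoModel` and its single linear layer are hypothetical, defined only so there is something to summarize.

```python
import torch
from pytorch_lightning import LightningModule
from pytorch_lightning.utilities.model_summary import summarize


class DemoModel(LightningModule):
    """Hypothetical module used only for illustration."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x):
        return self.layer(x)


# Replaces the deprecated DemoModel().summarize() call.
print(summarize(DemoModel()))
```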
@@ -1,36 +0,0 @@
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module to describe gradients. This class is deprecated in v1.3 and will be removed in v1.5
"""
from typing import Dict, Union

from torch.nn import Module

from pytorch_lightning.utilities import rank_zero_deprecation
from pytorch_lightning.utilities.grads import grad_norm as new_grad_norm


class GradInformation(Module):
    def grad_norm(self, norm_type: Union[float, int, str]) -> Dict[str, float]:
        """Compute each parameter's gradient's norm and their overall norm.

        .. deprecated:: v1.3
            Will be removed in v1.5.0. Use :func:`pytorch_lightning.utilities.grads.grad_norm` instead.
        """
        rank_zero_deprecation(
            "LightningModule.grad_norm is deprecated in v1.3 and will be removed in v1.5."
            " Use grad_norm from pytorch_lightning.utilities.grads instead."
        )
        return new_grad_norm(self, norm_type)
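Not part of the commit, just a minimal sketch of the replacement path the removed docstring points to: calling `grad_norm` from `pytorch_lightning.utilities.grads` directly (the same positional call the deleted shim forwarded to), here from the `on_after_backward` hook. `LitModel`, its layer, and the optimizer choice are hypothetical, made up only for illustration.

```python
import torch
from pytorch_lightning import LightningModule
from pytorch_lightning.utilities.grads import grad_norm


class LitModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        return self.layer(batch).sum()

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)

    def on_after_backward(self):
        # Same call the removed mixin made: grad_norm(module, norm_type).
        norms = grad_norm(self, 2)
        self.log_dict(norms)
```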
@@ -31,7 +31,6 @@ from torch.nn import Module
from torch.optim.optimizer import Optimizer
from torchmetrics import Metric

from pytorch_lightning.core.grads import GradInformation
from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks
from pytorch_lightning.core.mixins import DeviceDtypeModuleMixin, HyperparametersMixin
from pytorch_lightning.core.optimizer import LightningOptimizer
@@ -57,7 +56,6 @@ class LightningModule(
    ABC,
    DeviceDtypeModuleMixin,
    HyperparametersMixin,
    GradInformation,
    ModelIO,
    ModelHooks,
    DataHooks,
@@ -161,12 +161,6 @@ def test_v1_5_0_auto_move_data():
    pass


def test_v1_5_0_lighting_module_grad_norm(tmpdir):
    model = BoringModel()
    with pytest.deprecated_call(match="is deprecated in v1.3 and will be removed in v1.5"):
        model.grad_norm(2)


def test_v1_5_0_datamodule_setter():
    model = BoringModel()
    datamodule = BoringDataModule()
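Also not part of the commit: a rough sketch of what a check looks like once the deprecation shim above is gone, calling the utility on a plain `torch.nn.Module` after a backward pass. The test name is hypothetical; the removed code only guarantees a `Dict[str, float]` of per-parameter gradient norms plus an overall norm.

```python
import torch
from pytorch_lightning.utilities.grads import grad_norm


def test_grad_norm_utility():  # hypothetical test, not in the Lightning suite
    model = torch.nn.Linear(4, 2)
    model(torch.randn(8, 4)).sum().backward()
    norms = grad_norm(model, 2)
    # The removed docstring promises each parameter's gradient norm plus their overall norm.
    assert norms
    assert all(isinstance(value, float) for value in norms.values())
```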