Add back support for logging in the gradient clipping hooks (#14298)

* Add back support for logging in the gradient clipping hooks

* Docs and CHANGELOG

* Fix tests
Carlos Mocholí 2022-08-22 15:19:53 +02:00 committed by GitHub
parent 9cf9bc5820
commit 7a617ec90e
5 changed files with 14 additions and 11 deletions
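
In user code, the behaviour this commit restores looks roughly like the sketch below: a `LightningModule` that calls `self.log` inside `configure_gradient_clipping` before delegating to `clip_gradients`. The model class, metric name, and norm computation are illustrative and not part of the change; with this fix the `self.log` call no longer raises, and it defaults to `on_step=True, on_epoch=False` (the same defaults as `optimizer_step`).

```python
import torch
from pytorch_lightning import LightningModule


class GradClipLoggingModel(LightningModule):
    """Illustrative module; only the gradient clipping hook is shown."""

    def configure_gradient_clipping(self, optimizer, optimizer_idx, gradient_clip_val, gradient_clip_algorithm):
        # Total gradient norm before clipping (for illustration only).
        grads = [p.grad.detach().norm() for p in self.parameters() if p.grad is not None]
        grad_norm = torch.norm(torch.stack(grads)) if grads else torch.tensor(0.0)

        # Logging here was rejected in v1.7.2; this commit allows it again,
        # with step-level defaults (on_step=True, on_epoch=False).
        self.log("grad_norm_before_clipping", grad_norm)

        # Delegate to the default clipping implementation.
        self.clip_gradients(
            optimizer, gradient_clip_val=gradient_clip_val, gradient_clip_algorithm=gradient_clip_algorithm
        )
```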

@@ -355,7 +355,7 @@ In LightningModule
* - Method
- on_step
- on_epoch
- * - on_after_backward, on_before_backward, on_before_optimizer_step, on_before_zero_grad, training_step, training_step_end
+ * - on_after_backward, on_before_backward, on_before_optimizer_step, optimizer_step, configure_gradient_clipping, on_before_zero_grad, training_step, training_step_end
- True
- False
* - training_epoch_end, test_epoch_end, test_step, test_step_end, validation_epoch_end, validation_step, validation_step_end

@@ -76,6 +76,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed an `AttributeError` when accessing `LightningModule.logger` and the Trainer has multiple loggers ([#14234](https://github.com/Lightning-AI/lightning/pull/14234))
+ - Added back support for `log`ging in the `configure_gradient_clipping` hook after unintended removal in v1.7.2 ([#14298](https://github.com/Lightning-AI/lightning/issues/14298))
- Fixed wrong num padding for `RichProgressBar` ([#14296](https://github.com/Lightning-AI/lightning/pull/14296))

@@ -44,8 +44,13 @@ class _FxValidator:
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"lr_scheduler_step": None,
"configure_gradient_clipping": None,
"clip_gradients": None,
# should match `optimizer_step`
"configure_gradient_clipping": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"clip_gradients": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_before_zero_grad": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
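
For context, each `_LogOptions` entry above declares which `on_step`/`on_epoch` values `self.log` accepts in that hook and which defaults apply. Assuming the `functions` mapping and module path are as shown in this diff, the effect of the change can be sketched as:

```python
from pytorch_lightning.trainer.connectors.logger_connector.fx_validator import _FxValidator

# Before this commit the two entries were `None`, which meant any `self.log`
# call in these hooks was rejected; now they carry the same defaults as
# `optimizer_step` (log on step, not on epoch).
for hook in ("configure_gradient_clipping", "clip_gradients"):
    opts = _FxValidator.functions[hook]
    assert opts is not None
    assert opts["default_on_step"] is True and opts["default_on_epoch"] is False
```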

@@ -183,12 +183,7 @@ class HookedModel(BoringModel):
def __init__(self, not_supported):
super().__init__()
pl_module_hooks = get_members(LightningModule)
- pl_module_hooks.difference_update(
-     {
-         "log",
-         "log_dict",
-     }
- )
+ pl_module_hooks.difference_update({"log", "log_dict"})
# remove `nn.Module` hooks
module_hooks = get_members(torch.nn.Module)
pl_module_hooks.difference_update(module_hooks)
@@ -236,8 +231,6 @@ def test_fx_validator_integration(tmpdir):
"on_validation_model_eval": "You can't",
"on_validation_model_train": "You can't",
"lr_scheduler_step": "You can't",
"configure_gradient_clipping": "You can't",
"clip_gradients": "You can't",
"on_save_checkpoint": "You can't",
"on_load_checkpoint": "You can't",
"on_exception": "You can't",

@@ -54,6 +54,8 @@ def test_default_level_for_hooks_that_support_logging():
"on_after_backward",
"on_before_optimizer_step",
"optimizer_step",
"configure_gradient_clipping",
"clip_gradients",
"on_before_zero_grad",
"optimizer_zero_grad",
"training_step",