Avoid calling average_parameters multiple times per optimizer step (#12452)

Author: Yi Wang, 2022-04-01 22:34:25 -07:00 (committed by lexierule)
parent 7122a597ed
commit 8eebd0154a
2 changed files with 3 additions and 6 deletions

CHANGELOG.md

@@ -73,7 +73,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 ### Fixed
--
+- Avoid calling `average_parameters` multiple times per optimizer step ([#12452](https://github.com/PyTorchLightning/pytorch-lightning/pull/12452))
 -

pytorch_lightning/strategies/ddp.py

@@ -280,11 +280,8 @@ class DDPStrategy(ParallelStrategy):
         if not _TORCH_GREATER_EQUAL_1_10 or self._model_averager is None:
             return optimizer_output
-        for group in optimizer.param_groups:
-            for param in group["params"]:
-                if param.grad is None:
-                    continue
-                self._model_averager.average_parameters(iter(param))
+        params = [param for group in optimizer.param_groups for param in group["params"] if param.grad is not None]
+        self._model_averager.average_parameters(iter(params))
         return optimizer_output
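
Why batching matters: `torch.distributed`'s model averagers (e.g. `PeriodicModelAverager`) advance an internal step counter on every `average_parameters` call, so invoking the averager once per parameter, as the old nested loop did, inflated the step count and broke the once-per-optimizer-step cadence, besides issuing a separate communication pass per parameter. The sketch below illustrates the batched pattern the new code uses. It is a minimal standalone example, not the Lightning implementation: the single-rank "gloo" process group, the toy Linear model, and period=4 are assumptions chosen only so it runs without a launcher.

# Minimal sketch of one averager call per optimizer step (assumptions:
# single-rank "gloo" group, toy model; not the Lightning code itself).
import os

import torch
import torch.distributed as dist
from torch.distributed.algorithms.model_averaging.averagers import PeriodicModelAverager

os.environ.setdefault("MASTER_ADDR", "127.0.0.1")  # single-process group so the
os.environ.setdefault("MASTER_PORT", "29501")      # sketch runs without torchrun
dist.init_process_group("gloo", rank=0, world_size=1)

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
averager = PeriodicModelAverager(period=4, warmup_steps=0)  # average every 4 steps

for _ in range(8):
    optimizer.zero_grad()
    model(torch.randn(16, 4)).sum().backward()
    optimizer.step()
    # One call per optimizer step, covering every parameter that received a
    # gradient -- mirroring the list comprehension introduced by this commit.
    # Calling average_parameters() once per parameter would advance the
    # averager's step counter once per parameter and break its period logic.
    params = [p for group in optimizer.param_groups for p in group["params"] if p.grad is not None]
    averager.average_parameters(iter(params))

dist.destroy_process_group()

With world_size=1 the averaging is a communication no-op, but the call pattern (collect once, average once, per step) is exactly what the diff above enforces.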