fixed clip grad warning

This commit is contained in:
William Falcon 2019-08-05 13:59:46 -04:00
parent e0a09f0fc4
commit 9af4267056
1 changed file with 1 addition and 1 deletion

View File

@ -814,7 +814,7 @@ class Trainer(TrainerIO):
# clip gradients
if self.gradient_clip > 0:
model = self.__get_model()
torch.nn.utils.clip_grad_norm(model.parameters(), self.gradient_clip)
torch.nn.utils.clip_grad_norm_(model.parameters(), self.gradient_clip)
# update gradients across all optimizers
for optimizer in self.optimizers: