From dc41769f02a4fd03ae909c53e4b66028fb15be4f Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Mon, 5 Aug 2019 13:59:46 -0400
Subject: [PATCH] fixed clip grad warning

---
 pytorch_lightning/models/trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/models/trainer.py b/pytorch_lightning/models/trainer.py
index e60a0d95cc..ddaca18c2d 100644
--- a/pytorch_lightning/models/trainer.py
+++ b/pytorch_lightning/models/trainer.py
@@ -814,7 +814,7 @@ class Trainer(TrainerIO):
         # clip gradients
         if self.gradient_clip > 0:
             model = self.__get_model()
-            torch.nn.utils.clip_grad_norm(model.parameters(), self.gradient_clip)
+            torch.nn.utils.clip_grad_norm_(model.parameters(), self.gradient_clip)
 
         # update gradients across all optimizers
         for optimizer in self.optimizers:
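
Note on the change (not part of the patch itself): torch.nn.utils.clip_grad_norm
was deprecated in favor of clip_grad_norm_, whose trailing underscore follows
PyTorch's naming convention for in-place operations; calling the old name emits
the deprecation warning this patch silences. Below is a minimal sketch of how
the in-place call fits into a manual training step. The model, data, and the
gradient_clip variable are stand-ins chosen for illustration, not code from the
repository:

    import torch

    model = torch.nn.Linear(10, 1)                # stand-in model (assumption)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    gradient_clip = 0.5                           # mirrors Trainer.gradient_clip

    x, y = torch.randn(4, 10), torch.randn(4, 1)  # dummy batch
    loss = torch.nn.functional.mse_loss(model(x), y)

    optimizer.zero_grad()
    loss.backward()
    # Rescales gradients in place so their total norm is at most gradient_clip;
    # returns the pre-clip total norm of all parameter gradients.
    torch.nn.utils.clip_grad_norm_(model.parameters(), gradient_clip)
    optimizer.step()

Because clip_grad_norm_ mutates the gradients in place between backward() and
step(), it drops in exactly where the deprecated call sat, with no other
changes to the training loop.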