From 917dc1e1ee9fb911dfcc4e041d9d19d16c203255 Mon Sep 17 00:00:00 2001
From: Ir1d
Date: Tue, 3 Dec 2019 22:17:42 +0800
Subject: [PATCH] filter param with no grad

---
 pytorch_lightning/trainer/training_tricks_mixin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/training_tricks_mixin.py b/pytorch_lightning/trainer/training_tricks_mixin.py
index 836b66897bace..1cdef291e8cb6 100644
--- a/pytorch_lightning/trainer/training_tricks_mixin.py
+++ b/pytorch_lightning/trainer/training_tricks_mixin.py
@@ -13,7 +13,7 @@ def clip_gradients(self):
     def print_nan_gradients(self):
         model = self.get_model()
         for param in model.parameters():
-            if torch.isnan(param.grad.float()).any():
+            if (param.grad is not None) and torch.isnan(param.grad.float()).any():
                 logging.info(param, param.grad)

     def configure_accumulated_gradients(self, accumulate_grad_batches):
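
Note (not part of the patch): the guard matters because parameters that are frozen (requires_grad=False) or never touched by the loss keep grad=None, so the old code raised AttributeError on param.grad.float(). A minimal standalone sketch of the failure mode and the fix, using a toy nn.Linear model and print in place of the mixin's logging call:

import torch
import torch.nn as nn

model = nn.Linear(2, 2)
model.bias.requires_grad = False  # frozen parameter: its .grad stays None

loss = model(torch.randn(4, 2)).sum()
loss.backward()

for name, param in model.named_parameters():
    # Without the None check, param.grad.float() would raise
    # AttributeError for the frozen bias parameter.
    if (param.grad is not None) and torch.isnan(param.grad.float()).any():
        print(name, param.grad)  # only reached for params that received grads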