From aed9f7a798d6c5b58c212956e88f13ba74f77f39 Mon Sep 17 00:00:00 2001
From: vganapati
Date: Fri, 23 Jun 2023 16:26:44 -0400
Subject: [PATCH] gradient clipping

---
 utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/utils.py b/utils.py
index d1d1707..4cf48bf 100644
--- a/utils.py
+++ b/utils.py
@@ -329,6 +329,7 @@ def train(dataloader,
         pde_loss.backward()
         if use_dist:
             average_gradients(model)
+        torch.nn.utils.clip_grad_norm_(model.parameters(), 10)
         optimizer.step()
         total_examples_finished += len(data)
         print(f"{device}: loss: {pde_loss.item():>7f} [{total_examples_finished:>5d}/{size:>5d}]")
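
Note: the added line rescales gradients so their global L2 norm is at most 10, and it runs after backward() (and after gradient averaging in the distributed case) but before optimizer.step(). The following is a minimal standalone sketch of where torch.nn.utils.clip_grad_norm_ sits in a generic PyTorch training step; the model, optimizer, and data names below are placeholders, not the objects from utils.py.

    # Sketch: gradient clipping between backward() and optimizer.step().
    # Placeholder model/optimizer/data, not the ones used in utils.py.
    import torch

    model = torch.nn.Linear(4, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
    data = torch.randn(8, 4)
    target = torch.randn(8, 1)

    optimizer.zero_grad()
    loss = torch.nn.functional.mse_loss(model(data), target)
    loss.backward()
    # Clip so the combined gradient norm does not exceed 10,
    # matching the max_norm value used in the patch above.
    torch.nn.utils.clip_grad_norm_(model.parameters(), 10)
    optimizer.step()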