1 parent f0975c2 · commit dba12e1
opacus/optimizers/optimizer.py
@@ -395,7 +395,7 @@ def clip_and_accumulate(self):
         """

         per_param_norms = [
-            g.view(len(g), -1).norm(2, dim=-1) for g in self.grad_samples
+            g.reshape(len(g), -1).norm(2, dim=-1) for g in self.grad_samples
         ]
         per_sample_norms = torch.stack(per_param_norms, dim=1).norm(2, dim=1)
         per_sample_clip_factor = (self.max_grad_norm / (per_sample_norms + 1e-6)).clamp(
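For context, a minimal sketch of why the switch to `reshape` can matter here (the commit does not state the motivation, so the non-contiguity rationale is an assumption): `.view()` requires the tensor's memory layout to be compatible with the requested shape and raises a RuntimeError otherwise, whereas `.reshape()` falls back to copying, so it also handles non-contiguous per-sample gradients.

import torch

# Hypothetical example, not taken from the Opacus test suite: a non-contiguous
# tensor (here produced by a transpose) stands in for a per-sample gradient.
g = torch.randn(4, 3, 5).transpose(1, 2)  # shape (4, 5, 3), non-contiguous

try:
    g.view(len(g), -1).norm(2, dim=-1)  # .view() needs a compatible (contiguous) layout
except RuntimeError as err:
    print("view() failed:", err)

# .reshape() copies the data only when necessary, so it works for both layouts.
per_param_norm = g.reshape(len(g), -1).norm(2, dim=-1)
print(per_param_norm.shape)  # torch.Size([4])

When the input is already contiguous, `reshape` returns a view without copying, so the change should not add overhead in the common case.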