Skip to content

Commit

Permalink
Merge pull request #268 from kozistr/update/coverage
Browse files Browse the repository at this point in the history
[Update] Coverage
  • Loading branch information
kozistr authored Aug 13, 2024
2 parents f4648b0 + 3f38e3f commit cbaf93a
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pytorch_optimizer/optimizer/prodigy.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
if 'd_numerator' not in group:
group['d_numerator'] = torch.tensor([0.0], device=device)
elif group['d_numerator'].device != device:
- group['d_numerator'] = group['d_numerator'].to(device)
+ group['d_numerator'] = group['d_numerator'].to(device)  # pragma: no cover

d_numerator = group['d_numerator']
d_numerator.mul_(beta3)
Expand Down
3 changes: 1 addition & 2 deletions tests/test_optimizers.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,7 @@ def _closure() -> float:
for _ in range(iterations):
optimizer.zero_grad()

- y_pred = model(x_data)
- loss = loss_fn(y_pred, y_data)
+ loss = loss_fn(model(x_data), y_data)

if init_loss == np.inf:
init_loss = loss
Expand Down

0 comments on commit cbaf93a

Please sign in to comment.