losses.py
import torch
import torch.nn as nn
from torch.autograd import Variable

epsilon = 1e-8


def compute_epe(gt, pred):
    """Average end-point error between ground-truth flow and a prediction."""
    _, _, h_pred, w_pred = pred.size()
    bs, nc, h_gt, w_gt = gt.size()

    u_gt, v_gt = gt[:, 0, :, :], gt[:, 1, :, :]
    # Upsample the prediction to ground-truth resolution and rescale the flow
    # components by the resolution ratio (interpolate replaces the deprecated upsample).
    pred = nn.functional.interpolate(pred, size=(h_gt, w_gt), mode='bilinear', align_corners=False)
    u_pred = pred[:, 0, :, :] * (w_gt / w_pred)
    v_pred = pred[:, 1, :, :] * (h_gt / h_pred)

    epe = torch.sqrt(torch.pow((u_gt - u_pred), 2) + torch.pow((v_gt - v_pred), 2))

    if nc == 3:
        # Third ground-truth channel is a validity mask: average only over valid pixels.
        valid = gt[:, 2, :, :]
        epe = epe * valid
        avg_epe = epe.sum() / (valid.sum() + epsilon)
    else:
        avg_epe = epe.sum() / (bs * h_gt * w_gt)

    if type(avg_epe) == Variable:
        avg_epe = avg_epe.data
    return avg_epe.item()


def compute_cossim(gt, pred):
    """Average per-pixel cosine similarity between ground-truth and predicted flow."""
    _, _, h_pred, w_pred = pred.size()
    bs, nc, h_gt, w_gt = gt.size()

    pred = nn.functional.interpolate(pred, size=(h_gt, w_gt), mode='bilinear', align_corners=False)
    # Cosine similarity along the channel dimension, i.e. between (u, v) vectors.
    similarity = nn.functional.cosine_similarity(gt[:, :2], pred)

    if nc == 3:
        valid = gt[:, 2, :, :]
        similarity = similarity * valid
        avg_sim = similarity.sum() / (valid.sum() + epsilon)
    else:
        avg_sim = similarity.sum() / (bs * h_gt * w_gt)

    if type(avg_sim) == Variable:
        avg_sim = avg_sim.data
    return avg_sim.item()


def multiscale_cossim(gt, pred):
    """Negative cosine-similarity loss summed over a pyramid of scales."""
    assert len(gt) == len(pred)
    loss = 0
    for (_gt, _pred) in zip(gt, pred):
        loss += -nn.functional.cosine_similarity(_gt, _pred).mean()
    return loss
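
A minimal usage sketch, assuming 4D float tensors shaped (batch, channels, height, width): gt carries the two flow channels plus an optional 0/1 validity mask as a third channel, and pred is the network output at a possibly lower resolution. The shapes, tensors, and variable names below (gt_pyramid, pred_pyramid) are illustrative only and not part of the repository.

# Illustrative inputs only; any real data loader would supply these.
gt = torch.randn(4, 3, 128, 160)           # ground truth: u, v, plus a mask channel
gt[:, 2] = (gt[:, 2] > 0).float()          # turn the third channel into a 0/1 validity mask
pred = torch.randn(4, 2, 32, 40)           # prediction at 1/4 of the ground-truth resolution

print(compute_epe(gt, pred))               # average end-point error over valid pixels
print(compute_cossim(gt, pred))            # average cosine similarity over valid pixels

# Multi-scale loss: matching lists of per-scale ground truths and predictions.
gt_pyramid = [torch.randn(4, 2, 32, 40), torch.randn(4, 2, 64, 80)]
pred_pyramid = [torch.randn(4, 2, 32, 40), torch.randn(4, 2, 64, 80)]
loss = multiscale_cossim(gt_pyramid, pred_pyramid)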