lib.py (forked from youngryan1993/SFDA-SourceFreeDA)
from easydl import *
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F


def reverse_sigmoid(y):
    """Inverse sigmoid (logit), with small epsilons for numerical stability."""
    return torch.log(y / (1.0 - y + 1e-10) + 1e-10)
def get_source_share_weight(domain_out, before_softmax, domain_temperature=1.0, class_temperature=10.0):
    """Per-sample source weight: normalized prediction entropy minus the
    temperature-scaled domain-discriminator probability."""
    before_softmax = before_softmax / class_temperature
    after_softmax = nn.Softmax(-1)(before_softmax)
    # reverse_sigmoid recovers the domain logit so it can be rescaled by domain_temperature
    domain_logit = reverse_sigmoid(domain_out)
    domain_logit = domain_logit / domain_temperature
    domain_out = nn.Sigmoid()(domain_logit)
    entropy = torch.sum(-after_softmax * torch.log(after_softmax + 1e-10), dim=1, keepdim=True)
    entropy_norm = entropy / np.log(after_softmax.size(1))
    weight = entropy_norm - domain_out
    weight = weight.detach()
    return weight
def get_source_share_weight_onlyentropy(before_softmax, class_temperature=10.0):
    """Entropy-only variant of get_source_share_weight (no domain-discriminator term)."""
    before_softmax = before_softmax / class_temperature
    after_softmax = nn.Softmax(-1)(before_softmax)
    entropy = torch.sum(-after_softmax * torch.log(after_softmax + 1e-5), dim=1, keepdim=True)
    entropy_norm = entropy / np.log(after_softmax.size(1))
    weight = entropy_norm.detach()
    return weight
def hellinger_distance(p, q):
    """Row-wise Hellinger distance between probability distributions p and q."""
    return torch.norm(torch.sqrt(p) - torch.sqrt(q), p=2, dim=1) / np.sqrt(2)


def get_commonness_weight(ps_s, pt_s, ps_t, pt_t, class_temperature=10.0):
    """Commonness weights: Hellinger distance between source- and target-classifier
    predictions, computed for source samples (temperature-smoothed) and target samples."""
    ps_s = F.softmax(ps_s / class_temperature, dim=1)
    pt_s = F.softmax(pt_s / class_temperature, dim=1)
    ps_t = F.softmax(ps_t, dim=1)
    pt_t = F.softmax(pt_t, dim=1)
    ws = hellinger_distance(ps_s, pt_s).detach()
    wt = hellinger_distance(ps_t, pt_t).detach()
    return ws, wt
def get_entropy(domain_out, before_softmax, domain_temperature=1.0, class_temperature=10.0):
    """Normalized entropy of the temperature-scaled prediction. The domain arguments are
    kept for API symmetry with get_source_share_weight but do not affect the result."""
    before_softmax = before_softmax / class_temperature
    after_softmax = nn.Softmax(-1)(before_softmax)
    entropy = torch.sum(-after_softmax * torch.log(after_softmax + 1e-10), dim=1, keepdim=True)
    entropy_norm = entropy / np.log(after_softmax.size(1))
    weight = entropy_norm.detach()
    return weight


def get_target_share_weight(domain_out, before_softmax, domain_temperature=1.0, class_temperature=10.0):
    """Target share weight is the negation of the source share weight."""
    return -get_source_share_weight(domain_out, before_softmax, domain_temperature, class_temperature)
def normalize_weight(x):
    """Min-max normalize to [0, 1], then rescale so the mean weight is ~1
    (keeps the average loss scale unchanged when the weights are applied)."""
    min_val = x.min()
    max_val = x.max()
    x = (x - min_val) / (max_val - min_val + 1e-5)
    x = x / (torch.mean(x) + 1e-5)
    return x.detach()


def normalize_weight01(x):
    """Min-max normalize to [0, 1]. Assumes x is not constant (max > min)."""
    min_val = x.min()
    max_val = x.max()
    x = (x - min_val) / (max_val - min_val)
    return x.detach()


def normalize_weight_11(x):
    """Min-max normalize to [-1, 1]. Assumes x is not constant (max > min)."""
    min_val = x.min()
    max_val = x.max()
    x = (x - min_val) / (max_val - min_val)
    x = x * 2 - 1
    return x.detach()
def seed_everything(seed=1234):
    """Seed Python, NumPy, and PyTorch (CPU and CUDA) RNGs for reproducibility."""
    import os
    import random
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)


def tensor_l2normalization(q):
    """L2-normalize each row of q."""
    qn = torch.norm(q, p=2, dim=1).detach().unsqueeze(1)
    q = q.div(qn.expand_as(q))
    return q
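

# --- Usage sketch (not part of the original library): a minimal smoke test illustrating the
# expected shapes of the weighting utilities above, plus a Hellinger-distance sanity check.
# The batch size, class count, and random inputs are arbitrary assumptions for illustration.
if __name__ == '__main__':
    seed_everything(1234)
    batch, num_classes = 4, 10
    logits = torch.randn(batch, num_classes)            # classifier outputs (pre-softmax)
    domain_out = torch.sigmoid(torch.randn(batch, 1))   # domain-discriminator probabilities

    w_src = get_source_share_weight(domain_out, logits)  # shape (batch, 1)
    w_tgt = get_target_share_weight(domain_out, logits)  # shape (batch, 1), equals -w_src
    ws, wt = get_commonness_weight(logits, logits, logits, logits)  # each shape (batch,)
    print(normalize_weight(w_src).shape, w_tgt.shape, ws.shape, wt.shape)

    # Hellinger distance is 0 for identical distributions and 1 for disjoint support.
    p = torch.tensor([[1.0, 0.0]])
    q = torch.tensor([[0.0, 1.0]])
    print(hellinger_distance(p, p), hellinger_distance(p, q))  # ~0.0 and ~1.0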