utils.py
import csv
import random
from functools import partialmethod

import numpy as np
import torch
from sklearn.metrics import precision_recall_fscore_support


class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
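
# Example usage (illustrative sketch; `model`, `criterion`, and `loader` are
# hypothetical and not defined in this module):
#
#   losses = AverageMeter()
#   for inputs, targets in loader:
#       loss = criterion(model(inputs), targets)
#       losses.update(loss.item(), inputs.size(0))
#   print(losses.avg)  # sample-weighted running mean of the loss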


class Logger(object):

    def __init__(self, path, header):
        self.log_file = path.open('w')
        self.logger = csv.writer(self.log_file, delimiter='\t')
        self.logger.writerow(header)
        self.header = header

    def __del__(self):
        self.log_file.close()

    def log(self, values):
        write_values = []
        for col in self.header:
            assert col in values
            write_values.append(values[col])
        self.logger.writerow(write_values)
        self.log_file.flush()
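
# Example usage (illustrative sketch; the file name and columns are arbitrary).
# Note that `path` must be a pathlib.Path (or anything with an .open() method),
# since the constructor calls path.open('w'):
#
#   from pathlib import Path
#   train_logger = Logger(Path('train.log'), ['epoch', 'loss', 'acc', 'lr'])
#   train_logger.log({'epoch': 1, 'loss': 0.7, 'acc': 0.55, 'lr': 0.1})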


def calculate_accuracy(outputs, targets):
    # Top-1 accuracy: fraction of samples whose highest-scoring class matches
    # the target.
    with torch.no_grad():
        batch_size = targets.size(0)

        _, pred = outputs.topk(1, 1, largest=True, sorted=True)
        pred = pred.t()
        correct = pred.eq(targets.view(1, -1))
        n_correct_elems = correct.float().sum().item()

        return n_correct_elems / batch_size
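
# Example (illustrative): two of three predictions match the targets.
#
#   outputs = torch.tensor([[0.9, 0.1], [0.2, 0.8], [0.6, 0.4]])
#   targets = torch.tensor([0, 1, 1])
#   calculate_accuracy(outputs, targets)  # -> 0.666...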


def calculate_precision_and_recall(outputs, targets, pos_label=1):
    with torch.no_grad():
        _, pred = outputs.topk(1, 1, largest=True, sorted=True)
        precision, recall, _, _ = precision_recall_fscore_support(
            targets.view(-1, 1).cpu().numpy(),
            pred.cpu().numpy())

        return precision[pos_label], recall[pos_label]
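
# Note: precision_recall_fscore_support returns per-class arrays indexed by the
# sorted labels present in the batch, so indexing with pos_label assumes that
# classes 0..pos_label all occur among targets or predictions. Illustrative
# binary example:
#
#   outputs = torch.tensor([[0.8, 0.2], [0.3, 0.7], [0.1, 0.9], [0.6, 0.4]])
#   targets = torch.tensor([0, 1, 0, 1])
#   calculate_precision_and_recall(outputs, targets)  # -> (0.5, 0.5) for class 1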


def worker_init_fn(worker_id):
    # Give each DataLoader worker a distinct seed derived from the torch seed.
    torch_seed = torch.initial_seed()
    random.seed(torch_seed + worker_id)

    # np.random.seed only accepts 32-bit seeds, so reduce the torch seed first.
    if torch_seed >= 2**32:
        torch_seed = torch_seed % 2**32
    np.random.seed(torch_seed + worker_id)
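
# Example (illustrative sketch; `dataset` is hypothetical): pass this function
# as worker_init_fn so each DataLoader worker seeds `random` and numpy
# differently.
#
#   loader = torch.utils.data.DataLoader(dataset, batch_size=32, num_workers=4,
#                                        worker_init_fn=worker_init_fn)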


def get_lr(optimizer):
    lrs = []
    for param_group in optimizer.param_groups:
        lr = float(param_group['lr'])
        lrs.append(lr)

    return max(lrs)
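
# Example (illustrative; `backbone` and `head` are hypothetical modules): with
# several parameter groups, the largest learning rate is reported.
#
#   optimizer = torch.optim.SGD([{'params': backbone.parameters(), 'lr': 0.01},
#                                {'params': head.parameters(), 'lr': 0.1}],
#                               lr=0.1, momentum=0.9)
#   get_lr(optimizer)  # -> 0.1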


def partialclass(cls, *args, **kwargs):
    class PartialClass(cls):
        __init__ = partialmethod(cls.__init__, *args, **kwargs)

    return PartialClass
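
# Example (illustrative sketch; `ResNet` and `BasicBlock` stand in for any
# class and constructor arguments): pre-bind constructor arguments to get a
# preconfigured subclass.
#
#   ResNet18 = partialclass(ResNet, block=BasicBlock, layers=[2, 2, 2, 2])
#   model = ResNet18(num_classes=400)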


def write_to_batch_logger(batch_logger, epoch, i, data_loader, losses,
                          accuracies, current_lr):
    if batch_logger is not None:
        batch_logger.log({
            'epoch': epoch,
            'batch': i + 1,
            'iter': (epoch - 1) * len(data_loader) + (i + 1),
            'loss': losses,
            'acc': accuracies,
            'lr': current_lr
        })


def write_to_epoch_logger(epoch_logger, epoch, losses, accuracies, current_lr):
    if epoch_logger is not None:
        epoch_logger.log({
            'epoch': epoch,
            'loss': losses,
            'acc': accuracies,
            'lr': current_lr
        })
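

# Example wiring (illustrative sketch): these helpers expect Logger instances
# whose headers match the keys logged above; `epoch`, `i`, `data_loader`,
# `losses`, `accuracies`, and `optimizer` come from a hypothetical training
# loop.
#
#   from pathlib import Path
#   batch_logger = Logger(Path('train_batch.log'),
#                         ['epoch', 'batch', 'iter', 'loss', 'acc', 'lr'])
#   epoch_logger = Logger(Path('train.log'), ['epoch', 'loss', 'acc', 'lr'])
#
#   # inside the loop over batches:
#   write_to_batch_logger(batch_logger, epoch, i, data_loader,
#                         losses.avg, accuracies.avg, get_lr(optimizer))
#   # once per epoch:
#   write_to_epoch_logger(epoch_logger, epoch, losses.avg, accuracies.avg,
#                         get_lr(optimizer))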