utils.py
import logging

import torch


def setup_logging(log_file='log.txt'):
    """Set up logging: all messages go to `log_file`, INFO and above to console."""
    logging.basicConfig(level=logging.DEBUG,
                        format="%(asctime)s - %(levelname)s - %(message)s",
                        datefmt="%Y-%m-%d %H:%M:%S",
                        filename=log_file,
                        filemode='w')
    # mirror INFO-level (and higher) messages to the console
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)
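
# Example usage (a minimal sketch): configure logging once at program start,
# then log through the root logger as usual.
#
#   setup_logging('train.log')
#   logging.info('epoch 1 done')   # printed to console and written to the file
#   logging.debug('lr = 0.1')      # written to the file only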

__optimizers = {
    'SGD': torch.optim.SGD,
    'ASGD': torch.optim.ASGD,
    'Adam': torch.optim.Adam,
    'Adamax': torch.optim.Adamax,
    'Adagrad': torch.optim.Adagrad,
    'Adadelta': torch.optim.Adadelta,
    'Rprop': torch.optim.Rprop,
    'RMSprop': torch.optim.RMSprop
}

def select_optimizer(optimizer_name, params, *kargs, **kwargs):
    """Look up an optimizer class by name and construct it with the given args."""
    return __optimizers[optimizer_name](params, *kargs, **kwargs)
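
# Example usage (an illustrative sketch; the model below is hypothetical):
#
#   model = torch.nn.Linear(10, 2)
#   optimizer = select_optimizer('SGD', model.parameters(), lr=0.1, momentum=0.9)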

def adjust_optimizer(optimizer, epoch, config):
    """Reconfigures the optimizer according to epoch and config dict"""
    def modify_optimizer(optimizer, setting):
        for param_group in optimizer.param_groups:
            for key in param_group.keys():
                if key in setting:
                    logging.debug('OPTIMIZER - setting %s = %s' %
                                  (key, setting[key]))
                    param_group[key] = setting[key]
        return optimizer

    if callable(config):
        optimizer = modify_optimizer(optimizer, config(epoch))
    else:
        # replay all settings up to and including the current epoch,
        # so each setting stays in effect ("sticky") until overridden
        for e in range(epoch + 1):
            if e in config:
                optimizer = modify_optimizer(optimizer, config[e])
    return optimizer
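
# Example usage (a sketch; the schedule values are illustrative). A dict maps
# epoch numbers to param_group settings:
#
#   regime = {0: {'lr': 1e-1, 'momentum': 0.9},
#             30: {'lr': 1e-2},
#             60: {'lr': 1e-3}}
#   for epoch in range(90):
#       optimizer = adjust_optimizer(optimizer, epoch, regime)
#       ...  # train for one epoch
#
# A callable config is also accepted, e.g. an exponential decay:
#
#   optimizer = adjust_optimizer(optimizer, epoch,
#                                lambda e: {'lr': 0.1 * (0.95 ** e)})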

class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        # `n` is the number of samples `val` was averaged over, so the
        # running average weights each update by its sample count
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
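
# Example usage (a minimal sketch; `loader`, `model` and `criterion` are
# assumed to exist):
#
#   losses = AverageMeter()
#   for inputs, targets in loader:
#       loss = criterion(model(inputs), targets)
#       losses.update(loss.item(), inputs.size(0))  # weight by batch size
#   print('average loss: %.4f' % losses.avg)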