-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathutils.py
90 lines (69 loc) · 2.43 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import os
import shutil
import json
import torch
import numpy as np
from scipy.stats import norm
from matplotlib import pyplot as plt
class AverageMeter(object):
    """Tracks the latest value and a running (count-weighted) average.

    `name` and `fmt` (a format spec like ':f' or ':.4e') are only used
    by __str__ when rendering the meter for progress output.
    """

    def __init__(self, name="", fmt=":f"):
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        """Zero out all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count

    def __str__(self):
        # Build e.g. "{name} {val:f} ({avg:f})" from the stored format spec.
        template = "".join(["{name} {val", self.fmt, "} ({avg", self.fmt, "})"])
        return template.format(**self.__dict__)
class ProgressMeter(object):
    """Renders a "[batch/total]" header followed by each meter's summary.

    `meters` is an iterable of objects with a meaningful __str__
    (e.g. AverageMeter instances); `prefix` is prepended to every line.
    """

    def __init__(self, num_batches, meters, prefix=""):
        self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
        self.meters = meters
        self.prefix = prefix

    def display(self, batch):
        """Print one tab-separated progress line for the given batch index."""
        entries = [self.prefix + self.batch_fmtstr.format(batch)]
        entries += [str(meter) for meter in self.meters]
        print("\t".join(entries))

    def _get_batch_fmtstr(self, num_batches):
        """Return a "[{:Nd}/total]" template padded to the total's width."""
        # Fixed: original computed len(str(num_batches // 1)); the `// 1`
        # was a no-op integer division and has been removed.
        num_digits = len(str(num_batches))
        fmt = "{:" + str(num_digits) + "d}"
        return "[" + fmt + "/" + fmt.format(num_batches) + "]"
def save_config(opt, file_path):
    """Serialize the attribute dict of `opt` to `file_path` as indented JSON.

    `opt` is typically an argparse.Namespace; every attribute must be
    JSON-serializable.
    """
    with open(file_path, "w") as out:
        json.dump(vars(opt), out, indent=2)
def load_config(opt, file_path):
with open(file_path, "r") as f:
opt.__dict__ = json.load(f)
def save_checkpoint(state, is_best, filename="checkpoint.pth.tar", prefix=""):
    """Write `state` to disk via torch.save, retrying on unstable I/O.

    When `is_best` is true the checkpoint is written to
    'checkpoint_dev_best.pth.tar' instead of `filename`. Up to 15
    attempts are made; if all fail, the last IOError is re-raised.
    """
    remaining = 15
    last_error = None
    while remaining:
        try:
            if is_best:
                # Best-on-dev checkpoints always go to a fixed name.
                filename = 'checkpoint_dev_best.pth.tar'
            torch.save(state, prefix + filename)
        except IOError as exc:
            last_error = exc
            remaining -= 1
        else:
            # Save succeeded — leave the retry loop immediately, so the
            # failure message below is only ever printed after an IOError.
            break
        print("model save {} failed, remaining {} trials".format(filename, remaining))
    if not remaining:
        raise last_error
def adjust_learning_rate(opt, optimizer, epoch):
    """Apply step decay: LR = opt.learning_rate * 0.1^(epoch // opt.lr_update).

    Every param group of `optimizer` is set to the same decayed rate,
    computed fresh from the initial `opt.learning_rate` each call.
    """
    decay_factor = 0.1 ** (epoch // opt.lr_update)
    new_lr = opt.learning_rate * decay_factor
    for group in optimizer.param_groups:
        group["lr"] = new_lr