"""
Reference from Project Page : https://github.com/zzangjinsun/NLSPN_ECCV20
======================================================================
Some of useful functions are defined here.
"""
import os
import shutil
import torch.optim as optim
import torch.optim.lr_scheduler as lrs


class LRFactor:
    """Piece-wise constant learning-rate factor, used with lrs.LambdaLR."""

    def __init__(self, decay, gamma):
        assert len(decay) == len(gamma)
        self.decay = decay
        self.gamma = gamma

    def get_factor(self, epoch):
        # Return the factor of the first decay milestone the epoch has not
        # reached yet; after the last milestone, keep the final factor.
        for (d, g) in zip(self.decay, self.gamma):
            if epoch < d:
                return g
        return self.gamma[-1]
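
# For example, with decay=[10, 15, 20] and gamma=[1.0, 0.2, 0.04], get_factor
# returns 1.0 for epochs 0-9, 0.2 for epochs 10-14, and 0.04 from epoch 15 on;
# LambdaLR multiplies the base learning rate by this factor at every step.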


def convert_str_to_num(val, t):
    # Parse a (possibly quoted) comma-separated string into a list of numbers.
    val = val.replace('\'', '')
    val = val.replace('\"', '')

    if t == 'int':
        val = [int(v) for v in val.split(',')]
    elif t == 'float':
        val = [float(v) for v in val.split(',')]
    else:
        raise NotImplementedError

    return val
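
# For example, convert_str_to_num("10,15,20", 'int') returns [10, 15, 20] and
# convert_str_to_num("1.0,0.2,0.04", 'float') returns [1.0, 0.2, 0.04].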


def make_optimizer_scheduler(args, target):
    # optimizer
    if hasattr(target, 'param_groups'):
        # NOTE : lr for each group must be set by the network
        trainable = target.param_groups
    else:
        trainable = filter(lambda x: x.requires_grad, target.parameters())

    kwargs_optimizer = {'lr': args.lr, 'weight_decay': args.weight_decay}

    if args.optimizer == 'SGD':
        optimizer_class = optim.SGD
        kwargs_optimizer['momentum'] = args.momentum
    elif args.optimizer == 'ADAM':
        optimizer_class = optim.Adam
        kwargs_optimizer['betas'] = args.betas
        kwargs_optimizer['eps'] = args.epsilon
    elif args.optimizer == 'ADAMW':
        optimizer_class = optim.AdamW
        kwargs_optimizer['betas'] = args.betas
        kwargs_optimizer['eps'] = args.epsilon
    elif args.optimizer == 'RMSprop':
        optimizer_class = optim.RMSprop
        kwargs_optimizer['eps'] = args.epsilon
    else:
        raise NotImplementedError

    optimizer = optimizer_class(trainable, **kwargs_optimizer)

    # scheduler
    decay = convert_str_to_num(args.decay, 'int')
    gamma = convert_str_to_num(args.gamma, 'float')

    assert len(decay) == len(gamma), 'decay and gamma must have same length'

    calculator = LRFactor(decay, gamma)
    scheduler = lrs.LambdaLR(optimizer, calculator.get_factor)
    # scheduler = lrs.CosineAnnealingLR(optimizer, T_max=args.epochs, eta_min=gamma[-1]*args.lr)

    return optimizer, scheduler


def backup_source_code(backup_directory):
    ignore_hidden = shutil.ignore_patterns(
        ".", "..", ".git*", "*pycache*", "*build", "*.fuse*", "*_drive_*",
        "*pretrained*")

    if os.path.exists(backup_directory):
        shutil.rmtree(backup_directory)

    shutil.copytree('.', backup_directory, ignore=ignore_hidden)
    os.system("chmod -R g+w {}".format(backup_directory))
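

if __name__ == '__main__':
    # Usage sketch (illustrative only): a toy model and a hypothetical args
    # Namespace carrying the fields make_optimizer_scheduler reads above.
    # The real project builds `args` with argparse elsewhere.
    import argparse

    import torch.nn as nn

    args = argparse.Namespace(
        lr=1e-3, weight_decay=0.0, optimizer='ADAM',
        betas=(0.9, 0.999), epsilon=1e-8,
        decay='10,15,20', gamma='1.0,0.2,0.04')

    model = nn.Linear(4, 1)
    optimizer, scheduler = make_optimizer_scheduler(args, model)

    for epoch in range(20):
        # ... one training epoch would go here ...
        scheduler.step()
        print(epoch, optimizer.param_groups[0]['lr'])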