-
Notifications
You must be signed in to change notification settings - Fork 24
/
Copy pathhparams.py
101 lines (84 loc) · 2.15 KB
/
hparams.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
from glob import glob
import os
def get_image_list(data_root, split):
    """Return the list of sample paths for *split*.

    Reads ``filelists/<split>.txt`` (one relative path per line; anything
    after the first whitespace on a line is ignored) and joins each entry
    onto ``data_root``.
    """
    with open('filelists/{}.txt'.format(split)) as listing:
        entries = (raw.strip() for raw in listing)
        # Keep only the first whitespace-separated token of each line.
        return [os.path.join(data_root, e.split()[0] if ' ' in e else e)
                for e in entries]
class HParams:
    """Simple attribute-style hyperparameter container.

    Keyword arguments passed to the constructor become readable as
    attributes (``hp.num_mels``) via ``__getattr__``. Values can be
    changed with :meth:`set_hparam` and the full mapping retrieved with
    :meth:`values`.
    """

    def __init__(self, **kwargs):
        # Single backing dict; must be assigned before any attribute
        # read, otherwise __getattr__ would recurse on `self.data`.
        self.data = {}
        for key, value in kwargs.items():
            self.data[key] = value

    def __getattr__(self, key):
        # Only called for names not found normally; unknown hyperparameter
        # names surface as AttributeError like a regular attribute would.
        if key not in self.data:
            raise AttributeError("'HParams' object has no attribute %s" % key)
        return self.data[key]

    def set_hparam(self, key, value):
        """Set (or override) a single hyperparameter."""
        self.data[key] = value

    def values(self):
        """Return the underlying name -> value dict.

        Added so helpers such as ``hparams_debug_string`` (which calls
        ``hparams.values()`` and indexes the result by name) work; the
        original class lacked this method and raised AttributeError.
        """
        return self.data
# Global hyperparameter registry used across preprocessing and training.
hparams = HParams(
    # --- Mel-spectrogram / audio front-end ---
    num_mels=80,
    rescale=True,
    rescaling_max=0.9,
    use_lws=False,
    n_fft=800,
    hop_size=200,
    win_size=800,
    sample_rate=16000,
    frame_shift_ms=None,
    power=1.5,
    griffin_lim_iters=60,
    signal_normalization=True,
    allow_clipping_in_normalization=True,
    symmetric_mels=True,
    max_abs_value=4.,
    preemphasize=True,
    preemphasis=0.97,
    # Limits
    min_level_db=-100,
    ref_level_db=20,
    fmin=55,
    fmax=7600,
    # --- Training hyperparameters ---
    img_size=128,  # other sizes tried: 512, 256, 96
    fps=25,
    batch_size=24,  # smaller values (2, 3) were used for debugging
    initial_learning_rate=1e-4,
    nepochs=2000000000000000000,
    disc_initial_learning_rate=5e-4,
    eval_interval=3000,
    checkpoint_interval=3000,
    # --- Loss weights ---
    l1_wt=10.,
    mem_wt=0.2,
    vv_wt=0.2,
    av_wt=0.2,
    disc_wt=0.2,
    num_workers=1,  # 16 was used previously
    m_slot=96,
    min=0,
    max=0.7,
    syncnet_wt=0.03,  # is initially zero, will be set automatically to 0.03 later. Leads to faster convergence.
    # --- SyncNet pretraining ---
    syncnet_batch_size=64,  # 256 was used previously
    save_optimizer_state=True,
    syncnet_lr=1e-4,  # 1e-3 was used previously
    syncnet_eval_interval=10000,  # 5000 was used previously
    syncnet_checkpoint_interval=10000,  # 5000 was used previously
)
def hparams_debug_string(hp=None):
    """Return a human-readable dump of the hyperparameters.

    Args:
        hp: optional HParams-like object (anything exposing a ``.data``
            dict). Defaults to the module-level ``hparams``.

    Bug fix: the original called ``hparams.values()``, a method the
    HParams class does not define (and even a dict's ``.values()`` view
    could not be indexed by name) — it raised AttributeError. We read
    the backing ``.data`` dict directly instead.
    """
    values = (hparams if hp is None else hp).data
    # "sentences" is excluded to keep the dump short, matching the
    # original Tacotron-style helper this derives from.
    lines = [" %s: %s" % (name, values[name]) for name in sorted(values) if name != "sentences"]
    return "Hyperparameters:\n" + "\n".join(lines)