style_gen.py
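"""
Generate a "style vector" for every audio file listed in the training and
validation filelists, using a pretrained pyannote.audio speaker-embedding
model, and save it next to each wav as `<wav_path>.npy`. Entries whose
vectors contain NaN are removed from the filelists.
"""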
import argparse
from concurrent.futures import ThreadPoolExecutor
from typing import Any

import numpy as np
import torch
from numpy.typing import NDArray
from pyannote.audio import Inference, Model
from tqdm import tqdm

from config import get_config
from style_bert_vits2.logging import logger
from style_bert_vits2.models.hyper_parameters import HyperParameters
from style_bert_vits2.utils.stdout_wrapper import SAFE_STDOUT
config = get_config()
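
# Pretrained speaker-embedding model from pyannote.audio; the whole-file
# embedding (window="whole") is used as the style vector for each wav.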
model = Model.from_pretrained("pyannote/wespeaker-voxceleb-resnet34-LM")
inference = Inference(model, window="whole")
device = torch.device(config.style_gen_config.device)
inference.to(device)


class NaNValueError(ValueError):
    """Custom exception raised when a NaN value is found in a style vector."""


# Keep this as a short standalone function so it can be imported at inference time
def get_style_vector(wav_path: str) -> NDArray[Any]:
    return inference(wav_path)  # type: ignore
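
# Minimal usage sketch (assumes this file is importable as `style_gen`;
# `your_audio.wav` is a placeholder path):
#
#     from style_gen import get_style_vector
#     style_vec = get_style_vector("your_audio.wav")  # NDArray embedding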


def save_style_vector(wav_path: str):
    try:
        style_vec = get_style_vector(wav_path)
    except Exception as e:
        print("\n")
        logger.error(f"Error occurred with file: {wav_path}, Details:\n{e}\n")
        raise
    # NaN values in the style vector are harmful downstream, so check for them
    if np.isnan(style_vec).any():
        print("\n")
        logger.warning(f"NaN value found in style vector: {wav_path}")
        raise NaNValueError(f"NaN value found in style vector: {wav_path}")
    np.save(f"{wav_path}.npy", style_vec)  # `test.wav` -> `test.wav.npy`


def process_line(line: str):
    # The wav path is the first `|`-separated field of a filelist line
    wav_path = line.split("|")[0]
    try:
        save_style_vector(wav_path)
        return line, None
    except NaNValueError:
        return line, "nan_error"
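

# Entry point: parse CLI arguments, load hyperparameters, extract style vectors
# for the training and validation filelists in parallel, then rewrite both
# filelists without the entries whose vectors contained NaN.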
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c", "--config", type=str, default=config.style_gen_config.config_path
    )
    parser.add_argument(
        "--num_processes", type=int, default=config.style_gen_config.num_processes
    )
    args, _ = parser.parse_known_args()
    config_path: str = args.config
    num_processes: int = args.num_processes

    hps = HyperParameters.load_from_json(config_path)
    device = config.style_gen_config.device

    # Process the training filelist
    training_lines: list[str] = []
    with open(hps.data.training_files, encoding="utf-8") as f:
        training_lines.extend(f.readlines())
    with ThreadPoolExecutor(max_workers=num_processes) as executor:
        training_results = list(
            tqdm(
                executor.map(process_line, training_lines),
                total=len(training_lines),
                file=SAFE_STDOUT,
            )
        )
    ok_training_lines = [line for line, error in training_results if error is None]
    nan_training_lines = [
        line for line, error in training_results if error == "nan_error"
    ]
    if nan_training_lines:
        nan_files = [line.split("|")[0] for line in nan_training_lines]
        logger.warning(
            f"Found NaN value in {len(nan_training_lines)} files: {nan_files}, so they will be deleted from training data."
        )

    # Same procedure for the validation filelist
    val_lines: list[str] = []
    with open(hps.data.validation_files, encoding="utf-8") as f:
        val_lines.extend(f.readlines())
    with ThreadPoolExecutor(max_workers=num_processes) as executor:
        val_results = list(
            tqdm(
                executor.map(process_line, val_lines),
                total=len(val_lines),
                file=SAFE_STDOUT,
            )
        )
    ok_val_lines = [line for line, error in val_results if error is None]
    nan_val_lines = [line for line, error in val_results if error == "nan_error"]
    if nan_val_lines:
        nan_files = [line.split("|")[0] for line in nan_val_lines]
        logger.warning(
            f"Found NaN value in {len(nan_val_lines)} files: {nan_files}, so they will be deleted from validation data."
        )

    # Rewrite both filelists, keeping only entries whose style vectors were saved successfully
    with open(hps.data.training_files, "w", encoding="utf-8") as f:
        f.writelines(ok_training_lines)
    with open(hps.data.validation_files, "w", encoding="utf-8") as f:
        f.writelines(ok_val_lines)

    ok_num = len(ok_training_lines) + len(ok_val_lines)
    logger.info(f"Finished generating style vectors! total: {ok_num} npy files.")
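
# Typical invocation (paths and process count are illustrative; defaults come
# from the project config):
#
#     python style_gen.py --config path/to/config.json --num_processes 4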