Update infer/modules/vc/pipeline.py

infer/modules/vc/pipeline.py  +15 -1
@@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)
 
 from functools import lru_cache
 from time import time as ttime
-
+from torchfcpe import spawn_bundled_infer_model
 import faiss
 import librosa
 import numpy as np
@@ -61,6 +61,18 @@ def change_rms(data1, sr1, data2, sr2, rate): # 1是输入音频,2是输出
     ).numpy()
     return data2
 
+def get_torchfcpe(self, x, sr, f0_min, f0_max, p_len, *args, **kwargs):
+    self.model_torchfcpe = spawn_bundled_infer_model(device=self.device)
+    f0 = self.model_torchfcpe.infer(
+        torch.from_numpy(x).float().unsqueeze(0).unsqueeze(-1).to(self.device),
+        sr=sr,
+        decoder_mode="local_argmax",
+        threshold=0.006,
+        f0_min=f0_min,
+        f0_max=f0_max,
+        output_interp_target_length=p_len
+    )
+    return f0.squeeze().cpu().numpy()
 
 class Pipeline(object):
     def __init__(self, tgt_sr, config):
@@ -139,6 +151,8 @@ class Pipeline(object):
             f0 = torchcrepe.filter.mean(f0, 3)
             f0[pd < 0.1] = 0
             f0 = f0[0].cpu().numpy()
+        elif f0_method == "torchfcpe":
+            f0 = self.get_torchfcpe(x, self.sr, f0_min, f0_max, p_len)
         elif f0_method == "rmvpe":
             if not hasattr(self, "model_rmvpe"):
                 from infer.lib.rmvpe import RMVPE
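
For reference, below is a minimal standalone sketch of the torchfcpe calls this patch relies on, mirroring the arguments used in get_torchfcpe above. The wav path, the 16 kHz sample rate, the 160-sample hop used to size the output, and the 50-1100 Hz pitch range are illustrative assumptions rather than part of the commit; decoder_mode="local_argmax" and threshold=0.006 are taken from the diff.

    # Sketch only: exercises the same torchfcpe API the patch calls.
    # Assumed, not from the diff: "input.wav", sr=16000, the 160-sample
    # hop behind p_len, and the 50-1100 Hz pitch range.
    import librosa
    import torch
    from torchfcpe import spawn_bundled_infer_model

    device = "cuda" if torch.cuda.is_available() else "cpu"

    audio, sr = librosa.load("input.wav", sr=16000, mono=True)
    p_len = audio.shape[0] // 160  # number of f0 frames to interpolate to

    model = spawn_bundled_infer_model(device=device)
    f0 = model.infer(
        torch.from_numpy(audio).float().unsqueeze(0).unsqueeze(-1).to(device),
        sr=sr,
        decoder_mode="local_argmax",
        threshold=0.006,
        f0_min=50,
        f0_max=1100,
        output_interp_target_length=p_len,
    )
    f0 = f0.squeeze().cpu().numpy()  # 1-D array of length p_len, in Hz

One design difference from the existing branches: get_torchfcpe assigns self.model_torchfcpe unconditionally, so the bundled model is re-created on every call, whereas the rmvpe branch guards model creation with hasattr.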