Spaces: Runtime error
fix config path
app.py
CHANGED
@@ -4,6 +4,7 @@ import torch
 from pipelines.pipeline import InferencePipeline
 import time

+
 class ChaplinGradio:
     def __init__(self):
         self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -17,17 +18,11 @@
         self.last_frame_time = time.time()

     def load_models(self):
-        """Load models using the InferencePipeline with
-
-            "model": {
-                "name": "chaplin_vsr",
-                "weights": "models/chaplin_vsr.pth",
-                "detector": "mediapipe"
-            }
-        }
+        """Load models using the InferencePipeline with LRS3 config"""
+        config_path = "configs/LRS3_V_WER19.1.ini"

         self.vsr_model = InferencePipeline(
-
+            config_path,
             device=self.device,
             detector="mediapipe",
             face_track=True