Update app.py
app.py CHANGED
@@ -24,6 +24,7 @@ from fastapi.staticfiles import StaticFiles
 
 import logging
 
+
 app = FastAPI()
 app.mount("/static", StaticFiles(directory="static"), name="static")
 templates = Jinja2Templates(directory="templates")
@@ -59,7 +60,7 @@ import soundfile as sf
 from piper_phonemize import phonemize_codepoints, phonemize_espeak, tashkeel_run
 
 
-
+_LOGGER = logging.getLogger("piper_train.infer_onnx")
 import os
 #if not os.path.exists("./content/piper/src/python/lng"):
 #  import subprocess
@@ -73,7 +74,7 @@ import configparser
 
 class Translator:
     def __init__(self):
-        self.configs = {
+        self.configs = {}
 
     def load_language(self, language_name):
         if language_name not in self.configs:
@@ -264,14 +265,14 @@ async def download_file(fileId: str):
         return {"error": "File not found"}
 
 def load_onnx(model, sess_options, providers = ["CPUExecutionProvider"]):
-
+    _LOGGER.debug("Loading model from %s", model)
     config = load_config(model)
     model = onnxruntime.InferenceSession(
         str(model),
         sess_options=sess_options,
         providers= providers
     )
-
+    _LOGGER.info("Loaded model from %s", model)
    return model, config
 
 def load_config(model):
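
The commit introduces a module-level logger named "piper_train.infer_onnx" and calls it inside load_onnx. A minimal sketch of how those added lines behave once logging is configured; the logger name and log calls come from the diff, while the basicConfig call and the model path are assumptions for illustration:

import logging

# Assumed configuration for illustration; the Space may configure logging
# differently. With level=DEBUG, both added log lines become visible.
logging.basicConfig(level=logging.DEBUG)

_LOGGER = logging.getLogger("piper_train.infer_onnx")
_LOGGER.debug("Loading model from %s", "model.onnx")  # hypothetical path
_LOGGER.info("Loaded model from %s", "model.onnx")    # hypothetical path

Because the messages use %s placeholders rather than f-strings, the string formatting is deferred until a handler actually emits the record, which is the idiomatic pattern for the standard logging module.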
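
For context, a hedged sketch of calling the patched load_onnx; "model.onnx" is a placeholder path, and load_config (defined further down in app.py, not shown in full here) is assumed to locate the model's companion config:

import onnxruntime

# Hypothetical usage; assumes app.py's load_onnx is in scope and that a
# model file plus its config exist at the given path.
sess_options = onnxruntime.SessionOptions()
model, config = load_onnx("model.onnx", sess_options,
                          providers=["CPUExecutionProvider"])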