Spaces:
Running
on
Zero
Running
on
Zero
lihaoxin2020
committed on
Commit
•
364e345
1
Parent(s):
11d44ce
bind cuda
Browse files
app.py
CHANGED
@@ -19,15 +19,15 @@ CHUNK_LENGTH_S = 15
|
|
19 |
FILE_LIMIT_MB = 1000
|
20 |
YT_LENGTH_LIMIT_S = 3600 # limit to 1 hour YouTube files
|
21 |
# device setting
|
22 |
-
if torch.cuda.is_available():
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
else:
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
# define the pipeline
|
32 |
pipe = pipeline(
|
33 |
model=MODEL_NAME,
|
@@ -167,4 +167,4 @@ yt_transcribe = gr.Interface(
|
|
167 |
with demo:
|
168 |
gr.TabbedInterface([mf_transcribe, file_transcribe, yt_transcribe], ["Microphone", "Audio file", "YouTube"])
|
169 |
|
170 |
-
demo.launch()
|
|
|
19 |
FILE_LIMIT_MB = 1000
|
20 |
YT_LENGTH_LIMIT_S = 3600 # limit to 1 hour YouTube files
|
21 |
# device setting
|
22 |
+
# if torch.cuda.is_available():
|
23 |
+
# torch_dtype = torch.bfloat16
|
24 |
+
# device = "cuda:0"
|
25 |
+
# model_kwargs = {'attn_implementation': 'sdpa'}
|
26 |
+
# else:
|
27 |
+
# torch_dtype = torch.float32
|
28 |
+
# device = "cpu"
|
29 |
+
# model_kwargs = {}
|
30 |
+
device = "cuda"
|
31 |
# define the pipeline
|
32 |
pipe = pipeline(
|
33 |
model=MODEL_NAME,
|
|
|
167 |
with demo:
|
168 |
gr.TabbedInterface([mf_transcribe, file_transcribe, yt_transcribe], ["Microphone", "Audio file", "YouTube"])
|
169 |
|
170 |
+
demo.queue(api_open=False, default_concurrency_limit=40).launch(show_api=False, show_error=True)
|