from transformers import pipeline
import gradio as gr

# Load the fine-tuned Whisper checkpoint as a speech-recognition pipeline.
pipe = pipeline(model="Ranjit/Whisper_Small_Odia_10k_steps")  # change to "your-username/the-name-you-picked"

def transcribe(audio):
    # The pipeline accepts an audio file path and returns a dict containing the transcription.
    text = pipe(audio)["text"]
    return text

iface = gr.Interface(
    fn=transcribe,
    inputs=gr.Audio(source="microphone", type="filepath"),  # note: Gradio 4+ renamed this parameter to sources=["microphone"]
    outputs="text",
    title="Whisper Small Odia",
    description="Realtime demo for Odia speech recognition using a fine-tuned Whisper small model.",
)

iface.launch()
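
For a quick local check without launching the Gradio interface, the same pipeline can be called directly on an audio file. The sketch below assumes a hypothetical recording named sample.wav in the working directory (not a file shipped with this Space).

from transformers import pipeline

# Minimal sketch: run the same checkpoint on a local recording.
# "sample.wav" is a placeholder path, replace it with your own audio file.
pipe = pipeline(model="Ranjit/Whisper_Small_Odia_10k_steps")
result = pipe("sample.wav")
print(result["text"])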