import gradio as gr
from transformers import pipeline

# Load your Hugging Face model
# pipe = pipeline("text-classification", model="davidgaofc/TechDebtClassifier")
# Replace 'gpt2' with your model
# Temporary stand-in for the commented-out pipeline: it echoes the input
# back in the [{'generated_text': ...}] shape that predict() expects.
def pipe(text, **kwargs):
    return [{"generated_text": text}]

def predict(input_text):
    # Generate output using the model
    output = pipe(input_text, max_length=2000)  # Adjust parameters as needed
    return output[0]['generated_text']

# Create the Gradio interface
interface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(lines=2, placeholder="Type something here..."),  # gr.inputs.Textbox is deprecated; use gr.Textbox
    outputs='text',
    title="Hugging Face Model Inference",
    description="Type in some text and see how the model responds!",
)

if __name__ == "__main__":
    interface.launch()
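
To serve the real model instead of the echo stub, the commented-out pipeline line can be restored. A minimal sketch, assuming davidgaofc/TechDebtClassifier loads as a standard text-classification pipeline (which returns label/score dictionaries rather than a 'generated_text' field, so predict must read those keys):

from transformers import pipeline

# Load the classifier referenced in the comment above (assumed to be a
# standard text-classification checkpoint on the Hugging Face Hub).
pipe = pipeline("text-classification", model="davidgaofc/TechDebtClassifier")

def predict(input_text):
    # text-classification pipelines return [{"label": ..., "score": ...}]
    result = pipe(input_text)[0]
    return f"{result['label']} (score: {result['score']:.3f})"

The rest of the Gradio interface code stays the same; only predict and the pipe definition change.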