import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the access token and model id from the configuration file
with open("config.txt", "r") as f:
    lines = f.readlines()

config = {}
for line in lines:
    if "=" in line:
        # Split on the first "=" and strip whitespace so both
        # "key=value" and "key = value" entries are accepted
        key, value = line.strip().split("=", 1)
        config[key.strip()] = value.strip()

model_name = config.get("repo_id", "")
token = config.get("token", "")  # only needed for private/gated repositories
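
# A minimal sketch of the config.txt layout this parser expects
# (the token value below is a placeholder, not a real credential):
#
#   repo_id = pp3232133/pp3232133-distilgpt2-wikitext2
#   token = hf_xxxxxxxxxxxxxxxxxxxxxxxx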
# Load the custom chatbot model from Hugging Face
if model_name == "pp3232133/pp3232133-distilgpt2-wikitext2":
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    # Function handling input and output for the Gradio interface
    def chatbot_interface(input_text):
        input_ids = tokenizer.encode(input_text, return_tensors="pt")
        # pad_token_id avoids the missing-pad-token warning for GPT-2-style models
        chatbot_output = model.generate(
            input_ids, max_length=100, pad_token_id=tokenizer.eos_token_id
        )[0]
        response = tokenizer.decode(chatbot_output, skip_special_tokens=True)
        return response
    # Gradio interface for the chatbot
    iface = gr.Interface(
        fn=chatbot_interface,
        inputs="text",
        outputs="text",
        title="Chatbot",
        description="Custom chatbot based on your Hugging Face model. Start typing to chat with the bot.",
        theme="compact",
    )

    # Launch the interface
    iface.launch()
else:
    print("Could not find the model name in the configuration file.")