# app.py — Gradio chat demo for a Hugging Face Space (commit e4ecada, ~1.41 kB).
import gradio as gr
import requests
import os
# Hugging Face token from the environment (None when unset).
# NOTE(review): read here but never attached to any request below — if the
# generation endpoint requires auth, this is dead config; confirm upstream.
ACCESS_TOKEN = os.getenv("HF_TOKEN")
def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
    """Send the conversation to the remote generation endpoint and return the reply.

    Args:
        message: Latest user message.
        history: Prior ``(user, assistant)`` exchanges; empty entries are skipped.
        system_message: System prompt placed first in the conversation.
        max_tokens: Forwarded verbatim as the ``max_tokens`` field of the request.
        temperature: Sampling temperature, forwarded verbatim.
        top_p: Nucleus-sampling parameter, forwarded verbatim.

    Returns:
        The generated text, or ``''`` when the JSON response lacks
        ``'generated_text'`` (matches the original fallback behavior).
    """
    # Assemble the full conversation as role-tagged messages.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # BUG FIX: the original built `messages` and then discarded it, sending only
    # the bare latest message — the system prompt and chat history never reached
    # the model. Serialize the whole conversation into the prompt instead.
    prompt = "\n".join(f"{m['role']}: {m['content']}" for m in messages)

    data = {
        "model": "command-r-plus:104b-fp16",
        "prompt": prompt,
        "max_tokens": max_tokens,
        "temperature": temperature,
        "top_p": top_p,
    }
    # Timeout added: without one, a dead endpoint hangs the Gradio worker forever.
    # NOTE(review): ACCESS_TOKEN (module level) is never sent; if the endpoint
    # requires an Authorization header, add it here — confirm with the server.
    response = requests.post("http://hugpu.ai:7877/api/generate", json=data, timeout=120)
    # Fall back to '' when the key is absent, as the original did.
    return response.json().get('generated_text', '')
# Pre-built chat display, 600 px tall.
# NOTE(review): defined but not referenced by `chat_interface` below — looks
# like a leftover from an earlier UI; confirm before removing.
chatbot = gr.Chatbot(height=600)
def chat_function(message):
    """Return a placeholder reply that echoes *message* behind a fixed prefix."""
    reply = "Response to: " + message
    return reply
# Minimal Gradio UI around the placeholder handler.
# NOTE(review): this wires up `chat_function`, not `respond`/`chatbot` above —
# the API-backed chat path is never exposed; confirm which UI is intended.
chat_interface = gr.Interface(
    fn=chat_function,
    inputs=gr.Textbox(),
    outputs=gr.Textbox(),
    title="Simple Chatbot",
    description="This is a simple chatbot example."
)

# Launch the app only when run as a script, not on import.
if __name__ == "__main__":
    chat_interface.launch()