import gradio as gr
import os
from openai import OpenAI

# Initialize the OpenAI client, pointed at the RunPod vLLM endpoint
client = OpenAI(
    api_key=os.environ.get("RUNPOD_API_KEY"),
    base_url="https://api.runpod.ai/v2/vllm-k0g4c60zor9xuu/openai/v1",
)

def runpod_chat(question, history=None):
    if history is None:
        history = []  # Ensure history starts as an empty list if none is provided

    # Add the system role description at the beginning of the session
    if not history:
        history.append({
            "role": "system",
            "content": (
                "You are a history assistant that provides the best possible answers "
                "to any questions asked about American History. Be helpful and specific, "
                "providing any detailed nuance needed to have a full understanding of the question."
            ),
        })

    history.append({"role": "user", "content": question})

    response_stream = client.chat.completions.create(
        model="ambrosfitz/llama-3-history",
        messages=history,
        temperature=0,
        max_tokens=150,
        stream=True,
    )

    # Accumulate the streamed chunks into the full response before displaying it
    full_response = "HistoryBot: "
    for chunk in response_stream:
        part = chunk.choices[0].delta.content or ""
        full_response += part

    # Append the full response to history once complete
    history.append({"role": "assistant", "content": full_response})

    return full_response, history  # Return the response and updated history to maintain state

# Set up the Gradio interface; the input order matches the function signature (question, history)
iface = gr.Interface(
    fn=runpod_chat,
    inputs=[gr.Textbox(label="Enter your question:"), gr.State()],
    outputs=[gr.Textbox(label="Responses"), gr.State()],
    title="HistoryBot Chat",
    description=(
        "Interact with HistoryBot, a specialized assistant for American History. "
        "Ask any historical questions to get detailed and nuanced answers."
    ),
)

iface.launch()