import streamlit as st

from langchain.chat_models import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage
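# NOTE: on newer LangChain releases these classes live in split packages
# (langchain_openai.ChatOpenAI, langchain_core.messages.AIMessage, etc.);
# the legacy paths above still work on older versions but emit
# deprecation warnings.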


def get_deepseek_llm(api_key: str):
    """
    DeepSeek chat model, reached through DeepSeek's OpenAI-compatible API.
    """
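    # Minimal sketch: DeepSeek exposes an OpenAI-compatible endpoint, so we
    # reuse ChatOpenAI and point it at DeepSeek. The base URL and model name
    # below are assumptions; verify them against DeepSeek's current docs.
    return ChatOpenAI(
        temperature=0.7,
        openai_api_key=api_key,
        openai_api_base="https://api.deepseek.com/v1",  # assumed endpoint
        model_name="deepseek-chat",                     # assumed model name
    )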


def get_gemini_llm(api_key: str):
    """
    Google Gemini via the langchain-google-genai integration package.
    """
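    # Minimal sketch, assuming langchain-google-genai is installed
    # (pip install langchain-google-genai). Imported lazily so the app
    # still starts when the dependency is absent. "gemini-pro" is an
    # assumed model name; use whichever Gemini model you have access to.
    try:
        from langchain_google_genai import ChatGoogleGenerativeAI
    except ImportError:
        st.error("Install langchain-google-genai to use Gemini.")
        return None
    return ChatGoogleGenerativeAI(
        model="gemini-pro",  # assumed model name
        google_api_key=api_key,
        temperature=0.7,
    )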


def get_ollama_llm():
    """
    Local Ollama server; defaults to the standard port 11434.
    """
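    # Minimal sketch, assuming an Ollama server on its default endpoint and
    # that a model has already been pulled ("llama2" here is an assumption;
    # substitute any model you have pulled locally).
    try:
        from langchain.chat_models import ChatOllama
    except ImportError:
        st.error("This LangChain version does not ship ChatOllama.")
        return None
    return ChatOllama(
        base_url="http://localhost:11434",  # default Ollama endpoint
        model="llama2",                     # assumed model name
        temperature=0.7,
    )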


def get_claude_llm(api_key: str):
    """
    Anthropic's Claude.
    """
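    # Minimal sketch using the legacy ChatAnthropic class that matches this
    # file's import style; newer code would use the langchain-anthropic
    # package instead. "claude-2" is an assumed model name; swap in the
    # Claude model you have access to.
    try:
        from langchain.chat_models import ChatAnthropic
    except ImportError:
        st.error("This LangChain version does not ship ChatAnthropic.")
        return None
    return ChatAnthropic(
        anthropic_api_key=api_key,
        model="claude-2",  # assumed model name
        temperature=0.7,
    )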


def load_llm(selected_model: str, api_key: str):
    """
    Returns the LLM object for the user's selection, or None when the
    provider cannot be configured.
    """
    if selected_model == "OpenAI":
        llm = ChatOpenAI(temperature=0.7, openai_api_key=api_key)
    elif selected_model == "Claude":
        llm = get_claude_llm(api_key)
    elif selected_model == "Gemini":
        llm = get_gemini_llm(api_key)
    elif selected_model == "DeepSeek":
        llm = get_deepseek_llm(api_key)
    elif selected_model == "Ollama (local)":
        llm = get_ollama_llm()
    else:
        llm = None
    return llm


def initialize_session_state():
    """
    Initialize the session state for storing conversation history.
    """
    if "messages" not in st.session_state:
        st.session_state["messages"] = []


def main():
    st.title("Multi-LLM Chat App")

    # Sidebar: model selection and credentials.
    st.sidebar.header("Configuration")
    selected_model = st.sidebar.selectbox(
        "Select an LLM",
        ["OpenAI", "Claude", "Gemini", "DeepSeek", "Ollama (local)"],
    )
    api_key = st.sidebar.text_input("API Key (if needed)", type="password")

    st.sidebar.write("---")
    if st.sidebar.button("Clear Chat"):
        st.session_state["messages"] = []

    initialize_session_state()

    llm = load_llm(selected_model, api_key)

    # Render the conversation so far.
    for msg in st.session_state["messages"]:
        if msg["role"] == "user":
            st.markdown(f"**You:** {msg['content']}")
        else:
            st.markdown(f"**LLM:** {msg['content']}")

    user_input = st.text_input("Type your message here...", "")

    if st.button("Send"):
        if user_input.strip() == "":
            st.warning("Please enter a message before sending.")
        elif llm is None:
            st.error("LLM is not configured or implemented for this choice.")
        else:
            st.session_state["messages"].append({"role": "user", "content": user_input})

            # Rebuild the stored history as LangChain message objects.
            lc_messages = []
            for msg in st.session_state["messages"]:
                if msg["role"] == "user":
                    lc_messages.append(HumanMessage(content=msg["content"]))
                else:
                    lc_messages.append(AIMessage(content=msg["content"]))

            response = llm.invoke(lc_messages)
            st.session_state["messages"].append(
                {"role": "assistant", "content": response.content}
            )

            # Rerun so the history rendered above includes this exchange
            # (on Streamlit < 1.27, use st.experimental_rerun() instead).
            st.rerun()


if __name__ == "__main__":
    main()
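# Usage note: launch with Streamlit's CLI, e.g.
#   streamlit run app.py
# (app.py is an assumed filename; use whatever this file is saved as).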