import os

import streamlit as st

from langchain_chroma import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.chains.question_answering import load_qa_chain
from langchain.memory import ConversationBufferMemory
from langchain_core.prompts import PromptTemplate
from langchain_groq import ChatGroq
from dotenv import load_dotenv

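# HocamBot: a Streamlit chat app that answers questions over a local Chroma
# index using a Groq-hosted Llama model and a simple conversation memory.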
st.title("HocamBot")

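# Load the Groq API key from a local .env file and fail fast if it is missing.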
load_dotenv()
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
assert GROQ_API_KEY, "GROQ_API_KEY environment variable not set."

if 'initialized' not in st.session_state:
    st.session_state.initialized = False

# Streamlit re-runs this script on every interaction; guard the setup so the
# models, vector store, and conversation memory are created only once per session.
if not st.session_state.initialized:
    try:
        with st.spinner("Initializing..."):
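            # Embedding model for similarity search; it must match the model
            # that was used when the doc_db vector store was built.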
            model_path = "sentence-transformers/all-MiniLM-L12-v2"
            st.session_state.embedding_function = HuggingFaceEmbeddings(
                model_name=model_path,
                model_kwargs={'device': 'cpu'},
                encode_kwargs={'normalize_embeddings': False}
            )

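            # Open the persisted Chroma vector store that was built into doc_db offline.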
            persist_directory = "doc_db"
            st.session_state.docsearch = Chroma(
                persist_directory=persist_directory,
                embedding_function=st.session_state.embedding_function
            )

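            # Groq-hosted Llama 3.1 chat model; temperature 0 keeps answers deterministic.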
            st.session_state.chat_model = ChatGroq(
                model="llama-3.1-8b-instant",
                temperature=0,
                api_key=GROQ_API_KEY
            )

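            # Prompt: {context} receives the retrieved documents, {chat_history} the
            # conversation memory, and {human_input} the user's question.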
            template = """You are a chatbot having a conversation with a human. Your name is Devrim.

Given the following extracted parts of a long document and a question, create a final answer. If the answer is not in the document or is irrelevant, just say that you don't know; don't try to make up an answer.

{context}

{chat_history}
Human: {human_input}
Chatbot:"""

            prompt = PromptTemplate(
                input_variables=["chat_history", "human_input", "context"],
                template=template
            )
            st.session_state.memory = ConversationBufferMemory(
                memory_key="chat_history",
                input_key="human_input"
            )

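            # "stuff" chain: the retrieved documents are concatenated into {context},
            # and the chain's memory records each question/answer turn.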
            st.session_state.qa_chain = load_qa_chain(
                llm=st.session_state.chat_model,
                chain_type="stuff",
                memory=st.session_state.memory,
                prompt=prompt
            )

        st.session_state.initialized = True
        st.success("Initialization successful.")

    except Exception as e:
        st.session_state.initialized = False
        st.error(f"Initialization failed: {e}")

if st.button("Clear Chat History"):
    if 'memory' in st.session_state:
        st.session_state.memory.clear()
    st.rerun()

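# Replay the stored conversation so earlier messages stay visible across reruns.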
if st.session_state.initialized and 'memory' in st.session_state:
    if st.session_state.memory.buffer_as_messages:
        for message in st.session_state.memory.buffer_as_messages:
            if message.type == "ai":
                st.chat_message(name="ai", avatar="🤖").write(message.content)
            else:
                st.chat_message(name="human", avatar="👤").write(message.content)

query = st.chat_input("Ask something")
if query:
    try:
        with st.spinner("Answering..."):
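            # Retrieve the single most similar chunk (k=1) and let the QA chain
            # answer from it together with the running chat history.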
            docs = st.session_state.docsearch.similarity_search(query, k=1)
            response = st.session_state.qa_chain(
                {"input_documents": docs, "human_input": query},
                return_only_outputs=True
            )["output_text"]

st.chat_message(name="human", avatar="👤").write(query) |
|
st.chat_message(name="ai", avatar="🤖").write(response) |
|
|
|
    except Exception as e:
        st.error(f"An error occurred: {e}")