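"""Jurai Insight: a Gradio chat app that searches a Chroma vector store for the
most relevant legal case and answers the user's question with an LLM, citing
the matched case_id as the source ("kilde")."""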
import gradio as gr
import os
from huggingface_hub import login
from dotenv import load_dotenv
from embedding import embeddings
from db.chroma import load_and_setup_db, search_cases
from chat.hermes_llm import ChatManager
# Load environment variables
load_dotenv()
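# Expected .env entries (the values shown are placeholders, not real settings):
#   HUGGINGFACEHUB_API_TOKEN=hf_xxx   # only needed if the login() call below is enabled
#   VECTOR_DB_PATH=./chroma_db        # path to the persisted Chroma database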
# Login to Hugging Face
# login(token=os.getenv("HUGGINGFACEHUB_API_TOKEN"), add_to_git_credential=True)
# Initialize components
VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH")
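# Fail fast if the vector store path is not configured (assumes VECTOR_DB_PATH
# is meant to be provided via the environment or the .env file).
if not VECTOR_DB_PATH:
    raise RuntimeError("VECTOR_DB_PATH is not set; add it to your .env file.")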
vector_store = load_and_setup_db(VECTOR_DB_PATH, embeddings)
legal_chat = ChatManager(temperature=0.1)

def process_query(query, chat_history):
    try:
        # Search the vector store for the single most relevant case
        results = search_cases(vectorstore=vector_store, query=query, k=1)

        if len(results) > 0:
            # Ask the chat manager for an answer grounded in the retrieved case
            response = legal_chat.get_response(results[0]['content'], query=query)
            # "kilde" is Danish for "source"
            response_final = f"{response}\n\nkilde:[case_id:{results[0]['metadata']['case_id']}]"
        else:
            # Danish: "The requested term was not found in any cases. Try a different search term."
            response_final = "Det ønskede ord blev ikke fundet i nogen sager. Prøv med et andet søgeord"

        # Append the exchange to the chat history and clear the input box
        chat_history.append((query, response_final))
        return "", chat_history
    except Exception:
        # On any error, fall back to the same "not found" message instead of exposing a traceback
        return "", chat_history + [(query, "Det ønskede ord blev ikke fundet i nogen sager. Prøv med et andet søgeord")]
# Create Gradio interface
with gr.Blocks(title="Jurai Insight") as demo:
    gr.Markdown("# Jurai Insight")
    # Danish: "Predict the future, build your case on data."
    gr.Markdown("Forudsig fremtiden, byg din sag på data.")

    chatbot = gr.Chatbot(
        [],
        elem_id="chatbot",
        bubble_full_width=False,
        height=400
    )
    with gr.Row():
        query_input = gr.Textbox(
            placeholder="Indtast dit spørgsmål her...",  # Danish: "Enter your question here..."
            show_label=False,
            scale=4
        )
        submit_btn = gr.Button("Send", scale=1)
    # Set up event handlers
    submit_btn.click(
        process_query,
        inputs=[query_input, chatbot],
        outputs=[query_input, chatbot]
    )
    query_input.submit(
        process_query,
        inputs=[query_input, chatbot],
        outputs=[query_input, chatbot]
    )
if __name__ == "__main__":
    demo.launch(share=True)