import os
import gradio as gr
from langchain_groq import ChatGroq
from langchain.prompts import ChatPromptTemplate
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
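
# NOTE (assumption): `store`, the vector store queried below, is not built in the
# file shown here; it is presumably created over the AI Act text in an earlier step.
# The lines below are a minimal sketch of how such a store could be created. The PDF
# path, splitter settings, and embedding model are illustrative assumptions, not the
# original setup.
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter

documents = PyPDFLoader("ai_act_bg.pdf").load()  # hypothetical path to the AI Act PDF
chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(documents)
# A multilingual embedding model is assumed because the documents and questions are in Bulgarian.
store = FAISS.from_documents(chunks, HuggingFaceEmbeddings(model_name="intfloat/multilingual-e5-base"))
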

def rag_retriever(message, history, system_prompt, num_sources=4, temperature=0):
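    """Answer `message` with a conversational RAG chain over the AI Act vector store.

    `system_prompt`, `num_sources`, and `temperature` come from the extra Gradio
    controls; `history` is supplied by Gradio's ChatInterface but the chain keeps
    its own per-call memory instead of reusing it. Returns an HTML string with the
    answer followed by the retrieved source passages.
    """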
    chat = ChatGroq(temperature=temperature, model_name="llama3-70b-8192", api_key=os.getenv("GROQ_API_KEY"))

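    # Prompt: the user-supplied system prompt plus the retrieved context, followed by the question.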
    prompt_template = ChatPromptTemplate.from_messages([
        ("system", system_prompt+"""

        Use the following pieces of context to answer the user's question.
        ----------------
        {context}"""),
        ("human", "{question}")
    ])

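    # Fresh conversation memory on every call, so earlier turns from Gradio's `history` are not reused.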
    memory = ConversationBufferMemory(memory_key="chat_history", output_key="answer", return_messages=True)

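    # Retrieve the `num_sources` most similar chunks from the vector store.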
    retriever = store.as_retriever(search_type="similarity", search_kwargs={'k': num_sources})

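    # Conversational RAG chain that also returns the source documents behind each answer.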
    chain = ConversationalRetrievalChain.from_llm(llm=chat,
                                                  retriever=retriever,
                                                  return_source_documents=True,
                                                  memory=memory,
                                                  combine_docs_chain_kwargs={"prompt": prompt_template})


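    # Run the chain: retrieve context, fill the prompt, and query the Groq-hosted Llama 3 model.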
    output = chain.invoke({"question": message})

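    # Build an HTML list of the retrieved chunks with their (1-based) page numbers.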
    sources = ""
    for doc in output['source_documents']:
        source_content = doc.page_content.strip().replace("\r\n", " ").replace("\r", " ").replace("\n", " ")
        sources += f'<span style="color:green">Страница: {doc.metadata["page"]+1}</span><br><span style="color:gray">{source_content}</span><br><br>'

    response = f"""<h5>Отговор:</h5>{output['answer']}<br><h5>Източници:</h5>{sources}"""
    return response


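# Gradio chat UI: Bulgarian example questions plus extra controls for the system prompt,
# the number of retrieved sources, and the model temperature.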
rag = gr.ChatInterface(rag_retriever,
                       examples=[["Каква е целта на настоящия регламент", "You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Provide a detailed and comprehensive answer, incorporating as much relevant information as possible. Always respond in Bulgarian, regardless of the language used in the question."],
                                 ["Какво са Системите с ИИ", "You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Always respond in Bulgarian, regardless of the language used in the question."],
                                 ["Какво е равнище на технологично развитие", "You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Always respond in Bulgarian, regardless of the language used in the question."]],
                       title="Чатене с документа AI Act",
                       description="Питайте каквото пожелаете, но пишете на български.",
                       chatbot=gr.Chatbot(placeholder="<strong>Вашият личен AI Act помощник</strong><br>Питайте каквото пожелаете, но пишете на български."),
                       textbox=gr.Textbox(placeholder="Задайте своя въпрос...", container=False, scale=7),
                       retry_btn="Отново",
                       undo_btn="Назад",
                       clear_btn="Изчистете",
                       submit_btn="Изпрати",
                       additional_inputs=[gr.components.Textbox("You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Always respond in Bulgarian, regardless of the language used in the question.", label="System Prompt"),
                                          gr.components.Slider(minimum=1, maximum=10, value=4, step=1, label="Брой препратки"),
                                          gr.components.Slider(minimum=0, maximum=2, value=0, label="Креативност на модела", info="Ако е много високо моделът си измисля, но може да напише интересни неща."),],
                       additional_inputs_accordion=gr.Accordion("Допълнителни настройки", open=False),
                      )

rag.launch()