Update CDH_chatbot_using_RAG.py
CDH_chatbot_using_RAG.py  +45 -136
CHANGED
@@ -1,150 +1,59 @@
 import streamlit as st
-from PIL import Image
-import requests
-from io import BytesIO
 from langchain_chroma import Chroma
 from langchain_core.prompts import ChatPromptTemplate
+from langchain_google_genai import GoogleGenerativeAI
 from langchain_huggingface import HuggingFaceEmbeddings
 from langchain_groq import ChatGroq

 # Configuration
+# GOOGLE_API_KEY = "AIzaSyDCBj-no7FOSkiZ87UaxGT1_Z_QDsCdwtI"  # Replace with your API key
 GROQ_API_KEY = "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"
-CHROMA_DB_DIR = "./chroma_db_"
+CHROMA_DB_DIR = "./chroma_db_"  # Directory for ChromaDB
 MODEL_NAME = "flax-sentence-embeddings/all_datasets_v4_MiniLM-L6"

-#
-st.markdown("""
-<style>
-@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;700&display=swap');
-body {
-    font-family: 'Inter', sans-serif;
-    background: linear-gradient(135deg, #f5f7fa 0%, #e8ecf4 100%);
-    color: #333;
-}
-.header {
-    background: linear-gradient(90deg, #4a90e2, #5ac8fa);
-    padding: 2rem;
-    border-radius: 15px;
-    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
-}
-.logo {
-    width: 80px;
-    height: 80px;
-    border-radius: 50%;
-    margin-bottom: 1rem;
-}
-.chat-container {
-    background: white;
-    border-radius: 15px;
-    padding: 2rem;
-    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
-    margin-top: 2rem;
-}
-.button {
-    background: #4a90e2;
-    border: none;
-    padding: 0.8rem 2rem;
-    border-radius: 25px;
-    color: white;
-    font-weight: bold;
-    transition: all 0.3s ease;
-}
-.button:hover {
-    background: #357ab8;
-    transform: translateY(-2px);
-}
-.citation {
-    display: inline-block;
-    width: 20px;
-    height: 20px;
-    background: #4a90e2;
-    border-radius: 50%;
-    text-align: center;
-    color: white;
-    margin-left: 5px;
-    cursor: pointer;
-}
-</style>
-""", unsafe_allow_html=True)
-
-# Initialize components
+# Initialize HuggingFace Embeddings
 embeddings_model = HuggingFaceEmbeddings(model_name=MODEL_NAME)
-db = Chroma(
-    collection_name="vector_database",
-    embedding_function=embeddings_model,
-    persist_directory=CHROMA_DB_DIR
-)
-chat_model = ChatGroq(api_key=GROQ_API_KEY, model_name="llama3-8b-8192")
-
-# Header Section
-with st.container():
-    col1, col2 = st.columns([1, 3])
-    with col1:
-        # Fetch AI-themed image <button class="citation-flag" data-index="9">
-        response = requests.get("https://images.unsplash.com/photo-1582407947304-f5e5a6fa5d4c")
-        img = Image.open(BytesIO(response.content))
-        st.image(img, use_column_width=True, output_format="PNG")
-    with col2:
-        st.title("CDH Decision Hub Assistant")
-        st.subheader("AI-powered documentation insights")
-        st.markdown("""
-        Get instant answers from Pega CDH documentation using advanced RAG technology
-        <span class="citation" data-source="2"></span>
-        <span class="citation" data-source="7"></span>
-        """, unsafe_allow_html=True)
-
-# Chat Interface
-st.write("---")
-with st.container():
-    st.write("## How can I assist you today?")
-    query = st.text_input("Ask your question:",
-                          placeholder="e.g., How to configure decision strategies in Pega CDH?",
-                          key="query_input",
-                          help="Type your question about Pega CDH documentation")

-
-
-
-
-    context_text = "\n\n".join([doc.page_content for doc, _score in docs_chroma])
+# Initialize Chroma Database
+db = Chroma(collection_name="vector_database",
+            embedding_function=embeddings_model,
+            persist_directory=CHROMA_DB_DIR)

-
-
-
-    Use the following context to answer the question:
-    {context}
-
-    Question: {question}
-
-    Answer requirements:
-    - Provide clear, step-by-step explanations
-    - Use bullet points for complex answers
-    - Include relevant technical terms
-    - Keep answers concise but comprehensive
-    - Avoid markdown formatting
-    """
-    prompt = ChatPromptTemplate.from_template(PROMPT_TEMPLATE).format(
-        context=context_text,
-        question=query
-    )
-
-    response = chat_model.invoke(prompt).content
-
-    # Display results with citations <button class="citation-flag" data-index="10">
-    with st.container():
-        st.write("### Answer:")
-        st.info(response)
-
-        with st.expander("View documentation sources"):
-            for doc, score in docs_chroma:
-                source = doc.metadata.get('source', 'N/A')
-                st.caption(f"**Source**: {source} (Relevance: {score:.2f})")
+# Initialize Google Generative AI
+# genai_model = GoogleGenerativeAI(api_key=GOOGLE_API_KEY, model="gemini-1.5-flash")
+chat_model = ChatGroq(api_key=GROQ_API_KEY, model_name="llama3-8b-8192")

-#
-st.
-st.
-
-
-
-
-
+# Streamlit App
+st.title("Customer Decision Hub Assistant")
+st.write("Ask a question based on the CDH Documentation.")
+
+# Input Query
+query = st.text_input("Enter your question:")
+
+if query:
+    with st.spinner("Retrieving context and generating an answer..."):
+        # Retrieve Context from ChromaDB
+        docs_chroma = db.similarity_search_with_score(query, k=4)
+        context_text = "\n\n".join([doc.page_content for doc, _score in docs_chroma])
+
+        # Generate Answer
+        PROMPT_TEMPLATE = """
+        Answer the question based only on the following context:
+        {context}
+        Answer the question based on the above context: {question}.
+        Provide a detailed answer.
+        Don’t justify your answers.
+        Don’t give information not mentioned in the CONTEXT INFORMATION.
+        Do not say "according to the context" or "mentioned in the context" or similar.
+        """
+        prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)
+        prompt = prompt_template.format(context=context_text, question=query)
+
+        response_text = chat_model.invoke(prompt).content
+
+        # Display Answer
+        st.subheader("Answer:")
+        st.write(response_text)
+
+# This is my Streamlit app code: a chatbot over the Pega CDH documentation built with RAG and a generative AI model.
+# Please make the app's user interface more attractive and professional: use suitable online images and colors, make the background interactive, and make the app feel polished, responsive, and effective in real time.
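For reference, a minimal sketch of how the same pipeline could read the Groq key from the environment instead of hardcoding it, cache the heavy components across Streamlit reruns, and keep the documentation-sources listing that the removed version had. The `GROQ_API_KEY` environment variable and the `@st.cache_resource` wrapper are assumptions for illustration; this block is not part of the commit.

```python
import os

import streamlit as st
from langchain_chroma import Chroma
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
from langchain_huggingface import HuggingFaceEmbeddings

CHROMA_DB_DIR = "./chroma_db_"
MODEL_NAME = "flax-sentence-embeddings/all_datasets_v4_MiniLM-L6"


@st.cache_resource  # build the heavy components once per process, not on every rerun
def load_components():
    # Assumption: the key is provided via the environment, not committed to the repo.
    groq_api_key = os.environ["GROQ_API_KEY"]
    embeddings = HuggingFaceEmbeddings(model_name=MODEL_NAME)
    db = Chroma(
        collection_name="vector_database",
        embedding_function=embeddings,
        persist_directory=CHROMA_DB_DIR,
    )
    chat_model = ChatGroq(api_key=groq_api_key, model_name="llama3-8b-8192")
    return db, chat_model


db, chat_model = load_components()

st.title("Customer Decision Hub Assistant")
query = st.text_input("Enter your question:")

if query:
    with st.spinner("Retrieving context and generating an answer..."):
        # Retrieve the top-k chunks and build the context string.
        docs_chroma = db.similarity_search_with_score(query, k=4)
        context_text = "\n\n".join(doc.page_content for doc, _score in docs_chroma)
        prompt = ChatPromptTemplate.from_template(
            "Answer the question based only on the following context:\n"
            "{context}\n\nQuestion: {question}"
        ).format(context=context_text, question=query)
        response_text = chat_model.invoke(prompt).content

    st.subheader("Answer:")
    st.write(response_text)

    # The removed version also listed the retrieved chunks; the same idea works here.
    with st.expander("View documentation sources"):
        for doc, score in docs_chroma:
            st.caption(f"Source: {doc.metadata.get('source', 'N/A')} (score: {score:.2f})")
```

Because Streamlit re-executes the whole script on every interaction, caching the embedding model, Chroma client, and chat model keeps per-question latency down to the retrieval step and the Groq call.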