"""This is an example of a simple chatbot that uses the RAG model to answer questions
about GIZ with Streamlit."""
import streamlit as st
# Here we import the rag_pipeline function from the rag.py file
from rag import rag_pipeline
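
# Note: the pipeline returned by rag_pipeline() is expected to expose components
# named "text_embedder", "prompt_builder", and "llm", because those names are used
# in rag.run() and in the reply lookup further below. A minimal Haystack-style
# sketch of such a pipeline (an assumption for illustration, not the actual
# contents of rag.py; document_store, template, and generator are placeholders):
#
#   from haystack import Pipeline
#   from haystack.components.builders import PromptBuilder
#   from haystack.components.embedders import SentenceTransformersTextEmbedder
#   from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever
#
#   pipeline = Pipeline()
#   pipeline.add_component("text_embedder", SentenceTransformersTextEmbedder())
#   pipeline.add_component("retriever", InMemoryEmbeddingRetriever(document_store))
#   pipeline.add_component("prompt_builder", PromptBuilder(template=template))
#   pipeline.add_component("llm", generator)
#   pipeline.connect("text_embedder.embedding", "retriever.query_embedding")
#   pipeline.connect("retriever.documents", "prompt_builder.documents")
#   pipeline.connect("prompt_builder.prompt", "llm.prompt")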
# We use the st.cache_resource decorator so the RAG pipeline is built only once
# and reused across Streamlit reruns
@st.cache_resource
def load_rag_pipeline():
    rag = rag_pipeline()
    return rag


rag = load_rag_pipeline()

st.image("gender_strat_cover_pic.png", use_container_width=True)
st.markdown(
    """
    <div style="display: flex; align-items: center; gap: 10px;">
        <span style="font-size: 30px;">🤖</span>
        <h3 style="margin: 0;">Welcome to the GIZ Gender Strategy Assistant</h3>
    </div>
    <p>The <b>GIZ Gender Strategy Chatbot</b> enables users to explore GIZ’s Gender Strategy through open, context-aware questions. It provides insights into how gender equality is integrated into operations, business development, and corporate values. Aligned with GIZ’s vision, the assistant makes gender-related topics accessible, supporting users in understanding policies, enhancing gender competence, and promoting inclusive practices.</p>
    """,
    unsafe_allow_html=True,
)
with st.expander("📖 Background Information & Example Questions"):
    st.markdown(
        """
        <h4>💡 How does the app work?</h4>
        <p>The assistant uses a <b>Retrieval-Augmented Generation (RAG) approach</b> to ensure responses are grounded in the content of the <b>GIZ Gender Strategy (2019)</b>:</p>
        <ul>
            <li>Your question is <b>converted into an embedding</b> (numerical representation).</li>
            <li>The system <b>retrieves the most relevant text sections</b> from the strategy document.</li>
            <li>A <b>language model (LLM)</b> generates a response based on the retrieved content.</li>
        </ul>
        <p><b>⚠️ Important:</b> The assistant is <b>limited to the Gender Strategy (2019)</b>. It <b>does not</b> access external sources, additional policies, or updated guidelines beyond the provided document.</p>
        <h4>🎯 Example questions:</h4>
        <ul>
            <li>What are the key objectives of the Gender Strategy?</li>
            <li>How does GIZ define "gender," and what is the conceptual foundation of the strategy?</li>
            <li>How does the strategy align with the <b>2030 Agenda</b>?</li>
            <li>How is the success of the strategy measured and reviewed?</li>
        </ul>
        <h4>📌 Further resources:</h4>
        <ul>
            <li>📄 <a href="https://www.giz.de/en/downloads/giz-2019-en-gender-strategy.pdf" target="_blank" rel="noopener noreferrer"><b>GIZ Gender Strategy (2019, PDF)</b></a></li>
            <li>🌍 <a href="https://reporting.giz.de/2022/operating-responsibly/our-gender-strategy/index.html" target="_blank" rel="noopener noreferrer"><b>GIZ Gender Strategy Website</b></a></li>
            <li>📊 <a href="https://www.blog-datalab.com/" target="_blank" rel="noopener noreferrer"><b>GIZ Data Lab Blog</b></a></li>
        </ul>
        """,
        unsafe_allow_html=True,
    )
st.html(
    """
    <p>Feel free to explore and gain deeper insights into GIZ’s commitment to gender equality! 🚀</p>
    <p><b>Now, go ahead and ask a question related to the GIZ Gender Strategy in the text field below!</b> 📝</p>
    """
)
if "messages" not in st.session_state:
st.session_state.messages = []
for message in st.session_state.messages:
with st.chat_message(name=message["role"], avatar=message["avatar"]):
st.markdown(message["content"])
prompt = st.chat_input("Ask a question about the GIZ Gender Strategy")
if prompt:
    with st.chat_message(name="user", avatar=":material/person:"):
        st.write(prompt)
    st.session_state.messages.append(
        {"role": "user", "content": prompt, "avatar": ":material/person:"}
    )
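
    # In a Haystack-style pipeline (assumed here), the keys passed to rag.run()
    # address individual components: "text_embedder" receives the question to embed
    # for retrieval, and "prompt_builder" receives it as the "query" variable of the
    # prompt template. These names must match the components defined in rag.py.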
    with st.chat_message(name="ai", avatar=":material/smart_toy:"):
        result = rag.run(
            {"prompt_builder": {"query": prompt}, "text_embedder": {"text": prompt}},
        )
        # Keep only the first reply and drop any follow-up "Question:" text the
        # model may append after its answer
        result = result["llm"]["replies"][0]
        result = result.split("Question:")[0]
        st.write(result)
    st.session_state.messages.append(
        {"role": "ai", "content": result, "avatar": ":material/smart_toy:"}
    )
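
# Typical local launch (assuming Streamlit and the dependencies of rag.py are installed):
#   streamlit run app.py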