"""Streamlit chatbot that answers questions about the GIZ Gender Strategy using RAG."""

import streamlit as st

# Local module providing the retrieval-augmented generation pipeline factory.
from rag import rag_pipeline

# Avatar identifiers reused for both live rendering and replayed history,
# so every message of a given role always shows the same icon.
USER_AVATAR = ":material/person:"
AI_AVATAR = ":material/smart_toy:"


@st.cache_resource
def load_rag_pipeline():
    """Build the RAG pipeline once and cache it across Streamlit reruns.

    ``st.cache_resource`` keeps the (expensive) pipeline object alive for the
    whole session instead of rebuilding it on every script rerun.
    """
    return rag_pipeline()


rag = load_rag_pipeline()

st.image("gender_strat_cover_pic.png", use_container_width=True)

st.markdown(
    """
The GIZ Gender Strategy Chatbot enables users to explore GIZ’s Gender Strategy through open, context-aware questions. It provides insights into how gender equality is integrated into operations, business development, and corporate values. Aligned with GIZ’s vision, the assistant makes gender-related topics accessible, supporting users in understanding policies, enhancing gender competence, and promoting inclusive practices.
""",
    unsafe_allow_html=True,
)

with st.expander("📖 Background Information & Example Questions"):
    st.markdown(
        """The assistant uses a Retrieval-Augmented Generation (RAG) approach to ensure responses are grounded in the content of the GIZ Gender Strategy (2019):

⚠️ Important: The assistant is limited to the Gender Strategy (2019). It does not access external sources, additional policies, or updated guidelines beyond the provided document.

Feel free to explore and gain deeper insights into GIZ’s commitment to gender equality! 🚀

Now, go ahead and ask a question related to the GIZ Gender Strategy in the text field below! 📝
"""
    )

# Conversation history survives Streamlit reruns via session_state.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the conversation so far on every rerun.
for message in st.session_state.messages:
    with st.chat_message(name=message["role"], avatar=message["avatar"]):
        st.markdown(message["content"])

# st.chat_input returns None until the user submits text; the walrus guard
# skips the whole turn-handling section on plain reruns.
if prompt := st.chat_input("Say something"):
    with st.chat_message(name="user", avatar=USER_AVATAR):
        st.write(prompt)
    st.session_state.messages.append(
        {"role": "user", "content": prompt, "avatar": USER_AVATAR}
    )

    with st.chat_message(name="ai", avatar=AI_AVATAR):
        # The pipeline expects the query both for prompt building and for
        # embedding-based retrieval.
        pipeline_output = rag.run(
            {"prompt_builder": {"query": prompt}, "text_embedder": {"text": prompt}},
        )
        # The LLM sometimes echoes a follow-up "Question:" section after its
        # answer — keep only the text before it.
        answer = pipeline_output["llm"]["replies"][0].split("Question:")[0]
        st.write(answer)
    st.session_state.messages.append(
        {"role": "ai", "content": answer, "avatar": AI_AVATAR}
    )