# Author: Fred Okorio
# Date: 2024-01-01
# Description: A Streamlit app for a Climate Change Awareness Chatbot. The active version runs on ClimateBERTqa;
#              the plan is to switch to the more expressive ClimateGPT-7B model before the deadline.
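# NOTE: the two commented-out blocks below are earlier iterations kept for reference:
# (1) a generative version built on eci-io/climategpt-7b, and
# (2) a first extractive-QA draft using NinaErlacher/ClimateBERTqa with st.text_input.
# The active app further down is the third iteration.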
# # necessary libraries
# import streamlit as st
# import accelerate
# from transformers import AutoTokenizer, AutoModelForCausalLM
# import torch
#
# # page configuration
# st.set_page_config(page_title="Climate Change Awareness Chatbot", layout="wide")
#
# # ClimateGPT-7B model and tokenizer
# @st.cache_resource
# def load_climategpt():
#     tokenizer = AutoTokenizer.from_pretrained("eci-io/climategpt-7b")
#     model = AutoModelForCausalLM.from_pretrained("eci-io/climategpt-7b", device_map="auto")
#     return tokenizer, model
#
# tokenizer, model = load_climategpt()
#
# # generate responses
# def generate_response(user_input):
#     prompt = f"""
# <|im_start|>system
# You are ClimateGPT, a large language model trained to provide information on climate change.<|im_end|>
# <|im_start|>user
# {user_input}<|im_end|>
# <|im_start|>assistant
# """
#     inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
#     outputs = model.generate(**inputs, max_new_tokens=200)
#     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
#     return response.split("<|im_end|>")[-1].strip()
#
# # initialize session state for chat history
# if "history" not in st.session_state:
#     st.session_state.history = []
#
# # sidebar for chat history
# with st.sidebar:
#     st.title("Chat History")
#     for idx, (question, answer) in enumerate(st.session_state.history[::-1]):
#         with st.expander(f"💬 {question}"):
#             st.write(f"**Chatbot:** {answer}")
#     st.markdown("---")
#     st.info("🌱 *Ask me anything about climate change, sustainability, or eco-friendly living.*")
#
# # main chat interface
# st.title("Climate Change Awareness Chatbot")
# st.subheader("Get answers, tips, and climate change facts for Uganda & East Africa")
#
# # Display chat history
# for question, answer in st.session_state.history:
#     st.markdown(f"**You:** {question}")
#     st.success(f"**Chatbot:** {answer}")
#     st.markdown("---")
#
# # User input
# user_input = st.text_input("💬 Type your message and press Enter", key="text_input")
# if user_input:
#     response = generate_response(user_input)
#     # Append conversation to history
#     st.session_state.history.append((user_input, response))
#     # Clear input field after processing
#     st.session_state.text_input = ""
#     # Rerun the app to display the updated chat history
#     st.experimental_rerun()
#
# # Clear chat history button
# if st.button("Clear Chat History"):
#     st.session_state.history = []
#     st.experimental_rerun()
#
# # Footer
# st.markdown("""
# ---
# *Educational Purpose Only* | 🌱 **SDG Guardians AI - 2024** | *For a greener East Africa*
# """)
# import streamlit as st
# from transformers import pipeline, AutoTokenizer, AutoModelForQuestionAnswering
#
# # page configuration
# st.set_page_config(page_title="Climate Chatbot - Uganda & East Africa", layout="wide")
#
# # model loading...
# @st.cache_resource
# def load_climate_bert():
#     tokenizer = AutoTokenizer.from_pretrained("NinaErlacher/ClimateBERTqa")
#     model = AutoModelForQuestionAnswering.from_pretrained("NinaErlacher/ClimateBERTqa")
#     qa_pipeline = pipeline("question-answering", model=model, tokenizer=tokenizer)
#     return qa_pipeline
#
# qa_pipeline = load_climate_bert()
#
# def generate_response(user_question, context):
#     result = qa_pipeline(question=user_question, context=context)
#     return result['answer']
#
# # Initialize session state variables
# if "history" not in st.session_state:
#     st.session_state.history = []
#
# # Sidebar for chat history
# with st.sidebar:
#     st.title("Chat History")
#     for idx, (question, answer) in enumerate(st.session_state.history[::-1]):
#         with st.expander(f"💬 {question}"):
#             st.write(f"**Chatbot:** {answer}")
#     st.markdown("---")
#     st.info("🌱 *Ask me anything about climate change, sustainability, or eco-friendly living.*")
#
# # main chat UI
# st.title("Climate Change Awareness Chatbot")
# st.subheader("Get answers, tips, and climate change facts for Uganda & East Africa")
#
# # chat display
# chat_container = st.container()
# with chat_container:
#     for question, answer in st.session_state.history:
#         st.markdown(f"**You:** {question}")
#         st.success(f"**Chatbot:** {answer}")
#         st.markdown("---")
#
# # User input
# user_input = st.text_input("💬 Type your message and press Enter", key="text_input")
# if user_input:
#     context = """
#     Climate change is affecting Uganda and East Africa in various ways, including unpredictable rainfall patterns,
#     increased temperatures, and prolonged droughts. Sustainable farming practices, afforestation, and renewable
#     energy adoption are key solutions to mitigate these effects.
#     """  # Placeholder context
#     response = generate_response(user_input, context)
#     # append conversation to history
#     st.session_state.history.append((user_input, response))
#     # Clear stored input after processing
#     st.session_state.pop("text_input", None)
#     st.rerun()
#
# # Clear chat history button
# if st.button("Clear Chat History"):
#     st.session_state.history = []
#     st.rerun()
#
# # footer
# st.markdown("""
# ---
# *Educational Purpose Only* | 🌱 **SDG Guardians AI - 2024** | *For a greener East Africa*
# """)
import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForQuestionAnswering
# Page configuration
st.set_page_config(page_title="Climate Chatbot - Uganda", layout="wide")
# Custom CSS for shadow effect
st.markdown(
    """
    <style>
    .stChatInput {
        box-shadow: 0px 10px 20px rgba(0, 0, 0, 0.4); /* Strong shadow */
        border-radius: 10px;
        padding: 12px;
        background: white;
    }
    .stChatInput::before {
        content: "";
        position: absolute;
        width: 100%;
        height: 15px;
        left: 0;
        background: linear-gradient(to top, rgba(0, 0, 0, 0.3), rgba(0, 0, 0, 0)); /* Fading effect */
    }
    </style>
    """,
    unsafe_allow_html=True,
)
# Load model
@st.cache_resource
def load_climate_bert():
    tokenizer = AutoTokenizer.from_pretrained("NinaErlacher/ClimateBERTqa")
    model = AutoModelForQuestionAnswering.from_pretrained("NinaErlacher/ClimateBERTqa")
    return pipeline("question-answering", model=model, tokenizer=tokenizer)

qa_pipeline = load_climate_bert()
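# NOTE: st.cache_resource keeps one loaded pipeline per server process and reuses it across
# reruns and sessions. "question-answering" is an extractive pipeline: it copies a span out of
# the context string it is given rather than generating free text, e.g. (illustrative, not executed here):
#   qa_pipeline(question="What helps mitigate droughts?", context="... renewable energy adoption ...")
#   -> {"score": 0.42, "start": ..., "end": ..., "answer": "renewable energy adoption"}
# so answer quality is bounded by the context supplied further down.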
# Function to check if question is climate-related
def is_climate_related(question):
    climate_keywords = ["climate", "global warming", "deforestation", "carbon", "sustainability",
                        "renewable", "pollution", "green energy", "climate action", "afforestation"]
    return any(keyword in question.lower() for keyword in climate_keywords)

# Function to check if Uganda is mentioned
def is_uganda_related(question):
    return "uganda" in question.lower() or "east africa" in question.lower()
# Function to generate response
def generate_response(user_question, context):
    if not is_climate_related(user_question):
        return "I'm here to discuss climate change. Try asking about Uganda's climate, sustainability, or environmental issues."
    if not is_uganda_related(user_question):
        return "This chatbot focuses on climate change in Uganda. Try asking about Uganda's environmental challenges."
    result = qa_pipeline(question=user_question, context=context)
    return result['answer']
# Session state for chat history
if "history" not in st.session_state:
    st.session_state.history = []
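# st.session_state persists across reruns within a browser session, so the (question, answer)
# pairs appended below survive each st.rerun() and are redrawn by the loops that follow.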
# Sidebar - Chat History & Clear Button
with st.sidebar:
    st.title("Chat History")
    for idx, (question, answer) in enumerate(st.session_state.history[::-1]):
        with st.expander(f"💬 {question}"):
            st.write(f"**Chatbot:** {answer}")
    st.markdown("---")
    if st.button("🗑️ Clear Chat History"):
        st.session_state.history = []
        st.rerun()
    st.info("🌱 *Ask about climate change in Uganda.*")
# Main UI
st.title("Climate Change Chatbot")
st.subheader("Explore climate action and sustainability in Uganda")
# Sample questions section
with st.expander("Need ideas? (Click to expand)"):
    st.markdown("""
    - **How is Uganda affected by climate change?**
    - **What are sustainable farming methods?**
    - **How can I reduce my energy use?**
    - **What are the risks of deforestation?**
    - **Why is tree planting important?**
    - **How can youth take action?**
    """)
# Chat container with avatars
chat_container = st.container()
with chat_container:
    for question, answer in st.session_state.history:
        with st.chat_message("user"):
            st.write(question)
        with st.chat_message("assistant"):
            st.write(answer)
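# The loop above replays the full conversation from session_state on every rerun;
# st.chat_message renders each turn with the built-in user/assistant avatars and layout.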
# User input field with shadow effect
user_input = st.chat_input("Ask about climate change in Uganda...")

if user_input:
    context = """
    Climate change is affecting Uganda and East Africa in various ways, including unpredictable rainfall,
    rising temperatures, and prolonged droughts. Sustainable farming, afforestation, and renewable energy
    adoption are key solutions to mitigate these effects.
    """  # Placeholder context
    response = generate_response(user_input, context)
    st.session_state.history.append((user_input, response))
    st.rerun()
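# NOTE: because the context above is a fixed placeholder, every answer is a span extracted from
# those three sentences. A minimal sketch of supplying a larger reference text instead (assumes a
# hypothetical local file "climate_context.txt"; not part of the current app):
#   with open("climate_context.txt", encoding="utf-8") as f:
#       context = f.read()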
# The footer below seemed to overcrowd the page, so it is commented out for now.
# # footer fixed at the bottom
# st.markdown(
#     """
#     <style>
#     .footer {
#         position: fixed;
#         bottom: 0;
#         left: 100px;
#         font-size: 14px;
#         font-weight: 900;
#         width: 100%;
#         background-color: white;
#         text-align: center;
#         padding: 10px;
#         box-shadow: 0px -2px 5px rgba(0, 0, 0, 0.1);
#         z-index: 999;
#     }
#     </style>
#     <div class="footer">
#     ---
#     *Educational Purpose Only* | 🌱 **SDG Guardians AI - 2024** | *For a greener East Africa*
#     </div>
#     """,
#     unsafe_allow_html=True
# )