# Q&A Chatbot with Gemini
import streamlit as st
import os, random, time
import google.generativeai as genai

# Read the Gemini API key from the environment and configure the client
genai.configure(api_key=os.environ['GOOGLE_API_KEY'])
st.set_page_config(page_title="Q&A Demo")
st.header("Q&A ChatBot")
st.caption("A Chatbot created by SURAT")
if "history" not in st.session_state:
st.session_state.history = []
model = genai.GenerativeModel('gemini-1.5-pro')
chat = model.start_chat(history = st.session_state.history)
# Sidebar button to reset the conversation
with st.sidebar:
    if st.button("Clear Chat Window", use_container_width=True, type="primary"):
        st.session_state.history = []
        st.rerun()
# Replay the stored conversation; Gemini uses the role 'model' for its replies
for message in chat.history:
    role = "assistant" if message.role == 'model' else message.role
    with st.chat_message(role):
        st.markdown(message.parts[0].text)
if prompt := st.chat_input(""):
    # Two trailing spaces before the newline force a markdown line break
    prompt = prompt.replace('\n', '  \n')
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        message_placeholder.markdown("Thinking...")
        try:
            full_response = ""
            # Stream the reply, redrawing the placeholder every few
            # characters to simulate a typing effect
            for chunk in chat.send_message(prompt, stream=True):
                word_count = 0
                random_int = random.randint(5, 10)
                for word in chunk.text:
                    full_response += word
                    word_count += 1
                    if word_count == random_int:
                        time.sleep(0.05)
                        message_placeholder.markdown(full_response + "_")
                        word_count = 0
                        random_int = random.randint(5, 10)
            message_placeholder.markdown(full_response)
        except genai.types.generation_types.BlockedPromptException as e:
            st.exception(e)
        except Exception as e:
            st.exception(e)
    # Persist the updated history for the next rerun
    st.session_state.history = chat.history
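
# To run the app locally (a minimal sketch; assumes the file is saved as
# app.py and that the 'streamlit' and 'google-generativeai' packages are
# installed, e.g. via `pip install streamlit google-generativeai`):
#
#   export GOOGLE_API_KEY="your-key"
#   streamlit run app.py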