import google.generativeai as genai
import streamlit as st
import time
import random
from utils import safety_settings
# Page-level configuration; must be the first Streamlit call in the script.
st.set_page_config(
    page_title="Gemini-Pro Chat",
    page_icon="🔥",
    menu_items={
        'About': "# Forked from https://github.com/hiliuxg/geminichat"
    },
)

st.title("Gemini-Pro Chat")
st.caption("Chatbot, powered by Google Gemini Pro.")
# Ask for the Gemini API key once and cache it in session state so it
# survives Streamlit reruns.
if "app_key" not in st.session_state:
    app_key = st.text_input("Your Gemini App Key", type='password')
    if app_key:
        st.session_state.app_key = app_key

# Conversation history also lives in session state across reruns.
if "history" not in st.session_state:
    st.session_state.history = []

# If no key has been stored yet, reading st.session_state.app_key raises
# AttributeError — turn that into a gentle prompt instead of a crash.
try:
    genai.configure(api_key=st.session_state.app_key)
except AttributeError:
    st.warning("Please Add Your Gemini App Key.")
# Rebuild the model/chat pair on every rerun, seeding it with the saved history.
model = genai.GenerativeModel('gemini-pro')
chat = model.start_chat(history=st.session_state.history)

with st.sidebar:
    if st.button("Clear Chat Window", use_container_width=True, type="primary"):
        st.session_state.history = []
        st.rerun()

# Replay the conversation so far. Gemini labels its turns "model", which maps
# onto Streamlit's "assistant" avatar; user turns pass through unchanged.
for message in chat.history:
    author = "assistant" if message.role == "model" else message.role
    with st.chat_message(author):
        st.markdown(message.parts[0].text)
# Accept chat input only once an API key has been supplied.
if "app_key" in st.session_state:
    if prompt := st.chat_input(""):
        # Markdown requires TWO trailing spaces before a newline to render a
        # hard line break; the previous single space (' \n') was silently
        # ignored by CommonMark renderers, collapsing multi-line prompts.
        prompt = prompt.replace('\n', '  \n')
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            message_placeholder = st.empty()
            message_placeholder.markdown("Thinking...")
            try:
                full_response = ""
                # Stream the reply, repainting the placeholder every 5-10
                # characters (with a short sleep) to simulate typing.
                for chunk in chat.send_message(prompt, stream=True, safety_settings=safety_settings):
                    char_count = 0
                    flush_at = random.randint(5, 10)
                    # chunk.text is a string, so this iterates CHARACTERS
                    # (the original loop variable "word" was misleading).
                    for char in chunk.text:
                        full_response += char
                        char_count += 1
                        if char_count == flush_at:
                            time.sleep(0.05)
                            # Trailing "_" acts as a typing cursor.
                            message_placeholder.markdown(full_response + "_")
                            char_count = 0
                            flush_at = random.randint(5, 10)
                # Final repaint without the cursor.
                message_placeholder.markdown(full_response)
            except genai.types.generation_types.BlockedPromptException as e:
                # Prompt rejected by safety filters — surface the details.
                st.exception(e)
            except Exception as e:
                st.exception(e)
            # Persist the (possibly extended) history for the next rerun.
            st.session_state.history = chat.history