fixed the chat reset
app.py CHANGED
@@ -16,12 +16,16 @@ if "messages" not in st.session_state:
 if "model" not in st.session_state:
     st.session_state.model = genai.GenerativeModel(MODEL_ID)
 
-
+if "chat" not in st.session_state:
+    st.session_state.chat = st.session_state.model.start_chat()
+
+if "is_new_file" not in st.session_state:
+    st.session_state.is_new_file = True
 
 # Function to reset chat history
 def reset_chat():
     st.session_state.messages = []
-    model.start_chat()
+    st.session_state.model.start_chat()
 
 def main():
     # Streamlit app
@@ -45,51 +49,70 @@ def main():
     else:
         st.error("Unsupported file type. Please upload an image or PDF.")
         st.stop()
-
+
     # Reset chat history when a new file is uploaded
     reset_chat()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    st.session_state.is_new_file = True
+
+    # Text input for user prompt
+    user_input = st.text_area("Enter your prompt:", height=200)
+
+    # Send button
+    if st.button("Send"):
+        if not uploaded_file or not user_input:
+            st.warning("Please upload an image or PDF and enter a prompt.")
+            st.stop()
+
+        if user_input:
+            # Add user message to chat history
+            st.session_state.messages.append({"role": "user", "content": user_input})
+
+            with st.spinner("Processing..."):
+                if st.session_state.is_new_file:
                     # Upload the file with the correct MIME type
                     file_data = genai.upload_file(uploaded_file, mime_type=mime_type)
 
-                # Send file and prompt to Gemini API
-
+                    # Send file and prompt to Gemini API
+                    chat = st.session_state.chat
+                    response = chat.send_message(
                         [
                             user_input,
                             file_data
                         ],
                         stream=enable_stream
                     )
+                    st.session_state.is_new_file = False
+                else:
+                    # continue chat without sending the file again
+                    # Send a text prompt to Gemini API
+                    chat = st.session_state.chat
+                    response = chat.send_message(
+                        [
+                            user_input
+                        ],
+                        stream=enable_stream
+                    )
 
-
-
-
-
-                        with st.chat_message("assistant"):
-                            st.write(chunk.text)
-                            full_response += chunk.text
-                else:
-                    full_response = response.text
+                # Display Gemini response as it streams in
+                full_response = ""
+                if enable_stream:
+                    for chunk in response:
                         with st.chat_message("assistant"):
-                            st.write(
+                            st.write(chunk.text)
+                            full_response += chunk.text
+                else:
+                    full_response = response.text
+                    with st.chat_message("assistant"):
+                        st.write(full_response)
+
+                # Add Gemini response to chat history
+                st.session_state.messages.append({"role": "assistant", "content": full_response})
 
-
-
+    st.subheader("Chat History")
+    # Display chat history
+    for message in st.session_state.messages:
+        with st.chat_message(message["role"]):
+            st.markdown(message["content"])
 
 if __name__ == "__main__":
     main()
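For reference, a minimal sketch of the session-state pattern the new code builds on (imports and the model id are assumptions, not taken from the diff). One detail worth noting: GenerativeModel.start_chat() returns a new chat session, so for reset_chat() to actually drop the old conversation the returned session has to be stored back into st.session_state.chat; the assignment in the sketch below is added for that purpose and differs from the bare start_chat() call in the committed code.

import streamlit as st
import google.generativeai as genai

MODEL_ID = "gemini-1.5-flash"  # assumed model id; the real value is defined elsewhere in app.py

if "messages" not in st.session_state:
    st.session_state.messages = []

if "model" not in st.session_state:
    st.session_state.model = genai.GenerativeModel(MODEL_ID)

if "chat" not in st.session_state:
    st.session_state.chat = st.session_state.model.start_chat()

if "is_new_file" not in st.session_state:
    st.session_state.is_new_file = True

def reset_chat():
    # Clear the visible message history and start a fresh chat session.
    # start_chat() returns a new ChatSession; assigning it back to
    # st.session_state.chat is what discards the previous conversation
    # context (an addition here, the committed version calls start_chat()
    # without keeping the result).
    st.session_state.messages = []
    st.session_state.chat = st.session_state.model.start_chat()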