louiecerv committed · Commit 61bf71b · Parent: 9628f0b

fixed the chat reset

Files changed (1): app.py (+56 −33)
app.py CHANGED
@@ -16,12 +16,16 @@ if "messages" not in st.session_state:
 if "model" not in st.session_state:
     st.session_state.model = genai.GenerativeModel(MODEL_ID)
 
-model = st.session_state.model
+if "chat" not in st.session_state:
+    st.session_state.chat = st.session_state.model.start_chat()
+
+if "is_new_file" not in st.session_state:
+    st.session_state.is_new_file = True
 
 # Function to reset chat history
 def reset_chat():
     st.session_state.messages = []
-    model.start_chat()
+    st.session_state.model.start_chat()
 
 def main():
     # Streamlit app
@@ -45,51 +49,70 @@ def main():
     else:
         st.error("Unsupported file type. Please upload an image or PDF.")
         st.stop()
-
+
     # Reset chat history when a new file is uploaded
     reset_chat()
-
-    # Text input for user prompt
-    user_input = st.text_area("Enter your prompt:", height=200)
-
-    # Send button
-    if st.button("Send"):
-        if user_input:
-            # Add user message to chat history
-            st.session_state.messages.append({"role": "user", "content": user_input})
-
-            # Display chat history
-            for message in st.session_state.messages:
-                with st.chat_message(message["role"]):
-                    st.markdown(message["content"])
-
-            with st.spinner("Processing..."):
+    st.session_state.is_new_file = True
+
+    # Text input for user prompt
+    user_input = st.text_area("Enter your prompt:", height=200)
+
+    # Send button
+    if st.button("Send"):
+        if not uploaded_file or not user_input:
+            st.warning("Please upload an image or PDF and enter a prompt.")
+            st.stop()
+
+        if user_input:
+            # Add user message to chat history
+            st.session_state.messages.append({"role": "user", "content": user_input})
+
+            with st.spinner("Processing..."):
+                if st.session_state.is_new_file:
                     # Upload the file with the correct MIME type
                     file_data = genai.upload_file(uploaded_file, mime_type=mime_type)
 
-                # Send file and prompt to Gemini API with streaming enabled
-                response = model.generate_content(
+                    # Send file and prompt to Gemini API
+                    chat = st.session_state.chat
+                    response = chat.send_message(
                         [
                             user_input,
                             file_data
                         ],
                         stream=enable_stream
                     )
+                    st.session_state.is_new_file = False
+                else:
+                    # continue chat without sending the file again
+                    # Send a text prompt to Gemini API
+                    chat = st.session_state.chat
+                    response = chat.send_message(
+                        [
+                            user_input
+                        ],
+                        stream=enable_stream
+                    )
 
-                # Display Gemini response as it streams in
-                full_response = ""
-                if enable_stream:
-                    for chunk in response:
-                        with st.chat_message("assistant"):
-                            st.write(chunk.text)
-                        full_response += chunk.text
-                else:
-                    full_response = response.text
+                # Display Gemini response as it streams in
+                full_response = ""
+                if enable_stream:
+                    for chunk in response:
                         with st.chat_message("assistant"):
-                        st.write(full_response)
+                            st.write(chunk.text)
+                            full_response += chunk.text
+                else:
+                    full_response = response.text
+                    with st.chat_message("assistant"):
+                        st.write(full_response)
+
+                # Add Gemini response to chat history
+                st.session_state.messages.append({"role": "assistant", "content": full_response})
 
-                # Add Gemini response to chat history
-                st.session_state.messages.append({"role": "assistant", "content": full_response})
+        st.subheader("Chat History")
+        # Display chat history
+        for message in st.session_state.messages:
+            with st.chat_message(message["role"]):
+                st.markdown(message["content"])
 
 if __name__ == "__main__":
     main()
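The first hunk carries the actual fix. The old code kept only the model in st.session_state, called model.generate_content() for every prompt, and let reset_chat() discard the session returned by start_chat(), so no conversation state ever survived a rerun. Streamlit re-executes the whole script on each interaction; the commit therefore persists the ChatSession itself in st.session_state. A minimal sketch of that pattern, assuming the google-generativeai SDK (MODEL_ID below is a placeholder, not the value app.py uses):

# Sketch of the session-persistence pattern the commit adopts.
import streamlit as st
import google.generativeai as genai

MODEL_ID = "gemini-1.5-flash"  # assumption for illustration only

if "model" not in st.session_state:
    st.session_state.model = genai.GenerativeModel(MODEL_ID)

# Streamlit reruns the script top to bottom on every interaction, so
# the ChatSession must live in session_state; a local variable would
# be rebuilt, and its history lost, on each rerun.
if "chat" not in st.session_state:
    st.session_state.chat = st.session_state.model.start_chat()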
 
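One caveat the diff leaves open: the new reset_chat() still calls st.session_state.model.start_chat() without storing the returned ChatSession, so st.session_state.chat keeps its old history after a reset. If the intent is a truly fresh conversation, the reassignment would look like this (an assumption about intent, not part of the commit):

def reset_chat():
    st.session_state.messages = []
    # start_chat() returns a new ChatSession; without this assignment
    # st.session_state.chat would keep the previous history.
    st.session_state.chat = st.session_state.model.start_chat()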
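The is_new_file flag in the second hunk implements an upload-once design: genai.upload_file pushes the document to the File API a single time, the first send_message attaches it to the prompt, and since the ChatSession records that turn, follow-up prompts can be text-only. Condensed to its core, with uploaded_file, mime_type, user_input, and enable_stream supplied by the surrounding widgets in app.py:

chat = st.session_state.chat
if st.session_state.is_new_file:
    # First turn after an upload: attach the file to the prompt.
    file_data = genai.upload_file(uploaded_file, mime_type=mime_type)
    response = chat.send_message([user_input, file_data], stream=enable_stream)
    st.session_state.is_new_file = False
else:
    # Later turns: the session history already references the file.
    response = chat.send_message(user_input, stream=enable_stream)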
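On the rendering side, the new code opens st.chat_message("assistant") inside the chunk loop, so each streamed chunk is drawn as its own assistant bubble. A common Streamlit alternative, sketched here as a suggestion rather than taken from the commit, streams the whole reply into one bubble through a placeholder:

# response comes from chat.send_message(..., stream=enable_stream)
full_response = ""
with st.chat_message("assistant"):
    placeholder = st.empty()
    for chunk in response:
        full_response += chunk.text
        placeholder.markdown(full_response)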