nkcong206 committed on
Commit 07fe9aa · 1 Parent(s): 9bb6ed4

fix load rag

Files changed (1): app.py +24 -16
app.py CHANGED
@@ -250,6 +250,9 @@ if "save_dir" not in st.session_state:
 if "uploaded_files" not in st.session_state:
     st.session_state.uploaded_files = set()
 
+if "processing" not in st.session_state:
+    st.session_state.processing = False
+
 @st.dialog("Setup Gemini")
 def vote():
     st.markdown(
@@ -347,8 +350,10 @@ def compute_rag_chain(_model, _embd, docs_texts):
 if st.session_state.uploaded_files:
     if st.session_state.gemini_api is not None:
         if st.session_state.rag is None:
+            st.session_state.processing = True
             docs_texts = [d.page_content for d in documents]
             st.session_state.rag = compute_rag_chain(st.session_state.model, st.session_state.embd, docs_texts)
+            st.session_state.processing = False
 
 if st.session_state.gemini_api is not None:
     if st.session_state.llm is None:
@@ -372,20 +377,23 @@ for message in st.session_state.chat_history:
         st.write(message["content"])
 
 prompt = st.chat_input("Bạn muốn hỏi gì?")
-if st.session_state.gemini_api:
-    if prompt:
-        st.session_state.chat_history.append({"role": "user", "content": prompt})
-
-        with st.chat_message("user"):
-            st.write(prompt)
-
-        with st.chat_message("assistant"):
-            if st.session_state.rag is not None:
-                respone = st.session_state.rag.invoke(prompt)
-                st.write(respone)
-            else:
-                ans = st.session_state.llm.invoke(prompt)
-                respone = ans.content
-                st.write(respone)
-
-        st.session_state.chat_history.append({"role": "assistant", "content": respone})
+
+if not st.session_state.processing:
+    if st.session_state.gemini_api:
+        if prompt:
+            st.session_state.chat_history.append({"role": "user", "content": prompt})
+
+            with st.chat_message("user"):
+                st.write(prompt)
+
+            with st.chat_message("assistant"):
+                if st.session_state.rag is not None:
+                    respone = st.session_state.rag.invoke(prompt)
+                    st.write(respone)
+                else:
+                    ans = st.session_state.llm.invoke(prompt)
+                    respone = ans.content
+                    st.write(respone)
+
+            st.session_state.chat_history.append({"role": "assistant", "content": respone})
+
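The commit gates chat handling behind a session-state "processing" flag so a prompt is ignored while the RAG chain is still being built. Below is a minimal, self-contained sketch of that guard pattern; fake_build_index() and the echoed reply are illustrative stand-ins, not code from app.py, where the real work is done by compute_rag_chain.

# Minimal sketch of the session-state "processing" guard introduced in this commit.
# fake_build_index() and the echo reply are placeholders, not part of app.py.
import time

import streamlit as st

if "processing" not in st.session_state:
    st.session_state.processing = False
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

def fake_build_index() -> None:
    # Stand-in for an expensive setup step such as building a RAG chain.
    st.session_state.processing = True
    time.sleep(2)  # simulate slow work
    st.session_state.processing = False

if st.button("Build index"):
    fake_build_index()

# Replay earlier turns so the conversation survives Streamlit reruns.
for message in st.session_state.chat_history:
    with st.chat_message(message["role"]):
        st.write(message["content"])

prompt = st.chat_input("Ask something")

# Mirror the diff's `if not st.session_state.processing:` guard:
# input submitted while the index is being built is simply dropped.
if not st.session_state.processing and prompt:
    st.session_state.chat_history.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)
    with st.chat_message("assistant"):
        reply = f"echo: {prompt}"
        st.write(reply)
    st.session_state.chat_history.append({"role": "assistant", "content": reply})

Because Streamlit reruns the whole script on every interaction, the flag lives in st.session_state so it persists across those reruns rather than being reset to False each time.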