tatts committed
Commit 0792b3a
1 Parent(s): c2d6c78

Update app.py

Files changed (1)
  1. app.py +47 -22
app.py CHANGED
@@ -254,35 +254,60 @@ question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
 rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)
 
-chat_history = ['Moodle','course','un cours']
-import gradio as gr
-
-#def ask(question, history):
-#    ai_message = rag_chain.invoke({"input": question, "chat_history": chat_history})
-#    chat_history.extend([HumanMessage(content=question), ai_message["answer"]])
-#    return ai_message['answer']
-
-def ask(question, history):
-    ai_message = rag_chain.invoke({"input": question, "chat_history": chat_history})
-    chat_history.extend([HumanMessage(content=question), ai_message["answer"]])
+chat_history = []
+
+def ask(question, chat_history):
+    # Prepend a phrase to the question to ensure relevance to Moodle
+    prepended_phrase = "using platform Moodle :"
+    modified_question = prepended_phrase + question
+
+    # Invoke the chain to get the response
+    ai_message = rag_chain.invoke({"input": modified_question, "chat_history": chat_history})
+    chat_history.append(("user", question))
+
+    answer = ai_message["answer"]
+
+    # Prepare document links if available
     document_links = []
-    if 'context' in ai_message and ai_message['context']:
-        for doc in ai_message['context']:
-            if 'url' in doc.metadata:
-                document_links.append(doc.metadata['url'])
-    # Format document links as part of the text output
+    for doc in ai_message.get('context', []):
+        if 'url' in doc.metadata:
+            document_links.append(doc.metadata['url'])
+
+    # Append the question and answer to the chat history (without sources)
+    chat_history.append(("assistant", answer))
+
+    # For display purposes, format the chat history without labels
+    display_chat_history = []
+    for role, content in chat_history:
+        if role == "user":
+            display_chat_history.append((None, content))  # User question on the right
+        else:
+            display_chat_history.append((content, None))  # Assistant answer on the left
+
+    # Add sources to the last assistant message for display purposes only
     if document_links:
         document_links_text = "\n".join(document_links)
-        links_text = f"\n\nSources:\n{document_links_text}"
-    else:
-        links_text = "UNTE_ASSISTANTE"
-
-    return ai_message['answer'] + "\n" + links_text
-
-demo = gr.ChatInterface(fn=ask, title="UNTE ChatBot",theme=gr.themes.Soft())
-
-if __name__ == "__main__":
-    gr.close_all()
-    demo.launch(share = False)
+        display_chat_history[-1] = (display_chat_history[-1][0] + f"\nSources: {document_links_text}", None)
+
+    # Return display history for the UI, and the actual chat history for internal use
+    return display_chat_history, chat_history, ""
+
+# Initialize the Gradio interface
+with gr.Blocks(theme=gr.themes.Soft()) as demo:
+    chatbot = gr.Chatbot()
+    clear_button = gr.Button("Clear")
+    #clear = gr.Button("Clear")
+    question = gr.Textbox(placeholder="Ask me anything about Moodle...")
+    chat_history = gr.State([])
+
+    question.submit(ask, [question, chat_history], [chatbot, chat_history, question])
+    clear_button.click(lambda: ([], [], ""), None, [chatbot, chat_history, question], queue=False)
+    #clear.click(lambda: ("", []), None, [chatbot, chat_history, question], queue=False)
+
+demo.queue()
+demo.launch(share=False)
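
For reference, the `rag_chain` and `history_aware_retriever` used in this hunk are assumed to be assembled earlier in app.py with LangChain's history-aware retrieval helpers (the hunk context line shows `question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)`). The sketch below is only an illustration of that assumed setup; `llm`, `retriever`, and the prompt wording are placeholders for objects defined elsewhere in the file, not part of this commit.

# Sketch only: assumed upstream setup for rag_chain / history_aware_retriever.
# `llm` and `retriever` stand in for the chat model and vector-store retriever
# that app.py defines before this hunk; the prompt texts are illustrative.
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

# Rewrites the incoming question into a standalone one using the chat history
contextualize_q_prompt = ChatPromptTemplate.from_messages([
    ("system", "Rephrase the question so it can be understood without the chat history."),
    MessagesPlaceholder("chat_history"),
    ("human", "{input}"),
])
history_aware_retriever = create_history_aware_retriever(llm, retriever, contextualize_q_prompt)

# Answers from the retrieved documents, which are injected as {context}
qa_prompt = ChatPromptTemplate.from_messages([
    ("system", "Answer the question using only the following context:\n\n{context}"),
    MessagesPlaceholder("chat_history"),
    ("human", "{input}"),
])
question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)

With a chain built this way, `rag_chain.invoke({"input": ..., "chat_history": ...})` returns a dict containing "answer" and "context", which matches how the updated `ask` reads `ai_message["answer"]` and `ai_message.get('context', [])`. In the new Blocks UI, `ask` returns three values that Gradio routes to `[chatbot, chat_history, question]`: the pair-formatted history rendered by `gr.Chatbot`, the raw `(role, content)` history held in `gr.State`, and an empty string that clears the textbox after each submit.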