import gradio as gr


def process_question(question, history, uploaded_files):
    # This is where you would integrate your RAG system logic.
    # For this example, we'll just return a placeholder response.
    if uploaded_files is None:
        return history, [("", "Please upload a PDF first.")]
    answer = f"Answer to: {question} (Based on {len(uploaded_files)} uploaded files)"
    history.append((question, answer))
    return history, [("", answer)]


with gr.Blocks(css="""
    body { background-color: #f4f4f4; }
    .gradio-container { border-radius: 10px; box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1); }
    .container { padding: 20px; }
    .question-area { margin-bottom: 20px; }
    .history-button {
        background-color: #4CAF50;
        color: white;
        padding: 10px 20px;
        border: none;
        border-radius: 5px;
        cursor: pointer;
    }
    .history-button:hover { background-color: #46a049; }
    .gradio-chatbot > .wrap > div:last-child > div:last-child { display: none !important; }
""") as demo:
    gr.Markdown("## RAG System Demo")

    with gr.Tabs() as tabs:
        with gr.TabItem("Upload PDFs", id=0) as upload_tab:
            uploaded_files = gr.Files(label="Upload at least one PDF", type="filepath")
            with gr.Row():
                upload_button = gr.Button("Upload")  # Upload button is visible initially
            upload_status = gr.Textbox(visible=False)

        # The Chat tab stays hidden until at least one PDF has been uploaded.
        with gr.TabItem("Chat", id=1, visible=False) as chat_tab:
            question = gr.Textbox(label="Ask a Question", placeholder="Enter your question here...")
            submit_btn = gr.Button("Submit")
            rag_reply = gr.Chatbot(label="RAG Reply")

        with gr.TabItem("History", id=2):
            history_markdown = gr.Markdown("No chat history yet.")

    chat_history = gr.State([])

    def activate_chat_tab():
        # Switch to the Chat tab (id=1) after the upload button is pressed.
        return gr.Tabs.update(selected=1)

    def upload_pdfs(files):
        # Simulate an upload process (replace with your actual logic)
        # time.sleep(2)
        if files:
            return gr.update(visible=True, value="Upload finished!"), gr.update(visible=True)
        else:
            return gr.update(visible=True, value="Please upload at least one PDF file."), gr.update(visible=False)

    # Show the upload status and reveal the Chat tab once files are provided.
    upload_button.click(
        fn=upload_pdfs,
        inputs=uploaded_files,
        outputs=[upload_status, chat_tab],
    )

    # Move focus to the Chat tab.
    upload_button.click(
        fn=activate_chat_tab,
        inputs=None,
        outputs=tabs,
    )

    # Hide the status textbox again after its value has been updated.
    upload_status.change(
        fn=lambda: gr.update(visible=False),
        inputs=None,
        outputs=[upload_status],
    )

    # Run the (placeholder) RAG pipeline and show the reply in the chatbot.
    submit_btn.click(
        fn=process_question,
        inputs=[question, chat_history, uploaded_files],
        outputs=[chat_history, rag_reply],
    )

    # Render the accumulated Q&A pairs on the History tab.
    chat_history.change(
        fn=lambda x: "<br>".join(f"**Question:** {q}<br>**Answer:** {a}" for q, a in x),
        inputs=chat_history,
        outputs=history_markdown,
    )

demo.launch()
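
# ---------------------------------------------------------------------------
# A minimal sketch of what process_question could look like once real retrieval
# is plugged in. It assumes the `pypdf` package is installed and that
# `uploaded_files` is a list of file paths (which matches type="filepath"
# above). Retrieval here is a toy keyword-overlap ranking; a production system
# would swap in an embedding model and a vector store. The names
# `process_question_with_retrieval` and `split_into_chunks` are illustrative,
# not part of the demo above; to use it, pass it as fn= to submit_btn.click
# in place of the placeholder process_question.
# ---------------------------------------------------------------------------
from pypdf import PdfReader


def split_into_chunks(text, chunk_size=500):
    # Split extracted text into fixed-size character chunks.
    return [text[i:i + chunk_size] for i in range(0, len(text), chunk_size)]


def process_question_with_retrieval(question, history, uploaded_files):
    if not uploaded_files:
        return history, [("", "Please upload a PDF first.")]

    # Extract and chunk the text of every uploaded PDF.
    chunks = []
    for path in uploaded_files:
        reader = PdfReader(path)
        text = "\n".join(page.extract_text() or "" for page in reader.pages)
        chunks.extend(split_into_chunks(text))

    # Toy retrieval: rank chunks by how many question words they contain.
    question_words = set(question.lower().split())
    best_chunk = max(
        chunks,
        key=lambda c: len(question_words & set(c.lower().split())),
        default="",
    )

    # Stand-in for the generation step: return the retrieved passage verbatim.
    # A real RAG system would feed the passage and the question to an LLM here.
    answer = (
        f"Most relevant passage:\n{best_chunk[:300]}"
        if best_chunk.strip()
        else "No relevant text found in the uploaded PDFs."
    )
    history.append((question, answer))
    return history, [("", answer)]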