kmkarakaya committed on
Commit
1738d2a
·
verified ·
1 Parent(s): a75fc0f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +92 -77
app.py CHANGED
@@ -1,86 +1,101 @@
1
  import gradio as gr
2
- import os
3
- from typing import List
4
- from datetime import datetime
5
 
6
# Module-level registry of base names of successfully uploaded PDFs.
uploaded_files = []


def upload_pdfs(files: List):
    """
    Save the uploaded PDF files into the ``uploaded_pdfs`` directory and
    record their base names in the module-level ``uploaded_files`` list.

    Parameters
    ----------
    files : list
        File objects as supplied by a Gradio ``File`` component; each is
        expected to expose a ``.name`` attribute holding the path of the
        temporary file on disk.

    Returns
    -------
    str
        A human-readable status message: success with the saved names,
        or an error description.
    """
    global uploaded_files
    saved_files = []
    try:
        os.makedirs("uploaded_pdfs", exist_ok=True)
        for file in files:
            # BUG FIX: gradio supplies an absolute temp path in .name, so
            # joining it verbatim produced a wrong nested path and was a
            # path-traversal risk. Keep only the base name.
            base_name = os.path.basename(file.name)
            if not base_name.endswith(".pdf"):
                return f"Error: {file.name} is not a PDF file."
            file_path = os.path.join("uploaded_pdfs", base_name)
            # BUG FIX: copy from the temp file on disk instead of calling
            # file.read() — gradio file objects are not guaranteed to be
            # open (or positioned at the start) for reading.
            with open(file.name, "rb") as src, open(file_path, "wb") as dst:
                dst.write(src.read())
            saved_files.append(base_name)
            if base_name not in uploaded_files:
                uploaded_files.append(base_name)
        if saved_files:
            return f"Successfully uploaded: {', '.join(saved_files)}."
        else:
            return "No PDF files were uploaded."
    except Exception as e:
        return f"Error uploading files: {str(e)}"
 
 
 
 
33
 
34
# Running log of timestamped Q/A entries for the session.
chat_history = []


def chat_with_rag(question: str):
    """Return a placeholder answer for *question* and log the exchange."""
    global chat_history
    # Placeholder response. Replace this with actual logic to interact with
    # the RAG system.
    answer = f"The answer to your question '{question}' is: This is a placeholder response."
    stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    chat_history.append(f"[{stamp}] Q: {question}\nA: {answer}")
    return answer


def show_chat_history():
    """Return the full chat history, or a notice when it is empty."""
    global chat_history
    return "\n\n".join(chat_history) if chat_history else "No chat history available."
55
 
56
def create_gradio_interface():
    """
    Build and return the Gradio Blocks UI.

    Three tabs: File Upload (PDF upload + status + list of uploaded files),
    Chat (question box wired to ``chat_with_rag``), and History (button
    showing ``show_chat_history`` output).

    Returns:
        gr.Blocks: the assembled demo, ready for ``.launch()``.
    """
    with gr.Blocks(title="RAG System UI", theme=gr.themes.Soft()) as demo:
        gr.Markdown("## RAG System Interface")

        with gr.Tab("File Upload"):
            gr.Markdown("### Upload PDF Files")
            file_uploader = gr.File(label="Upload PDFs", file_count="multiple")
            upload_button = gr.Button("Upload PDFs")
            upload_status = gr.Textbox(label="Upload Status", interactive=False)
            upload_button.click(upload_pdfs, inputs=file_uploader, outputs=upload_status)
            gr.Markdown("#### Uploaded Files")
            uploaded_files_list = gr.Textbox(label="Uploaded Files", interactive=False)
            # BUG FIX: the original called uploaded_files_list.update(value=lambda: ...)
            # at build time, which is a no-op (and passes a lambda as a value).
            # Refresh the list whenever the upload button is clicked instead.
            upload_button.click(
                lambda: "\n".join(uploaded_files),
                inputs=None,
                outputs=uploaded_files_list,
            )

        with gr.Tab("Chat"):
            gr.Markdown("### Ask a Question")
            chat_input = gr.Textbox(label="Your Question")
            chat_button = gr.Button("Get Answer")
            chat_output = gr.Textbox(label="Answer", interactive=False)
            chat_button.click(chat_with_rag, inputs=chat_input, outputs=chat_output)

        with gr.Tab("History"):
            gr.Markdown("### Chat History")
            history_button = gr.Button("Show Chat History")
            history_output = gr.Textbox(label="Chat History", interactive=False)
            history_button.click(show_chat_history, outputs=history_output)

    return demo
84
 
85
- if __name__ == "__main__":
86
- create_gradio_interface().launch()
 
1
  import gradio as gr
 
 
 
2
 
3
def process_question(question, history, uploaded_files):
    """
    Produce an answer for *question* and update the chat history.

    This is where you would integrate your RAG system logic; for this
    example a placeholder answer is returned.

    Parameters:
        question (str): the user's question.
        history (list): list of (question, answer) tuples; mutated in place.
        uploaded_files: value of the Files component — a list of file paths,
            or None when nothing has been uploaded.

    Returns:
        tuple: (history, chatbot_pairs) where chatbot_pairs is the list of
        (user, bot) message pairs to display in the Chatbot component.
    """
    # BUG FIX: an empty selection arrives as [] (not only None); treat any
    # falsy value as "nothing uploaded yet".
    if not uploaded_files:
        return history, [("", "Please upload a PDF first.")]
    answer = f"Answer to: {question} (Based on {len(uploaded_files)} uploaded files)"
    history.append((question, answer))
    return history, [("", answer)]
11
 
12
+ with gr.Blocks(css="""
13
+ body {
14
+ background-color: #f4f4f4;
15
+ }
16
+ .gradio-container {
17
+ border-radius: 10px;
18
+ box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
19
+ }
20
+ .container {
21
+ padding: 20px;
22
+ }
23
+ .question-area {
24
+ margin-bottom: 20px;
25
+ }
26
+ .history-button {
27
+ background-color: #4CAF50;
28
+ color: white;
29
+ padding: 10px 20px;
30
+ border: none;
31
+ border-radius: 5px;
32
+ cursor: pointer;
33
+ }
34
+ .history-button:hover {
35
+ background-color: #46a049;
36
+ }
37
+ .gradio-chatbot > .wrap > div:last-child > div:last-child {
38
+ display: none !important;
39
+ }
40
+ """) as demo:
41
+ gr.Markdown("## RAG System Demo")
42
+
43
+ with gr.Tabs() as tabs:
44
+ with gr.TabItem("Upload PDFs") as upload_tab:
45
+ uploaded_files = gr.Files(label="Upload at least one PDF", type="filepath")
46
+ with gr.Row():
47
+ upload_button = gr.Button("Upload") # Upload button is visible initially
48
+ upload_status = gr.Textbox(visible=False)
49
+
50
+ with gr.TabItem("Chat", visible=False) as chat_tab:
51
+ question = gr.Textbox(label="Ask a Question", placeholder="Enter your question here...")
52
+ submit_btn = gr.Button("Submit")
53
+ rag_reply = gr.Chatbot(label="RAG Reply")
54
+
55
+ with gr.TabItem("History"):
56
+ history_markdown = gr.Markdown("No chat history yet.")
57
+
58
+ chat_history = gr.State([])
59
+
60
+ def activate_chat_tab():
61
+ return gr.Tabs.update(selected=1)
62
+
63
+ def upload_pdfs(files):
64
+ # Simulate an upload process (replace with your actual logic)
65
+ # time.sleep(2)
66
+ if files:
67
+ return gr.update(visible=True, value="Upload finished!"), gr.update(visible=True)
68
  else:
69
+ return gr.update(visible=True, value="Please upload at least one PDF file."), gr.update(visible=False)
70
+
71
+ upload_button.click(
72
+ upload_pdfs,
73
+ inputs=uploaded_files,
74
+ outputs=[upload_status, chat_tab],
75
+ )
76
 
77
+ upload_button.click(
78
+ fn=activate_chat_tab,
79
+ inputs=None,
80
+ outputs=tabs,
81
+ )
82
 
83
+ upload_status.change(
84
+ fn=lambda: gr.update(visible=False),
85
+ inputs=None,
86
+ outputs=[upload_status],
87
+ )
 
 
 
 
88
 
89
+ submit_btn.click(
90
+ fn=process_question,
91
+ inputs=[question, chat_history, uploaded_files],
92
+ outputs=[chat_history, rag_reply],
93
+ )
 
 
 
 
94
 
95
+ chat_history.change(
96
+ fn=lambda x: gr.Markdown("<br>".join([f"**Question:** {q}<br>**Answer:** {a}" for q, a in x])),
97
+ inputs=chat_history,
98
+ outputs=history_markdown,
99
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
 
101
+ demo.launch()