Spaces: Build error

dafajudin committed · Commit f2f8465 · 1 Parent(s): e260ed0

update code

Browse files:
- app.py +15 -22
- requirements.txt +1 -0
app.py CHANGED

@@ -1,6 +1,5 @@
 import gradio as gr
 from transformers import pipeline
-import plotly.graph_objs as go
 
 # Load the Visual QA model
 generator = pipeline("visual-question-answering", model="jihadzakki/blip1-medvqa")
@@ -9,21 +8,11 @@ def format_answer(image, question, history):
     try:
         result = generator(image, question, max_new_tokens=50)
         predicted_answer = result[0].get('answer', 'No answer found')
-        history.append(f"Question: {question} | Answer: {predicted_answer}")
-        # Create a simple chart for demonstration purposes
-        chart = create_chart(predicted_answer)
-        return f"Predicted Answer: {predicted_answer}", chart, history
-    except Exception as e:
-        return f"Error: {str(e)}", None, history
-
-def create_chart(predicted_answer):
-    # Example chart data
-    labels = ['Positive', 'Negative']
-    values = [1, 0] if predicted_answer.lower() == 'yes' else [0, 1]
-    colors = ['blue', 'red'] if predicted_answer.lower() == 'yes' else ['red', 'blue']
+        history.append((image, f"Question: {question} | Answer: {predicted_answer}"))
 
-
-
+        return f"Predicted Answer: {predicted_answer}", history
+    except Exception as e:
+        return f"Error: {str(e)}", history
 
 def switch_theme(mode):
     if mode == "Light Mode":
@@ -34,6 +23,12 @@ def switch_theme(mode):
 def save_feedback(feedback):
     return "Thank you for your feedback!"
 
+def display_history(history):
+    log_entries = []
+    for img, text in history:
+        log_entries.append((img, text))
+    return log_entries
+
 # Build the Visual QA application using Gradio with improvements
 with gr.Blocks(
     theme=gr.themes.Soft(
@@ -52,24 +47,22 @@ with gr.Blocks(
 
     with gr.Column():
         answer_output = gr.Textbox(label="Result Prediction")
-        chart_output = gr.Plot(label="Interactive Chart")
 
     history_state = gr.State([])  # Initialize the history state
 
     submit_button.click(
         format_answer,
         inputs=[image_input, question_input, history_state],
-        outputs=[answer_output, chart_output, history_state],
+        outputs=[answer_output, history_state],
        show_progress=True
     )
 
    with gr.Row():
-
-        gr.Markdown("**Log of previous interactions:**")
+        history_gallery = gr.Gallery(label="History Log", elem_id="history_log")
     submit_button.click(
-
+        display_history,
         inputs=[history_state],
-        outputs=[
+        outputs=[history_gallery]
     )
 
     with gr.Accordion("Help", open=False):
@@ -99,4 +92,4 @@ with gr.Blocks(
         outputs=[feedback_input]
     )
 
-VisualQAApp.launch(share=True, server_name="0.0.0.0", server_port=8080, debug=True)
+VisualQAApp.launch(share=True, server_name="0.0.0.0", server_port=8080, debug=True)
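For context, the new history log leans on gr.Gallery accepting a list of (image, caption) tuples, which is the shape display_history returns. Below is a minimal standalone sketch of that pattern; it is not part of this commit, and the component and function names (add_entry, image_in, log_gallery) are hypothetical:

import gradio as gr

def add_entry(image, question, history):
    # Append an (image, caption) tuple; gr.Gallery renders the caption under the thumbnail.
    history = history + [(image, f"Question: {question}")]
    return history, history  # first value updates the state, second feeds the gallery

with gr.Blocks() as demo:
    image_in = gr.Image(type="pil", label="Image")
    question_in = gr.Textbox(label="Question")
    log_gallery = gr.Gallery(label="History Log")
    history_state = gr.State([])
    gr.Button("Add to log").click(
        add_entry,
        inputs=[image_in, question_in, history_state],
        outputs=[history_state, log_gallery],
    )

demo.launch()

This mirrors the commit's wiring, where one submit_button.click call updates history_state and a second click handler pushes it into history_gallery.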
requirements.txt CHANGED

@@ -3,3 +3,4 @@ gradio
 plotly
 torch
 tensorflow
+keras
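As a quick smoke test of the updated dependency set, including the newly added keras, the declared packages can be imported and their versions printed. This is only a sketch and assumes the packages listed above are installed in the Space's runtime:

# Import smoke test for the declared dependencies.
import gradio, plotly, torch, tensorflow, keras

for mod in (gradio, plotly, torch, tensorflow, keras):
    print(mod.__name__, getattr(mod, "__version__", "unknown"))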