Update app.py
app.py CHANGED

@@ -49,7 +49,6 @@ def predict(image, input_text, input_context):
 
     # Generate next layer prompt (WIP)
    chained_prompt = prompt_format.format(input_text, output_text, input_context)
-
 
 
    return output_text
@@ -60,8 +59,8 @@ input_prompt = gr.components.Textbox(label="Input Prompt")
 added_context = gr.components.Textbox(label="Input Context")
 
 model_output = gr.components.Textbox(label="Model Output")
-examples = [["chart_example_1.png", "Describe the trend of the mortality rates for children before age 5", ""],
-            ["chart_example_2.png", "What is the share of respondants who prefer Facebook Messenger in the 30-59 age group?"]]
+examples = [["chart_example_1.png", "Describe the trend of the mortality rates for children before age 5", "The country of interest is Bahrain."],
+            ["chart_example_2.png", "What is the share of respondants who prefer Facebook Messenger in the 30-59 age group?", "This data needs to be cleaned"]]
 
 title = "Gradio Demo for ChartGemma + llama3 context"
 interface = gr.Interface(fn=predict,
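For context, a minimal sketch of how these pieces fit together: gr.Interface passes each example row positionally to the input components, so once predict takes (image, input_text, input_context), every example row needs three entries, which is what this commit adds. Only the Textbox components, the examples, and the title come from the diff; the Image component, the stub predict body, and the launch call below are assumptions for illustration, not the real ChartGemma + llama3 pipeline.

import gradio as gr

def predict(image, input_text, input_context):
    # Stub standing in for the actual ChartGemma + llama3 inference in app.py.
    output_text = f"(stub) question={input_text!r}, context={input_context!r}"
    return output_text

# Assumed Image input; the diff only shows the Textbox components.
image_input = gr.components.Image(type="filepath", label="Chart Image")
input_prompt = gr.components.Textbox(label="Input Prompt")
added_context = gr.components.Textbox(label="Input Context")
model_output = gr.components.Textbox(label="Model Output")

# Each row supplies one value per input component: image path, prompt, context.
# The example PNGs are assumed to sit next to the script.
examples = [["chart_example_1.png", "Describe the trend of the mortality rates for children before age 5", "The country of interest is Bahrain."],
            ["chart_example_2.png", "What is the share of respondants who prefer Facebook Messenger in the 30-59 age group?", "This data needs to be cleaned"]]

title = "Gradio Demo for ChartGemma + llama3 context"
interface = gr.Interface(fn=predict,
                         inputs=[image_input, input_prompt, added_context],
                         outputs=model_output,
                         examples=examples,
                         title=title)

if __name__ == "__main__":
    interface.launch()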