liujch1998 committed · Commit ebb18a8 · 1 Parent(s): a1f4642
Update description

app.py CHANGED
@@ -114,6 +114,13 @@ def predict(question, kg_model, qa_model, max_input_len, max_output_len, m, top_p
     output += f'Knowledge selected to make the prediction: {result["selected_knowledge"]}\n'
     return output
 
+description = '''This is a demo for the paper, <a href="https://arxiv.org/pdf/2210.03078.pdf" target="_blank">Rainier: Reinforced Knowledge Introspector for Commonsense Question Answering</a>, presented at EMNLP 2022.
+[<a href="https://github.com/liujch1998/rainier" target="_blank">Code</a>] [<a href="https://huggingface.co/liujch1998/rainier-large" target="_blank">Model</a>]
+This demo is made & maintained by <a href="https://liujch1998.github.io/" target="_blank">Jiacheng (Gary) Liu</a>.
+
+Rainier is a knowledge-generating model that enhances the commonsense QA capability of a QA model.
+To try this model, select an example question, or write your own question in the suggested format.'''
+
 examples = [
     'If the mass of an object gets bigger what will happen to the amount of matter contained within it? \\n (A) gets bigger (B) gets smaller',
     'What would vinyl be an odd thing to replace? \\n (A) pants (B) record albums (C) record store (D) cheese (E) wallpaper',
@@ -123,22 +130,23 @@ examples = [
     'Causes bad breath and frightens blood-suckers \\n (A) tuna (B) iron (C) trash (D) garlic (E) pubs',
 ]
 
-input_question = gr.Dropdown(
-    choices=examples,
-    label='Question:',
+input_question = gr.Dropdown(choices=examples, label='Question:',
     info='A multiple-choice commonsense question. Please follow the UnifiedQA input format: "{question} \\n (A) ... (B) ... (C) ..."',
 )
 input_kg_model = gr.Textbox(label='Knowledge generation model:', value='liujch1998/rainier-large', interactive=False)
 input_qa_model = gr.Textbox(label='QA model:', value='allenai/unifiedqa-t5-large', interactive=False)
-input_max_input_len = gr.Number(label='Max question
-input_max_output_len = gr.Number(label='Max knowledge
-input_m = gr.Slider(label='Number of generated knowledges:', value=10, minimum=1, maximum=20, step=1
-
+input_max_input_len = gr.Number(label='Max number of tokens in question:', value=256, precision=0)
+input_max_output_len = gr.Number(label='Max number of tokens in knowledge:', value=32, precision=0)
+input_m = gr.Slider(label='Number of generated knowledges:', value=10, minimum=1, maximum=20, step=1,
+    info='The actual number of generated knowledges may be less than this number due to possible duplicates.',
+)
+input_top_p = gr.Slider(label='top_p for knowledge generation:', value=0.5, minimum=0.0, maximum=1.0, step=0.05)
 output_text = gr.Textbox(label='Output', interactive=False)
 
 gr.Interface(
     fn=predict,
     inputs=[input_question, input_kg_model, input_qa_model, input_max_input_len, input_max_output_len, input_m, input_top_p],
     outputs=output_text,
-    title="Rainier",
+    title="Rainier Demo",
+    description=description,
 ).launch()
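For orientation, the sketch below outlines the generate-then-answer loop that the `predict` function referenced in this diff presumably implements, and how the controls wired up here (`m`, `top_p`, `max_input_len`, `max_output_len`) would feed into it: sample several knowledge statements from `liujch1998/rainier-large` with nucleus sampling, prepend each to the UnifiedQA-formatted question, answer with `allenai/unifiedqa-t5-large`, and keep the knowledge whose answer scores highest (the "Knowledge selected to make the prediction" line in the output). This is a minimal sketch, not the Space's actual code; the function name `predict_sketch`, the sequence-log-probability scoring rule, and the exact question/knowledge concatenation format are assumptions.

```python
# Minimal sketch (not the Space's actual predict()) of the knowledge-introspection
# pipeline behind the UI above. Names, scoring, and the question/knowledge
# concatenation format are illustrative assumptions.
from transformers import T5ForConditionalGeneration, T5Tokenizer

kg_name, qa_name = 'liujch1998/rainier-large', 'allenai/unifiedqa-t5-large'
kg_tok = T5Tokenizer.from_pretrained(kg_name)
kg_model = T5ForConditionalGeneration.from_pretrained(kg_name)
qa_tok = T5Tokenizer.from_pretrained(qa_name)
qa_model = T5ForConditionalGeneration.from_pretrained(qa_name)

def predict_sketch(question, m=10, top_p=0.5, max_input_len=256, max_output_len=32):
    # 1) Sample m knowledge statements with nucleus sampling; drop duplicates,
    #    so the effective number may be smaller than m (as the Slider info notes).
    ids = kg_tok(question, return_tensors='pt', truncation=True,
                 max_length=max_input_len).input_ids
    gen = kg_model.generate(ids, do_sample=True, top_p=top_p,
                            num_return_sequences=m, max_length=max_output_len)
    knowledges = list(dict.fromkeys(kg_tok.batch_decode(gen, skip_special_tokens=True)))

    # 2) Answer the bare question and each knowledge-augmented question with UnifiedQA,
    #    keeping the (knowledge, answer) pair whose generated answer scores highest.
    best_knowledge, best_answer, best_score = None, None, float('-inf')
    for k in [''] + knowledges:
        prompt = f'{question} \\n {k}' if k else question  # concatenation format is an assumption
        qa_ids = qa_tok(prompt, return_tensors='pt', truncation=True,
                        max_length=max_input_len).input_ids
        out = qa_model.generate(qa_ids, max_length=max_output_len,
                                output_scores=True, return_dict_in_generate=True)
        answer = qa_tok.decode(out.sequences[0], skip_special_tokens=True)
        score = qa_model.compute_transition_scores(
            out.sequences, out.scores, normalize_logits=True).sum().item()
        if score > best_score:
            best_knowledge, best_answer, best_score = k, answer, score
    return best_answer, best_knowledge  # best_knowledge ~ result["selected_knowledge"]
```

Under this reading, `input_m` and `input_top_p` govern the sampling in step 1, `input_max_input_len` and `input_max_output_len` bound the tokenized question and the generated knowledge, and the returned knowledge is what the output string reports as the selected knowledge.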