Spaces: Sleeping
kz209 committed • Commit 64df9ac
1 Parent(s): 3f9babb

update

Browse files
pages/arena.py: +23 -3
pages/arena.py CHANGED

@@ -42,12 +42,12 @@ Once the streaming is complete, you can choose the best response.❤️"""
     )

     random.shuffle(prompts)
-
+    random_selected_prompts = prompts[:3]

     with gr.Row():
-        columns = [gr.Textbox(label=f"Prompt {i+1}", lines=10) for i in range(len(
+        columns = [gr.Textbox(label=f"Prompt {i+1}", lines=10) for i in range(len(random_selected_prompts))]

-    content_list = [prompt['prompt'] + '\n{' + data_textbox.value + '}\n\nsummary:' for prompt in
+    content_list = [prompt['prompt'] + '\n{' + data_textbox.value + '}\n\nsummary:' for prompt in random_selected_prompts]
     model = get_model_batch_generation("Qwen/Qwen2-1.5B-Instruct")

     def start_streaming():
@@ -71,6 +71,26 @@ Once the streaming is complete, you can choose the best response.❤️"""

     submit_button.click(fn=lock_selection, inputs=choice, outputs=[output, output, choice, submit_button])

+    if choice == "Response 1":
+        prompt_id = random_selected_prompts[0]
+    elif choice == "Response 2":
+        prompt_id = random_selected_prompts[1]
+    elif choice == "Response 3":
+        prompt_id = random_selected_prompts[2]
+    else:
+        raise ValueError(f"No corresponding response of {choice}")
+
+    for i in range(len(prompts)):
+        if prompts[i]['id'] == prompt_id:
+            prompts[i]["metric"]["winning_number"] += 1
+            break
+
+    if i == len(prompts)-1:
+        raise ValueError(f"No prompt of id {prompt_id}")
+
+    with open("prompt/prompt.json", "w") as f:
+        json.dump(prompts, f)
+
     return demo

 if __name__ == "__main__":
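The block added in the second hunk maps the voter's choice ("Response 1/2/3") back to one of the three displayed prompts, bumps that prompt's metric.winning_number, and writes the whole prompt list back to prompt/prompt.json. Below is a minimal standalone sketch of that bookkeeping step, assuming prompt.json holds a list of dicts with an "id" field and a "metric" dict, as the diff suggests; the record_win helper, its signature, and its use outside the Gradio callback are illustrative and not part of the commit.

import json

def record_win(prompt_id, path="prompt/prompt.json"):
    # Load the prompt list; each entry is assumed to look like
    # {"id": ..., "prompt": ..., "metric": {"winning_number": int}}.
    with open(path) as f:
        prompts = json.load(f)

    # Find the winning prompt and increment its counter.
    for entry in prompts:
        if entry["id"] == prompt_id:
            entry["metric"]["winning_number"] += 1
            break
    else:
        # Loop finished without a break: no prompt matched the given id.
        raise ValueError(f"No prompt of id {prompt_id}")

    # Persist the updated counters.
    with open(path, "w") as f:
        json.dump(prompts, f)

In the app this logic would typically run inside the submit handler, called with the id of whichever entry in random_selected_prompts corresponds to the selected choice; the for/else keeps the "not found" check independent of the loop index.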