xuanricheng committed on
Commit 8dad4ec · verified · 1 Parent(s): 7151446

Add results for OpenBuddy/openbuddy-mistral-22b-v21.1-32k

OpenBuddy/openbuddy-mistral-22b-v21.1-32k/results_2024-05-27T09-47-38.240960.json ADDED
@@ -0,0 +1,173 @@
+ {
+     "config_general": {
+         "model_name": "OpenBuddy/openbuddy-mistral-22b-v21.1-32k",
+         "model_dtype": "float16",
+         "model_size": 0
+     },
+     "results": {
+         "harness-c_arc_challenge": {
+             "acc_norm": 25.68,
+             "acc_stderr": 0,
+             "c_arc_challenge_25shot_acc": 21.42,
+             "c_arc_challenge_25shot_acc_norm": 25.68
+         },
+         "harness-c_gsm8k": {
+             "acc": 0.0,
+             "acc_stderr": 0,
+             "c_gsm8k_5shot_acc": 0.0
+         },
+         "harness-c_hellaswag": {
+             "acc_norm": 26.7,
+             "acc_stderr": 0,
+             "c_hellaswag_10shot_acc": 26.07,
+             "c_hellaswag_10shot_acc_norm": 26.7
+         },
+         "harness-c-sem-v2": {
+             "acc": 24.2375,
+             "acc_stderr": 0,
+             "c_sem_v2-LLSRC_5shot_acc": 23.17,
+             "c_sem_v2-SLPWC_5shot_acc": 23.71,
+             "c_sem_v2-SLRFC_5shot_acc": 26.76,
+             "c_sem_v2-SLSRC_5shot_acc": 23.31,
+             "c_sem_v2-LLSRC_5shot_acc_norm": 23.17,
+             "c_sem_v2-SLPWC_5shot_acc_norm": 23.71,
+             "c_sem_v2-SLRFC_5shot_acc_norm": 26.76,
+             "c_sem_v2-SLSRC_5shot_acc_norm": 23.31
+         },
+         "harness-c_truthfulqa_mc": {
+             "mc2": 51.24,
+             "acc_stderr": 0,
+             "c_truthfulqa_mc_0shot_mc1": 25.21,
+             "c_truthfulqa_mc_0shot_mc2": 51.24
+         },
+         "harness-c_winogrande": {
+             "acc": 51.07,
+             "acc_stderr": 0,
+             "c_winogrande_0shot_acc": 51.07
+         },
+         "harness-cmmlu": {
+             "acc_norm": 25.48,
+             "acc_stderr": 0,
+             "cmmlu-anatomy_5shot_acc": 28.89,
+             "cmmlu_fullavg_5shot_acc": 25.48,
+             "cmmlu-virology_5shot_acc": 28.92,
+             "cmmlu-astronomy_5shot_acc": 19.74,
+             "cmmlu-marketing_5shot_acc": 24.79,
+             "cmmlu-nutrition_5shot_acc": 23.53,
+             "cmmlu-sociology_5shot_acc": 24.38,
+             "cmmlu-management_5shot_acc": 17.48,
+             "cmmlu-philosophy_5shot_acc": 26.37,
+             "cmmlu-prehistory_5shot_acc": 21.6,
+             "cmmlu-human_aging_5shot_acc": 39.91,
+             "cmmlu-econometrics_5shot_acc": 22.81,
+             "cmmlu-formal_logic_5shot_acc": 19.2,
+             "cmmlu-global_facts_5shot_acc": 31.0,
+             "cmmlu-jurisprudence_5shot_acc": 27.78,
+             "cmmlu-miscellaneous_5shot_acc": 28.74,
+             "cmmlu-moral_disputes_5shot_acc": 25.14,
+             "cmmlu-business_ethics_5shot_acc": 28.0,
+             "cmmlu-college_biology_5shot_acc": 22.22,
+             "cmmlu-college_physics_5shot_acc": 20.59,
+             "cmmlu-human_sexuality_5shot_acc": 26.72,
+             "cmmlu-moral_scenarios_5shot_acc": 21.68,
+             "cmmlu-world_religions_5shot_acc": 22.22,
+             "cmmlu-abstract_algebra_5shot_acc": 27.0,
+             "cmmlu-college_medicine_5shot_acc": 20.23,
+             "cmmlu-machine_learning_5shot_acc": 26.79,
+             "cmmlu-medical_genetics_5shot_acc": 30.0,
+             "cmmlu-professional_law_5shot_acc": 23.92,
+             "cmmlu-public_relations_5shot_acc": 23.64,
+             "cmmlu-security_studies_5shot_acc": 17.96,
+             "cmmlu-college_chemistry_5shot_acc": 18.0,
+             "cmmlu-computer_security_5shot_acc": 24.0,
+             "cmmlu-international_law_5shot_acc": 28.1,
+             "cmmlu-logical_fallacies_5shot_acc": 25.15,
+             "cmmlu-us_foreign_policy_5shot_acc": 30.0,
+             "cmmlu-clinical_knowledge_5shot_acc": 24.15,
+             "cmmlu-conceptual_physics_5shot_acc": 27.23,
+             "cmmlu-college_mathematics_5shot_acc": 26.0,
+             "cmmlu-high_school_biology_5shot_acc": 30.32,
+             "cmmlu-high_school_physics_5shot_acc": 23.84,
+             "cmmlu-high_school_chemistry_5shot_acc": 25.12,
+             "cmmlu-high_school_geography_5shot_acc": 22.73,
+             "cmmlu-professional_medicine_5shot_acc": 44.85,
+             "cmmlu-electrical_engineering_5shot_acc": 22.07,
+             "cmmlu-elementary_mathematics_5shot_acc": 25.66,
+             "cmmlu-high_school_psychology_5shot_acc": 23.67,
+             "cmmlu-high_school_statistics_5shot_acc": 47.22,
+             "cmmlu-high_school_us_history_5shot_acc": 23.53,
+             "cmmlu-high_school_mathematics_5shot_acc": 27.41,
+             "cmmlu-professional_accounting_5shot_acc": 25.89,
+             "cmmlu-professional_psychology_5shot_acc": 25.16,
+             "cmmlu-college_computer_science_5shot_acc": 20.0,
+             "cmmlu-high_school_world_history_5shot_acc": 26.16,
+             "cmmlu-high_school_macroeconomics_5shot_acc": 22.05,
+             "cmmlu-high_school_microeconomics_5shot_acc": 23.95,
+             "cmmlu-high_school_computer_science_5shot_acc": 21.0,
+             "cmmlu-high_school_european_history_5shot_acc": 23.03,
+             "cmmlu-high_school_government_and_politics_5shot_acc": 24.87,
+             "cmmlu-anatomy_5shot_acc_norm": 28.89,
+             "cmmlu_fullavg_5shot_acc_norm": 25.48,
+             "cmmlu-virology_5shot_acc_norm": 28.92,
+             "cmmlu-astronomy_5shot_acc_norm": 19.74,
+             "cmmlu-marketing_5shot_acc_norm": 24.79,
+             "cmmlu-nutrition_5shot_acc_norm": 23.53,
+             "cmmlu-sociology_5shot_acc_norm": 24.38,
+             "cmmlu-management_5shot_acc_norm": 17.48,
+             "cmmlu-philosophy_5shot_acc_norm": 26.37,
+             "cmmlu-prehistory_5shot_acc_norm": 21.6,
+             "cmmlu-human_aging_5shot_acc_norm": 39.91,
+             "cmmlu-econometrics_5shot_acc_norm": 22.81,
+             "cmmlu-formal_logic_5shot_acc_norm": 19.2,
+             "cmmlu-global_facts_5shot_acc_norm": 31.0,
+             "cmmlu-jurisprudence_5shot_acc_norm": 27.78,
+             "cmmlu-miscellaneous_5shot_acc_norm": 28.74,
+             "cmmlu-moral_disputes_5shot_acc_norm": 25.14,
+             "cmmlu-business_ethics_5shot_acc_norm": 28.0,
+             "cmmlu-college_biology_5shot_acc_norm": 22.22,
+             "cmmlu-college_physics_5shot_acc_norm": 20.59,
+             "cmmlu-human_sexuality_5shot_acc_norm": 26.72,
+             "cmmlu-moral_scenarios_5shot_acc_norm": 21.68,
+             "cmmlu-world_religions_5shot_acc_norm": 22.22,
+             "cmmlu-abstract_algebra_5shot_acc_norm": 27.0,
+             "cmmlu-college_medicine_5shot_acc_norm": 20.23,
+             "cmmlu-machine_learning_5shot_acc_norm": 26.79,
+             "cmmlu-medical_genetics_5shot_acc_norm": 30.0,
+             "cmmlu-professional_law_5shot_acc_norm": 23.92,
+             "cmmlu-public_relations_5shot_acc_norm": 23.64,
+             "cmmlu-security_studies_5shot_acc_norm": 17.96,
+             "cmmlu-college_chemistry_5shot_acc_norm": 18.0,
+             "cmmlu-computer_security_5shot_acc_norm": 24.0,
+             "cmmlu-international_law_5shot_acc_norm": 28.1,
+             "cmmlu-logical_fallacies_5shot_acc_norm": 25.15,
+             "cmmlu-us_foreign_policy_5shot_acc_norm": 30.0,
+             "cmmlu-clinical_knowledge_5shot_acc_norm": 24.15,
+             "cmmlu-conceptual_physics_5shot_acc_norm": 27.23,
+             "cmmlu-college_mathematics_5shot_acc_norm": 26.0,
+             "cmmlu-high_school_biology_5shot_acc_norm": 30.32,
+             "cmmlu-high_school_physics_5shot_acc_norm": 23.84,
+             "cmmlu-high_school_chemistry_5shot_acc_norm": 25.12,
+             "cmmlu-high_school_geography_5shot_acc_norm": 22.73,
+             "cmmlu-professional_medicine_5shot_acc_norm": 44.85,
+             "cmmlu-electrical_engineering_5shot_acc_norm": 22.07,
+             "cmmlu-elementary_mathematics_5shot_acc_norm": 25.66,
+             "cmmlu-high_school_psychology_5shot_acc_norm": 23.67,
+             "cmmlu-high_school_statistics_5shot_acc_norm": 47.22,
+             "cmmlu-high_school_us_history_5shot_acc_norm": 23.53,
+             "cmmlu-high_school_mathematics_5shot_acc_norm": 27.41,
+             "cmmlu-professional_accounting_5shot_acc_norm": 25.89,
+             "cmmlu-professional_psychology_5shot_acc_norm": 25.16,
+             "cmmlu-college_computer_science_5shot_acc_norm": 20.0,
+             "cmmlu-high_school_world_history_5shot_acc_norm": 26.16,
+             "cmmlu-high_school_macroeconomics_5shot_acc_norm": 22.05,
+             "cmmlu-high_school_microeconomics_5shot_acc_norm": 23.95,
+             "cmmlu-high_school_computer_science_5shot_acc_norm": 21.0,
+             "cmmlu-high_school_european_history_5shot_acc_norm": 23.03,
+             "cmmlu-high_school_government_and_politics_5shot_acc_norm": 24.87
+         }
+     },
+     "versions": {},
+     "config_tasks": {},
+     "summary_tasks": {},
+     "summary_general": {}
+ }
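
The added file groups scores by harness under "results": each task object carries a headline metric ("acc", "acc_norm", or "mc2" depending on the task) alongside per-subtask breakdowns. A minimal sketch of loading the file and printing the headline score per task (the path is the file added in this commit; the metric fallback order in the loop is an assumption for illustration, not part of the schema):

```python
import json

# Results file added in this commit.
path = "OpenBuddy/openbuddy-mistral-22b-v21.1-32k/results_2024-05-27T09-47-38.240960.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])

# Each harness entry stores its headline score under "acc_norm", "acc",
# or "mc2"; try them in that (assumed) order and print whichever exists.
for task, scores in data["results"].items():
    headline = scores.get("acc_norm", scores.get("acc", scores.get("mc2")))
    print(f"{task}: {headline}")
```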