results/eldogbbhed/Peagle-9b/results_2024-05-22T13-35-26.955439.json
{
"config_general": {
"model_name": "eldogbbhed/Peagle-9b",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 51.79,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 46.5,
"c_arc_challenge_25shot_acc_norm": 51.79
},
"harness-c_gsm8k": {
"acc": 43.82,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 43.82
},
"harness-c_hellaswag": {
"acc_norm": 58.18,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 44.06,
"c_hellaswag_10shot_acc_norm": 58.18
},
"harness-c-sem-v2": {
"acc": 60.285,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 62.16,
"c_sem_v2-SLPWC_5shot_acc": 62.14,
"c_sem_v2-SLRFC_5shot_acc": 43.6,
"c_sem_v2-SLSRC_5shot_acc": 73.24,
"c_sem_v2-LLSRC_5shot_acc_norm": 62.16,
"c_sem_v2-SLPWC_5shot_acc_norm": 62.14,
"c_sem_v2-SLRFC_5shot_acc_norm": 43.6,
"c_sem_v2-SLSRC_5shot_acc_norm": 73.24
},
"harness-c_truthfulqa_mc": {
"mc2": 59.78,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 39.78,
"c_truthfulqa_mc_0shot_mc2": 59.78
},
"harness-c_winogrande": {
"acc": 61.33,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 61.33
},
"harness-cmmlu": {
"acc_norm": 49.17,
"acc_stderr": 0,
"cmmlu_fullavg_5shot_acc": 49.17,
"cmmlu-virology_5shot_acc": 36.75,
"cmmlu-marketing_5shot_acc": 72.22,
"cmmlu-nutrition_5shot_acc": 50.0,
"cmmlu-sociology_5shot_acc": 65.67,
"cmmlu-management_5shot_acc": 62.14,
"cmmlu-philosophy_5shot_acc": 43.41,
"cmmlu-prehistory_5shot_acc": 42.28,
"cmmlu-human_aging_5shot_acc": 47.98,
"cmmlu-formal_logic_5shot_acc": 42.4,
"cmmlu-global_facts_5shot_acc": 40.0,
"cmmlu-jurisprudence_5shot_acc": 53.7,
"cmmlu-miscellaneous_5shot_acc": 50.7,
"cmmlu-moral_disputes_5shot_acc": 53.76,
"cmmlu-human_sexuality_5shot_acc": 45.04,
"cmmlu-moral_scenarios_5shot_acc": 28.38,
"cmmlu-world_religions_5shot_acc": 46.2,
"cmmlu-machine_learning_5shot_acc": 42.86,
"cmmlu-medical_genetics_5shot_acc": 44.0,
"cmmlu-professional_law_5shot_acc": 36.31,
"cmmlu-public_relations_5shot_acc": 53.64,
"cmmlu-security_studies_5shot_acc": 61.63,
"cmmlu-international_law_5shot_acc": 59.5,
"cmmlu-logical_fallacies_5shot_acc": 49.69,
"cmmlu-us_foreign_policy_5shot_acc": 52.0,
"cmmlu-high_school_biology_5shot_acc": 49.68,
"cmmlu-high_school_physics_5shot_acc": 39.74,
"cmmlu-high_school_chemistry_5shot_acc": 37.44,
"cmmlu-high_school_geography_5shot_acc": 58.08,
"cmmlu-professional_medicine_5shot_acc": 39.71,
"cmmlu-elementary_mathematics_5shot_acc": 33.86,
"cmmlu-high_school_psychology_5shot_acc": 56.7,
"cmmlu-high_school_statistics_5shot_acc": 46.76,
"cmmlu-high_school_us_history_5shot_acc": 59.31,
"cmmlu-high_school_mathematics_5shot_acc": 30.0,
"cmmlu-professional_accounting_5shot_acc": 37.59,
"cmmlu-professional_psychology_5shot_acc": 45.1,
"cmmlu-high_school_world_history_5shot_acc": 64.98,
"cmmlu-high_school_macroeconomics_5shot_acc": 52.56,
"cmmlu-high_school_microeconomics_5shot_acc": 51.68,
"cmmlu-high_school_computer_science_5shot_acc": 65.0,
"cmmlu-high_school_european_history_5shot_acc": 58.18,
"cmmlu-high_school_government_and_politics_5shot_acc": 58.55,
"cmmlu_fullavg_5shot_acc_norm": 49.17,
"cmmlu-virology_5shot_acc_norm": 36.75,
"cmmlu-marketing_5shot_acc_norm": 72.22,
"cmmlu-nutrition_5shot_acc_norm": 50.0,
"cmmlu-sociology_5shot_acc_norm": 65.67,
"cmmlu-management_5shot_acc_norm": 62.14,
"cmmlu-philosophy_5shot_acc_norm": 43.41,
"cmmlu-prehistory_5shot_acc_norm": 42.28,
"cmmlu-human_aging_5shot_acc_norm": 47.98,
"cmmlu-formal_logic_5shot_acc_norm": 42.4,
"cmmlu-global_facts_5shot_acc_norm": 40.0,
"cmmlu-jurisprudence_5shot_acc_norm": 53.7,
"cmmlu-miscellaneous_5shot_acc_norm": 50.7,
"cmmlu-moral_disputes_5shot_acc_norm": 53.76,
"cmmlu-human_sexuality_5shot_acc_norm": 45.04,
"cmmlu-moral_scenarios_5shot_acc_norm": 28.38,
"cmmlu-world_religions_5shot_acc_norm": 46.2,
"cmmlu-machine_learning_5shot_acc_norm": 42.86,
"cmmlu-medical_genetics_5shot_acc_norm": 44.0,
"cmmlu-professional_law_5shot_acc_norm": 36.31,
"cmmlu-public_relations_5shot_acc_norm": 53.64,
"cmmlu-security_studies_5shot_acc_norm": 61.63,
"cmmlu-international_law_5shot_acc_norm": 59.5,
"cmmlu-logical_fallacies_5shot_acc_norm": 49.69,
"cmmlu-us_foreign_policy_5shot_acc_norm": 52.0,
"cmmlu-high_school_biology_5shot_acc_norm": 49.68,
"cmmlu-high_school_physics_5shot_acc_norm": 39.74,
"cmmlu-high_school_chemistry_5shot_acc_norm": 37.44,
"cmmlu-high_school_geography_5shot_acc_norm": 58.08,
"cmmlu-professional_medicine_5shot_acc_norm": 39.71,
"cmmlu-elementary_mathematics_5shot_acc_norm": 33.86,
"cmmlu-high_school_psychology_5shot_acc_norm": 56.7,
"cmmlu-high_school_statistics_5shot_acc_norm": 46.76,
"cmmlu-high_school_us_history_5shot_acc_norm": 59.31,
"cmmlu-high_school_mathematics_5shot_acc_norm": 30.0,
"cmmlu-professional_accounting_5shot_acc_norm": 37.59,
"cmmlu-professional_psychology_5shot_acc_norm": 45.1,
"cmmlu-high_school_world_history_5shot_acc_norm": 64.98,
"cmmlu-high_school_macroeconomics_5shot_acc_norm": 52.56,
"cmmlu-high_school_microeconomics_5shot_acc_norm": 51.68,
"cmmlu-high_school_computer_science_5shot_acc_norm": 65.0,
"cmmlu-high_school_european_history_5shot_acc_norm": 58.18,
"cmmlu-high_school_government_and_politics_5shot_acc_norm": 58.55
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}
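A minimal sketch of how one might consume this file, assuming it has been saved locally under its repo filename (results_2024-05-22T13-35-26.955439.json). It pulls the headline metric from each harness (acc_norm, acc, or mc2, whichever is present at the top level) and prints an unweighted average; the equal weighting across harnesses is an assumption, not something the file itself specifies.

```python
import json

# Assumption: the JSON above is saved locally under its repo filename.
with open("results_2024-05-22T13-35-26.955439.json") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])  # eldogbbhed/Peagle-9b

# Collect each harness's headline metric: the first of acc_norm / acc / mc2
# found at the top level, skipping stderr fields and per-subtask scores.
headline = {}
for harness, metrics in data["results"].items():
    for key in ("acc_norm", "acc", "mc2"):
        if key in metrics:
            headline[harness] = metrics[key]
            break

for harness, score in sorted(headline.items()):
    print(f"{harness:28s} {score:6.2f}")

# Unweighted mean over the seven harnesses (assumption: equal weighting).
print(f"{'average':28s} {sum(headline.values()) / len(headline):6.2f}")
```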