results/jondurbin/bagel-dpo-34b-v0.5/results_2024-05-21T10-15-02.244055.json
{
"config_general": {
"model_name": "jondurbin/bagel-dpo-34b-v0.5",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 62.37,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 56.66,
"c_arc_challenge_25shot_acc_norm": 62.37
},
"harness-c_gsm8k": {
"acc": 16.53,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 16.53
},
"harness-c_hellaswag": {
"acc_norm": 67.78,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 48.97,
"c_hellaswag_10shot_acc_norm": 67.78
},
"harness-c-sem-v2": {
"acc": 90.0625,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 93.53,
"c_sem_v2-SLPWC_5shot_acc": 85.43,
"c_sem_v2-SLRFC_5shot_acc": 93.09,
"c_sem_v2-SLSRC_5shot_acc": 88.2,
"c_sem_v2-LLSRC_5shot_acc_norm": 93.53,
"c_sem_v2-SLPWC_5shot_acc_norm": 85.43,
"c_sem_v2-SLRFC_5shot_acc_norm": 93.09,
"c_sem_v2-SLSRC_5shot_acc_norm": 88.2
},
"harness-c_truthfulqa_mc": {
"mc2": 57.4,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 35.01,
"c_truthfulqa_mc_0shot_mc2": 57.4
},
"harness-c_winogrande": {
"acc": 67.32,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 67.32
},
"harness-cmmlu": {
"acc_norm": 71.31,
"acc_stderr": 0,
"cmmlu_fullavg_5shot_acc": 71.31,
"cmmlu-virology_5shot_acc": 53.61,
"cmmlu-marketing_5shot_acc": 87.18,
"cmmlu-nutrition_5shot_acc": 77.45,
"cmmlu-sociology_5shot_acc": 83.08,
"cmmlu-management_5shot_acc": 79.61,
"cmmlu-philosophy_5shot_acc": 72.03,
"cmmlu-prehistory_5shot_acc": 75.93,
"cmmlu-human_aging_5shot_acc": 72.2,
"cmmlu-formal_logic_5shot_acc": 47.2,
"cmmlu-global_facts_5shot_acc": 51.0,
"cmmlu-jurisprudence_5shot_acc": 84.26,
"cmmlu-miscellaneous_5shot_acc": 80.08,
"cmmlu-moral_disputes_5shot_acc": 69.94,
"cmmlu-human_sexuality_5shot_acc": 77.86,
"cmmlu-moral_scenarios_5shot_acc": 59.78,
"cmmlu-world_religions_5shot_acc": 73.1,
"cmmlu-machine_learning_5shot_acc": 52.68,
"cmmlu-medical_genetics_5shot_acc": 75.0,
"cmmlu-professional_law_5shot_acc": 51.3,
"cmmlu-public_relations_5shot_acc": 64.55,
"cmmlu-security_studies_5shot_acc": 76.33,
"cmmlu-international_law_5shot_acc": 85.95,
"cmmlu-logical_fallacies_5shot_acc": 71.78,
"cmmlu-us_foreign_policy_5shot_acc": 82.0,
"cmmlu-high_school_biology_5shot_acc": 82.58,
"cmmlu-high_school_physics_5shot_acc": 47.02,
"cmmlu-high_school_chemistry_5shot_acc": 56.16,
"cmmlu-high_school_geography_5shot_acc": 80.3,
"cmmlu-professional_medicine_5shot_acc": 68.01,
"cmmlu-high_school_psychology_5shot_acc": 82.94,
"cmmlu-high_school_statistics_5shot_acc": 56.94,
"cmmlu-high_school_us_history_5shot_acc": 85.78,
"cmmlu-high_school_mathematics_5shot_acc": 41.48,
"cmmlu-professional_accounting_5shot_acc": 54.61,
"cmmlu-professional_psychology_5shot_acc": 71.57,
"cmmlu-high_school_world_history_5shot_acc": 86.08,
"cmmlu-high_school_macroeconomics_5shot_acc": 75.64,
"cmmlu-high_school_microeconomics_5shot_acc": 79.83,
"cmmlu-high_school_computer_science_5shot_acc": 79.0,
"cmmlu-high_school_european_history_5shot_acc": 80.0,
"cmmlu-high_school_government_and_politics_5shot_acc": 91.71,
"cmmlu_fullavg_5shot_acc_norm": 71.31,
"cmmlu-virology_5shot_acc_norm": 53.61,
"cmmlu-marketing_5shot_acc_norm": 87.18,
"cmmlu-nutrition_5shot_acc_norm": 77.45,
"cmmlu-sociology_5shot_acc_norm": 83.08,
"cmmlu-management_5shot_acc_norm": 79.61,
"cmmlu-philosophy_5shot_acc_norm": 72.03,
"cmmlu-prehistory_5shot_acc_norm": 75.93,
"cmmlu-human_aging_5shot_acc_norm": 72.2,
"cmmlu-formal_logic_5shot_acc_norm": 47.2,
"cmmlu-global_facts_5shot_acc_norm": 51.0,
"cmmlu-jurisprudence_5shot_acc_norm": 84.26,
"cmmlu-miscellaneous_5shot_acc_norm": 80.08,
"cmmlu-moral_disputes_5shot_acc_norm": 69.94,
"cmmlu-human_sexuality_5shot_acc_norm": 77.86,
"cmmlu-moral_scenarios_5shot_acc_norm": 59.78,
"cmmlu-world_religions_5shot_acc_norm": 73.1,
"cmmlu-machine_learning_5shot_acc_norm": 52.68,
"cmmlu-medical_genetics_5shot_acc_norm": 75.0,
"cmmlu-professional_law_5shot_acc_norm": 51.3,
"cmmlu-public_relations_5shot_acc_norm": 64.55,
"cmmlu-security_studies_5shot_acc_norm": 76.33,
"cmmlu-international_law_5shot_acc_norm": 85.95,
"cmmlu-logical_fallacies_5shot_acc_norm": 71.78,
"cmmlu-us_foreign_policy_5shot_acc_norm": 82.0,
"cmmlu-high_school_biology_5shot_acc_norm": 82.58,
"cmmlu-high_school_physics_5shot_acc_norm": 47.02,
"cmmlu-high_school_chemistry_5shot_acc_norm": 56.16,
"cmmlu-high_school_geography_5shot_acc_norm": 80.3,
"cmmlu-professional_medicine_5shot_acc_norm": 68.01,
"cmmlu-high_school_psychology_5shot_acc_norm": 82.94,
"cmmlu-high_school_statistics_5shot_acc_norm": 56.94,
"cmmlu-high_school_us_history_5shot_acc_norm": 85.78,
"cmmlu-high_school_mathematics_5shot_acc_norm": 41.48,
"cmmlu-professional_accounting_5shot_acc_norm": 54.61,
"cmmlu-professional_psychology_5shot_acc_norm": 71.57,
"cmmlu-high_school_world_history_5shot_acc_norm": 86.08,
"cmmlu-high_school_macroeconomics_5shot_acc_norm": 75.64,
"cmmlu-high_school_microeconomics_5shot_acc_norm": 79.83,
"cmmlu-high_school_computer_science_5shot_acc_norm": 79.0,
"cmmlu-high_school_european_history_5shot_acc_norm": 80.0,
"cmmlu-high_school_government_and_politics_5shot_acc_norm": 91.71
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}
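A minimal sketch (not part of the results file itself), assuming the JSON above is saved locally under the filename shown at the top; it loads the file and recomputes the harness-c-sem-v2 average from its four 5-shot sub-scores, which should match the reported "acc" of 90.0625.

import json

# Assumption: the results JSON sits in the current working directory.
with open("results_2024-05-21T10-15-02.244055.json") as f:
    data = json.load(f)

sem = data["results"]["harness-c-sem-v2"]
# Keep only the per-subset 5-shot accuracies (LLSRC, SLPWC, SLRFC, SLSRC),
# excluding the aggregate "acc", "acc_stderr", and the "_acc_norm" variants.
sub_scores = [v for k, v in sem.items() if k.endswith("_5shot_acc")]
print(sum(sub_scores) / len(sub_scores))  # 90.0625, matching the reported "acc"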