results/microsoft/phi-1_5/results_2024-04-25T20-28-51.421827.json
{
"config_general": {
"model_name": "microsoft/phi-1_5",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 26.28,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 22.95,
"c_arc_challenge_25shot_acc_norm": 26.28
},
"harness-c_gsm8k": {
"acc": 0.3,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 0.3
},
"harness-c_hellaswag": {
"acc_norm": 30.08,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 27.86,
"c_hellaswag_10shot_acc_norm": 30.08
},
"harness-c-sem-v2": {
"acc": 28.8775,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 26.76,
"c_sem_v2-SLPWC_5shot_acc": 24.57,
"c_sem_v2-SLRFC_5shot_acc": 26.19,
"c_sem_v2-SLSRC_5shot_acc": 37.99,
"c_sem_v2-LLSRC_5shot_acc_norm": 26.76,
"c_sem_v2-SLPWC_5shot_acc_norm": 24.57,
"c_sem_v2-SLRFC_5shot_acc_norm": 26.19,
"c_sem_v2-SLSRC_5shot_acc_norm": 37.99
},
"harness-c_truthfulqa_mc": {
"mc2": 52.56,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 28.4,
"c_truthfulqa_mc_0shot_mc2": 52.56
},
"harness-c_winogrande": {
"acc": 49.72,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 49.72
},
"harness-cmmlu": {
"acc_norm": 27.22,
"acc_stderr": 0,
"cmmlu-anatomy_5shot_acc": 26.67,
"cmmlu_fullavg_5shot_acc": 27.22,
"cmmlu-virology_5shot_acc": 28.31,
"cmmlu-astronomy_5shot_acc": 25.66,
"cmmlu-marketing_5shot_acc": 36.32,
"cmmlu-nutrition_5shot_acc": 28.1,
"cmmlu-sociology_5shot_acc": 31.84,
"cmmlu-management_5shot_acc": 27.18,
"cmmlu-philosophy_5shot_acc": 25.4,
"cmmlu-prehistory_5shot_acc": 32.41,
"cmmlu-human_aging_5shot_acc": 29.6,
"cmmlu-econometrics_5shot_acc": 29.82,
"cmmlu-formal_logic_5shot_acc": 19.2,
"cmmlu-global_facts_5shot_acc": 35.0,
"cmmlu-jurisprudence_5shot_acc": 31.48,
"cmmlu-miscellaneous_5shot_acc": 31.16,
"cmmlu-moral_disputes_5shot_acc": 27.75,
"cmmlu-business_ethics_5shot_acc": 21.0,
"cmmlu-college_biology_5shot_acc": 25.0,
"cmmlu-college_physics_5shot_acc": 26.47,
"cmmlu-human_sexuality_5shot_acc": 25.19,
"cmmlu-moral_scenarios_5shot_acc": 26.82,
"cmmlu-world_religions_5shot_acc": 30.41,
"cmmlu-abstract_algebra_5shot_acc": 29.0,
"cmmlu-college_medicine_5shot_acc": 25.43,
"cmmlu-machine_learning_5shot_acc": 24.11,
"cmmlu-medical_genetics_5shot_acc": 26.0,
"cmmlu-professional_law_5shot_acc": 26.01,
"cmmlu-public_relations_5shot_acc": 33.64,
"cmmlu-security_studies_5shot_acc": 21.22,
"cmmlu-college_chemistry_5shot_acc": 25.0,
"cmmlu-computer_security_5shot_acc": 33.0,
"cmmlu-international_law_5shot_acc": 33.06,
"cmmlu-logical_fallacies_5shot_acc": 30.67,
"cmmlu-us_foreign_policy_5shot_acc": 30.0,
"cmmlu-clinical_knowledge_5shot_acc": 25.28,
"cmmlu-conceptual_physics_5shot_acc": 32.34,
"cmmlu-college_mathematics_5shot_acc": 30.0,
"cmmlu-high_school_biology_5shot_acc": 24.84,
"cmmlu-high_school_physics_5shot_acc": 19.87,
"cmmlu-high_school_chemistry_5shot_acc": 30.05,
"cmmlu-high_school_geography_5shot_acc": 24.24,
"cmmlu-professional_medicine_5shot_acc": 29.41,
"cmmlu-electrical_engineering_5shot_acc": 24.83,
"cmmlu-elementary_mathematics_5shot_acc": 25.13,
"cmmlu-high_school_psychology_5shot_acc": 27.71,
"cmmlu-high_school_statistics_5shot_acc": 21.3,
"cmmlu-high_school_us_history_5shot_acc": 24.51,
"cmmlu-high_school_mathematics_5shot_acc": 22.59,
"cmmlu-professional_accounting_5shot_acc": 26.6,
"cmmlu-professional_psychology_5shot_acc": 27.94,
"cmmlu-college_computer_science_5shot_acc": 19.0,
"cmmlu-high_school_world_history_5shot_acc": 28.69,
"cmmlu-high_school_macroeconomics_5shot_acc": 23.59,
"cmmlu-high_school_microeconomics_5shot_acc": 25.21,
"cmmlu-high_school_computer_science_5shot_acc": 29.0,
"cmmlu-high_school_european_history_5shot_acc": 28.48,
"cmmlu-high_school_government_and_politics_5shot_acc": 22.8,
"cmmlu-anatomy_5shot_acc_norm": 26.67,
"cmmlu_fullavg_5shot_acc_norm": 27.22,
"cmmlu-virology_5shot_acc_norm": 28.31,
"cmmlu-astronomy_5shot_acc_norm": 25.66,
"cmmlu-marketing_5shot_acc_norm": 36.32,
"cmmlu-nutrition_5shot_acc_norm": 28.1,
"cmmlu-sociology_5shot_acc_norm": 31.84,
"cmmlu-management_5shot_acc_norm": 27.18,
"cmmlu-philosophy_5shot_acc_norm": 25.4,
"cmmlu-prehistory_5shot_acc_norm": 32.41,
"cmmlu-human_aging_5shot_acc_norm": 29.6,
"cmmlu-econometrics_5shot_acc_norm": 29.82,
"cmmlu-formal_logic_5shot_acc_norm": 19.2,
"cmmlu-global_facts_5shot_acc_norm": 35.0,
"cmmlu-jurisprudence_5shot_acc_norm": 31.48,
"cmmlu-miscellaneous_5shot_acc_norm": 31.16,
"cmmlu-moral_disputes_5shot_acc_norm": 27.75,
"cmmlu-business_ethics_5shot_acc_norm": 21.0,
"cmmlu-college_biology_5shot_acc_norm": 25.0,
"cmmlu-college_physics_5shot_acc_norm": 26.47,
"cmmlu-human_sexuality_5shot_acc_norm": 25.19,
"cmmlu-moral_scenarios_5shot_acc_norm": 26.82,
"cmmlu-world_religions_5shot_acc_norm": 30.41,
"cmmlu-abstract_algebra_5shot_acc_norm": 29.0,
"cmmlu-college_medicine_5shot_acc_norm": 25.43,
"cmmlu-machine_learning_5shot_acc_norm": 24.11,
"cmmlu-medical_genetics_5shot_acc_norm": 26.0,
"cmmlu-professional_law_5shot_acc_norm": 26.01,
"cmmlu-public_relations_5shot_acc_norm": 33.64,
"cmmlu-security_studies_5shot_acc_norm": 21.22,
"cmmlu-college_chemistry_5shot_acc_norm": 25.0,
"cmmlu-computer_security_5shot_acc_norm": 33.0,
"cmmlu-international_law_5shot_acc_norm": 33.06,
"cmmlu-logical_fallacies_5shot_acc_norm": 30.67,
"cmmlu-us_foreign_policy_5shot_acc_norm": 30.0,
"cmmlu-clinical_knowledge_5shot_acc_norm": 25.28,
"cmmlu-conceptual_physics_5shot_acc_norm": 32.34,
"cmmlu-college_mathematics_5shot_acc_norm": 30.0,
"cmmlu-high_school_biology_5shot_acc_norm": 24.84,
"cmmlu-high_school_physics_5shot_acc_norm": 19.87,
"cmmlu-high_school_chemistry_5shot_acc_norm": 30.05,
"cmmlu-high_school_geography_5shot_acc_norm": 24.24,
"cmmlu-professional_medicine_5shot_acc_norm": 29.41,
"cmmlu-electrical_engineering_5shot_acc_norm": 24.83,
"cmmlu-elementary_mathematics_5shot_acc_norm": 25.13,
"cmmlu-high_school_psychology_5shot_acc_norm": 27.71,
"cmmlu-high_school_statistics_5shot_acc_norm": 21.3,
"cmmlu-high_school_us_history_5shot_acc_norm": 24.51,
"cmmlu-high_school_mathematics_5shot_acc_norm": 22.59,
"cmmlu-professional_accounting_5shot_acc_norm": 26.6,
"cmmlu-professional_psychology_5shot_acc_norm": 27.94,
"cmmlu-college_computer_science_5shot_acc_norm": 19.0,
"cmmlu-high_school_world_history_5shot_acc_norm": 28.69,
"cmmlu-high_school_macroeconomics_5shot_acc_norm": 23.59,
"cmmlu-high_school_microeconomics_5shot_acc_norm": 25.21,
"cmmlu-high_school_computer_science_5shot_acc_norm": 29.0,
"cmmlu-high_school_european_history_5shot_acc_norm": 28.48,
"cmmlu-high_school_government_and_politics_5shot_acc_norm": 22.8
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}