results/Qwen/Qwen1.5-0.5B/results_2024-04-23T18-43-31.575440.json
{
"config_general": {
"model_name": "Qwen/Qwen1.5-0.5B",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 31.4,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 26.02,
"c_arc_challenge_25shot_acc_norm": 31.4
},
"harness-c_gsm8k": {
"acc": 6.14,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 6.14
},
"harness-c_hellaswag": {
"acc_norm": 40.75,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 33.13,
"c_hellaswag_10shot_acc_norm": 40.75
},
"harness-c-sem-v2": {
"acc": 35.6425,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 37.7,
"c_sem_v2-SLPWC_5shot_acc": 22.71,
"c_sem_v2-SLRFC_5shot_acc": 26.76,
"c_sem_v2-SLSRC_5shot_acc": 55.4,
"c_sem_v2-LLSRC_5shot_acc_norm": 37.7,
"c_sem_v2-SLPWC_5shot_acc_norm": 22.71,
"c_sem_v2-SLRFC_5shot_acc_norm": 26.76,
"c_sem_v2-SLSRC_5shot_acc_norm": 55.4
},
"harness-c_truthfulqa_mc": {
"mc2": 45.1,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 23.62,
"c_truthfulqa_mc_0shot_mc2": 45.1
},
"harness-c_winogrande": {
"acc": 53.59,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 53.59
},
"harness-cmmlu": {
"acc_norm": 37.83,
"acc_stderr": 0,
"cmmlu-anatomy_5shot_acc": 38.52,
"cmmlu_fullavg_5shot_acc": 37.83,
"cmmlu-virology_5shot_acc": 44.58,
"cmmlu-astronomy_5shot_acc": 36.84,
"cmmlu-marketing_5shot_acc": 56.84,
"cmmlu-nutrition_5shot_acc": 42.81,
"cmmlu-sociology_5shot_acc": 50.25,
"cmmlu-management_5shot_acc": 46.6,
"cmmlu-philosophy_5shot_acc": 36.33,
"cmmlu-prehistory_5shot_acc": 37.96,
"cmmlu-human_aging_5shot_acc": 38.12,
"cmmlu-anatomy_5shot_acc_norm": 38.52,
"cmmlu-econometrics_5shot_acc": 22.81,
"cmmlu-formal_logic_5shot_acc": 21.6,
"cmmlu-global_facts_5shot_acc": 38.0,
"cmmlu_fullavg_5shot_acc_norm": 37.83,
"cmmlu-jurisprudence_5shot_acc": 53.7,
"cmmlu-miscellaneous_5shot_acc": 40.36,
"cmmlu-virology_5shot_acc_norm": 44.58,
"cmmlu-astronomy_5shot_acc_norm": 36.84,
"cmmlu-marketing_5shot_acc_norm": 56.84,
"cmmlu-moral_disputes_5shot_acc": 40.75,
"cmmlu-nutrition_5shot_acc_norm": 42.81,
"cmmlu-sociology_5shot_acc_norm": 50.25,
"cmmlu-business_ethics_5shot_acc": 39.0,
"cmmlu-college_biology_5shot_acc": 29.86,
"cmmlu-college_physics_5shot_acc": 30.39,
"cmmlu-human_sexuality_5shot_acc": 42.75,
"cmmlu-management_5shot_acc_norm": 46.6,
"cmmlu-moral_scenarios_5shot_acc": 24.58,
"cmmlu-philosophy_5shot_acc_norm": 36.33,
"cmmlu-prehistory_5shot_acc_norm": 37.96,
"cmmlu-world_religions_5shot_acc": 38.01,
"cmmlu-abstract_algebra_5shot_acc": 33.0,
"cmmlu-college_medicine_5shot_acc": 34.68,
"cmmlu-human_aging_5shot_acc_norm": 38.12,
"cmmlu-machine_learning_5shot_acc": 35.71,
"cmmlu-medical_genetics_5shot_acc": 44.0,
"cmmlu-professional_law_5shot_acc": 30.38,
"cmmlu-public_relations_5shot_acc": 38.18,
"cmmlu-security_studies_5shot_acc": 34.69,
"cmmlu-college_chemistry_5shot_acc": 33.0,
"cmmlu-computer_security_5shot_acc": 46.0,
"cmmlu-econometrics_5shot_acc_norm": 22.81,
"cmmlu-formal_logic_5shot_acc_norm": 21.6,
"cmmlu-global_facts_5shot_acc_norm": 38.0,
"cmmlu-international_law_5shot_acc": 54.55,
"cmmlu-logical_fallacies_5shot_acc": 33.13,
"cmmlu-us_foreign_policy_5shot_acc": 54.0,
"cmmlu-clinical_knowledge_5shot_acc": 38.49,
"cmmlu-conceptual_physics_5shot_acc": 38.72,
"cmmlu-jurisprudence_5shot_acc_norm": 53.7,
"cmmlu-miscellaneous_5shot_acc_norm": 40.36,
"cmmlu-college_mathematics_5shot_acc": 37.0,
"cmmlu-high_school_biology_5shot_acc": 36.77,
"cmmlu-high_school_physics_5shot_acc": 23.18,
"cmmlu-moral_disputes_5shot_acc_norm": 40.75,
"cmmlu-business_ethics_5shot_acc_norm": 39.0,
"cmmlu-college_biology_5shot_acc_norm": 29.86,
"cmmlu-college_physics_5shot_acc_norm": 30.39,
"cmmlu-human_sexuality_5shot_acc_norm": 42.75,
"cmmlu-moral_scenarios_5shot_acc_norm": 24.58,
"cmmlu-world_religions_5shot_acc_norm": 38.01,
"cmmlu-abstract_algebra_5shot_acc_norm": 33.0,
"cmmlu-college_medicine_5shot_acc_norm": 34.68,
"cmmlu-high_school_chemistry_5shot_acc": 27.59,
"cmmlu-high_school_geography_5shot_acc": 45.96,
"cmmlu-machine_learning_5shot_acc_norm": 35.71,
"cmmlu-medical_genetics_5shot_acc_norm": 44.0,
"cmmlu-professional_law_5shot_acc_norm": 30.38,
"cmmlu-professional_medicine_5shot_acc": 30.51,
"cmmlu-public_relations_5shot_acc_norm": 38.18,
"cmmlu-security_studies_5shot_acc_norm": 34.69,
"cmmlu-college_chemistry_5shot_acc_norm": 33.0,
"cmmlu-computer_security_5shot_acc_norm": 46.0,
"cmmlu-electrical_engineering_5shot_acc": 43.45,
"cmmlu-elementary_mathematics_5shot_acc": 32.54,
"cmmlu-high_school_psychology_5shot_acc": 39.45,
"cmmlu-high_school_statistics_5shot_acc": 23.15,
"cmmlu-high_school_us_history_5shot_acc": 43.14,
"cmmlu-international_law_5shot_acc_norm": 54.55,
"cmmlu-logical_fallacies_5shot_acc_norm": 33.13,
"cmmlu-us_foreign_policy_5shot_acc_norm": 54.0,
"cmmlu-clinical_knowledge_5shot_acc_norm": 38.49,
"cmmlu-conceptual_physics_5shot_acc_norm": 38.72,
"cmmlu-high_school_mathematics_5shot_acc": 28.52,
"cmmlu-professional_accounting_5shot_acc": 34.4,
"cmmlu-professional_psychology_5shot_acc": 35.62,
"cmmlu-college_computer_science_5shot_acc": 36.0,
"cmmlu-college_mathematics_5shot_acc_norm": 37.0,
"cmmlu-high_school_biology_5shot_acc_norm": 36.77,
"cmmlu-high_school_physics_5shot_acc_norm": 23.18,
"cmmlu-high_school_world_history_5shot_acc": 44.73,
"cmmlu-high_school_chemistry_5shot_acc_norm": 27.59,
"cmmlu-high_school_geography_5shot_acc_norm": 45.96,
"cmmlu-high_school_macroeconomics_5shot_acc": 33.33,
"cmmlu-high_school_microeconomics_5shot_acc": 38.24,
"cmmlu-professional_medicine_5shot_acc_norm": 30.51,
"cmmlu-electrical_engineering_5shot_acc_norm": 43.45,
"cmmlu-elementary_mathematics_5shot_acc_norm": 32.54,
"cmmlu-high_school_psychology_5shot_acc_norm": 39.45,
"cmmlu-high_school_statistics_5shot_acc_norm": 23.15,
"cmmlu-high_school_us_history_5shot_acc_norm": 43.14,
"cmmlu-high_school_computer_science_5shot_acc": 43.0,
"cmmlu-high_school_european_history_5shot_acc": 47.27,
"cmmlu-high_school_mathematics_5shot_acc_norm": 28.52,
"cmmlu-professional_accounting_5shot_acc_norm": 34.4,
"cmmlu-professional_psychology_5shot_acc_norm": 35.62,
"cmmlu-college_computer_science_5shot_acc_norm": 36.0,
"cmmlu-high_school_world_history_5shot_acc_norm": 44.73,
"cmmlu-high_school_macroeconomics_5shot_acc_norm": 33.33,
"cmmlu-high_school_microeconomics_5shot_acc_norm": 38.24,
"cmmlu-high_school_computer_science_5shot_acc_norm": 43.0,
"cmmlu-high_school_european_history_5shot_acc_norm": 47.27,
"cmmlu-high_school_government_and_politics_5shot_acc": 33.16,
"cmmlu-high_school_government_and_politics_5shot_acc_norm": 33.16
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}
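
For anyone consuming this file programmatically, here is a minimal Python sketch of how the headline score for each task block can be read out. It assumes the file has been saved locally as results.json (a hypothetical path, not part of this repo); each task block stores its aggregate under "acc_norm", "acc", or "mc2", alongside per-shot breakdowns prefixed with the task name.

import json

# Load the results file (the local path is an assumption; adjust as needed).
with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

# Each task block keeps its headline metric under "acc_norm", "acc", or "mc2";
# the remaining keys are per-shot breakdowns and the reported standard error.
for task, metrics in data["results"].items():
    headline = next(
        (metrics[key] for key in ("acc_norm", "acc", "mc2") if key in metrics),
        None,
    )
    print(f"{task}: {headline}")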