leaderboard-test-results/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.28668941979522183,
            "acc_stderr": 0.01321498632927476,
            "acc_norm": 0.33447098976109213,
            "acc_norm_stderr": 0.013787460322441374
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.34096793467436765,
            "acc_stderr": 0.004730658073041555,
            "acc_norm": 0.4268074088826927,
            "acc_norm_stderr": 0.004936029827672039
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.36257309941520466,
            "acc_stderr": 0.0368713061556206,
            "acc_norm": 0.36257309941520466,
            "acc_norm_stderr": 0.0368713061556206
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.32038834951456313,
            "acc_stderr": 0.046202840822800406,
            "acc_norm": 0.32038834951456313,
            "acc_norm_stderr": 0.046202840822800406
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3997445721583653,
            "acc_stderr": 0.01751684790705327,
            "acc_norm": 0.3997445721583653,
            "acc_norm_stderr": 0.01751684790705327
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.0402477840197711,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.0402477840197711
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3191489361702128,
            "acc_stderr": 0.030472973363380045,
            "acc_norm": 0.3191489361702128,
            "acc_norm_stderr": 0.030472973363380045
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3313253012048193,
            "acc_stderr": 0.03664314777288085,
            "acc_norm": 0.3313253012048193,
            "acc_norm_stderr": 0.03664314777288085
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3858520900321543,
            "acc_stderr": 0.027648149599751464,
            "acc_norm": 0.3858520900321543,
            "acc_norm_stderr": 0.027648149599751464
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.34977578475336324,
            "acc_stderr": 0.03200736719484504,
            "acc_norm": 0.34977578475336324,
            "acc_norm_stderr": 0.03200736719484504
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.25190839694656486,
            "acc_stderr": 0.03807387116306086,
            "acc_norm": 0.25190839694656486,
            "acc_norm_stderr": 0.03807387116306086
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.35353535353535354,
            "acc_stderr": 0.03406086723547153,
            "acc_norm": 0.35353535353535354,
            "acc_norm_stderr": 0.03406086723547153
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.31724137931034485,
            "acc_stderr": 0.03878352372138622,
            "acc_norm": 0.31724137931034485,
            "acc_norm_stderr": 0.03878352372138622
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237653,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237653
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31512605042016806,
            "acc_stderr": 0.03017680828897434,
            "acc_norm": 0.31512605042016806,
            "acc_norm_stderr": 0.03017680828897434
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.023901157979402538,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.023901157979402538
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.04616631111801714,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.04616631111801714
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2660098522167488,
            "acc_stderr": 0.03108982600293753,
            "acc_norm": 0.2660098522167488,
            "acc_norm_stderr": 0.03108982600293753
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.32903225806451614,
            "acc_stderr": 0.02672949906834996,
            "acc_norm": 0.32903225806451614,
            "acc_norm_stderr": 0.02672949906834996
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5042735042735043,
            "acc_stderr": 0.03275489264382132,
            "acc_norm": 0.5042735042735043,
            "acc_norm_stderr": 0.03275489264382132
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2943396226415094,
            "acc_stderr": 0.028049186315695248,
            "acc_norm": 0.2943396226415094,
            "acc_norm_stderr": 0.028049186315695248
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.39090909090909093,
            "acc_stderr": 0.04673752333670237,
            "acc_norm": 0.39090909090909093,
            "acc_norm_stderr": 0.04673752333670237
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.027940457136228412,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.027940457136228412
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.25165562913907286,
            "acc_stderr": 0.03543304234389985,
            "acc_norm": 0.25165562913907286,
            "acc_norm_stderr": 0.03543304234389985
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.43283582089552236,
            "acc_stderr": 0.0350349092367328,
            "acc_norm": 0.43283582089552236,
            "acc_norm_stderr": 0.0350349092367328
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.23121387283236994,
            "acc_stderr": 0.0321473730202947,
            "acc_norm": 0.23121387283236994,
            "acc_norm_stderr": 0.0321473730202947
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3201058201058201,
            "acc_stderr": 0.024026846392873502,
            "acc_norm": 0.3201058201058201,
            "acc_norm_stderr": 0.024026846392873502
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.038009680605548574,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.038009680605548574
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999998,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999998
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3439306358381503,
            "acc_stderr": 0.025574123786546648,
            "acc_norm": 0.3439306358381503,
            "acc_norm_stderr": 0.025574123786546648
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.32515337423312884,
            "acc_stderr": 0.03680350371286461,
            "acc_norm": 0.32515337423312884,
            "acc_norm_stderr": 0.03680350371286461
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.026869490744815247,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.026869490744815247
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.3626943005181347,
            "acc_stderr": 0.034697137917043715,
            "acc_norm": 0.3626943005181347,
            "acc_norm_stderr": 0.034697137917043715
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537315,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537315
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3651376146788991,
            "acc_stderr": 0.020642801454383995,
            "acc_norm": 0.3651376146788991,
            "acc_norm_stderr": 0.020642801454383995
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.0393253768039287,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.0393253768039287
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.026787453111906532,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.026787453111906532
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4462809917355372,
            "acc_stderr": 0.0453793517794788,
            "acc_norm": 0.4462809917355372,
            "acc_norm_stderr": 0.0453793517794788
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.28289473684210525,
            "acc_stderr": 0.03665349695640767,
            "acc_norm": 0.28289473684210525,
            "acc_norm_stderr": 0.03665349695640767
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.32189542483660133,
            "acc_stderr": 0.018901015322093085,
            "acc_norm": 0.32189542483660133,
            "acc_norm_stderr": 0.018901015322093085
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2907801418439716,
            "acc_stderr": 0.027090664368353178,
            "acc_norm": 0.2907801418439716,
            "acc_norm_stderr": 0.027090664368353178
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.24107142857142858,
            "acc_stderr": 0.04059867246952688,
            "acc_norm": 0.24107142857142858,
            "acc_norm_stderr": 0.04059867246952688
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.03099866630456052,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.03099866630456052
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.22793296089385476,
            "acc_stderr": 0.014030149950805097,
            "acc_norm": 0.22793296089385476,
            "acc_norm_stderr": 0.014030149950805097
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.34191176470588236,
            "acc_stderr": 0.02881472242225418,
            "acc_norm": 0.34191176470588236,
            "acc_norm_stderr": 0.02881472242225418
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3469387755102041,
            "acc_stderr": 0.030472526026726492,
            "acc_norm": 0.3469387755102041,
            "acc_norm_stderr": 0.030472526026726492
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4219409282700422,
            "acc_stderr": 0.032148146302403695,
            "acc_norm": 0.4219409282700422,
            "acc_norm_stderr": 0.032148146302403695
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.29986962190352023,
            "acc_stderr": 0.011702660860193989,
            "acc_norm": 0.29986962190352023,
            "acc_norm_stderr": 0.011702660860193989
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.03332139944668086,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.03332139944668086
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3939393939393939,
            "acc_stderr": 0.03815494308688929,
            "acc_norm": 0.3939393939393939,
            "acc_norm_stderr": 0.03815494308688929
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2864137086903305,
            "mc1_stderr": 0.015826142439502342,
            "mc2": 0.46556936650012803,
            "mc2_stderr": 0.01608055615378503
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.20188902007083825,
            "acc_stderr": 0.01380075389577743,
            "acc_norm": 0.21959858323494688,
            "acc_norm_stderr": 0.014232743085580275
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCJihun/Mistral-7B-eng-kor-cot-combined",
        "model_sha": "ad4d7c60244d0f1e0cc11d44be9b14c3354df448",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
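
A minimal sketch of how a result file like this can be consumed, assuming it is saved locally under the filename shown above (the path is an assumption; adjust it to wherever the JSON lives). It averages acc_norm over the ko_mmlu subtasks and cross-checks one reported stderr against the binomial sample standard error sqrt(p * (1 - p) / (n - 1)); the 100-question size assumed for ko_mmlu_abstract_algebra is not stated in the file but is consistent with its reported stderr.

import json
import math

# Path is an assumption; point it at wherever the result file is saved.
with open("result_2023-10-24 01:05:12.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Mean normalized accuracy across the ko_mmlu subtasks.
mmlu_scores = [v["acc_norm"] for k, v in results.items()
               if k.startswith("harness|ko_mmlu")]
print(f"{len(mmlu_scores)} ko_mmlu subtasks, "
      f"mean acc_norm = {sum(mmlu_scores) / len(mmlu_scores):.4f}")

# Sanity check: with p = acc over n questions, the reported stderr matches
# the sample standard error sqrt(p * (1 - p) / (n - 1)). Assuming n = 100
# for ko_mmlu_abstract_algebra (p = 0.31) reproduces 0.04648... above.
p, n = results["harness|ko_mmlu_abstract_algebra|5"]["acc"], 100
print(f"expected stderr = {math.sqrt(p * (1 - p) / (n - 1)):.17f}")

Filtering on the harness|ko_mmlu prefix deliberately skips harness|ko_truthfulqa_mc, whose entry carries mc1/mc2 keys rather than acc_norm and would otherwise raise a KeyError.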