{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3242320819112628,
            "acc_stderr": 0.013678810399518815,
            "acc_norm": 0.3771331058020478,
            "acc_norm_stderr": 0.01416336689619259
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3686516630153356,
            "acc_stderr": 0.00481453264257465,
            "acc_norm": 0.4765982871937861,
            "acc_norm_stderr": 0.004984313205791442
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.038316105328219316,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.038316105328219316
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.04897957737781168,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.04897957737781168
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.44572158365261816,
            "acc_stderr": 0.0177742972824795,
            "acc_norm": 0.44572158365261816,
            "acc_norm_stderr": 0.0177742972824795
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4222222222222222,
            "acc_stderr": 0.04266763404099582,
            "acc_norm": 0.4222222222222222,
            "acc_norm_stderr": 0.04266763404099582
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.03208115750788684,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.03208115750788684
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.43373493975903615,
            "acc_stderr": 0.03858158940685515,
            "acc_norm": 0.43373493975903615,
            "acc_norm_stderr": 0.03858158940685515
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4855305466237942,
            "acc_stderr": 0.02838619808417768,
            "acc_norm": 0.4855305466237942,
            "acc_norm_stderr": 0.02838619808417768
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4349775784753363,
            "acc_stderr": 0.03327283370271344,
            "acc_norm": 0.4349775784753363,
            "acc_norm_stderr": 0.03327283370271344
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5606060606060606,
            "acc_stderr": 0.0353608594752948,
            "acc_norm": 0.5606060606060606,
            "acc_norm_stderr": 0.0353608594752948
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.47586206896551725,
            "acc_stderr": 0.04161808503501528,
            "acc_norm": 0.47586206896551725,
            "acc_norm_stderr": 0.04161808503501528
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.043364327079931785,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.043364327079931785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5252100840336135,
            "acc_stderr": 0.03243718055137411,
            "acc_norm": 0.5252100840336135,
            "acc_norm_stderr": 0.03243718055137411
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4897435897435897,
            "acc_stderr": 0.025345672221942374,
            "acc_norm": 0.4897435897435897,
            "acc_norm_stderr": 0.025345672221942374
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760627,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.458128078817734,
            "acc_stderr": 0.035056301407857426,
            "acc_norm": 0.458128078817734,
            "acc_norm_stderr": 0.035056301407857426
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4870967741935484,
            "acc_stderr": 0.028434533152681855,
            "acc_norm": 0.4870967741935484,
            "acc_norm_stderr": 0.028434533152681855
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7435897435897436,
            "acc_stderr": 0.028605953702004243,
            "acc_norm": 0.7435897435897436,
            "acc_norm_stderr": 0.028605953702004243
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.42641509433962266,
            "acc_stderr": 0.030437794342983045,
            "acc_norm": 0.42641509433962266,
            "acc_norm_stderr": 0.030437794342983045
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5363636363636364,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.5363636363636364,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.029185714949857403,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.029185714949857403
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763743,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763743
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3930635838150289,
            "acc_stderr": 0.03724249595817729,
            "acc_norm": 0.3930635838150289,
            "acc_norm_stderr": 0.03724249595817729
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.373015873015873,
            "acc_stderr": 0.02490699045899257,
            "acc_norm": 0.373015873015873,
            "acc_norm_stderr": 0.02490699045899257
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.038990736873573344,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.038990736873573344
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237101,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237101
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5057803468208093,
            "acc_stderr": 0.026917296179149123,
            "acc_norm": 0.5057803468208093,
            "acc_norm_stderr": 0.026917296179149123
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5214723926380368,
            "acc_stderr": 0.0392474687675113,
            "acc_norm": 0.5214723926380368,
            "acc_norm_stderr": 0.0392474687675113
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4506172839506173,
            "acc_stderr": 0.027684721415656206,
            "acc_norm": 0.4506172839506173,
            "acc_norm_stderr": 0.027684721415656206
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.48704663212435234,
            "acc_stderr": 0.03607228061047749,
            "acc_norm": 0.48704663212435234,
            "acc_norm_stderr": 0.03607228061047749
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3508771929824561,
            "acc_stderr": 0.04489539350270698,
            "acc_norm": 0.3508771929824561,
            "acc_norm_stderr": 0.04489539350270698
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.48807339449541287,
            "acc_stderr": 0.021431223617362233,
            "acc_norm": 0.48807339449541287,
            "acc_norm_stderr": 0.021431223617362233
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.043062412591271526,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.043062412591271526
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4934640522875817,
            "acc_stderr": 0.028627470550556047,
            "acc_norm": 0.4934640522875817,
            "acc_norm_stderr": 0.028627470550556047
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7107438016528925,
            "acc_stderr": 0.04139112727635463,
            "acc_norm": 0.7107438016528925,
            "acc_norm_stderr": 0.04139112727635463
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4342105263157895,
            "acc_stderr": 0.0403356566784832,
            "acc_norm": 0.4342105263157895,
            "acc_norm_stderr": 0.0403356566784832
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4035947712418301,
            "acc_stderr": 0.019848280168401147,
            "acc_norm": 0.4035947712418301,
            "acc_norm_stderr": 0.019848280168401147
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199502,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199502
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4107142857142857,
            "acc_stderr": 0.04669510663875191,
            "acc_norm": 0.4107142857142857,
            "acc_norm_stderr": 0.04669510663875191
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.03372343271653063,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.03372343271653063
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.3307262569832402,
            "acc_stderr": 0.01573502625896612,
            "acc_norm": 0.3307262569832402,
            "acc_norm_stderr": 0.01573502625896612
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.41911764705882354,
            "acc_stderr": 0.02997280717046463,
            "acc_norm": 0.41911764705882354,
            "acc_norm_stderr": 0.02997280717046463
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5469387755102041,
            "acc_stderr": 0.031867859300041275,
            "acc_norm": 0.5469387755102041,
            "acc_norm_stderr": 0.031867859300041275
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5611814345991561,
            "acc_stderr": 0.032302649315470375,
            "acc_norm": 0.5611814345991561,
            "acc_norm_stderr": 0.032302649315470375
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3318122555410691,
            "acc_stderr": 0.012026088259897634,
            "acc_norm": 0.3318122555410691,
            "acc_norm_stderr": 0.012026088259897634
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.03503235296367992,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.03503235296367992
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.03898531605579419,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.03898531605579419
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29865361077111385,
            "mc1_stderr": 0.016021570613768545,
            "mc2": 0.47266598912504365,
            "mc2_stderr": 0.015392669159401157
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5041322314049587,
            "acc_stderr": 0.01718976703213082,
            "acc_norm": 0.5501770956316411,
            "acc_norm_stderr": 0.01710357334382571
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4",
        "model_sha": "2274c77af5e028132156c1737de2a39d39bbff01",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}