{
    "config_general": {
        "model_name": "01-ai/Yi-1.5-34B-Chat-16K",
        "model_dtype": "float16",
        "model_size": 0
    },
    "results": {
        "harness-c_arc_challenge": {
            "acc_norm": 63.74,
            "acc_stderr": 0,
            "c_arc_challenge_25shot_acc": 57.68,
            "c_arc_challenge_25shot_acc_norm": 63.74
        },
        "harness-c_gsm8k": {
            "acc": 66.72,
            "acc_stderr": 0,
            "c_gsm8k_5shot_acc": 66.72
        },
        "harness-c_hellaswag": {
            "acc_norm": 69.35,
            "acc_stderr": 0,
            "c_hellaswag_10shot_acc": 50.67,
            "c_hellaswag_10shot_acc_norm": 69.35
        },
        "harness-c-sem-v2": {
            "acc": 90.095,
            "acc_stderr": 0,
            "c_sem_v2-LLSRC_5shot_acc": 93.81,
            "c_sem_v2-SLPWC_5shot_acc": 85.71,
            "c_sem_v2-SLRFC_5shot_acc": 93.09,
            "c_sem_v2-SLSRC_5shot_acc": 87.77,
            "c_sem_v2-LLSRC_5shot_acc_norm": 93.81,
            "c_sem_v2-SLPWC_5shot_acc_norm": 85.71,
            "c_sem_v2-SLRFC_5shot_acc_norm": 93.09,
            "c_sem_v2-SLSRC_5shot_acc_norm": 87.77
        },
        "harness-c_truthfulqa_mc": {
            "mc2": 54.54,
            "acc_stderr": 0,
            "c_truthfulqa_mc_0shot_mc1": 32.93,
            "c_truthfulqa_mc_0shot_mc2": 54.54
        },
        "harness-c_winogrande": {
            "acc": 69.53,
            "acc_stderr": 0,
            "c_winogrande_0shot_acc": 69.53
        },
        "harness-cmmlu": {
            "acc_norm": 70.03,
            "acc_stderr": 0,
            "cmmlu_fullavg_5shot_acc": 70.03,
            "cmmlu-virology_5shot_acc": 52.41,
            "cmmlu-nutrition_5shot_acc": 80.07,
            "cmmlu-sociology_5shot_acc": 82.59,
            "cmmlu-philosophy_5shot_acc": 68.81,
            "cmmlu-prehistory_5shot_acc": 72.53,
            "cmmlu-moral_disputes_5shot_acc": 71.97,
            "cmmlu-moral_scenarios_5shot_acc": 60.56,
            "cmmlu-world_religions_5shot_acc": 75.44,
            "cmmlu-professional_law_5shot_acc": 52.61,
            "cmmlu-public_relations_5shot_acc": 71.82,
            "cmmlu-security_studies_5shot_acc": 75.51,
            "cmmlu-us_foreign_policy_5shot_acc": 86.0,
            "cmmlu-professional_medicine_5shot_acc": 72.43,
            "cmmlu-professional_accounting_5shot_acc": 57.09,
            "cmmlu-professional_psychology_5shot_acc": 70.59,
            "cmmlu_fullavg_5shot_acc_norm": 70.03,
            "cmmlu-virology_5shot_acc_norm": 52.41,
            "cmmlu-nutrition_5shot_acc_norm": 80.07,
            "cmmlu-sociology_5shot_acc_norm": 82.59,
            "cmmlu-philosophy_5shot_acc_norm": 68.81,
            "cmmlu-prehistory_5shot_acc_norm": 72.53,
            "cmmlu-moral_disputes_5shot_acc_norm": 71.97,
            "cmmlu-moral_scenarios_5shot_acc_norm": 60.56,
            "cmmlu-world_religions_5shot_acc_norm": 75.44,
            "cmmlu-professional_law_5shot_acc_norm": 52.61,
            "cmmlu-public_relations_5shot_acc_norm": 71.82,
            "cmmlu-security_studies_5shot_acc_norm": 75.51,
            "cmmlu-us_foreign_policy_5shot_acc_norm": 86.0,
            "cmmlu-professional_medicine_5shot_acc_norm": 72.43,
            "cmmlu-professional_accounting_5shot_acc_norm": 57.09,
            "cmmlu-professional_psychology_5shot_acc_norm": 70.59
        }
    },
    "versions": {},
    "config_tasks": {},
    "summary_tasks": {},
    "summary_general": {}
}