results/paloalma/ECE-TW3-JRGL-V1/results_2024-05-30T22-45-18.293434.json
{
  "config_general": {
    "model_name": "paloalma/ECE-TW3-JRGL-V1",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "harness-c_arc_challenge": {
      "acc_norm": 57.51,
      "acc_stderr": 0,
      "c_arc_challenge_25shot_acc": 52.3,
      "c_arc_challenge_25shot_acc_norm": 57.51
    },
    "harness-c_gsm8k": {
      "acc": 53.83,
      "acc_stderr": 0,
      "c_gsm8k_5shot_acc": 53.83
    },
    "harness-c_hellaswag": {
      "acc_norm": 63.09,
      "acc_stderr": 0,
      "c_hellaswag_10shot_acc": 46.8,
      "c_hellaswag_10shot_acc_norm": 63.09
    },
    "harness-c-sem-v2": {
      "acc": 80.3975,
      "acc_stderr": 0,
      "c_sem_v2-LLSRC_5shot_acc": 82.59,
      "c_sem_v2-SLPWC_5shot_acc": 78.43,
      "c_sem_v2-SLRFC_5shot_acc": 81.15,
      "c_sem_v2-SLSRC_5shot_acc": 79.42,
      "c_sem_v2-LLSRC_5shot_acc_norm": 82.59,
      "c_sem_v2-SLPWC_5shot_acc_norm": 78.43,
      "c_sem_v2-SLRFC_5shot_acc_norm": 81.15,
      "c_sem_v2-SLSRC_5shot_acc_norm": 79.42
    },
    "harness-c_truthfulqa_mc": {
      "mc2": 58.62,
      "acc_stderr": 0,
      "c_truthfulqa_mc_0shot_mc1": 37.33,
      "c_truthfulqa_mc_0shot_mc2": 58.62
    },
    "harness-c_winogrande": {
      "acc": 66.54,
      "acc_stderr": 0,
      "c_winogrande_0shot_acc": 66.54
    },
    "harness-cmmlu": {
      "acc_norm": 63.12,
      "acc_stderr": 0,
      "cmmlu_fullavg_5shot_acc": 63.12,
      "cmmlu-virology_5shot_acc": 48.19,
      "cmmlu-nutrition_5shot_acc": 67.32,
      "cmmlu-sociology_5shot_acc": 79.6,
      "cmmlu-philosophy_5shot_acc": 63.02,
      "cmmlu-prehistory_5shot_acc": 65.12,
      "cmmlu-world_religions_5shot_acc": 67.25,
      "cmmlu-professional_law_5shot_acc": 47.65,
      "cmmlu-public_relations_5shot_acc": 61.82,
      "cmmlu-security_studies_5shot_acc": 72.65,
      "cmmlu-us_foreign_policy_5shot_acc": 83.0,
      "cmmlu-professional_medicine_5shot_acc": 53.31,
      "cmmlu-professional_accounting_5shot_acc": 47.87,
      "cmmlu-professional_psychology_5shot_acc": 63.73,
      "cmmlu_fullavg_5shot_acc_norm": 63.12,
      "cmmlu-virology_5shot_acc_norm": 48.19,
      "cmmlu-nutrition_5shot_acc_norm": 67.32,
      "cmmlu-sociology_5shot_acc_norm": 79.6,
      "cmmlu-philosophy_5shot_acc_norm": 63.02,
      "cmmlu-prehistory_5shot_acc_norm": 65.12,
      "cmmlu-world_religions_5shot_acc_norm": 67.25,
      "cmmlu-professional_law_5shot_acc_norm": 47.65,
      "cmmlu-public_relations_5shot_acc_norm": 61.82,
      "cmmlu-security_studies_5shot_acc_norm": 72.65,
      "cmmlu-us_foreign_policy_5shot_acc_norm": 83.0,
      "cmmlu-professional_medicine_5shot_acc_norm": 53.31,
      "cmmlu-professional_accounting_5shot_acc_norm": 47.87,
      "cmmlu-professional_psychology_5shot_acc_norm": 63.73
    }
  },
  "versions": {},
  "config_tasks": {},
  "summary_tasks": {},
  "summary_general": {}
}
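
The file above is the complete payload: per-task percentage scores under "results", plus empty bookkeeping sections at the end. Below is a minimal Python sketch of how such a file could be loaded and summarized. The local file path, the choice of headline metric per task, the unweighted averaging, and the sanity check are all illustrative assumptions, not the leaderboard's own aggregation code.

import json

# Path is an assumption: adjust to wherever the results file lives locally.
with open("results_2024-05-30T22-45-18.293434.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Headline metric per task, taken from the fields present in this file.
headline = {
    "harness-c_arc_challenge": "acc_norm",
    "harness-c_gsm8k": "acc",
    "harness-c_hellaswag": "acc_norm",
    "harness-c-sem-v2": "acc",
    "harness-c_truthfulqa_mc": "mc2",
    "harness-c_winogrande": "acc",
    "harness-cmmlu": "acc_norm",
}

scores = {task: results[task][metric] for task, metric in headline.items()}
for task, score in scores.items():
    print(f"{task}: {score:.2f}")

# Unweighted mean over the seven tasks (an assumption; the leaderboard
# may weight or aggregate tasks differently).
print(f"average: {sum(scores.values()) / len(scores):.2f}")

# Sanity check: the c-sem-v2 headline "acc" is the mean of its four
# 5-shot sub-scores (LLSRC, SLPWC, SLRFC, SLSRC).
sem = results["harness-c-sem-v2"]
subs = [v for k, v in sem.items() if k.endswith("_5shot_acc")]
assert abs(sem["acc"] - sum(subs) / len(subs)) < 1e-6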