results/paloalma/TW3-JRGL-v2/results_2024-06-07T21-59-16.977848.json
{
  "config_general": {
    "model_name": "paloalma/TW3-JRGL-v2",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "harness-c_arc_challenge": {
      "acc_norm": 71.16,
      "acc_stderr": 0,
      "c_arc_challenge_25shot_acc": 67.06,
      "c_arc_challenge_25shot_acc_norm": 71.16
    },
    "harness-c_gsm8k": {
      "acc": 70.36,
      "acc_stderr": 0,
      "c_gsm8k_5shot_acc": 70.36
    },
    "harness-c_hellaswag": {
      "acc_norm": 78.17,
      "acc_stderr": 0,
      "c_hellaswag_10shot_acc": 60.56,
      "c_hellaswag_10shot_acc_norm": 78.17
    },
    "harness-c-sem-v2": {
      "acc": 91.2775,
      "acc_stderr": 0,
      "c_sem_v2-LLSRC_5shot_acc": 93.81,
      "c_sem_v2-SLPWC_5shot_acc": 88.71,
      "c_sem_v2-SLRFC_5shot_acc": 96.83,
      "c_sem_v2-SLSRC_5shot_acc": 85.76,
      "c_sem_v2-LLSRC_5shot_acc_norm": 93.81,
      "c_sem_v2-SLPWC_5shot_acc_norm": 88.71,
      "c_sem_v2-SLRFC_5shot_acc_norm": 96.83,
      "c_sem_v2-SLSRC_5shot_acc_norm": 85.76
    },
    "harness-c_truthfulqa_mc": {
      "mc2": 66.26,
      "acc_stderr": 0,
      "c_truthfulqa_mc_0shot_mc1": 48.47,
      "c_truthfulqa_mc_0shot_mc2": 66.26
    },
    "harness-c_winogrande": {
      "acc": 71.67,
      "acc_stderr": 0,
      "c_winogrande_0shot_acc": 71.67
    },
    "harness-cmmlu": {
      "acc_norm": 74.52,
      "acc_stderr": 0,
      "cmmlu_fullavg_5shot_acc": 74.52,
      "cmmlu-virology_5shot_acc": 51.81,
      "cmmlu-marketing_5shot_acc": 90.17,
      "cmmlu-nutrition_5shot_acc": 84.31,
      "cmmlu-sociology_5shot_acc": 88.06,
      "cmmlu-management_5shot_acc": 77.67,
      "cmmlu-philosophy_5shot_acc": 75.88,
      "cmmlu-prehistory_5shot_acc": 77.16,
      "cmmlu-miscellaneous_5shot_acc": 81.86,
      "cmmlu-moral_disputes_5shot_acc": 75.14,
      "cmmlu-moral_scenarios_5shot_acc": 65.03,
      "cmmlu-world_religions_5shot_acc": 78.36,
      "cmmlu-medical_genetics_5shot_acc": 73.0,
      "cmmlu-professional_law_5shot_acc": 55.61,
      "cmmlu-public_relations_5shot_acc": 68.18,
      "cmmlu-security_studies_5shot_acc": 76.73,
      "cmmlu-us_foreign_policy_5shot_acc": 91.0,
      "cmmlu-professional_medicine_5shot_acc": 75.74,
      "cmmlu-professional_accounting_5shot_acc": 58.16,
      "cmmlu-professional_psychology_5shot_acc": 72.06,
      "cmmlu_fullavg_5shot_acc_norm": 74.52,
      "cmmlu-virology_5shot_acc_norm": 51.81,
      "cmmlu-marketing_5shot_acc_norm": 90.17,
      "cmmlu-nutrition_5shot_acc_norm": 84.31,
      "cmmlu-sociology_5shot_acc_norm": 88.06,
      "cmmlu-management_5shot_acc_norm": 77.67,
      "cmmlu-philosophy_5shot_acc_norm": 75.88,
      "cmmlu-prehistory_5shot_acc_norm": 77.16,
      "cmmlu-miscellaneous_5shot_acc_norm": 81.86,
      "cmmlu-moral_disputes_5shot_acc_norm": 75.14,
      "cmmlu-moral_scenarios_5shot_acc_norm": 65.03,
      "cmmlu-world_religions_5shot_acc_norm": 78.36,
      "cmmlu-medical_genetics_5shot_acc_norm": 73.0,
      "cmmlu-professional_law_5shot_acc_norm": 55.61,
      "cmmlu-public_relations_5shot_acc_norm": 68.18,
      "cmmlu-security_studies_5shot_acc_norm": 76.73,
      "cmmlu-us_foreign_policy_5shot_acc_norm": 91.0,
      "cmmlu-professional_medicine_5shot_acc_norm": 75.74,
      "cmmlu-professional_accounting_5shot_acc_norm": 58.16,
      "cmmlu-professional_psychology_5shot_acc_norm": 72.06
    }
  },
  "versions": {},
  "config_tasks": {},
  "summary_tasks": {},
  "summary_general": {}
}