{
"config_general": {
"model_name": "paloalma/Le_Triomphant-ECE-TW3",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 69.2,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 64.25,
"c_arc_challenge_25shot_acc_norm": 69.2
},
"harness-c_gsm8k": {
"acc": 71.57,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 71.57
},
"harness-c_hellaswag": {
"acc_norm": 76.54,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 58.17,
"c_hellaswag_10shot_acc_norm": 76.54
},
"harness-c-sem-v2": {
"acc": 91.1,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 93.81,
"c_sem_v2-SLPWC_5shot_acc": 88.0,
"c_sem_v2-SLRFC_5shot_acc": 96.98,
"c_sem_v2-SLSRC_5shot_acc": 85.61,
"c_sem_v2-LLSRC_5shot_acc_norm": 93.81,
"c_sem_v2-SLPWC_5shot_acc_norm": 88.0,
"c_sem_v2-SLRFC_5shot_acc_norm": 96.98,
"c_sem_v2-SLSRC_5shot_acc_norm": 85.61
},
"harness-c_truthfulqa_mc": {
"mc2": 66.42,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 48.96,
"c_truthfulqa_mc_0shot_mc2": 66.42
},
"harness-c_winogrande": {
"acc": 70.09,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 70.09
},
"harness-cmmlu": {
"acc_norm": 73.51,
"acc_stderr": 0,
"cmmlu_fullavg_5shot_acc": 73.51,
"cmmlu-virology_5shot_acc": 51.81,
"cmmlu-nutrition_5shot_acc": 83.99,
"cmmlu-sociology_5shot_acc": 86.57,
"cmmlu-philosophy_5shot_acc": 75.88,
"cmmlu-prehistory_5shot_acc": 77.47,
"cmmlu-miscellaneous_5shot_acc": 81.86,
"cmmlu-moral_disputes_5shot_acc": 77.75,
"cmmlu-moral_scenarios_5shot_acc": 62.01,
"cmmlu-world_religions_5shot_acc": 79.53,
"cmmlu-professional_law_5shot_acc": 54.5,
"cmmlu-public_relations_5shot_acc": 69.09,
"cmmlu-security_studies_5shot_acc": 76.73,
"cmmlu-us_foreign_policy_5shot_acc": 90.0,
"cmmlu-professional_medicine_5shot_acc": 77.94,
"cmmlu-professional_accounting_5shot_acc": 58.51,
"cmmlu-professional_psychology_5shot_acc": 72.55,
"cmmlu_fullavg_5shot_acc_norm": 73.51,
"cmmlu-virology_5shot_acc_norm": 51.81,
"cmmlu-nutrition_5shot_acc_norm": 83.99,
"cmmlu-sociology_5shot_acc_norm": 86.57,
"cmmlu-philosophy_5shot_acc_norm": 75.88,
"cmmlu-prehistory_5shot_acc_norm": 77.47,
"cmmlu-miscellaneous_5shot_acc_norm": 81.86,
"cmmlu-moral_disputes_5shot_acc_norm": 77.75,
"cmmlu-moral_scenarios_5shot_acc_norm": 62.01,
"cmmlu-world_religions_5shot_acc_norm": 79.53,
"cmmlu-professional_law_5shot_acc_norm": 54.5,
"cmmlu-public_relations_5shot_acc_norm": 69.09,
"cmmlu-security_studies_5shot_acc_norm": 76.73,
"cmmlu-us_foreign_policy_5shot_acc_norm": 90.0,
"cmmlu-professional_medicine_5shot_acc_norm": 77.94,
"cmmlu-professional_accounting_5shot_acc_norm": 58.51,
"cmmlu-professional_psychology_5shot_acc_norm": 72.55
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}