{ "config_general": { "model_name": "Qwen/Qwen2-72B", "model_dtype": "float16", "model_size": 0 }, "results": { "harness-c_arc_challenge": { "acc_norm": 63.74, "acc_stderr": 0, "c_arc_challenge_25shot_acc": 57.42, "c_arc_challenge_25shot_acc_norm": 63.74 }, "harness-c_gsm8k": { "acc": 75.36, "acc_stderr": 0, "c_gsm8k_5shot_acc": 75.36 }, "harness-c_hellaswag": { "acc_norm": 73.26, "acc_stderr": 0, "c_hellaswag_10shot_acc": 54.44, "c_hellaswag_10shot_acc_norm": 73.26 }, "harness-c-sem-v2": { "acc": 93.4675, "acc_stderr": 0, "c_sem_v2-LLSRC_5shot_acc": 94.68, "c_sem_v2-SLPWC_5shot_acc": 91.57, "c_sem_v2-SLRFC_5shot_acc": 97.55, "c_sem_v2-SLSRC_5shot_acc": 90.07, "c_sem_v2-LLSRC_5shot_acc_norm": 94.68, "c_sem_v2-SLPWC_5shot_acc_norm": 91.57, "c_sem_v2-SLRFC_5shot_acc_norm": 97.55, "c_sem_v2-SLSRC_5shot_acc_norm": 90.07 }, "harness-c_truthfulqa_mc": { "mc2": 52.77, "acc_stderr": 0, "c_truthfulqa_mc_0shot_mc1": 31.09, "c_truthfulqa_mc_0shot_mc2": 52.77 }, "harness-c_winogrande": { "acc": 71.35, "acc_stderr": 0, "c_winogrande_0shot_acc": 71.35 }, "harness-cmmlu": { "acc_norm": 78.66, "acc_stderr": 0, "cmmlu_fullavg_5shot_acc": 78.66, "cmmlu-virology_5shot_acc": 57.23, "cmmlu-sociology_5shot_acc": 88.56, "cmmlu-world_religions_5shot_acc": 82.46, "cmmlu-public_relations_5shot_acc": 70.0, "cmmlu-security_studies_5shot_acc": 80.82, "cmmlu-us_foreign_policy_5shot_acc": 91.0, "cmmlu-professional_psychology_5shot_acc": 80.56, "cmmlu_fullavg_5shot_acc_norm": 78.66, "cmmlu-virology_5shot_acc_norm": 57.23, "cmmlu-sociology_5shot_acc_norm": 88.56, "cmmlu-world_religions_5shot_acc_norm": 82.46, "cmmlu-public_relations_5shot_acc_norm": 70.0, "cmmlu-security_studies_5shot_acc_norm": 80.82, "cmmlu-us_foreign_policy_5shot_acc_norm": 91.0, "cmmlu-professional_psychology_5shot_acc_norm": 80.56 } }, "versions": {}, "config_tasks": {}, "summary_tasks": {}, "summary_general": {} }