{ "config_general": { "model_name": "abacusai/Smaug-Llama-3-70B-Instruct", "model_dtype": "float16", "model_size": 0 }, "results": { "harness-c_arc_challenge": { "acc_norm": 58.36, "acc_stderr": 0, "c_arc_challenge_25shot_acc": 54.1, "c_arc_challenge_25shot_acc_norm": 58.36 }, "harness-c_gsm8k": { "acc": 70.05, "acc_stderr": 0, "c_gsm8k_5shot_acc": 70.05 }, "harness-c_hellaswag": { "acc_norm": 64.73, "acc_stderr": 0, "c_hellaswag_10shot_acc": 47.24, "c_hellaswag_10shot_acc_norm": 64.73 }, "harness-c-sem-v2": { "acc": 87.07750000000001, "acc_stderr": 0, "c_sem_v2-LLSRC_5shot_acc": 90.79, "c_sem_v2-SLPWC_5shot_acc": 84.43, "c_sem_v2-SLRFC_5shot_acc": 89.06, "c_sem_v2-SLSRC_5shot_acc": 84.03, "c_sem_v2-LLSRC_5shot_acc_norm": 90.79, "c_sem_v2-SLPWC_5shot_acc_norm": 84.43, "c_sem_v2-SLRFC_5shot_acc_norm": 89.06, "c_sem_v2-SLSRC_5shot_acc_norm": 84.03 }, "harness-c_truthfulqa_mc": { "mc2": 53.91, "acc_stderr": 0, "c_truthfulqa_mc_0shot_mc1": 31.21, "c_truthfulqa_mc_0shot_mc2": 53.91 }, "harness-c_winogrande": { "acc": 62.12, "acc_stderr": 0, "c_winogrande_0shot_acc": 62.12 }, "harness-cmmlu": { "acc_norm": 80.7, "acc_stderr": 0, "cmmlu_fullavg_5shot_acc": 80.7, "cmmlu-world_religions_5shot_acc": 80.7, "cmmlu_fullavg_5shot_acc_norm": 80.7, "cmmlu-world_religions_5shot_acc_norm": 80.7 } }, "versions": {}, "config_tasks": {}, "summary_tasks": {}, "summary_general": {} }