{
"config_general": {
"model_name": "refuelai/Llama-3-Refueled",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 41.55,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 35.75,
"c_arc_challenge_25shot_acc_norm": 41.55
},
"harness-c_gsm8k": {
"acc": 37.45,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 37.45
},
"harness-c_hellaswag": {
"acc_norm": 51.62,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 38.18,
"c_hellaswag_10shot_acc_norm": 51.62
},
"harness-c-sem-v2": {
"acc": 71.2775,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 74.53,
"c_sem_v2-SLPWC_5shot_acc": 69.43,
"c_sem_v2-SLRFC_5shot_acc": 65.47,
"c_sem_v2-SLSRC_5shot_acc": 75.68,
"c_sem_v2-LLSRC_5shot_acc_norm": 74.53,
"c_sem_v2-SLPWC_5shot_acc_norm": 69.43,
"c_sem_v2-SLRFC_5shot_acc_norm": 65.47,
"c_sem_v2-SLSRC_5shot_acc_norm": 75.68
},
"harness-c_truthfulqa_mc": {
"mc2": 44.64,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 22.28,
"c_truthfulqa_mc_0shot_mc2": 44.64
},
"harness-c_winogrande": {
"acc": 60.62,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 60.62
},
"harness-cmmlu": {
"acc_norm": 51.67,
"acc_stderr": 0,
"cmmlu_fullavg_5shot_acc": 51.67,
"cmmlu-virology_5shot_acc": 42.17,
"cmmlu-marketing_5shot_acc": 74.36,
"cmmlu-nutrition_5shot_acc": 57.52,
"cmmlu-sociology_5shot_acc": 66.67,
"cmmlu-management_5shot_acc": 64.08,
"cmmlu-philosophy_5shot_acc": 50.16,
"cmmlu-prehistory_5shot_acc": 47.84,
"cmmlu-human_aging_5shot_acc": 49.78,
"cmmlu-econometrics_5shot_acc": 34.21,
"cmmlu-formal_logic_5shot_acc": 30.4,
"cmmlu-global_facts_5shot_acc": 31.0,
"cmmlu-jurisprudence_5shot_acc": 69.44,
"cmmlu-miscellaneous_5shot_acc": 55.94,
"cmmlu-moral_disputes_5shot_acc": 57.8,
"cmmlu-college_physics_5shot_acc": 33.33,
"cmmlu-human_sexuality_5shot_acc": 50.38,
"cmmlu-moral_scenarios_5shot_acc": 34.64,
"cmmlu-world_religions_5shot_acc": 57.89,
"cmmlu-college_medicine_5shot_acc": 47.4,
"cmmlu-machine_learning_5shot_acc": 45.54,
"cmmlu-medical_genetics_5shot_acc": 56.0,
"cmmlu-professional_law_5shot_acc": 39.37,
"cmmlu-public_relations_5shot_acc": 50.91,
"cmmlu-security_studies_5shot_acc": 58.78,
"cmmlu-computer_security_5shot_acc": 62.0,
"cmmlu-international_law_5shot_acc": 69.42,
"cmmlu-logical_fallacies_5shot_acc": 56.44,
"cmmlu-us_foreign_policy_5shot_acc": 66.0,
"cmmlu-conceptual_physics_5shot_acc": 44.68,
"cmmlu-high_school_biology_5shot_acc": 57.1,
"cmmlu-high_school_physics_5shot_acc": 33.11,
"cmmlu-high_school_chemistry_5shot_acc": 39.41,
"cmmlu-high_school_geography_5shot_acc": 61.11,
"cmmlu-professional_medicine_5shot_acc": 45.96,
"cmmlu-electrical_engineering_5shot_acc": 52.41,
"cmmlu-elementary_mathematics_5shot_acc": 38.89,
"cmmlu-high_school_psychology_5shot_acc": 62.57,
"cmmlu-high_school_statistics_5shot_acc": 48.61,
"cmmlu-high_school_us_history_5shot_acc": 62.25,
"cmmlu-high_school_mathematics_5shot_acc": 31.48,
"cmmlu-professional_accounting_5shot_acc": 36.88,
"cmmlu-professional_psychology_5shot_acc": 49.84,
"cmmlu-high_school_world_history_5shot_acc": 71.73,
"cmmlu-high_school_macroeconomics_5shot_acc": 51.28,
"cmmlu-high_school_microeconomics_5shot_acc": 51.26,
"cmmlu-high_school_computer_science_5shot_acc": 64.0,
"cmmlu-high_school_european_history_5shot_acc": 65.45,
"cmmlu-high_school_government_and_politics_5shot_acc": 52.85,
"cmmlu_fullavg_5shot_acc_norm": 51.67,
"cmmlu-virology_5shot_acc_norm": 42.17,
"cmmlu-marketing_5shot_acc_norm": 74.36,
"cmmlu-nutrition_5shot_acc_norm": 57.52,
"cmmlu-sociology_5shot_acc_norm": 66.67,
"cmmlu-management_5shot_acc_norm": 64.08,
"cmmlu-philosophy_5shot_acc_norm": 50.16,
"cmmlu-prehistory_5shot_acc_norm": 47.84,
"cmmlu-human_aging_5shot_acc_norm": 49.78,
"cmmlu-econometrics_5shot_acc_norm": 34.21,
"cmmlu-formal_logic_5shot_acc_norm": 30.4,
"cmmlu-global_facts_5shot_acc_norm": 31.0,
"cmmlu-jurisprudence_5shot_acc_norm": 69.44,
"cmmlu-miscellaneous_5shot_acc_norm": 55.94,
"cmmlu-moral_disputes_5shot_acc_norm": 57.8,
"cmmlu-college_physics_5shot_acc_norm": 33.33,
"cmmlu-human_sexuality_5shot_acc_norm": 50.38,
"cmmlu-moral_scenarios_5shot_acc_norm": 34.64,
"cmmlu-world_religions_5shot_acc_norm": 57.89,
"cmmlu-college_medicine_5shot_acc_norm": 47.4,
"cmmlu-machine_learning_5shot_acc_norm": 45.54,
"cmmlu-medical_genetics_5shot_acc_norm": 56.0,
"cmmlu-professional_law_5shot_acc_norm": 39.37,
"cmmlu-public_relations_5shot_acc_norm": 50.91,
"cmmlu-security_studies_5shot_acc_norm": 58.78,
"cmmlu-computer_security_5shot_acc_norm": 62.0,
"cmmlu-international_law_5shot_acc_norm": 69.42,
"cmmlu-logical_fallacies_5shot_acc_norm": 56.44,
"cmmlu-us_foreign_policy_5shot_acc_norm": 66.0,
"cmmlu-conceptual_physics_5shot_acc_norm": 44.68,
"cmmlu-high_school_biology_5shot_acc_norm": 57.1,
"cmmlu-high_school_physics_5shot_acc_norm": 33.11,
"cmmlu-high_school_chemistry_5shot_acc_norm": 39.41,
"cmmlu-high_school_geography_5shot_acc_norm": 61.11,
"cmmlu-professional_medicine_5shot_acc_norm": 45.96,
"cmmlu-electrical_engineering_5shot_acc_norm": 52.41,
"cmmlu-elementary_mathematics_5shot_acc_norm": 38.89,
"cmmlu-high_school_psychology_5shot_acc_norm": 62.57,
"cmmlu-high_school_statistics_5shot_acc_norm": 48.61,
"cmmlu-high_school_us_history_5shot_acc_norm": 62.25,
"cmmlu-high_school_mathematics_5shot_acc_norm": 31.48,
"cmmlu-professional_accounting_5shot_acc_norm": 36.88,
"cmmlu-professional_psychology_5shot_acc_norm": 49.84,
"cmmlu-high_school_world_history_5shot_acc_norm": 71.73,
"cmmlu-high_school_macroeconomics_5shot_acc_norm": 51.28,
"cmmlu-high_school_microeconomics_5shot_acc_norm": 51.26,
"cmmlu-high_school_computer_science_5shot_acc_norm": 64.0,
"cmmlu-high_school_european_history_5shot_acc_norm": 65.45,
"cmmlu-high_school_government_and_politics_5shot_acc_norm": 52.85
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}
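
For reference, a minimal Python sketch of how the "harness-c-sem-v2" aggregate above relates to its four subtask scores: the stored "acc" of 71.2775 is the unweighted mean of the LLSRC, SLPWC, SLRFC, and SLSRC 5-shot accuracies. Key names are taken verbatim from the JSON; the file path is an assumption (adjust to wherever this results file lives).

import json

# Load this results file (path assumed; adjust as needed).
with open("results_2024-05-26T10-12-26.512843.json") as f:
    data = json.load(f)

sem = data["results"]["harness-c-sem-v2"]

# The four 5-shot subtask accuracies recorded in this file.
subtasks = [
    sem["c_sem_v2-LLSRC_5shot_acc"],  # 74.53
    sem["c_sem_v2-SLPWC_5shot_acc"],  # 69.43
    sem["c_sem_v2-SLRFC_5shot_acc"],  # 65.47
    sem["c_sem_v2-SLSRC_5shot_acc"],  # 75.68
]

# Unweighted mean reproduces the stored aggregate:
# (74.53 + 69.43 + 65.47 + 75.68) / 4 = 71.2775
mean = sum(subtasks) / len(subtasks)
assert abs(mean - sem["acc"]) < 1e-6  # tolerance for float rounding
print(mean)  # 71.2775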