Mixtral-8x22B-Instruct-v0.1-FP8 / mmlu / __home__mlr__models__Mixtral-8x22B-Instruct-v0.1-FP8 / results_2024-06-07T16-13-28.474390.json
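The lm-evaluation-harness results below report MMLU accuracy ("acc,none") and its standard error ("acc_stderr,none") for the benchmark as a whole, for the four category groups, and for each individual subject task, followed by the per-task configs. As an illustration only (not part of the original file), here is a minimal Python sketch for reading the group-level summary, assuming the JSON has been saved locally under the filename shown above:

import json

# Load the results file and print each group's accuracy with its standard error.
# The "groups" block holds the aggregate mmlu score plus the four category groups.
with open("results_2024-06-07T16-13-28.474390.json") as f:
    data = json.load(f)

for name, scores in data["groups"].items():
    alias = scores.get("alias", name).strip()
    print(f"{alias:<20} acc = {scores['acc,none']:.4f} ± {scores['acc_stderr,none']:.4f}")

The full JSON payload follows.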
{
  "results": {
    "mmlu": {
      "acc,none": 0.7060959977211223,
      "acc_stderr,none": 0.0036419117884613442,
      "alias": "mmlu"
    },
    "mmlu_humanities": {
      "alias": " - humanities",
      "acc,none": 0.6561105207226355,
      "acc_stderr,none": 0.006537667125056556
    },
    "mmlu_formal_logic": {
      "alias": " - formal_logic",
      "acc,none": 0.5793650793650794,
      "acc_stderr,none": 0.04415438226743745
    },
    "mmlu_high_school_european_history": {
      "alias": " - high_school_european_history",
      "acc,none": 0.8181818181818182,
      "acc_stderr,none": 0.030117688929503582
    },
    "mmlu_high_school_us_history": {
      "alias": " - high_school_us_history",
      "acc,none": 0.8627450980392157,
      "acc_stderr,none": 0.024152225962801577
    },
    "mmlu_high_school_world_history": {
      "alias": " - high_school_world_history",
      "acc,none": 0.8565400843881856,
      "acc_stderr,none": 0.022818291821017012
    },
    "mmlu_international_law": {
      "alias": " - international_law",
      "acc,none": 0.8512396694214877,
      "acc_stderr,none": 0.03248470083807195
    },
    "mmlu_jurisprudence": {
      "alias": " - jurisprudence",
      "acc,none": 0.8518518518518519,
      "acc_stderr,none": 0.03434300243631002
    },
    "mmlu_logical_fallacies": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7975460122699386,
      "acc_stderr,none": 0.031570650789119
    },
    "mmlu_moral_disputes": {
      "alias": " - moral_disputes",
      "acc,none": 0.7947976878612717,
      "acc_stderr,none": 0.021742519835276274
    },
    "mmlu_moral_scenarios": {
      "alias": " - moral_scenarios",
      "acc,none": 0.4983240223463687,
      "acc_stderr,none": 0.016722407608296398
    },
    "mmlu_philosophy": {
      "alias": " - philosophy",
      "acc,none": 0.7942122186495176,
      "acc_stderr,none": 0.022961339906764234
    },
    "mmlu_prehistory": {
      "alias": " - prehistory",
      "acc,none": 0.7993827160493827,
      "acc_stderr,none": 0.0222823139497749
    },
    "mmlu_professional_law": {
      "alias": " - professional_law",
      "acc,none": 0.516297262059974,
      "acc_stderr,none": 0.012763450734699812
    },
    "mmlu_world_religions": {
      "alias": " - world_religions",
      "acc,none": 0.9122807017543859,
      "acc_stderr,none": 0.021696383943889223
    },
    "mmlu_other": {
      "alias": " - other",
      "acc,none": 0.7608625683939492,
      "acc_stderr,none": 0.007354391095553756
    },
    "mmlu_business_ethics": {
      "alias": " - business_ethics",
      "acc,none": 0.77,
      "acc_stderr,none": 0.04229525846816506
    },
    "mmlu_clinical_knowledge": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.7735849056603774,
      "acc_stderr,none": 0.025757559893106758
    },
    "mmlu_college_medicine": {
      "alias": " - college_medicine",
      "acc,none": 0.7398843930635838,
      "acc_stderr,none": 0.033450369167889904
    },
    "mmlu_global_facts": {
      "alias": " - global_facts",
      "acc,none": 0.45,
      "acc_stderr,none": 0.05
    },
    "mmlu_human_aging": {
      "alias": " - human_aging",
      "acc,none": 0.7533632286995515,
      "acc_stderr,none": 0.028930413120910874
    },
    "mmlu_management": {
      "alias": " - management",
      "acc,none": 0.8543689320388349,
      "acc_stderr,none": 0.0349260647662379
    },
    "mmlu_marketing": {
      "alias": " - marketing",
      "acc,none": 0.9145299145299145,
      "acc_stderr,none": 0.01831589168562584
    },
    "mmlu_medical_genetics": {
      "alias": " - medical_genetics",
      "acc,none": 0.77,
      "acc_stderr,none": 0.042295258468165065
    },
    "mmlu_miscellaneous": {
      "alias": " - miscellaneous",
      "acc,none": 0.8518518518518519,
      "acc_stderr,none": 0.012703598899445173
    },
    "mmlu_nutrition": {
      "alias": " - nutrition",
      "acc,none": 0.7908496732026143,
      "acc_stderr,none": 0.02328768531233481
    },
    "mmlu_professional_accounting": {
      "alias": " - professional_accounting",
      "acc,none": 0.5319148936170213,
      "acc_stderr,none": 0.029766675075873866
    },
    "mmlu_professional_medicine": {
      "alias": " - professional_medicine",
      "acc,none": 0.7794117647058824,
      "acc_stderr,none": 0.02518778666022727
    },
    "mmlu_virology": {
      "alias": " - virology",
      "acc,none": 0.5481927710843374,
      "acc_stderr,none": 0.038743715565879536
    },
    "mmlu_social_sciences": {
      "alias": " - social_sciences",
      "acc,none": 0.8059798505037374,
      "acc_stderr,none": 0.007000549787458337
    },
"mmlu_econometrics": { | |
"alias": " - econometrics", | |
"acc,none": 0.6052631578947368, | |
"acc_stderr,none": 0.04598188057816542 | |
}, | |
"mmlu_high_school_geography": { | |
"alias": " - high_school_geography", | |
"acc,none": 0.8636363636363636, | |
"acc_stderr,none": 0.024450155973189835 | |
}, | |
"mmlu_high_school_government_and_politics": { | |
"alias": " - high_school_government_and_politics", | |
"acc,none": 0.9378238341968912, | |
"acc_stderr,none": 0.017426974154240514 | |
}, | |
"mmlu_high_school_macroeconomics": { | |
"alias": " - high_school_macroeconomics", | |
"acc,none": 0.7230769230769231, | |
"acc_stderr,none": 0.022688042352424994 | |
}, | |
"mmlu_high_school_microeconomics": { | |
"alias": " - high_school_microeconomics", | |
"acc,none": 0.8109243697478992, | |
"acc_stderr,none": 0.02543511943810537 | |
}, | |
"mmlu_high_school_psychology": { | |
"alias": " - high_school_psychology", | |
"acc,none": 0.8844036697247707, | |
"acc_stderr,none": 0.013708749534172636 | |
}, | |
"mmlu_human_sexuality": { | |
"alias": " - human_sexuality", | |
"acc,none": 0.7709923664122137, | |
"acc_stderr,none": 0.036853466317118506 | |
}, | |
"mmlu_professional_psychology": { | |
"alias": " - professional_psychology", | |
"acc,none": 0.7532679738562091, | |
"acc_stderr,none": 0.0174408203674025 | |
}, | |
"mmlu_public_relations": { | |
"alias": " - public_relations", | |
"acc,none": 0.7363636363636363, | |
"acc_stderr,none": 0.04220224692971987 | |
}, | |
"mmlu_security_studies": { | |
"alias": " - security_studies", | |
"acc,none": 0.7877551020408163, | |
"acc_stderr,none": 0.026176967197866767 | |
}, | |
"mmlu_sociology": { | |
"alias": " - sociology", | |
"acc,none": 0.8756218905472637, | |
"acc_stderr,none": 0.023335401790166327 | |
}, | |
"mmlu_us_foreign_policy": { | |
"alias": " - us_foreign_policy", | |
"acc,none": 0.9, | |
"acc_stderr,none": 0.030151134457776348 | |
}, | |
"mmlu_stem": { | |
"alias": " - stem", | |
"acc,none": 0.6292419917538852, | |
"acc_stderr,none": 0.00828854335131971 | |
}, | |
"mmlu_abstract_algebra": { | |
"alias": " - abstract_algebra", | |
"acc,none": 0.4, | |
"acc_stderr,none": 0.049236596391733084 | |
}, | |
"mmlu_anatomy": { | |
"alias": " - anatomy", | |
"acc,none": 0.6518518518518519, | |
"acc_stderr,none": 0.041153246103369526 | |
}, | |
"mmlu_astronomy": { | |
"alias": " - astronomy", | |
"acc,none": 0.8026315789473685, | |
"acc_stderr,none": 0.03238981601699397 | |
}, | |
"mmlu_college_biology": { | |
"alias": " - college_biology", | |
"acc,none": 0.8402777777777778, | |
"acc_stderr,none": 0.030635578972093278 | |
}, | |
"mmlu_college_chemistry": { | |
"alias": " - college_chemistry", | |
"acc,none": 0.5, | |
"acc_stderr,none": 0.050251890762960605 | |
}, | |
"mmlu_college_computer_science": { | |
"alias": " - college_computer_science", | |
"acc,none": 0.64, | |
"acc_stderr,none": 0.04824181513244218 | |
}, | |
"mmlu_college_mathematics": { | |
"alias": " - college_mathematics", | |
"acc,none": 0.47, | |
"acc_stderr,none": 0.050161355804659205 | |
}, | |
"mmlu_college_physics": { | |
"alias": " - college_physics", | |
"acc,none": 0.49019607843137253, | |
"acc_stderr,none": 0.04974229460422817 | |
}, | |
"mmlu_computer_security": { | |
"alias": " - computer_security", | |
"acc,none": 0.8, | |
"acc_stderr,none": 0.040201512610368445 | |
}, | |
"mmlu_conceptual_physics": { | |
"alias": " - conceptual_physics", | |
"acc,none": 0.6978723404255319, | |
"acc_stderr,none": 0.030017554471880557 | |
}, | |
"mmlu_electrical_engineering": { | |
"alias": " - electrical_engineering", | |
"acc,none": 0.6827586206896552, | |
"acc_stderr,none": 0.03878352372138622 | |
}, | |
"mmlu_elementary_mathematics": { | |
"alias": " - elementary_mathematics", | |
"acc,none": 0.5582010582010583, | |
"acc_stderr,none": 0.025576257061253833 | |
}, | |
"mmlu_high_school_biology": { | |
"alias": " - high_school_biology", | |
"acc,none": 0.8064516129032258, | |
"acc_stderr,none": 0.02247525852553606 | |
}, | |
"mmlu_high_school_chemistry": { | |
"alias": " - high_school_chemistry", | |
"acc,none": 0.5566502463054187, | |
"acc_stderr,none": 0.03495334582162933 | |
}, | |
"mmlu_high_school_computer_science": { | |
"alias": " - high_school_computer_science", | |
"acc,none": 0.8, | |
"acc_stderr,none": 0.04020151261036846 | |
}, | |
"mmlu_high_school_mathematics": { | |
"alias": " - high_school_mathematics", | |
"acc,none": 0.44074074074074077, | |
"acc_stderr,none": 0.030270671157284074 | |
}, | |
"mmlu_high_school_physics": { | |
"alias": " - high_school_physics", | |
"acc,none": 0.4768211920529801, | |
"acc_stderr,none": 0.04078093859163084 | |
}, | |
"mmlu_high_school_statistics": { | |
"alias": " - high_school_statistics", | |
"acc,none": 0.6898148148148148, | |
"acc_stderr,none": 0.03154696285656629 | |
}, | |
"mmlu_machine_learning": { | |
"alias": " - machine_learning", | |
"acc,none": 0.5803571428571429, | |
"acc_stderr,none": 0.04684099321077106 | |
} | |
}, | |
"groups": { | |
"mmlu": { | |
"acc,none": 0.7060959977211223, | |
"acc_stderr,none": 0.0036419117884613442, | |
"alias": "mmlu" | |
}, | |
"mmlu_humanities": { | |
"alias": " - humanities", | |
"acc,none": 0.6561105207226355, | |
"acc_stderr,none": 0.006537667125056556 | |
}, | |
"mmlu_other": { | |
"alias": " - other", | |
"acc,none": 0.7608625683939492, | |
"acc_stderr,none": 0.007354391095553756 | |
}, | |
"mmlu_social_sciences": { | |
"alias": " - social_sciences", | |
"acc,none": 0.8059798505037374, | |
"acc_stderr,none": 0.007000549787458337 | |
}, | |
"mmlu_stem": { | |
"alias": " - stem", | |
"acc,none": 0.6292419917538852, | |
"acc_stderr,none": 0.00828854335131971 | |
} | |
}, | |
"group_subtasks": { | |
"mmlu_stem": [ | |
"mmlu_college_computer_science", | |
"mmlu_high_school_physics", | |
"mmlu_college_chemistry", | |
"mmlu_college_biology", | |
"mmlu_high_school_mathematics", | |
"mmlu_high_school_computer_science", | |
"mmlu_electrical_engineering", | |
"mmlu_college_physics", | |
"mmlu_anatomy", | |
"mmlu_college_mathematics", | |
"mmlu_elementary_mathematics", | |
"mmlu_high_school_chemistry", | |
"mmlu_machine_learning", | |
"mmlu_abstract_algebra", | |
"mmlu_astronomy", | |
"mmlu_computer_security", | |
"mmlu_high_school_biology", | |
"mmlu_high_school_statistics", | |
"mmlu_conceptual_physics" | |
], | |
"mmlu_other": [ | |
"mmlu_business_ethics", | |
"mmlu_virology", | |
"mmlu_nutrition", | |
"mmlu_management", | |
"mmlu_clinical_knowledge", | |
"mmlu_marketing", | |
"mmlu_college_medicine", | |
"mmlu_professional_medicine", | |
"mmlu_medical_genetics", | |
"mmlu_human_aging", | |
"mmlu_professional_accounting", | |
"mmlu_miscellaneous", | |
"mmlu_global_facts" | |
], | |
"mmlu_social_sciences": [ | |
"mmlu_high_school_government_and_politics", | |
"mmlu_human_sexuality", | |
"mmlu_high_school_microeconomics", | |
"mmlu_high_school_macroeconomics", | |
"mmlu_public_relations", | |
"mmlu_sociology", | |
"mmlu_professional_psychology", | |
"mmlu_high_school_psychology", | |
"mmlu_econometrics", | |
"mmlu_high_school_geography", | |
"mmlu_us_foreign_policy", | |
"mmlu_security_studies" | |
], | |
"mmlu_humanities": [ | |
"mmlu_high_school_european_history", | |
"mmlu_high_school_world_history", | |
"mmlu_professional_law", | |
"mmlu_logical_fallacies", | |
"mmlu_high_school_us_history", | |
"mmlu_world_religions", | |
"mmlu_prehistory", | |
"mmlu_jurisprudence", | |
"mmlu_moral_scenarios", | |
"mmlu_formal_logic", | |
"mmlu_philosophy", | |
"mmlu_international_law", | |
"mmlu_moral_disputes" | |
], | |
"mmlu": [ | |
"mmlu_humanities", | |
"mmlu_social_sciences", | |
"mmlu_other", | |
"mmlu_stem" | |
] | |
}, | |
"configs": { | |
"mmlu_abstract_algebra": { | |
"task": "mmlu_abstract_algebra", | |
"task_alias": "abstract_algebra", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "abstract_algebra", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_anatomy": { | |
"task": "mmlu_anatomy", | |
"task_alias": "anatomy", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "anatomy", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about anatomy.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_astronomy": { | |
"task": "mmlu_astronomy", | |
"task_alias": "astronomy", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "astronomy", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_business_ethics": { | |
"task": "mmlu_business_ethics", | |
"task_alias": "business_ethics", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "business_ethics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about business ethics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_clinical_knowledge": { | |
"task": "mmlu_clinical_knowledge", | |
"task_alias": "clinical_knowledge", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "clinical_knowledge", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_college_biology": { | |
"task": "mmlu_college_biology", | |
"task_alias": "college_biology", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "college_biology", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about college biology.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_college_chemistry": { | |
"task": "mmlu_college_chemistry", | |
"task_alias": "college_chemistry", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "college_chemistry", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about college chemistry.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_college_computer_science": { | |
"task": "mmlu_college_computer_science", | |
"task_alias": "college_computer_science", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "college_computer_science", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about college computer science.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_college_mathematics": { | |
"task": "mmlu_college_mathematics", | |
"task_alias": "college_mathematics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "college_mathematics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about college mathematics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_college_medicine": { | |
"task": "mmlu_college_medicine", | |
"task_alias": "college_medicine", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "college_medicine", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about college medicine.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_college_physics": { | |
"task": "mmlu_college_physics", | |
"task_alias": "college_physics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "college_physics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about college physics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_computer_security": { | |
"task": "mmlu_computer_security", | |
"task_alias": "computer_security", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "computer_security", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about computer security.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_conceptual_physics": { | |
"task": "mmlu_conceptual_physics", | |
"task_alias": "conceptual_physics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "conceptual_physics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_econometrics": { | |
"task": "mmlu_econometrics", | |
"task_alias": "econometrics", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "econometrics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about econometrics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_electrical_engineering": { | |
"task": "mmlu_electrical_engineering", | |
"task_alias": "electrical_engineering", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "electrical_engineering", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_elementary_mathematics": { | |
"task": "mmlu_elementary_mathematics", | |
"task_alias": "elementary_mathematics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "elementary_mathematics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_formal_logic": { | |
"task": "mmlu_formal_logic", | |
"task_alias": "formal_logic", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "formal_logic", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about formal logic.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_global_facts": { | |
"task": "mmlu_global_facts", | |
"task_alias": "global_facts", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "global_facts", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about global facts.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_biology": { | |
"task": "mmlu_high_school_biology", | |
"task_alias": "high_school_biology", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_biology", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school biology.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_chemistry": { | |
"task": "mmlu_high_school_chemistry", | |
"task_alias": "high_school_chemistry", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_chemistry", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_computer_science": { | |
"task": "mmlu_high_school_computer_science", | |
"task_alias": "high_school_computer_science", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_computer_science", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school computer science.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_european_history": { | |
"task": "mmlu_high_school_european_history", | |
"task_alias": "high_school_european_history", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_european_history", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school european history.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_geography": { | |
"task": "mmlu_high_school_geography", | |
"task_alias": "high_school_geography", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_geography", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school geography.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_government_and_politics": { | |
"task": "mmlu_high_school_government_and_politics", | |
"task_alias": "high_school_government_and_politics", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_government_and_politics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_macroeconomics": { | |
"task": "mmlu_high_school_macroeconomics", | |
"task_alias": "high_school_macroeconomics", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_macroeconomics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_mathematics": { | |
"task": "mmlu_high_school_mathematics", | |
"task_alias": "high_school_mathematics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_mathematics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_microeconomics": { | |
"task": "mmlu_high_school_microeconomics", | |
"task_alias": "high_school_microeconomics", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_microeconomics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_physics": { | |
"task": "mmlu_high_school_physics", | |
"task_alias": "high_school_physics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_physics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school physics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_psychology": { | |
"task": "mmlu_high_school_psychology", | |
"task_alias": "high_school_psychology", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_psychology", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school psychology.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_statistics": { | |
"task": "mmlu_high_school_statistics", | |
"task_alias": "high_school_statistics", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_statistics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school statistics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_us_history": { | |
"task": "mmlu_high_school_us_history", | |
"task_alias": "high_school_us_history", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_us_history", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school us history.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_high_school_world_history": { | |
"task": "mmlu_high_school_world_history", | |
"task_alias": "high_school_world_history", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "high_school_world_history", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about high school world history.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_human_aging": { | |
"task": "mmlu_human_aging", | |
"task_alias": "human_aging", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "human_aging", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about human aging.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_human_sexuality": { | |
"task": "mmlu_human_sexuality", | |
"task_alias": "human_sexuality", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "human_sexuality", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about human sexuality.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_international_law": { | |
"task": "mmlu_international_law", | |
"task_alias": "international_law", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "international_law", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about international law.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_jurisprudence": { | |
"task": "mmlu_jurisprudence", | |
"task_alias": "jurisprudence", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "jurisprudence", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_logical_fallacies": { | |
"task": "mmlu_logical_fallacies", | |
"task_alias": "logical_fallacies", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "logical_fallacies", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_machine_learning": { | |
"task": "mmlu_machine_learning", | |
"task_alias": "machine_learning", | |
"group": "mmlu_stem", | |
"group_alias": "stem", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "machine_learning", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about machine learning.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_management": { | |
"task": "mmlu_management", | |
"task_alias": "management", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "management", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about management.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_marketing": { | |
"task": "mmlu_marketing", | |
"task_alias": "marketing", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "marketing", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about marketing.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_medical_genetics": { | |
"task": "mmlu_medical_genetics", | |
"task_alias": "medical_genetics", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "medical_genetics", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about medical genetics.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_miscellaneous": { | |
"task": "mmlu_miscellaneous", | |
"task_alias": "miscellaneous", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "miscellaneous", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_moral_disputes": { | |
"task": "mmlu_moral_disputes", | |
"task_alias": "moral_disputes", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "moral_disputes", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about moral disputes.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_moral_scenarios": { | |
"task": "mmlu_moral_scenarios", | |
"task_alias": "moral_scenarios", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "moral_scenarios", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_nutrition": { | |
"task": "mmlu_nutrition", | |
"task_alias": "nutrition", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "nutrition", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about nutrition.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_philosophy": { | |
"task": "mmlu_philosophy", | |
"task_alias": "philosophy", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "philosophy", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about philosophy.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_prehistory": { | |
"task": "mmlu_prehistory", | |
"task_alias": "prehistory", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "prehistory", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about prehistory.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_professional_accounting": { | |
"task": "mmlu_professional_accounting", | |
"task_alias": "professional_accounting", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "professional_accounting", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about professional accounting.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_professional_law": { | |
"task": "mmlu_professional_law", | |
"task_alias": "professional_law", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "professional_law", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about professional law.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_professional_medicine": { | |
"task": "mmlu_professional_medicine", | |
"task_alias": "professional_medicine", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "professional_medicine", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about professional medicine.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_professional_psychology": { | |
"task": "mmlu_professional_psychology", | |
"task_alias": "professional_psychology", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "professional_psychology", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about professional psychology.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_public_relations": { | |
"task": "mmlu_public_relations", | |
"task_alias": "public_relations", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "public_relations", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about public relations.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_security_studies": { | |
"task": "mmlu_security_studies", | |
"task_alias": "security_studies", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "security_studies", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about security studies.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_sociology": { | |
"task": "mmlu_sociology", | |
"task_alias": "sociology", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "sociology", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about sociology.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_us_foreign_policy": { | |
"task": "mmlu_us_foreign_policy", | |
"task_alias": "us_foreign_policy", | |
"group": "mmlu_social_sciences", | |
"group_alias": "social_sciences", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "us_foreign_policy", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_virology": { | |
"task": "mmlu_virology", | |
"task_alias": "virology", | |
"group": "mmlu_other", | |
"group_alias": "other", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "virology", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about virology.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
}, | |
"mmlu_world_religions": { | |
"task": "mmlu_world_religions", | |
"task_alias": "world_religions", | |
"group": "mmlu_humanities", | |
"group_alias": "humanities", | |
"dataset_path": "hails/mmlu_no_train", | |
"dataset_name": "world_religions", | |
"test_split": "test", | |
"fewshot_split": "dev", | |
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", | |
"doc_to_target": "answer", | |
"doc_to_choice": [ | |
"A", | |
"B", | |
"C", | |
"D" | |
], | |
"description": "The following are multiple choice questions (with answers) about world religions.\n\n", | |
"target_delimiter": " ", | |
"fewshot_delimiter": "\n\n", | |
"fewshot_config": { | |
"sampler": "first_n" | |
}, | |
"num_fewshot": 5, | |
"metric_list": [ | |
{ | |
"metric": "acc", | |
"aggregation": "mean", | |
"higher_is_better": true | |
} | |
], | |
"output_type": "multiple_choice", | |
"repeats": 1, | |
"should_decontaminate": false, | |
"metadata": { | |
"version": 0.0 | |
} | |
} | |
}, | |
"versions": { | |
"mmlu_abstract_algebra": 0.0, | |
"mmlu_anatomy": 0.0, | |
"mmlu_astronomy": 0.0, | |
"mmlu_business_ethics": 0.0, | |
"mmlu_clinical_knowledge": 0.0, | |
"mmlu_college_biology": 0.0, | |
"mmlu_college_chemistry": 0.0, | |
"mmlu_college_computer_science": 0.0, | |
"mmlu_college_mathematics": 0.0, | |
"mmlu_college_medicine": 0.0, | |
"mmlu_college_physics": 0.0, | |
"mmlu_computer_security": 0.0, | |
"mmlu_conceptual_physics": 0.0, | |
"mmlu_econometrics": 0.0, | |
"mmlu_electrical_engineering": 0.0, | |
"mmlu_elementary_mathematics": 0.0, | |
"mmlu_formal_logic": 0.0, | |
"mmlu_global_facts": 0.0, | |
"mmlu_high_school_biology": 0.0, | |
"mmlu_high_school_chemistry": 0.0, | |
"mmlu_high_school_computer_science": 0.0, | |
"mmlu_high_school_european_history": 0.0, | |
"mmlu_high_school_geography": 0.0, | |
"mmlu_high_school_government_and_politics": 0.0, | |
"mmlu_high_school_macroeconomics": 0.0, | |
"mmlu_high_school_mathematics": 0.0, | |
"mmlu_high_school_microeconomics": 0.0, | |
"mmlu_high_school_physics": 0.0, | |
"mmlu_high_school_psychology": 0.0, | |
"mmlu_high_school_statistics": 0.0, | |
"mmlu_high_school_us_history": 0.0, | |
"mmlu_high_school_world_history": 0.0, | |
"mmlu_human_aging": 0.0, | |
"mmlu_human_sexuality": 0.0, | |
"mmlu_international_law": 0.0, | |
"mmlu_jurisprudence": 0.0, | |
"mmlu_logical_fallacies": 0.0, | |
"mmlu_machine_learning": 0.0, | |
"mmlu_management": 0.0, | |
"mmlu_marketing": 0.0, | |
"mmlu_medical_genetics": 0.0, | |
"mmlu_miscellaneous": 0.0, | |
"mmlu_moral_disputes": 0.0, | |
"mmlu_moral_scenarios": 0.0, | |
"mmlu_nutrition": 0.0, | |
"mmlu_philosophy": 0.0, | |
"mmlu_prehistory": 0.0, | |
"mmlu_professional_accounting": 0.0, | |
"mmlu_professional_law": 0.0, | |
"mmlu_professional_medicine": 0.0, | |
"mmlu_professional_psychology": 0.0, | |
"mmlu_public_relations": 0.0, | |
"mmlu_security_studies": 0.0, | |
"mmlu_sociology": 0.0, | |
"mmlu_us_foreign_policy": 0.0, | |
"mmlu_virology": 0.0, | |
"mmlu_world_religions": 0.0 | |
}, | |
"n-shot": { | |
"mmlu": 0, | |
"mmlu_abstract_algebra": 5, | |
"mmlu_anatomy": 5, | |
"mmlu_astronomy": 5, | |
"mmlu_business_ethics": 5, | |
"mmlu_clinical_knowledge": 5, | |
"mmlu_college_biology": 5, | |
"mmlu_college_chemistry": 5, | |
"mmlu_college_computer_science": 5, | |
"mmlu_college_mathematics": 5, | |
"mmlu_college_medicine": 5, | |
"mmlu_college_physics": 5, | |
"mmlu_computer_security": 5, | |
"mmlu_conceptual_physics": 5, | |
"mmlu_econometrics": 5, | |
"mmlu_electrical_engineering": 5, | |
"mmlu_elementary_mathematics": 5, | |
"mmlu_formal_logic": 5, | |
"mmlu_global_facts": 5, | |
"mmlu_high_school_biology": 5, | |
"mmlu_high_school_chemistry": 5, | |
"mmlu_high_school_computer_science": 5, | |
"mmlu_high_school_european_history": 5, | |
"mmlu_high_school_geography": 5, | |
"mmlu_high_school_government_and_politics": 5, | |
"mmlu_high_school_macroeconomics": 5, | |
"mmlu_high_school_mathematics": 5, | |
"mmlu_high_school_microeconomics": 5, | |
"mmlu_high_school_physics": 5, | |
"mmlu_high_school_psychology": 5, | |
"mmlu_high_school_statistics": 5, | |
"mmlu_high_school_us_history": 5, | |
"mmlu_high_school_world_history": 5, | |
"mmlu_human_aging": 5, | |
"mmlu_human_sexuality": 5, | |
"mmlu_humanities": 5, | |
"mmlu_international_law": 5, | |
"mmlu_jurisprudence": 5, | |
"mmlu_logical_fallacies": 5, | |
"mmlu_machine_learning": 5, | |
"mmlu_management": 5, | |
"mmlu_marketing": 5, | |
"mmlu_medical_genetics": 5, | |
"mmlu_miscellaneous": 5, | |
"mmlu_moral_disputes": 5, | |
"mmlu_moral_scenarios": 5, | |
"mmlu_nutrition": 5, | |
"mmlu_other": 5, | |
"mmlu_philosophy": 5, | |
"mmlu_prehistory": 5, | |
"mmlu_professional_accounting": 5, | |
"mmlu_professional_law": 5, | |
"mmlu_professional_medicine": 5, | |
"mmlu_professional_psychology": 5, | |
"mmlu_public_relations": 5, | |
"mmlu_security_studies": 5, | |
"mmlu_social_sciences": 5, | |
"mmlu_sociology": 5, | |
"mmlu_stem": 5, | |
"mmlu_us_foreign_policy": 5, | |
"mmlu_virology": 5, | |
"mmlu_world_religions": 5 | |
}, | |
"higher_is_better": { | |
"mmlu": { | |
"acc": true | |
}, | |
"mmlu_abstract_algebra": { | |
"acc": true | |
}, | |
"mmlu_anatomy": { | |
"acc": true | |
}, | |
"mmlu_astronomy": { | |
"acc": true | |
}, | |
"mmlu_business_ethics": { | |
"acc": true | |
}, | |
"mmlu_clinical_knowledge": { | |
"acc": true | |
}, | |
"mmlu_college_biology": { | |
"acc": true | |
}, | |
"mmlu_college_chemistry": { | |
"acc": true | |
}, | |
"mmlu_college_computer_science": { | |
"acc": true | |
}, | |
"mmlu_college_mathematics": { | |
"acc": true | |
}, | |
"mmlu_college_medicine": { | |
"acc": true | |
}, | |
"mmlu_college_physics": { | |
"acc": true | |
}, | |
"mmlu_computer_security": { | |
"acc": true | |
}, | |
"mmlu_conceptual_physics": { | |
"acc": true | |
}, | |
"mmlu_econometrics": { | |
"acc": true | |
}, | |
"mmlu_electrical_engineering": { | |
"acc": true | |
}, | |
"mmlu_elementary_mathematics": { | |
"acc": true | |
}, | |
"mmlu_formal_logic": { | |
"acc": true | |
}, | |
"mmlu_global_facts": { | |
"acc": true | |
}, | |
"mmlu_high_school_biology": { | |
"acc": true | |
}, | |
"mmlu_high_school_chemistry": { | |
"acc": true | |
}, | |
"mmlu_high_school_computer_science": { | |
"acc": true | |
}, | |
"mmlu_high_school_european_history": { | |
"acc": true | |
}, | |
"mmlu_high_school_geography": { | |
"acc": true | |
}, | |
"mmlu_high_school_government_and_politics": { | |
"acc": true | |
}, | |
"mmlu_high_school_macroeconomics": { | |
"acc": true | |
}, | |
"mmlu_high_school_mathematics": { | |
"acc": true | |
}, | |
"mmlu_high_school_microeconomics": { | |
"acc": true | |
}, | |
"mmlu_high_school_physics": { | |
"acc": true | |
}, | |
"mmlu_high_school_psychology": { | |
"acc": true | |
}, | |
"mmlu_high_school_statistics": { | |
"acc": true | |
}, | |
"mmlu_high_school_us_history": { | |
"acc": true | |
}, | |
"mmlu_high_school_world_history": { | |
"acc": true | |
}, | |
"mmlu_human_aging": { | |
"acc": true | |
}, | |
"mmlu_human_sexuality": { | |
"acc": true | |
}, | |
"mmlu_humanities": { | |
"acc": true | |
}, | |
"mmlu_international_law": { | |
"acc": true | |
}, | |
"mmlu_jurisprudence": { | |
"acc": true | |
}, | |
"mmlu_logical_fallacies": { | |
"acc": true | |
}, | |
"mmlu_machine_learning": { | |
"acc": true | |
}, | |
"mmlu_management": { | |
"acc": true | |
}, | |
"mmlu_marketing": { | |
"acc": true | |
}, | |
"mmlu_medical_genetics": { | |
"acc": true | |
}, | |
"mmlu_miscellaneous": { | |
"acc": true | |
}, | |
"mmlu_moral_disputes": { | |
"acc": true | |
}, | |
"mmlu_moral_scenarios": { | |
"acc": true | |
}, | |
"mmlu_nutrition": { | |
"acc": true | |
}, | |
"mmlu_other": { | |
"acc": true | |
}, | |
"mmlu_philosophy": { | |
"acc": true | |
}, | |
"mmlu_prehistory": { | |
"acc": true | |
}, | |
"mmlu_professional_accounting": { | |
"acc": true | |
}, | |
"mmlu_professional_law": { | |
"acc": true | |
}, | |
"mmlu_professional_medicine": { | |
"acc": true | |
}, | |
"mmlu_professional_psychology": { | |
"acc": true | |
}, | |
"mmlu_public_relations": { | |
"acc": true | |
}, | |
"mmlu_security_studies": { | |
"acc": true | |
}, | |
"mmlu_social_sciences": { | |
"acc": true | |
}, | |
"mmlu_sociology": { | |
"acc": true | |
}, | |
"mmlu_stem": { | |
"acc": true | |
}, | |
"mmlu_us_foreign_policy": { | |
"acc": true | |
}, | |
"mmlu_virology": { | |
"acc": true | |
}, | |
"mmlu_world_religions": { | |
"acc": true | |
} | |
}, | |
"n-samples": { | |
"mmlu_high_school_european_history": { | |
"original": 165, | |
"effective": 165 | |
}, | |
"mmlu_high_school_world_history": { | |
"original": 237, | |
"effective": 237 | |
}, | |
"mmlu_professional_law": { | |
"original": 1534, | |
"effective": 1534 | |
}, | |
"mmlu_logical_fallacies": { | |
"original": 163, | |
"effective": 163 | |
}, | |
"mmlu_high_school_us_history": { | |
"original": 204, | |
"effective": 204 | |
}, | |
"mmlu_world_religions": { | |
"original": 171, | |
"effective": 171 | |
}, | |
"mmlu_prehistory": { | |
"original": 324, | |
"effective": 324 | |
}, | |
"mmlu_jurisprudence": { | |
"original": 108, | |
"effective": 108 | |
}, | |
"mmlu_moral_scenarios": { | |
"original": 895, | |
"effective": 895 | |
}, | |
"mmlu_formal_logic": { | |
"original": 126, | |
"effective": 126 | |
}, | |
"mmlu_philosophy": { | |
"original": 311, | |
"effective": 311 | |
}, | |
"mmlu_international_law": { | |
"original": 121, | |
"effective": 121 | |
}, | |
"mmlu_moral_disputes": { | |
"original": 346, | |
"effective": 346 | |
}, | |
"mmlu_high_school_government_and_politics": { | |
"original": 193, | |
"effective": 193 | |
}, | |
"mmlu_human_sexuality": { | |
"original": 131, | |
"effective": 131 | |
}, | |
"mmlu_high_school_microeconomics": { | |
"original": 238, | |
"effective": 238 | |
}, | |
"mmlu_high_school_macroeconomics": { | |
"original": 390, | |
"effective": 390 | |
}, | |
"mmlu_public_relations": { | |
"original": 110, | |
"effective": 110 | |
}, | |
"mmlu_sociology": { | |
"original": 201, | |
"effective": 201 | |
}, | |
"mmlu_professional_psychology": { | |
"original": 612, | |
"effective": 612 | |
}, | |
"mmlu_high_school_psychology": { | |
"original": 545, | |
"effective": 545 | |
}, | |
"mmlu_econometrics": { | |
"original": 114, | |
"effective": 114 | |
}, | |
"mmlu_high_school_geography": { | |
"original": 198, | |
"effective": 198 | |
}, | |
"mmlu_us_foreign_policy": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_security_studies": { | |
"original": 245, | |
"effective": 245 | |
}, | |
"mmlu_business_ethics": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_virology": { | |
"original": 166, | |
"effective": 166 | |
}, | |
"mmlu_nutrition": { | |
"original": 306, | |
"effective": 306 | |
}, | |
"mmlu_management": { | |
"original": 103, | |
"effective": 103 | |
}, | |
"mmlu_clinical_knowledge": { | |
"original": 265, | |
"effective": 265 | |
}, | |
"mmlu_marketing": { | |
"original": 234, | |
"effective": 234 | |
}, | |
"mmlu_college_medicine": { | |
"original": 173, | |
"effective": 173 | |
}, | |
"mmlu_professional_medicine": { | |
"original": 272, | |
"effective": 272 | |
}, | |
"mmlu_medical_genetics": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_human_aging": { | |
"original": 223, | |
"effective": 223 | |
}, | |
"mmlu_professional_accounting": { | |
"original": 282, | |
"effective": 282 | |
}, | |
"mmlu_miscellaneous": { | |
"original": 783, | |
"effective": 783 | |
}, | |
"mmlu_global_facts": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_college_computer_science": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_high_school_physics": { | |
"original": 151, | |
"effective": 151 | |
}, | |
"mmlu_college_chemistry": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_college_biology": { | |
"original": 144, | |
"effective": 144 | |
}, | |
"mmlu_high_school_mathematics": { | |
"original": 270, | |
"effective": 270 | |
}, | |
"mmlu_high_school_computer_science": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_electrical_engineering": { | |
"original": 145, | |
"effective": 145 | |
}, | |
"mmlu_college_physics": { | |
"original": 102, | |
"effective": 102 | |
}, | |
"mmlu_anatomy": { | |
"original": 135, | |
"effective": 135 | |
}, | |
"mmlu_college_mathematics": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_elementary_mathematics": { | |
"original": 378, | |
"effective": 378 | |
}, | |
"mmlu_high_school_chemistry": { | |
"original": 203, | |
"effective": 203 | |
}, | |
"mmlu_machine_learning": { | |
"original": 112, | |
"effective": 112 | |
}, | |
"mmlu_abstract_algebra": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_astronomy": { | |
"original": 152, | |
"effective": 152 | |
}, | |
"mmlu_computer_security": { | |
"original": 100, | |
"effective": 100 | |
}, | |
"mmlu_high_school_biology": { | |
"original": 310, | |
"effective": 310 | |
}, | |
"mmlu_high_school_statistics": { | |
"original": 216, | |
"effective": 216 | |
}, | |
"mmlu_conceptual_physics": { | |
"original": 235, | |
"effective": 235 | |
} | |
}, | |
"config": { | |
"model": "vllm", | |
"model_args": "pretrained=/home/mlr/models/Mixtral-8x22B-Instruct-v0.1-FP8,tensor_parallel_size=4,dtype=auto,add_bos_token=True,gpu_memory_utilization=0.8,data_parallel_size=1", | |
"batch_size": "auto", | |
"batch_sizes": [], | |
"device": "cuda", | |
"use_cache": null, | |
"limit": null, | |
"bootstrap_iters": 100000, | |
"gen_kwargs": null, | |
"random_seed": 0, | |
"numpy_seed": 1234, | |
"torch_seed": 1234, | |
"fewshot_seed": 1234 | |
}, | |
"git_hash": "f2843b2f", | |
"date": 1717759668.7806425, | |
"pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.4 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.29.3\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-5.19.0-1010-nvidia-lowlatency-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.5.40\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA H100 NVL\nGPU 1: NVIDIA H100 NVL\nGPU 2: NVIDIA H100 NVL\nGPU 3: NVIDIA H100 NVL\nGPU 4: NVIDIA H100 NVL\nGPU 5: NVIDIA H100 NVL\nGPU 6: NVIDIA H100 NVL\nGPU 7: NVIDIA H100 NVL\n\nNvidia driver version: 555.42.02\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 46 bits physical, 57 bits virtual\nByte Order: Little Endian\nCPU(s): 144\nOn-line CPU(s) list: 0-143\nVendor ID: GenuineIntel\nModel name: Intel(R) Xeon(R) Platinum 8452Y\nCPU family: 6\nModel: 143\nThread(s) per core: 2\nCore(s) per socket: 36\nSocket(s): 2\nStepping: 8\nFrequency boost: enabled\nCPU max MHz: 2001.0000\nCPU min MHz: 800.0000\nBogoMIPS: 4000.00\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc cpuid aperfmperf tsc_known_freq pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch cpuid_fault epb cat_l3 cat_l2 cdp_l3 invpcid_single intel_ppin cdp_l2 ssbd mba ibrs ibpb stibp ibrs_enhanced tpr_shadow vnmi flexpriority ept vpid ept_ad fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb intel_pt avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local split_lock_detect avx_vnni avx512_bf16 wbnoinvd dtherm ida arat pln pts hfi avx512vbmi umip pku ospke waitpkg avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg tme avx512_vpopcntdq la57 rdpid bus_lock_detect cldemote movdiri movdir64b enqcmd fsrm md_clear serialize tsxldtrk pconfig arch_lbr ibt amx_bf16 avx512_fp16 amx_tile amx_int8 flush_l1d arch_capabilities\nVirtualization: VT-x\nL1d cache: 3.4 MiB (72 instances)\nL1i cache: 2.3 MiB (72 instances)\nL2 cache: 144 MiB (72 instances)\nL3 cache: 135 MiB (2 instances)\nNUMA node(s): 2\nNUMA node0 CPU(s): 0-35,72-107\nNUMA node1 CPU(s): 36-71,108-143\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Enhanced IBRS, IBPB conditional, RSB filling, PBRSB-eIBRS SW sequence\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] 
torch==2.3.0\n[pip3] triton==2.3.0\n[conda] Could not collect", | |
"transformers_version": "4.41.2", | |
"upper_git_hash": "f2843b2fd64df799179808ce2428b7a8dbc403de", | |
"task_hashes": {}, | |
"model_source": "vllm", | |
"model_name": "/home/mlr/models/Mixtral-8x22B-Instruct-v0.1-FP8", | |
"model_name_sanitized": "__home__mlr__models__Mixtral-8x22B-Instruct-v0.1-FP8", | |
"system_instruction": null, | |
"system_instruction_sha": null, | |
"chat_template": null, | |
"chat_template_sha": null, | |
"start_time": 829948.992005701, | |
"end_time": 847093.177875013, | |
"total_evaluation_time_seconds": "17144.18586931203" | |
} |