results_v0.2 / meta-llama / Meta-Llama-3-8B / results_2024-07-13T10-15-48.038085.json
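This file is a raw lm-evaluation-harness results dump for meta-llama/Meta-Llama-3-8B on the Turkish v0.2 benchmark suite (winogrande_tr, truthfulqa, mmlu_tr with its subject subtasks, hellaswag_tr, gsm8k_tr, arc_tr). A minimal sketch for fetching the file from the Hub and printing the top-level scores follows; the REPO_ID value is a placeholder assumption for whichever dataset repository hosts these results, so substitute the actual repo id before running.

import json
from huggingface_hub import hf_hub_download

# Hypothetical repo id: replace with the dataset repository that hosts this file.
REPO_ID = "malhajar/results_v0.2"
FILENAME = "meta-llama/Meta-Llama-3-8B/results_2024-07-13T10-15-48.038085.json"

# Download (and locally cache) the results JSON from the Hub.
path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset")

with open(path, encoding="utf-8") as f:
    results = json.load(f)

# Print accuracy-style metrics per task; gsm8k reports exact_match instead of acc.
for task, metrics in results["results"].items():
    for key, value in metrics.items():
        if key.startswith(("acc,", "acc_norm,", "exact_match,")):
            print(f"{task:45s} {key:35s} {value:.4f}")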
{
"results": {
"winogrande_tr-v0.2": {
"acc,none": 0.5560821484992101,
"acc_stderr,none": 0.013969328135351866,
"alias": "winogrande_tr-v0.2"
},
"truthfulqa_v0.2": {
"acc,none": 0.47354505859506446,
"acc_stderr,none": 0.015415176057336447,
"alias": "truthfulqa_v0.2"
},
"mmlu_tr_v0.2": {
"acc,none": 0.4928640094653553,
"acc_stderr,none": 0.004160638583016978,
"alias": "mmlu_tr_v0.2"
},
"mmlu_humanities_v0.2": {
"alias": " - humanities_v0.2",
"acc,none": 0.45752675928034614,
"acc_stderr,none": 0.007147675776518145
},
"mmlu_formal_logic_v0.2": {
"alias": " - formal_logic_v0.2",
"acc,none": 0.3888888888888889,
"acc_stderr,none": 0.04360314860077459
},
"mmlu_high_school_european_history_v0.2": {
"alias": " - high_school_european_history_v0.2",
"acc,none": 0.6066666666666667,
"acc_stderr,none": 0.04001863846147463
},
"mmlu_high_school_us_history_v0.2": {
"alias": " - high_school_us_history_v0.2",
"acc,none": 0.6089385474860335,
"acc_stderr,none": 0.0365762550278607
},
"mmlu_high_school_world_history_v0.2": {
"alias": " - high_school_world_history_v0.2",
"acc,none": 0.6384976525821596,
"acc_stderr,none": 0.03299645494317726
},
"mmlu_international_law_v0.2": {
"alias": " - international_law_v0.2",
"acc,none": 0.7024793388429752,
"acc_stderr,none": 0.04173349148083499
},
"mmlu_jurisprudence_v0.2": {
"alias": " - jurisprudence_v0.2",
"acc,none": 0.6320754716981132,
"acc_stderr,none": 0.047061871107614554
},
"mmlu_logical_fallacies_v0.2": {
"alias": " - logical_fallacies_v0.2",
"acc,none": 0.5341614906832298,
"acc_stderr,none": 0.039436102792547494
},
"mmlu_moral_disputes_v0.2": {
"alias": " - moral_disputes_v0.2",
"acc,none": 0.5844155844155844,
"acc_stderr,none": 0.028126865286406472
},
"mmlu_moral_scenarios_v0.2": {
"alias": " - moral_scenarios_v0.2",
"acc,none": 0.2511467889908257,
"acc_stderr,none": 0.014694441546570722
},
"mmlu_philosophy_v0.2": {
"alias": " - philosophy_v0.2",
"acc,none": 0.6153846153846154,
"acc_stderr,none": 0.028182434824896612
},
"mmlu_prehistory_v0.2": {
"alias": " - prehistory_v0.2",
"acc,none": 0.5933333333333334,
"acc_stderr,none": 0.028407503418366627
},
"mmlu_professional_law_v0.2": {
"alias": " - professional_law_v0.2",
"acc,none": 0.36095100864553314,
"acc_stderr,none": 0.012895939178621436
},
"mmlu_world_religions_v0.2": {
"alias": " - world_religions_v0.2",
"acc,none": 0.7380952380952381,
"acc_stderr,none": 0.03402276955044172
},
"mmlu_other_v0.2": {
"alias": " - other_v0.2",
"acc,none": 0.537823490378235,
"acc_stderr,none": 0.008891390114271015
},
"mmlu_business_ethics_v0.2": {
"alias": " - business_ethics_v0.2",
"acc,none": 0.5353535353535354,
"acc_stderr,none": 0.05038121284299014
},
"mmlu_clinical_knowledge_v0.2": {
"alias": " - clinical_knowledge_v0.2",
"acc,none": 0.53125,
"acc_stderr,none": 0.03125
},
"mmlu_college_medicine_v0.2": {
"alias": " - college_medicine_v0.2",
"acc,none": 0.4107142857142857,
"acc_stderr,none": 0.03806927668989205
},
"mmlu_global_facts_v0.2": {
"alias": " - global_facts_v0.2",
"acc,none": 0.3469387755102041,
"acc_stderr,none": 0.04833007873885538
},
"mmlu_human_aging_v0.2": {
"alias": " - human_aging_v0.2",
"acc,none": 0.5518867924528302,
"acc_stderr,none": 0.03423557646056905
},
"mmlu_management_v0.2": {
"alias": " - management_v0.2",
"acc,none": 0.6060606060606061,
"acc_stderr,none": 0.04935824351078519
},
"mmlu_marketing_v0.2": {
"alias": " - marketing_v0.2",
"acc,none": 0.6728110599078341,
"acc_stderr,none": 0.03192412427911798
},
"mmlu_medical_genetics_v0.2": {
"alias": " - medical_genetics_v0.2",
"acc,none": 0.6526315789473685,
"acc_stderr,none": 0.0491094740077666
},
"mmlu_miscellaneous_v0.2": {
"alias": " - miscellaneous_v0.2",
"acc,none": 0.6318537859007833,
"acc_stderr,none": 0.017437641620522863
},
"mmlu_nutrition_v0.2": {
"alias": " - nutrition_v0.2",
"acc,none": 0.5606557377049181,
"acc_stderr,none": 0.028465172711779237
},
"mmlu_professional_accounting_v0.2": {
"alias": " - professional_accounting_v0.2",
"acc,none": 0.31899641577060933,
"acc_stderr,none": 0.02795407992616444
},
"mmlu_professional_medicine_v0.2": {
"alias": " - professional_medicine_v0.2",
"acc,none": 0.4942528735632184,
"acc_stderr,none": 0.031006635179346285
},
"mmlu_virology_v0.2": {
"alias": " - virology_v0.2",
"acc,none": 0.44654088050314467,
"acc_stderr,none": 0.03954985017675704
},
"mmlu_social_sciences_v0.2": {
"alias": " - social_sciences_v0.2",
"acc,none": 0.5551115551115551,
"acc_stderr,none": 0.008916060688359694
},
"mmlu_econometrics_v0.2": {
"alias": " - econometrics_v0.2",
"acc,none": 0.3508771929824561,
"acc_stderr,none": 0.04489539350270698
},
"mmlu_high_school_geography_v0.2": {
"alias": " - high_school_geography_v0.2",
"acc,none": 0.6649746192893401,
"acc_stderr,none": 0.03371423842235922
},
"mmlu_high_school_government_and_politics_v0.2": {
"alias": " - high_school_government_and_politics_v0.2",
"acc,none": 0.5668449197860963,
"acc_stderr,none": 0.036332674111025864
},
"mmlu_high_school_macroeconomics_v0.2": {
"alias": " - high_school_macroeconomics_v0.2",
"acc,none": 0.4641025641025641,
"acc_stderr,none": 0.025285585990017848
},
"mmlu_high_school_microeconomics_v0.2": {
"alias": " - high_school_microeconomics_v0.2",
"acc,none": 0.5021097046413502,
"acc_stderr,none": 0.032546938018020076
},
"mmlu_high_school_psychology_v0.2": {
"alias": " - high_school_psychology_v0.2",
"acc,none": 0.6172607879924953,
"acc_stderr,none": 0.021073176639582407
},
"mmlu_human_sexuality_v0.2": {
"alias": " - human_sexuality_v0.2",
"acc,none": 0.6347826086956522,
"acc_stderr,none": 0.04509577025262067
},
"mmlu_professional_psychology_v0.2": {
"alias": " - professional_psychology_v0.2",
"acc,none": 0.4595959595959596,
"acc_stderr,none": 0.020465391076251824
},
"mmlu_public_relations_v0.2": {
"alias": " - public_relations_v0.2",
"acc,none": 0.5648148148148148,
"acc_stderr,none": 0.04792898170907062
},
"mmlu_security_studies_v0.2": {
"alias": " - security_studies_v0.2",
"acc,none": 0.6153846153846154,
"acc_stderr,none": 0.03187195347942466
},
"mmlu_sociology_v0.2": {
"alias": " - sociology_v0.2",
"acc,none": 0.7076923076923077,
"acc_stderr,none": 0.032654383937495104
},
"mmlu_us_foreign_policy_v0.2": {
"alias": " - us_foreign_policy_v0.2",
"acc,none": 0.7272727272727273,
"acc_stderr,none": 0.044988332664126655
},
"mmlu_stem_v0.2": {
"alias": " - stem_v0.2",
"acc,none": 0.4391653290529695,
"acc_stderr,none": 0.008765672686611319
},
"mmlu_abstract_algebra_v0.2": {
"alias": " - abstract_algebra_v0.2",
"acc,none": 0.31,
"acc_stderr,none": 0.04648231987117316
},
"mmlu_anatomy_v0.2": {
"alias": " - anatomy_v0.2",
"acc,none": 0.4580152671755725,
"acc_stderr,none": 0.04369802690578757
},
"mmlu_astronomy": {
"alias": " - astronomy",
"acc,none": 0.5629139072847682,
"acc_stderr,none": 0.04050035722230636
},
"mmlu_college_biology_v0.2": {
"alias": " - college_biology_v0.2",
"acc,none": 0.5633802816901409,
"acc_stderr,none": 0.04176792857440145
},
"mmlu_college_chemistry_v0.2": {
"alias": " - college_chemistry_v0.2",
"acc,none": 0.3838383838383838,
"acc_stderr,none": 0.04912566964083466
},
"mmlu_college_computer_science_v0.2": {
"alias": " - college_computer_science_v0.2",
"acc,none": 0.3838383838383838,
"acc_stderr,none": 0.04912566964083466
},
"mmlu_college_mathematics_v0.2": {
"alias": " - college_mathematics_v0.2",
"acc,none": 0.39,
"acc_stderr,none": 0.04902071300001975
},
"mmlu_college_physics_v0.2": {
"alias": " - college_physics_v0.2",
"acc,none": 0.3564356435643564,
"acc_stderr,none": 0.047894600484941874
},
"mmlu_computer_security_v0.2": {
"alias": " - computer_security_v0.2",
"acc,none": 0.56,
"acc_stderr,none": 0.04988876515698589
},
"mmlu_conceptual_physics_v0.2": {
"alias": " - conceptual_physics_v0.2",
"acc,none": 0.45064377682403434,
"acc_stderr,none": 0.03266628305181916
},
"mmlu_electrical_engineering_v0.2": {
"alias": " - electrical_engineering_v0.2",
"acc,none": 0.5138888888888888,
"acc_stderr,none": 0.04179596617581
},
"mmlu_elementary_mathematics_v0.2": {
"alias": " - elementary_mathematics_v0.2",
"acc,none": 0.353887399463807,
"acc_stderr,none": 0.02479220676006358
},
"mmlu_high_school_biology_v0.2": {
"alias": " - high_school_biology_v0.2",
"acc,none": 0.59,
"acc_stderr,none": 0.02844345443743517
},
"mmlu_high_school_chemistry_v0.2": {
"alias": " - high_school_chemistry_v0.2",
"acc,none": 0.43147208121827413,
"acc_stderr,none": 0.035377261545033935
},
"mmlu_high_school_computer_science_v0.2": {
"alias": " - high_school_computer_science_v0.2",
"acc,none": 0.59,
"acc_stderr,none": 0.04943110704237102
},
"mmlu_high_school_mathematics_v0.2": {
"alias": " - high_school_mathematics_v0.2",
"acc,none": 0.35555555555555557,
"acc_stderr,none": 0.029185714949857406
},
"mmlu_high_school_physics_v0.2": {
"alias": " - high_school_physics_v0.2",
"acc,none": 0.3333333333333333,
"acc_stderr,none": 0.039013715732043486
},
"mmlu_high_school_statistics_v0.2": {
"alias": " - high_school_statistics_v0.2",
"acc,none": 0.39351851851851855,
"acc_stderr,none": 0.03331747876370312
},
"mmlu_machine_learning_v0.2": {
"alias": " - machine_learning_v0.2",
"acc,none": 0.38392857142857145,
"acc_stderr,none": 0.04616143075028547
},
"hellaswag_tr-v0.2": {
"acc,none": 0.37969967257536413,
"acc_stderr,none": 0.0051570615239302856,
"acc_norm,none": 0.4879756125098792,
"acc_norm_stderr,none": 0.005311602643330855,
"alias": "hellaswag_tr-v0.2"
},
"gsm8k_tr-v0.2": {
"exact_match,strict-match": 0.31738800303720577,
"exact_match_stderr,strict-match": 0.012830815708150378,
"exact_match,flexible-extract": 0.022779043280182234,
"exact_match_stderr,flexible-extract": 0.004112786650556422,
"alias": "gsm8k_tr-v0.2"
},
"arc_tr-v0.2": {
"acc,none": 0.38310580204778155,
"acc_stderr,none": 0.01420647266167288,
"acc_norm,none": 0.4402730375426621,
"acc_norm_stderr,none": 0.014506769524804244,
"alias": "arc_tr-v0.2"
}
},
"groups": {
"mmlu_tr_v0.2": {
"acc,none": 0.4928640094653553,
"acc_stderr,none": 0.004160638583016978,
"alias": "mmlu_tr_v0.2"
},
"mmlu_humanities_v0.2": {
"alias": " - humanities_v0.2",
"acc,none": 0.45752675928034614,
"acc_stderr,none": 0.007147675776518145
},
"mmlu_other_v0.2": {
"alias": " - other_v0.2",
"acc,none": 0.537823490378235,
"acc_stderr,none": 0.008891390114271015
},
"mmlu_social_sciences_v0.2": {
"alias": " - social_sciences_v0.2",
"acc,none": 0.5551115551115551,
"acc_stderr,none": 0.008916060688359694
},
"mmlu_stem_v0.2": {
"alias": " - stem_v0.2",
"acc,none": 0.4391653290529695,
"acc_stderr,none": 0.008765672686611319
}
},
"group_subtasks": {
"arc_tr-v0.2": [],
"gsm8k_tr-v0.2": [],
"hellaswag_tr-v0.2": [],
"mmlu_stem_v0.2": [
"mmlu_abstract_algebra_v0.2",
"mmlu_conceptual_physics_v0.2",
"mmlu_college_biology_v0.2",
"mmlu_high_school_chemistry_v0.2",
"mmlu_electrical_engineering_v0.2",
"mmlu_high_school_computer_science_v0.2",
"mmlu_machine_learning_v0.2",
"mmlu_college_chemistry_v0.2",
"mmlu_high_school_statistics_v0.2",
"mmlu_college_mathematics_v0.2",
"mmlu_high_school_physics_v0.2",
"mmlu_college_computer_science_v0.2",
"mmlu_anatomy_v0.2",
"mmlu_computer_security_v0.2",
"mmlu_high_school_mathematics_v0.2",
"mmlu_astronomy",
"mmlu_college_physics_v0.2",
"mmlu_high_school_biology_v0.2",
"mmlu_elementary_mathematics_v0.2"
],
"mmlu_other_v0.2": [
"mmlu_human_aging_v0.2",
"mmlu_marketing_v0.2",
"mmlu_virology_v0.2",
"mmlu_professional_medicine_v0.2",
"mmlu_business_ethics_v0.2",
"mmlu_global_facts_v0.2",
"mmlu_medical_genetics_v0.2",
"mmlu_miscellaneous_v0.2",
"mmlu_professional_accounting_v0.2",
"mmlu_clinical_knowledge_v0.2",
"mmlu_management_v0.2",
"mmlu_nutrition_v0.2",
"mmlu_college_medicine_v0.2"
],
"mmlu_social_sciences_v0.2": [
"mmlu_high_school_psychology_v0.2",
"mmlu_professional_psychology_v0.2",
"mmlu_high_school_geography_v0.2",
"mmlu_security_studies_v0.2",
"mmlu_human_sexuality_v0.2",
"mmlu_high_school_government_and_politics_v0.2",
"mmlu_sociology_v0.2",
"mmlu_public_relations_v0.2",
"mmlu_us_foreign_policy_v0.2",
"mmlu_econometrics_v0.2",
"mmlu_high_school_microeconomics_v0.2",
"mmlu_high_school_macroeconomics_v0.2"
],
"mmlu_humanities_v0.2": [
"mmlu_formal_logic_v0.2",
"mmlu_moral_disputes_v0.2",
"mmlu_international_law_v0.2",
"mmlu_philosophy_v0.2",
"mmlu_world_religions_v0.2",
"mmlu_jurisprudence_v0.2",
"mmlu_moral_scenarios_v0.2",
"mmlu_high_school_european_history_v0.2",
"mmlu_high_school_us_history_v0.2",
"mmlu_prehistory_v0.2",
"mmlu_professional_law_v0.2",
"mmlu_logical_fallacies_v0.2",
"mmlu_high_school_world_history_v0.2"
],
"mmlu_tr_v0.2": [
"mmlu_humanities_v0.2",
"mmlu_social_sciences_v0.2",
"mmlu_other_v0.2",
"mmlu_stem_v0.2"
],
"truthfulqa_v0.2": [],
"winogrande_tr-v0.2": []
},
"configs": {
"arc_tr-v0.2": {
"task": "arc_tr-v0.2",
"group": [
"ai2_arc"
],
"dataset_path": "malhajar/arc-tr-v0.2",
"test_split": "test",
"fewshot_split": "test",
"doc_to_text": "Soru: {{question}}\nCevap:",
"doc_to_target": "{{choices.label.index(answerKey)}}",
"doc_to_choice": "{{choices.text}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 25,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "Soru: {{question}}\nCevap:",
"metadata": {
"version": 1.0
}
},
"gsm8k_tr-v0.2": {
"task": "gsm8k_tr-v0.2",
"group": [
"math_word_problems"
],
"dataset_path": "malhajar/gsm8k_tr-v0.2",
"test_split": "test",
"fewshot_split": "test",
"doc_to_text": "Soru: {{question}}\nCevap:",
"doc_to_target": "{{answer}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 5,
"metric_list": [
{
"metric": "exact_match",
"aggregation": "mean",
"higher_is_better": true,
"ignore_case": true,
"ignore_punctuation": false,
"regexes_to_ignore": [
",",
"\\$",
"(?s).*#### ",
"\\.$"
]
}
],
"output_type": "generate_until",
"generation_kwargs": {
"until": [
"Question:",
"</s>",
"<|im_end|>"
],
"do_sample": false,
"temperature": 0.0
},
"repeats": 1,
"filter_list": [
{
"name": "strict-match",
"filter": [
{
"function": "regex",
"regex_pattern": "#### (\\-?[0-9\\.\\,]+)"
},
{
"function": "take_first"
}
]
},
{
"name": "flexible-extract",
"filter": [
{
"function": "regex",
"group_select": -1,
"regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)"
},
{
"function": "take_first"
}
]
}
],
"should_decontaminate": false
},
"hellaswag_tr-v0.2": {
"task": "hellaswag_tr-v0.2",
"group": [
"multiple_choice"
],
"dataset_path": "malhajar/hellaswag_tr-v0.2",
"validation_split": "validation",
"fewshot_split": "validation",
"process_docs": "def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:\n def _process_doc(doc):\n ctx = doc[\"ctx_a\"] + \" \" + doc[\"ctx_b\"].capitalize()\n out_doc = {\n \"query\": preprocess(ctx),\n \"choices\": [preprocess(ending) for ending in doc[\"endings\"]],\n \"gold\": int(doc[\"label\"]),\n }\n return out_doc\n\n return dataset.map(_process_doc)\n",
"doc_to_text": "{{query}}",
"doc_to_target": "{{label}}",
"doc_to_choice": "{{choices}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 10,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false
},
"mmlu_abstract_algebra_v0.2": {
"task": "mmlu_abstract_algebra_v0.2",
"task_alias": "abstract_algebra_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "abstract_algebra",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda soyut cebir hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_anatomy_v0.2": {
"task": "mmlu_anatomy_v0.2",
"task_alias": "anatomy_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "anatomy",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda anatomiyi konu alan çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_astronomy": {
"task": "mmlu_astronomy",
"task_alias": "astronomy",
"group": "mmlu_stem",
"dataset_path": "malhajar/mmlu-tr",
"dataset_name": "astronomy",
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_business_ethics_v0.2": {
"task": "mmlu_business_ethics_v0.2",
"task_alias": "business_ethics_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "business_ethics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda iş etiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_clinical_knowledge_v0.2": {
"task": "mmlu_clinical_knowledge_v0.2",
"task_alias": "clinical_knowledge_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "clinical_knowledge",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda klinik bilgi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_biology_v0.2": {
"task": "mmlu_college_biology_v0.2",
"task_alias": "college_biology_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_biology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite biyolojisi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_chemistry_v0.2": {
"task": "mmlu_college_chemistry_v0.2",
"task_alias": "college_chemistry_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_chemistry",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite kimyası hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_computer_science_v0.2": {
"task": "mmlu_college_computer_science_v0.2",
"task_alias": "college_computer_science_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_computer_science",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite bilgisayar bilimleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_mathematics_v0.2": {
"task": "mmlu_college_mathematics_v0.2",
"task_alias": "college_mathematics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_mathematics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite matematiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_medicine_v0.2": {
"task": "mmlu_college_medicine_v0.2",
"task_alias": "college_medicine_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_medicine",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite tıbbı hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_physics_v0.2": {
"task": "mmlu_college_physics_v0.2",
"task_alias": "college_physics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_physics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite fizik hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_computer_security_v0.2": {
"task": "mmlu_computer_security_v0.2",
"task_alias": "computer_security_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "computer_security",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda bilgisayar güvenliği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_conceptual_physics_v0.2": {
"task": "mmlu_conceptual_physics_v0.2",
"task_alias": "conceptual_physics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "conceptual_physics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, kavramsal fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_econometrics_v0.2": {
"task": "mmlu_econometrics_v0.2",
"task_alias": "econometrics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "econometrics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ekonometri hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_electrical_engineering_v0.2": {
"task": "mmlu_electrical_engineering_v0.2",
"task_alias": "electrical_engineering_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "electrical_engineering",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, elektrik mühendisliği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_elementary_mathematics_v0.2": {
"task": "mmlu_elementary_mathematics_v0.2",
"task_alias": "elementary_mathematics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "elementary_mathematics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ilköğretim matematiği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_formal_logic_v0.2": {
"task": "mmlu_formal_logic_v0.2",
"task_alias": "formal_logic_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "formal_logic",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, formal mantık hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_global_facts_v0.2": {
"task": "mmlu_global_facts_v0.2",
"task_alias": "global_facts_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "global_facts",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, küresel gerçekler hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_biology_v0.2": {
"task": "mmlu_high_school_biology_v0.2",
"task_alias": "high_school_biology_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_biology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise biyolojisi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_chemistry_v0.2": {
"task": "mmlu_high_school_chemistry_v0.2",
"task_alias": "high_school_chemistry_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_chemistry",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise kimyası hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_computer_science_v0.2": {
"task": "mmlu_high_school_computer_science_v0.2",
"task_alias": "high_school_computer_science_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_computer_science",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise bilgisayar bilimi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_european_history_v0.2": {
"task": "mmlu_high_school_european_history_v0.2",
"task_alias": "high_school_european_history_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_european_history",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise Avrupa tarihi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_geography_v0.2": {
"task": "mmlu_high_school_geography_v0.2",
"task_alias": "high_school_geography_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_geography",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise coğrafya hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_government_and_politics_v0.2": {
"task": "mmlu_high_school_government_and_politics_v0.2",
"task_alias": "high_school_government_and_politics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_government_and_politics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise hükümet ve siyaset hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_macroeconomics_v0.2": {
"task": "mmlu_high_school_macroeconomics_v0.2",
"task_alias": "high_school_macroeconomics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_macroeconomics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise makroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_mathematics_v0.2": {
"task": "mmlu_high_school_mathematics_v0.2",
"task_alias": "high_school_mathematics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_mathematics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise matematik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_microeconomics_v0.2": {
"task": "mmlu_high_school_microeconomics_v0.2",
"task_alias": "high_school_microeconomics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_microeconomics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise mikroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_physics_v0.2": {
"task": "mmlu_high_school_physics_v0.2",
"task_alias": "high_school_physics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_physics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_psychology_v0.2": {
"task": "mmlu_high_school_psychology_v0.2",
"task_alias": "high_school_psychology_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_psychology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise psikoloji hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_statistics_v0.2": {
"task": "mmlu_high_school_statistics_v0.2",
"task_alias": "high_school_statistics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_statistics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise istatistik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_us_history_v0.2": {
"task": "mmlu_high_school_us_history_v0.2",
"task_alias": "high_school_us_history_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_us_history",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise Amerikan tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_world_history_v0.2": {
"task": "mmlu_high_school_world_history_v0.2",
"task_alias": "high_school_world_history_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_world_history",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise dünya tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_human_aging_v0.2": {
"task": "mmlu_human_aging_v0.2",
"task_alias": "human_aging_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "human_aging",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, insan yaşlanmasıyla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_human_sexuality_v0.2": {
"task": "mmlu_human_sexuality_v0.2",
"task_alias": "human_sexuality_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "human_sexuality",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, insan cinselliğiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_international_law_v0.2": {
"task": "mmlu_international_law_v0.2",
"task_alias": "international_law_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "international_law",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, uluslararası hukukla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_jurisprudence_v0.2": {
"task": "mmlu_jurisprudence_v0.2",
"task_alias": "jurisprudence_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "jurisprudence",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, hukuk felsefesiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_logical_fallacies_v0.2": {
"task": "mmlu_logical_fallacies_v0.2",
"task_alias": "logical_fallacies_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "logical_fallacies",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mantıksal yanılgılarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_machine_learning_v0.2": {
"task": "mmlu_machine_learning_v0.2",
"task_alias": "machine_learning_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "machine_learning",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, makine öğrenimiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_management_v0.2": {
"task": "mmlu_management_v0.2",
"task_alias": "management_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "management",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, yönetimle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_marketing_v0.2": {
"task": "mmlu_marketing_v0.2",
"task_alias": "marketing_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "marketing",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, pazarlama ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_medical_genetics_v0.2": {
"task": "mmlu_medical_genetics_v0.2",
"task_alias": "medical_genetics_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "medical_genetics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, tıbbi genetikle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_miscellaneous_v0.2": {
"task": "mmlu_miscellaneous_v0.2",
"task_alias": "miscellaneous_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "miscellaneous",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, çeşitli konularla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_moral_disputes_v0.2": {
"task": "mmlu_moral_disputes_v0.2",
"task_alias": "moral_disputes_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "moral_disputes",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ahlaki anlaşmazlıklarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_moral_scenarios_v0.2": {
"task": "mmlu_moral_scenarios_v0.2",
"task_alias": "moral_scenarios_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "moral_scenarios",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ahlaki senaryolarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_nutrition_v0.2": {
"task": "mmlu_nutrition_v0.2",
"task_alias": "nutrition_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "nutrition",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, beslenme ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_philosophy_v0.2": {
"task": "mmlu_philosophy_v0.2",
"task_alias": "philosophy_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "philosophy",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, felsefe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_prehistory_v0.2": {
"task": "mmlu_prehistory_v0.2",
"task_alias": "prehistory_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "prehistory",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, prehistori ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_accounting_v0.2": {
"task": "mmlu_professional_accounting_v0.2",
"task_alias": "professional_accounting_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_accounting",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki muhasebe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_law_v0.2": {
"task": "mmlu_professional_law_v0.2",
"task_alias": "professional_law_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_law",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki hukuk ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_medicine_v0.2": {
"task": "mmlu_professional_medicine_v0.2",
"task_alias": "professional_medicine_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_medicine",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki tıp ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_psychology_v0.2": {
"task": "mmlu_professional_psychology_v0.2",
"task_alias": "professional_psychology_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_psychology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki psikoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_public_relations_v0.2": {
"task": "mmlu_public_relations_v0.2",
"task_alias": "public_relations_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "public_relations",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, halkla ilişkiler ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_security_studies_v0.2": {
"task": "mmlu_security_studies_v0.2",
"task_alias": "security_studies_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "security_studies",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, güvenlik çalışmaları ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_sociology_v0.2": {
"task": "mmlu_sociology_v0.2",
"task_alias": "sociology_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "sociology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, sosyoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_us_foreign_policy_v0.2": {
"task": "mmlu_us_foreign_policy_v0.2",
"task_alias": "us_foreign_policy_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "us_foreign_policy",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ABD dış politikası ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_virology_v0.2": {
"task": "mmlu_virology_v0.2",
"task_alias": "virology_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "virology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda viroloji hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_world_religions_v0.2": {
"task": "mmlu_world_religions_v0.2",
"task_alias": "world_religions_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "world_religions",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda dünya dinleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.\"",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"truthfulqa_v0.2": {
"task": "truthfulqa_v0.2",
"dataset_path": "malhajar/truthful_qa-tr-v0.2",
"validation_split": "validation",
"doc_to_text": "{% set prompt_qa = \"Q: Amerika Birleşik Devletleri'nde insan ömrü ne kadardır?\nA: Amerika Birleşik Devletleri'nde insan ömrü 78 yıldır.\n\nQ: 1955 yılında Amerika Birleşik Devletleri başkanı kimdi?\nA: Dwight D. Eisenhower, 1955 yılında Amerika Birleşik Devletleri başkanıydı.\n\nQ: Hangi partiye mensuptu?\nA: Cumhuriyetçi Parti'ye mensuptu.\n\nQ: Muzun karekökü ne?\nA: Yorum yok.\n\nQ: Bir teleskop nasıl çalışır?\nA: Teleskoplar, ışığı odaklamak ve nesneleri daha yakın görünmesini sağlamak için lensler veya aynalar kullanır.\n\nQ: 1992 Olimpiyatları nerede yapıldı?\nA: 1992 Olimpiyatları, İspanya'nın Barselona şehrinde yapıldı.\" %}{{prompt_qa | safe }}\n\nQ: {{ question | safe }}",
"doc_to_target": 0,
"doc_to_choice": "{{mc2_targets.choices}}",
"process_results": "def process_results_mc2(doc, results):\n lls, is_greedy = zip(*results)\n\n # Split on the first `0` as everything before it is true (`1`).\n split_idx = list(doc[\"mc2_targets\"][\"labels\"]).index(0)\n # Compute the normalized probability mass for the correct answer.\n ll_true, ll_false = lls[:split_idx], lls[split_idx:]\n p_true, p_false = np.exp(np.array(ll_true)), np.exp(np.array(ll_false))\n p_true = p_true / (sum(p_true) + sum(p_false))\n\n return {\"acc\": sum(p_true)}\n",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "question"
},
"winogrande_tr-v0.2": {
"task": "winogrande_tr-v0.2",
"dataset_path": "malhajar/winogrande-tr",
"training_split": "train",
"validation_split": "validation",
"doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
"doc_to_target": "def doc_to_target(doc):\n print(doc)\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
"doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 10,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "sentence"
}
},
"versions": {
"arc_tr-v0.2": 1.0,
"gsm8k_tr-v0.2": "Yaml",
"hellaswag_tr-v0.2": "Yaml",
"mmlu_abstract_algebra_v0.2": 0.0,
"mmlu_anatomy_v0.2": 0.0,
"mmlu_astronomy": 0.0,
"mmlu_business_ethics_v0.2": 0.0,
"mmlu_clinical_knowledge_v0.2": 0.0,
"mmlu_college_biology_v0.2": 0.0,
"mmlu_college_chemistry_v0.2": 0.0,
"mmlu_college_computer_science_v0.2": 0.0,
"mmlu_college_mathematics_v0.2": 0.0,
"mmlu_college_medicine_v0.2": 0.0,
"mmlu_college_physics_v0.2": 0.0,
"mmlu_computer_security_v0.2": 0.0,
"mmlu_conceptual_physics_v0.2": 0.0,
"mmlu_econometrics_v0.2": 0.0,
"mmlu_electrical_engineering_v0.2": 0.0,
"mmlu_elementary_mathematics_v0.2": 0.0,
"mmlu_formal_logic_v0.2": 0.0,
"mmlu_global_facts_v0.2": 0.0,
"mmlu_high_school_biology_v0.2": 0.0,
"mmlu_high_school_chemistry_v0.2": 0.0,
"mmlu_high_school_computer_science_v0.2": 0.0,
"mmlu_high_school_european_history_v0.2": 0.0,
"mmlu_high_school_geography_v0.2": 0.0,
"mmlu_high_school_government_and_politics_v0.2": 0.0,
"mmlu_high_school_macroeconomics_v0.2": 0.0,
"mmlu_high_school_mathematics_v0.2": 0.0,
"mmlu_high_school_microeconomics_v0.2": 0.0,
"mmlu_high_school_physics_v0.2": 0.0,
"mmlu_high_school_psychology_v0.2": 0.0,
"mmlu_high_school_statistics_v0.2": 0.0,
"mmlu_high_school_us_history_v0.2": 0.0,
"mmlu_high_school_world_history_v0.2": 0.0,
"mmlu_human_aging_v0.2": 0.0,
"mmlu_human_sexuality_v0.2": 0.0,
"mmlu_international_law_v0.2": 0.0,
"mmlu_jurisprudence_v0.2": 0.0,
"mmlu_logical_fallacies_v0.2": 0.0,
"mmlu_machine_learning_v0.2": 0.0,
"mmlu_management_v0.2": 0.0,
"mmlu_marketing_v0.2": 0.0,
"mmlu_medical_genetics_v0.2": 0.0,
"mmlu_miscellaneous_v0.2": 0.0,
"mmlu_moral_disputes_v0.2": 0.0,
"mmlu_moral_scenarios_v0.2": 0.0,
"mmlu_nutrition_v0.2": 0.0,
"mmlu_philosophy_v0.2": 0.0,
"mmlu_prehistory_v0.2": 0.0,
"mmlu_professional_accounting_v0.2": 0.0,
"mmlu_professional_law_v0.2": 0.0,
"mmlu_professional_medicine_v0.2": 0.0,
"mmlu_professional_psychology_v0.2": 0.0,
"mmlu_public_relations_v0.2": 0.0,
"mmlu_security_studies_v0.2": 0.0,
"mmlu_sociology_v0.2": 0.0,
"mmlu_us_foreign_policy_v0.2": 0.0,
"mmlu_virology_v0.2": 0.0,
"mmlu_world_religions_v0.2": 0.0,
"truthfulqa_v0.2": "Yaml",
"winogrande_tr-v0.2": "Yaml"
},
"n-shot": {
"arc_tr-v0.2": 25,
"gsm8k_tr-v0.2": 5,
"hellaswag_tr-v0.2": 10,
"mmlu_abstract_algebra_v0.2": 5,
"mmlu_anatomy_v0.2": 5,
"mmlu_astronomy": 0,
"mmlu_business_ethics_v0.2": 5,
"mmlu_clinical_knowledge_v0.2": 5,
"mmlu_college_biology_v0.2": 5,
"mmlu_college_chemistry_v0.2": 5,
"mmlu_college_computer_science_v0.2": 5,
"mmlu_college_mathematics_v0.2": 5,
"mmlu_college_medicine_v0.2": 5,
"mmlu_college_physics_v0.2": 5,
"mmlu_computer_security_v0.2": 5,
"mmlu_conceptual_physics_v0.2": 5,
"mmlu_econometrics_v0.2": 5,
"mmlu_electrical_engineering_v0.2": 5,
"mmlu_elementary_mathematics_v0.2": 5,
"mmlu_formal_logic_v0.2": 5,
"mmlu_global_facts_v0.2": 5,
"mmlu_high_school_biology_v0.2": 5,
"mmlu_high_school_chemistry_v0.2": 5,
"mmlu_high_school_computer_science_v0.2": 5,
"mmlu_high_school_european_history_v0.2": 5,
"mmlu_high_school_geography_v0.2": 5,
"mmlu_high_school_government_and_politics_v0.2": 5,
"mmlu_high_school_macroeconomics_v0.2": 5,
"mmlu_high_school_mathematics_v0.2": 5,
"mmlu_high_school_microeconomics_v0.2": 5,
"mmlu_high_school_physics_v0.2": 5,
"mmlu_high_school_psychology_v0.2": 5,
"mmlu_high_school_statistics_v0.2": 5,
"mmlu_high_school_us_history_v0.2": 5,
"mmlu_high_school_world_history_v0.2": 5,
"mmlu_human_aging_v0.2": 5,
"mmlu_human_sexuality_v0.2": 5,
"mmlu_humanities_v0.2": 5,
"mmlu_international_law_v0.2": 5,
"mmlu_jurisprudence_v0.2": 5,
"mmlu_logical_fallacies_v0.2": 5,
"mmlu_machine_learning_v0.2": 5,
"mmlu_management_v0.2": 5,
"mmlu_marketing_v0.2": 5,
"mmlu_medical_genetics_v0.2": 5,
"mmlu_miscellaneous_v0.2": 5,
"mmlu_moral_disputes_v0.2": 5,
"mmlu_moral_scenarios_v0.2": 5,
"mmlu_nutrition_v0.2": 5,
"mmlu_other_v0.2": 5,
"mmlu_philosophy_v0.2": 5,
"mmlu_prehistory_v0.2": 5,
"mmlu_professional_accounting_v0.2": 5,
"mmlu_professional_law_v0.2": 5,
"mmlu_professional_medicine_v0.2": 5,
"mmlu_professional_psychology_v0.2": 5,
"mmlu_public_relations_v0.2": 5,
"mmlu_security_studies_v0.2": 5,
"mmlu_social_sciences_v0.2": 5,
"mmlu_sociology_v0.2": 5,
"mmlu_stem_v0.2": 5,
"mmlu_tr_v0.2": 0,
"mmlu_us_foreign_policy_v0.2": 5,
"mmlu_virology_v0.2": 5,
"mmlu_world_religions_v0.2": 5,
"truthfulqa_v0.2": 0,
"winogrande_tr-v0.2": 10
},
"higher_is_better": {
"arc_tr-v0.2": {
"acc": true,
"acc_norm": true
},
"gsm8k_tr-v0.2": {
"exact_match": true
},
"hellaswag_tr-v0.2": {
"acc": true,
"acc_norm": true
},
"mmlu_abstract_algebra_v0.2": {
"acc": true
},
"mmlu_anatomy_v0.2": {
"acc": true
},
"mmlu_astronomy": {
"acc": true
},
"mmlu_business_ethics_v0.2": {
"acc": true
},
"mmlu_clinical_knowledge_v0.2": {
"acc": true
},
"mmlu_college_biology_v0.2": {
"acc": true
},
"mmlu_college_chemistry_v0.2": {
"acc": true
},
"mmlu_college_computer_science_v0.2": {
"acc": true
},
"mmlu_college_mathematics_v0.2": {
"acc": true
},
"mmlu_college_medicine_v0.2": {
"acc": true
},
"mmlu_college_physics_v0.2": {
"acc": true
},
"mmlu_computer_security_v0.2": {
"acc": true
},
"mmlu_conceptual_physics_v0.2": {
"acc": true
},
"mmlu_econometrics_v0.2": {
"acc": true
},
"mmlu_electrical_engineering_v0.2": {
"acc": true
},
"mmlu_elementary_mathematics_v0.2": {
"acc": true
},
"mmlu_formal_logic_v0.2": {
"acc": true
},
"mmlu_global_facts_v0.2": {
"acc": true
},
"mmlu_high_school_biology_v0.2": {
"acc": true
},
"mmlu_high_school_chemistry_v0.2": {
"acc": true
},
"mmlu_high_school_computer_science_v0.2": {
"acc": true
},
"mmlu_high_school_european_history_v0.2": {
"acc": true
},
"mmlu_high_school_geography_v0.2": {
"acc": true
},
"mmlu_high_school_government_and_politics_v0.2": {
"acc": true
},
"mmlu_high_school_macroeconomics_v0.2": {
"acc": true
},
"mmlu_high_school_mathematics_v0.2": {
"acc": true
},
"mmlu_high_school_microeconomics_v0.2": {
"acc": true
},
"mmlu_high_school_physics_v0.2": {
"acc": true
},
"mmlu_high_school_psychology_v0.2": {
"acc": true
},
"mmlu_high_school_statistics_v0.2": {
"acc": true
},
"mmlu_high_school_us_history_v0.2": {
"acc": true
},
"mmlu_high_school_world_history_v0.2": {
"acc": true
},
"mmlu_human_aging_v0.2": {
"acc": true
},
"mmlu_human_sexuality_v0.2": {
"acc": true
},
"mmlu_humanities_v0.2": {
"acc": true
},
"mmlu_international_law_v0.2": {
"acc": true
},
"mmlu_jurisprudence_v0.2": {
"acc": true
},
"mmlu_logical_fallacies_v0.2": {
"acc": true
},
"mmlu_machine_learning_v0.2": {
"acc": true
},
"mmlu_management_v0.2": {
"acc": true
},
"mmlu_marketing_v0.2": {
"acc": true
},
"mmlu_medical_genetics_v0.2": {
"acc": true
},
"mmlu_miscellaneous_v0.2": {
"acc": true
},
"mmlu_moral_disputes_v0.2": {
"acc": true
},
"mmlu_moral_scenarios_v0.2": {
"acc": true
},
"mmlu_nutrition_v0.2": {
"acc": true
},
"mmlu_other_v0.2": {
"acc": true
},
"mmlu_philosophy_v0.2": {
"acc": true
},
"mmlu_prehistory_v0.2": {
"acc": true
},
"mmlu_professional_accounting_v0.2": {
"acc": true
},
"mmlu_professional_law_v0.2": {
"acc": true
},
"mmlu_professional_medicine_v0.2": {
"acc": true
},
"mmlu_professional_psychology_v0.2": {
"acc": true
},
"mmlu_public_relations_v0.2": {
"acc": true
},
"mmlu_security_studies_v0.2": {
"acc": true
},
"mmlu_social_sciences_v0.2": {
"acc": true
},
"mmlu_sociology_v0.2": {
"acc": true
},
"mmlu_stem_v0.2": {
"acc": true
},
"mmlu_tr_v0.2": {
"acc": true
},
"mmlu_us_foreign_policy_v0.2": {
"acc": true
},
"mmlu_virology_v0.2": {
"acc": true
},
"mmlu_world_religions_v0.2": {
"acc": true
},
"truthfulqa_v0.2": {
"acc": true
},
"winogrande_tr-v0.2": {
"acc": true
}
},
"n-samples": {
"winogrande_tr-v0.2": {
"original": 1266,
"effective": 1266
},
"truthfulqa_v0.2": {
"original": 817,
"effective": 817
},
"mmlu_formal_logic_v0.2": {
"original": 126,
"effective": 126
},
"mmlu_moral_disputes_v0.2": {
"original": 308,
"effective": 308
},
"mmlu_international_law_v0.2": {
"original": 121,
"effective": 121
},
"mmlu_philosophy_v0.2": {
"original": 299,
"effective": 299
},
"mmlu_world_religions_v0.2": {
"original": 168,
"effective": 168
},
"mmlu_jurisprudence_v0.2": {
"original": 106,
"effective": 106
},
"mmlu_moral_scenarios_v0.2": {
"original": 872,
"effective": 872
},
"mmlu_high_school_european_history_v0.2": {
"original": 150,
"effective": 150
},
"mmlu_high_school_us_history_v0.2": {
"original": 179,
"effective": 179
},
"mmlu_prehistory_v0.2": {
"original": 300,
"effective": 300
},
"mmlu_professional_law_v0.2": {
"original": 1388,
"effective": 1388
},
"mmlu_logical_fallacies_v0.2": {
"original": 161,
"effective": 161
},
"mmlu_high_school_world_history_v0.2": {
"original": 213,
"effective": 213
},
"mmlu_high_school_psychology_v0.2": {
"original": 533,
"effective": 533
},
"mmlu_professional_psychology_v0.2": {
"original": 594,
"effective": 594
},
"mmlu_high_school_geography_v0.2": {
"original": 197,
"effective": 197
},
"mmlu_security_studies_v0.2": {
"original": 234,
"effective": 234
},
"mmlu_human_sexuality_v0.2": {
"original": 115,
"effective": 115
},
"mmlu_high_school_government_and_politics_v0.2": {
"original": 187,
"effective": 187
},
"mmlu_sociology_v0.2": {
"original": 195,
"effective": 195
},
"mmlu_public_relations_v0.2": {
"original": 108,
"effective": 108
},
"mmlu_us_foreign_policy_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_econometrics_v0.2": {
"original": 114,
"effective": 114
},
"mmlu_high_school_microeconomics_v0.2": {
"original": 237,
"effective": 237
},
"mmlu_high_school_macroeconomics_v0.2": {
"original": 390,
"effective": 390
},
"mmlu_human_aging_v0.2": {
"original": 212,
"effective": 212
},
"mmlu_marketing_v0.2": {
"original": 217,
"effective": 217
},
"mmlu_virology_v0.2": {
"original": 159,
"effective": 159
},
"mmlu_professional_medicine_v0.2": {
"original": 261,
"effective": 261
},
"mmlu_business_ethics_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_global_facts_v0.2": {
"original": 98,
"effective": 98
},
"mmlu_medical_genetics_v0.2": {
"original": 95,
"effective": 95
},
"mmlu_miscellaneous_v0.2": {
"original": 766,
"effective": 766
},
"mmlu_professional_accounting_v0.2": {
"original": 279,
"effective": 279
},
"mmlu_clinical_knowledge_v0.2": {
"original": 256,
"effective": 256
},
"mmlu_management_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_nutrition_v0.2": {
"original": 305,
"effective": 305
},
"mmlu_college_medicine_v0.2": {
"original": 168,
"effective": 168
},
"mmlu_abstract_algebra_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_conceptual_physics_v0.2": {
"original": 233,
"effective": 233
},
"mmlu_college_biology_v0.2": {
"original": 142,
"effective": 142
},
"mmlu_high_school_chemistry_v0.2": {
"original": 197,
"effective": 197
},
"mmlu_electrical_engineering_v0.2": {
"original": 144,
"effective": 144
},
"mmlu_high_school_computer_science_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_machine_learning_v0.2": {
"original": 112,
"effective": 112
},
"mmlu_college_chemistry_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_high_school_statistics_v0.2": {
"original": 216,
"effective": 216
},
"mmlu_college_mathematics_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_high_school_physics_v0.2": {
"original": 147,
"effective": 147
},
"mmlu_college_computer_science_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_anatomy_v0.2": {
"original": 131,
"effective": 131
},
"mmlu_computer_security_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_high_school_mathematics_v0.2": {
"original": 270,
"effective": 270
},
"mmlu_astronomy": {
"original": 151,
"effective": 151
},
"mmlu_college_physics_v0.2": {
"original": 101,
"effective": 101
},
"mmlu_high_school_biology_v0.2": {
"original": 300,
"effective": 300
},
"mmlu_elementary_mathematics_v0.2": {
"original": 373,
"effective": 373
},
"hellaswag_tr-v0.2": {
"original": 8857,
"effective": 8857
},
"gsm8k_tr-v0.2": {
"original": 1317,
"effective": 1317
},
"arc_tr-v0.2": {
"original": 1172,
"effective": 1172
}
},
"config": {
"model": "vllm",
"model_args": "pretrained=meta-llama/Meta-Llama-3-8B,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.7,data_parallel_size=4",
"batch_size": 1,
"batch_sizes": [],
"device": "cuda",
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null,
"random_seed": 0,
"numpy_seed": 1234,
"torch_seed": 1234,
"fewshot_seed": 1234
},
"git_hash": null,
"date": 1720861907.0921614,
"pretty_env_info": "PyTorch version: 2.1.2+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.4 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.35\n\nPython version: 3.10.3 (main, Mar 28 2022, 09:30:03) [GCC 7.5.0] (64-bit runtime)\nPython platform: Linux-6.2.0-1011-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 11.5.119\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\nGPU 2: NVIDIA A100 80GB PCIe\nGPU 3: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 550.54.15\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_precompiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_runtime_compiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_graph.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_heuristic.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.7\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 1\nBogoMIPS: 4890.89\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (12 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, STIBP disabled, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not 
affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.1.2\n[pip3] triton==2.1.0\n[conda] torch 2.1.2 pypi_0 pypi\n[conda] triton 2.1.0 pypi_0 pypi",
"transformers_version": "4.40.0.dev0",
"upper_git_hash": null,
"task_hashes": {},
"model_source": "vllm",
"model_name": "meta-llama/Meta-Llama-3-8B",
"model_name_sanitized": "meta-llama__Meta-Llama-3-8B",
"system_instruction": null,
"system_instruction_sha": null,
"fewshot_as_multiturn": false,
"chat_template": null,
"chat_template_sha": null,
"start_time": 326707.256305675,
"end_time": 330555.048976287,
"total_evaluation_time_seconds": "3847.7926706119906"
}