results_v0.2/notbdq/mistral-turkish-v2/results_2024-07-13T08-07-49.151292.json
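For orientation: this is a raw lm-evaluation-harness results dump. The "results" object holds per-task metrics (accuracy as "acc,none" with its standard error as "acc_stderr,none"; the gsm8k_tr task reports "exact_match" filters instead), "groups" holds the aggregated MMLU-TR category scores, "group_subtasks" lists which subtasks roll up into each group, and "configs" records the exact prompt template and few-shot setup used for every task. A minimal sketch for inspecting the scores, assuming the file has been downloaded locally as results.json (that filename is an assumption, not part of the upload):

import json

# Load the downloaded results file (the local path is an assumption;
# point it at wherever you saved the JSON from the repository).
with open("results.json", encoding="utf-8") as f:
    report = json.load(f)

# Print each task's accuracy with its standard error. Tasks that
# report other metrics (e.g. gsm8k_tr's exact_match) are skipped.
for task, metrics in report["results"].items():
    acc = metrics.get("acc,none")
    if acc is not None:
        stderr = metrics.get("acc_stderr,none", 0.0)
        print(f"{task:55s} acc={acc:.4f} +/- {stderr:.4f}")

The raw JSON follows.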
{
"results": {
"winogrande_tr-v0.2": {
"acc,none": 0.5221169036334913,
"acc_stderr,none": 0.01404427900554207,
"alias": "winogrande_tr-v0.2"
},
"truthfulqa_v0.2": {
"acc,none": 0.4520176493869486,
"acc_stderr,none": 0.015834425263882356,
"alias": "truthfulqa_v0.2"
},
"mmlu_tr_v0.2": {
"acc,none": 0.3269984470901427,
"acc_stderr,none": 0.003993820158732649,
"alias": "mmlu_tr_v0.2"
},
"mmlu_humanities_v0.2": {
"alias": " - humanities_v0.2",
"acc,none": 0.30243680255067185,
"acc_stderr,none": 0.006883245816567404
},
"mmlu_formal_logic_v0.2": {
"alias": " - formal_logic_v0.2",
"acc,none": 0.2619047619047619,
"acc_stderr,none": 0.039325376803928704
},
"mmlu_high_school_european_history_v0.2": {
"alias": " - high_school_european_history_v0.2",
"acc,none": 0.3333333333333333,
"acc_stderr,none": 0.03861896309089946
},
"mmlu_high_school_us_history_v0.2": {
"alias": " - high_school_us_history_v0.2",
"acc,none": 0.29608938547486036,
"acc_stderr,none": 0.03421843754304871
},
"mmlu_high_school_world_history_v0.2": {
"alias": " - high_school_world_history_v0.2",
"acc,none": 0.39436619718309857,
"acc_stderr,none": 0.03356502417179251
},
"mmlu_international_law_v0.2": {
"alias": " - international_law_v0.2",
"acc,none": 0.4380165289256198,
"acc_stderr,none": 0.04529146804435792
},
"mmlu_jurisprudence_v0.2": {
"alias": " - jurisprudence_v0.2",
"acc,none": 0.39622641509433965,
"acc_stderr,none": 0.047732492983673595
},
"mmlu_logical_fallacies_v0.2": {
"alias": " - logical_fallacies_v0.2",
"acc,none": 0.39751552795031053,
"acc_stderr,none": 0.03868922112396876
},
"mmlu_moral_disputes_v0.2": {
"alias": " - moral_disputes_v0.2",
"acc,none": 0.3116883116883117,
"acc_stderr,none": 0.026435263441881948
},
"mmlu_moral_scenarios_v0.2": {
"alias": " - moral_scenarios_v0.2",
"acc,none": 0.2396788990825688,
"acc_stderr,none": 0.014464530608155834
},
"mmlu_philosophy_v0.2": {
"alias": " - philosophy_v0.2",
"acc,none": 0.3377926421404682,
"acc_stderr,none": 0.027397690899629585
},
"mmlu_prehistory_v0.2": {
"alias": " - prehistory_v0.2",
"acc,none": 0.34,
"acc_stderr,none": 0.02739528558421693
},
"mmlu_professional_law_v0.2": {
"alias": " - professional_law_v0.2",
"acc,none": 0.265850144092219,
"acc_stderr,none": 0.011862400384523441
},
"mmlu_world_religions_v0.2": {
"alias": " - world_religions_v0.2",
"acc,none": 0.42857142857142855,
"acc_stderr,none": 0.038294318709323184
},
"mmlu_other_v0.2": {
"alias": " - other_v0.2",
"acc,none": 0.36828135368281356,
"acc_stderr,none": 0.008664864446413253
},
"mmlu_business_ethics_v0.2": {
"alias": " - business_ethics_v0.2",
"acc,none": 0.3434343434343434,
"acc_stderr,none": 0.047967590587574785
},
"mmlu_clinical_knowledge_v0.2": {
"alias": " - clinical_knowledge_v0.2",
"acc,none": 0.30078125,
"acc_stderr,none": 0.02871850463421181
},
"mmlu_college_medicine_v0.2": {
"alias": " - college_medicine_v0.2",
"acc,none": 0.2857142857142857,
"acc_stderr,none": 0.034957770302314006
},
"mmlu_global_facts_v0.2": {
"alias": " - global_facts_v0.2",
"acc,none": 0.30612244897959184,
"acc_stderr,none": 0.04679539751912002
},
"mmlu_human_aging_v0.2": {
"alias": " - human_aging_v0.2",
"acc,none": 0.41509433962264153,
"acc_stderr,none": 0.03392150325543463
},
"mmlu_management_v0.2": {
"alias": " - management_v0.2",
"acc,none": 0.41414141414141414,
"acc_stderr,none": 0.04975740158605099
},
"mmlu_marketing_v0.2": {
"alias": " - marketing_v0.2",
"acc,none": 0.5391705069124424,
"acc_stderr,none": 0.03391613235916986
},
"mmlu_medical_genetics_v0.2": {
"alias": " - medical_genetics_v0.2",
"acc,none": 0.4,
"acc_stderr,none": 0.05052911526399114
},
"mmlu_miscellaneous_v0.2": {
"alias": " - miscellaneous_v0.2",
"acc,none": 0.4386422976501306,
"acc_stderr,none": 0.017940906352107586
},
"mmlu_nutrition_v0.2": {
"alias": " - nutrition_v0.2",
"acc,none": 0.3770491803278688,
"acc_stderr,none": 0.027796434357070812
},
"mmlu_professional_accounting_v0.2": {
"alias": " - professional_accounting_v0.2",
"acc,none": 0.26523297491039427,
"acc_stderr,none": 0.02647684232695418
},
"mmlu_professional_medicine_v0.2": {
"alias": " - professional_medicine_v0.2",
"acc,none": 0.2260536398467433,
"acc_stderr,none": 0.025940271020378786
},
"mmlu_virology_v0.2": {
"alias": " - virology_v0.2",
"acc,none": 0.3333333333333333,
"acc_stderr,none": 0.03750293003086743
},
"mmlu_social_sciences_v0.2": {
"alias": " - social_sciences_v0.2",
"acc,none": 0.35830835830835833,
"acc_stderr,none": 0.008723519905952323
},
"mmlu_econometrics_v0.2": {
"alias": " - econometrics_v0.2",
"acc,none": 0.2543859649122807,
"acc_stderr,none": 0.04096985139843671
},
"mmlu_high_school_geography_v0.2": {
"alias": " - high_school_geography_v0.2",
"acc,none": 0.3553299492385787,
"acc_stderr,none": 0.03418665573987868
},
"mmlu_high_school_government_and_politics_v0.2": {
"alias": " - high_school_government_and_politics_v0.2",
"acc,none": 0.33689839572192515,
"acc_stderr,none": 0.034656367371165064
},
"mmlu_high_school_macroeconomics_v0.2": {
"alias": " - high_school_macroeconomics_v0.2",
"acc,none": 0.33589743589743587,
"acc_stderr,none": 0.023946724741563976
},
"mmlu_high_school_microeconomics_v0.2": {
"alias": " - high_school_microeconomics_v0.2",
"acc,none": 0.3291139240506329,
"acc_stderr,none": 0.030587326294702365
},
"mmlu_high_school_psychology_v0.2": {
"alias": " - high_school_psychology_v0.2",
"acc,none": 0.40150093808630394,
"acc_stderr,none": 0.021252949145292983
},
"mmlu_human_sexuality_v0.2": {
"alias": " - human_sexuality_v0.2",
"acc,none": 0.3130434782608696,
"acc_stderr,none": 0.04343247016610823
},
"mmlu_professional_psychology_v0.2": {
"alias": " - professional_psychology_v0.2",
"acc,none": 0.3282828282828283,
"acc_stderr,none": 0.019283682034608934
},
"mmlu_public_relations_v0.2": {
"alias": " - public_relations_v0.2",
"acc,none": 0.35185185185185186,
"acc_stderr,none": 0.04616631111801713
},
"mmlu_security_studies_v0.2": {
"alias": " - security_studies_v0.2",
"acc,none": 0.38461538461538464,
"acc_stderr,none": 0.03187195347942466
},
"mmlu_sociology_v0.2": {
"alias": " - sociology_v0.2",
"acc,none": 0.41025641025641024,
"acc_stderr,none": 0.035314937123266714
},
"mmlu_us_foreign_policy_v0.2": {
"alias": " - us_foreign_policy_v0.2",
"acc,none": 0.5252525252525253,
"acc_stderr,none": 0.05044316967661913
},
"mmlu_stem_v0.2": {
"alias": " - stem_v0.2",
"acc,none": 0.2914927768860353,
"acc_stderr,none": 0.008090239244322358
},
"mmlu_abstract_algebra_v0.2": {
"alias": " - abstract_algebra_v0.2",
"acc,none": 0.27,
"acc_stderr,none": 0.04461960433384741
},
"mmlu_anatomy_v0.2": {
"alias": " - anatomy_v0.2",
"acc,none": 0.33587786259541985,
"acc_stderr,none": 0.04142313771996664
},
"mmlu_astronomy": {
"alias": " - astronomy",
"acc,none": 0.25165562913907286,
"acc_stderr,none": 0.03543304234389985
},
"mmlu_college_biology_v0.2": {
"alias": " - college_biology_v0.2",
"acc,none": 0.33098591549295775,
"acc_stderr,none": 0.03962897421627726
},
"mmlu_college_chemistry_v0.2": {
"alias": " - college_chemistry_v0.2",
"acc,none": 0.21212121212121213,
"acc_stderr,none": 0.0412960693254089
},
"mmlu_college_computer_science_v0.2": {
"alias": " - college_computer_science_v0.2",
"acc,none": 0.23232323232323232,
"acc_stderr,none": 0.04266016017054687
},
"mmlu_college_mathematics_v0.2": {
"alias": " - college_mathematics_v0.2",
"acc,none": 0.26,
"acc_stderr,none": 0.044084400227680794
},
"mmlu_college_physics_v0.2": {
"alias": " - college_physics_v0.2",
"acc,none": 0.19801980198019803,
"acc_stderr,none": 0.03985071643068933
},
"mmlu_computer_security_v0.2": {
"alias": " - computer_security_v0.2",
"acc,none": 0.42,
"acc_stderr,none": 0.04960449637488583
},
"mmlu_conceptual_physics_v0.2": {
"alias": " - conceptual_physics_v0.2",
"acc,none": 0.33476394849785407,
"acc_stderr,none": 0.03098227018601487
},
"mmlu_electrical_engineering_v0.2": {
"alias": " - electrical_engineering_v0.2",
"acc,none": 0.3402777777777778,
"acc_stderr,none": 0.039621355734862175
},
"mmlu_elementary_mathematics_v0.2": {
"alias": " - elementary_mathematics_v0.2",
"acc,none": 0.257372654155496,
"acc_stderr,none": 0.02266706026590103
},
"mmlu_high_school_biology_v0.2": {
"alias": " - high_school_biology_v0.2",
"acc,none": 0.37,
"acc_stderr,none": 0.027921294063982003
},
"mmlu_high_school_chemistry_v0.2": {
"alias": " - high_school_chemistry_v0.2",
"acc,none": 0.3147208121827411,
"acc_stderr,none": 0.033171756378484886
},
"mmlu_high_school_computer_science_v0.2": {
"alias": " - high_school_computer_science_v0.2",
"acc,none": 0.39,
"acc_stderr,none": 0.04902071300001975
},
"mmlu_high_school_mathematics_v0.2": {
"alias": " - high_school_mathematics_v0.2",
"acc,none": 0.26296296296296295,
"acc_stderr,none": 0.02684205787383371
},
"mmlu_high_school_physics_v0.2": {
"alias": " - high_school_physics_v0.2",
"acc,none": 0.22448979591836735,
"acc_stderr,none": 0.034531515032766795
},
"mmlu_high_school_statistics_v0.2": {
"alias": " - high_school_statistics_v0.2",
"acc,none": 0.18981481481481483,
"acc_stderr,none": 0.026744714834691936
},
"mmlu_machine_learning_v0.2": {
"alias": " - machine_learning_v0.2",
"acc,none": 0.35714285714285715,
"acc_stderr,none": 0.04547960999764376
},
"hellaswag_tr-v0.2": {
"acc,none": 0.3238116743818449,
"acc_stderr,none": 0.004972345314019131,
"acc_norm,none": 0.37281246471717283,
"acc_norm_stderr,none": 0.005138366971980697,
"alias": "hellaswag_tr-v0.2"
},
"gsm8k_tr-v0.2": {
"exact_match,strict-match": 0.0007593014426727411,
"exact_match_stderr,strict-match": 0.0007593014426727257,
"exact_match,flexible-extract": 0.032649962034927864,
"exact_match_stderr,flexible-extract": 0.004898974988477095,
"alias": "gsm8k_tr-v0.2"
},
"arc_tr-v0.2": {
"acc,none": 0.28498293515358364,
"acc_stderr,none": 0.013191348179838792,
"acc_norm,none": 0.3302047781569966,
"acc_norm_stderr,none": 0.013743085603760433,
"alias": "arc_tr-v0.2"
}
},
"groups": {
"mmlu_tr_v0.2": {
"acc,none": 0.3269984470901427,
"acc_stderr,none": 0.003993820158732649,
"alias": "mmlu_tr_v0.2"
},
"mmlu_humanities_v0.2": {
"alias": " - humanities_v0.2",
"acc,none": 0.30243680255067185,
"acc_stderr,none": 0.006883245816567404
},
"mmlu_other_v0.2": {
"alias": " - other_v0.2",
"acc,none": 0.36828135368281356,
"acc_stderr,none": 0.008664864446413253
},
"mmlu_social_sciences_v0.2": {
"alias": " - social_sciences_v0.2",
"acc,none": 0.35830835830835833,
"acc_stderr,none": 0.008723519905952323
},
"mmlu_stem_v0.2": {
"alias": " - stem_v0.2",
"acc,none": 0.2914927768860353,
"acc_stderr,none": 0.008090239244322358
}
},
"group_subtasks": {
"arc_tr-v0.2": [],
"gsm8k_tr-v0.2": [],
"hellaswag_tr-v0.2": [],
"mmlu_stem_v0.2": [
"mmlu_abstract_algebra_v0.2",
"mmlu_conceptual_physics_v0.2",
"mmlu_college_biology_v0.2",
"mmlu_high_school_chemistry_v0.2",
"mmlu_electrical_engineering_v0.2",
"mmlu_high_school_computer_science_v0.2",
"mmlu_machine_learning_v0.2",
"mmlu_college_chemistry_v0.2",
"mmlu_high_school_statistics_v0.2",
"mmlu_college_mathematics_v0.2",
"mmlu_high_school_physics_v0.2",
"mmlu_college_computer_science_v0.2",
"mmlu_anatomy_v0.2",
"mmlu_computer_security_v0.2",
"mmlu_high_school_mathematics_v0.2",
"mmlu_astronomy",
"mmlu_college_physics_v0.2",
"mmlu_high_school_biology_v0.2",
"mmlu_elementary_mathematics_v0.2"
],
"mmlu_other_v0.2": [
"mmlu_human_aging_v0.2",
"mmlu_marketing_v0.2",
"mmlu_virology_v0.2",
"mmlu_professional_medicine_v0.2",
"mmlu_business_ethics_v0.2",
"mmlu_global_facts_v0.2",
"mmlu_medical_genetics_v0.2",
"mmlu_miscellaneous_v0.2",
"mmlu_professional_accounting_v0.2",
"mmlu_clinical_knowledge_v0.2",
"mmlu_management_v0.2",
"mmlu_nutrition_v0.2",
"mmlu_college_medicine_v0.2"
],
"mmlu_social_sciences_v0.2": [
"mmlu_high_school_psychology_v0.2",
"mmlu_professional_psychology_v0.2",
"mmlu_high_school_geography_v0.2",
"mmlu_security_studies_v0.2",
"mmlu_human_sexuality_v0.2",
"mmlu_high_school_government_and_politics_v0.2",
"mmlu_sociology_v0.2",
"mmlu_public_relations_v0.2",
"mmlu_us_foreign_policy_v0.2",
"mmlu_econometrics_v0.2",
"mmlu_high_school_microeconomics_v0.2",
"mmlu_high_school_macroeconomics_v0.2"
],
"mmlu_humanities_v0.2": [
"mmlu_formal_logic_v0.2",
"mmlu_moral_disputes_v0.2",
"mmlu_international_law_v0.2",
"mmlu_philosophy_v0.2",
"mmlu_world_religions_v0.2",
"mmlu_jurisprudence_v0.2",
"mmlu_moral_scenarios_v0.2",
"mmlu_high_school_european_history_v0.2",
"mmlu_high_school_us_history_v0.2",
"mmlu_prehistory_v0.2",
"mmlu_professional_law_v0.2",
"mmlu_logical_fallacies_v0.2",
"mmlu_high_school_world_history_v0.2"
],
"mmlu_tr_v0.2": [
"mmlu_humanities_v0.2",
"mmlu_social_sciences_v0.2",
"mmlu_other_v0.2",
"mmlu_stem_v0.2"
],
"truthfulqa_v0.2": [],
"winogrande_tr-v0.2": []
},
"configs": {
"arc_tr-v0.2": {
"task": "arc_tr-v0.2",
"group": [
"ai2_arc"
],
"dataset_path": "malhajar/arc-tr-v0.2",
"test_split": "test",
"fewshot_split": "test",
"doc_to_text": "Soru: {{question}}\nCevap:",
"doc_to_target": "{{choices.label.index(answerKey)}}",
"doc_to_choice": "{{choices.text}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 25,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "Soru: {{question}}\nCevap:",
"metadata": {
"version": 1.0
}
},
"gsm8k_tr-v0.2": {
"task": "gsm8k_tr-v0.2",
"group": [
"math_word_problems"
],
"dataset_path": "malhajar/gsm8k_tr-v0.2",
"test_split": "test",
"fewshot_split": "test",
"doc_to_text": "Soru: {{question}}\nCevap:",
"doc_to_target": "{{answer}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 5,
"metric_list": [
{
"metric": "exact_match",
"aggregation": "mean",
"higher_is_better": true,
"ignore_case": true,
"ignore_punctuation": false,
"regexes_to_ignore": [
",",
"\\$",
"(?s).*#### ",
"\\.$"
]
}
],
"output_type": "generate_until",
"generation_kwargs": {
"until": [
"Question:",
"</s>",
"<|im_end|>"
],
"do_sample": false,
"temperature": 0.0
},
"repeats": 1,
"filter_list": [
{
"name": "strict-match",
"filter": [
{
"function": "regex",
"regex_pattern": "#### (\\-?[0-9\\.\\,]+)"
},
{
"function": "take_first"
}
]
},
{
"name": "flexible-extract",
"filter": [
{
"function": "regex",
"group_select": -1,
"regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)"
},
{
"function": "take_first"
}
]
}
],
"should_decontaminate": false
},
"hellaswag_tr-v0.2": {
"task": "hellaswag_tr-v0.2",
"group": [
"multiple_choice"
],
"dataset_path": "malhajar/hellaswag_tr-v0.2",
"validation_split": "validation",
"fewshot_split": "validation",
"process_docs": "def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:\n def _process_doc(doc):\n ctx = doc[\"ctx_a\"] + \" \" + doc[\"ctx_b\"].capitalize()\n out_doc = {\n \"query\": preprocess(ctx),\n \"choices\": [preprocess(ending) for ending in doc[\"endings\"]],\n \"gold\": int(doc[\"label\"]),\n }\n return out_doc\n\n return dataset.map(_process_doc)\n",
"doc_to_text": "{{query}}",
"doc_to_target": "{{label}}",
"doc_to_choice": "{{choices}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 10,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false
},
"mmlu_abstract_algebra_v0.2": {
"task": "mmlu_abstract_algebra_v0.2",
"task_alias": "abstract_algebra_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "abstract_algebra",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda soyut cebir hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_anatomy_v0.2": {
"task": "mmlu_anatomy_v0.2",
"task_alias": "anatomy_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "anatomy",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda anatomiyi konu alan çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_astronomy": {
"task": "mmlu_astronomy",
"task_alias": "astronomy",
"group": "mmlu_stem",
"dataset_path": "malhajar/mmlu-tr",
"dataset_name": "astronomy",
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_business_ethics_v0.2": {
"task": "mmlu_business_ethics_v0.2",
"task_alias": "business_ethics_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "business_ethics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda iş etiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_clinical_knowledge_v0.2": {
"task": "mmlu_clinical_knowledge_v0.2",
"task_alias": "clinical_knowledge_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "clinical_knowledge",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda klinik bilgi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_biology_v0.2": {
"task": "mmlu_college_biology_v0.2",
"task_alias": "college_biology_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_biology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite biyolojisi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_chemistry_v0.2": {
"task": "mmlu_college_chemistry_v0.2",
"task_alias": "college_chemistry_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_chemistry",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite kimyası hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_computer_science_v0.2": {
"task": "mmlu_college_computer_science_v0.2",
"task_alias": "college_computer_science_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_computer_science",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite bilgisayar bilimleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_mathematics_v0.2": {
"task": "mmlu_college_mathematics_v0.2",
"task_alias": "college_mathematics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_mathematics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite matematiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_medicine_v0.2": {
"task": "mmlu_college_medicine_v0.2",
"task_alias": "college_medicine_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_medicine",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite tıbbı hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_college_physics_v0.2": {
"task": "mmlu_college_physics_v0.2",
"task_alias": "college_physics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "college_physics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda üniversite fizik hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_computer_security_v0.2": {
"task": "mmlu_computer_security_v0.2",
"task_alias": "computer_security_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "computer_security",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda bilgisayar güvenliği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_conceptual_physics_v0.2": {
"task": "mmlu_conceptual_physics_v0.2",
"task_alias": "conceptual_physics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "conceptual_physics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, kavramsal fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_econometrics_v0.2": {
"task": "mmlu_econometrics_v0.2",
"task_alias": "econometrics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "econometrics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ekonometri hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_electrical_engineering_v0.2": {
"task": "mmlu_electrical_engineering_v0.2",
"task_alias": "electrical_engineering_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "electrical_engineering",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, elektrik mühendisliği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_elementary_mathematics_v0.2": {
"task": "mmlu_elementary_mathematics_v0.2",
"task_alias": "elementary_mathematics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "elementary_mathematics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ilköğretim matematiği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_formal_logic_v0.2": {
"task": "mmlu_formal_logic_v0.2",
"task_alias": "formal_logic_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "formal_logic",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, formal mantık hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_global_facts_v0.2": {
"task": "mmlu_global_facts_v0.2",
"task_alias": "global_facts_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "global_facts",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, küresel gerçekler hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_biology_v0.2": {
"task": "mmlu_high_school_biology_v0.2",
"task_alias": "high_school_biology_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_biology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise biyolojisi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_chemistry_v0.2": {
"task": "mmlu_high_school_chemistry_v0.2",
"task_alias": "high_school_chemistry_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_chemistry",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise kimyası hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_computer_science_v0.2": {
"task": "mmlu_high_school_computer_science_v0.2",
"task_alias": "high_school_computer_science_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_computer_science",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise bilgisayar bilimi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_european_history_v0.2": {
"task": "mmlu_high_school_european_history_v0.2",
"task_alias": "high_school_european_history_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_european_history",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise Avrupa tarihi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_geography_v0.2": {
"task": "mmlu_high_school_geography_v0.2",
"task_alias": "high_school_geography_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_geography",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise coğrafya hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_government_and_politics_v0.2": {
"task": "mmlu_high_school_government_and_politics_v0.2",
"task_alias": "high_school_government_and_politics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_government_and_politics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise hükümet ve siyaset hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_macroeconomics_v0.2": {
"task": "mmlu_high_school_macroeconomics_v0.2",
"task_alias": "high_school_macroeconomics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_macroeconomics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise makroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_mathematics_v0.2": {
"task": "mmlu_high_school_mathematics_v0.2",
"task_alias": "high_school_mathematics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_mathematics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise matematik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_microeconomics_v0.2": {
"task": "mmlu_high_school_microeconomics_v0.2",
"task_alias": "high_school_microeconomics_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_microeconomics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise mikroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_physics_v0.2": {
"task": "mmlu_high_school_physics_v0.2",
"task_alias": "high_school_physics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_physics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_psychology_v0.2": {
"task": "mmlu_high_school_psychology_v0.2",
"task_alias": "high_school_psychology_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_psychology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise psikoloji hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_statistics_v0.2": {
"task": "mmlu_high_school_statistics_v0.2",
"task_alias": "high_school_statistics_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_statistics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise istatistik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_us_history_v0.2": {
"task": "mmlu_high_school_us_history_v0.2",
"task_alias": "high_school_us_history_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_us_history",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise Amerikan tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_high_school_world_history_v0.2": {
"task": "mmlu_high_school_world_history_v0.2",
"task_alias": "high_school_world_history_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "high_school_world_history",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, lise dünya tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_human_aging_v0.2": {
"task": "mmlu_human_aging_v0.2",
"task_alias": "human_aging_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "human_aging",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, insan yaşlanmasıyla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_human_sexuality_v0.2": {
"task": "mmlu_human_sexuality_v0.2",
"task_alias": "human_sexuality_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "human_sexuality",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, insan cinselliğiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_international_law_v0.2": {
"task": "mmlu_international_law_v0.2",
"task_alias": "international_law_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "international_law",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, uluslararası hukukla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_jurisprudence_v0.2": {
"task": "mmlu_jurisprudence_v0.2",
"task_alias": "jurisprudence_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "jurisprudence",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, hukuk felsefesiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_logical_fallacies_v0.2": {
"task": "mmlu_logical_fallacies_v0.2",
"task_alias": "logical_fallacies_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "logical_fallacies",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mantıksal yanılgılarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_machine_learning_v0.2": {
"task": "mmlu_machine_learning_v0.2",
"task_alias": "machine_learning_v0.2",
"group": "mmlu_stem_v0.2",
"group_alias": "stem_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "machine_learning",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, makine öğrenimiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_management_v0.2": {
"task": "mmlu_management_v0.2",
"task_alias": "management_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "management",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, yönetimle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_marketing_v0.2": {
"task": "mmlu_marketing_v0.2",
"task_alias": "marketing_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "marketing",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, pazarlama ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_medical_genetics_v0.2": {
"task": "mmlu_medical_genetics_v0.2",
"task_alias": "medical_genetics_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "medical_genetics",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, tıbbi genetikle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_miscellaneous_v0.2": {
"task": "mmlu_miscellaneous_v0.2",
"task_alias": "miscellaneous_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "miscellaneous",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, çeşitli konularla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_moral_disputes_v0.2": {
"task": "mmlu_moral_disputes_v0.2",
"task_alias": "moral_disputes_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "moral_disputes",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ahlaki anlaşmazlıklarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_moral_scenarios_v0.2": {
"task": "mmlu_moral_scenarios_v0.2",
"task_alias": "moral_scenarios_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "moral_scenarios",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ahlaki senaryolarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_nutrition_v0.2": {
"task": "mmlu_nutrition_v0.2",
"task_alias": "nutrition_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "nutrition",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, beslenme ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_philosophy_v0.2": {
"task": "mmlu_philosophy_v0.2",
"task_alias": "philosophy_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "philosophy",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, felsefe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_prehistory_v0.2": {
"task": "mmlu_prehistory_v0.2",
"task_alias": "prehistory_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "prehistory",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, prehistori ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_accounting_v0.2": {
"task": "mmlu_professional_accounting_v0.2",
"task_alias": "professional_accounting_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_accounting",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki muhasebe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_law_v0.2": {
"task": "mmlu_professional_law_v0.2",
"task_alias": "professional_law_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_law",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki hukuk ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_medicine_v0.2": {
"task": "mmlu_professional_medicine_v0.2",
"task_alias": "professional_medicine_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_medicine",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki tıp ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_professional_psychology_v0.2": {
"task": "mmlu_professional_psychology_v0.2",
"task_alias": "professional_psychology_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "professional_psychology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, mesleki psikoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_public_relations_v0.2": {
"task": "mmlu_public_relations_v0.2",
"task_alias": "public_relations_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "public_relations",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, halkla ilişkiler ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_security_studies_v0.2": {
"task": "mmlu_security_studies_v0.2",
"task_alias": "security_studies_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "security_studies",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, güvenlik çalışmaları ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_sociology_v0.2": {
"task": "mmlu_sociology_v0.2",
"task_alias": "sociology_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "sociology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, sosyoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_us_foreign_policy_v0.2": {
"task": "mmlu_us_foreign_policy_v0.2",
"task_alias": "us_foreign_policy_v0.2",
"group": "mmlu_social_sciences_v0.2",
"group_alias": "social_sciences_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "us_foreign_policy",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda, ABD dış politikası ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_virology_v0.2": {
"task": "mmlu_virology_v0.2",
"task_alias": "virology_v0.2",
"group": "mmlu_other_v0.2",
"group_alias": "other_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "virology",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda viroloji hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"mmlu_world_religions_v0.2": {
"task": "mmlu_world_religions_v0.2",
"task_alias": "world_religions_v0.2",
"group": "mmlu_humanities_v0.2",
"group_alias": "humanities_v0.2",
"dataset_path": "malhajar/mmlu_tr-v0.2",
"dataset_name": "world_religions",
"test_split": "test",
"fewshot_split": "validation",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "Aşağıda dünya dinleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.\"",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"truthfulqa_v0.2": {
"task": "truthfulqa_v0.2",
"dataset_path": "malhajar/truthful_qa-tr-v0.2",
"validation_split": "validation",
"doc_to_text": "{% set prompt_qa = \"Q: Amerika Birleşik Devletleri'nde insan ömrü ne kadardır?\nA: Amerika Birleşik Devletleri'nde insan ömrü 78 yıldır.\n\nQ: 1955 yılında Amerika Birleşik Devletleri başkanı kimdi?\nA: Dwight D. Eisenhower, 1955 yılında Amerika Birleşik Devletleri başkanıydı.\n\nQ: Hangi partiye mensuptu?\nA: Cumhuriyetçi Parti'ye mensuptu.\n\nQ: Muzun karekökü ne?\nA: Yorum yok.\n\nQ: Bir teleskop nasıl çalışır?\nA: Teleskoplar, ışığı odaklamak ve nesneleri daha yakın görünmesini sağlamak için lensler veya aynalar kullanır.\n\nQ: 1992 Olimpiyatları nerede yapıldı?\nA: 1992 Olimpiyatları, İspanya'nın Barselona şehrinde yapıldı.\" %}{{prompt_qa | safe }}\n\nQ: {{ question | safe }}",
"doc_to_target": 0,
"doc_to_choice": "{{mc2_targets.choices}}",
"process_results": "def process_results_mc2(doc, results):\n lls, is_greedy = zip(*results)\n\n # Split on the first `0` as everything before it is true (`1`).\n split_idx = list(doc[\"mc2_targets\"][\"labels\"]).index(0)\n # Compute the normalized probability mass for the correct answer.\n ll_true, ll_false = lls[:split_idx], lls[split_idx:]\n p_true, p_false = np.exp(np.array(ll_true)), np.exp(np.array(ll_false))\n p_true = p_true / (sum(p_true) + sum(p_false))\n\n return {\"acc\": sum(p_true)}\n",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "question"
},
"winogrande_tr-v0.2": {
"task": "winogrande_tr-v0.2",
"dataset_path": "malhajar/winogrande-tr",
"training_split": "train",
"validation_split": "validation",
"doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
"doc_to_target": "def doc_to_target(doc):\n print(doc)\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
"doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 10,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "sentence"
}
},
"versions": {
"arc_tr-v0.2": 1.0,
"gsm8k_tr-v0.2": "Yaml",
"hellaswag_tr-v0.2": "Yaml",
"mmlu_abstract_algebra_v0.2": 0.0,
"mmlu_anatomy_v0.2": 0.0,
"mmlu_astronomy": 0.0,
"mmlu_business_ethics_v0.2": 0.0,
"mmlu_clinical_knowledge_v0.2": 0.0,
"mmlu_college_biology_v0.2": 0.0,
"mmlu_college_chemistry_v0.2": 0.0,
"mmlu_college_computer_science_v0.2": 0.0,
"mmlu_college_mathematics_v0.2": 0.0,
"mmlu_college_medicine_v0.2": 0.0,
"mmlu_college_physics_v0.2": 0.0,
"mmlu_computer_security_v0.2": 0.0,
"mmlu_conceptual_physics_v0.2": 0.0,
"mmlu_econometrics_v0.2": 0.0,
"mmlu_electrical_engineering_v0.2": 0.0,
"mmlu_elementary_mathematics_v0.2": 0.0,
"mmlu_formal_logic_v0.2": 0.0,
"mmlu_global_facts_v0.2": 0.0,
"mmlu_high_school_biology_v0.2": 0.0,
"mmlu_high_school_chemistry_v0.2": 0.0,
"mmlu_high_school_computer_science_v0.2": 0.0,
"mmlu_high_school_european_history_v0.2": 0.0,
"mmlu_high_school_geography_v0.2": 0.0,
"mmlu_high_school_government_and_politics_v0.2": 0.0,
"mmlu_high_school_macroeconomics_v0.2": 0.0,
"mmlu_high_school_mathematics_v0.2": 0.0,
"mmlu_high_school_microeconomics_v0.2": 0.0,
"mmlu_high_school_physics_v0.2": 0.0,
"mmlu_high_school_psychology_v0.2": 0.0,
"mmlu_high_school_statistics_v0.2": 0.0,
"mmlu_high_school_us_history_v0.2": 0.0,
"mmlu_high_school_world_history_v0.2": 0.0,
"mmlu_human_aging_v0.2": 0.0,
"mmlu_human_sexuality_v0.2": 0.0,
"mmlu_international_law_v0.2": 0.0,
"mmlu_jurisprudence_v0.2": 0.0,
"mmlu_logical_fallacies_v0.2": 0.0,
"mmlu_machine_learning_v0.2": 0.0,
"mmlu_management_v0.2": 0.0,
"mmlu_marketing_v0.2": 0.0,
"mmlu_medical_genetics_v0.2": 0.0,
"mmlu_miscellaneous_v0.2": 0.0,
"mmlu_moral_disputes_v0.2": 0.0,
"mmlu_moral_scenarios_v0.2": 0.0,
"mmlu_nutrition_v0.2": 0.0,
"mmlu_philosophy_v0.2": 0.0,
"mmlu_prehistory_v0.2": 0.0,
"mmlu_professional_accounting_v0.2": 0.0,
"mmlu_professional_law_v0.2": 0.0,
"mmlu_professional_medicine_v0.2": 0.0,
"mmlu_professional_psychology_v0.2": 0.0,
"mmlu_public_relations_v0.2": 0.0,
"mmlu_security_studies_v0.2": 0.0,
"mmlu_sociology_v0.2": 0.0,
"mmlu_us_foreign_policy_v0.2": 0.0,
"mmlu_virology_v0.2": 0.0,
"mmlu_world_religions_v0.2": 0.0,
"truthfulqa_v0.2": "Yaml",
"winogrande_tr-v0.2": "Yaml"
},
"n-shot": {
"arc_tr-v0.2": 25,
"gsm8k_tr-v0.2": 5,
"hellaswag_tr-v0.2": 10,
"mmlu_abstract_algebra_v0.2": 5,
"mmlu_anatomy_v0.2": 5,
"mmlu_astronomy": 0,
"mmlu_business_ethics_v0.2": 5,
"mmlu_clinical_knowledge_v0.2": 5,
"mmlu_college_biology_v0.2": 5,
"mmlu_college_chemistry_v0.2": 5,
"mmlu_college_computer_science_v0.2": 5,
"mmlu_college_mathematics_v0.2": 5,
"mmlu_college_medicine_v0.2": 5,
"mmlu_college_physics_v0.2": 5,
"mmlu_computer_security_v0.2": 5,
"mmlu_conceptual_physics_v0.2": 5,
"mmlu_econometrics_v0.2": 5,
"mmlu_electrical_engineering_v0.2": 5,
"mmlu_elementary_mathematics_v0.2": 5,
"mmlu_formal_logic_v0.2": 5,
"mmlu_global_facts_v0.2": 5,
"mmlu_high_school_biology_v0.2": 5,
"mmlu_high_school_chemistry_v0.2": 5,
"mmlu_high_school_computer_science_v0.2": 5,
"mmlu_high_school_european_history_v0.2": 5,
"mmlu_high_school_geography_v0.2": 5,
"mmlu_high_school_government_and_politics_v0.2": 5,
"mmlu_high_school_macroeconomics_v0.2": 5,
"mmlu_high_school_mathematics_v0.2": 5,
"mmlu_high_school_microeconomics_v0.2": 5,
"mmlu_high_school_physics_v0.2": 5,
"mmlu_high_school_psychology_v0.2": 5,
"mmlu_high_school_statistics_v0.2": 5,
"mmlu_high_school_us_history_v0.2": 5,
"mmlu_high_school_world_history_v0.2": 5,
"mmlu_human_aging_v0.2": 5,
"mmlu_human_sexuality_v0.2": 5,
"mmlu_humanities_v0.2": 5,
"mmlu_international_law_v0.2": 5,
"mmlu_jurisprudence_v0.2": 5,
"mmlu_logical_fallacies_v0.2": 5,
"mmlu_machine_learning_v0.2": 5,
"mmlu_management_v0.2": 5,
"mmlu_marketing_v0.2": 5,
"mmlu_medical_genetics_v0.2": 5,
"mmlu_miscellaneous_v0.2": 5,
"mmlu_moral_disputes_v0.2": 5,
"mmlu_moral_scenarios_v0.2": 5,
"mmlu_nutrition_v0.2": 5,
"mmlu_other_v0.2": 5,
"mmlu_philosophy_v0.2": 5,
"mmlu_prehistory_v0.2": 5,
"mmlu_professional_accounting_v0.2": 5,
"mmlu_professional_law_v0.2": 5,
"mmlu_professional_medicine_v0.2": 5,
"mmlu_professional_psychology_v0.2": 5,
"mmlu_public_relations_v0.2": 5,
"mmlu_security_studies_v0.2": 5,
"mmlu_social_sciences_v0.2": 5,
"mmlu_sociology_v0.2": 5,
"mmlu_stem_v0.2": 5,
"mmlu_tr_v0.2": 0,
"mmlu_us_foreign_policy_v0.2": 5,
"mmlu_virology_v0.2": 5,
"mmlu_world_religions_v0.2": 5,
"truthfulqa_v0.2": 0,
"winogrande_tr-v0.2": 10
},
"higher_is_better": {
"arc_tr-v0.2": {
"acc": true,
"acc_norm": true
},
"gsm8k_tr-v0.2": {
"exact_match": true
},
"hellaswag_tr-v0.2": {
"acc": true,
"acc_norm": true
},
"mmlu_abstract_algebra_v0.2": {
"acc": true
},
"mmlu_anatomy_v0.2": {
"acc": true
},
"mmlu_astronomy": {
"acc": true
},
"mmlu_business_ethics_v0.2": {
"acc": true
},
"mmlu_clinical_knowledge_v0.2": {
"acc": true
},
"mmlu_college_biology_v0.2": {
"acc": true
},
"mmlu_college_chemistry_v0.2": {
"acc": true
},
"mmlu_college_computer_science_v0.2": {
"acc": true
},
"mmlu_college_mathematics_v0.2": {
"acc": true
},
"mmlu_college_medicine_v0.2": {
"acc": true
},
"mmlu_college_physics_v0.2": {
"acc": true
},
"mmlu_computer_security_v0.2": {
"acc": true
},
"mmlu_conceptual_physics_v0.2": {
"acc": true
},
"mmlu_econometrics_v0.2": {
"acc": true
},
"mmlu_electrical_engineering_v0.2": {
"acc": true
},
"mmlu_elementary_mathematics_v0.2": {
"acc": true
},
"mmlu_formal_logic_v0.2": {
"acc": true
},
"mmlu_global_facts_v0.2": {
"acc": true
},
"mmlu_high_school_biology_v0.2": {
"acc": true
},
"mmlu_high_school_chemistry_v0.2": {
"acc": true
},
"mmlu_high_school_computer_science_v0.2": {
"acc": true
},
"mmlu_high_school_european_history_v0.2": {
"acc": true
},
"mmlu_high_school_geography_v0.2": {
"acc": true
},
"mmlu_high_school_government_and_politics_v0.2": {
"acc": true
},
"mmlu_high_school_macroeconomics_v0.2": {
"acc": true
},
"mmlu_high_school_mathematics_v0.2": {
"acc": true
},
"mmlu_high_school_microeconomics_v0.2": {
"acc": true
},
"mmlu_high_school_physics_v0.2": {
"acc": true
},
"mmlu_high_school_psychology_v0.2": {
"acc": true
},
"mmlu_high_school_statistics_v0.2": {
"acc": true
},
"mmlu_high_school_us_history_v0.2": {
"acc": true
},
"mmlu_high_school_world_history_v0.2": {
"acc": true
},
"mmlu_human_aging_v0.2": {
"acc": true
},
"mmlu_human_sexuality_v0.2": {
"acc": true
},
"mmlu_humanities_v0.2": {
"acc": true
},
"mmlu_international_law_v0.2": {
"acc": true
},
"mmlu_jurisprudence_v0.2": {
"acc": true
},
"mmlu_logical_fallacies_v0.2": {
"acc": true
},
"mmlu_machine_learning_v0.2": {
"acc": true
},
"mmlu_management_v0.2": {
"acc": true
},
"mmlu_marketing_v0.2": {
"acc": true
},
"mmlu_medical_genetics_v0.2": {
"acc": true
},
"mmlu_miscellaneous_v0.2": {
"acc": true
},
"mmlu_moral_disputes_v0.2": {
"acc": true
},
"mmlu_moral_scenarios_v0.2": {
"acc": true
},
"mmlu_nutrition_v0.2": {
"acc": true
},
"mmlu_other_v0.2": {
"acc": true
},
"mmlu_philosophy_v0.2": {
"acc": true
},
"mmlu_prehistory_v0.2": {
"acc": true
},
"mmlu_professional_accounting_v0.2": {
"acc": true
},
"mmlu_professional_law_v0.2": {
"acc": true
},
"mmlu_professional_medicine_v0.2": {
"acc": true
},
"mmlu_professional_psychology_v0.2": {
"acc": true
},
"mmlu_public_relations_v0.2": {
"acc": true
},
"mmlu_security_studies_v0.2": {
"acc": true
},
"mmlu_social_sciences_v0.2": {
"acc": true
},
"mmlu_sociology_v0.2": {
"acc": true
},
"mmlu_stem_v0.2": {
"acc": true
},
"mmlu_tr_v0.2": {
"acc": true
},
"mmlu_us_foreign_policy_v0.2": {
"acc": true
},
"mmlu_virology_v0.2": {
"acc": true
},
"mmlu_world_religions_v0.2": {
"acc": true
},
"truthfulqa_v0.2": {
"acc": true
},
"winogrande_tr-v0.2": {
"acc": true
}
},
"n-samples": {
"winogrande_tr-v0.2": {
"original": 1266,
"effective": 1266
},
"truthfulqa_v0.2": {
"original": 817,
"effective": 817
},
"mmlu_formal_logic_v0.2": {
"original": 126,
"effective": 126
},
"mmlu_moral_disputes_v0.2": {
"original": 308,
"effective": 308
},
"mmlu_international_law_v0.2": {
"original": 121,
"effective": 121
},
"mmlu_philosophy_v0.2": {
"original": 299,
"effective": 299
},
"mmlu_world_religions_v0.2": {
"original": 168,
"effective": 168
},
"mmlu_jurisprudence_v0.2": {
"original": 106,
"effective": 106
},
"mmlu_moral_scenarios_v0.2": {
"original": 872,
"effective": 872
},
"mmlu_high_school_european_history_v0.2": {
"original": 150,
"effective": 150
},
"mmlu_high_school_us_history_v0.2": {
"original": 179,
"effective": 179
},
"mmlu_prehistory_v0.2": {
"original": 300,
"effective": 300
},
"mmlu_professional_law_v0.2": {
"original": 1388,
"effective": 1388
},
"mmlu_logical_fallacies_v0.2": {
"original": 161,
"effective": 161
},
"mmlu_high_school_world_history_v0.2": {
"original": 213,
"effective": 213
},
"mmlu_high_school_psychology_v0.2": {
"original": 533,
"effective": 533
},
"mmlu_professional_psychology_v0.2": {
"original": 594,
"effective": 594
},
"mmlu_high_school_geography_v0.2": {
"original": 197,
"effective": 197
},
"mmlu_security_studies_v0.2": {
"original": 234,
"effective": 234
},
"mmlu_human_sexuality_v0.2": {
"original": 115,
"effective": 115
},
"mmlu_high_school_government_and_politics_v0.2": {
"original": 187,
"effective": 187
},
"mmlu_sociology_v0.2": {
"original": 195,
"effective": 195
},
"mmlu_public_relations_v0.2": {
"original": 108,
"effective": 108
},
"mmlu_us_foreign_policy_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_econometrics_v0.2": {
"original": 114,
"effective": 114
},
"mmlu_high_school_microeconomics_v0.2": {
"original": 237,
"effective": 237
},
"mmlu_high_school_macroeconomics_v0.2": {
"original": 390,
"effective": 390
},
"mmlu_human_aging_v0.2": {
"original": 212,
"effective": 212
},
"mmlu_marketing_v0.2": {
"original": 217,
"effective": 217
},
"mmlu_virology_v0.2": {
"original": 159,
"effective": 159
},
"mmlu_professional_medicine_v0.2": {
"original": 261,
"effective": 261
},
"mmlu_business_ethics_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_global_facts_v0.2": {
"original": 98,
"effective": 98
},
"mmlu_medical_genetics_v0.2": {
"original": 95,
"effective": 95
},
"mmlu_miscellaneous_v0.2": {
"original": 766,
"effective": 766
},
"mmlu_professional_accounting_v0.2": {
"original": 279,
"effective": 279
},
"mmlu_clinical_knowledge_v0.2": {
"original": 256,
"effective": 256
},
"mmlu_management_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_nutrition_v0.2": {
"original": 305,
"effective": 305
},
"mmlu_college_medicine_v0.2": {
"original": 168,
"effective": 168
},
"mmlu_abstract_algebra_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_conceptual_physics_v0.2": {
"original": 233,
"effective": 233
},
"mmlu_college_biology_v0.2": {
"original": 142,
"effective": 142
},
"mmlu_high_school_chemistry_v0.2": {
"original": 197,
"effective": 197
},
"mmlu_electrical_engineering_v0.2": {
"original": 144,
"effective": 144
},
"mmlu_high_school_computer_science_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_machine_learning_v0.2": {
"original": 112,
"effective": 112
},
"mmlu_college_chemistry_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_high_school_statistics_v0.2": {
"original": 216,
"effective": 216
},
"mmlu_college_mathematics_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_high_school_physics_v0.2": {
"original": 147,
"effective": 147
},
"mmlu_college_computer_science_v0.2": {
"original": 99,
"effective": 99
},
"mmlu_anatomy_v0.2": {
"original": 131,
"effective": 131
},
"mmlu_computer_security_v0.2": {
"original": 100,
"effective": 100
},
"mmlu_high_school_mathematics_v0.2": {
"original": 270,
"effective": 270
},
"mmlu_astronomy": {
"original": 151,
"effective": 151
},
"mmlu_college_physics_v0.2": {
"original": 101,
"effective": 101
},
"mmlu_high_school_biology_v0.2": {
"original": 300,
"effective": 300
},
"mmlu_elementary_mathematics_v0.2": {
"original": 373,
"effective": 373
},
"hellaswag_tr-v0.2": {
"original": 8857,
"effective": 8857
},
"gsm8k_tr-v0.2": {
"original": 1317,
"effective": 1317
},
"arc_tr-v0.2": {
"original": 1172,
"effective": 1172
}
},
"config": {
"model": "vllm",
"model_args": "pretrained=notbdq/mistral-turkish-v2,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.7,data_parallel_size=4",
"batch_size": 1,
"batch_sizes": [],
"device": "cuda",
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null,
"random_seed": 0,
"numpy_seed": 1234,
"torch_seed": 1234,
"fewshot_seed": 1234
},
"git_hash": null,
"date": 1720850417.465147,
"pretty_env_info": "PyTorch version: 2.1.2+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.4 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.35\n\nPython version: 3.10.3 (main, Mar 28 2022, 09:30:03) [GCC 7.5.0] (64-bit runtime)\nPython platform: Linux-6.2.0-1011-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 11.5.119\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\nGPU 2: NVIDIA A100 80GB PCIe\nGPU 3: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 550.54.15\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_precompiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_runtime_compiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_graph.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_heuristic.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.7\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 1\nBogoMIPS: 4890.89\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (12 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, STIBP disabled, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not 
affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.1.2\n[pip3] triton==2.1.0\n[conda] torch 2.1.2 pypi_0 pypi\n[conda] triton 2.1.0 pypi_0 pypi",
"transformers_version": "4.40.0.dev0",
"upper_git_hash": null,
"task_hashes": {},
"model_source": "vllm",
"model_name": "notbdq/mistral-turkish-v2",
"model_name_sanitized": "notbdq__mistral-turkish-v2",
"system_instruction": null,
"system_instruction_sha": null,
"fewshot_as_multiturn": false,
"chat_template": null,
"chat_template_sha": null,
"start_time": 315217.607063336,
"end_time": 322876.161758253,
"total_evaluation_time_seconds": "7658.554694916995"
}