|
{ |
|
"results": { |
|
"winogrande_tr-v0.2": { |
|
"acc,none": 0.566350710900474, |
|
"acc_stderr,none": 0.013933710456492776, |
|
"alias": "winogrande_tr-v0.2" |
|
}, |
|
"truthfulqa_v0.2": { |
|
"acc,none": 0.4287000745636075, |
|
"acc_stderr,none": 0.015893542036139328, |
|
"alias": "truthfulqa_v0.2" |
|
}, |
|
"mmlu_tr_v0.2": { |
|
"acc,none": 0.3783184204688309, |
|
"acc_stderr,none": 0.00413170443657757, |
|
"alias": "mmlu_tr_v0.2" |
|
}, |
|
"mmlu_humanities_v0.2": { |
|
"alias": " - humanities_v0.2", |
|
"acc,none": 0.3511728535641084, |
|
"acc_stderr,none": 0.007163272162171215 |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"alias": " - formal_logic_v0.2", |
|
"acc,none": 0.30952380952380953, |
|
"acc_stderr,none": 0.04134913018303316 |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"alias": " - high_school_european_history_v0.2", |
|
"acc,none": 0.4266666666666667, |
|
"acc_stderr,none": 0.04051863621453781 |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"alias": " - high_school_us_history_v0.2", |
|
"acc,none": 0.4022346368715084, |
|
"acc_stderr,none": 0.03675319551744533 |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"alias": " - high_school_world_history_v0.2", |
|
"acc,none": 0.43661971830985913, |
|
"acc_stderr,none": 0.03406313089378545 |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"alias": " - international_law_v0.2", |
|
"acc,none": 0.5041322314049587, |
|
"acc_stderr,none": 0.045641987674327526 |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"alias": " - jurisprudence_v0.2", |
|
"acc,none": 0.39622641509433965, |
|
"acc_stderr,none": 0.047732492983673595 |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"alias": " - logical_fallacies_v0.2", |
|
"acc,none": 0.35403726708074534, |
|
"acc_stderr,none": 0.03780665290318812 |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"alias": " - moral_disputes_v0.2", |
|
"acc,none": 0.40584415584415584, |
|
"acc_stderr,none": 0.028025969634932892 |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"alias": " - moral_scenarios_v0.2", |
|
"acc,none": 0.30045871559633025, |
|
"acc_stderr,none": 0.015534239646632418 |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"alias": " - philosophy_v0.2", |
|
"acc,none": 0.4080267558528428, |
|
"acc_stderr,none": 0.028469984333030093 |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"alias": " - prehistory_v0.2", |
|
"acc,none": 0.35, |
|
"acc_stderr,none": 0.027583864218368545 |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"alias": " - professional_law_v0.2", |
|
"acc,none": 0.3076368876080692, |
|
"acc_stderr,none": 0.01239218632432633 |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"alias": " - world_religions_v0.2", |
|
"acc,none": 0.43452380952380953, |
|
"acc_stderr,none": 0.038357978854480244 |
|
}, |
|
"mmlu_other_v0.2": { |
|
"alias": " - other_v0.2", |
|
"acc,none": 0.43331121433311215, |
|
"acc_stderr,none": 0.008939504025205647 |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"alias": " - business_ethics_v0.2", |
|
"acc,none": 0.5050505050505051, |
|
"acc_stderr,none": 0.0505050505050505 |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"alias": " - clinical_knowledge_v0.2", |
|
"acc,none": 0.38671875, |
|
"acc_stderr,none": 0.030497017430410063 |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"alias": " - college_medicine_v0.2", |
|
"acc,none": 0.34523809523809523, |
|
"acc_stderr,none": 0.036791104173620234 |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"alias": " - global_facts_v0.2", |
|
"acc,none": 0.3163265306122449, |
|
"acc_stderr,none": 0.04721786057356902 |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"alias": " - human_aging_v0.2", |
|
"acc,none": 0.3867924528301887, |
|
"acc_stderr,none": 0.03352752644747648 |
|
}, |
|
"mmlu_management_v0.2": { |
|
"alias": " - management_v0.2", |
|
"acc,none": 0.5454545454545454, |
|
"acc_stderr,none": 0.05029848501568076 |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"alias": " - marketing_v0.2", |
|
"acc,none": 0.5391705069124424, |
|
"acc_stderr,none": 0.03391613235916986 |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"alias": " - medical_genetics_v0.2", |
|
"acc,none": 0.4421052631578947, |
|
"acc_stderr,none": 0.051224183891818126 |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"alias": " - miscellaneous_v0.2", |
|
"acc,none": 0.5169712793733682, |
|
"acc_stderr,none": 0.01806712160907662 |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"alias": " - nutrition_v0.2", |
|
"acc,none": 0.4, |
|
"acc_stderr,none": 0.028097574347450745 |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"alias": " - professional_accounting_v0.2", |
|
"acc,none": 0.34408602150537637, |
|
"acc_stderr,none": 0.028492762637163934 |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"alias": " - professional_medicine_v0.2", |
|
"acc,none": 0.36015325670498083, |
|
"acc_stderr,none": 0.029771106823548582 |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"alias": " - virology_v0.2", |
|
"acc,none": 0.4088050314465409, |
|
"acc_stderr,none": 0.03911064131397898 |
|
}, |
|
"mmlu_social_sciences_v0.2": { |
|
"alias": " - social_sciences_v0.2", |
|
"acc,none": 0.4092574092574093, |
|
"acc_stderr,none": 0.008932396857722518 |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"alias": " - econometrics_v0.2", |
|
"acc,none": 0.3157894736842105, |
|
"acc_stderr,none": 0.043727482902780064 |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"alias": " - high_school_geography_v0.2", |
|
"acc,none": 0.5076142131979695, |
|
"acc_stderr,none": 0.0357101443139815 |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"alias": " - high_school_government_and_politics_v0.2", |
|
"acc,none": 0.34759358288770054, |
|
"acc_stderr,none": 0.03491712800447087 |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"alias": " - high_school_macroeconomics_v0.2", |
|
"acc,none": 0.36153846153846153, |
|
"acc_stderr,none": 0.024359581465397 |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"alias": " - high_school_microeconomics_v0.2", |
|
"acc,none": 0.4008438818565401, |
|
"acc_stderr,none": 0.03190080389473236 |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"alias": " - high_school_psychology_v0.2", |
|
"acc,none": 0.46716697936210133, |
|
"acc_stderr,none": 0.02163096120434016 |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"alias": " - human_sexuality_v0.2", |
|
"acc,none": 0.4608695652173913, |
|
"acc_stderr,none": 0.04668566114758418 |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"alias": " - professional_psychology_v0.2", |
|
"acc,none": 0.35353535353535354, |
|
"acc_stderr,none": 0.01963186088430619 |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"alias": " - public_relations_v0.2", |
|
"acc,none": 0.4351851851851852, |
|
"acc_stderr,none": 0.04792898170907062 |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"alias": " - security_studies_v0.2", |
|
"acc,none": 0.41452991452991456, |
|
"acc_stderr,none": 0.03227396567623778 |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"alias": " - sociology_v0.2", |
|
"acc,none": 0.46153846153846156, |
|
"acc_stderr,none": 0.0357915435254457 |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"alias": " - us_foreign_policy_v0.2", |
|
"acc,none": 0.46464646464646464, |
|
"acc_stderr,none": 0.05038121284299014 |
|
}, |
|
"mmlu_stem_v0.2": { |
|
"alias": " - stem_v0.2", |
|
"acc,none": 0.33354735152487963, |
|
"acc_stderr,none": 0.008414114823581998 |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"alias": " - abstract_algebra_v0.2", |
|
"acc,none": 0.27, |
|
"acc_stderr,none": 0.04461960433384741 |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"alias": " - anatomy_v0.2", |
|
"acc,none": 0.35877862595419846, |
|
"acc_stderr,none": 0.04206739313864908 |
|
}, |
|
"mmlu_astronomy": { |
|
"alias": " - astronomy", |
|
"acc,none": 0.3973509933774834, |
|
"acc_stderr,none": 0.03995524007681681 |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"alias": " - college_biology_v0.2", |
|
"acc,none": 0.39436619718309857, |
|
"acc_stderr,none": 0.04115715424330713 |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"alias": " - college_chemistry_v0.2", |
|
"acc,none": 0.30303030303030304, |
|
"acc_stderr,none": 0.046423399544431185 |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"alias": " - college_computer_science_v0.2", |
|
"acc,none": 0.29292929292929293, |
|
"acc_stderr,none": 0.04597267625418178 |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"alias": " - college_mathematics_v0.2", |
|
"acc,none": 0.34, |
|
"acc_stderr,none": 0.04760952285695236 |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"alias": " - college_physics_v0.2", |
|
"acc,none": 0.3564356435643564, |
|
"acc_stderr,none": 0.04789460048494186 |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"alias": " - computer_security_v0.2", |
|
"acc,none": 0.44, |
|
"acc_stderr,none": 0.04988876515698589 |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"alias": " - conceptual_physics_v0.2", |
|
"acc,none": 0.31759656652360513, |
|
"acc_stderr,none": 0.030564303853826955 |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"alias": " - electrical_engineering_v0.2", |
|
"acc,none": 0.4027777777777778, |
|
"acc_stderr,none": 0.04101405519842426 |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"alias": " - elementary_mathematics_v0.2", |
|
"acc,none": 0.32171581769436997, |
|
"acc_stderr,none": 0.024219801634933303 |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"alias": " - high_school_biology_v0.2", |
|
"acc,none": 0.3933333333333333, |
|
"acc_stderr,none": 0.02825009084676088 |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"alias": " - high_school_chemistry_v0.2", |
|
"acc,none": 0.3401015228426396, |
|
"acc_stderr,none": 0.033838789254010404 |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"alias": " - high_school_computer_science_v0.2", |
|
"acc,none": 0.42, |
|
"acc_stderr,none": 0.04960449637488584 |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"alias": " - high_school_mathematics_v0.2", |
|
"acc,none": 0.25925925925925924, |
|
"acc_stderr,none": 0.026719240783712173 |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"alias": " - high_school_physics_v0.2", |
|
"acc,none": 0.29931972789115646, |
|
"acc_stderr,none": 0.0379010453091039 |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"alias": " - high_school_statistics_v0.2", |
|
"acc,none": 0.2361111111111111, |
|
"acc_stderr,none": 0.02896370257079105 |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"alias": " - machine_learning_v0.2", |
|
"acc,none": 0.2857142857142857, |
|
"acc_stderr,none": 0.042878587513404565 |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"acc,none": 0.3533927966580106, |
|
"acc_stderr,none": 0.005079609324171046, |
|
"acc_norm,none": 0.4213616348650785, |
|
"acc_norm_stderr,none": 0.005247014976327331, |
|
"alias": "hellaswag_tr-v0.2" |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"exact_match,strict-match": 0.04859529233105543, |
|
"exact_match_stderr,strict-match": 0.005927230313578962, |
|
"exact_match,flexible-extract": 0.07365223993925589, |
|
"exact_match_stderr,flexible-extract": 0.0072003243917122565, |
|
"alias": "gsm8k_tr-v0.2" |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"exact_match,strict-match": 0.02654867256637168, |
|
"exact_match_stderr,strict-match": 0.00874420189153513, |
|
"exact_match,flexible-extract": 0.0855457227138643, |
|
"exact_match_stderr,flexible-extract": 0.015213242833548139, |
|
"alias": "gsm1k_tr-v0.2" |
|
}, |
|
"arc_tr-v0.2": { |
|
"acc,none": 0.33447098976109213, |
|
"acc_stderr,none": 0.013787460322441382, |
|
"acc_norm,none": 0.3626279863481229, |
|
"acc_norm_stderr,none": 0.014049106564955005, |
|
"alias": "arc_tr-v0.2" |
|
} |
|
}, |
|
"groups": { |
|
"mmlu_tr_v0.2": { |
|
"acc,none": 0.3783184204688309, |
|
"acc_stderr,none": 0.00413170443657757, |
|
"alias": "mmlu_tr_v0.2" |
|
}, |
|
"mmlu_humanities_v0.2": { |
|
"alias": " - humanities_v0.2", |
|
"acc,none": 0.3511728535641084, |
|
"acc_stderr,none": 0.007163272162171215 |
|
}, |
|
"mmlu_other_v0.2": { |
|
"alias": " - other_v0.2", |
|
"acc,none": 0.43331121433311215, |
|
"acc_stderr,none": 0.008939504025205647 |
|
}, |
|
"mmlu_social_sciences_v0.2": { |
|
"alias": " - social_sciences_v0.2", |
|
"acc,none": 0.4092574092574093, |
|
"acc_stderr,none": 0.008932396857722518 |
|
}, |
|
"mmlu_stem_v0.2": { |
|
"alias": " - stem_v0.2", |
|
"acc,none": 0.33354735152487963, |
|
"acc_stderr,none": 0.008414114823581998 |
|
} |
|
}, |
|
"group_subtasks": { |
|
"arc_tr-v0.2": [], |
|
"gsm1k_tr-v0.2": [], |
|
"gsm8k_tr-v0.2": [], |
|
"hellaswag_tr-v0.2": [], |
|
"mmlu_stem_v0.2": [ |
|
"mmlu_abstract_algebra_v0.2", |
|
"mmlu_conceptual_physics_v0.2", |
|
"mmlu_college_biology_v0.2", |
|
"mmlu_high_school_chemistry_v0.2", |
|
"mmlu_electrical_engineering_v0.2", |
|
"mmlu_high_school_computer_science_v0.2", |
|
"mmlu_machine_learning_v0.2", |
|
"mmlu_college_chemistry_v0.2", |
|
"mmlu_high_school_statistics_v0.2", |
|
"mmlu_college_mathematics_v0.2", |
|
"mmlu_high_school_physics_v0.2", |
|
"mmlu_college_computer_science_v0.2", |
|
"mmlu_anatomy_v0.2", |
|
"mmlu_computer_security_v0.2", |
|
"mmlu_high_school_mathematics_v0.2", |
|
"mmlu_astronomy", |
|
"mmlu_college_physics_v0.2", |
|
"mmlu_high_school_biology_v0.2", |
|
"mmlu_elementary_mathematics_v0.2" |
|
], |
|
"mmlu_other_v0.2": [ |
|
"mmlu_human_aging_v0.2", |
|
"mmlu_marketing_v0.2", |
|
"mmlu_virology_v0.2", |
|
"mmlu_professional_medicine_v0.2", |
|
"mmlu_business_ethics_v0.2", |
|
"mmlu_global_facts_v0.2", |
|
"mmlu_medical_genetics_v0.2", |
|
"mmlu_miscellaneous_v0.2", |
|
"mmlu_professional_accounting_v0.2", |
|
"mmlu_clinical_knowledge_v0.2", |
|
"mmlu_management_v0.2", |
|
"mmlu_nutrition_v0.2", |
|
"mmlu_college_medicine_v0.2" |
|
], |
|
"mmlu_social_sciences_v0.2": [ |
|
"mmlu_high_school_psychology_v0.2", |
|
"mmlu_professional_psychology_v0.2", |
|
"mmlu_high_school_geography_v0.2", |
|
"mmlu_security_studies_v0.2", |
|
"mmlu_human_sexuality_v0.2", |
|
"mmlu_high_school_government_and_politics_v0.2", |
|
"mmlu_sociology_v0.2", |
|
"mmlu_public_relations_v0.2", |
|
"mmlu_us_foreign_policy_v0.2", |
|
"mmlu_econometrics_v0.2", |
|
"mmlu_high_school_microeconomics_v0.2", |
|
"mmlu_high_school_macroeconomics_v0.2" |
|
], |
|
"mmlu_humanities_v0.2": [ |
|
"mmlu_formal_logic_v0.2", |
|
"mmlu_moral_disputes_v0.2", |
|
"mmlu_international_law_v0.2", |
|
"mmlu_philosophy_v0.2", |
|
"mmlu_world_religions_v0.2", |
|
"mmlu_jurisprudence_v0.2", |
|
"mmlu_moral_scenarios_v0.2", |
|
"mmlu_high_school_european_history_v0.2", |
|
"mmlu_high_school_us_history_v0.2", |
|
"mmlu_prehistory_v0.2", |
|
"mmlu_professional_law_v0.2", |
|
"mmlu_logical_fallacies_v0.2", |
|
"mmlu_high_school_world_history_v0.2" |
|
], |
|
"mmlu_tr_v0.2": [ |
|
"mmlu_humanities_v0.2", |
|
"mmlu_social_sciences_v0.2", |
|
"mmlu_other_v0.2", |
|
"mmlu_stem_v0.2" |
|
], |
|
"truthfulqa_v0.2": [], |
|
"winogrande_tr-v0.2": [] |
|
}, |
|
"configs": { |
|
"arc_tr-v0.2": { |
|
"task": "arc_tr-v0.2", |
|
"group": [ |
|
"ai2_arc" |
|
], |
|
"dataset_path": "malhajar/arc-tr-v0.2", |
|
"test_split": "test", |
|
"fewshot_split": "test", |
|
"doc_to_text": "Soru: {{question}}\nCevap:", |
|
"doc_to_target": "{{choices.label.index(answerKey)}}", |
|
"doc_to_choice": "{{choices.text}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 25, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "Soru: {{question}}\nCevap:", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"task": "gsm1k_tr-v0.2", |
|
"group": [ |
|
"math_word_problems" |
|
], |
|
"dataset_path": "malhajar/gsm1k_tr-v0.2", |
|
"test_split": "test", |
|
"fewshot_split": "test", |
|
"doc_to_text": "Soru: {{question}}\nCevap:", |
|
"doc_to_target": "{{answer}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "exact_match", |
|
"aggregation": "mean", |
|
"higher_is_better": true, |
|
"ignore_case": true, |
|
"ignore_punctuation": false, |
|
"regexes_to_ignore": [ |
|
",", |
|
"\\$", |
|
"(?s).*#### ", |
|
"\\.$" |
|
] |
|
} |
|
], |
|
"output_type": "generate_until", |
|
"generation_kwargs": { |
|
"until": [ |
|
"Question:", |
|
"</s>", |
|
"<|im_end|>" |
|
], |
|
"do_sample": false, |
|
"temperature": 0.0 |
|
}, |
|
"repeats": 1, |
|
"filter_list": [ |
|
{ |
|
"name": "strict-match", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"regex_pattern": "#### (\\-?[0-9\\.\\,]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
}, |
|
{ |
|
"name": "flexible-extract", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"group_select": -1, |
|
"regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
} |
|
], |
|
"should_decontaminate": false |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"task": "gsm8k_tr-v0.2", |
|
"group": [ |
|
"math_word_problems" |
|
], |
|
"dataset_path": "malhajar/gsm8k_tr-v0.2", |
|
"test_split": "test", |
|
"fewshot_split": "test", |
|
"doc_to_text": "Soru: {{question}}\nCevap:", |
|
"doc_to_target": "{{answer}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "exact_match", |
|
"aggregation": "mean", |
|
"higher_is_better": true, |
|
"ignore_case": true, |
|
"ignore_punctuation": false, |
|
"regexes_to_ignore": [ |
|
",", |
|
"\\$", |
|
"(?s).*#### ", |
|
"\\.$" |
|
] |
|
} |
|
], |
|
"output_type": "generate_until", |
|
"generation_kwargs": { |
|
"until": [ |
|
"Question:", |
|
"</s>", |
|
"<|im_end|>" |
|
], |
|
"do_sample": false, |
|
"temperature": 0.0 |
|
}, |
|
"repeats": 1, |
|
"filter_list": [ |
|
{ |
|
"name": "strict-match", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"regex_pattern": "#### (\\-?[0-9\\.\\,]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
}, |
|
{ |
|
"name": "flexible-extract", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"group_select": -1, |
|
"regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
} |
|
], |
|
"should_decontaminate": false |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"task": "hellaswag_tr-v0.2", |
|
"group": [ |
|
"multiple_choice" |
|
], |
|
"dataset_path": "malhajar/hellaswag_tr-v0.2", |
|
"validation_split": "validation", |
|
"fewshot_split": "validation", |
|
"process_docs": "def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:\n def _process_doc(doc):\n ctx = doc[\"ctx_a\"] + \" \" + doc[\"ctx_b\"].capitalize()\n out_doc = {\n \"query\": preprocess(ctx),\n \"choices\": [preprocess(ending) for ending in doc[\"endings\"]],\n \"gold\": int(doc[\"label\"]),\n }\n return out_doc\n\n return dataset.map(_process_doc)\n", |
|
"doc_to_text": "{{query}}", |
|
"doc_to_target": "{{label}}", |
|
"doc_to_choice": "{{choices}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 10, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"task": "mmlu_abstract_algebra_v0.2", |
|
"task_alias": "abstract_algebra_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "abstract_algebra", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda soyut cebir hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"task": "mmlu_anatomy_v0.2", |
|
"task_alias": "anatomy_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "anatomy", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda anatomiyi konu alan çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_astronomy": { |
|
"task": "mmlu_astronomy", |
|
"task_alias": "astronomy", |
|
"group": "mmlu_stem", |
|
"dataset_path": "malhajar/mmlu-tr", |
|
"dataset_name": "astronomy", |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"task": "mmlu_business_ethics_v0.2", |
|
"task_alias": "business_ethics_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "business_ethics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda iş etiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"task": "mmlu_clinical_knowledge_v0.2", |
|
"task_alias": "clinical_knowledge_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "clinical_knowledge", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda klinik bilgi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"task": "mmlu_college_biology_v0.2", |
|
"task_alias": "college_biology_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_biology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite biyolojisi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"task": "mmlu_college_chemistry_v0.2", |
|
"task_alias": "college_chemistry_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_chemistry", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite kimyası hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"task": "mmlu_college_computer_science_v0.2", |
|
"task_alias": "college_computer_science_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_computer_science", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite bilgisayar bilimleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"task": "mmlu_college_mathematics_v0.2", |
|
"task_alias": "college_mathematics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_mathematics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite matematiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"task": "mmlu_college_medicine_v0.2", |
|
"task_alias": "college_medicine_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_medicine", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite tıbbı hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"task": "mmlu_college_physics_v0.2", |
|
"task_alias": "college_physics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_physics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite fizik hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"task": "mmlu_computer_security_v0.2", |
|
"task_alias": "computer_security_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "computer_security", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda bilgisayar güvenliği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"task": "mmlu_conceptual_physics_v0.2", |
|
"task_alias": "conceptual_physics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "conceptual_physics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, kavramsal fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"task": "mmlu_econometrics_v0.2", |
|
"task_alias": "econometrics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "econometrics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ekonometri hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"task": "mmlu_electrical_engineering_v0.2", |
|
"task_alias": "electrical_engineering_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "electrical_engineering", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, elektrik mühendisliği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"task": "mmlu_elementary_mathematics_v0.2", |
|
"task_alias": "elementary_mathematics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "elementary_mathematics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ilköğretim matematiği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"task": "mmlu_formal_logic_v0.2", |
|
"task_alias": "formal_logic_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "formal_logic", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, formal mantık hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"task": "mmlu_global_facts_v0.2", |
|
"task_alias": "global_facts_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "global_facts", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, küresel gerçekler hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"task": "mmlu_high_school_biology_v0.2", |
|
"task_alias": "high_school_biology_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_biology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise biyolojisi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"task": "mmlu_high_school_chemistry_v0.2", |
|
"task_alias": "high_school_chemistry_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_chemistry", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise kimyası hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"task": "mmlu_high_school_computer_science_v0.2", |
|
"task_alias": "high_school_computer_science_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_computer_science", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise bilgisayar bilimi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"task": "mmlu_high_school_european_history_v0.2", |
|
"task_alias": "high_school_european_history_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_european_history", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise Avrupa tarihi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"task": "mmlu_high_school_geography_v0.2", |
|
"task_alias": "high_school_geography_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_geography", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise coğrafya hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"task": "mmlu_high_school_government_and_politics_v0.2", |
|
"task_alias": "high_school_government_and_politics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_government_and_politics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise hükümet ve siyaset hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"task": "mmlu_high_school_macroeconomics_v0.2", |
|
"task_alias": "high_school_macroeconomics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_macroeconomics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise makroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"task": "mmlu_high_school_mathematics_v0.2", |
|
"task_alias": "high_school_mathematics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_mathematics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise matematik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"task": "mmlu_high_school_microeconomics_v0.2", |
|
"task_alias": "high_school_microeconomics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_microeconomics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise mikroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"task": "mmlu_high_school_physics_v0.2", |
|
"task_alias": "high_school_physics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_physics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"task": "mmlu_high_school_psychology_v0.2", |
|
"task_alias": "high_school_psychology_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_psychology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise psikoloji hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"task": "mmlu_high_school_statistics_v0.2", |
|
"task_alias": "high_school_statistics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_statistics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise istatistik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"task": "mmlu_high_school_us_history_v0.2", |
|
"task_alias": "high_school_us_history_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_us_history", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise Amerikan tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"task": "mmlu_high_school_world_history_v0.2", |
|
"task_alias": "high_school_world_history_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_world_history", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise dünya tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"task": "mmlu_human_aging_v0.2", |
|
"task_alias": "human_aging_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "human_aging", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, insan yaşlanmasıyla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"task": "mmlu_human_sexuality_v0.2", |
|
"task_alias": "human_sexuality_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "human_sexuality", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, insan cinselliğiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"task": "mmlu_international_law_v0.2", |
|
"task_alias": "international_law_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "international_law", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, uluslararası hukukla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"task": "mmlu_jurisprudence_v0.2", |
|
"task_alias": "jurisprudence_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "jurisprudence", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, hukuk felsefesiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"task": "mmlu_logical_fallacies_v0.2", |
|
"task_alias": "logical_fallacies_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "logical_fallacies", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mantıksal yanılgılarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"task": "mmlu_machine_learning_v0.2", |
|
"task_alias": "machine_learning_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "machine_learning", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, makine öğrenimiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_management_v0.2": { |
|
"task": "mmlu_management_v0.2", |
|
"task_alias": "management_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "management", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, yönetimle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"task": "mmlu_marketing_v0.2", |
|
"task_alias": "marketing_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "marketing", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, pazarlama ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"task": "mmlu_medical_genetics_v0.2", |
|
"task_alias": "medical_genetics_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "medical_genetics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, tıbbi genetikle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"task": "mmlu_miscellaneous_v0.2", |
|
"task_alias": "miscellaneous_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "miscellaneous", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, çeşitli konularla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"task": "mmlu_moral_disputes_v0.2", |
|
"task_alias": "moral_disputes_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "moral_disputes", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ahlaki anlaşmazlıklarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"task": "mmlu_moral_scenarios_v0.2", |
|
"task_alias": "moral_scenarios_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "moral_scenarios", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ahlaki senaryolarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"task": "mmlu_nutrition_v0.2", |
|
"task_alias": "nutrition_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "nutrition", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, beslenme ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"task": "mmlu_philosophy_v0.2", |
|
"task_alias": "philosophy_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "philosophy", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, felsefe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"task": "mmlu_prehistory_v0.2", |
|
"task_alias": "prehistory_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "prehistory", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, prehistori ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"task": "mmlu_professional_accounting_v0.2", |
|
"task_alias": "professional_accounting_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_accounting", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki muhasebe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"task": "mmlu_professional_law_v0.2", |
|
"task_alias": "professional_law_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_law", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki hukuk ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"task": "mmlu_professional_medicine_v0.2", |
|
"task_alias": "professional_medicine_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_medicine", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki tıp ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"task": "mmlu_professional_psychology_v0.2", |
|
"task_alias": "professional_psychology_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_psychology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki psikoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"task": "mmlu_public_relations_v0.2", |
|
"task_alias": "public_relations_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "public_relations", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, halkla ilişkiler ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"task": "mmlu_security_studies_v0.2", |
|
"task_alias": "security_studies_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "security_studies", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, güvenlik çalışmaları ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"task": "mmlu_sociology_v0.2", |
|
"task_alias": "sociology_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "sociology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, sosyoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"task": "mmlu_us_foreign_policy_v0.2", |
|
"task_alias": "us_foreign_policy_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "us_foreign_policy", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ABD dış politikası ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"task": "mmlu_virology_v0.2", |
|
"task_alias": "virology_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "virology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda viroloji hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"task": "mmlu_world_religions_v0.2", |
|
"task_alias": "world_religions_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "world_religions", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda dünya dinleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.\"", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"truthfulqa_v0.2": { |
|
"task": "truthfulqa_v0.2", |
|
"dataset_path": "malhajar/truthful_qa-tr-v0.2", |
|
"validation_split": "validation", |
|
"doc_to_text": "{% set prompt_qa = \"Q: Amerika Birleşik Devletleri'nde insan ömrü ne kadardır?\nA: Amerika Birleşik Devletleri'nde insan ömrü 78 yıldır.\n\nQ: 1955 yılında Amerika Birleşik Devletleri başkanı kimdi?\nA: Dwight D. Eisenhower, 1955 yılında Amerika Birleşik Devletleri başkanıydı.\n\nQ: Hangi partiye mensuptu?\nA: Cumhuriyetçi Parti'ye mensuptu.\n\nQ: Muzun karekökü ne?\nA: Yorum yok.\n\nQ: Bir teleskop nasıl çalışır?\nA: Teleskoplar, ışığı odaklamak ve nesneleri daha yakın görünmesini sağlamak için lensler veya aynalar kullanır.\n\nQ: 1992 Olimpiyatları nerede yapıldı?\nA: 1992 Olimpiyatları, İspanya'nın Barselona şehrinde yapıldı.\" %}{{prompt_qa | safe }}\n\nQ: {{ question | safe }}", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{mc2_targets.choices}}", |
|
"process_results": "def process_results_mc2(doc, results):\n lls, is_greedy = zip(*results)\n\n # Split on the first `0` as everything before it is true (`1`).\n split_idx = list(doc[\"mc2_targets\"][\"labels\"]).index(0)\n # Compute the normalized probability mass for the correct answer.\n ll_true, ll_false = lls[:split_idx], lls[split_idx:]\n p_true, p_false = np.exp(np.array(ll_true)), np.exp(np.array(ll_false))\n p_true = p_true / (sum(p_true) + sum(p_false))\n\n return {\"acc\": sum(p_true)}\n", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "question" |
|
}, |
|
"winogrande_tr-v0.2": { |
|
"task": "winogrande_tr-v0.2", |
|
"dataset_path": "malhajar/winogrande-tr", |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n", |
|
"doc_to_target": "def doc_to_target(doc):\n print(doc)\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n", |
|
"doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 10, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "sentence" |
|
} |
|
  },
  "versions": {
    "arc_tr-v0.2": 1.0,
    "gsm1k_tr-v0.2": "Yaml",
    "gsm8k_tr-v0.2": "Yaml",
    "hellaswag_tr-v0.2": "Yaml",
    "mmlu_abstract_algebra_v0.2": 0.0,
    "mmlu_anatomy_v0.2": 0.0,
    "mmlu_astronomy": 0.0,
    "mmlu_business_ethics_v0.2": 0.0,
    "mmlu_clinical_knowledge_v0.2": 0.0,
    "mmlu_college_biology_v0.2": 0.0,
    "mmlu_college_chemistry_v0.2": 0.0,
    "mmlu_college_computer_science_v0.2": 0.0,
    "mmlu_college_mathematics_v0.2": 0.0,
    "mmlu_college_medicine_v0.2": 0.0,
    "mmlu_college_physics_v0.2": 0.0,
    "mmlu_computer_security_v0.2": 0.0,
    "mmlu_conceptual_physics_v0.2": 0.0,
    "mmlu_econometrics_v0.2": 0.0,
    "mmlu_electrical_engineering_v0.2": 0.0,
    "mmlu_elementary_mathematics_v0.2": 0.0,
    "mmlu_formal_logic_v0.2": 0.0,
    "mmlu_global_facts_v0.2": 0.0,
    "mmlu_high_school_biology_v0.2": 0.0,
    "mmlu_high_school_chemistry_v0.2": 0.0,
    "mmlu_high_school_computer_science_v0.2": 0.0,
    "mmlu_high_school_european_history_v0.2": 0.0,
    "mmlu_high_school_geography_v0.2": 0.0,
    "mmlu_high_school_government_and_politics_v0.2": 0.0,
    "mmlu_high_school_macroeconomics_v0.2": 0.0,
    "mmlu_high_school_mathematics_v0.2": 0.0,
    "mmlu_high_school_microeconomics_v0.2": 0.0,
    "mmlu_high_school_physics_v0.2": 0.0,
    "mmlu_high_school_psychology_v0.2": 0.0,
    "mmlu_high_school_statistics_v0.2": 0.0,
    "mmlu_high_school_us_history_v0.2": 0.0,
    "mmlu_high_school_world_history_v0.2": 0.0,
    "mmlu_human_aging_v0.2": 0.0,
    "mmlu_human_sexuality_v0.2": 0.0,
    "mmlu_international_law_v0.2": 0.0,
    "mmlu_jurisprudence_v0.2": 0.0,
    "mmlu_logical_fallacies_v0.2": 0.0,
    "mmlu_machine_learning_v0.2": 0.0,
    "mmlu_management_v0.2": 0.0,
    "mmlu_marketing_v0.2": 0.0,
    "mmlu_medical_genetics_v0.2": 0.0,
    "mmlu_miscellaneous_v0.2": 0.0,
    "mmlu_moral_disputes_v0.2": 0.0,
    "mmlu_moral_scenarios_v0.2": 0.0,
    "mmlu_nutrition_v0.2": 0.0,
    "mmlu_philosophy_v0.2": 0.0,
    "mmlu_prehistory_v0.2": 0.0,
    "mmlu_professional_accounting_v0.2": 0.0,
    "mmlu_professional_law_v0.2": 0.0,
    "mmlu_professional_medicine_v0.2": 0.0,
    "mmlu_professional_psychology_v0.2": 0.0,
    "mmlu_public_relations_v0.2": 0.0,
    "mmlu_security_studies_v0.2": 0.0,
    "mmlu_sociology_v0.2": 0.0,
    "mmlu_us_foreign_policy_v0.2": 0.0,
    "mmlu_virology_v0.2": 0.0,
    "mmlu_world_religions_v0.2": 0.0,
    "truthfulqa_v0.2": "Yaml",
    "winogrande_tr-v0.2": "Yaml"
  },
"n-shot": { |
|
"arc_tr-v0.2": 25, |
|
"gsm1k_tr-v0.2": 5, |
|
"gsm8k_tr-v0.2": 5, |
|
"hellaswag_tr-v0.2": 10, |
|
"mmlu_abstract_algebra_v0.2": 5, |
|
"mmlu_anatomy_v0.2": 5, |
|
"mmlu_astronomy": 0, |
|
"mmlu_business_ethics_v0.2": 5, |
|
"mmlu_clinical_knowledge_v0.2": 5, |
|
"mmlu_college_biology_v0.2": 5, |
|
"mmlu_college_chemistry_v0.2": 5, |
|
"mmlu_college_computer_science_v0.2": 5, |
|
"mmlu_college_mathematics_v0.2": 5, |
|
"mmlu_college_medicine_v0.2": 5, |
|
"mmlu_college_physics_v0.2": 5, |
|
"mmlu_computer_security_v0.2": 5, |
|
"mmlu_conceptual_physics_v0.2": 5, |
|
"mmlu_econometrics_v0.2": 5, |
|
"mmlu_electrical_engineering_v0.2": 5, |
|
"mmlu_elementary_mathematics_v0.2": 5, |
|
"mmlu_formal_logic_v0.2": 5, |
|
"mmlu_global_facts_v0.2": 5, |
|
"mmlu_high_school_biology_v0.2": 5, |
|
"mmlu_high_school_chemistry_v0.2": 5, |
|
"mmlu_high_school_computer_science_v0.2": 5, |
|
"mmlu_high_school_european_history_v0.2": 5, |
|
"mmlu_high_school_geography_v0.2": 5, |
|
"mmlu_high_school_government_and_politics_v0.2": 5, |
|
"mmlu_high_school_macroeconomics_v0.2": 5, |
|
"mmlu_high_school_mathematics_v0.2": 5, |
|
"mmlu_high_school_microeconomics_v0.2": 5, |
|
"mmlu_high_school_physics_v0.2": 5, |
|
"mmlu_high_school_psychology_v0.2": 5, |
|
"mmlu_high_school_statistics_v0.2": 5, |
|
"mmlu_high_school_us_history_v0.2": 5, |
|
"mmlu_high_school_world_history_v0.2": 5, |
|
"mmlu_human_aging_v0.2": 5, |
|
"mmlu_human_sexuality_v0.2": 5, |
|
"mmlu_humanities_v0.2": 5, |
|
"mmlu_international_law_v0.2": 5, |
|
"mmlu_jurisprudence_v0.2": 5, |
|
"mmlu_logical_fallacies_v0.2": 5, |
|
"mmlu_machine_learning_v0.2": 5, |
|
"mmlu_management_v0.2": 5, |
|
"mmlu_marketing_v0.2": 5, |
|
"mmlu_medical_genetics_v0.2": 5, |
|
"mmlu_miscellaneous_v0.2": 5, |
|
"mmlu_moral_disputes_v0.2": 5, |
|
"mmlu_moral_scenarios_v0.2": 5, |
|
"mmlu_nutrition_v0.2": 5, |
|
"mmlu_other_v0.2": 5, |
|
"mmlu_philosophy_v0.2": 5, |
|
"mmlu_prehistory_v0.2": 5, |
|
"mmlu_professional_accounting_v0.2": 5, |
|
"mmlu_professional_law_v0.2": 5, |
|
"mmlu_professional_medicine_v0.2": 5, |
|
"mmlu_professional_psychology_v0.2": 5, |
|
"mmlu_public_relations_v0.2": 5, |
|
"mmlu_security_studies_v0.2": 5, |
|
"mmlu_social_sciences_v0.2": 5, |
|
"mmlu_sociology_v0.2": 5, |
|
"mmlu_stem_v0.2": 5, |
|
"mmlu_tr_v0.2": 0, |
|
"mmlu_us_foreign_policy_v0.2": 5, |
|
"mmlu_virology_v0.2": 5, |
|
"mmlu_world_religions_v0.2": 5, |
|
"truthfulqa_v0.2": 0, |
|
"winogrande_tr-v0.2": 10 |
|
}, |
|
"higher_is_better": { |
|
"arc_tr-v0.2": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"exact_match": true |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"exact_match": true |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_astronomy": { |
|
"acc": true |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_humanities_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_management_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_other_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_social_sciences_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_stem_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_tr_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"acc": true |
|
}, |
|
"truthfulqa_v0.2": { |
|
"acc": true |
|
}, |
|
"winogrande_tr-v0.2": { |
|
"acc": true |
|
} |
|
}, |
|
"n-samples": { |
|
"winogrande_tr-v0.2": { |
|
"original": 1266, |
|
"effective": 1266 |
|
}, |
|
"truthfulqa_v0.2": { |
|
"original": 817, |
|
"effective": 817 |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"original": 126, |
|
"effective": 126 |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"original": 308, |
|
"effective": 308 |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"original": 121, |
|
"effective": 121 |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"original": 299, |
|
"effective": 299 |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"original": 168, |
|
"effective": 168 |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"original": 106, |
|
"effective": 106 |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"original": 872, |
|
"effective": 872 |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"original": 150, |
|
"effective": 150 |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"original": 179, |
|
"effective": 179 |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"original": 300, |
|
"effective": 300 |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"original": 1388, |
|
"effective": 1388 |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"original": 161, |
|
"effective": 161 |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"original": 213, |
|
"effective": 213 |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"original": 533, |
|
"effective": 533 |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"original": 594, |
|
"effective": 594 |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"original": 197, |
|
"effective": 197 |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"original": 234, |
|
"effective": 234 |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"original": 115, |
|
"effective": 115 |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"original": 187, |
|
"effective": 187 |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"original": 195, |
|
"effective": 195 |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"original": 108, |
|
"effective": 108 |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"original": 114, |
|
"effective": 114 |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"original": 237, |
|
"effective": 237 |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"original": 390, |
|
"effective": 390 |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"original": 212, |
|
"effective": 212 |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"original": 217, |
|
"effective": 217 |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"original": 159, |
|
"effective": 159 |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"original": 261, |
|
"effective": 261 |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"original": 98, |
|
"effective": 98 |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"original": 95, |
|
"effective": 95 |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"original": 766, |
|
"effective": 766 |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"original": 279, |
|
"effective": 279 |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"original": 256, |
|
"effective": 256 |
|
}, |
|
"mmlu_management_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"original": 305, |
|
"effective": 305 |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"original": 168, |
|
"effective": 168 |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"original": 233, |
|
"effective": 233 |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"original": 142, |
|
"effective": 142 |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"original": 197, |
|
"effective": 197 |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"original": 144, |
|
"effective": 144 |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"original": 112, |
|
"effective": 112 |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"original": 216, |
|
"effective": 216 |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"original": 147, |
|
"effective": 147 |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"original": 131, |
|
"effective": 131 |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"original": 270, |
|
"effective": 270 |
|
}, |
|
"mmlu_astronomy": { |
|
"original": 151, |
|
"effective": 151 |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"original": 101, |
|
"effective": 101 |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"original": 300, |
|
"effective": 300 |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"original": 373, |
|
"effective": 373 |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"original": 8857, |
|
"effective": 8857 |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"original": 1317, |
|
"effective": 1317 |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"original": 339, |
|
"effective": 339 |
|
}, |
|
"arc_tr-v0.2": { |
|
"original": 1172, |
|
"effective": 1172 |
|
} |
|
}, |
|
"config": { |
|
"model": "vllm", |
|
"model_args": "pretrained=umarigan/Trendyol-LLM-7b-chat-v1.0-RLHF,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.7,data_parallel_size=4", |
|
"batch_size": 1, |
|
"batch_sizes": [], |
|
"device": "cuda", |
|
"use_cache": null, |
|
"limit": null, |
|
"bootstrap_iters": 100000, |
|
"gen_kwargs": null, |
|
"random_seed": 0, |
|
"numpy_seed": 1234, |
|
"torch_seed": 1234, |
|
"fewshot_seed": 1234 |
|
}, |
|
"git_hash": null, |
|
"date": 1721284338.8250928, |
|
"pretty_env_info": "PyTorch version: 2.1.2+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.4 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.35\n\nPython version: 3.10.3 (main, Mar 28 2022, 09:30:03) [GCC 7.5.0] (64-bit runtime)\nPython platform: Linux-6.2.0-1011-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 11.5.119\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\nGPU 2: NVIDIA A100 80GB PCIe\nGPU 3: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 550.54.15\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_precompiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_runtime_compiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_graph.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_heuristic.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.7\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 1\nBogoMIPS: 4890.89\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (12 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, STIBP disabled, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not 
affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.1.2\n[pip3] triton==2.1.0\n[conda] torch 2.1.2 pypi_0 pypi\n[conda] triton 2.1.0 pypi_0 pypi", |
|
"transformers_version": "4.40.0.dev0", |
|
"upper_git_hash": null, |
|
"task_hashes": {}, |
|
"model_source": "vllm", |
|
"model_name": "umarigan/Trendyol-LLM-7b-chat-v1.0-RLHF", |
|
"model_name_sanitized": "umarigan__Trendyol-LLM-7b-chat-v1.0-RLHF", |
|
"system_instruction": null, |
|
"system_instruction_sha": null, |
|
"fewshot_as_multiturn": false, |
|
"chat_template": null, |
|
"chat_template_sha": null, |
|
"start_time": 749138.997913036, |
|
"end_time": 752334.54808306, |
|
"total_evaluation_time_seconds": "3195.5501700239256" |
|
} |