{
  "results": {
    "winogrande_tr-v0.2": {
      "acc,none": 0.5063191153238547,
      "acc_stderr,none": 0.014056916173806114,
      "alias": "winogrande_tr-v0.2"
    },
    "truthfulqa_v0.2": {
      "acc,none": 0.43467431643538545,
      "acc_stderr,none": 0.014894063159913673,
      "alias": "truthfulqa_v0.2"
    },
    "mmlu_tr_v0.2": {
      "acc,none": 0.2426236781779191,
      "acc_stderr,none": 0.003675963467785858,
      "alias": "mmlu_tr_v0.2"
    },
    "mmlu_humanities_v0.2": {
      "alias": " - humanities_v0.2",
      "acc,none": 0.24732407196538375,
      "acc_stderr,none": 0.006508989395519886
    },
    "mmlu_formal_logic_v0.2": {
      "alias": " - formal_logic_v0.2",
      "acc,none": 0.29365079365079366,
      "acc_stderr,none": 0.040735243221471255
    },
    "mmlu_high_school_european_history_v0.2": {
      "alias": " - high_school_european_history_v0.2",
      "acc,none": 0.22,
      "acc_stderr,none": 0.03393637948203957
    },
    "mmlu_high_school_us_history_v0.2": {
      "alias": " - high_school_us_history_v0.2",
      "acc,none": 0.22905027932960895,
      "acc_stderr,none": 0.0314969455330781
    },
    "mmlu_high_school_world_history_v0.2": {
      "alias": " - high_school_world_history_v0.2",
      "acc,none": 0.26291079812206575,
      "acc_stderr,none": 0.03023404852717861
    },
    "mmlu_international_law_v0.2": {
      "alias": " - international_law_v0.2",
      "acc,none": 0.256198347107438,
      "acc_stderr,none": 0.03984979653302872
    },
    "mmlu_jurisprudence_v0.2": {
      "alias": " - jurisprudence_v0.2",
      "acc,none": 0.2641509433962264,
      "acc_stderr,none": 0.043025487739590106
    },
    "mmlu_logical_fallacies_v0.2": {
      "alias": " - logical_fallacies_v0.2",
      "acc,none": 0.2422360248447205,
      "acc_stderr,none": 0.033870869961530825
    },
    "mmlu_moral_disputes_v0.2": {
      "alias": " - moral_disputes_v0.2",
      "acc,none": 0.24025974025974026,
      "acc_stderr,none": 0.02438394018591814
    },
    "mmlu_moral_scenarios_v0.2": {
      "alias": " - moral_scenarios_v0.2",
      "acc,none": 0.23623853211009174,
      "acc_stderr,none": 0.01439279575083758
    },
    "mmlu_philosophy_v0.2": {
      "alias": " - philosophy_v0.2",
      "acc,none": 0.19732441471571907,
      "acc_stderr,none": 0.023054319433452385
    },
    "mmlu_prehistory_v0.2": {
      "alias": " - prehistory_v0.2",
      "acc,none": 0.2866666666666667,
      "acc_stderr,none": 0.026151660126798617
    },
    "mmlu_professional_law_v0.2": {
      "alias": " - professional_law_v0.2",
      "acc,none": 0.24567723342939482,
      "acc_stderr,none": 0.011559068400849123
    },
    "mmlu_world_religions_v0.2": {
      "alias": " - world_religions_v0.2",
      "acc,none": 0.3273809523809524,
      "acc_stderr,none": 0.03631224075621175
    },
    "mmlu_other_v0.2": {
      "alias": " - other_v0.2",
      "acc,none": 0.2402123424021234,
      "acc_stderr,none": 0.007733009935593165
    },
    "mmlu_business_ethics_v0.2": {
      "alias": " - business_ethics_v0.2",
      "acc,none": 0.32323232323232326,
      "acc_stderr,none": 0.04724590344515123
    },
    "mmlu_clinical_knowledge_v0.2": {
      "alias": " - clinical_knowledge_v0.2",
      "acc,none": 0.23046875,
      "acc_stderr,none": 0.026372364120563745
    },
    "mmlu_college_medicine_v0.2": {
      "alias": " - college_medicine_v0.2",
      "acc,none": 0.20833333333333334,
      "acc_stderr,none": 0.03142623294477302
    },
    "mmlu_global_facts_v0.2": {
      "alias": " - global_facts_v0.2",
      "acc,none": 0.15306122448979592,
      "acc_stderr,none": 0.03655718385230371
    },
    "mmlu_human_aging_v0.2": {
      "alias": " - human_aging_v0.2",
      "acc,none": 0.37735849056603776,
      "acc_stderr,none": 0.03336989733531516
    },
    "mmlu_management_v0.2": {
      "alias": " - management_v0.2",
      "acc,none": 0.1717171717171717,
      "acc_stderr,none": 0.03809633126246492
    },
    "mmlu_marketing_v0.2": {
      "alias": " - marketing_v0.2",
      "acc,none": 0.2626728110599078,
      "acc_stderr,none": 0.02994406864764655
    },
    "mmlu_medical_genetics_v0.2": {
      "alias": " - medical_genetics_v0.2",
      "acc,none": 0.3684210526315789,
      "acc_stderr,none": 0.049753325624911644
    },
    "mmlu_miscellaneous_v0.2": {
      "alias": " - miscellaneous_v0.2",
      "acc,none": 0.22845953002610966,
      "acc_stderr,none": 0.015179353589032121
    },
    "mmlu_nutrition_v0.2": {
      "alias": " - nutrition_v0.2",
      "acc,none": 0.21967213114754097,
      "acc_stderr,none": 0.023745933581562878
    },
    "mmlu_professional_accounting_v0.2": {
      "alias": " - professional_accounting_v0.2",
      "acc,none": 0.24014336917562723,
      "acc_stderr,none": 0.025619976708314042
    },
    "mmlu_professional_medicine_v0.2": {
      "alias": " - professional_medicine_v0.2",
      "acc,none": 0.1724137931034483,
      "acc_stderr,none": 0.02342642139609509
    },
    "mmlu_virology_v0.2": {
      "alias": " - virology_v0.2",
      "acc,none": 0.25157232704402516,
      "acc_stderr,none": 0.034520558111649044
    },
    "mmlu_social_sciences_v0.2": {
      "alias": " - social_sciences_v0.2",
      "acc,none": 0.25341325341325344,
      "acc_stderr,none": 0.007898177632923978
    },
    "mmlu_econometrics_v0.2": {
      "alias": " - econometrics_v0.2",
      "acc,none": 0.22807017543859648,
      "acc_stderr,none": 0.03947152782669415
    },
    "mmlu_high_school_geography_v0.2": {
      "alias": " - high_school_geography_v0.2",
      "acc,none": 0.17258883248730963,
      "acc_stderr,none": 0.026992244482138845
    },
    "mmlu_high_school_government_and_politics_v0.2": {
      "alias": " - high_school_government_and_politics_v0.2",
      "acc,none": 0.18716577540106952,
      "acc_stderr,none": 0.02859945370554897
    },
    "mmlu_high_school_macroeconomics_v0.2": {
      "alias": " - high_school_macroeconomics_v0.2",
      "acc,none": 0.24871794871794872,
      "acc_stderr,none": 0.0219169577092138
    },
    "mmlu_high_school_microeconomics_v0.2": {
      "alias": " - high_school_microeconomics_v0.2",
      "acc,none": 0.20253164556962025,
      "acc_stderr,none": 0.026160568246601464
    },
    "mmlu_high_school_psychology_v0.2": {
      "alias": " - high_school_psychology_v0.2",
      "acc,none": 0.3452157598499062,
      "acc_stderr,none": 0.020612876432322805
    },
    "mmlu_human_sexuality_v0.2": {
      "alias": " - human_sexuality_v0.2",
      "acc,none": 0.2782608695652174,
      "acc_stderr,none": 0.041972396739021
    },
    "mmlu_professional_psychology_v0.2": {
      "alias": " - professional_psychology_v0.2",
      "acc,none": 0.24579124579124578,
      "acc_stderr,none": 0.017680789075526997
    },
    "mmlu_public_relations_v0.2": {
      "alias": " - public_relations_v0.2",
      "acc,none": 0.28703703703703703,
      "acc_stderr,none": 0.043733130409147614
    },
    "mmlu_security_studies_v0.2": {
      "alias": " - security_studies_v0.2",
      "acc,none": 0.2222222222222222,
      "acc_stderr,none": 0.027236013946196708
    },
    "mmlu_sociology_v0.2": {
      "alias": " - sociology_v0.2",
      "acc,none": 0.24615384615384617,
      "acc_stderr,none": 0.03092742837122565
    },
    "mmlu_us_foreign_policy_v0.2": {
      "alias": " - us_foreign_policy_v0.2",
      "acc,none": 0.2828282828282828,
      "acc_stderr,none": 0.045494611170084985
    },
    "mmlu_stem_v0.2": {
      "alias": " - stem_v0.2",
      "acc,none": 0.22792937399678972,
      "acc_stderr,none": 0.007518017967624463
    },
    "mmlu_abstract_algebra_v0.2": {
      "alias": " - abstract_algebra_v0.2",
      "acc,none": 0.2,
      "acc_stderr,none": 0.04020151261036845
    },
    "mmlu_anatomy_v0.2": {
      "alias": " - anatomy_v0.2",
      "acc,none": 0.1984732824427481,
      "acc_stderr,none": 0.03498149385462472
    },
    "mmlu_astronomy": {
      "alias": " - astronomy",
      "acc,none": 0.16556291390728478,
      "acc_stderr,none": 0.030348183410303615
    },
    "mmlu_college_biology_v0.2": {
      "alias": " - college_biology_v0.2",
      "acc,none": 0.2887323943661972,
      "acc_stderr,none": 0.03816407270535068
    },
    "mmlu_college_chemistry_v0.2": {
      "alias": " - college_chemistry_v0.2",
      "acc,none": 0.21212121212121213,
      "acc_stderr,none": 0.04129606932540891
    },
    "mmlu_college_computer_science_v0.2": {
      "alias": " - college_computer_science_v0.2",
      "acc,none": 0.23232323232323232,
      "acc_stderr,none": 0.04266016017054687
    },
    "mmlu_college_mathematics_v0.2": {
      "alias": " - college_mathematics_v0.2",
      "acc,none": 0.21,
      "acc_stderr,none": 0.040936018074033256
    },
    "mmlu_college_physics_v0.2": {
      "alias": " - college_physics_v0.2",
      "acc,none": 0.2079207920792079,
      "acc_stderr,none": 0.04058198323152316
    },
    "mmlu_computer_security_v0.2": {
      "alias": " - computer_security_v0.2",
      "acc,none": 0.21,
      "acc_stderr,none": 0.040936018074033256
    },
    "mmlu_conceptual_physics_v0.2": {
      "alias": " - conceptual_physics_v0.2",
      "acc,none": 0.24034334763948498,
      "acc_stderr,none": 0.028053091820883225
    },
    "mmlu_electrical_engineering_v0.2": {
      "alias": " - electrical_engineering_v0.2",
      "acc,none": 0.2777777777777778,
      "acc_stderr,none": 0.03745554791462457
    },
    "mmlu_elementary_mathematics_v0.2": {
      "alias": " - elementary_mathematics_v0.2",
      "acc,none": 0.21983914209115282,
      "acc_stderr,none": 0.021472030675572816
    },
    "mmlu_high_school_biology_v0.2": {
      "alias": " - high_school_biology_v0.2",
      "acc,none": 0.21333333333333335,
      "acc_stderr,none": 0.023691313496540824
    },
    "mmlu_high_school_chemistry_v0.2": {
      "alias": " - high_school_chemistry_v0.2",
      "acc,none": 0.23857868020304568,
      "acc_stderr,none": 0.03044390069569734
    },
    "mmlu_high_school_computer_science_v0.2": {
      "alias": " - high_school_computer_science_v0.2",
      "acc,none": 0.27,
      "acc_stderr,none": 0.0446196043338474
    },
    "mmlu_high_school_mathematics_v0.2": {
      "alias": " - high_school_mathematics_v0.2",
      "acc,none": 0.21481481481481482,
      "acc_stderr,none": 0.02504044387700068
    },
    "mmlu_high_school_physics_v0.2": {
      "alias": " - high_school_physics_v0.2",
      "acc,none": 0.20408163265306123,
      "acc_stderr,none": 0.033354911723364515
    },
    "mmlu_high_school_statistics_v0.2": {
      "alias": " - high_school_statistics_v0.2",
      "acc,none": 0.24074074074074073,
      "acc_stderr,none": 0.029157522184605583
    },
    "mmlu_machine_learning_v0.2": {
      "alias": " - machine_learning_v0.2",
      "acc,none": 0.3125,
      "acc_stderr,none": 0.043994650575715215
    },
    "hellaswag_tr-v0.2": {
      "acc,none": 0.32866659139663545,
      "acc_stderr,none": 0.0049914658774028045,
      "acc_norm,none": 0.3844416845432991,
      "acc_norm_stderr,none": 0.005169291619761968,
      "alias": "hellaswag_tr-v0.2"
    },
    "gsm8k_tr-v0.2": {
      "exact_match,strict-match": 0.01442672741078208,
      "exact_match_stderr,strict-match": 0.0032870054722987413,
      "exact_match,flexible-extract": 0.017463933181473046,
      "exact_match_stderr,flexible-extract": 0.003610915592041262,
      "alias": "gsm8k_tr-v0.2"
    },
    "gsm1k_tr-v0.2": {
      "exact_match,strict-match": 0.0058997050147492625,
      "exact_match_stderr,strict-match": 0.004165545666141548,
      "exact_match,flexible-extract": 0.011799410029498525,
      "exact_match_stderr,flexible-extract": 0.005873464536212727,
      "alias": "gsm1k_tr-v0.2"
    },
    "arc_tr-v0.2": {
      "acc,none": 0.25426621160409557,
      "acc_stderr,none": 0.012724999945157746,
      "acc_norm,none": 0.29692832764505117,
      "acc_norm_stderr,none": 0.013352025976725225,
      "alias": "arc_tr-v0.2"
    }
  },
"groups": { |
|
"mmlu_tr_v0.2": { |
|
"acc,none": 0.2426236781779191, |
|
"acc_stderr,none": 0.003675963467785858, |
|
"alias": "mmlu_tr_v0.2" |
|
}, |
|
"mmlu_humanities_v0.2": { |
|
"alias": " - humanities_v0.2", |
|
"acc,none": 0.24732407196538375, |
|
"acc_stderr,none": 0.006508989395519886 |
|
}, |
|
"mmlu_other_v0.2": { |
|
"alias": " - other_v0.2", |
|
"acc,none": 0.2402123424021234, |
|
"acc_stderr,none": 0.007733009935593165 |
|
}, |
|
"mmlu_social_sciences_v0.2": { |
|
"alias": " - social_sciences_v0.2", |
|
"acc,none": 0.25341325341325344, |
|
"acc_stderr,none": 0.007898177632923978 |
|
}, |
|
"mmlu_stem_v0.2": { |
|
"alias": " - stem_v0.2", |
|
"acc,none": 0.22792937399678972, |
|
"acc_stderr,none": 0.007518017967624463 |
|
} |
|
}, |
|
"group_subtasks": { |
|
"arc_tr-v0.2": [], |
|
"gsm1k_tr-v0.2": [], |
|
"gsm8k_tr-v0.2": [], |
|
"hellaswag_tr-v0.2": [], |
|
"mmlu_stem_v0.2": [ |
|
"mmlu_abstract_algebra_v0.2", |
|
"mmlu_conceptual_physics_v0.2", |
|
"mmlu_college_biology_v0.2", |
|
"mmlu_high_school_chemistry_v0.2", |
|
"mmlu_electrical_engineering_v0.2", |
|
"mmlu_high_school_computer_science_v0.2", |
|
"mmlu_machine_learning_v0.2", |
|
"mmlu_college_chemistry_v0.2", |
|
"mmlu_high_school_statistics_v0.2", |
|
"mmlu_college_mathematics_v0.2", |
|
"mmlu_high_school_physics_v0.2", |
|
"mmlu_college_computer_science_v0.2", |
|
"mmlu_anatomy_v0.2", |
|
"mmlu_computer_security_v0.2", |
|
"mmlu_high_school_mathematics_v0.2", |
|
"mmlu_astronomy", |
|
"mmlu_college_physics_v0.2", |
|
"mmlu_high_school_biology_v0.2", |
|
"mmlu_elementary_mathematics_v0.2" |
|
], |
|
"mmlu_other_v0.2": [ |
|
"mmlu_human_aging_v0.2", |
|
"mmlu_marketing_v0.2", |
|
"mmlu_virology_v0.2", |
|
"mmlu_professional_medicine_v0.2", |
|
"mmlu_business_ethics_v0.2", |
|
"mmlu_global_facts_v0.2", |
|
"mmlu_medical_genetics_v0.2", |
|
"mmlu_miscellaneous_v0.2", |
|
"mmlu_professional_accounting_v0.2", |
|
"mmlu_clinical_knowledge_v0.2", |
|
"mmlu_management_v0.2", |
|
"mmlu_nutrition_v0.2", |
|
"mmlu_college_medicine_v0.2" |
|
], |
|
"mmlu_social_sciences_v0.2": [ |
|
"mmlu_high_school_psychology_v0.2", |
|
"mmlu_professional_psychology_v0.2", |
|
"mmlu_high_school_geography_v0.2", |
|
"mmlu_security_studies_v0.2", |
|
"mmlu_human_sexuality_v0.2", |
|
"mmlu_high_school_government_and_politics_v0.2", |
|
"mmlu_sociology_v0.2", |
|
"mmlu_public_relations_v0.2", |
|
"mmlu_us_foreign_policy_v0.2", |
|
"mmlu_econometrics_v0.2", |
|
"mmlu_high_school_microeconomics_v0.2", |
|
"mmlu_high_school_macroeconomics_v0.2" |
|
], |
|
"mmlu_humanities_v0.2": [ |
|
"mmlu_formal_logic_v0.2", |
|
"mmlu_moral_disputes_v0.2", |
|
"mmlu_international_law_v0.2", |
|
"mmlu_philosophy_v0.2", |
|
"mmlu_world_religions_v0.2", |
|
"mmlu_jurisprudence_v0.2", |
|
"mmlu_moral_scenarios_v0.2", |
|
"mmlu_high_school_european_history_v0.2", |
|
"mmlu_high_school_us_history_v0.2", |
|
"mmlu_prehistory_v0.2", |
|
"mmlu_professional_law_v0.2", |
|
"mmlu_logical_fallacies_v0.2", |
|
"mmlu_high_school_world_history_v0.2" |
|
], |
|
"mmlu_tr_v0.2": [ |
|
"mmlu_humanities_v0.2", |
|
"mmlu_social_sciences_v0.2", |
|
"mmlu_other_v0.2", |
|
"mmlu_stem_v0.2" |
|
], |
|
"truthfulqa_v0.2": [], |
|
"winogrande_tr-v0.2": [] |
|
}, |
|
"configs": { |
|
"arc_tr-v0.2": { |
|
"task": "arc_tr-v0.2", |
|
"group": [ |
|
"ai2_arc" |
|
], |
|
"dataset_path": "malhajar/arc-tr-v0.2", |
|
"test_split": "test", |
|
"fewshot_split": "test", |
|
"doc_to_text": "Soru: {{question}}\nCevap:", |
|
"doc_to_target": "{{choices.label.index(answerKey)}}", |
|
"doc_to_choice": "{{choices.text}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 25, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "Soru: {{question}}\nCevap:", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"task": "gsm1k_tr-v0.2", |
|
"group": [ |
|
"math_word_problems" |
|
], |
|
"dataset_path": "malhajar/gsm1k_tr-v0.2", |
|
"test_split": "test", |
|
"fewshot_split": "test", |
|
"doc_to_text": "Soru: {{question}}\nCevap:", |
|
"doc_to_target": "{{answer}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "exact_match", |
|
"aggregation": "mean", |
|
"higher_is_better": true, |
|
"ignore_case": true, |
|
"ignore_punctuation": false, |
|
"regexes_to_ignore": [ |
|
",", |
|
"\\$", |
|
"(?s).*#### ", |
|
"\\.$" |
|
] |
|
} |
|
], |
|
"output_type": "generate_until", |
|
"generation_kwargs": { |
|
"until": [ |
|
"Question:", |
|
"</s>", |
|
"<|im_end|>" |
|
], |
|
"do_sample": false, |
|
"temperature": 0.0 |
|
}, |
|
"repeats": 1, |
|
"filter_list": [ |
|
{ |
|
"name": "strict-match", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"regex_pattern": "#### (\\-?[0-9\\.\\,]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
}, |
|
{ |
|
"name": "flexible-extract", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"group_select": -1, |
|
"regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
} |
|
], |
|
"should_decontaminate": false |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"task": "gsm8k_tr-v0.2", |
|
"group": [ |
|
"math_word_problems" |
|
], |
|
"dataset_path": "malhajar/gsm8k_tr-v0.2", |
|
"test_split": "test", |
|
"fewshot_split": "test", |
|
"doc_to_text": "Soru: {{question}}\nCevap:", |
|
"doc_to_target": "{{answer}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "exact_match", |
|
"aggregation": "mean", |
|
"higher_is_better": true, |
|
"ignore_case": true, |
|
"ignore_punctuation": false, |
|
"regexes_to_ignore": [ |
|
",", |
|
"\\$", |
|
"(?s).*#### ", |
|
"\\.$" |
|
] |
|
} |
|
], |
|
"output_type": "generate_until", |
|
"generation_kwargs": { |
|
"until": [ |
|
"Question:", |
|
"</s>", |
|
"<|im_end|>" |
|
], |
|
"do_sample": false, |
|
"temperature": 0.0 |
|
}, |
|
"repeats": 1, |
|
"filter_list": [ |
|
{ |
|
"name": "strict-match", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"regex_pattern": "#### (\\-?[0-9\\.\\,]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
}, |
|
{ |
|
"name": "flexible-extract", |
|
"filter": [ |
|
{ |
|
"function": "regex", |
|
"group_select": -1, |
|
"regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)" |
|
}, |
|
{ |
|
"function": "take_first" |
|
} |
|
] |
|
} |
|
], |
|
"should_decontaminate": false |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"task": "hellaswag_tr-v0.2", |
|
"group": [ |
|
"multiple_choice" |
|
], |
|
"dataset_path": "malhajar/hellaswag_tr-v0.2", |
|
"validation_split": "validation", |
|
"fewshot_split": "validation", |
|
"process_docs": "def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:\n def _process_doc(doc):\n ctx = doc[\"ctx_a\"] + \" \" + doc[\"ctx_b\"].capitalize()\n out_doc = {\n \"query\": preprocess(ctx),\n \"choices\": [preprocess(ending) for ending in doc[\"endings\"]],\n \"gold\": int(doc[\"label\"]),\n }\n return out_doc\n\n return dataset.map(_process_doc)\n", |
|
"doc_to_text": "{{query}}", |
|
"doc_to_target": "{{label}}", |
|
"doc_to_choice": "{{choices}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 10, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"task": "mmlu_abstract_algebra_v0.2", |
|
"task_alias": "abstract_algebra_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "abstract_algebra", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda soyut cebir hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"task": "mmlu_anatomy_v0.2", |
|
"task_alias": "anatomy_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "anatomy", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda anatomiyi konu alan çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_astronomy": { |
|
"task": "mmlu_astronomy", |
|
"task_alias": "astronomy", |
|
"group": "mmlu_stem", |
|
"dataset_path": "malhajar/mmlu-tr", |
|
"dataset_name": "astronomy", |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"task": "mmlu_business_ethics_v0.2", |
|
"task_alias": "business_ethics_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "business_ethics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda iş etiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"task": "mmlu_clinical_knowledge_v0.2", |
|
"task_alias": "clinical_knowledge_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "clinical_knowledge", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda klinik bilgi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"task": "mmlu_college_biology_v0.2", |
|
"task_alias": "college_biology_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_biology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite biyolojisi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"task": "mmlu_college_chemistry_v0.2", |
|
"task_alias": "college_chemistry_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_chemistry", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite kimyası hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"task": "mmlu_college_computer_science_v0.2", |
|
"task_alias": "college_computer_science_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_computer_science", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite bilgisayar bilimleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"task": "mmlu_college_mathematics_v0.2", |
|
"task_alias": "college_mathematics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_mathematics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite matematiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"task": "mmlu_college_medicine_v0.2", |
|
"task_alias": "college_medicine_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_medicine", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite tıbbı hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"task": "mmlu_college_physics_v0.2", |
|
"task_alias": "college_physics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "college_physics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda üniversite fizik hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"task": "mmlu_computer_security_v0.2", |
|
"task_alias": "computer_security_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "computer_security", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda bilgisayar güvenliği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"task": "mmlu_conceptual_physics_v0.2", |
|
"task_alias": "conceptual_physics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "conceptual_physics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, kavramsal fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"task": "mmlu_econometrics_v0.2", |
|
"task_alias": "econometrics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "econometrics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ekonometri hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"task": "mmlu_electrical_engineering_v0.2", |
|
"task_alias": "electrical_engineering_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "electrical_engineering", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, elektrik mühendisliği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"task": "mmlu_elementary_mathematics_v0.2", |
|
"task_alias": "elementary_mathematics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "elementary_mathematics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ilköğretim matematiği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"task": "mmlu_formal_logic_v0.2", |
|
"task_alias": "formal_logic_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "formal_logic", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, formal mantık hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"task": "mmlu_global_facts_v0.2", |
|
"task_alias": "global_facts_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "global_facts", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, küresel gerçekler hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"task": "mmlu_high_school_biology_v0.2", |
|
"task_alias": "high_school_biology_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_biology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise biyolojisi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"task": "mmlu_high_school_chemistry_v0.2", |
|
"task_alias": "high_school_chemistry_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_chemistry", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise kimyası hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"task": "mmlu_high_school_computer_science_v0.2", |
|
"task_alias": "high_school_computer_science_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_computer_science", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise bilgisayar bilimi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"task": "mmlu_high_school_european_history_v0.2", |
|
"task_alias": "high_school_european_history_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_european_history", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise Avrupa tarihi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"task": "mmlu_high_school_geography_v0.2", |
|
"task_alias": "high_school_geography_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_geography", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise coğrafya hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"task": "mmlu_high_school_government_and_politics_v0.2", |
|
"task_alias": "high_school_government_and_politics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_government_and_politics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise hükümet ve siyaset hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"task": "mmlu_high_school_macroeconomics_v0.2", |
|
"task_alias": "high_school_macroeconomics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_macroeconomics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise makroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"task": "mmlu_high_school_mathematics_v0.2", |
|
"task_alias": "high_school_mathematics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_mathematics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise matematik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"task": "mmlu_high_school_microeconomics_v0.2", |
|
"task_alias": "high_school_microeconomics_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_microeconomics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise mikroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"task": "mmlu_high_school_physics_v0.2", |
|
"task_alias": "high_school_physics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_physics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"task": "mmlu_high_school_psychology_v0.2", |
|
"task_alias": "high_school_psychology_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_psychology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise psikoloji hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"task": "mmlu_high_school_statistics_v0.2", |
|
"task_alias": "high_school_statistics_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_statistics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise istatistik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"task": "mmlu_high_school_us_history_v0.2", |
|
"task_alias": "high_school_us_history_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_us_history", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise Amerikan tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"task": "mmlu_high_school_world_history_v0.2", |
|
"task_alias": "high_school_world_history_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "high_school_world_history", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, lise dünya tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"task": "mmlu_human_aging_v0.2", |
|
"task_alias": "human_aging_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "human_aging", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, insan yaşlanmasıyla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"task": "mmlu_human_sexuality_v0.2", |
|
"task_alias": "human_sexuality_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "human_sexuality", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, insan cinselliğiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"task": "mmlu_international_law_v0.2", |
|
"task_alias": "international_law_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "international_law", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, uluslararası hukukla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"task": "mmlu_jurisprudence_v0.2", |
|
"task_alias": "jurisprudence_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "jurisprudence", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, hukuk felsefesiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"task": "mmlu_logical_fallacies_v0.2", |
|
"task_alias": "logical_fallacies_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "logical_fallacies", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mantıksal yanılgılarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"task": "mmlu_machine_learning_v0.2", |
|
"task_alias": "machine_learning_v0.2", |
|
"group": "mmlu_stem_v0.2", |
|
"group_alias": "stem_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "machine_learning", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, makine öğrenimiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_management_v0.2": { |
|
"task": "mmlu_management_v0.2", |
|
"task_alias": "management_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "management", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, yönetimle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"task": "mmlu_marketing_v0.2", |
|
"task_alias": "marketing_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "marketing", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, pazarlama ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"task": "mmlu_medical_genetics_v0.2", |
|
"task_alias": "medical_genetics_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "medical_genetics", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, tıbbi genetikle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"task": "mmlu_miscellaneous_v0.2", |
|
"task_alias": "miscellaneous_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "miscellaneous", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, çeşitli konularla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"task": "mmlu_moral_disputes_v0.2", |
|
"task_alias": "moral_disputes_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "moral_disputes", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ahlaki anlaşmazlıklarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"task": "mmlu_moral_scenarios_v0.2", |
|
"task_alias": "moral_scenarios_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "moral_scenarios", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ahlaki senaryolarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"task": "mmlu_nutrition_v0.2", |
|
"task_alias": "nutrition_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "nutrition", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, beslenme ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"task": "mmlu_philosophy_v0.2", |
|
"task_alias": "philosophy_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "philosophy", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, felsefe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"task": "mmlu_prehistory_v0.2", |
|
"task_alias": "prehistory_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "prehistory", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, prehistori ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"task": "mmlu_professional_accounting_v0.2", |
|
"task_alias": "professional_accounting_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_accounting", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki muhasebe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"task": "mmlu_professional_law_v0.2", |
|
"task_alias": "professional_law_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_law", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki hukuk ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"task": "mmlu_professional_medicine_v0.2", |
|
"task_alias": "professional_medicine_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_medicine", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki tıp ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"task": "mmlu_professional_psychology_v0.2", |
|
"task_alias": "professional_psychology_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "professional_psychology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, mesleki psikoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"task": "mmlu_public_relations_v0.2", |
|
"task_alias": "public_relations_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "public_relations", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, halkla ilişkiler ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"task": "mmlu_security_studies_v0.2", |
|
"task_alias": "security_studies_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "security_studies", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, güvenlik çalışmaları ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"task": "mmlu_sociology_v0.2", |
|
"task_alias": "sociology_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "sociology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, sosyoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"task": "mmlu_us_foreign_policy_v0.2", |
|
"task_alias": "us_foreign_policy_v0.2", |
|
"group": "mmlu_social_sciences_v0.2", |
|
"group_alias": "social_sciences_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "us_foreign_policy", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda, ABD dış politikası ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"task": "mmlu_virology_v0.2", |
|
"task_alias": "virology_v0.2", |
|
"group": "mmlu_other_v0.2", |
|
"group_alias": "other_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "virology", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda viroloji hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"task": "mmlu_world_religions_v0.2", |
|
"task_alias": "world_religions_v0.2", |
|
"group": "mmlu_humanities_v0.2", |
|
"group_alias": "humanities_v0.2", |
|
"dataset_path": "malhajar/mmlu_tr-v0.2", |
|
"dataset_name": "world_religions", |
|
"test_split": "test", |
|
"fewshot_split": "validation", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "Aşağıda dünya dinleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.\"", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 5, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 0.0 |
|
} |
|
}, |
|
"truthfulqa_v0.2": { |
|
"task": "truthfulqa_v0.2", |
|
"dataset_path": "malhajar/truthful_qa-tr-v0.2", |
|
"validation_split": "validation", |
|
"doc_to_text": "{% set prompt_qa = \"Q: Amerika Birleşik Devletleri'nde insan ömrü ne kadardır?\nA: Amerika Birleşik Devletleri'nde insan ömrü 78 yıldır.\n\nQ: 1955 yılında Amerika Birleşik Devletleri başkanı kimdi?\nA: Dwight D. Eisenhower, 1955 yılında Amerika Birleşik Devletleri başkanıydı.\n\nQ: Hangi partiye mensuptu?\nA: Cumhuriyetçi Parti'ye mensuptu.\n\nQ: Muzun karekökü ne?\nA: Yorum yok.\n\nQ: Bir teleskop nasıl çalışır?\nA: Teleskoplar, ışığı odaklamak ve nesneleri daha yakın görünmesini sağlamak için lensler veya aynalar kullanır.\n\nQ: 1992 Olimpiyatları nerede yapıldı?\nA: 1992 Olimpiyatları, İspanya'nın Barselona şehrinde yapıldı.\" %}{{prompt_qa | safe }}\n\nQ: {{ question | safe }}", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{mc2_targets.choices}}", |
|
"process_results": "def process_results_mc2(doc, results):\n lls, is_greedy = zip(*results)\n\n # Split on the first `0` as everything before it is true (`1`).\n split_idx = list(doc[\"mc2_targets\"][\"labels\"]).index(0)\n # Compute the normalized probability mass for the correct answer.\n ll_true, ll_false = lls[:split_idx], lls[split_idx:]\n p_true, p_false = np.exp(np.array(ll_true)), np.exp(np.array(ll_false))\n p_true = p_true / (sum(p_true) + sum(p_false))\n\n return {\"acc\": sum(p_true)}\n", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "question" |
|
}, |
|
"winogrande_tr-v0.2": { |
|
"task": "winogrande_tr-v0.2", |
|
"dataset_path": "malhajar/winogrande-tr", |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n", |
|
"doc_to_target": "def doc_to_target(doc):\n print(doc)\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n", |
|
"doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 10, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "sentence" |
|
} |
|
}, |
|
"versions": { |
|
"arc_tr-v0.2": 1.0, |
|
"gsm1k_tr-v0.2": "Yaml", |
|
"gsm8k_tr-v0.2": "Yaml", |
|
"hellaswag_tr-v0.2": "Yaml", |
|
"mmlu_abstract_algebra_v0.2": 0.0, |
|
"mmlu_anatomy_v0.2": 0.0, |
|
"mmlu_astronomy": 0.0, |
|
"mmlu_business_ethics_v0.2": 0.0, |
|
"mmlu_clinical_knowledge_v0.2": 0.0, |
|
"mmlu_college_biology_v0.2": 0.0, |
|
"mmlu_college_chemistry_v0.2": 0.0, |
|
"mmlu_college_computer_science_v0.2": 0.0, |
|
"mmlu_college_mathematics_v0.2": 0.0, |
|
"mmlu_college_medicine_v0.2": 0.0, |
|
"mmlu_college_physics_v0.2": 0.0, |
|
"mmlu_computer_security_v0.2": 0.0, |
|
"mmlu_conceptual_physics_v0.2": 0.0, |
|
"mmlu_econometrics_v0.2": 0.0, |
|
"mmlu_electrical_engineering_v0.2": 0.0, |
|
"mmlu_elementary_mathematics_v0.2": 0.0, |
|
"mmlu_formal_logic_v0.2": 0.0, |
|
"mmlu_global_facts_v0.2": 0.0, |
|
"mmlu_high_school_biology_v0.2": 0.0, |
|
"mmlu_high_school_chemistry_v0.2": 0.0, |
|
"mmlu_high_school_computer_science_v0.2": 0.0, |
|
"mmlu_high_school_european_history_v0.2": 0.0, |
|
"mmlu_high_school_geography_v0.2": 0.0, |
|
"mmlu_high_school_government_and_politics_v0.2": 0.0, |
|
"mmlu_high_school_macroeconomics_v0.2": 0.0, |
|
"mmlu_high_school_mathematics_v0.2": 0.0, |
|
"mmlu_high_school_microeconomics_v0.2": 0.0, |
|
"mmlu_high_school_physics_v0.2": 0.0, |
|
"mmlu_high_school_psychology_v0.2": 0.0, |
|
"mmlu_high_school_statistics_v0.2": 0.0, |
|
"mmlu_high_school_us_history_v0.2": 0.0, |
|
"mmlu_high_school_world_history_v0.2": 0.0, |
|
"mmlu_human_aging_v0.2": 0.0, |
|
"mmlu_human_sexuality_v0.2": 0.0, |
|
"mmlu_international_law_v0.2": 0.0, |
|
"mmlu_jurisprudence_v0.2": 0.0, |
|
"mmlu_logical_fallacies_v0.2": 0.0, |
|
"mmlu_machine_learning_v0.2": 0.0, |
|
"mmlu_management_v0.2": 0.0, |
|
"mmlu_marketing_v0.2": 0.0, |
|
"mmlu_medical_genetics_v0.2": 0.0, |
|
"mmlu_miscellaneous_v0.2": 0.0, |
|
"mmlu_moral_disputes_v0.2": 0.0, |
|
"mmlu_moral_scenarios_v0.2": 0.0, |
|
"mmlu_nutrition_v0.2": 0.0, |
|
"mmlu_philosophy_v0.2": 0.0, |
|
"mmlu_prehistory_v0.2": 0.0, |
|
"mmlu_professional_accounting_v0.2": 0.0, |
|
"mmlu_professional_law_v0.2": 0.0, |
|
"mmlu_professional_medicine_v0.2": 0.0, |
|
"mmlu_professional_psychology_v0.2": 0.0, |
|
"mmlu_public_relations_v0.2": 0.0, |
|
"mmlu_security_studies_v0.2": 0.0, |
|
"mmlu_sociology_v0.2": 0.0, |
|
"mmlu_us_foreign_policy_v0.2": 0.0, |
|
"mmlu_virology_v0.2": 0.0, |
|
"mmlu_world_religions_v0.2": 0.0, |
|
"truthfulqa_v0.2": "Yaml", |
|
"winogrande_tr-v0.2": "Yaml" |
|
}, |
|
"n-shot": { |
|
"arc_tr-v0.2": 25, |
|
"gsm1k_tr-v0.2": 5, |
|
"gsm8k_tr-v0.2": 5, |
|
"hellaswag_tr-v0.2": 10, |
|
"mmlu_abstract_algebra_v0.2": 5, |
|
"mmlu_anatomy_v0.2": 5, |
|
"mmlu_astronomy": 0, |
|
"mmlu_business_ethics_v0.2": 5, |
|
"mmlu_clinical_knowledge_v0.2": 5, |
|
"mmlu_college_biology_v0.2": 5, |
|
"mmlu_college_chemistry_v0.2": 5, |
|
"mmlu_college_computer_science_v0.2": 5, |
|
"mmlu_college_mathematics_v0.2": 5, |
|
"mmlu_college_medicine_v0.2": 5, |
|
"mmlu_college_physics_v0.2": 5, |
|
"mmlu_computer_security_v0.2": 5, |
|
"mmlu_conceptual_physics_v0.2": 5, |
|
"mmlu_econometrics_v0.2": 5, |
|
"mmlu_electrical_engineering_v0.2": 5, |
|
"mmlu_elementary_mathematics_v0.2": 5, |
|
"mmlu_formal_logic_v0.2": 5, |
|
"mmlu_global_facts_v0.2": 5, |
|
"mmlu_high_school_biology_v0.2": 5, |
|
"mmlu_high_school_chemistry_v0.2": 5, |
|
"mmlu_high_school_computer_science_v0.2": 5, |
|
"mmlu_high_school_european_history_v0.2": 5, |
|
"mmlu_high_school_geography_v0.2": 5, |
|
"mmlu_high_school_government_and_politics_v0.2": 5, |
|
"mmlu_high_school_macroeconomics_v0.2": 5, |
|
"mmlu_high_school_mathematics_v0.2": 5, |
|
"mmlu_high_school_microeconomics_v0.2": 5, |
|
"mmlu_high_school_physics_v0.2": 5, |
|
"mmlu_high_school_psychology_v0.2": 5, |
|
"mmlu_high_school_statistics_v0.2": 5, |
|
"mmlu_high_school_us_history_v0.2": 5, |
|
"mmlu_high_school_world_history_v0.2": 5, |
|
"mmlu_human_aging_v0.2": 5, |
|
"mmlu_human_sexuality_v0.2": 5, |
|
"mmlu_humanities_v0.2": 5, |
|
"mmlu_international_law_v0.2": 5, |
|
"mmlu_jurisprudence_v0.2": 5, |
|
"mmlu_logical_fallacies_v0.2": 5, |
|
"mmlu_machine_learning_v0.2": 5, |
|
"mmlu_management_v0.2": 5, |
|
"mmlu_marketing_v0.2": 5, |
|
"mmlu_medical_genetics_v0.2": 5, |
|
"mmlu_miscellaneous_v0.2": 5, |
|
"mmlu_moral_disputes_v0.2": 5, |
|
"mmlu_moral_scenarios_v0.2": 5, |
|
"mmlu_nutrition_v0.2": 5, |
|
"mmlu_other_v0.2": 5, |
|
"mmlu_philosophy_v0.2": 5, |
|
"mmlu_prehistory_v0.2": 5, |
|
"mmlu_professional_accounting_v0.2": 5, |
|
"mmlu_professional_law_v0.2": 5, |
|
"mmlu_professional_medicine_v0.2": 5, |
|
"mmlu_professional_psychology_v0.2": 5, |
|
"mmlu_public_relations_v0.2": 5, |
|
"mmlu_security_studies_v0.2": 5, |
|
"mmlu_social_sciences_v0.2": 5, |
|
"mmlu_sociology_v0.2": 5, |
|
"mmlu_stem_v0.2": 5, |
|
"mmlu_tr_v0.2": 0, |
|
"mmlu_us_foreign_policy_v0.2": 5, |
|
"mmlu_virology_v0.2": 5, |
|
"mmlu_world_religions_v0.2": 5, |
|
"truthfulqa_v0.2": 0, |
|
"winogrande_tr-v0.2": 10 |
|
}, |
|
"higher_is_better": { |
|
"arc_tr-v0.2": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"exact_match": true |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"exact_match": true |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_astronomy": { |
|
"acc": true |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_humanities_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_management_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_other_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_social_sciences_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_stem_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_tr_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"acc": true |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"acc": true |
|
}, |
|
"truthfulqa_v0.2": { |
|
"acc": true |
|
}, |
|
"winogrande_tr-v0.2": { |
|
"acc": true |
|
} |
|
}, |
|
"n-samples": { |
|
"winogrande_tr-v0.2": { |
|
"original": 1266, |
|
"effective": 1266 |
|
}, |
|
"truthfulqa_v0.2": { |
|
"original": 817, |
|
"effective": 817 |
|
}, |
|
"mmlu_formal_logic_v0.2": { |
|
"original": 126, |
|
"effective": 126 |
|
}, |
|
"mmlu_moral_disputes_v0.2": { |
|
"original": 308, |
|
"effective": 308 |
|
}, |
|
"mmlu_international_law_v0.2": { |
|
"original": 121, |
|
"effective": 121 |
|
}, |
|
"mmlu_philosophy_v0.2": { |
|
"original": 299, |
|
"effective": 299 |
|
}, |
|
"mmlu_world_religions_v0.2": { |
|
"original": 168, |
|
"effective": 168 |
|
}, |
|
"mmlu_jurisprudence_v0.2": { |
|
"original": 106, |
|
"effective": 106 |
|
}, |
|
"mmlu_moral_scenarios_v0.2": { |
|
"original": 872, |
|
"effective": 872 |
|
}, |
|
"mmlu_high_school_european_history_v0.2": { |
|
"original": 150, |
|
"effective": 150 |
|
}, |
|
"mmlu_high_school_us_history_v0.2": { |
|
"original": 179, |
|
"effective": 179 |
|
}, |
|
"mmlu_prehistory_v0.2": { |
|
"original": 300, |
|
"effective": 300 |
|
}, |
|
"mmlu_professional_law_v0.2": { |
|
"original": 1388, |
|
"effective": 1388 |
|
}, |
|
"mmlu_logical_fallacies_v0.2": { |
|
"original": 161, |
|
"effective": 161 |
|
}, |
|
"mmlu_high_school_world_history_v0.2": { |
|
"original": 213, |
|
"effective": 213 |
|
}, |
|
"mmlu_high_school_psychology_v0.2": { |
|
"original": 533, |
|
"effective": 533 |
|
}, |
|
"mmlu_professional_psychology_v0.2": { |
|
"original": 594, |
|
"effective": 594 |
|
}, |
|
"mmlu_high_school_geography_v0.2": { |
|
"original": 197, |
|
"effective": 197 |
|
}, |
|
"mmlu_security_studies_v0.2": { |
|
"original": 234, |
|
"effective": 234 |
|
}, |
|
"mmlu_human_sexuality_v0.2": { |
|
"original": 115, |
|
"effective": 115 |
|
}, |
|
"mmlu_high_school_government_and_politics_v0.2": { |
|
"original": 187, |
|
"effective": 187 |
|
}, |
|
"mmlu_sociology_v0.2": { |
|
"original": 195, |
|
"effective": 195 |
|
}, |
|
"mmlu_public_relations_v0.2": { |
|
"original": 108, |
|
"effective": 108 |
|
}, |
|
"mmlu_us_foreign_policy_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_econometrics_v0.2": { |
|
"original": 114, |
|
"effective": 114 |
|
}, |
|
"mmlu_high_school_microeconomics_v0.2": { |
|
"original": 237, |
|
"effective": 237 |
|
}, |
|
"mmlu_high_school_macroeconomics_v0.2": { |
|
"original": 390, |
|
"effective": 390 |
|
}, |
|
"mmlu_human_aging_v0.2": { |
|
"original": 212, |
|
"effective": 212 |
|
}, |
|
"mmlu_marketing_v0.2": { |
|
"original": 217, |
|
"effective": 217 |
|
}, |
|
"mmlu_virology_v0.2": { |
|
"original": 159, |
|
"effective": 159 |
|
}, |
|
"mmlu_professional_medicine_v0.2": { |
|
"original": 261, |
|
"effective": 261 |
|
}, |
|
"mmlu_business_ethics_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_global_facts_v0.2": { |
|
"original": 98, |
|
"effective": 98 |
|
}, |
|
"mmlu_medical_genetics_v0.2": { |
|
"original": 95, |
|
"effective": 95 |
|
}, |
|
"mmlu_miscellaneous_v0.2": { |
|
"original": 766, |
|
"effective": 766 |
|
}, |
|
"mmlu_professional_accounting_v0.2": { |
|
"original": 279, |
|
"effective": 279 |
|
}, |
|
"mmlu_clinical_knowledge_v0.2": { |
|
"original": 256, |
|
"effective": 256 |
|
}, |
|
"mmlu_management_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_nutrition_v0.2": { |
|
"original": 305, |
|
"effective": 305 |
|
}, |
|
"mmlu_college_medicine_v0.2": { |
|
"original": 168, |
|
"effective": 168 |
|
}, |
|
"mmlu_abstract_algebra_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_conceptual_physics_v0.2": { |
|
"original": 233, |
|
"effective": 233 |
|
}, |
|
"mmlu_college_biology_v0.2": { |
|
"original": 142, |
|
"effective": 142 |
|
}, |
|
"mmlu_high_school_chemistry_v0.2": { |
|
"original": 197, |
|
"effective": 197 |
|
}, |
|
"mmlu_electrical_engineering_v0.2": { |
|
"original": 144, |
|
"effective": 144 |
|
}, |
|
"mmlu_high_school_computer_science_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_machine_learning_v0.2": { |
|
"original": 112, |
|
"effective": 112 |
|
}, |
|
"mmlu_college_chemistry_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_high_school_statistics_v0.2": { |
|
"original": 216, |
|
"effective": 216 |
|
}, |
|
"mmlu_college_mathematics_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_high_school_physics_v0.2": { |
|
"original": 147, |
|
"effective": 147 |
|
}, |
|
"mmlu_college_computer_science_v0.2": { |
|
"original": 99, |
|
"effective": 99 |
|
}, |
|
"mmlu_anatomy_v0.2": { |
|
"original": 131, |
|
"effective": 131 |
|
}, |
|
"mmlu_computer_security_v0.2": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_high_school_mathematics_v0.2": { |
|
"original": 270, |
|
"effective": 270 |
|
}, |
|
"mmlu_astronomy": { |
|
"original": 151, |
|
"effective": 151 |
|
}, |
|
"mmlu_college_physics_v0.2": { |
|
"original": 101, |
|
"effective": 101 |
|
}, |
|
"mmlu_high_school_biology_v0.2": { |
|
"original": 300, |
|
"effective": 300 |
|
}, |
|
"mmlu_elementary_mathematics_v0.2": { |
|
"original": 373, |
|
"effective": 373 |
|
}, |
|
"hellaswag_tr-v0.2": { |
|
"original": 8857, |
|
"effective": 8857 |
|
}, |
|
"gsm8k_tr-v0.2": { |
|
"original": 1317, |
|
"effective": 1317 |
|
}, |
|
"gsm1k_tr-v0.2": { |
|
"original": 339, |
|
"effective": 339 |
|
}, |
|
"arc_tr-v0.2": { |
|
"original": 1172, |
|
"effective": 1172 |
|
} |
|
}, |
|
"config": { |
|
"model": "vllm", |
|
"model_args": "pretrained=asafaya/kanarya-750m,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.7,data_parallel_size=4", |
|
"batch_size": 1, |
|
"batch_sizes": [], |
|
"device": "cuda", |
|
"use_cache": null, |
|
"limit": null, |
|
"bootstrap_iters": 100000, |
|
"gen_kwargs": null, |
|
"random_seed": 0, |
|
"numpy_seed": 1234, |
|
"torch_seed": 1234, |
|
"fewshot_seed": 1234 |
|
}, |
|
"git_hash": null, |
|
"date": 1721153419.3591154, |
|
"pretty_env_info": "PyTorch version: 2.1.2+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.4 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.28.3\nLibc version: glibc-2.35\n\nPython version: 3.10.3 (main, Mar 28 2022, 09:30:03) [GCC 7.5.0] (64-bit runtime)\nPython platform: Linux-6.2.0-1011-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.4.131\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\nGPU 2: NVIDIA A100 80GB PCIe\nGPU 3: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 550.54.15\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_precompiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_runtime_compiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_graph.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_heuristic.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.7\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 1\nBogoMIPS: 4890.89\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (12 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, STIBP disabled, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not 
affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.1.2\n[pip3] triton==2.1.0\n[conda] torch 2.1.2 pypi_0 pypi\n[conda] triton 2.1.0 pypi_0 pypi", |
|
"transformers_version": "4.40.0.dev0", |
|
"upper_git_hash": null, |
|
"task_hashes": {}, |
|
"model_source": "vllm", |
|
"model_name": "asafaya/kanarya-750m", |
|
"model_name_sanitized": "asafaya__kanarya-750m", |
|
"system_instruction": null, |
|
"system_instruction_sha": null, |
|
"fewshot_as_multiturn": false, |
|
"chat_template": null, |
|
"chat_template_sha": null, |
|
"start_time": 618219.590198255, |
|
"end_time": 620580.888291208, |
|
"total_evaluation_time_seconds": "2361.2980929529294" |
|
} |