results/cognitivecomputations/dolphin-2.9.3-mistral-7B-32k/results_2024-06-30T13-11-04.517222.json
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 563.331875162,
"end_time": 28829.084152535,
"total_evaluation_time_secondes": "28265.752277373",
"model_name": "cognitivecomputations/dolphin-2.9.3-mistral-7B-32k",
"model_sha": "4f4273ee8e7930dd64e2c6121c79d12546b883e2",
"model_dtype": "torch.bfloat16",
"model_size": "14.0 GB",
"config": null
},
"results": { | |
"community|acva:Algeria|0": { | |
"acc_norm": 0.5230769230769231, | |
"acc_norm_stderr": 0.0358596530894741 | |
}, | |
"community|acva:Ancient_Egypt|0": { | |
"acc_norm": 0.06349206349206349, | |
"acc_norm_stderr": 0.01376102006171054 | |
}, | |
"community|acva:Arab_Empire|0": { | |
"acc_norm": 0.3132075471698113, | |
"acc_norm_stderr": 0.02854479331905533 | |
}, | |
"community|acva:Arabic_Architecture|0": { | |
"acc_norm": 0.558974358974359, | |
"acc_norm_stderr": 0.035647329318535786 | |
}, | |
"community|acva:Arabic_Art|0": { | |
"acc_norm": 0.35384615384615387, | |
"acc_norm_stderr": 0.03433004254147036 | |
}, | |
"community|acva:Arabic_Astronomy|0": { | |
"acc_norm": 0.4666666666666667, | |
"acc_norm_stderr": 0.03581804596782233 | |
}, | |
"community|acva:Arabic_Calligraphy|0": { | |
"acc_norm": 0.5019607843137255, | |
"acc_norm_stderr": 0.03137254901960784 | |
}, | |
"community|acva:Arabic_Ceremony|0": { | |
"acc_norm": 0.5837837837837838, | |
"acc_norm_stderr": 0.03633930360945234 | |
}, | |
"community|acva:Arabic_Clothing|0": { | |
"acc_norm": 0.5230769230769231, | |
"acc_norm_stderr": 0.03585965308947409 | |
}, | |
"community|acva:Arabic_Culture|0": { | |
"acc_norm": 0.2358974358974359, | |
"acc_norm_stderr": 0.030481516761721537 | |
}, | |
"community|acva:Arabic_Food|0": { | |
"acc_norm": 0.5538461538461539, | |
"acc_norm_stderr": 0.03568913546569233 | |
}, | |
"community|acva:Arabic_Funeral|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.050529115263991134 | |
}, | |
"community|acva:Arabic_Geography|0": { | |
"acc_norm": 0.593103448275862, | |
"acc_norm_stderr": 0.04093793981266237 | |
}, | |
"community|acva:Arabic_History|0": { | |
"acc_norm": 0.30256410256410254, | |
"acc_norm_stderr": 0.03298070870085619 | |
}, | |
"community|acva:Arabic_Language_Origin|0": { | |
"acc_norm": 0.5789473684210527, | |
"acc_norm_stderr": 0.05092415229967328 | |
}, | |
"community|acva:Arabic_Literature|0": { | |
"acc_norm": 0.4482758620689655, | |
"acc_norm_stderr": 0.041443118108781526 | |
}, | |
"community|acva:Arabic_Math|0": { | |
"acc_norm": 0.3128205128205128, | |
"acc_norm_stderr": 0.03328755065724854 | |
}, | |
"community|acva:Arabic_Medicine|0": { | |
"acc_norm": 0.4689655172413793, | |
"acc_norm_stderr": 0.04158632762097828 | |
}, | |
"community|acva:Arabic_Music|0": { | |
"acc_norm": 0.28776978417266186, | |
"acc_norm_stderr": 0.03853836179233389 | |
}, | |
"community|acva:Arabic_Ornament|0": { | |
"acc_norm": 0.5692307692307692, | |
"acc_norm_stderr": 0.035552132520587615 | |
}, | |
"community|acva:Arabic_Philosophy|0": { | |
"acc_norm": 0.5793103448275863, | |
"acc_norm_stderr": 0.0411391498118926 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry|0": { | |
"acc_norm": 0.5333333333333333, | |
"acc_norm_stderr": 0.035818045967822315 | |
}, | |
"community|acva:Arabic_Wedding|0": { | |
"acc_norm": 0.4153846153846154, | |
"acc_norm_stderr": 0.03538013280575029 | |
}, | |
"community|acva:Bahrain|0": { | |
"acc_norm": 0.28888888888888886, | |
"acc_norm_stderr": 0.06832943242540508 | |
}, | |
"community|acva:Comoros|0": { | |
"acc_norm": 0.4222222222222222, | |
"acc_norm_stderr": 0.07446027270295806 | |
}, | |
"community|acva:Egypt_modern|0": { | |
"acc_norm": 0.3473684210526316, | |
"acc_norm_stderr": 0.04910947400776658 | |
}, | |
"community|acva:InfluenceFromAncientEgypt|0": { | |
"acc_norm": 0.6, | |
"acc_norm_stderr": 0.035172622905632896 | |
}, | |
"community|acva:InfluenceFromByzantium|0": { | |
"acc_norm": 0.7172413793103448, | |
"acc_norm_stderr": 0.03752833958003337 | |
}, | |
"community|acva:InfluenceFromChina|0": { | |
"acc_norm": 0.26666666666666666, | |
"acc_norm_stderr": 0.0317493043641267 | |
}, | |
"community|acva:InfluenceFromGreece|0": { | |
"acc_norm": 0.6307692307692307, | |
"acc_norm_stderr": 0.034648411418637566 | |
}, | |
"community|acva:InfluenceFromIslam|0": { | |
"acc_norm": 0.3103448275862069, | |
"acc_norm_stderr": 0.03855289616378948 | |
}, | |
"community|acva:InfluenceFromPersia|0": { | |
"acc_norm": 0.7028571428571428, | |
"acc_norm_stderr": 0.03464507889884372 | |
}, | |
"community|acva:InfluenceFromRome|0": { | |
"acc_norm": 0.5794871794871795, | |
"acc_norm_stderr": 0.03544138389303483 | |
}, | |
"community|acva:Iraq|0": { | |
"acc_norm": 0.5176470588235295, | |
"acc_norm_stderr": 0.05452048340661895 | |
}, | |
"community|acva:Islam_Education|0": { | |
"acc_norm": 0.4564102564102564, | |
"acc_norm_stderr": 0.03576123096991215 | |
}, | |
"community|acva:Islam_branches_and_schools|0": { | |
"acc_norm": 0.4342857142857143, | |
"acc_norm_stderr": 0.037576101528126626 | |
}, | |
"community|acva:Islamic_law_system|0": { | |
"acc_norm": 0.4256410256410256, | |
"acc_norm_stderr": 0.035498710803677086 | |
}, | |
"community|acva:Jordan|0": { | |
"acc_norm": 0.3333333333333333, | |
"acc_norm_stderr": 0.07106690545187012 | |
}, | |
"community|acva:Kuwait|0": { | |
"acc_norm": 0.24444444444444444, | |
"acc_norm_stderr": 0.06478835438717001 | |
}, | |
"community|acva:Lebanon|0": { | |
"acc_norm": 0.24444444444444444, | |
"acc_norm_stderr": 0.06478835438716998 | |
}, | |
"community|acva:Libya|0": { | |
"acc_norm": 0.4444444444444444, | |
"acc_norm_stderr": 0.07491109582924914 | |
}, | |
"community|acva:Mauritania|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.07385489458759965 | |
}, | |
"community|acva:Mesopotamia_civilization|0": { | |
"acc_norm": 0.5419354838709678, | |
"acc_norm_stderr": 0.040149187308210765 | |
}, | |
"community|acva:Morocco|0": { | |
"acc_norm": 0.26666666666666666, | |
"acc_norm_stderr": 0.06666666666666664 | |
}, | |
"community|acva:Oman|0": { | |
"acc_norm": 0.2, | |
"acc_norm_stderr": 0.06030226891555273 | |
}, | |
"community|acva:Palestine|0": { | |
"acc_norm": 0.2823529411764706, | |
"acc_norm_stderr": 0.049114753600680516 | |
}, | |
"community|acva:Qatar|0": { | |
"acc_norm": 0.4222222222222222, | |
"acc_norm_stderr": 0.07446027270295806 | |
}, | |
"community|acva:Saudi_Arabia|0": { | |
"acc_norm": 0.35384615384615387, | |
"acc_norm_stderr": 0.03433004254147036 | |
}, | |
"community|acva:Somalia|0": { | |
"acc_norm": 0.4666666666666667, | |
"acc_norm_stderr": 0.0752101433090355 | |
}, | |
"community|acva:Sudan|0": { | |
"acc_norm": 0.4222222222222222, | |
"acc_norm_stderr": 0.07446027270295807 | |
}, | |
"community|acva:Syria|0": { | |
"acc_norm": 0.35555555555555557, | |
"acc_norm_stderr": 0.07216392363431012 | |
}, | |
"community|acva:Tunisia|0": { | |
"acc_norm": 0.28888888888888886, | |
"acc_norm_stderr": 0.06832943242540507 | |
}, | |
"community|acva:United_Arab_Emirates|0": { | |
"acc_norm": 0.43529411764705883, | |
"acc_norm_stderr": 0.05409572080481032 | |
}, | |
"community|acva:Yemen|0": { | |
"acc_norm": 0.2, | |
"acc_norm_stderr": 0.13333333333333333 | |
}, | |
"community|acva:communication|0": { | |
"acc_norm": 0.43131868131868134, | |
"acc_norm_stderr": 0.02599443023962308 | |
}, | |
"community|acva:computer_and_phone|0": { | |
"acc_norm": 0.4542372881355932, | |
"acc_norm_stderr": 0.029038197586234566 | |
}, | |
"community|acva:daily_life|0": { | |
"acc_norm": 0.18991097922848665, | |
"acc_norm_stderr": 0.021397930418183976 | |
}, | |
"community|acva:entertainment|0": { | |
"acc_norm": 0.23728813559322035, | |
"acc_norm_stderr": 0.024811018803776317 | |
}, | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"acc_norm": 0.28186714542190305, | |
"acc_norm_stderr": 0.019080389276755812 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"acc_norm": 0.2869323447636701, | |
"acc_norm_stderr": 0.006158850345423396 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"acc_norm": 0.3307262569832402, | |
"acc_norm_stderr": 0.01573502625896612 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"acc_norm": 0.52, | |
"acc_norm_stderr": 0.05807730170189531 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"acc_norm": 0.54, | |
"acc_norm_stderr": 0.04083030852148599 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.040134003725439044 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"acc_norm": 0.7395872420262665, | |
"acc_norm_stderr": 0.004908443112905467 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"acc_norm": 0.42201834862385323, | |
"acc_norm_stderr": 0.006379172165123156 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"acc_norm": 0.40348837209302324, | |
"acc_norm_stderr": 0.011832786548799348 | |
}, | |
"community|arabic_exams|0": { | |
"acc_norm": 0.30353817504655495, | |
"acc_norm_stderr": 0.019859710928513158 | |
}, | |
"community|arabic_mmlu:abstract_algebra|0": { | |
"acc_norm": 0.31, | |
"acc_norm_stderr": 0.04648231987117316 | |
}, | |
"community|arabic_mmlu:anatomy|0": { | |
"acc_norm": 0.35555555555555557, | |
"acc_norm_stderr": 0.04135176749720385 | |
}, | |
"community|arabic_mmlu:astronomy|0": { | |
"acc_norm": 0.34210526315789475, | |
"acc_norm_stderr": 0.03860731599316092 | |
}, | |
"community|arabic_mmlu:business_ethics|0": { | |
"acc_norm": 0.38, | |
"acc_norm_stderr": 0.04878317312145633 | |
}, | |
"community|arabic_mmlu:clinical_knowledge|0": { | |
"acc_norm": 0.3584905660377358, | |
"acc_norm_stderr": 0.02951470358398175 | |
}, | |
"community|arabic_mmlu:college_biology|0": { | |
"acc_norm": 0.2847222222222222, | |
"acc_norm_stderr": 0.03773809990686935 | |
}, | |
"community|arabic_mmlu:college_chemistry|0": { | |
"acc_norm": 0.29, | |
"acc_norm_stderr": 0.04560480215720684 | |
}, | |
"community|arabic_mmlu:college_computer_science|0": { | |
"acc_norm": 0.31, | |
"acc_norm_stderr": 0.04648231987117316 | |
}, | |
"community|arabic_mmlu:college_mathematics|0": { | |
"acc_norm": 0.29, | |
"acc_norm_stderr": 0.04560480215720684 | |
}, | |
"community|arabic_mmlu:college_medicine|0": { | |
"acc_norm": 0.2832369942196532, | |
"acc_norm_stderr": 0.03435568056047873 | |
}, | |
"community|arabic_mmlu:college_physics|0": { | |
"acc_norm": 0.22549019607843138, | |
"acc_norm_stderr": 0.041583075330832865 | |
}, | |
"community|arabic_mmlu:computer_security|0": { | |
"acc_norm": 0.42, | |
"acc_norm_stderr": 0.049604496374885836 | |
}, | |
"community|arabic_mmlu:conceptual_physics|0": { | |
"acc_norm": 0.28085106382978725, | |
"acc_norm_stderr": 0.02937917046412481 | |
}, | |
"community|arabic_mmlu:econometrics|0": { | |
"acc_norm": 0.2894736842105263, | |
"acc_norm_stderr": 0.04266339443159394 | |
}, | |
"community|arabic_mmlu:electrical_engineering|0": { | |
"acc_norm": 0.3310344827586207, | |
"acc_norm_stderr": 0.03921545312467122 | |
}, | |
"community|arabic_mmlu:elementary_mathematics|0": { | |
"acc_norm": 0.31746031746031744, | |
"acc_norm_stderr": 0.02397386199899208 | |
}, | |
"community|arabic_mmlu:formal_logic|0": { | |
"acc_norm": 0.23809523809523808, | |
"acc_norm_stderr": 0.03809523809523811 | |
}, | |
"community|arabic_mmlu:global_facts|0": { | |
"acc_norm": 0.29, | |
"acc_norm_stderr": 0.045604802157206845 | |
}, | |
"community|arabic_mmlu:high_school_biology|0": { | |
"acc_norm": 0.34516129032258064, | |
"acc_norm_stderr": 0.027045746573534323 | |
}, | |
"community|arabic_mmlu:high_school_chemistry|0": { | |
"acc_norm": 0.3694581280788177, | |
"acc_norm_stderr": 0.03395970381998575 | |
}, | |
"community|arabic_mmlu:high_school_computer_science|0": { | |
"acc_norm": 0.36, | |
"acc_norm_stderr": 0.04824181513244218 | |
}, | |
"community|arabic_mmlu:high_school_european_history|0": { | |
"acc_norm": 0.23030303030303031, | |
"acc_norm_stderr": 0.0328766675860349 | |
}, | |
"community|arabic_mmlu:high_school_geography|0": { | |
"acc_norm": 0.3333333333333333, | |
"acc_norm_stderr": 0.03358618145732523 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics|0": { | |
"acc_norm": 0.27461139896373055, | |
"acc_norm_stderr": 0.03221024508041153 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics|0": { | |
"acc_norm": 0.31794871794871793, | |
"acc_norm_stderr": 0.02361088430892786 | |
}, | |
"community|arabic_mmlu:high_school_mathematics|0": { | |
"acc_norm": 0.26666666666666666, | |
"acc_norm_stderr": 0.026962424325073835 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics|0": { | |
"acc_norm": 0.3067226890756303, | |
"acc_norm_stderr": 0.029953823891887048 | |
}, | |
"community|arabic_mmlu:high_school_physics|0": { | |
"acc_norm": 0.271523178807947, | |
"acc_norm_stderr": 0.03631329803969653 | |
}, | |
"community|arabic_mmlu:high_school_psychology|0": { | |
"acc_norm": 0.28256880733944956, | |
"acc_norm_stderr": 0.019304243497707152 | |
}, | |
"community|arabic_mmlu:high_school_statistics|0": { | |
"acc_norm": 0.30092592592592593, | |
"acc_norm_stderr": 0.031280390843298825 | |
}, | |
"community|arabic_mmlu:high_school_us_history|0": { | |
"acc_norm": 0.25, | |
"acc_norm_stderr": 0.03039153369274154 | |
}, | |
"community|arabic_mmlu:high_school_world_history|0": { | |
"acc_norm": 0.270042194092827, | |
"acc_norm_stderr": 0.028900721906293426 | |
}, | |
"community|arabic_mmlu:human_aging|0": { | |
"acc_norm": 0.31390134529147984, | |
"acc_norm_stderr": 0.031146796482972465 | |
}, | |
"community|arabic_mmlu:human_sexuality|0": { | |
"acc_norm": 0.2595419847328244, | |
"acc_norm_stderr": 0.03844876139785271 | |
}, | |
"community|arabic_mmlu:international_law|0": { | |
"acc_norm": 0.5289256198347108, | |
"acc_norm_stderr": 0.04556710331269498 | |
}, | |
"community|arabic_mmlu:jurisprudence|0": { | |
"acc_norm": 0.3611111111111111, | |
"acc_norm_stderr": 0.04643454608906274 | |
}, | |
"community|arabic_mmlu:logical_fallacies|0": { | |
"acc_norm": 0.2822085889570552, | |
"acc_norm_stderr": 0.03536117886664742 | |
}, | |
"community|arabic_mmlu:machine_learning|0": { | |
"acc_norm": 0.25892857142857145, | |
"acc_norm_stderr": 0.04157751539865629 | |
}, | |
"community|arabic_mmlu:management|0": { | |
"acc_norm": 0.3300970873786408, | |
"acc_norm_stderr": 0.0465614711001235 | |
}, | |
"community|arabic_mmlu:marketing|0": { | |
"acc_norm": 0.405982905982906, | |
"acc_norm_stderr": 0.03217180182641087 | |
}, | |
"community|arabic_mmlu:medical_genetics|0": { | |
"acc_norm": 0.28, | |
"acc_norm_stderr": 0.045126085985421276 | |
}, | |
"community|arabic_mmlu:miscellaneous|0": { | |
"acc_norm": 0.3665389527458493, | |
"acc_norm_stderr": 0.01723124462679703 | |
}, | |
"community|arabic_mmlu:moral_disputes|0": { | |
"acc_norm": 0.36416184971098264, | |
"acc_norm_stderr": 0.025906632631016124 | |
}, | |
"community|arabic_mmlu:moral_scenarios|0": { | |
"acc_norm": 0.2681564245810056, | |
"acc_norm_stderr": 0.014816119635316996 | |
}, | |
"community|arabic_mmlu:nutrition|0": { | |
"acc_norm": 0.38235294117647056, | |
"acc_norm_stderr": 0.027826109307283686 | |
}, | |
"community|arabic_mmlu:philosophy|0": { | |
"acc_norm": 0.36977491961414793, | |
"acc_norm_stderr": 0.027417996705631 | |
}, | |
"community|arabic_mmlu:prehistory|0": { | |
"acc_norm": 0.30864197530864196, | |
"acc_norm_stderr": 0.025702640260603756 | |
}, | |
"community|arabic_mmlu:professional_accounting|0": { | |
"acc_norm": 0.3262411347517731, | |
"acc_norm_stderr": 0.027968453043563168 | |
}, | |
"community|arabic_mmlu:professional_law|0": { | |
"acc_norm": 0.2685788787483703, | |
"acc_norm_stderr": 0.011320056629121725 | |
}, | |
"community|arabic_mmlu:professional_medicine|0": { | |
"acc_norm": 0.20955882352941177, | |
"acc_norm_stderr": 0.024723110407677055 | |
}, | |
"community|arabic_mmlu:professional_psychology|0": { | |
"acc_norm": 0.3300653594771242, | |
"acc_norm_stderr": 0.019023726160724553 | |
}, | |
"community|arabic_mmlu:public_relations|0": { | |
"acc_norm": 0.3, | |
"acc_norm_stderr": 0.04389311454644286 | |
}, | |
"community|arabic_mmlu:security_studies|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.03136250240935893 | |
}, | |
"community|arabic_mmlu:sociology|0": { | |
"acc_norm": 0.36318407960199006, | |
"acc_norm_stderr": 0.034005985055990146 | |
}, | |
"community|arabic_mmlu:us_foreign_policy|0": { | |
"acc_norm": 0.44, | |
"acc_norm_stderr": 0.04988876515698589 | |
}, | |
"community|arabic_mmlu:virology|0": { | |
"acc_norm": 0.3493975903614458, | |
"acc_norm_stderr": 0.037117251907407514 | |
}, | |
"community|arabic_mmlu:world_religions|0": { | |
"acc_norm": 0.3157894736842105, | |
"acc_norm_stderr": 0.035650796707083106 | |
}, | |
"community|arc_challenge_okapi_ar|0": { | |
"acc_norm": 0.3103448275862069, | |
"acc_norm_stderr": 0.013589282801346027 | |
}, | |
"community|arc_easy_ar|0": { | |
"acc_norm": 0.3286802030456853, | |
"acc_norm_stderr": 0.00966317044032402 | |
}, | |
"community|boolq_ar|0": { | |
"acc_norm": 0.6736196319018405, | |
"acc_norm_stderr": 0.008213479954722913 | |
}, | |
"community|copa_ext_ar|0": { | |
"acc_norm": 0.5222222222222223, | |
"acc_norm_stderr": 0.05294752255076824 | |
}, | |
"community|hellaswag_okapi_ar|0": { | |
"acc_norm": 0.27150801439319594, | |
"acc_norm_stderr": 0.004644286983242181 | |
}, | |
"community|openbook_qa_ext_ar|0": { | |
"acc_norm": 0.4121212121212121, | |
"acc_norm_stderr": 0.02214587808774447 | |
}, | |
"community|piqa_ar|0": { | |
"acc_norm": 0.5537370430987453, | |
"acc_norm_stderr": 0.011614070910827407 | |
}, | |
"community|race_ar|0": { | |
"acc_norm": 0.3655913978494624, | |
"acc_norm_stderr": 0.00686036410927548 | |
}, | |
"community|sciq_ar|0": { | |
"acc_norm": 0.5306532663316583, | |
"acc_norm_stderr": 0.01582920591255727 | |
}, | |
"community|toxigen_ar|0": { | |
"acc_norm": 0.4320855614973262, | |
"acc_norm_stderr": 0.01620887578524445 | |
}, | |
"lighteval|xstory_cloze:ar|0": { | |
"acc": 0.5314361350099271, | |
"acc_stderr": 0.012841668760976905 | |
}, | |
"community|acva:_average|0": { | |
"acc_norm": 0.41521443338261005, | |
"acc_norm_stderr": 0.046346219315714254 | |
}, | |
"community|alghafa:_average|0": { | |
"acc_norm": 0.436068856656884, | |
"acc_norm_stderr": 0.022570697961865963 | |
}, | |
"community|arabic_mmlu:_average|0": { | |
"acc_norm": 0.3185775537279537, | |
"acc_norm_stderr": 0.034658454780766006 | |
}, | |
"all": { | |
"acc_norm": 0.37681539973104294, | |
"acc_norm_stderr": 0.03739499831804777, | |
"acc": 0.5314361350099271, | |
"acc_stderr": 0.012841668760976905 | |
} | |
}, | |
"versions": { | |
"community|acva:Algeria|0": 0, | |
"community|acva:Ancient_Egypt|0": 0, | |
"community|acva:Arab_Empire|0": 0, | |
"community|acva:Arabic_Architecture|0": 0, | |
"community|acva:Arabic_Art|0": 0, | |
"community|acva:Arabic_Astronomy|0": 0, | |
"community|acva:Arabic_Calligraphy|0": 0, | |
"community|acva:Arabic_Ceremony|0": 0, | |
"community|acva:Arabic_Clothing|0": 0, | |
"community|acva:Arabic_Culture|0": 0, | |
"community|acva:Arabic_Food|0": 0, | |
"community|acva:Arabic_Funeral|0": 0, | |
"community|acva:Arabic_Geography|0": 0, | |
"community|acva:Arabic_History|0": 0, | |
"community|acva:Arabic_Language_Origin|0": 0, | |
"community|acva:Arabic_Literature|0": 0, | |
"community|acva:Arabic_Math|0": 0, | |
"community|acva:Arabic_Medicine|0": 0, | |
"community|acva:Arabic_Music|0": 0, | |
"community|acva:Arabic_Ornament|0": 0, | |
"community|acva:Arabic_Philosophy|0": 0, | |
"community|acva:Arabic_Physics_and_Chemistry|0": 0, | |
"community|acva:Arabic_Wedding|0": 0, | |
"community|acva:Bahrain|0": 0, | |
"community|acva:Comoros|0": 0, | |
"community|acva:Egypt_modern|0": 0, | |
"community|acva:InfluenceFromAncientEgypt|0": 0, | |
"community|acva:InfluenceFromByzantium|0": 0, | |
"community|acva:InfluenceFromChina|0": 0, | |
"community|acva:InfluenceFromGreece|0": 0, | |
"community|acva:InfluenceFromIslam|0": 0, | |
"community|acva:InfluenceFromPersia|0": 0, | |
"community|acva:InfluenceFromRome|0": 0, | |
"community|acva:Iraq|0": 0, | |
"community|acva:Islam_Education|0": 0, | |
"community|acva:Islam_branches_and_schools|0": 0, | |
"community|acva:Islamic_law_system|0": 0, | |
"community|acva:Jordan|0": 0, | |
"community|acva:Kuwait|0": 0, | |
"community|acva:Lebanon|0": 0, | |
"community|acva:Libya|0": 0, | |
"community|acva:Mauritania|0": 0, | |
"community|acva:Mesopotamia_civilization|0": 0, | |
"community|acva:Morocco|0": 0, | |
"community|acva:Oman|0": 0, | |
"community|acva:Palestine|0": 0, | |
"community|acva:Qatar|0": 0, | |
"community|acva:Saudi_Arabia|0": 0, | |
"community|acva:Somalia|0": 0, | |
"community|acva:Sudan|0": 0, | |
"community|acva:Syria|0": 0, | |
"community|acva:Tunisia|0": 0, | |
"community|acva:United_Arab_Emirates|0": 0, | |
"community|acva:Yemen|0": 0, | |
"community|acva:communication|0": 0, | |
"community|acva:computer_and_phone|0": 0, | |
"community|acva:daily_life|0": 0, | |
"community|acva:entertainment|0": 0, | |
"community|alghafa:mcq_exams_test_ar|0": 0, | |
"community|alghafa:meta_ar_dialects|0": 0, | |
"community|alghafa:meta_ar_msa|0": 0, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, | |
"community|alghafa:multiple_choice_sentiment_task|0": 0, | |
"community|arabic_exams|0": 0, | |
"community|arabic_mmlu:abstract_algebra|0": 0, | |
"community|arabic_mmlu:anatomy|0": 0, | |
"community|arabic_mmlu:astronomy|0": 0, | |
"community|arabic_mmlu:business_ethics|0": 0, | |
"community|arabic_mmlu:clinical_knowledge|0": 0, | |
"community|arabic_mmlu:college_biology|0": 0, | |
"community|arabic_mmlu:college_chemistry|0": 0, | |
"community|arabic_mmlu:college_computer_science|0": 0, | |
"community|arabic_mmlu:college_mathematics|0": 0, | |
"community|arabic_mmlu:college_medicine|0": 0, | |
"community|arabic_mmlu:college_physics|0": 0, | |
"community|arabic_mmlu:computer_security|0": 0, | |
"community|arabic_mmlu:conceptual_physics|0": 0, | |
"community|arabic_mmlu:econometrics|0": 0, | |
"community|arabic_mmlu:electrical_engineering|0": 0, | |
"community|arabic_mmlu:elementary_mathematics|0": 0, | |
"community|arabic_mmlu:formal_logic|0": 0, | |
"community|arabic_mmlu:global_facts|0": 0, | |
"community|arabic_mmlu:high_school_biology|0": 0, | |
"community|arabic_mmlu:high_school_chemistry|0": 0, | |
"community|arabic_mmlu:high_school_computer_science|0": 0, | |
"community|arabic_mmlu:high_school_european_history|0": 0, | |
"community|arabic_mmlu:high_school_geography|0": 0, | |
"community|arabic_mmlu:high_school_government_and_politics|0": 0, | |
"community|arabic_mmlu:high_school_macroeconomics|0": 0, | |
"community|arabic_mmlu:high_school_mathematics|0": 0, | |
"community|arabic_mmlu:high_school_microeconomics|0": 0, | |
"community|arabic_mmlu:high_school_physics|0": 0, | |
"community|arabic_mmlu:high_school_psychology|0": 0, | |
"community|arabic_mmlu:high_school_statistics|0": 0, | |
"community|arabic_mmlu:high_school_us_history|0": 0, | |
"community|arabic_mmlu:high_school_world_history|0": 0, | |
"community|arabic_mmlu:human_aging|0": 0, | |
"community|arabic_mmlu:human_sexuality|0": 0, | |
"community|arabic_mmlu:international_law|0": 0, | |
"community|arabic_mmlu:jurisprudence|0": 0, | |
"community|arabic_mmlu:logical_fallacies|0": 0, | |
"community|arabic_mmlu:machine_learning|0": 0, | |
"community|arabic_mmlu:management|0": 0, | |
"community|arabic_mmlu:marketing|0": 0, | |
"community|arabic_mmlu:medical_genetics|0": 0, | |
"community|arabic_mmlu:miscellaneous|0": 0, | |
"community|arabic_mmlu:moral_disputes|0": 0, | |
"community|arabic_mmlu:moral_scenarios|0": 0, | |
"community|arabic_mmlu:nutrition|0": 0, | |
"community|arabic_mmlu:philosophy|0": 0, | |
"community|arabic_mmlu:prehistory|0": 0, | |
"community|arabic_mmlu:professional_accounting|0": 0, | |
"community|arabic_mmlu:professional_law|0": 0, | |
"community|arabic_mmlu:professional_medicine|0": 0, | |
"community|arabic_mmlu:professional_psychology|0": 0, | |
"community|arabic_mmlu:public_relations|0": 0, | |
"community|arabic_mmlu:security_studies|0": 0, | |
"community|arabic_mmlu:sociology|0": 0, | |
"community|arabic_mmlu:us_foreign_policy|0": 0, | |
"community|arabic_mmlu:virology|0": 0, | |
"community|arabic_mmlu:world_religions|0": 0, | |
"community|arc_challenge_okapi_ar|0": 0, | |
"community|arc_easy_ar|0": 0, | |
"community|boolq_ar|0": 0, | |
"community|copa_ext_ar|0": 0, | |
"community|hellaswag_okapi_ar|0": 0, | |
"community|openbook_qa_ext_ar|0": 0, | |
"community|piqa_ar|0": 0, | |
"community|race_ar|0": 0, | |
"community|sciq_ar|0": 0, | |
"community|toxigen_ar|0": 0, | |
"lighteval|xstory_cloze:ar|0": 0 | |
}, | |
"config_tasks": { | |
"community|acva:Algeria": { | |
"name": "acva:Algeria", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Algeria", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Ancient_Egypt": { | |
"name": "acva:Ancient_Egypt", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Ancient_Egypt", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 315, | |
"effective_num_docs": 315, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arab_Empire": { | |
"name": "acva:Arab_Empire", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arab_Empire", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Architecture": { | |
"name": "acva:Arabic_Architecture", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Architecture", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Art": { | |
"name": "acva:Arabic_Art", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Art", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Astronomy": { | |
"name": "acva:Arabic_Astronomy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Astronomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Calligraphy": { | |
"name": "acva:Arabic_Calligraphy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Calligraphy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 255, | |
"effective_num_docs": 255, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Ceremony": { | |
"name": "acva:Arabic_Ceremony", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Ceremony", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 185, | |
"effective_num_docs": 185, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Clothing": { | |
"name": "acva:Arabic_Clothing", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Clothing", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Culture": { | |
"name": "acva:Arabic_Culture", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Culture", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Food": { | |
"name": "acva:Arabic_Food", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Food", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Funeral": { | |
"name": "acva:Arabic_Funeral", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Funeral", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Geography": { | |
"name": "acva:Arabic_Geography", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Geography", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_History": { | |
"name": "acva:Arabic_History", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_History", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Language_Origin": { | |
"name": "acva:Arabic_Language_Origin", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Language_Origin", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Literature": { | |
"name": "acva:Arabic_Literature", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Literature", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Math": { | |
"name": "acva:Arabic_Math", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Math", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Medicine": { | |
"name": "acva:Arabic_Medicine", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Music": { | |
"name": "acva:Arabic_Music", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Music", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 139, | |
"effective_num_docs": 139, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Ornament": { | |
"name": "acva:Arabic_Ornament", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Ornament", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Philosophy": { | |
"name": "acva:Arabic_Philosophy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Philosophy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry": { | |
"name": "acva:Arabic_Physics_and_Chemistry", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Physics_and_Chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Wedding": { | |
"name": "acva:Arabic_Wedding", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Wedding", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Bahrain": { | |
"name": "acva:Bahrain", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Bahrain", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Comoros": { | |
"name": "acva:Comoros", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Comoros", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Egypt_modern": { | |
"name": "acva:Egypt_modern", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Egypt_modern", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromAncientEgypt": { | |
"name": "acva:InfluenceFromAncientEgypt", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromAncientEgypt", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromByzantium": { | |
"name": "acva:InfluenceFromByzantium", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromByzantium", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromChina": { | |
"name": "acva:InfluenceFromChina", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromChina", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromGreece": { | |
"name": "acva:InfluenceFromGreece", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromGreece", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromIslam": { | |
"name": "acva:InfluenceFromIslam", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromIslam", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromPersia": { | |
"name": "acva:InfluenceFromPersia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromPersia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 175, | |
"effective_num_docs": 175, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromRome": { | |
"name": "acva:InfluenceFromRome", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromRome", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Iraq": { | |
"name": "acva:Iraq", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Iraq", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islam_Education": { | |
"name": "acva:Islam_Education", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islam_Education", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islam_branches_and_schools": { | |
"name": "acva:Islam_branches_and_schools", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islam_branches_and_schools", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 175, | |
"effective_num_docs": 175, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islamic_law_system": { | |
"name": "acva:Islamic_law_system", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islamic_law_system", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Jordan": { | |
"name": "acva:Jordan", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Jordan", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Kuwait": { | |
"name": "acva:Kuwait", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Kuwait", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Lebanon": { | |
"name": "acva:Lebanon", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Lebanon", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Libya": { | |
"name": "acva:Libya", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Libya", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Mauritania": { | |
"name": "acva:Mauritania", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Mauritania", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Mesopotamia_civilization": { | |
"name": "acva:Mesopotamia_civilization", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Mesopotamia_civilization", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 155, | |
"effective_num_docs": 155, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Morocco": { | |
"name": "acva:Morocco", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Morocco", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Oman": { | |
"name": "acva:Oman", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Oman", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Palestine": { | |
"name": "acva:Palestine", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Palestine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Qatar": { | |
"name": "acva:Qatar", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Qatar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Saudi_Arabia": { | |
"name": "acva:Saudi_Arabia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Saudi_Arabia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Somalia": { | |
"name": "acva:Somalia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Somalia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Sudan": { | |
"name": "acva:Sudan", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Sudan", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Syria": { | |
"name": "acva:Syria", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Syria", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Tunisia": { | |
"name": "acva:Tunisia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Tunisia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:United_Arab_Emirates": { | |
"name": "acva:United_Arab_Emirates", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "United_Arab_Emirates", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Yemen": { | |
"name": "acva:Yemen", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Yemen", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 10, | |
"effective_num_docs": 10, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:communication": { | |
"name": "acva:communication", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "communication", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 364, | |
"effective_num_docs": 364, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:computer_and_phone": { | |
"name": "acva:computer_and_phone", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "computer_and_phone", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 295, | |
"effective_num_docs": 295, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:daily_life": { | |
"name": "acva:daily_life", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "daily_life", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 337, | |
"effective_num_docs": 337, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:entertainment": { | |
"name": "acva:entertainment", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "entertainment", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 295, | |
"effective_num_docs": 295, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:mcq_exams_test_ar": { | |
"name": "alghafa:mcq_exams_test_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "mcq_exams_test_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 557, | |
"effective_num_docs": 557, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_dialects": { | |
"name": "alghafa:meta_ar_dialects", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_dialects", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5395, | |
"effective_num_docs": 5395, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_msa": { | |
"name": "alghafa:meta_ar_msa", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_msa", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { | |
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_soqal_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7995, | |
"effective_num_docs": 7995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5995, | |
"effective_num_docs": 5995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task": { | |
"name": "alghafa:multiple_choice_sentiment_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_sentiment_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1720, | |
"effective_num_docs": 1720, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_exams": { | |
"name": "arabic_exams", | |
"prompt_function": "arabic_exams", | |
"hf_repo": "OALL/Arabic_EXAMS", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 537, | |
"effective_num_docs": 537, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:abstract_algebra": { | |
"name": "arabic_mmlu:abstract_algebra", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "abstract_algebra", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:anatomy": { | |
"name": "arabic_mmlu:anatomy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "anatomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 135, | |
"effective_num_docs": 135, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:astronomy": { | |
"name": "arabic_mmlu:astronomy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "astronomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 152, | |
"effective_num_docs": 152, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:business_ethics": { | |
"name": "arabic_mmlu:business_ethics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "business_ethics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:clinical_knowledge": { | |
"name": "arabic_mmlu:clinical_knowledge", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "clinical_knowledge", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_biology": { | |
"name": "arabic_mmlu:college_biology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_biology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 144, | |
"effective_num_docs": 144, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_chemistry": { | |
"name": "arabic_mmlu:college_chemistry", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_computer_science": { | |
"name": "arabic_mmlu:college_computer_science", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_computer_science", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_mathematics": { | |
"name": "arabic_mmlu:college_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_medicine": { | |
"name": "arabic_mmlu:college_medicine", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 173, | |
"effective_num_docs": 173, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_physics": { | |
"name": "arabic_mmlu:college_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:computer_security": { | |
"name": "arabic_mmlu:computer_security", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "computer_security", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:conceptual_physics": { | |
"name": "arabic_mmlu:conceptual_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "conceptual_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 235, | |
"effective_num_docs": 235, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:econometrics": { | |
"name": "arabic_mmlu:econometrics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "econometrics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 114, | |
"effective_num_docs": 114, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:electrical_engineering": { | |
"name": "arabic_mmlu:electrical_engineering", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "electrical_engineering", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:elementary_mathematics": { | |
"name": "arabic_mmlu:elementary_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "elementary_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 378, | |
"effective_num_docs": 378, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:formal_logic": { | |
"name": "arabic_mmlu:formal_logic", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "formal_logic", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 126, | |
"effective_num_docs": 126, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:global_facts": { | |
"name": "arabic_mmlu:global_facts", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "global_facts", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_biology": { | |
"name": "arabic_mmlu:high_school_biology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_biology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 310, | |
"effective_num_docs": 310, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_chemistry": { | |
"name": "arabic_mmlu:high_school_chemistry", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 203, | |
"effective_num_docs": 203, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_computer_science": { | |
"name": "arabic_mmlu:high_school_computer_science", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_computer_science", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_european_history": { | |
"name": "arabic_mmlu:high_school_european_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_european_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 165, | |
"effective_num_docs": 165, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_geography": { | |
"name": "arabic_mmlu:high_school_geography", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_geography", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 198, | |
"effective_num_docs": 198, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics": { | |
"name": "arabic_mmlu:high_school_government_and_politics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_government_and_politics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 193, | |
"effective_num_docs": 193, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics": { | |
"name": "arabic_mmlu:high_school_macroeconomics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_macroeconomics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 390, | |
"effective_num_docs": 390, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_mathematics": { | |
"name": "arabic_mmlu:high_school_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 270, | |
"effective_num_docs": 270, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics": { | |
"name": "arabic_mmlu:high_school_microeconomics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_microeconomics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_physics": { | |
"name": "arabic_mmlu:high_school_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 151, | |
"effective_num_docs": 151, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_psychology": { | |
"name": "arabic_mmlu:high_school_psychology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_psychology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 545, | |
"effective_num_docs": 545, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_statistics": { | |
"name": "arabic_mmlu:high_school_statistics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_statistics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 216, | |
"effective_num_docs": 216, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_us_history": { | |
"name": "arabic_mmlu:high_school_us_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_us_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 204, | |
"effective_num_docs": 204, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_world_history": { | |
"name": "arabic_mmlu:high_school_world_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_world_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 237, | |
"effective_num_docs": 237, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:human_aging": { | |
"name": "arabic_mmlu:human_aging", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "human_aging", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 223, | |
"effective_num_docs": 223, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:human_sexuality": { | |
"name": "arabic_mmlu:human_sexuality", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "human_sexuality", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 131, | |
"effective_num_docs": 131, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:international_law": { | |
"name": "arabic_mmlu:international_law", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "international_law", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 121, | |
"effective_num_docs": 121, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:jurisprudence": { | |
"name": "arabic_mmlu:jurisprudence", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "jurisprudence", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 108, | |
"effective_num_docs": 108, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:logical_fallacies": { | |
"name": "arabic_mmlu:logical_fallacies", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "logical_fallacies", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 163, | |
"effective_num_docs": 163, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:machine_learning": { | |
"name": "arabic_mmlu:machine_learning", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "machine_learning", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 112, | |
"effective_num_docs": 112, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:management": { | |
"name": "arabic_mmlu:management", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "management", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 103, | |
"effective_num_docs": 103, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:marketing": { | |
"name": "arabic_mmlu:marketing", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "marketing", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 234, | |
"effective_num_docs": 234, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:medical_genetics": { | |
"name": "arabic_mmlu:medical_genetics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "medical_genetics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:miscellaneous": { | |
"name": "arabic_mmlu:miscellaneous", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "miscellaneous", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 783, | |
"effective_num_docs": 783, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:moral_disputes": { | |
"name": "arabic_mmlu:moral_disputes", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "moral_disputes", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 346, | |
"effective_num_docs": 346, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:moral_scenarios": { | |
"name": "arabic_mmlu:moral_scenarios", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "moral_scenarios", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:nutrition": { | |
"name": "arabic_mmlu:nutrition", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "nutrition", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:philosophy": { | |
"name": "arabic_mmlu:philosophy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "philosophy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 311, | |
"effective_num_docs": 311, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:prehistory": { | |
"name": "arabic_mmlu:prehistory", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "prehistory", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 324, | |
"effective_num_docs": 324, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_accounting": { | |
"name": "arabic_mmlu:professional_accounting", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_accounting", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 282, | |
"effective_num_docs": 282, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_law": { | |
"name": "arabic_mmlu:professional_law", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_law", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1534, | |
"effective_num_docs": 1534, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_medicine": { | |
"name": "arabic_mmlu:professional_medicine", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 272, | |
"effective_num_docs": 272, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_psychology": { | |
"name": "arabic_mmlu:professional_psychology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_psychology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:public_relations": { | |
"name": "arabic_mmlu:public_relations", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "public_relations", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 110, | |
"effective_num_docs": 110, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:security_studies": { | |
"name": "arabic_mmlu:security_studies", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "security_studies", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 245, | |
"effective_num_docs": 245, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:sociology": { | |
"name": "arabic_mmlu:sociology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "sociology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 201, | |
"effective_num_docs": 201, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:us_foreign_policy": { | |
"name": "arabic_mmlu:us_foreign_policy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "us_foreign_policy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:virology": { | |
"name": "arabic_mmlu:virology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "virology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 166, | |
"effective_num_docs": 166, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:world_religions": { | |
"name": "arabic_mmlu:world_religions", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "world_religions", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 171, | |
"effective_num_docs": 171, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arc_challenge_okapi_ar": { | |
"name": "arc_challenge_okapi_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "arc_challenge_okapi_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1160, | |
"effective_num_docs": 1160, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arc_easy_ar": { | |
"name": "arc_easy_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "arc_easy_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2364, | |
"effective_num_docs": 2364, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|boolq_ar": { | |
"name": "boolq_ar", | |
"prompt_function": "boolq_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "boolq_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3260, | |
"effective_num_docs": 3260, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|copa_ext_ar": { | |
"name": "copa_ext_ar", | |
"prompt_function": "copa_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "copa_ext_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 90, | |
"effective_num_docs": 90, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|hellaswag_okapi_ar": { | |
"name": "hellaswag_okapi_ar", | |
"prompt_function": "hellaswag_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "hellaswag_okapi_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 9171, | |
"effective_num_docs": 9171, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|openbook_qa_ext_ar": { | |
"name": "openbook_qa_ext_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "openbook_qa_ext_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 495, | |
"effective_num_docs": 495, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|piqa_ar": { | |
"name": "piqa_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "piqa_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1833, | |
"effective_num_docs": 1833, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|race_ar": { | |
"name": "race_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "race_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4929, | |
"effective_num_docs": 4929, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|sciq_ar": { | |
"name": "sciq_ar", | |
"prompt_function": "sciq_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "sciq_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 995, | |
"effective_num_docs": 995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|toxigen_ar": { | |
"name": "toxigen_ar", | |
"prompt_function": "toxigen_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "toxigen_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 935, | |
"effective_num_docs": 935, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"lighteval|xstory_cloze:ar": { | |
"name": "xstory_cloze:ar", | |
"prompt_function": "storycloze", | |
"hf_repo": "juletxara/xstory_cloze", | |
"hf_subset": "ar", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"training", | |
"eval" | |
], | |
"evaluation_splits": [ | |
"eval" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1511, | |
"effective_num_docs": 1511, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
} | |
}, | |
"summary_tasks": { | |
"community|acva:Algeria|0": { | |
"hashes": { | |
"hash_examples": "da5a3003cd46f6f9", | |
"hash_full_prompts": "da5a3003cd46f6f9", | |
"hash_input_tokens": "5f17840216542854", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Ancient_Egypt|0": { | |
"hashes": { | |
"hash_examples": "52d6f767fede195b", | |
"hash_full_prompts": "52d6f767fede195b", | |
"hash_input_tokens": "1ec54d6098a2bd25", | |
"hash_cont_tokens": "da3795911a69a578" | |
}, | |
"truncated": 0, | |
"non_truncated": 315, | |
"padded": 630, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arab_Empire|0": { | |
"hashes": { | |
"hash_examples": "8dacff6a79804a75", | |
"hash_full_prompts": "8dacff6a79804a75", | |
"hash_input_tokens": "2e55d159a75d85d9", | |
"hash_cont_tokens": "7735052d59c87300" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 530, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Architecture|0": { | |
"hashes": { | |
"hash_examples": "df286cd862d9f6bb", | |
"hash_full_prompts": "df286cd862d9f6bb", | |
"hash_input_tokens": "b79526f90bcea90e", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Art|0": { | |
"hashes": { | |
"hash_examples": "112883d764118a49", | |
"hash_full_prompts": "112883d764118a49", | |
"hash_input_tokens": "201de85ef13e1f35", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Astronomy|0": { | |
"hashes": { | |
"hash_examples": "20dcdf2454bf8671", | |
"hash_full_prompts": "20dcdf2454bf8671", | |
"hash_input_tokens": "17ccaedd4eb889fe", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Calligraphy|0": { | |
"hashes": { | |
"hash_examples": "3a9f9d1ebe868a15", | |
"hash_full_prompts": "3a9f9d1ebe868a15", | |
"hash_input_tokens": "5cb34bc1c3dafb69", | |
"hash_cont_tokens": "10a7869b749e8d6d" | |
}, | |
"truncated": 0, | |
"non_truncated": 255, | |
"padded": 510, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Ceremony|0": { | |
"hashes": { | |
"hash_examples": "c927630f8d2f44da", | |
"hash_full_prompts": "c927630f8d2f44da", | |
"hash_input_tokens": "8be32eb08af7b8c6", | |
"hash_cont_tokens": "5148a932b6e3909c" | |
}, | |
"truncated": 0, | |
"non_truncated": 185, | |
"padded": 370, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Clothing|0": { | |
"hashes": { | |
"hash_examples": "6ad0740c2ac6ac92", | |
"hash_full_prompts": "6ad0740c2ac6ac92", | |
"hash_input_tokens": "d8df0f01469ce0b0", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Culture|0": { | |
"hashes": { | |
"hash_examples": "2177bd857ad872ae", | |
"hash_full_prompts": "2177bd857ad872ae", | |
"hash_input_tokens": "5d7453c3144e0e8b", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Food|0": { | |
"hashes": { | |
"hash_examples": "a6ada65b71d7c9c5", | |
"hash_full_prompts": "a6ada65b71d7c9c5", | |
"hash_input_tokens": "4efe8e2a4ed33c6c", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Funeral|0": { | |
"hashes": { | |
"hash_examples": "fcee39dc29eaae91", | |
"hash_full_prompts": "fcee39dc29eaae91", | |
"hash_input_tokens": "209a2185ae444dec", | |
"hash_cont_tokens": "1cbd6e3c10011d3e" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Geography|0": { | |
"hashes": { | |
"hash_examples": "d36eda7c89231c02", | |
"hash_full_prompts": "d36eda7c89231c02", | |
"hash_input_tokens": "c25e1f1b87fe1ad6", | |
"hash_cont_tokens": "b2ca9c34f69712e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_History|0": { | |
"hashes": { | |
"hash_examples": "6354ac0d6db6a5fc", | |
"hash_full_prompts": "6354ac0d6db6a5fc", | |
"hash_input_tokens": "06d2e4b452c3aacd", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Language_Origin|0": { | |
"hashes": { | |
"hash_examples": "ddc967c8aca34402", | |
"hash_full_prompts": "ddc967c8aca34402", | |
"hash_input_tokens": "167eb9720a3afeb5", | |
"hash_cont_tokens": "1cbd6e3c10011d3e" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Literature|0": { | |
"hashes": { | |
"hash_examples": "4305379fd46be5d8", | |
"hash_full_prompts": "4305379fd46be5d8", | |
"hash_input_tokens": "146f59aa53b82c26", | |
"hash_cont_tokens": "b2ca9c34f69712e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Math|0": { | |
"hashes": { | |
"hash_examples": "dec621144f4d28be", | |
"hash_full_prompts": "dec621144f4d28be", | |
"hash_input_tokens": "4c812aa53f959872", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Medicine|0": { | |
"hashes": { | |
"hash_examples": "2b344cdae9495ff2", | |
"hash_full_prompts": "2b344cdae9495ff2", | |
"hash_input_tokens": "20eaa17342c09d47", | |
"hash_cont_tokens": "b2ca9c34f69712e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Music|0": { | |
"hashes": { | |
"hash_examples": "0c54624d881944ce", | |
"hash_full_prompts": "0c54624d881944ce", | |
"hash_input_tokens": "b4fbdb01dec2e1ac", | |
"hash_cont_tokens": "23b74a3ea206e47e" | |
}, | |
"truncated": 0, | |
"non_truncated": 139, | |
"padded": 278, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Ornament|0": { | |
"hashes": { | |
"hash_examples": "251a4a84289d8bc1", | |
"hash_full_prompts": "251a4a84289d8bc1", | |
"hash_input_tokens": "a80d088f993fc5c1", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Philosophy|0": { | |
"hashes": { | |
"hash_examples": "3f86fb9c94c13d22", | |
"hash_full_prompts": "3f86fb9c94c13d22", | |
"hash_input_tokens": "2acc5568db3c3aa0", | |
"hash_cont_tokens": "b2ca9c34f69712e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry|0": { | |
"hashes": { | |
"hash_examples": "8fec65af3695b62a", | |
"hash_full_prompts": "8fec65af3695b62a", | |
"hash_input_tokens": "13c6b130b8ca54ca", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Wedding|0": { | |
"hashes": { | |
"hash_examples": "9cc3477184d7a4b8", | |
"hash_full_prompts": "9cc3477184d7a4b8", | |
"hash_input_tokens": "5185ff4ab015ba7e", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Bahrain|0": { | |
"hashes": { | |
"hash_examples": "c92e803a0fa8b9e2", | |
"hash_full_prompts": "c92e803a0fa8b9e2", | |
"hash_input_tokens": "cda623aa39e23f9d", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Comoros|0": { | |
"hashes": { | |
"hash_examples": "06e5d4bba8e54cae", | |
"hash_full_prompts": "06e5d4bba8e54cae", | |
"hash_input_tokens": "376f4b280dd3c01b", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Egypt_modern|0": { | |
"hashes": { | |
"hash_examples": "c6ec369164f93446", | |
"hash_full_prompts": "c6ec369164f93446", | |
"hash_input_tokens": "a368106167d025cc", | |
"hash_cont_tokens": "1cbd6e3c10011d3e" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromAncientEgypt|0": { | |
"hashes": { | |
"hash_examples": "b9d56d74818b9bd4", | |
"hash_full_prompts": "b9d56d74818b9bd4", | |
"hash_input_tokens": "eefd7107a9e3a637", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromByzantium|0": { | |
"hashes": { | |
"hash_examples": "5316c9624e7e59b8", | |
"hash_full_prompts": "5316c9624e7e59b8", | |
"hash_input_tokens": "52a7993a7ba05de1", | |
"hash_cont_tokens": "b2ca9c34f69712e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromChina|0": { | |
"hashes": { | |
"hash_examples": "87894bce95a56411", | |
"hash_full_prompts": "87894bce95a56411", | |
"hash_input_tokens": "ac48b8674cc56ede", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromGreece|0": { | |
"hashes": { | |
"hash_examples": "0baa78a27e469312", | |
"hash_full_prompts": "0baa78a27e469312", | |
"hash_input_tokens": "679fb3b6c9b9a5b3", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromIslam|0": { | |
"hashes": { | |
"hash_examples": "0c2532cde6541ff2", | |
"hash_full_prompts": "0c2532cde6541ff2", | |
"hash_input_tokens": "c287bd87c72ab5df", | |
"hash_cont_tokens": "b2ca9c34f69712e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromPersia|0": { | |
"hashes": { | |
"hash_examples": "efcd8112dc53c6e5", | |
"hash_full_prompts": "efcd8112dc53c6e5", | |
"hash_input_tokens": "ec31b9d18573e287", | |
"hash_cont_tokens": "4d00ae84a9d0bd16" | |
}, | |
"truncated": 0, | |
"non_truncated": 175, | |
"padded": 350, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromRome|0": { | |
"hashes": { | |
"hash_examples": "9db61480e2e85fd3", | |
"hash_full_prompts": "9db61480e2e85fd3", | |
"hash_input_tokens": "73c5e4bd392e7277", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Iraq|0": { | |
"hashes": { | |
"hash_examples": "96dac3dfa8d2f41f", | |
"hash_full_prompts": "96dac3dfa8d2f41f", | |
"hash_input_tokens": "875bc1d50fa52ee3", | |
"hash_cont_tokens": "1a1688a7a8f91310" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islam_Education|0": { | |
"hashes": { | |
"hash_examples": "0d80355f6a4cb51b", | |
"hash_full_prompts": "0d80355f6a4cb51b", | |
"hash_input_tokens": "0cd957b61eddd6f5", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islam_branches_and_schools|0": { | |
"hashes": { | |
"hash_examples": "5cedce1be2c3ad50", | |
"hash_full_prompts": "5cedce1be2c3ad50", | |
"hash_input_tokens": "967e969636a8cd49", | |
"hash_cont_tokens": "4d00ae84a9d0bd16" | |
}, | |
"truncated": 0, | |
"non_truncated": 175, | |
"padded": 350, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islamic_law_system|0": { | |
"hashes": { | |
"hash_examples": "c0e6db8bc84e105e", | |
"hash_full_prompts": "c0e6db8bc84e105e", | |
"hash_input_tokens": "a778038dbf5cbe51", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Jordan|0": { | |
"hashes": { | |
"hash_examples": "33deb5b4e5ddd6a1", | |
"hash_full_prompts": "33deb5b4e5ddd6a1", | |
"hash_input_tokens": "cb674080a74cf498", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Kuwait|0": { | |
"hashes": { | |
"hash_examples": "eb41773346d7c46c", | |
"hash_full_prompts": "eb41773346d7c46c", | |
"hash_input_tokens": "4214231636dc1ce9", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Lebanon|0": { | |
"hashes": { | |
"hash_examples": "25932dbf4c13d34f", | |
"hash_full_prompts": "25932dbf4c13d34f", | |
"hash_input_tokens": "eb2db2e1b5abcc91", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Libya|0": { | |
"hashes": { | |
"hash_examples": "f2c4db63cd402926", | |
"hash_full_prompts": "f2c4db63cd402926", | |
"hash_input_tokens": "42093a3bcee92b24", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Mauritania|0": { | |
"hashes": { | |
"hash_examples": "8723ab5fdf286b54", | |
"hash_full_prompts": "8723ab5fdf286b54", | |
"hash_input_tokens": "1d7fb6055f63607c", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Mesopotamia_civilization|0": { | |
"hashes": { | |
"hash_examples": "c33f5502a6130ca9", | |
"hash_full_prompts": "c33f5502a6130ca9", | |
"hash_input_tokens": "b926fe2926f49b3b", | |
"hash_cont_tokens": "1dc215c4aadbcb3a" | |
}, | |
"truncated": 0, | |
"non_truncated": 155, | |
"padded": 310, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Morocco|0": { | |
"hashes": { | |
"hash_examples": "588a5ed27904b1ae", | |
"hash_full_prompts": "588a5ed27904b1ae", | |
"hash_input_tokens": "4665ebda734f86d8", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Oman|0": { | |
"hashes": { | |
"hash_examples": "d447c52b94248b69", | |
"hash_full_prompts": "d447c52b94248b69", | |
"hash_input_tokens": "85516aef87ee7e7f", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Palestine|0": { | |
"hashes": { | |
"hash_examples": "19197e076ad14ff5", | |
"hash_full_prompts": "19197e076ad14ff5", | |
"hash_input_tokens": "a08adcd52fb0b0a2", | |
"hash_cont_tokens": "1a1688a7a8f91310" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Qatar|0": { | |
"hashes": { | |
"hash_examples": "cf0736fa185b28f6", | |
"hash_full_prompts": "cf0736fa185b28f6", | |
"hash_input_tokens": "7023883a2fb77b4b", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Saudi_Arabia|0": { | |
"hashes": { | |
"hash_examples": "69beda6e1b85a08d", | |
"hash_full_prompts": "69beda6e1b85a08d", | |
"hash_input_tokens": "62e7270bd4f92c97", | |
"hash_cont_tokens": "cb482b3aa2812d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Somalia|0": { | |
"hashes": { | |
"hash_examples": "b387940c65784fbf", | |
"hash_full_prompts": "b387940c65784fbf", | |
"hash_input_tokens": "83cebb18302cbb9c", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Sudan|0": { | |
"hashes": { | |
"hash_examples": "e02c32b9d2dd0c3f", | |
"hash_full_prompts": "e02c32b9d2dd0c3f", | |
"hash_input_tokens": "4acf4514ecf76d56", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Syria|0": { | |
"hashes": { | |
"hash_examples": "60a6f8fe73bda4bb", | |
"hash_full_prompts": "60a6f8fe73bda4bb", | |
"hash_input_tokens": "5ca331995d7147f3", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Tunisia|0": { | |
"hashes": { | |
"hash_examples": "34bb15d3830c5649", | |
"hash_full_prompts": "34bb15d3830c5649", | |
"hash_input_tokens": "a0d4217586927ca4", | |
"hash_cont_tokens": "8cb2f99f2b1a6f34" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:United_Arab_Emirates|0": { | |
"hashes": { | |
"hash_examples": "98a0ba78172718ce", | |
"hash_full_prompts": "98a0ba78172718ce", | |
"hash_input_tokens": "e351199130abbd8f", | |
"hash_cont_tokens": "1a1688a7a8f91310" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Yemen|0": { | |
"hashes": { | |
"hash_examples": "18e9bcccbb4ced7a", | |
"hash_full_prompts": "18e9bcccbb4ced7a", | |
"hash_input_tokens": "b4f25cdca070d9d6", | |
"hash_cont_tokens": "87808e2fa4738904" | |
}, | |
"truncated": 0, | |
"non_truncated": 10, | |
"padded": 20, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:communication|0": { | |
"hashes": { | |
"hash_examples": "9ff28ab5eab5c97b", | |
"hash_full_prompts": "9ff28ab5eab5c97b", | |
"hash_input_tokens": "ccc63209c34187ac", | |
"hash_cont_tokens": "d829ffb41c94886f" | |
}, | |
"truncated": 0, | |
"non_truncated": 364, | |
"padded": 728, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:computer_and_phone|0": { | |
"hashes": { | |
"hash_examples": "37bac2f086aaf6c2", | |
"hash_full_prompts": "37bac2f086aaf6c2", | |
"hash_input_tokens": "a72d412eedd051e3", | |
"hash_cont_tokens": "f86558d0b0c63b3c" | |
}, | |
"truncated": 0, | |
"non_truncated": 295, | |
"padded": 590, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:daily_life|0": { | |
"hashes": { | |
"hash_examples": "bf07363c1c252e2f", | |
"hash_full_prompts": "bf07363c1c252e2f", | |
"hash_input_tokens": "1402ac2581a240b1", | |
"hash_cont_tokens": "ba3e48537d2038d9" | |
}, | |
"truncated": 0, | |
"non_truncated": 337, | |
"padded": 674, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:entertainment|0": { | |
"hashes": { | |
"hash_examples": "37077bc00f0ac56a", | |
"hash_full_prompts": "37077bc00f0ac56a", | |
"hash_input_tokens": "e3fb666c36d12f51", | |
"hash_cont_tokens": "f86558d0b0c63b3c" | |
}, | |
"truncated": 0, | |
"non_truncated": 295, | |
"padded": 590, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"hashes": { | |
"hash_examples": "c07a5e78c5c0b8fe", | |
"hash_full_prompts": "c07a5e78c5c0b8fe", | |
"hash_input_tokens": "e1979858dce20c92", | |
"hash_cont_tokens": "64e7a22d3029797d" | |
}, | |
"truncated": 0, | |
"non_truncated": 557, | |
"padded": 2228, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"hashes": { | |
"hash_examples": "c0b6081f83e14064", | |
"hash_full_prompts": "c0b6081f83e14064", | |
"hash_input_tokens": "5bf394177d857aec", | |
"hash_cont_tokens": "f58c12b61a13551a" | |
}, | |
"truncated": 0, | |
"non_truncated": 5395, | |
"padded": 21533, | |
"non_padded": 47, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"hashes": { | |
"hash_examples": "64eb78a7c5b7484b", | |
"hash_full_prompts": "64eb78a7c5b7484b", | |
"hash_input_tokens": "229788376bf73d92", | |
"hash_cont_tokens": "44e3ede1a296535b" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3572, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"hashes": { | |
"hash_examples": "54fc3502c1c02c06", | |
"hash_full_prompts": "54fc3502c1c02c06", | |
"hash_input_tokens": "731b49238dd37af4", | |
"hash_cont_tokens": "25b33d0c05a0b20d" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 150, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"hashes": { | |
"hash_examples": "46572d83696552ae", | |
"hash_full_prompts": "46572d83696552ae", | |
"hash_input_tokens": "cedcad86e018608f", | |
"hash_cont_tokens": "4c0622f354f446f3" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 747, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"hashes": { | |
"hash_examples": "f430d97ff715bc1c", | |
"hash_full_prompts": "f430d97ff715bc1c", | |
"hash_input_tokens": "a36969b44436977e", | |
"hash_cont_tokens": "67f9610b54887205" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 745, | |
"non_padded": 5, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"hashes": { | |
"hash_examples": "6b70a7416584f98c", | |
"hash_full_prompts": "6b70a7416584f98c", | |
"hash_input_tokens": "c9616de0a31e7bb2", | |
"hash_cont_tokens": "8905757ccbd39109" | |
}, | |
"truncated": 0, | |
"non_truncated": 7995, | |
"padded": 15990, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "bc2005cc9d2f436e", | |
"hash_full_prompts": "bc2005cc9d2f436e", | |
"hash_input_tokens": "1ea7eb367cae2150", | |
"hash_cont_tokens": "97b28e50e83fc688" | |
}, | |
"truncated": 0, | |
"non_truncated": 5995, | |
"padded": 17911, | |
"non_padded": 74, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "6fb0e254ea5945d8", | |
"hash_full_prompts": "6fb0e254ea5945d8", | |
"hash_input_tokens": "801178d8b9e34710", | |
"hash_cont_tokens": "2a2e928baf06b411" | |
}, | |
"truncated": 0, | |
"non_truncated": 1720, | |
"padded": 5121, | |
"non_padded": 39, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_exams|0": { | |
"hashes": { | |
"hash_examples": "6d721df351722656", | |
"hash_full_prompts": "6d721df351722656", | |
"hash_input_tokens": "7ab925b51553a6ae", | |
"hash_cont_tokens": "6f477436425536b4" | |
}, | |
"truncated": 0, | |
"non_truncated": 537, | |
"padded": 2112, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:abstract_algebra|0": { | |
"hashes": { | |
"hash_examples": "f2ddca8f45c0a511", | |
"hash_full_prompts": "f2ddca8f45c0a511", | |
"hash_input_tokens": "1abc6db67ee01ca5", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:anatomy|0": { | |
"hashes": { | |
"hash_examples": "dfdbc1b83107668d", | |
"hash_full_prompts": "dfdbc1b83107668d", | |
"hash_input_tokens": "ae4ec76d3f487642", | |
"hash_cont_tokens": "d5a6dac3efa6cef4" | |
}, | |
"truncated": 0, | |
"non_truncated": 135, | |
"padded": 532, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:astronomy|0": { | |
"hashes": { | |
"hash_examples": "9736a606002a848e", | |
"hash_full_prompts": "9736a606002a848e", | |
"hash_input_tokens": "3575f4af1a328878", | |
"hash_cont_tokens": "04a4e5bf4dac1686" | |
}, | |
"truncated": 0, | |
"non_truncated": 152, | |
"padded": 608, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:business_ethics|0": { | |
"hashes": { | |
"hash_examples": "735e452fbb6dc63d", | |
"hash_full_prompts": "735e452fbb6dc63d", | |
"hash_input_tokens": "a70ef2a8e8a7c018", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:clinical_knowledge|0": { | |
"hashes": { | |
"hash_examples": "6ab0ca4da98aedcf", | |
"hash_full_prompts": "6ab0ca4da98aedcf", | |
"hash_input_tokens": "22786cb1675a672d", | |
"hash_cont_tokens": "bab105bf58d8eb88" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 1052, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_biology|0": { | |
"hashes": { | |
"hash_examples": "17e4e390848018a4", | |
"hash_full_prompts": "17e4e390848018a4", | |
"hash_input_tokens": "2e86257a1362f03e", | |
"hash_cont_tokens": "0290e3e410ffec1e" | |
}, | |
"truncated": 0, | |
"non_truncated": 144, | |
"padded": 576, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_chemistry|0": { | |
"hashes": { | |
"hash_examples": "4abb169f6dfd234b", | |
"hash_full_prompts": "4abb169f6dfd234b", | |
"hash_input_tokens": "b7cb1b32ec97175d", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_computer_science|0": { | |
"hashes": { | |
"hash_examples": "a369e2e941358a1e", | |
"hash_full_prompts": "a369e2e941358a1e", | |
"hash_input_tokens": "3473d086fa667fe7", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_mathematics|0": { | |
"hashes": { | |
"hash_examples": "d7be03b8b6020bff", | |
"hash_full_prompts": "d7be03b8b6020bff", | |
"hash_input_tokens": "d62620e5acd3ac9a", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_medicine|0": { | |
"hashes": { | |
"hash_examples": "0518a00f097346bf", | |
"hash_full_prompts": "0518a00f097346bf", | |
"hash_input_tokens": "0f341990565a1b8a", | |
"hash_cont_tokens": "12436c143db8b5f1" | |
}, | |
"truncated": 0, | |
"non_truncated": 173, | |
"padded": 692, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_physics|0": { | |
"hashes": { | |
"hash_examples": "5d842cd49bc70e12", | |
"hash_full_prompts": "5d842cd49bc70e12", | |
"hash_input_tokens": "b341b14666c5d276", | |
"hash_cont_tokens": "5f3b392b7904cf1c" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 404, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:computer_security|0": { | |
"hashes": { | |
"hash_examples": "8e85d9f85be9b32f", | |
"hash_full_prompts": "8e85d9f85be9b32f", | |
"hash_input_tokens": "7828926533cc8d56", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:conceptual_physics|0": { | |
"hashes": { | |
"hash_examples": "7964b55a0a49502b", | |
"hash_full_prompts": "7964b55a0a49502b", | |
"hash_input_tokens": "dd0c75e41e20ed45", | |
"hash_cont_tokens": "8ce490bf68e58084" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 916, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:econometrics|0": { | |
"hashes": { | |
"hash_examples": "1e192eae38347257", | |
"hash_full_prompts": "1e192eae38347257", | |
"hash_input_tokens": "64cfdbea3efceaa1", | |
"hash_cont_tokens": "32a757f8d355f2a0" | |
}, | |
"truncated": 0, | |
"non_truncated": 114, | |
"padded": 456, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:electrical_engineering|0": { | |
"hashes": { | |
"hash_examples": "cf97671d5c441da1", | |
"hash_full_prompts": "cf97671d5c441da1", | |
"hash_input_tokens": "2cf333f511d17622", | |
"hash_cont_tokens": "5947ce375c8d4f94" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 572, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:elementary_mathematics|0": { | |
"hashes": { | |
"hash_examples": "6f49107ed43c40c5", | |
"hash_full_prompts": "6f49107ed43c40c5", | |
"hash_input_tokens": "e5ac362f7eedd0d4", | |
"hash_cont_tokens": "0b9f4309f4f4b8cb" | |
}, | |
"truncated": 0, | |
"non_truncated": 378, | |
"padded": 1500, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:formal_logic|0": { | |
"hashes": { | |
"hash_examples": "7922c376008ba77b", | |
"hash_full_prompts": "7922c376008ba77b", | |
"hash_input_tokens": "0a89ac9c4a4eccaf", | |
"hash_cont_tokens": "c173b37d1b5975ac" | |
}, | |
"truncated": 0, | |
"non_truncated": 126, | |
"padded": 500, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:global_facts|0": { | |
"hashes": { | |
"hash_examples": "11f9813185047d5b", | |
"hash_full_prompts": "11f9813185047d5b", | |
"hash_input_tokens": "74623d7bec928129", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 384, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_biology|0": { | |
"hashes": { | |
"hash_examples": "2a804b1d90cbe66e", | |
"hash_full_prompts": "2a804b1d90cbe66e", | |
"hash_input_tokens": "bbb94cda458cd47a", | |
"hash_cont_tokens": "20a5ae47431c8974" | |
}, | |
"truncated": 0, | |
"non_truncated": 310, | |
"padded": 1232, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_chemistry|0": { | |
"hashes": { | |
"hash_examples": "0032168adabc53b4", | |
"hash_full_prompts": "0032168adabc53b4", | |
"hash_input_tokens": "746f4b3683bd8611", | |
"hash_cont_tokens": "ca9a3968b713d9ab" | |
}, | |
"truncated": 0, | |
"non_truncated": 203, | |
"padded": 804, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_computer_science|0": { | |
"hashes": { | |
"hash_examples": "f2fb8740f9df980f", | |
"hash_full_prompts": "f2fb8740f9df980f", | |
"hash_input_tokens": "0e2feaaf011f7422", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_european_history|0": { | |
"hashes": { | |
"hash_examples": "73509021e7e66435", | |
"hash_full_prompts": "73509021e7e66435", | |
"hash_input_tokens": "36dc99460a76ccd0", | |
"hash_cont_tokens": "ac8a29d33cd3b9a8" | |
}, | |
"truncated": 0, | |
"non_truncated": 165, | |
"padded": 660, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_geography|0": { | |
"hashes": { | |
"hash_examples": "9e08d1894940ff42", | |
"hash_full_prompts": "9e08d1894940ff42", | |
"hash_input_tokens": "6c4d6d3004cee1a8", | |
"hash_cont_tokens": "10ecff088a994b91" | |
}, | |
"truncated": 0, | |
"non_truncated": 198, | |
"padded": 784, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics|0": { | |
"hashes": { | |
"hash_examples": "64b7e97817ca6c76", | |
"hash_full_prompts": "64b7e97817ca6c76", | |
"hash_input_tokens": "bf5c42c2dd37041d", | |
"hash_cont_tokens": "643c4f9d0dab0b3c" | |
}, | |
"truncated": 0, | |
"non_truncated": 193, | |
"padded": 772, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics|0": { | |
"hashes": { | |
"hash_examples": "9f582da8534bd2ef", | |
"hash_full_prompts": "9f582da8534bd2ef", | |
"hash_input_tokens": "8b66c882bd294cb0", | |
"hash_cont_tokens": "4b654d5495255d29" | |
}, | |
"truncated": 0, | |
"non_truncated": 390, | |
"padded": 1560, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_mathematics|0": { | |
"hashes": { | |
"hash_examples": "fd54f1c10d423c51", | |
"hash_full_prompts": "fd54f1c10d423c51", | |
"hash_input_tokens": "f6483613c0d92bd6", | |
"hash_cont_tokens": "2f11c61d32af3331" | |
}, | |
"truncated": 0, | |
"non_truncated": 270, | |
"padded": 1068, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics|0": { | |
"hashes": { | |
"hash_examples": "7037896925aaf42f", | |
"hash_full_prompts": "7037896925aaf42f", | |
"hash_input_tokens": "21ec6197a22e403e", | |
"hash_cont_tokens": "135fa417f37ad7ac" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 952, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_physics|0": { | |
"hashes": { | |
"hash_examples": "60c3776215167dae", | |
"hash_full_prompts": "60c3776215167dae", | |
"hash_input_tokens": "effdb49d8c787295", | |
"hash_cont_tokens": "8f89893047813b1d" | |
}, | |
"truncated": 0, | |
"non_truncated": 151, | |
"padded": 604, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_psychology|0": { | |
"hashes": { | |
"hash_examples": "61176bfd5da1298f", | |
"hash_full_prompts": "61176bfd5da1298f", | |
"hash_input_tokens": "c7c2199d7ea2fdf5", | |
"hash_cont_tokens": "1158c59491db9726" | |
}, | |
"truncated": 0, | |
"non_truncated": 545, | |
"padded": 2160, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_statistics|0": { | |
"hashes": { | |
"hash_examples": "40dfeebd1ea10f76", | |
"hash_full_prompts": "40dfeebd1ea10f76", | |
"hash_input_tokens": "7d5146d95a7fb07d", | |
"hash_cont_tokens": "a678a03fe77bf965" | |
}, | |
"truncated": 0, | |
"non_truncated": 216, | |
"padded": 864, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_us_history|0": { | |
"hashes": { | |
"hash_examples": "03daa510ba917f4d", | |
"hash_full_prompts": "03daa510ba917f4d", | |
"hash_input_tokens": "3056080f2ba6f531", | |
"hash_cont_tokens": "25eae6d3c400bebd" | |
}, | |
"truncated": 0, | |
"non_truncated": 204, | |
"padded": 816, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_world_history|0": { | |
"hashes": { | |
"hash_examples": "be075ffd579f43c2", | |
"hash_full_prompts": "be075ffd579f43c2", | |
"hash_input_tokens": "31bba959b8a3caf0", | |
"hash_cont_tokens": "300f9b345c31f0b1" | |
}, | |
"truncated": 0, | |
"non_truncated": 237, | |
"padded": 948, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:human_aging|0": { | |
"hashes": { | |
"hash_examples": "caa5b69f640bd1ef", | |
"hash_full_prompts": "caa5b69f640bd1ef", | |
"hash_input_tokens": "b431ed61cafa69f0", | |
"hash_cont_tokens": "252787894bc45b2b" | |
}, | |
"truncated": 0, | |
"non_truncated": 223, | |
"padded": 884, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:human_sexuality|0": { | |
"hashes": { | |
"hash_examples": "5ed2e38fb25a3767", | |
"hash_full_prompts": "5ed2e38fb25a3767", | |
"hash_input_tokens": "d8af6af2834f977f", | |
"hash_cont_tokens": "0e38eb86cc5fa928" | |
}, | |
"truncated": 0, | |
"non_truncated": 131, | |
"padded": 520, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:international_law|0": { | |
"hashes": { | |
"hash_examples": "4e3e9e28d1b96484", | |
"hash_full_prompts": "4e3e9e28d1b96484", | |
"hash_input_tokens": "09dc5a15f9355a35", | |
"hash_cont_tokens": "bb6bc9f3cdead830" | |
}, | |
"truncated": 0, | |
"non_truncated": 121, | |
"padded": 484, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:jurisprudence|0": { | |
"hashes": { | |
"hash_examples": "e264b755366310b3", | |
"hash_full_prompts": "e264b755366310b3", | |
"hash_input_tokens": "ca40f8ca4cd68503", | |
"hash_cont_tokens": "1fc1efd21cb126c9" | |
}, | |
"truncated": 0, | |
"non_truncated": 108, | |
"padded": 432, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:logical_fallacies|0": { | |
"hashes": { | |
"hash_examples": "a4ab6965a3e38071", | |
"hash_full_prompts": "a4ab6965a3e38071", | |
"hash_input_tokens": "e9917a15f82e3723", | |
"hash_cont_tokens": "2df1296d79762f44" | |
}, | |
"truncated": 0, | |
"non_truncated": 163, | |
"padded": 648, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:machine_learning|0": { | |
"hashes": { | |
"hash_examples": "b92320efa6636b40", | |
"hash_full_prompts": "b92320efa6636b40", | |
"hash_input_tokens": "4d0cfc38addcdc7c", | |
"hash_cont_tokens": "449fc9db17760574" | |
}, | |
"truncated": 0, | |
"non_truncated": 112, | |
"padded": 448, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:management|0": { | |
"hashes": { | |
"hash_examples": "c9ee4872a850fe20", | |
"hash_full_prompts": "c9ee4872a850fe20", | |
"hash_input_tokens": "863414630ebbbccb", | |
"hash_cont_tokens": "034a8a66a778e9c0" | |
}, | |
"truncated": 0, | |
"non_truncated": 103, | |
"padded": 412, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:marketing|0": { | |
"hashes": { | |
"hash_examples": "0c151b70f6a047e3", | |
"hash_full_prompts": "0c151b70f6a047e3", | |
"hash_input_tokens": "018b7f702850cdb6", | |
"hash_cont_tokens": "7459eaa48b2c84b9" | |
}, | |
"truncated": 0, | |
"non_truncated": 234, | |
"padded": 928, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:medical_genetics|0": { | |
"hashes": { | |
"hash_examples": "513f6cb8fca3a24e", | |
"hash_full_prompts": "513f6cb8fca3a24e", | |
"hash_input_tokens": "cf21d8db4eb4bec3", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 392, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:miscellaneous|0": { | |
"hashes": { | |
"hash_examples": "259a190d635331db", | |
"hash_full_prompts": "259a190d635331db", | |
"hash_input_tokens": "bb311e58bfedb1b0", | |
"hash_cont_tokens": "8e78e780fbb96e44" | |
}, | |
"truncated": 0, | |
"non_truncated": 783, | |
"padded": 3108, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:moral_disputes|0": { | |
"hashes": { | |
"hash_examples": "b85052c48a0b7bc3", | |
"hash_full_prompts": "b85052c48a0b7bc3", | |
"hash_input_tokens": "191042fca474c972", | |
"hash_cont_tokens": "0fae2c96aad2ef25" | |
}, | |
"truncated": 0, | |
"non_truncated": 346, | |
"padded": 1376, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:moral_scenarios|0": { | |
"hashes": { | |
"hash_examples": "28d0b069ef00dd00", | |
"hash_full_prompts": "28d0b069ef00dd00", | |
"hash_input_tokens": "a3f0c7a0c9d2f5b1", | |
"hash_cont_tokens": "b9dfc805a8270799" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3580, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:nutrition|0": { | |
"hashes": { | |
"hash_examples": "00c9bc5f1d305b2f", | |
"hash_full_prompts": "00c9bc5f1d305b2f", | |
"hash_input_tokens": "ad2b1910f7e6e754", | |
"hash_cont_tokens": "fb932860c33c3c1c" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1216, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:philosophy|0": { | |
"hashes": { | |
"hash_examples": "a458c08454a3fd5f", | |
"hash_full_prompts": "a458c08454a3fd5f", | |
"hash_input_tokens": "b1a936aeb46a9ffd", | |
"hash_cont_tokens": "1d4cf9a72972fc20" | |
}, | |
"truncated": 0, | |
"non_truncated": 311, | |
"padded": 1236, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:prehistory|0": { | |
"hashes": { | |
"hash_examples": "d6a0ecbdbb670e9c", | |
"hash_full_prompts": "d6a0ecbdbb670e9c", | |
"hash_input_tokens": "5f8f90a30da376af", | |
"hash_cont_tokens": "d7c7aca8f5dd4628" | |
}, | |
"truncated": 0, | |
"non_truncated": 324, | |
"padded": 1292, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_accounting|0": { | |
"hashes": { | |
"hash_examples": "b4a95fe480b6540e", | |
"hash_full_prompts": "b4a95fe480b6540e", | |
"hash_input_tokens": "5cb62adf610f407b", | |
"hash_cont_tokens": "ef4fb34aca9be0ad" | |
}, | |
"truncated": 0, | |
"non_truncated": 282, | |
"padded": 1128, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_law|0": { | |
"hashes": { | |
"hash_examples": "c2be9651cdbdde3b", | |
"hash_full_prompts": "c2be9651cdbdde3b", | |
"hash_input_tokens": "88d27c98f251624c", | |
"hash_cont_tokens": "fb015510a9058fb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 1534, | |
"padded": 6132, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_medicine|0": { | |
"hashes": { | |
"hash_examples": "26ce92416288f273", | |
"hash_full_prompts": "26ce92416288f273", | |
"hash_input_tokens": "e719e8a39abfc647", | |
"hash_cont_tokens": "a05a5c8e86e9dc80" | |
}, | |
"truncated": 0, | |
"non_truncated": 272, | |
"padded": 1088, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_psychology|0": { | |
"hashes": { | |
"hash_examples": "71ea5f182ea9a641", | |
"hash_full_prompts": "71ea5f182ea9a641", | |
"hash_input_tokens": "7e96b2f2c753c671", | |
"hash_cont_tokens": "9d7ccafb1da8d017" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2444, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:public_relations|0": { | |
"hashes": { | |
"hash_examples": "125adc21f91f8d77", | |
"hash_full_prompts": "125adc21f91f8d77", | |
"hash_input_tokens": "008ae17fcaff35e4", | |
"hash_cont_tokens": "5ce20805245c79b4" | |
}, | |
"truncated": 0, | |
"non_truncated": 110, | |
"padded": 440, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:security_studies|0": { | |
"hashes": { | |
"hash_examples": "3c18b216c099fb26", | |
"hash_full_prompts": "3c18b216c099fb26", | |
"hash_input_tokens": "39e6388b078db45e", | |
"hash_cont_tokens": "192c8c1a52cd702b" | |
}, | |
"truncated": 0, | |
"non_truncated": 245, | |
"padded": 980, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:sociology|0": { | |
"hashes": { | |
"hash_examples": "3f2a9634cef7417d", | |
"hash_full_prompts": "3f2a9634cef7417d", | |
"hash_input_tokens": "dbda9a19adba38c6", | |
"hash_cont_tokens": "a4f282456f4383b5" | |
}, | |
"truncated": 0, | |
"non_truncated": 201, | |
"padded": 804, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:us_foreign_policy|0": { | |
"hashes": { | |
"hash_examples": "22249da54056475e", | |
"hash_full_prompts": "22249da54056475e", | |
"hash_input_tokens": "7118bde6e18d8aa9", | |
"hash_cont_tokens": "1cb63e6fc9a7f25d" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:virology|0": { | |
"hashes": { | |
"hash_examples": "9d194b9471dc624e", | |
"hash_full_prompts": "9d194b9471dc624e", | |
"hash_input_tokens": "88ccdf93d9078273", | |
"hash_cont_tokens": "89a994f9a7654532" | |
}, | |
"truncated": 0, | |
"non_truncated": 166, | |
"padded": 660, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:world_religions|0": { | |
"hashes": { | |
"hash_examples": "229e5fe50082b064", | |
"hash_full_prompts": "229e5fe50082b064", | |
"hash_input_tokens": "fe9cc1539b810463", | |
"hash_cont_tokens": "8ec586889ca9cee2" | |
}, | |
"truncated": 0, | |
"non_truncated": 171, | |
"padded": 664, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arc_challenge_okapi_ar|0": { | |
"hashes": { | |
"hash_examples": "ab893807673bc355", | |
"hash_full_prompts": "ab893807673bc355", | |
"hash_input_tokens": "9f966fae3e217d0d", | |
"hash_cont_tokens": "db42079d5cdd6b61" | |
}, | |
"truncated": 0, | |
"non_truncated": 1160, | |
"padded": 4624, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arc_easy_ar|0": { | |
"hashes": { | |
"hash_examples": "acb688624acc3d04", | |
"hash_full_prompts": "acb688624acc3d04", | |
"hash_input_tokens": "5b707c8036e55b57", | |
"hash_cont_tokens": "089334da50947e55" | |
}, | |
"truncated": 0, | |
"non_truncated": 2364, | |
"padded": 9411, | |
"non_padded": 45, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|boolq_ar|0": { | |
"hashes": { | |
"hash_examples": "48355a67867e0c32", | |
"hash_full_prompts": "48355a67867e0c32", | |
"hash_input_tokens": "829379c1940468ee", | |
"hash_cont_tokens": "13d198baaf2593bd" | |
}, | |
"truncated": 0, | |
"non_truncated": 3260, | |
"padded": 6500, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|copa_ext_ar|0": { | |
"hashes": { | |
"hash_examples": "9bb83301bb72eecf", | |
"hash_full_prompts": "9bb83301bb72eecf", | |
"hash_input_tokens": "bfa6461bd9200f42", | |
"hash_cont_tokens": "ff643c29c1639645" | |
}, | |
"truncated": 0, | |
"non_truncated": 90, | |
"padded": 180, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|hellaswag_okapi_ar|0": { | |
"hashes": { | |
"hash_examples": "6e8cf57a322dfadd", | |
"hash_full_prompts": "6e8cf57a322dfadd", | |
"hash_input_tokens": "4bd10e016f185bab", | |
"hash_cont_tokens": "737fd4356838c8cf" | |
}, | |
"truncated": 0, | |
"non_truncated": 9171, | |
"padded": 36674, | |
"non_padded": 10, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|openbook_qa_ext_ar|0": { | |
"hashes": { | |
"hash_examples": "923d41eb0aca93eb", | |
"hash_full_prompts": "923d41eb0aca93eb", | |
"hash_input_tokens": "5626171a4d19d55a", | |
"hash_cont_tokens": "9e70f1d2200fc2fb" | |
}, | |
"truncated": 0, | |
"non_truncated": 495, | |
"padded": 1971, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|piqa_ar|0": { | |
"hashes": { | |
"hash_examples": "94bc205a520d3ea0", | |
"hash_full_prompts": "94bc205a520d3ea0", | |
"hash_input_tokens": "9fa75fedb5a95d19", | |
"hash_cont_tokens": "eacb1e2fc7cfeeaf" | |
}, | |
"truncated": 0, | |
"non_truncated": 1833, | |
"padded": 3644, | |
"non_padded": 22, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|race_ar|0": { | |
"hashes": { | |
"hash_examples": "de65130bae647516", | |
"hash_full_prompts": "de65130bae647516", | |
"hash_input_tokens": "1f9f0b1f8d1c16b2", | |
"hash_cont_tokens": "890f40d626d3baf6" | |
}, | |
"truncated": 0, | |
"non_truncated": 4929, | |
"padded": 19709, | |
"non_padded": 7, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|sciq_ar|0": { | |
"hashes": { | |
"hash_examples": "aa26b6fac4ce117e", | |
"hash_full_prompts": "aa26b6fac4ce117e", | |
"hash_input_tokens": "abbe4488e22f6a98", | |
"hash_cont_tokens": "f93268950d4e04d3" | |
}, | |
"truncated": 0, | |
"non_truncated": 995, | |
"padded": 3972, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|toxigen_ar|0": { | |
"hashes": { | |
"hash_examples": "1e139513004a9a2e", | |
"hash_full_prompts": "1e139513004a9a2e", | |
"hash_input_tokens": "f6b579664bfa7bd1", | |
"hash_cont_tokens": "7a551a614a63edab" | |
}, | |
"truncated": 0, | |
"non_truncated": 935, | |
"padded": 1854, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|xstory_cloze:ar|0": { | |
"hashes": { | |
"hash_examples": "865426a22c787481", | |
"hash_full_prompts": "865426a22c787481", | |
"hash_input_tokens": "596365869aff88a2", | |
"hash_cont_tokens": "61dd46b53fe7b4a6" | |
}, | |
"truncated": 0, | |
"non_truncated": 1511, | |
"padded": 2984, | |
"non_padded": 38, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "4a5ecc7e5b35104b", | |
"hash_full_prompts": "4a5ecc7e5b35104b", | |
"hash_input_tokens": "3bfcb68dee2d0829", | |
"hash_cont_tokens": "d70a9cb59b6d4bf9" | |
}, | |
"truncated": 0, | |
"non_truncated": 72964, | |
"padded": 234960, | |
"non_padded": 663, | |
"num_truncated_few_shots": 0 | |
} | |
} |