{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 588.366592704,
        "end_time": 3969.258125471,
        "total_evaluation_time_secondes": "3380.8915327669997",
        "model_name": "google/gemma-7b",
        "model_sha": "a0eac5b80dba224e6ed79d306df50b1e92c2125d",
        "model_dtype": "torch.bfloat16",
        "model_size": "15.91 GB",
        "config": null
    },
| "results": { | |
| "community|acva:Algeria|0": { | |
| "acc_norm": 0.5282051282051282, | |
| "acc_norm_stderr": 0.035840746749208334 | |
| }, | |
| "community|acva:Ancient_Egypt|0": { | |
| "acc_norm": 0.050793650793650794, | |
| "acc_norm_stderr": 0.01239139518482262 | |
| }, | |
| "community|acva:Arab_Empire|0": { | |
| "acc_norm": 0.30943396226415093, | |
| "acc_norm_stderr": 0.028450154794118627 | |
| }, | |
| "community|acva:Arabic_Architecture|0": { | |
| "acc_norm": 0.4564102564102564, | |
| "acc_norm_stderr": 0.035761230969912135 | |
| }, | |
| "community|acva:Arabic_Art|0": { | |
| "acc_norm": 0.358974358974359, | |
| "acc_norm_stderr": 0.03444042881521377 | |
| }, | |
| "community|acva:Arabic_Astronomy|0": { | |
| "acc_norm": 0.4666666666666667, | |
| "acc_norm_stderr": 0.03581804596782233 | |
| }, | |
| "community|acva:Arabic_Calligraphy|0": { | |
| "acc_norm": 0.7058823529411765, | |
| "acc_norm_stderr": 0.02858971627977945 | |
| }, | |
| "community|acva:Arabic_Ceremony|0": { | |
| "acc_norm": 0.518918918918919, | |
| "acc_norm_stderr": 0.036834092970087065 | |
| }, | |
| "community|acva:Arabic_Clothing|0": { | |
| "acc_norm": 0.517948717948718, | |
| "acc_norm_stderr": 0.03587477098773825 | |
| }, | |
| "community|acva:Arabic_Culture|0": { | |
| "acc_norm": 0.2358974358974359, | |
| "acc_norm_stderr": 0.030481516761721537 | |
| }, | |
| "community|acva:Arabic_Food|0": { | |
| "acc_norm": 0.47692307692307695, | |
| "acc_norm_stderr": 0.03585965308947409 | |
| }, | |
| "community|acva:Arabic_Funeral|0": { | |
| "acc_norm": 0.4, | |
| "acc_norm_stderr": 0.050529115263991134 | |
| }, | |
| "community|acva:Arabic_Geography|0": { | |
| "acc_norm": 0.6206896551724138, | |
| "acc_norm_stderr": 0.04043461861916747 | |
| }, | |
| "community|acva:Arabic_History|0": { | |
| "acc_norm": 0.30256410256410254, | |
| "acc_norm_stderr": 0.03298070870085619 | |
| }, | |
| "community|acva:Arabic_Language_Origin|0": { | |
| "acc_norm": 0.5368421052631579, | |
| "acc_norm_stderr": 0.05143087276324537 | |
| }, | |
| "community|acva:Arabic_Literature|0": { | |
| "acc_norm": 0.4689655172413793, | |
| "acc_norm_stderr": 0.04158632762097828 | |
| }, | |
| "community|acva:Arabic_Math|0": { | |
| "acc_norm": 0.30256410256410254, | |
| "acc_norm_stderr": 0.03298070870085618 | |
| }, | |
| "community|acva:Arabic_Medicine|0": { | |
| "acc_norm": 0.46206896551724136, | |
| "acc_norm_stderr": 0.041546596717075474 | |
| }, | |
| "community|acva:Arabic_Music|0": { | |
| "acc_norm": 0.23741007194244604, | |
| "acc_norm_stderr": 0.036220593237998276 | |
| }, | |
| "community|acva:Arabic_Ornament|0": { | |
| "acc_norm": 0.5538461538461539, | |
| "acc_norm_stderr": 0.035689135465692336 | |
| }, | |
| "community|acva:Arabic_Philosophy|0": { | |
| "acc_norm": 0.5793103448275863, | |
| "acc_norm_stderr": 0.0411391498118926 | |
| }, | |
| "community|acva:Arabic_Physics_and_Chemistry|0": { | |
| "acc_norm": 0.5333333333333333, | |
| "acc_norm_stderr": 0.03581804596782232 | |
| }, | |
| "community|acva:Arabic_Wedding|0": { | |
| "acc_norm": 0.4153846153846154, | |
| "acc_norm_stderr": 0.03538013280575029 | |
| }, | |
| "community|acva:Bahrain|0": { | |
| "acc_norm": 0.3111111111111111, | |
| "acc_norm_stderr": 0.06979205927323111 | |
| }, | |
| "community|acva:Comoros|0": { | |
| "acc_norm": 0.37777777777777777, | |
| "acc_norm_stderr": 0.07309112127323451 | |
| }, | |
| "community|acva:Egypt_modern|0": { | |
| "acc_norm": 0.3263157894736842, | |
| "acc_norm_stderr": 0.04835966701461423 | |
| }, | |
| "community|acva:InfluenceFromAncientEgypt|0": { | |
| "acc_norm": 0.6051282051282051, | |
| "acc_norm_stderr": 0.03509545602262038 | |
| }, | |
| "community|acva:InfluenceFromByzantium|0": { | |
| "acc_norm": 0.7172413793103448, | |
| "acc_norm_stderr": 0.03752833958003337 | |
| }, | |
| "community|acva:InfluenceFromChina|0": { | |
| "acc_norm": 0.26666666666666666, | |
| "acc_norm_stderr": 0.0317493043641267 | |
| }, | |
| "community|acva:InfluenceFromGreece|0": { | |
| "acc_norm": 0.6307692307692307, | |
| "acc_norm_stderr": 0.034648411418637566 | |
| }, | |
| "community|acva:InfluenceFromIslam|0": { | |
| "acc_norm": 0.296551724137931, | |
| "acc_norm_stderr": 0.03806142687309993 | |
| }, | |
| "community|acva:InfluenceFromPersia|0": { | |
| "acc_norm": 0.6857142857142857, | |
| "acc_norm_stderr": 0.03519324354579657 | |
| }, | |
| "community|acva:InfluenceFromRome|0": { | |
| "acc_norm": 0.5743589743589743, | |
| "acc_norm_stderr": 0.03549871080367708 | |
| }, | |
| "community|acva:Iraq|0": { | |
| "acc_norm": 0.5058823529411764, | |
| "acc_norm_stderr": 0.05455069703232772 | |
| }, | |
| "community|acva:Islam_Education|0": { | |
| "acc_norm": 0.4564102564102564, | |
| "acc_norm_stderr": 0.03576123096991215 | |
| }, | |
| "community|acva:Islam_branches_and_schools|0": { | |
| "acc_norm": 0.4342857142857143, | |
| "acc_norm_stderr": 0.037576101528126626 | |
| }, | |
| "community|acva:Islamic_law_system|0": { | |
| "acc_norm": 0.4256410256410256, | |
| "acc_norm_stderr": 0.035498710803677086 | |
| }, | |
| "community|acva:Jordan|0": { | |
| "acc_norm": 0.3333333333333333, | |
| "acc_norm_stderr": 0.07106690545187012 | |
| }, | |
| "community|acva:Kuwait|0": { | |
| "acc_norm": 0.26666666666666666, | |
| "acc_norm_stderr": 0.06666666666666667 | |
| }, | |
| "community|acva:Lebanon|0": { | |
| "acc_norm": 0.17777777777777778, | |
| "acc_norm_stderr": 0.05763774795025094 | |
| }, | |
| "community|acva:Libya|0": { | |
| "acc_norm": 0.4444444444444444, | |
| "acc_norm_stderr": 0.07491109582924914 | |
| }, | |
| "community|acva:Mauritania|0": { | |
| "acc_norm": 0.4222222222222222, | |
| "acc_norm_stderr": 0.07446027270295805 | |
| }, | |
| "community|acva:Mesopotamia_civilization|0": { | |
| "acc_norm": 0.5225806451612903, | |
| "acc_norm_stderr": 0.0402500394824441 | |
| }, | |
| "community|acva:Morocco|0": { | |
| "acc_norm": 0.2222222222222222, | |
| "acc_norm_stderr": 0.06267511942419628 | |
| }, | |
| "community|acva:Oman|0": { | |
| "acc_norm": 0.2, | |
| "acc_norm_stderr": 0.06030226891555273 | |
| }, | |
| "community|acva:Palestine|0": { | |
| "acc_norm": 0.24705882352941178, | |
| "acc_norm_stderr": 0.047058823529411785 | |
| }, | |
| "community|acva:Qatar|0": { | |
| "acc_norm": 0.4222222222222222, | |
| "acc_norm_stderr": 0.07446027270295806 | |
| }, | |
| "community|acva:Saudi_Arabia|0": { | |
| "acc_norm": 0.3282051282051282, | |
| "acc_norm_stderr": 0.03371243782413707 | |
| }, | |
| "community|acva:Somalia|0": { | |
| "acc_norm": 0.35555555555555557, | |
| "acc_norm_stderr": 0.07216392363431012 | |
| }, | |
| "community|acva:Sudan|0": { | |
| "acc_norm": 0.35555555555555557, | |
| "acc_norm_stderr": 0.07216392363431012 | |
| }, | |
| "community|acva:Syria|0": { | |
| "acc_norm": 0.3333333333333333, | |
| "acc_norm_stderr": 0.07106690545187012 | |
| }, | |
| "community|acva:Tunisia|0": { | |
| "acc_norm": 0.3111111111111111, | |
| "acc_norm_stderr": 0.06979205927323111 | |
| }, | |
| "community|acva:United_Arab_Emirates|0": { | |
| "acc_norm": 0.23529411764705882, | |
| "acc_norm_stderr": 0.04628210543937907 | |
| }, | |
| "community|acva:Yemen|0": { | |
| "acc_norm": 0.2, | |
| "acc_norm_stderr": 0.13333333333333333 | |
| }, | |
| "community|acva:communication|0": { | |
| "acc_norm": 0.42857142857142855, | |
| "acc_norm_stderr": 0.025974025974025955 | |
| }, | |
| "community|acva:computer_and_phone|0": { | |
| "acc_norm": 0.45084745762711864, | |
| "acc_norm_stderr": 0.02901934773187137 | |
| }, | |
| "community|acva:daily_life|0": { | |
| "acc_norm": 0.18694362017804153, | |
| "acc_norm_stderr": 0.021268948348414647 | |
| }, | |
| "community|acva:entertainment|0": { | |
| "acc_norm": 0.23389830508474577, | |
| "acc_norm_stderr": 0.024687839412166384 | |
| }, | |
| "community|alghafa:mcq_exams_test_ar|0": { | |
| "acc_norm": 0.3393177737881508, | |
| "acc_norm_stderr": 0.02007993120042186 | |
| }, | |
| "community|alghafa:meta_ar_dialects|0": { | |
| "acc_norm": 0.3149212233549583, | |
| "acc_norm_stderr": 0.00632434787579355 | |
| }, | |
| "community|alghafa:meta_ar_msa|0": { | |
| "acc_norm": 0.3452513966480447, | |
| "acc_norm_stderr": 0.01590143260893035 | |
| }, | |
| "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
| "acc_norm": 0.68, | |
| "acc_norm_stderr": 0.05422675115236519 | |
| }, | |
| "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
| "acc_norm": 0.5666666666666667, | |
| "acc_norm_stderr": 0.04059586016811274 | |
| }, | |
| "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
| "acc_norm": 0.44666666666666666, | |
| "acc_norm_stderr": 0.040727903430234656 | |
| }, | |
| "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
| "acc_norm": 0.7797373358348968, | |
| "acc_norm_stderr": 0.004635136593016933 | |
| }, | |
| "community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
| "acc_norm": 0.49608006672226856, | |
| "acc_norm_stderr": 0.006458003677505753 | |
| }, | |
| "community|alghafa:multiple_choice_sentiment_task|0": { | |
| "acc_norm": 0.34069767441860466, | |
| "acc_norm_stderr": 0.011431124374208216 | |
| }, | |
| "community|arabic_exams|0": { | |
| "acc_norm": 0.41899441340782123, | |
| "acc_norm_stderr": 0.02131139484554668 | |
| }, | |
| "community|arabic_mmlu:abstract_algebra|0": { | |
| "acc_norm": 0.32, | |
| "acc_norm_stderr": 0.046882617226215034 | |
| }, | |
| "community|arabic_mmlu:anatomy|0": { | |
| "acc_norm": 0.37037037037037035, | |
| "acc_norm_stderr": 0.04171654161354543 | |
| }, | |
| "community|arabic_mmlu:astronomy|0": { | |
| "acc_norm": 0.5, | |
| "acc_norm_stderr": 0.04068942293855797 | |
| }, | |
| "community|arabic_mmlu:business_ethics|0": { | |
| "acc_norm": 0.59, | |
| "acc_norm_stderr": 0.04943110704237101 | |
| }, | |
| "community|arabic_mmlu:clinical_knowledge|0": { | |
| "acc_norm": 0.5245283018867924, | |
| "acc_norm_stderr": 0.030735822206205615 | |
| }, | |
| "community|arabic_mmlu:college_biology|0": { | |
| "acc_norm": 0.4583333333333333, | |
| "acc_norm_stderr": 0.04166666666666666 | |
| }, | |
| "community|arabic_mmlu:college_chemistry|0": { | |
| "acc_norm": 0.45, | |
| "acc_norm_stderr": 0.05 | |
| }, | |
| "community|arabic_mmlu:college_computer_science|0": { | |
| "acc_norm": 0.41, | |
| "acc_norm_stderr": 0.04943110704237102 | |
| }, | |
| "community|arabic_mmlu:college_mathematics|0": { | |
| "acc_norm": 0.28, | |
| "acc_norm_stderr": 0.04512608598542127 | |
| }, | |
| "community|arabic_mmlu:college_medicine|0": { | |
| "acc_norm": 0.3930635838150289, | |
| "acc_norm_stderr": 0.037242495958177295 | |
| }, | |
| "community|arabic_mmlu:college_physics|0": { | |
| "acc_norm": 0.23529411764705882, | |
| "acc_norm_stderr": 0.04220773659171452 | |
| }, | |
| "community|arabic_mmlu:computer_security|0": { | |
| "acc_norm": 0.49, | |
| "acc_norm_stderr": 0.05024183937956912 | |
| }, | |
| "community|arabic_mmlu:conceptual_physics|0": { | |
| "acc_norm": 0.4851063829787234, | |
| "acc_norm_stderr": 0.03267151848924777 | |
| }, | |
| "community|arabic_mmlu:econometrics|0": { | |
| "acc_norm": 0.34210526315789475, | |
| "acc_norm_stderr": 0.04462917535336936 | |
| }, | |
| "community|arabic_mmlu:electrical_engineering|0": { | |
| "acc_norm": 0.46206896551724136, | |
| "acc_norm_stderr": 0.041546596717075474 | |
| }, | |
| "community|arabic_mmlu:elementary_mathematics|0": { | |
| "acc_norm": 0.373015873015873, | |
| "acc_norm_stderr": 0.02490699045899257 | |
| }, | |
| "community|arabic_mmlu:formal_logic|0": { | |
| "acc_norm": 0.4444444444444444, | |
| "acc_norm_stderr": 0.044444444444444495 | |
| }, | |
| "community|arabic_mmlu:global_facts|0": { | |
| "acc_norm": 0.38, | |
| "acc_norm_stderr": 0.04878317312145632 | |
| }, | |
| "community|arabic_mmlu:high_school_biology|0": { | |
| "acc_norm": 0.535483870967742, | |
| "acc_norm_stderr": 0.028372287797962935 | |
| }, | |
| "community|arabic_mmlu:high_school_chemistry|0": { | |
| "acc_norm": 0.43842364532019706, | |
| "acc_norm_stderr": 0.03491207857486519 | |
| }, | |
| "community|arabic_mmlu:high_school_computer_science|0": { | |
| "acc_norm": 0.47, | |
| "acc_norm_stderr": 0.05016135580465919 | |
| }, | |
| "community|arabic_mmlu:high_school_european_history|0": { | |
| "acc_norm": 0.21818181818181817, | |
| "acc_norm_stderr": 0.032250781083062896 | |
| }, | |
| "community|arabic_mmlu:high_school_geography|0": { | |
| "acc_norm": 0.5555555555555556, | |
| "acc_norm_stderr": 0.035402943770953675 | |
| }, | |
| "community|arabic_mmlu:high_school_government_and_politics|0": { | |
| "acc_norm": 0.5025906735751295, | |
| "acc_norm_stderr": 0.03608390745384487 | |
| }, | |
| "community|arabic_mmlu:high_school_macroeconomics|0": { | |
| "acc_norm": 0.46923076923076923, | |
| "acc_norm_stderr": 0.025302958890850154 | |
| }, | |
| "community|arabic_mmlu:high_school_mathematics|0": { | |
| "acc_norm": 0.3333333333333333, | |
| "acc_norm_stderr": 0.028742040903948506 | |
| }, | |
| "community|arabic_mmlu:high_school_microeconomics|0": { | |
| "acc_norm": 0.47478991596638653, | |
| "acc_norm_stderr": 0.0324371805513741 | |
| }, | |
| "community|arabic_mmlu:high_school_physics|0": { | |
| "acc_norm": 0.271523178807947, | |
| "acc_norm_stderr": 0.03631329803969653 | |
| }, | |
| "community|arabic_mmlu:high_school_psychology|0": { | |
| "acc_norm": 0.4935779816513762, | |
| "acc_norm_stderr": 0.021435554820013077 | |
| }, | |
| "community|arabic_mmlu:high_school_statistics|0": { | |
| "acc_norm": 0.2962962962962963, | |
| "acc_norm_stderr": 0.03114144782353602 | |
| }, | |
| "community|arabic_mmlu:high_school_us_history|0": { | |
| "acc_norm": 0.30392156862745096, | |
| "acc_norm_stderr": 0.032282103870378914 | |
| }, | |
| "community|arabic_mmlu:high_school_world_history|0": { | |
| "acc_norm": 0.3459915611814346, | |
| "acc_norm_stderr": 0.030964810588786716 | |
| }, | |
| "community|arabic_mmlu:human_aging|0": { | |
| "acc_norm": 0.4798206278026906, | |
| "acc_norm_stderr": 0.033530461674123 | |
| }, | |
| "community|arabic_mmlu:human_sexuality|0": { | |
| "acc_norm": 0.5038167938931297, | |
| "acc_norm_stderr": 0.04385162325601553 | |
| }, | |
| "community|arabic_mmlu:international_law|0": { | |
| "acc_norm": 0.6115702479338843, | |
| "acc_norm_stderr": 0.04449270350068383 | |
| }, | |
| "community|arabic_mmlu:jurisprudence|0": { | |
| "acc_norm": 0.5185185185185185, | |
| "acc_norm_stderr": 0.04830366024635331 | |
| }, | |
| "community|arabic_mmlu:logical_fallacies|0": { | |
| "acc_norm": 0.48466257668711654, | |
| "acc_norm_stderr": 0.039265223787088424 | |
| }, | |
| "community|arabic_mmlu:machine_learning|0": { | |
| "acc_norm": 0.32142857142857145, | |
| "acc_norm_stderr": 0.04432804055291519 | |
| }, | |
| "community|arabic_mmlu:management|0": { | |
| "acc_norm": 0.47572815533980584, | |
| "acc_norm_stderr": 0.04944901092973781 | |
| }, | |
| "community|arabic_mmlu:marketing|0": { | |
| "acc_norm": 0.7435897435897436, | |
| "acc_norm_stderr": 0.02860595370200425 | |
| }, | |
| "community|arabic_mmlu:medical_genetics|0": { | |
| "acc_norm": 0.37, | |
| "acc_norm_stderr": 0.04852365870939099 | |
| }, | |
| "community|arabic_mmlu:miscellaneous|0": { | |
| "acc_norm": 0.51213282247765, | |
| "acc_norm_stderr": 0.017874698667491345 | |
| }, | |
| "community|arabic_mmlu:moral_disputes|0": { | |
| "acc_norm": 0.49710982658959535, | |
| "acc_norm_stderr": 0.02691864538323901 | |
| }, | |
| "community|arabic_mmlu:moral_scenarios|0": { | |
| "acc_norm": 0.2737430167597765, | |
| "acc_norm_stderr": 0.014912413096372434 | |
| }, | |
| "community|arabic_mmlu:nutrition|0": { | |
| "acc_norm": 0.545751633986928, | |
| "acc_norm_stderr": 0.028509807802626585 | |
| }, | |
| "community|arabic_mmlu:philosophy|0": { | |
| "acc_norm": 0.5594855305466238, | |
| "acc_norm_stderr": 0.028196400574197426 | |
| }, | |
| "community|arabic_mmlu:prehistory|0": { | |
| "acc_norm": 0.4876543209876543, | |
| "acc_norm_stderr": 0.027812262269327242 | |
| }, | |
| "community|arabic_mmlu:professional_accounting|0": { | |
| "acc_norm": 0.32978723404255317, | |
| "acc_norm_stderr": 0.0280459469420424 | |
| }, | |
| "community|arabic_mmlu:professional_law|0": { | |
| "acc_norm": 0.3305084745762712, | |
| "acc_norm_stderr": 0.012014142101842956 | |
| }, | |
| "community|arabic_mmlu:professional_medicine|0": { | |
| "acc_norm": 0.21323529411764705, | |
| "acc_norm_stderr": 0.024880971512294243 | |
| }, | |
| "community|arabic_mmlu:professional_psychology|0": { | |
| "acc_norm": 0.4117647058823529, | |
| "acc_norm_stderr": 0.019910377463105932 | |
| }, | |
| "community|arabic_mmlu:public_relations|0": { | |
| "acc_norm": 0.4909090909090909, | |
| "acc_norm_stderr": 0.04788339768702861 | |
| }, | |
| "community|arabic_mmlu:security_studies|0": { | |
| "acc_norm": 0.5795918367346938, | |
| "acc_norm_stderr": 0.03160106993449601 | |
| }, | |
| "community|arabic_mmlu:sociology|0": { | |
| "acc_norm": 0.5621890547263682, | |
| "acc_norm_stderr": 0.035080801121998406 | |
| }, | |
| "community|arabic_mmlu:us_foreign_policy|0": { | |
| "acc_norm": 0.72, | |
| "acc_norm_stderr": 0.04512608598542128 | |
| }, | |
| "community|arabic_mmlu:virology|0": { | |
| "acc_norm": 0.43373493975903615, | |
| "acc_norm_stderr": 0.03858158940685517 | |
| }, | |
| "community|arabic_mmlu:world_religions|0": { | |
| "acc_norm": 0.5321637426900585, | |
| "acc_norm_stderr": 0.03826882417660371 | |
| }, | |
| "community|arc_challenge_okapi_ar|0": { | |
| "acc_norm": 0.421551724137931, | |
| "acc_norm_stderr": 0.014504941528457622 | |
| }, | |
| "community|arc_easy_ar|0": { | |
| "acc_norm": 0.4560067681895093, | |
| "acc_norm_stderr": 0.01024590988148752 | |
| }, | |
| "community|boolq_ar|0": { | |
| "acc_norm": 0.6794478527607362, | |
| "acc_norm_stderr": 0.00817495235508086 | |
| }, | |
| "community|copa_ext_ar|0": { | |
| "acc_norm": 0.4777777777777778, | |
| "acc_norm_stderr": 0.05294752255076824 | |
| }, | |
| "community|hellaswag_okapi_ar|0": { | |
| "acc_norm": 0.2690001090393632, | |
| "acc_norm_stderr": 0.004630738067141277 | |
| }, | |
| "community|openbook_qa_ext_ar|0": { | |
| "acc_norm": 0.46464646464646464, | |
| "acc_norm_stderr": 0.022439758650564027 | |
| }, | |
| "community|piqa_ar|0": { | |
| "acc_norm": 0.5815602836879432, | |
| "acc_norm_stderr": 0.011525269373390839 | |
| }, | |
| "community|race_ar|0": { | |
| "acc_norm": 0.3836477987421384, | |
| "acc_norm_stderr": 0.006927004686371493 | |
| }, | |
| "community|sciq_ar|0": { | |
| "acc_norm": 0.4100502512562814, | |
| "acc_norm_stderr": 0.015600296735974161 | |
| }, | |
| "community|toxigen_ar|0": { | |
| "acc_norm": 0.5614973262032086, | |
| "acc_norm_stderr": 0.016236279955659978 | |
| }, | |
| "lighteval|xstory_cloze:ar|0": { | |
| "acc": 0.6068828590337525, | |
| "acc_stderr": 0.012569701151957317 | |
| }, | |
| "community|acva:_average|0": { | |
| "acc_norm": 0.40225399927198463, | |
| "acc_norm_stderr": 0.04581786726656804 | |
| }, | |
| "community|alghafa:_average|0": { | |
| "acc_norm": 0.4788154226778063, | |
| "acc_norm_stderr": 0.022264499008954358 | |
| }, | |
| "community|arabic_mmlu:_average|0": { | |
| "acc_norm": 0.44221276429547296, | |
| "acc_norm_stderr": 0.03621269932794016 | |
| }, | |
| "all": { | |
| "acc_norm": 0.42941021553012215, | |
| "acc_norm_stderr": 0.03782581276195976, | |
| "acc": 0.6068828590337525, | |
| "acc_stderr": 0.012569701151957317 | |
| } | |
| }, | |
| "versions": { | |
| "community|acva:Algeria|0": 0, | |
| "community|acva:Ancient_Egypt|0": 0, | |
| "community|acva:Arab_Empire|0": 0, | |
| "community|acva:Arabic_Architecture|0": 0, | |
| "community|acva:Arabic_Art|0": 0, | |
| "community|acva:Arabic_Astronomy|0": 0, | |
| "community|acva:Arabic_Calligraphy|0": 0, | |
| "community|acva:Arabic_Ceremony|0": 0, | |
| "community|acva:Arabic_Clothing|0": 0, | |
| "community|acva:Arabic_Culture|0": 0, | |
| "community|acva:Arabic_Food|0": 0, | |
| "community|acva:Arabic_Funeral|0": 0, | |
| "community|acva:Arabic_Geography|0": 0, | |
| "community|acva:Arabic_History|0": 0, | |
| "community|acva:Arabic_Language_Origin|0": 0, | |
| "community|acva:Arabic_Literature|0": 0, | |
| "community|acva:Arabic_Math|0": 0, | |
| "community|acva:Arabic_Medicine|0": 0, | |
| "community|acva:Arabic_Music|0": 0, | |
| "community|acva:Arabic_Ornament|0": 0, | |
| "community|acva:Arabic_Philosophy|0": 0, | |
| "community|acva:Arabic_Physics_and_Chemistry|0": 0, | |
| "community|acva:Arabic_Wedding|0": 0, | |
| "community|acva:Bahrain|0": 0, | |
| "community|acva:Comoros|0": 0, | |
| "community|acva:Egypt_modern|0": 0, | |
| "community|acva:InfluenceFromAncientEgypt|0": 0, | |
| "community|acva:InfluenceFromByzantium|0": 0, | |
| "community|acva:InfluenceFromChina|0": 0, | |
| "community|acva:InfluenceFromGreece|0": 0, | |
| "community|acva:InfluenceFromIslam|0": 0, | |
| "community|acva:InfluenceFromPersia|0": 0, | |
| "community|acva:InfluenceFromRome|0": 0, | |
| "community|acva:Iraq|0": 0, | |
| "community|acva:Islam_Education|0": 0, | |
| "community|acva:Islam_branches_and_schools|0": 0, | |
| "community|acva:Islamic_law_system|0": 0, | |
| "community|acva:Jordan|0": 0, | |
| "community|acva:Kuwait|0": 0, | |
| "community|acva:Lebanon|0": 0, | |
| "community|acva:Libya|0": 0, | |
| "community|acva:Mauritania|0": 0, | |
| "community|acva:Mesopotamia_civilization|0": 0, | |
| "community|acva:Morocco|0": 0, | |
| "community|acva:Oman|0": 0, | |
| "community|acva:Palestine|0": 0, | |
| "community|acva:Qatar|0": 0, | |
| "community|acva:Saudi_Arabia|0": 0, | |
| "community|acva:Somalia|0": 0, | |
| "community|acva:Sudan|0": 0, | |
| "community|acva:Syria|0": 0, | |
| "community|acva:Tunisia|0": 0, | |
| "community|acva:United_Arab_Emirates|0": 0, | |
| "community|acva:Yemen|0": 0, | |
| "community|acva:communication|0": 0, | |
| "community|acva:computer_and_phone|0": 0, | |
| "community|acva:daily_life|0": 0, | |
| "community|acva:entertainment|0": 0, | |
| "community|alghafa:mcq_exams_test_ar|0": 0, | |
| "community|alghafa:meta_ar_dialects|0": 0, | |
| "community|alghafa:meta_ar_msa|0": 0, | |
| "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, | |
| "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, | |
| "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, | |
| "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, | |
| "community|alghafa:multiple_choice_rating_sentiment_task|0": 0, | |
| "community|alghafa:multiple_choice_sentiment_task|0": 0, | |
| "community|arabic_exams|0": 0, | |
| "community|arabic_mmlu:abstract_algebra|0": 0, | |
| "community|arabic_mmlu:anatomy|0": 0, | |
| "community|arabic_mmlu:astronomy|0": 0, | |
| "community|arabic_mmlu:business_ethics|0": 0, | |
| "community|arabic_mmlu:clinical_knowledge|0": 0, | |
| "community|arabic_mmlu:college_biology|0": 0, | |
| "community|arabic_mmlu:college_chemistry|0": 0, | |
| "community|arabic_mmlu:college_computer_science|0": 0, | |
| "community|arabic_mmlu:college_mathematics|0": 0, | |
| "community|arabic_mmlu:college_medicine|0": 0, | |
| "community|arabic_mmlu:college_physics|0": 0, | |
| "community|arabic_mmlu:computer_security|0": 0, | |
| "community|arabic_mmlu:conceptual_physics|0": 0, | |
| "community|arabic_mmlu:econometrics|0": 0, | |
| "community|arabic_mmlu:electrical_engineering|0": 0, | |
| "community|arabic_mmlu:elementary_mathematics|0": 0, | |
| "community|arabic_mmlu:formal_logic|0": 0, | |
| "community|arabic_mmlu:global_facts|0": 0, | |
| "community|arabic_mmlu:high_school_biology|0": 0, | |
| "community|arabic_mmlu:high_school_chemistry|0": 0, | |
| "community|arabic_mmlu:high_school_computer_science|0": 0, | |
| "community|arabic_mmlu:high_school_european_history|0": 0, | |
| "community|arabic_mmlu:high_school_geography|0": 0, | |
| "community|arabic_mmlu:high_school_government_and_politics|0": 0, | |
| "community|arabic_mmlu:high_school_macroeconomics|0": 0, | |
| "community|arabic_mmlu:high_school_mathematics|0": 0, | |
| "community|arabic_mmlu:high_school_microeconomics|0": 0, | |
| "community|arabic_mmlu:high_school_physics|0": 0, | |
| "community|arabic_mmlu:high_school_psychology|0": 0, | |
| "community|arabic_mmlu:high_school_statistics|0": 0, | |
| "community|arabic_mmlu:high_school_us_history|0": 0, | |
| "community|arabic_mmlu:high_school_world_history|0": 0, | |
| "community|arabic_mmlu:human_aging|0": 0, | |
| "community|arabic_mmlu:human_sexuality|0": 0, | |
| "community|arabic_mmlu:international_law|0": 0, | |
| "community|arabic_mmlu:jurisprudence|0": 0, | |
| "community|arabic_mmlu:logical_fallacies|0": 0, | |
| "community|arabic_mmlu:machine_learning|0": 0, | |
| "community|arabic_mmlu:management|0": 0, | |
| "community|arabic_mmlu:marketing|0": 0, | |
| "community|arabic_mmlu:medical_genetics|0": 0, | |
| "community|arabic_mmlu:miscellaneous|0": 0, | |
| "community|arabic_mmlu:moral_disputes|0": 0, | |
| "community|arabic_mmlu:moral_scenarios|0": 0, | |
| "community|arabic_mmlu:nutrition|0": 0, | |
| "community|arabic_mmlu:philosophy|0": 0, | |
| "community|arabic_mmlu:prehistory|0": 0, | |
| "community|arabic_mmlu:professional_accounting|0": 0, | |
| "community|arabic_mmlu:professional_law|0": 0, | |
| "community|arabic_mmlu:professional_medicine|0": 0, | |
| "community|arabic_mmlu:professional_psychology|0": 0, | |
| "community|arabic_mmlu:public_relations|0": 0, | |
| "community|arabic_mmlu:security_studies|0": 0, | |
| "community|arabic_mmlu:sociology|0": 0, | |
| "community|arabic_mmlu:us_foreign_policy|0": 0, | |
| "community|arabic_mmlu:virology|0": 0, | |
| "community|arabic_mmlu:world_religions|0": 0, | |
| "community|arc_challenge_okapi_ar|0": 0, | |
| "community|arc_easy_ar|0": 0, | |
| "community|boolq_ar|0": 0, | |
| "community|copa_ext_ar|0": 0, | |
| "community|hellaswag_okapi_ar|0": 0, | |
| "community|openbook_qa_ext_ar|0": 0, | |
| "community|piqa_ar|0": 0, | |
| "community|race_ar|0": 0, | |
| "community|sciq_ar|0": 0, | |
| "community|toxigen_ar|0": 0, | |
| "lighteval|xstory_cloze:ar|0": 0 | |
| }, | |
| "config_tasks": { | |
| "community|acva:Algeria": { | |
| "name": "acva:Algeria", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Algeria", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Ancient_Egypt": { | |
| "name": "acva:Ancient_Egypt", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Ancient_Egypt", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 315, | |
| "effective_num_docs": 315, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arab_Empire": { | |
| "name": "acva:Arab_Empire", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arab_Empire", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 265, | |
| "effective_num_docs": 265, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Architecture": { | |
| "name": "acva:Arabic_Architecture", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Architecture", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Art": { | |
| "name": "acva:Arabic_Art", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Art", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Astronomy": { | |
| "name": "acva:Arabic_Astronomy", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Astronomy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Calligraphy": { | |
| "name": "acva:Arabic_Calligraphy", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Calligraphy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 255, | |
| "effective_num_docs": 255, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Ceremony": { | |
| "name": "acva:Arabic_Ceremony", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Ceremony", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 185, | |
| "effective_num_docs": 185, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Clothing": { | |
| "name": "acva:Arabic_Clothing", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Clothing", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Culture": { | |
| "name": "acva:Arabic_Culture", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Culture", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Food": { | |
| "name": "acva:Arabic_Food", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Food", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Funeral": { | |
| "name": "acva:Arabic_Funeral", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Funeral", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 95, | |
| "effective_num_docs": 95, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Geography": { | |
| "name": "acva:Arabic_Geography", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Geography", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_History": { | |
| "name": "acva:Arabic_History", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_History", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Language_Origin": { | |
| "name": "acva:Arabic_Language_Origin", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Language_Origin", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 95, | |
| "effective_num_docs": 95, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Literature": { | |
| "name": "acva:Arabic_Literature", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Literature", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Math": { | |
| "name": "acva:Arabic_Math", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Math", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Medicine": { | |
| "name": "acva:Arabic_Medicine", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Medicine", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Music": { | |
| "name": "acva:Arabic_Music", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Music", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 139, | |
| "effective_num_docs": 139, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Ornament": { | |
| "name": "acva:Arabic_Ornament", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Ornament", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Philosophy": { | |
| "name": "acva:Arabic_Philosophy", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Philosophy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Physics_and_Chemistry": { | |
| "name": "acva:Arabic_Physics_and_Chemistry", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Physics_and_Chemistry", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Arabic_Wedding": { | |
| "name": "acva:Arabic_Wedding", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Arabic_Wedding", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Bahrain": { | |
| "name": "acva:Bahrain", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Bahrain", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Comoros": { | |
| "name": "acva:Comoros", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Comoros", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Egypt_modern": { | |
| "name": "acva:Egypt_modern", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Egypt_modern", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 95, | |
| "effective_num_docs": 95, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromAncientEgypt": { | |
| "name": "acva:InfluenceFromAncientEgypt", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromAncientEgypt", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromByzantium": { | |
| "name": "acva:InfluenceFromByzantium", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromByzantium", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromChina": { | |
| "name": "acva:InfluenceFromChina", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromChina", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromGreece": { | |
| "name": "acva:InfluenceFromGreece", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromGreece", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromIslam": { | |
| "name": "acva:InfluenceFromIslam", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromIslam", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromPersia": { | |
| "name": "acva:InfluenceFromPersia", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromPersia", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 175, | |
| "effective_num_docs": 175, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:InfluenceFromRome": { | |
| "name": "acva:InfluenceFromRome", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "InfluenceFromRome", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Iraq": { | |
| "name": "acva:Iraq", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Iraq", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 85, | |
| "effective_num_docs": 85, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Islam_Education": { | |
| "name": "acva:Islam_Education", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Islam_Education", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Islam_branches_and_schools": { | |
| "name": "acva:Islam_branches_and_schools", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Islam_branches_and_schools", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 175, | |
| "effective_num_docs": 175, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Islamic_law_system": { | |
| "name": "acva:Islamic_law_system", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Islamic_law_system", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Jordan": { | |
| "name": "acva:Jordan", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Jordan", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Kuwait": { | |
| "name": "acva:Kuwait", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Kuwait", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Lebanon": { | |
| "name": "acva:Lebanon", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Lebanon", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Libya": { | |
| "name": "acva:Libya", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Libya", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Mauritania": { | |
| "name": "acva:Mauritania", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Mauritania", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Mesopotamia_civilization": { | |
| "name": "acva:Mesopotamia_civilization", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Mesopotamia_civilization", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 155, | |
| "effective_num_docs": 155, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Morocco": { | |
| "name": "acva:Morocco", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Morocco", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Oman": { | |
| "name": "acva:Oman", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Oman", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Palestine": { | |
| "name": "acva:Palestine", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Palestine", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 85, | |
| "effective_num_docs": 85, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Qatar": { | |
| "name": "acva:Qatar", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Qatar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Saudi_Arabia": { | |
| "name": "acva:Saudi_Arabia", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Saudi_Arabia", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 195, | |
| "effective_num_docs": 195, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Somalia": { | |
| "name": "acva:Somalia", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Somalia", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Sudan": { | |
| "name": "acva:Sudan", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Sudan", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Syria": { | |
| "name": "acva:Syria", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Syria", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Tunisia": { | |
| "name": "acva:Tunisia", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Tunisia", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 45, | |
| "effective_num_docs": 45, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:United_Arab_Emirates": { | |
| "name": "acva:United_Arab_Emirates", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "United_Arab_Emirates", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 85, | |
| "effective_num_docs": 85, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:Yemen": { | |
| "name": "acva:Yemen", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "Yemen", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 10, | |
| "effective_num_docs": 10, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:communication": { | |
| "name": "acva:communication", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "communication", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 364, | |
| "effective_num_docs": 364, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:computer_and_phone": { | |
| "name": "acva:computer_and_phone", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "computer_and_phone", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 295, | |
| "effective_num_docs": 295, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:daily_life": { | |
| "name": "acva:daily_life", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "daily_life", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 337, | |
| "effective_num_docs": 337, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|acva:entertainment": { | |
| "name": "acva:entertainment", | |
| "prompt_function": "acva", | |
| "hf_repo": "OALL/ACVA", | |
| "hf_subset": "entertainment", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 295, | |
| "effective_num_docs": 295, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
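
Every `community|acva:*` entry above instantiates the same template: the `acva` prompt function over `OALL/ACVA`, scored with `loglikelihood_acc_norm` on the `test` split, with sequential few-shot examples drawn from `validation`; only `hf_subset` and the document counts vary. As a minimal sketch (not the benchmark's own code), the per-subset dictionaries could be regenerated as below; `ACVA_SUBSETS` is a hypothetical, abbreviated list standing in for the full subset enumeration shown in this file.

```python
# Regenerate the shared ACVA task template for each subset. Field names and
# values are copied from the config entries above; ACVA_SUBSETS is a
# hypothetical, abbreviated stand-in for the full list.
ACVA_SUBSETS = ["Islam_Education", "Jordan", "Kuwait", "Yemen"]

def acva_task_config(subset: str) -> dict:
    """Per-subset config dict shared by every community|acva task."""
    return {
        "name": f"acva:{subset}",
        "prompt_function": "acva",
        "hf_repo": "OALL/ACVA",
        "hf_subset": subset,
        "metric": ["loglikelihood_acc_norm"],
        "hf_avail_splits": ["test", "validation"],
        "evaluation_splits": ["test"],
        "few_shots_split": "validation",
        "few_shots_select": "sequential",
        "generation_size": -1,
        "suite": ["community"],
        "version": 0,
        # stop_sequence, output_regex, num_samples, trust_dataset and
        # must_remove_duplicate_docs are null in every entry above.
    }

# Key each config by its full task name, matching the entries in this file.
configs = {f"community|acva:{s}": acva_task_config(s) for s in ACVA_SUBSETS}
```
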
| "community|alghafa:mcq_exams_test_ar": { | |
| "name": "alghafa:mcq_exams_test_ar", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "mcq_exams_test_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 557, | |
| "effective_num_docs": 557, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:meta_ar_dialects": { | |
| "name": "alghafa:meta_ar_dialects", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "meta_ar_dialects", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 5395, | |
| "effective_num_docs": 5395, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:meta_ar_msa": { | |
| "name": "alghafa:meta_ar_msa", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "meta_ar_msa", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 895, | |
| "effective_num_docs": 895, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:multiple_choice_facts_truefalse_balanced_task": { | |
| "name": "alghafa:multiple_choice_facts_truefalse_balanced_task", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "multiple_choice_facts_truefalse_balanced_task", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 75, | |
| "effective_num_docs": 75, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:multiple_choice_grounded_statement_soqal_task": { | |
| "name": "alghafa:multiple_choice_grounded_statement_soqal_task", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "multiple_choice_grounded_statement_soqal_task", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 150, | |
| "effective_num_docs": 150, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { | |
| "name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 150, | |
| "effective_num_docs": 150, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { | |
| "name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 7995, | |
| "effective_num_docs": 7995, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:multiple_choice_rating_sentiment_task": { | |
| "name": "alghafa:multiple_choice_rating_sentiment_task", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "multiple_choice_rating_sentiment_task", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 5995, | |
| "effective_num_docs": 5995, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|alghafa:multiple_choice_sentiment_task": { | |
| "name": "alghafa:multiple_choice_sentiment_task", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
| "hf_subset": "multiple_choice_sentiment_task", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 1720, | |
| "effective_num_docs": 1720, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
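
The `community|alghafa:*` entries follow the same pattern with the `alghafa_prompt` function over `OALL/AlGhafa-Arabic-LLM-Benchmark-Native`. Assuming the `datasets` library and public access to that repo, the evaluation split named in these configs can be fetched directly; the subset and split strings below are copied from the `hf_subset` and `evaluation_splits` fields.

```python
from datasets import load_dataset

# Fetch the split that the mcq_exams_test_ar config evaluates on.
ds = load_dataset(
    "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
    "mcq_exams_test_ar",
    split="test",
)

# If the fetch matches the config, this should print the recorded
# original_num_docs value of 557.
print(len(ds))
```
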
| "community|arabic_exams": { | |
| "name": "arabic_exams", | |
| "prompt_function": "arabic_exams", | |
| "hf_repo": "OALL/Arabic_EXAMS", | |
| "hf_subset": "default", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 537, | |
| "effective_num_docs": 537, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
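
Each entry records both `original_num_docs` and `effective_num_docs`, and in this run the two coincide for every task, so summing either field gives the evaluated document count. A small sketch, assuming this section's task-config mapping has been loaded with `json.load` into a dict called `config_tasks`:

```python
from collections import Counter

def docs_per_family(config_tasks: dict) -> Counter:
    """Tally evaluated documents per task family (acva, alghafa, ...)."""
    totals = Counter()
    for key, cfg in config_tasks.items():
        # Keys look like "community|arabic_mmlu:anatomy"; the text between
        # "|" and ":" names the family.
        family = key.split("|", 1)[-1].split(":", 1)[0]
        totals[family] += cfg["effective_num_docs"]
    return totals
```
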
| "community|arabic_mmlu:abstract_algebra": { | |
| "name": "arabic_mmlu:abstract_algebra", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "abstract_algebra", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:anatomy": { | |
| "name": "arabic_mmlu:anatomy", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "anatomy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 135, | |
| "effective_num_docs": 135, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:astronomy": { | |
| "name": "arabic_mmlu:astronomy", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "astronomy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 152, | |
| "effective_num_docs": 152, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:business_ethics": { | |
| "name": "arabic_mmlu:business_ethics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "business_ethics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:clinical_knowledge": { | |
| "name": "arabic_mmlu:clinical_knowledge", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "clinical_knowledge", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 265, | |
| "effective_num_docs": 265, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:college_biology": { | |
| "name": "arabic_mmlu:college_biology", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "college_biology", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 144, | |
| "effective_num_docs": 144, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:college_chemistry": { | |
| "name": "arabic_mmlu:college_chemistry", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "college_chemistry", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:college_computer_science": { | |
| "name": "arabic_mmlu:college_computer_science", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "college_computer_science", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:college_mathematics": { | |
| "name": "arabic_mmlu:college_mathematics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "college_mathematics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:college_medicine": { | |
| "name": "arabic_mmlu:college_medicine", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "college_medicine", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 173, | |
| "effective_num_docs": 173, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:college_physics": { | |
| "name": "arabic_mmlu:college_physics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "college_physics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 102, | |
| "effective_num_docs": 102, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:computer_security": { | |
| "name": "arabic_mmlu:computer_security", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "computer_security", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:conceptual_physics": { | |
| "name": "arabic_mmlu:conceptual_physics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "conceptual_physics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 235, | |
| "effective_num_docs": 235, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:econometrics": { | |
| "name": "arabic_mmlu:econometrics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "econometrics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 114, | |
| "effective_num_docs": 114, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:electrical_engineering": { | |
| "name": "arabic_mmlu:electrical_engineering", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "electrical_engineering", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 145, | |
| "effective_num_docs": 145, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:elementary_mathematics": { | |
| "name": "arabic_mmlu:elementary_mathematics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "elementary_mathematics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 378, | |
| "effective_num_docs": 378, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:formal_logic": { | |
| "name": "arabic_mmlu:formal_logic", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "formal_logic", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 126, | |
| "effective_num_docs": 126, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:global_facts": { | |
| "name": "arabic_mmlu:global_facts", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "global_facts", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_biology": { | |
| "name": "arabic_mmlu:high_school_biology", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_biology", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 310, | |
| "effective_num_docs": 310, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_chemistry": { | |
| "name": "arabic_mmlu:high_school_chemistry", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_chemistry", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 203, | |
| "effective_num_docs": 203, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_computer_science": { | |
| "name": "arabic_mmlu:high_school_computer_science", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_computer_science", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_european_history": { | |
| "name": "arabic_mmlu:high_school_european_history", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_european_history", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 165, | |
| "effective_num_docs": 165, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_geography": { | |
| "name": "arabic_mmlu:high_school_geography", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_geography", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 198, | |
| "effective_num_docs": 198, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_government_and_politics": { | |
| "name": "arabic_mmlu:high_school_government_and_politics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_government_and_politics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 193, | |
| "effective_num_docs": 193, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_macroeconomics": { | |
| "name": "arabic_mmlu:high_school_macroeconomics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_macroeconomics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 390, | |
| "effective_num_docs": 390, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_mathematics": { | |
| "name": "arabic_mmlu:high_school_mathematics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_mathematics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 270, | |
| "effective_num_docs": 270, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_microeconomics": { | |
| "name": "arabic_mmlu:high_school_microeconomics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_microeconomics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 238, | |
| "effective_num_docs": 238, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_physics": { | |
| "name": "arabic_mmlu:high_school_physics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_physics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 151, | |
| "effective_num_docs": 151, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_psychology": { | |
| "name": "arabic_mmlu:high_school_psychology", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_psychology", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 545, | |
| "effective_num_docs": 545, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_statistics": { | |
| "name": "arabic_mmlu:high_school_statistics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_statistics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 216, | |
| "effective_num_docs": 216, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_us_history": { | |
| "name": "arabic_mmlu:high_school_us_history", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_us_history", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 204, | |
| "effective_num_docs": 204, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_world_history": { | |
| "name": "arabic_mmlu:high_school_world_history", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "high_school_world_history", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 237, | |
| "effective_num_docs": 237, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:human_aging": { | |
| "name": "arabic_mmlu:human_aging", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "human_aging", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 223, | |
| "effective_num_docs": 223, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:human_sexuality": { | |
| "name": "arabic_mmlu:human_sexuality", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "human_sexuality", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 131, | |
| "effective_num_docs": 131, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:international_law": { | |
| "name": "arabic_mmlu:international_law", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "international_law", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 121, | |
| "effective_num_docs": 121, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:jurisprudence": { | |
| "name": "arabic_mmlu:jurisprudence", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "jurisprudence", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 108, | |
| "effective_num_docs": 108, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:logical_fallacies": { | |
| "name": "arabic_mmlu:logical_fallacies", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "logical_fallacies", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 163, | |
| "effective_num_docs": 163, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:machine_learning": { | |
| "name": "arabic_mmlu:machine_learning", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "machine_learning", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 112, | |
| "effective_num_docs": 112, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:management": { | |
| "name": "arabic_mmlu:management", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "management", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 103, | |
| "effective_num_docs": 103, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:marketing": { | |
| "name": "arabic_mmlu:marketing", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "marketing", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 234, | |
| "effective_num_docs": 234, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:medical_genetics": { | |
| "name": "arabic_mmlu:medical_genetics", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "medical_genetics", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:miscellaneous": { | |
| "name": "arabic_mmlu:miscellaneous", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "miscellaneous", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 783, | |
| "effective_num_docs": 783, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:moral_disputes": { | |
| "name": "arabic_mmlu:moral_disputes", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "moral_disputes", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 346, | |
| "effective_num_docs": 346, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:moral_scenarios": { | |
| "name": "arabic_mmlu:moral_scenarios", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "moral_scenarios", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 895, | |
| "effective_num_docs": 895, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:nutrition": { | |
| "name": "arabic_mmlu:nutrition", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "nutrition", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 306, | |
| "effective_num_docs": 306, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:philosophy": { | |
| "name": "arabic_mmlu:philosophy", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "philosophy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 311, | |
| "effective_num_docs": 311, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:prehistory": { | |
| "name": "arabic_mmlu:prehistory", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "prehistory", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 324, | |
| "effective_num_docs": 324, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:professional_accounting": { | |
| "name": "arabic_mmlu:professional_accounting", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "professional_accounting", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 282, | |
| "effective_num_docs": 282, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:professional_law": { | |
| "name": "arabic_mmlu:professional_law", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "professional_law", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 1534, | |
| "effective_num_docs": 1534, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:professional_medicine": { | |
| "name": "arabic_mmlu:professional_medicine", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "professional_medicine", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 272, | |
| "effective_num_docs": 272, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:professional_psychology": { | |
| "name": "arabic_mmlu:professional_psychology", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "professional_psychology", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 612, | |
| "effective_num_docs": 612, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:public_relations": { | |
| "name": "arabic_mmlu:public_relations", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "public_relations", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 110, | |
| "effective_num_docs": 110, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:security_studies": { | |
| "name": "arabic_mmlu:security_studies", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "security_studies", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 245, | |
| "effective_num_docs": 245, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:sociology": { | |
| "name": "arabic_mmlu:sociology", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "sociology", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 201, | |
| "effective_num_docs": 201, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:us_foreign_policy": { | |
| "name": "arabic_mmlu:us_foreign_policy", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "us_foreign_policy", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 100, | |
| "effective_num_docs": 100, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:virology": { | |
| "name": "arabic_mmlu:virology", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "virology", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 166, | |
| "effective_num_docs": 166, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arabic_mmlu:world_religions": { | |
| "name": "arabic_mmlu:world_religions", | |
| "prompt_function": "mmlu_arabic", | |
| "hf_repo": "OALL/Arabic_MMLU", | |
| "hf_subset": "world_religions", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "dev" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "dev", | |
| "few_shots_select": "sequential", | |
| "generation_size": -1, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 171, | |
| "effective_num_docs": 171, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arc_challenge_okapi_ar": { | |
| "name": "arc_challenge_okapi_ar", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "arc_challenge_okapi_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 1160, | |
| "effective_num_docs": 1160, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|arc_easy_ar": { | |
| "name": "arc_easy_ar", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "arc_easy_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 2364, | |
| "effective_num_docs": 2364, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|boolq_ar": { | |
| "name": "boolq_ar", | |
| "prompt_function": "boolq_prompt_arabic", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "boolq_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 3260, | |
| "effective_num_docs": 3260, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|copa_ext_ar": { | |
| "name": "copa_ext_ar", | |
| "prompt_function": "copa_prompt_arabic", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "copa_ext_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 90, | |
| "effective_num_docs": 90, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|hellaswag_okapi_ar": { | |
| "name": "hellaswag_okapi_ar", | |
| "prompt_function": "hellaswag_prompt_arabic", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "hellaswag_okapi_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 9171, | |
| "effective_num_docs": 9171, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|openbook_qa_ext_ar": { | |
| "name": "openbook_qa_ext_ar", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "openbook_qa_ext_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 495, | |
| "effective_num_docs": 495, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|piqa_ar": { | |
| "name": "piqa_ar", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "piqa_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 1833, | |
| "effective_num_docs": 1833, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|race_ar": { | |
| "name": "race_ar", | |
| "prompt_function": "alghafa_prompt", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "race_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 4929, | |
| "effective_num_docs": 4929, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|sciq_ar": { | |
| "name": "sciq_ar", | |
| "prompt_function": "sciq_prompt_arabic", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "sciq_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 995, | |
| "effective_num_docs": 995, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "community|toxigen_ar": { | |
| "name": "toxigen_ar", | |
| "prompt_function": "toxigen_prompt_arabic", | |
| "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
| "hf_subset": "toxigen_ar", | |
| "metric": [ | |
| "loglikelihood_acc_norm" | |
| ], | |
| "hf_avail_splits": [ | |
| "test", | |
| "validation" | |
| ], | |
| "evaluation_splits": [ | |
| "test" | |
| ], | |
| "few_shots_split": "validation", | |
| "few_shots_select": "sequential", | |
| "generation_size": null, | |
| "stop_sequence": null, | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "community" | |
| ], | |
| "original_num_docs": 935, | |
| "effective_num_docs": 935, | |
| "trust_dataset": null, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| }, | |
| "lighteval|xstory_cloze:ar": { | |
| "name": "xstory_cloze:ar", | |
| "prompt_function": "storycloze", | |
| "hf_repo": "juletxara/xstory_cloze", | |
| "hf_subset": "ar", | |
| "metric": [ | |
| "loglikelihood_acc" | |
| ], | |
| "hf_avail_splits": [ | |
| "training", | |
| "eval" | |
| ], | |
| "evaluation_splits": [ | |
| "eval" | |
| ], | |
| "few_shots_split": null, | |
| "few_shots_select": null, | |
| "generation_size": -1, | |
| "stop_sequence": [ | |
| "\n" | |
| ], | |
| "output_regex": null, | |
| "num_samples": null, | |
| "frozen": false, | |
| "suite": [ | |
| "lighteval" | |
| ], | |
| "original_num_docs": 1511, | |
| "effective_num_docs": 1511, | |
| "trust_dataset": true, | |
| "must_remove_duplicate_docs": null, | |
| "version": 0 | |
| } | |
| }, | |
| "summary_tasks": { | |
| "community|acva:Algeria|0": { | |
| "hashes": { | |
| "hash_examples": "da5a3003cd46f6f9", | |
| "hash_full_prompts": "da5a3003cd46f6f9", | |
| "hash_input_tokens": "cb0aeafc3d66fab0", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Ancient_Egypt|0": { | |
| "hashes": { | |
| "hash_examples": "52d6f767fede195b", | |
| "hash_full_prompts": "52d6f767fede195b", | |
| "hash_input_tokens": "27f54a4b6e3bb4bd", | |
| "hash_cont_tokens": "ec26842aaa651939" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 315, | |
| "padded": 630, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arab_Empire|0": { | |
| "hashes": { | |
| "hash_examples": "8dacff6a79804a75", | |
| "hash_full_prompts": "8dacff6a79804a75", | |
| "hash_input_tokens": "034957bc71538d3e", | |
| "hash_cont_tokens": "5cea6a3a29d2df50" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 265, | |
| "padded": 530, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Architecture|0": { | |
| "hashes": { | |
| "hash_examples": "df286cd862d9f6bb", | |
| "hash_full_prompts": "df286cd862d9f6bb", | |
| "hash_input_tokens": "ecea3bb351566f08", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Art|0": { | |
| "hashes": { | |
| "hash_examples": "112883d764118a49", | |
| "hash_full_prompts": "112883d764118a49", | |
| "hash_input_tokens": "529cfe9bd2bded30", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Astronomy|0": { | |
| "hashes": { | |
| "hash_examples": "20dcdf2454bf8671", | |
| "hash_full_prompts": "20dcdf2454bf8671", | |
| "hash_input_tokens": "228f506ade07e8eb", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Calligraphy|0": { | |
| "hashes": { | |
| "hash_examples": "3a9f9d1ebe868a15", | |
| "hash_full_prompts": "3a9f9d1ebe868a15", | |
| "hash_input_tokens": "d91f3619cd15eb15", | |
| "hash_cont_tokens": "e296c7fa5936d878" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 255, | |
| "padded": 510, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Ceremony|0": { | |
| "hashes": { | |
| "hash_examples": "c927630f8d2f44da", | |
| "hash_full_prompts": "c927630f8d2f44da", | |
| "hash_input_tokens": "2183cdbcfd5108da", | |
| "hash_cont_tokens": "28978f465eab9608" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 185, | |
| "padded": 370, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Clothing|0": { | |
| "hashes": { | |
| "hash_examples": "6ad0740c2ac6ac92", | |
| "hash_full_prompts": "6ad0740c2ac6ac92", | |
| "hash_input_tokens": "7c297948379df05b", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Culture|0": { | |
| "hashes": { | |
| "hash_examples": "2177bd857ad872ae", | |
| "hash_full_prompts": "2177bd857ad872ae", | |
| "hash_input_tokens": "0d2cb6d3027d8a38", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Food|0": { | |
| "hashes": { | |
| "hash_examples": "a6ada65b71d7c9c5", | |
| "hash_full_prompts": "a6ada65b71d7c9c5", | |
| "hash_input_tokens": "8951cdd908a7481d", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Funeral|0": { | |
| "hashes": { | |
| "hash_examples": "fcee39dc29eaae91", | |
| "hash_full_prompts": "fcee39dc29eaae91", | |
| "hash_input_tokens": "bf0ddc654e9c2100", | |
| "hash_cont_tokens": "be168b1474096fac" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 95, | |
| "padded": 190, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Geography|0": { | |
| "hashes": { | |
| "hash_examples": "d36eda7c89231c02", | |
| "hash_full_prompts": "d36eda7c89231c02", | |
| "hash_input_tokens": "b356c6a9095c27b4", | |
| "hash_cont_tokens": "2bad09f39ec6c291" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 290, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_History|0": { | |
| "hashes": { | |
| "hash_examples": "6354ac0d6db6a5fc", | |
| "hash_full_prompts": "6354ac0d6db6a5fc", | |
| "hash_input_tokens": "7b1c408136469bd2", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Language_Origin|0": { | |
| "hashes": { | |
| "hash_examples": "ddc967c8aca34402", | |
| "hash_full_prompts": "ddc967c8aca34402", | |
| "hash_input_tokens": "07058d49651d3054", | |
| "hash_cont_tokens": "be168b1474096fac" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 95, | |
| "padded": 190, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Literature|0": { | |
| "hashes": { | |
| "hash_examples": "4305379fd46be5d8", | |
| "hash_full_prompts": "4305379fd46be5d8", | |
| "hash_input_tokens": "bd9666e7cfde43d6", | |
| "hash_cont_tokens": "2bad09f39ec6c291" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 290, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Math|0": { | |
| "hashes": { | |
| "hash_examples": "dec621144f4d28be", | |
| "hash_full_prompts": "dec621144f4d28be", | |
| "hash_input_tokens": "6f1a66ce3300b045", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Medicine|0": { | |
| "hashes": { | |
| "hash_examples": "2b344cdae9495ff2", | |
| "hash_full_prompts": "2b344cdae9495ff2", | |
| "hash_input_tokens": "4b4f01d1ed9975c5", | |
| "hash_cont_tokens": "2bad09f39ec6c291" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 290, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Music|0": { | |
| "hashes": { | |
| "hash_examples": "0c54624d881944ce", | |
| "hash_full_prompts": "0c54624d881944ce", | |
| "hash_input_tokens": "4be32b4c5d5cbbe2", | |
| "hash_cont_tokens": "9e9f81993e6c507b" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 139, | |
| "padded": 278, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Ornament|0": { | |
| "hashes": { | |
| "hash_examples": "251a4a84289d8bc1", | |
| "hash_full_prompts": "251a4a84289d8bc1", | |
| "hash_input_tokens": "8bfcbaef2f54cebe", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Philosophy|0": { | |
| "hashes": { | |
| "hash_examples": "3f86fb9c94c13d22", | |
| "hash_full_prompts": "3f86fb9c94c13d22", | |
| "hash_input_tokens": "d53559945d783945", | |
| "hash_cont_tokens": "2bad09f39ec6c291" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 290, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Physics_and_Chemistry|0": { | |
| "hashes": { | |
| "hash_examples": "8fec65af3695b62a", | |
| "hash_full_prompts": "8fec65af3695b62a", | |
| "hash_input_tokens": "c0bdcee5b13a94d9", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Arabic_Wedding|0": { | |
| "hashes": { | |
| "hash_examples": "9cc3477184d7a4b8", | |
| "hash_full_prompts": "9cc3477184d7a4b8", | |
| "hash_input_tokens": "cb19a37687a23cb9", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Bahrain|0": { | |
| "hashes": { | |
| "hash_examples": "c92e803a0fa8b9e2", | |
| "hash_full_prompts": "c92e803a0fa8b9e2", | |
| "hash_input_tokens": "e1336d5551759bb9", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Comoros|0": { | |
| "hashes": { | |
| "hash_examples": "06e5d4bba8e54cae", | |
| "hash_full_prompts": "06e5d4bba8e54cae", | |
| "hash_input_tokens": "a0b1846c795d5341", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Egypt_modern|0": { | |
| "hashes": { | |
| "hash_examples": "c6ec369164f93446", | |
| "hash_full_prompts": "c6ec369164f93446", | |
| "hash_input_tokens": "16b9d782e0f95849", | |
| "hash_cont_tokens": "be168b1474096fac" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 95, | |
| "padded": 190, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromAncientEgypt|0": { | |
| "hashes": { | |
| "hash_examples": "b9d56d74818b9bd4", | |
| "hash_full_prompts": "b9d56d74818b9bd4", | |
| "hash_input_tokens": "1e5278b4f53c1f81", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromByzantium|0": { | |
| "hashes": { | |
| "hash_examples": "5316c9624e7e59b8", | |
| "hash_full_prompts": "5316c9624e7e59b8", | |
| "hash_input_tokens": "cc58d83ab85e2e5e", | |
| "hash_cont_tokens": "2bad09f39ec6c291" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 290, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromChina|0": { | |
| "hashes": { | |
| "hash_examples": "87894bce95a56411", | |
| "hash_full_prompts": "87894bce95a56411", | |
| "hash_input_tokens": "288333e351bb4db4", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromGreece|0": { | |
| "hashes": { | |
| "hash_examples": "0baa78a27e469312", | |
| "hash_full_prompts": "0baa78a27e469312", | |
| "hash_input_tokens": "97e890fa7f09463c", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromIslam|0": { | |
| "hashes": { | |
| "hash_examples": "0c2532cde6541ff2", | |
| "hash_full_prompts": "0c2532cde6541ff2", | |
| "hash_input_tokens": "90bf65e592c81898", | |
| "hash_cont_tokens": "2bad09f39ec6c291" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 290, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromPersia|0": { | |
| "hashes": { | |
| "hash_examples": "efcd8112dc53c6e5", | |
| "hash_full_prompts": "efcd8112dc53c6e5", | |
| "hash_input_tokens": "c00d0ac247c0505e", | |
| "hash_cont_tokens": "801b51ed98f0c7b4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 175, | |
| "padded": 350, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:InfluenceFromRome|0": { | |
| "hashes": { | |
| "hash_examples": "9db61480e2e85fd3", | |
| "hash_full_prompts": "9db61480e2e85fd3", | |
| "hash_input_tokens": "36d1829b748dfa44", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Iraq|0": { | |
| "hashes": { | |
| "hash_examples": "96dac3dfa8d2f41f", | |
| "hash_full_prompts": "96dac3dfa8d2f41f", | |
| "hash_input_tokens": "6b052aa78a06333a", | |
| "hash_cont_tokens": "b7488076397f5471" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 85, | |
| "padded": 170, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Islam_Education|0": { | |
| "hashes": { | |
| "hash_examples": "0d80355f6a4cb51b", | |
| "hash_full_prompts": "0d80355f6a4cb51b", | |
| "hash_input_tokens": "5e9666e74fa79a2f", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Islam_branches_and_schools|0": { | |
| "hashes": { | |
| "hash_examples": "5cedce1be2c3ad50", | |
| "hash_full_prompts": "5cedce1be2c3ad50", | |
| "hash_input_tokens": "8eab7aba6bb2bdbf", | |
| "hash_cont_tokens": "801b51ed98f0c7b4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 175, | |
| "padded": 350, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Islamic_law_system|0": { | |
| "hashes": { | |
| "hash_examples": "c0e6db8bc84e105e", | |
| "hash_full_prompts": "c0e6db8bc84e105e", | |
| "hash_input_tokens": "e69b7907c3f8986f", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Jordan|0": { | |
| "hashes": { | |
| "hash_examples": "33deb5b4e5ddd6a1", | |
| "hash_full_prompts": "33deb5b4e5ddd6a1", | |
| "hash_input_tokens": "e6b631a7724bda4d", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Kuwait|0": { | |
| "hashes": { | |
| "hash_examples": "eb41773346d7c46c", | |
| "hash_full_prompts": "eb41773346d7c46c", | |
| "hash_input_tokens": "3d369fba9d1a4b42", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Lebanon|0": { | |
| "hashes": { | |
| "hash_examples": "25932dbf4c13d34f", | |
| "hash_full_prompts": "25932dbf4c13d34f", | |
| "hash_input_tokens": "2f3cbcc7764dcc9c", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Libya|0": { | |
| "hashes": { | |
| "hash_examples": "f2c4db63cd402926", | |
| "hash_full_prompts": "f2c4db63cd402926", | |
| "hash_input_tokens": "b8b69b9721477ab2", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Mauritania|0": { | |
| "hashes": { | |
| "hash_examples": "8723ab5fdf286b54", | |
| "hash_full_prompts": "8723ab5fdf286b54", | |
| "hash_input_tokens": "b3793c2d33cc8cc6", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Mesopotamia_civilization|0": { | |
| "hashes": { | |
| "hash_examples": "c33f5502a6130ca9", | |
| "hash_full_prompts": "c33f5502a6130ca9", | |
| "hash_input_tokens": "5c9e5f2d1d5851ee", | |
| "hash_cont_tokens": "ecf0ba35ed33e7aa" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 155, | |
| "padded": 310, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Morocco|0": { | |
| "hashes": { | |
| "hash_examples": "588a5ed27904b1ae", | |
| "hash_full_prompts": "588a5ed27904b1ae", | |
| "hash_input_tokens": "99fde477ea1cf5c0", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Oman|0": { | |
| "hashes": { | |
| "hash_examples": "d447c52b94248b69", | |
| "hash_full_prompts": "d447c52b94248b69", | |
| "hash_input_tokens": "3cfeb17dd4b54cf7", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Palestine|0": { | |
| "hashes": { | |
| "hash_examples": "19197e076ad14ff5", | |
| "hash_full_prompts": "19197e076ad14ff5", | |
| "hash_input_tokens": "54dfc28a49437e01", | |
| "hash_cont_tokens": "b7488076397f5471" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 85, | |
| "padded": 170, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Qatar|0": { | |
| "hashes": { | |
| "hash_examples": "cf0736fa185b28f6", | |
| "hash_full_prompts": "cf0736fa185b28f6", | |
| "hash_input_tokens": "0f789837547df0fa", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Saudi_Arabia|0": { | |
| "hashes": { | |
| "hash_examples": "69beda6e1b85a08d", | |
| "hash_full_prompts": "69beda6e1b85a08d", | |
| "hash_input_tokens": "b257ec662f0b89b1", | |
| "hash_cont_tokens": "b7390c4ff28a847e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 195, | |
| "padded": 390, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Somalia|0": { | |
| "hashes": { | |
| "hash_examples": "b387940c65784fbf", | |
| "hash_full_prompts": "b387940c65784fbf", | |
| "hash_input_tokens": "950405f141596749", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Sudan|0": { | |
| "hashes": { | |
| "hash_examples": "e02c32b9d2dd0c3f", | |
| "hash_full_prompts": "e02c32b9d2dd0c3f", | |
| "hash_input_tokens": "89de1025662ff57e", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Syria|0": { | |
| "hashes": { | |
| "hash_examples": "60a6f8fe73bda4bb", | |
| "hash_full_prompts": "60a6f8fe73bda4bb", | |
| "hash_input_tokens": "e6fb860cddde096b", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Tunisia|0": { | |
| "hashes": { | |
| "hash_examples": "34bb15d3830c5649", | |
| "hash_full_prompts": "34bb15d3830c5649", | |
| "hash_input_tokens": "bd47128b35a2d86f", | |
| "hash_cont_tokens": "da8a1fab862deec5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 45, | |
| "padded": 90, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:United_Arab_Emirates|0": { | |
| "hashes": { | |
| "hash_examples": "98a0ba78172718ce", | |
| "hash_full_prompts": "98a0ba78172718ce", | |
| "hash_input_tokens": "7647c1bc27e49fc8", | |
| "hash_cont_tokens": "b7488076397f5471" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 85, | |
| "padded": 170, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:Yemen|0": { | |
| "hashes": { | |
| "hash_examples": "18e9bcccbb4ced7a", | |
| "hash_full_prompts": "18e9bcccbb4ced7a", | |
| "hash_input_tokens": "2c681b96b17e1632", | |
| "hash_cont_tokens": "11c28cf0207d65c4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 10, | |
| "padded": 20, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:communication|0": { | |
| "hashes": { | |
| "hash_examples": "9ff28ab5eab5c97b", | |
| "hash_full_prompts": "9ff28ab5eab5c97b", | |
| "hash_input_tokens": "11053b43a241624b", | |
| "hash_cont_tokens": "c2a8d8745e2b69b6" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 364, | |
| "padded": 728, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:computer_and_phone|0": { | |
| "hashes": { | |
| "hash_examples": "37bac2f086aaf6c2", | |
| "hash_full_prompts": "37bac2f086aaf6c2", | |
| "hash_input_tokens": "e2002483d56b5393", | |
| "hash_cont_tokens": "21775773fa6ef8a2" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 295, | |
| "padded": 590, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:daily_life|0": { | |
| "hashes": { | |
| "hash_examples": "bf07363c1c252e2f", | |
| "hash_full_prompts": "bf07363c1c252e2f", | |
| "hash_input_tokens": "379d58777e8e43b7", | |
| "hash_cont_tokens": "765f5c867eb46b99" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 337, | |
| "padded": 674, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|acva:entertainment|0": { | |
| "hashes": { | |
| "hash_examples": "37077bc00f0ac56a", | |
| "hash_full_prompts": "37077bc00f0ac56a", | |
| "hash_input_tokens": "ea151a4871612d5e", | |
| "hash_cont_tokens": "21775773fa6ef8a2" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 295, | |
| "padded": 590, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:mcq_exams_test_ar|0": { | |
| "hashes": { | |
| "hash_examples": "c07a5e78c5c0b8fe", | |
| "hash_full_prompts": "c07a5e78c5c0b8fe", | |
| "hash_input_tokens": "2af1b4256d44d11f", | |
| "hash_cont_tokens": "30ace4175ca64b10" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 557, | |
| "padded": 2228, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:meta_ar_dialects|0": { | |
| "hashes": { | |
| "hash_examples": "c0b6081f83e14064", | |
| "hash_full_prompts": "c0b6081f83e14064", | |
| "hash_input_tokens": "88f9f363325e7401", | |
| "hash_cont_tokens": "2e3f78af4c90859b" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 5395, | |
| "padded": 21580, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:meta_ar_msa|0": { | |
| "hashes": { | |
| "hash_examples": "64eb78a7c5b7484b", | |
| "hash_full_prompts": "64eb78a7c5b7484b", | |
| "hash_input_tokens": "7e4873b0e0d81202", | |
| "hash_cont_tokens": "df39c57e8bf281fd" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 895, | |
| "padded": 3580, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
| "hashes": { | |
| "hash_examples": "54fc3502c1c02c06", | |
| "hash_full_prompts": "54fc3502c1c02c06", | |
| "hash_input_tokens": "7654b2056078d564", | |
| "hash_cont_tokens": "a05cfc77c23206cd" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 75, | |
| "padded": 150, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
| "hashes": { | |
| "hash_examples": "46572d83696552ae", | |
| "hash_full_prompts": "46572d83696552ae", | |
| "hash_input_tokens": "a08af47d30e9da82", | |
| "hash_cont_tokens": "0e06339976dc93a2" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 150, | |
| "padded": 750, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
| "hashes": { | |
| "hash_examples": "f430d97ff715bc1c", | |
| "hash_full_prompts": "f430d97ff715bc1c", | |
| "hash_input_tokens": "17dade1c266f37aa", | |
| "hash_cont_tokens": "789e514956d6e9fc" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 150, | |
| "padded": 750, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
| "hashes": { | |
| "hash_examples": "6b70a7416584f98c", | |
| "hash_full_prompts": "6b70a7416584f98c", | |
| "hash_input_tokens": "cd3d14b97169c1b2", | |
| "hash_cont_tokens": "2579dbe11be68347" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 7995, | |
| "padded": 15979, | |
| "non_padded": 11, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
| "hashes": { | |
| "hash_examples": "bc2005cc9d2f436e", | |
| "hash_full_prompts": "bc2005cc9d2f436e", | |
| "hash_input_tokens": "2b82d0a1acf8de2b", | |
| "hash_cont_tokens": "0d037028e85c377d" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 5995, | |
| "padded": 17374, | |
| "non_padded": 611, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|alghafa:multiple_choice_sentiment_task|0": { | |
| "hashes": { | |
| "hash_examples": "6fb0e254ea5945d8", | |
| "hash_full_prompts": "6fb0e254ea5945d8", | |
| "hash_input_tokens": "bc8e5b5bb00c899b", | |
| "hash_cont_tokens": "8bf540aedb8fe199" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 1720, | |
| "padded": 5077, | |
| "non_padded": 83, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_exams|0": { | |
| "hashes": { | |
| "hash_examples": "6d721df351722656", | |
| "hash_full_prompts": "6d721df351722656", | |
| "hash_input_tokens": "aae53bf0c0d4902f", | |
| "hash_cont_tokens": "ea2de25feadbb1f2" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 537, | |
| "padded": 2108, | |
| "non_padded": 40, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:abstract_algebra|0": { | |
| "hashes": { | |
| "hash_examples": "f2ddca8f45c0a511", | |
| "hash_full_prompts": "f2ddca8f45c0a511", | |
| "hash_input_tokens": "579f21d370581a74", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 400, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:anatomy|0": { | |
| "hashes": { | |
| "hash_examples": "dfdbc1b83107668d", | |
| "hash_full_prompts": "dfdbc1b83107668d", | |
| "hash_input_tokens": "571f29d2f67a0d3e", | |
| "hash_cont_tokens": "f2e91b8b931771a9" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 135, | |
| "padded": 532, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:astronomy|0": { | |
| "hashes": { | |
| "hash_examples": "9736a606002a848e", | |
| "hash_full_prompts": "9736a606002a848e", | |
| "hash_input_tokens": "dd550dcf953a6f75", | |
| "hash_cont_tokens": "38e69d93dcbbd9cb" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 152, | |
| "padded": 608, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:business_ethics|0": { | |
| "hashes": { | |
| "hash_examples": "735e452fbb6dc63d", | |
| "hash_full_prompts": "735e452fbb6dc63d", | |
| "hash_input_tokens": "a4b2cf5b63a9e378", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:clinical_knowledge|0": { | |
| "hashes": { | |
| "hash_examples": "6ab0ca4da98aedcf", | |
| "hash_full_prompts": "6ab0ca4da98aedcf", | |
| "hash_input_tokens": "c180ed9f20140f43", | |
| "hash_cont_tokens": "a75c5894981084ec" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 265, | |
| "padded": 1052, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:college_biology|0": { | |
| "hashes": { | |
| "hash_examples": "17e4e390848018a4", | |
| "hash_full_prompts": "17e4e390848018a4", | |
| "hash_input_tokens": "8e1e3808d80e23d3", | |
| "hash_cont_tokens": "e13f831746c64d99" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 144, | |
| "padded": 576, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:college_chemistry|0": { | |
| "hashes": { | |
| "hash_examples": "4abb169f6dfd234b", | |
| "hash_full_prompts": "4abb169f6dfd234b", | |
| "hash_input_tokens": "8f6cc6d02c097a4b", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:college_computer_science|0": { | |
| "hashes": { | |
| "hash_examples": "a369e2e941358a1e", | |
| "hash_full_prompts": "a369e2e941358a1e", | |
| "hash_input_tokens": "1444a4480d9d6dd2", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:college_mathematics|0": { | |
| "hashes": { | |
| "hash_examples": "d7be03b8b6020bff", | |
| "hash_full_prompts": "d7be03b8b6020bff", | |
| "hash_input_tokens": "4fa625a6ce79b0f8", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 388, | |
| "non_padded": 12, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:college_medicine|0": { | |
| "hashes": { | |
| "hash_examples": "0518a00f097346bf", | |
| "hash_full_prompts": "0518a00f097346bf", | |
| "hash_input_tokens": "2afc983c3109978b", | |
| "hash_cont_tokens": "0f278bf68b724bdc" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 173, | |
| "padded": 684, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:college_physics|0": { | |
| "hashes": { | |
| "hash_examples": "5d842cd49bc70e12", | |
| "hash_full_prompts": "5d842cd49bc70e12", | |
| "hash_input_tokens": "eb768b081f978762", | |
| "hash_cont_tokens": "e74e146e81e55b5b" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 102, | |
| "padded": 404, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:computer_security|0": { | |
| "hashes": { | |
| "hash_examples": "8e85d9f85be9b32f", | |
| "hash_full_prompts": "8e85d9f85be9b32f", | |
| "hash_input_tokens": "a3b52ba621313406", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 392, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:conceptual_physics|0": { | |
| "hashes": { | |
| "hash_examples": "7964b55a0a49502b", | |
| "hash_full_prompts": "7964b55a0a49502b", | |
| "hash_input_tokens": "2cc5f2b915b81c8f", | |
| "hash_cont_tokens": "eebb2b44e72c9e7f" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 235, | |
| "padded": 884, | |
| "non_padded": 56, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:econometrics|0": { | |
| "hashes": { | |
| "hash_examples": "1e192eae38347257", | |
| "hash_full_prompts": "1e192eae38347257", | |
| "hash_input_tokens": "d54ad2dfc0120713", | |
| "hash_cont_tokens": "546bc1ae87b0e4a3" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 114, | |
| "padded": 456, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:electrical_engineering|0": { | |
| "hashes": { | |
| "hash_examples": "cf97671d5c441da1", | |
| "hash_full_prompts": "cf97671d5c441da1", | |
| "hash_input_tokens": "9bfa334e4a1a4821", | |
| "hash_cont_tokens": "18b7821722149786" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 145, | |
| "padded": 552, | |
| "non_padded": 28, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:elementary_mathematics|0": { | |
| "hashes": { | |
| "hash_examples": "6f49107ed43c40c5", | |
| "hash_full_prompts": "6f49107ed43c40c5", | |
| "hash_input_tokens": "1019c42dbae75276", | |
| "hash_cont_tokens": "1f169b13f4b450ab" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 378, | |
| "padded": 1480, | |
| "non_padded": 32, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:formal_logic|0": { | |
| "hashes": { | |
| "hash_examples": "7922c376008ba77b", | |
| "hash_full_prompts": "7922c376008ba77b", | |
| "hash_input_tokens": "387df5fd06cb7af7", | |
| "hash_cont_tokens": "c496d1261168f9d0" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 126, | |
| "padded": 504, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:global_facts|0": { | |
| "hashes": { | |
| "hash_examples": "11f9813185047d5b", | |
| "hash_full_prompts": "11f9813185047d5b", | |
| "hash_input_tokens": "efb64d727f4a0a10", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_biology|0": { | |
| "hashes": { | |
| "hash_examples": "2a804b1d90cbe66e", | |
| "hash_full_prompts": "2a804b1d90cbe66e", | |
| "hash_input_tokens": "bba0da50246f65c7", | |
| "hash_cont_tokens": "2c5c18b03a122455" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 310, | |
| "padded": 1232, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_chemistry|0": { | |
| "hashes": { | |
| "hash_examples": "0032168adabc53b4", | |
| "hash_full_prompts": "0032168adabc53b4", | |
| "hash_input_tokens": "d08e1c898021b243", | |
| "hash_cont_tokens": "7a5df3ff18787b73" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 203, | |
| "padded": 804, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_computer_science|0": { | |
| "hashes": { | |
| "hash_examples": "f2fb8740f9df980f", | |
| "hash_full_prompts": "f2fb8740f9df980f", | |
| "hash_input_tokens": "f55b5ac2eb985869", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_european_history|0": { | |
| "hashes": { | |
| "hash_examples": "73509021e7e66435", | |
| "hash_full_prompts": "73509021e7e66435", | |
| "hash_input_tokens": "d40e39192e6dfa59", | |
| "hash_cont_tokens": "6a868c2dfcf7063b" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 165, | |
| "padded": 660, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_geography|0": { | |
| "hashes": { | |
| "hash_examples": "9e08d1894940ff42", | |
| "hash_full_prompts": "9e08d1894940ff42", | |
| "hash_input_tokens": "d083740af1b5edee", | |
| "hash_cont_tokens": "b30a11f2b2cd2b83" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 198, | |
| "padded": 776, | |
| "non_padded": 16, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_government_and_politics|0": { | |
| "hashes": { | |
| "hash_examples": "64b7e97817ca6c76", | |
| "hash_full_prompts": "64b7e97817ca6c76", | |
| "hash_input_tokens": "799d0c9444e2c86b", | |
| "hash_cont_tokens": "f7dd0838eac5d187" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 193, | |
| "padded": 772, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_macroeconomics|0": { | |
| "hashes": { | |
| "hash_examples": "9f582da8534bd2ef", | |
| "hash_full_prompts": "9f582da8534bd2ef", | |
| "hash_input_tokens": "633046b5f2555212", | |
| "hash_cont_tokens": "aa906d7e7379792f" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 390, | |
| "padded": 1560, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_mathematics|0": { | |
| "hashes": { | |
| "hash_examples": "fd54f1c10d423c51", | |
| "hash_full_prompts": "fd54f1c10d423c51", | |
| "hash_input_tokens": "9fa46aa2d5893828", | |
| "hash_cont_tokens": "f35939cee4976df9" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 270, | |
| "padded": 1060, | |
| "non_padded": 20, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_microeconomics|0": { | |
| "hashes": { | |
| "hash_examples": "7037896925aaf42f", | |
| "hash_full_prompts": "7037896925aaf42f", | |
| "hash_input_tokens": "b994f883bbf815f2", | |
| "hash_cont_tokens": "b550d5a3014215b7" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 238, | |
| "padded": 944, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_physics|0": { | |
| "hashes": { | |
| "hash_examples": "60c3776215167dae", | |
| "hash_full_prompts": "60c3776215167dae", | |
| "hash_input_tokens": "32298e46ee4bd7d5", | |
| "hash_cont_tokens": "d2c1ad8e71d4c02c" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 151, | |
| "padded": 604, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_psychology|0": { | |
| "hashes": { | |
| "hash_examples": "61176bfd5da1298f", | |
| "hash_full_prompts": "61176bfd5da1298f", | |
| "hash_input_tokens": "e3fb5f85c891ab3c", | |
| "hash_cont_tokens": "1dec8f08c8fc27ad" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 545, | |
| "padded": 2164, | |
| "non_padded": 16, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_statistics|0": { | |
| "hashes": { | |
| "hash_examples": "40dfeebd1ea10f76", | |
| "hash_full_prompts": "40dfeebd1ea10f76", | |
| "hash_input_tokens": "b63465bbc1963e08", | |
| "hash_cont_tokens": "8ecae5f9122b3e68" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 216, | |
| "padded": 864, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_us_history|0": { | |
| "hashes": { | |
| "hash_examples": "03daa510ba917f4d", | |
| "hash_full_prompts": "03daa510ba917f4d", | |
| "hash_input_tokens": "1dde79e6128b3005", | |
| "hash_cont_tokens": "425441932a26a4b2" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 204, | |
| "padded": 816, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:high_school_world_history|0": { | |
| "hashes": { | |
| "hash_examples": "be075ffd579f43c2", | |
| "hash_full_prompts": "be075ffd579f43c2", | |
| "hash_input_tokens": "44c586174928b003", | |
| "hash_cont_tokens": "db835644c0f27d85" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 237, | |
| "padded": 948, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:human_aging|0": { | |
| "hashes": { | |
| "hash_examples": "caa5b69f640bd1ef", | |
| "hash_full_prompts": "caa5b69f640bd1ef", | |
| "hash_input_tokens": "c961d68b70c1cf28", | |
| "hash_cont_tokens": "ee52bd33861197cd" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 223, | |
| "padded": 860, | |
| "non_padded": 32, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:human_sexuality|0": { | |
| "hashes": { | |
| "hash_examples": "5ed2e38fb25a3767", | |
| "hash_full_prompts": "5ed2e38fb25a3767", | |
| "hash_input_tokens": "1e909520f2478496", | |
| "hash_cont_tokens": "219a54da8fbb609c" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 131, | |
| "padded": 516, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:international_law|0": { | |
| "hashes": { | |
| "hash_examples": "4e3e9e28d1b96484", | |
| "hash_full_prompts": "4e3e9e28d1b96484", | |
| "hash_input_tokens": "e1d6828c5a14219b", | |
| "hash_cont_tokens": "cd7c89cafc5f98dc" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 121, | |
| "padded": 480, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:jurisprudence|0": { | |
| "hashes": { | |
| "hash_examples": "e264b755366310b3", | |
| "hash_full_prompts": "e264b755366310b3", | |
| "hash_input_tokens": "cea50f3921f81984", | |
| "hash_cont_tokens": "912922da75a0f4cb" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 108, | |
| "padded": 432, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:logical_fallacies|0": { | |
| "hashes": { | |
| "hash_examples": "a4ab6965a3e38071", | |
| "hash_full_prompts": "a4ab6965a3e38071", | |
| "hash_input_tokens": "33706ab3139591b7", | |
| "hash_cont_tokens": "f59f09f36b9518f8" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 163, | |
| "padded": 640, | |
| "non_padded": 12, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:machine_learning|0": { | |
| "hashes": { | |
| "hash_examples": "b92320efa6636b40", | |
| "hash_full_prompts": "b92320efa6636b40", | |
| "hash_input_tokens": "30d23ee89a4539b8", | |
| "hash_cont_tokens": "06e5ed7500e1dd2f" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 112, | |
| "padded": 448, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:management|0": { | |
| "hashes": { | |
| "hash_examples": "c9ee4872a850fe20", | |
| "hash_full_prompts": "c9ee4872a850fe20", | |
| "hash_input_tokens": "de646084fbcb9dde", | |
| "hash_cont_tokens": "e1964824e70feaee" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 103, | |
| "padded": 404, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:marketing|0": { | |
| "hashes": { | |
| "hash_examples": "0c151b70f6a047e3", | |
| "hash_full_prompts": "0c151b70f6a047e3", | |
| "hash_input_tokens": "3556777b3261fef5", | |
| "hash_cont_tokens": "d60b4f5359e5071d" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 234, | |
| "padded": 924, | |
| "non_padded": 12, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:medical_genetics|0": { | |
| "hashes": { | |
| "hash_examples": "513f6cb8fca3a24e", | |
| "hash_full_prompts": "513f6cb8fca3a24e", | |
| "hash_input_tokens": "4421e9f902318fc5", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:miscellaneous|0": { | |
| "hashes": { | |
| "hash_examples": "259a190d635331db", | |
| "hash_full_prompts": "259a190d635331db", | |
| "hash_input_tokens": "3f3975efa1961fb9", | |
| "hash_cont_tokens": "3c3c13ae8b929146" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 783, | |
| "padded": 3076, | |
| "non_padded": 56, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:moral_disputes|0": { | |
| "hashes": { | |
| "hash_examples": "b85052c48a0b7bc3", | |
| "hash_full_prompts": "b85052c48a0b7bc3", | |
| "hash_input_tokens": "c44c77551418f841", | |
| "hash_cont_tokens": "7401410a2784b188" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 346, | |
| "padded": 1368, | |
| "non_padded": 16, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:moral_scenarios|0": { | |
| "hashes": { | |
| "hash_examples": "28d0b069ef00dd00", | |
| "hash_full_prompts": "28d0b069ef00dd00", | |
| "hash_input_tokens": "342fae16c5498358", | |
| "hash_cont_tokens": "df8ed645eef4f648" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 895, | |
| "padded": 3580, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:nutrition|0": { | |
| "hashes": { | |
| "hash_examples": "00c9bc5f1d305b2f", | |
| "hash_full_prompts": "00c9bc5f1d305b2f", | |
| "hash_input_tokens": "d4057c62c8e1263f", | |
| "hash_cont_tokens": "2756b6bd154b56a9" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 306, | |
| "padded": 1200, | |
| "non_padded": 24, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:philosophy|0": { | |
| "hashes": { | |
| "hash_examples": "a458c08454a3fd5f", | |
| "hash_full_prompts": "a458c08454a3fd5f", | |
| "hash_input_tokens": "184a0b1a402e5a65", | |
| "hash_cont_tokens": "6b25f0aa2982cdf2" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 311, | |
| "padded": 1188, | |
| "non_padded": 56, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:prehistory|0": { | |
| "hashes": { | |
| "hash_examples": "d6a0ecbdbb670e9c", | |
| "hash_full_prompts": "d6a0ecbdbb670e9c", | |
| "hash_input_tokens": "a4cf65c3265668af", | |
| "hash_cont_tokens": "f2889cf11c795528" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 324, | |
| "padded": 1268, | |
| "non_padded": 28, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:professional_accounting|0": { | |
| "hashes": { | |
| "hash_examples": "b4a95fe480b6540e", | |
| "hash_full_prompts": "b4a95fe480b6540e", | |
| "hash_input_tokens": "3c31731ff3bc479c", | |
| "hash_cont_tokens": "f20a7802272ecc89" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 282, | |
| "padded": 1128, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:professional_law|0": { | |
| "hashes": { | |
| "hash_examples": "c2be9651cdbdde3b", | |
| "hash_full_prompts": "c2be9651cdbdde3b", | |
| "hash_input_tokens": "749954fe50ef502c", | |
| "hash_cont_tokens": "80fe18bd290b0ac3" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 1534, | |
| "padded": 6112, | |
| "non_padded": 24, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:professional_medicine|0": { | |
| "hashes": { | |
| "hash_examples": "26ce92416288f273", | |
| "hash_full_prompts": "26ce92416288f273", | |
| "hash_input_tokens": "3c52682ab1663822", | |
| "hash_cont_tokens": "64099d6a43b02c6f" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 272, | |
| "padded": 1088, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:professional_psychology|0": { | |
| "hashes": { | |
| "hash_examples": "71ea5f182ea9a641", | |
| "hash_full_prompts": "71ea5f182ea9a641", | |
| "hash_input_tokens": "c7491555b57a9dc0", | |
| "hash_cont_tokens": "50d39d403bbf6cd6" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 612, | |
| "padded": 2424, | |
| "non_padded": 24, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:public_relations|0": { | |
| "hashes": { | |
| "hash_examples": "125adc21f91f8d77", | |
| "hash_full_prompts": "125adc21f91f8d77", | |
| "hash_input_tokens": "03a54331dc45fb3f", | |
| "hash_cont_tokens": "e49d0145cc703990" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 110, | |
| "padded": 436, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:security_studies|0": { | |
| "hashes": { | |
| "hash_examples": "3c18b216c099fb26", | |
| "hash_full_prompts": "3c18b216c099fb26", | |
| "hash_input_tokens": "a23c9628f8e02742", | |
| "hash_cont_tokens": "81b80155de3baac6" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 245, | |
| "padded": 976, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:sociology|0": { | |
| "hashes": { | |
| "hash_examples": "3f2a9634cef7417d", | |
| "hash_full_prompts": "3f2a9634cef7417d", | |
| "hash_input_tokens": "c3e1018487f49fab", | |
| "hash_cont_tokens": "25f61338f8548c6a" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 201, | |
| "padded": 784, | |
| "non_padded": 20, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:us_foreign_policy|0": { | |
| "hashes": { | |
| "hash_examples": "22249da54056475e", | |
| "hash_full_prompts": "22249da54056475e", | |
| "hash_input_tokens": "09c7ca047ec3769b", | |
| "hash_cont_tokens": "1195321124e7aba4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 100, | |
| "padded": 396, | |
| "non_padded": 4, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:virology|0": { | |
| "hashes": { | |
| "hash_examples": "9d194b9471dc624e", | |
| "hash_full_prompts": "9d194b9471dc624e", | |
| "hash_input_tokens": "46a017ede0b8dd93", | |
| "hash_cont_tokens": "84522949dd58bde8" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 166, | |
| "padded": 656, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arabic_mmlu:world_religions|0": { | |
| "hashes": { | |
| "hash_examples": "229e5fe50082b064", | |
| "hash_full_prompts": "229e5fe50082b064", | |
| "hash_input_tokens": "c7c958cdca559469", | |
| "hash_cont_tokens": "dd64f347e4d30564" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 171, | |
| "padded": 656, | |
| "non_padded": 28, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arc_challenge_okapi_ar|0": { | |
| "hashes": { | |
| "hash_examples": "ab893807673bc355", | |
| "hash_full_prompts": "ab893807673bc355", | |
| "hash_input_tokens": "4b76d962e1abc6c1", | |
| "hash_cont_tokens": "3f1bf866a298644e" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 1160, | |
| "padded": 4566, | |
| "non_padded": 74, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|arc_easy_ar|0": { | |
| "hashes": { | |
| "hash_examples": "acb688624acc3d04", | |
| "hash_full_prompts": "acb688624acc3d04", | |
| "hash_input_tokens": "981fef30fdba8968", | |
| "hash_cont_tokens": "17ca0feb9ece3549" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 2364, | |
| "padded": 9327, | |
| "non_padded": 129, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|boolq_ar|0": { | |
| "hashes": { | |
| "hash_examples": "48355a67867e0c32", | |
| "hash_full_prompts": "48355a67867e0c32", | |
| "hash_input_tokens": "a0fd67d03b6aa034", | |
| "hash_cont_tokens": "8ce34a4850811b21" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 3260, | |
| "padded": 6512, | |
| "non_padded": 8, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|copa_ext_ar|0": { | |
| "hashes": { | |
| "hash_examples": "9bb83301bb72eecf", | |
| "hash_full_prompts": "9bb83301bb72eecf", | |
| "hash_input_tokens": "4b775d00707bf3b0", | |
| "hash_cont_tokens": "f043ed6930922fea" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 90, | |
| "padded": 180, | |
| "non_padded": 0, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|hellaswag_okapi_ar|0": { | |
| "hashes": { | |
| "hash_examples": "6e8cf57a322dfadd", | |
| "hash_full_prompts": "6e8cf57a322dfadd", | |
| "hash_input_tokens": "99869a7228344e69", | |
| "hash_cont_tokens": "1e3950da70fa4fce" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 9171, | |
| "padded": 36669, | |
| "non_padded": 15, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|openbook_qa_ext_ar|0": { | |
| "hashes": { | |
| "hash_examples": "923d41eb0aca93eb", | |
| "hash_full_prompts": "923d41eb0aca93eb", | |
| "hash_input_tokens": "86f78f7efc3c818f", | |
| "hash_cont_tokens": "2c9e93bf22cde9f5" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 495, | |
| "padded": 1936, | |
| "non_padded": 44, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|piqa_ar|0": { | |
| "hashes": { | |
| "hash_examples": "94bc205a520d3ea0", | |
| "hash_full_prompts": "94bc205a520d3ea0", | |
| "hash_input_tokens": "a460f283ffad5d24", | |
| "hash_cont_tokens": "e63993c8f7c13c68" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 1833, | |
| "padded": 3613, | |
| "non_padded": 53, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|race_ar|0": { | |
| "hashes": { | |
| "hash_examples": "de65130bae647516", | |
| "hash_full_prompts": "de65130bae647516", | |
| "hash_input_tokens": "18deec6e9f91281a", | |
| "hash_cont_tokens": "cb476918dc3fac79" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 4929, | |
| "padded": 19707, | |
| "non_padded": 9, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|sciq_ar|0": { | |
| "hashes": { | |
| "hash_examples": "4d24dfc6cbc61dcd", | |
| "hash_full_prompts": "4d24dfc6cbc61dcd", | |
| "hash_input_tokens": "3fea833e5bd1fb0c", | |
| "hash_cont_tokens": "84a69317792cecd4" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 995, | |
| "padded": 3947, | |
| "non_padded": 33, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "community|toxigen_ar|0": { | |
| "hashes": { | |
| "hash_examples": "1e139513004a9a2e", | |
| "hash_full_prompts": "1e139513004a9a2e", | |
| "hash_input_tokens": "1140cef3289a560c", | |
| "hash_cont_tokens": "201d4d5bfd82b9c3" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 935, | |
| "padded": 1838, | |
| "non_padded": 32, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| }, | |
| "lighteval|xstory_cloze:ar|0": { | |
| "hashes": { | |
| "hash_examples": "865426a22c787481", | |
| "hash_full_prompts": "865426a22c787481", | |
| "hash_input_tokens": "57cf12b06cce30ef", | |
| "hash_cont_tokens": "2b153c829423fcd9" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 1511, | |
| "padded": 2965, | |
| "non_padded": 57, | |
| "effective_few_shots": 0.0, | |
| "num_truncated_few_shots": 0 | |
| } | |
| }, | |
| "summary_general": { | |
| "hashes": { | |
| "hash_examples": "dc31d7bbf8c079a3", | |
| "hash_full_prompts": "dc31d7bbf8c079a3", | |
| "hash_input_tokens": "a82c34e77c1efd99", | |
| "hash_cont_tokens": "97183428ddb943eb" | |
| }, | |
| "truncated": 0, | |
| "non_truncated": 72964, | |
| "padded": 233788, | |
| "non_padded": 1835, | |
| "num_truncated_few_shots": 0 | |
| } | |
| } |