{
  "benczechmark_propaganda_argumentace": {
    "name": "Propaganda – Argumentace",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_argumentace",
    "short_name": "P-Argumentace",
    "category": "NLI",
    "abbreviation": "P-ARG",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_fabulace": {
    "name": "Propaganda – Fabulace",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_fabulace",
    "short_name": "P-Fabulace",
    "category": "NLI",
    "abbreviation": "P-FAB",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_nazor": {
    "name": "Propaganda – Názor",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_nazor",
    "short_name": "P-Názor",
    "category": "NLI",
    "abbreviation": "P-NAZOR",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_strach": {
    "name": "Propaganda – Strach",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_strach",
    "short_name": "P-Strach",
    "category": "NLI",
    "abbreviation": "P-STCH",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_zamereni": {
    "name": "Propaganda – Zaměření",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_zamereni",
    "short_name": "P-Zaměření",
    "category": "NLI",
    "abbreviation": "P-MER",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_demonizace": {
    "name": "Propaganda – Démonizace",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_demonizace",
    "short_name": "P-Démonizace",
    "category": "NLI",
    "abbreviation": "P-DEMON",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_lokace": {
    "name": "Propaganda – Lokace",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_lokace",
    "short_name": "P-Lokace",
    "category": "NLI",
    "abbreviation": "P-LOK",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_relativizace": {
    "name": "Propaganda – Relativizace",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_relativizace",
    "short_name": "P-Relativizace",
    "category": "NLI",
    "abbreviation": "P-REL",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_vina": {
    "name": "Propaganda – Vina",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_vina",
    "short_name": "P-Vina",
    "category": "NLI",
    "abbreviation": "P-VINA",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_zanr": {
    "name": "Propaganda – Žánr",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_zanr",
    "short_name": "P-Žánr",
    "category": "NLI",
    "abbreviation": "P-ZANR",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_emoce": {
    "name": "Propaganda – Emoce",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_emoce",
    "short_name": "P-Emoce",
    "category": "NLI",
    "abbreviation": "P-EMOCE",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_nalepkovani": {
    "name": "Propaganda – Nálepkování",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_nalepkovani",
    "short_name": "P-Nálepkování",
    "category": "NLI",
    "abbreviation": "P-LEP",
    "metric": "avg_mcauroc"
  },
  "benczechmark_propaganda_rusko": {
    "name": "Propaganda – Rusko",
    "source_url": "https://huggingface.co/datasets/CZLC/propaganda_rusko",
    "short_name": "P-Rusko",
    "category": "NLI",
    "abbreviation": "P-RUS",
    "metric": "avg_mcauroc"
  },
  "benczechmark_sentiment_mall": {
    "name": "CzechSentiment MALL",
    "source_url": "https://huggingface.co/datasets/CZLC/mall_sentiment_balanced",
    "short_name": "S-Mall",
    "category": "Sentiment",
    "abbreviation": "S-MALL",
    "metric": "avg_mcauroc"
  },
  "benczechmark_sentiment_fb": {
    "name": "CzechSentiment FB",
    "source_url": "https://huggingface.co/datasets/CZLC/fb_sentiment_balanced",
    "short_name": "S-FB",
    "category": "Sentiment",
    "abbreviation": "S-FB",
    "metric": "avg_mcauroc"
  },
  "benczechmark_sentiment_csfd": {
    "name": "CzechSentiment CSFD",
    "source_url": "https://huggingface.co/datasets/CZLC/csfd_sentiment_balanced",
    "short_name": "S-CSFD",
    "category": "Sentiment",
    "abbreviation": "S-CSFD",
    "metric": "avg_mcauroc"
  },
  "benczechmark_grammarerrorcorrection": {
    "name": "GrammarErrorCorrection",
    "source_url": "https://huggingface.co/datasets/CZLC/cs_gec",
    "short_name": "Grammar Error Correction",
    "category": "Czech Language Understanding",
    "abbreviation": "GEC",
    "metric": "avg_mcauroc"
  },
  "benczechmark_cs_naturalquestions": {
    "name": "NaturalQuestions-CZ",
    "source_url": "https://huggingface.co/datasets/CZLC/cs_naturalquestions",
    "short_name": "CS Natural Questions",
    "category": "Factual Knowledge",
    "abbreviation": "NQ",
    "metric": "exact_match"
  },
  "benczechmark_cs_sqad32": {
    "name": "SQAD3.2",
    "source_url": "https://huggingface.co/datasets/CZLC/SQAD_3.2",
    "short_name": "CS SQAD 3.2",
    "category": "Reading Comprehension",
    "abbreviation": "SQAD32",
    "metric": "exact_match"
  },
  "benczechmark_cs_triviaQA": {
    "name": "TriviaQA-CZ",
    "source_url": "https://huggingface.co/datasets/CZLC/cs_triviaqa",
    "short_name": "CS TriviaQA",
    "category": "Factual Knowledge",
    "abbreviation": "TQA",
    "metric": "exact_match"
  },
  "benczechmark_csfever_nli": {
    "name": "CSFEVER",
    "source_url": "https://huggingface.co/datasets/ctu-aic/csfever_nli",
    "short_name": "CSFever NLI",
    "category": "NLI",
    "abbreviation": "CFR",
    "metric": "avg_mcauroc"
  },
  "benczechmark_ctkfacts_nli": {
    "name": "CTKFACTS",
    "source_url": "https://huggingface.co/datasets/ctu-aic/ctkfacts_nli",
    "short_name": "CTKFacts NLI",
    "category": "NLI",
    "abbreviation": "CTK",
    "metric": "avg_mcauroc"
  },
  "benczechmark_cs_ner": {
    "name": "Czech Named Entity Corpus 2.0",
    "source_url": "https://huggingface.co/datasets/CZLC/cnec_2.0",
    "short_name": "CNEC2.0",
    "category": "NER",
    "abbreviation": "CNEC",
    "metric": "exact_match"
  },
  "benczechmark_hellaswag": {
    "name": "HellaSwag-CZ",
    "source_url": "https://huggingface.co/datasets/CZLC/cs_hellaswag",
    "short_name": "HellaSwag",
    "category": "Language Modeling",
    "abbreviation": "HASG",
    "metric": "acc"
  },
  "benczechmark_histcorpus": {
    "name": "Historical Corpus",
    "source_url": "https://huggingface.co/datasets/CZLC/benczechmark_histcorpus",
    "short_name": "HistCorpus",
    "category": "Language Modeling",
    "abbreviation": "HIST",
    "metric": "word_perplexity"
  },
  "benczechmark_klokan_qa": {
    "name": "Klokan QA",
    "source_url": "https://huggingface.co/datasets/hynky/klokan-qa",
    "short_name": "Klokan QA",
    "category": "Czech Math Reasoning",
    "abbreviation": "KQA",
    "metric": "acc"
  },
  "benczechmark_cs_court_decisions_ner": {
    "name": "Czech Court Decisions",
    "source_url": "https://huggingface.co/datasets/CZLC/ner_court_decisions",
    "short_name": "CS Court Decisions NER",
    "category": "NER",
    "abbreviation": "CCDNER",
    "metric": "exact_match"
  },
  "benczechmark_umimeto_biology": {
    "name": "Umimeto.cz – Biology",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/biology",
    "short_name": "Umimeto.cz – Biology",
    "category": "Factual Knowledge",
    "abbreviation": "UT-BIO",
    "metric": "acc"
  },
  "benczechmark_umimeto_chemistry": {
    "name": "Umimeto.cz – Chemistry",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/chemistry",
    "short_name": "Umimeto.cz – Chemistry",
    "category": "Factual Knowledge",
    "abbreviation": "UT-CHEM",
    "metric": "acc"
  },
  "benczechmark_umimeto_czech": {
    "name": "Umimeto.cz – Czech Language",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/czech",
    "short_name": "Umimeto.cz – Czech",
    "category": "Czech Language Understanding",
    "abbreviation": "UT-CZEL",
    "metric": "acc"
  },
  "benczechmark_umimeto_history": {
    "name": "Umimeto.cz – History",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/history",
    "short_name": "Umimeto.cz – History",
    "category": "Factual Knowledge",
    "abbreviation": "UT-HIST",
    "metric": "acc"
  },
  "benczechmark_umimeto_informatics": {
    "name": "Umimeto.cz – Informatics",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/informatics",
    "short_name": "Umimeto.cz – Informatics",
    "category": "Factual Knowledge",
    "abbreviation": "UT-IT",
    "metric": "acc"
  },
  "benczechmark_umimeto_math": {
    "name": "Umimeto.cz – Math",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/math",
    "short_name": "Umimeto.cz – Math",
    "category": "Czech Math Reasoning",
    "abbreviation": "UT-MATH",
    "metric": "acc"
  },
  "benczechmark_umimeto_physics": {
    "name": "Umimeto.cz – Physics",
    "source_url": "https://huggingface.co/datasets/CZLC/umimeto-qa/viewer/physics",
    "short_name": "Umimeto.cz – Physics",
    "category": "Factual Knowledge",
    "abbreviation": "UT-PHYS",
    "metric": "acc"
  },
  "benczechmark_cermat_czmath_mc": {
    "name": "CERMAT – Czech Math – MC",
    "source_url": "https://huggingface.co/datasets/CZLC/cermat_math_mc",
    "short_name": "Cermat Czech Math MC",
    "category": "Czech Math Reasoning",
    "abbreviation": "CCM-MC",
    "metric": "acc"
  },
  "benczechmark_cermat_czmath_open": {
    "name": "CERMAT – Czech Math – OPEN",
    "source_url": "https://huggingface.co/datasets/CZLC/cermat_math_open",
    "short_name": "Cermat Czech Math Open",
    "category": "Czech Math Reasoning",
    "abbreviation": "CCM-OPEN",
    "metric": "exact_match"
  },
  "benczechmark_cermat_czech_tf": {
    "name": "CERMAT – Czech Language – TF",
    "source_url": "https://huggingface.co/datasets/CZLC/cermat_czech_tf",
    "short_name": "Cermat Czech Language TF",
    "category": "Czech Language Understanding",
    "abbreviation": "CCL-TF",
    "metric": "avg_mcauroc"
  },
  "benczechmark_cermat_czech_mc": {
    "name": "CERMAT – Czech Language – MC",
    "source_url": "https://huggingface.co/datasets/CZLC/cermat_czech_mc",
    "short_name": "Cermat Czech Language MC",
    "category": "Czech Language Understanding",
    "abbreviation": "CCL-MC",
    "metric": "acc"
  },
  "benczechmark_cermat_czech_open": {
    "name": "CERMAT – Czech Language – OPEN",
    "source_url": "https://huggingface.co/datasets/CZLC/cermat_czech_open",
    "short_name": "Cermat Czech Language Open",
    "category": "Czech Language Understanding",
    "abbreviation": "CCL-OPEN",
    "metric": "exact_match"
  },
  "benczechmark_history_ir": {
    "name": "Historical Relevance Grading",
    "source_url": "https://huggingface.co/datasets/CZLC/history_retrieval",
    "abbreviation": "HIST-IR",
    "category": "Historical",
    "short_name": "Czech History IR",
    "metric": "acc"
  },
  "benczechmark_agree": {
    "name": "Agree",
    "source_url": "https://huggingface.co/datasets/davidadamczyk/czechbench_agree",
    "abbreviation": "Agree",
    "category": "Czech Language Understanding",
    "short_name": "Agree",
    "metric": "avg_mcauroc"
  },
  "benczechmark_belebele": {
    "name": "Belebele",
    "source_url": "https://huggingface.co/datasets/davidadamczyk/czechbench_belebele",
    "abbreviation": "BB",
    "category": "Reading Comprehension",
    "short_name": "Belebele",
    "metric": "acc"
  },
  "benczechmark_subjectivity": {
    "name": "Subjectivity",
    "source_url": "https://huggingface.co/datasets/davidadamczyk/czechbench_subjectivity",
    "abbreviation": "SUBJ",
    "category": "Sentiment",
    "short_name": "Subjectivity",
    "metric": "avg_mcauroc"
  },
  "benczechmark_essay": {
    "name": "Czech National Corpus – Essays",
    "source_url": "https://huggingface.co/datasets/CZLC/CNC_skript12",
    "abbreviation": "CNC-E",
    "category": "Language Modeling",
    "short_name": "CNC – Essays",
    "metric": "word_perplexity"
  },
  "benczechmark_fiction": {
    "name": "Czech National Corpus – Fiction",
    "source_url": "https://huggingface.co/datasets/CZLC/CNC_fictree",
    "abbreviation": "CNC-F",
    "category": "Language Modeling",
    "short_name": "CNC – Fiction",
    "metric": "word_perplexity"
  },
  "benczechmark_correspondence": {
    "name": "Czech National Corpus – Correspondence",
    "source_url": "https://huggingface.co/datasets/CZLC/CNC_KSK",
    "abbreviation": "CNC-KSK",
    "category": "Language Modeling",
    "short_name": "CNC – Correspondence",
    "metric": "word_perplexity"
  },
  "benczechmark_havlicek": {
    "name": "Czech National Corpus – Karel Havlíček – Noviny",
    "source_url": "https://huggingface.co/datasets/CZLC/CNC_KHavlicek_HistNews",
    "abbreviation": "CNC-KH",
    "category": "Language Modeling",
    "short_name": "CNC – KHavlicek – HistNews",
    "metric": "word_perplexity"
  },
  "benczechmark_spoken": {
    "name": "Czech National Corpus – Spoken",
    "source_url": "https://huggingface.co/datasets/CZLC/CNC_oral_ortofon",
    "abbreviation": "CNC-SPK",
    "category": "Language Modeling",
    "short_name": "CNC – Spoken",
    "metric": "word_perplexity"
  },
  "benczechmark_dialect": {
    "name": "Czech National Corpus – Dialect",
    "source_url": "https://huggingface.co/datasets/CZLC/CNC_Dialekt",
    "abbreviation": "CNC-DIAL",
    "category": "Language Modeling",
    "short_name": "CNC – DIALEKT",
    "metric": "word_perplexity"
  },
  "benczechmark_snli": {
    "name": "Czech SNLI",
    "source_url": "https://huggingface.co/datasets/CZLC/cs_snli",
    "abbreviation": "SNLI",
    "category": "NLI",
    "short_name": "Czech SNLI",
    "metric": "avg_mcauroc"
  }
}