| Model | Persian MMLU (Exact Match) | ParsiNLU Entailment (Exact Match) | ParsiNLU Machine Translation En Fa (Persian Sentence Bleu) | ParsiNLU Reading Comprehension (Common Tokens) | ParsiNLU Machine Translation Fa En (English Sentence Bleu) | Persian News Summary (Persian Rouge) | FarsTail Entailment (Exact Match) | Persian Math (Math Equivalence) |
|---|---|---|---|---|---|---|---|---|
| CohereForAI/aya-23-8B | 0.285 | 0.395 | 0.318 | 0.675 | 0.35 | 0.252 | 0.467 | 0.36 |
| JabirLLM-400B | 0.29 | 0.675 | 0.366 | 0.777 | 0.405 | 0.407 | 0.897 | 0.52 |
| MehdiHosseiniMoghadam/AVA-Llama-3-V2 | 0.225 | 0.428 | 0.26 | 0.569 | 0.059 | 0.275 | 0.217 | 0.434 |
| PartAI/Dorna-Llama3-8B-Instruct | 0.235 | 0.411 | 0.265 | 0.61 | 0.246 | 0.23 | 0.408 | 0.423 |
| Qwen/Qwen2-72B-Instruct | 0.285 | 0.793 | 0.236 | 0.694 | 0.308 | 0.3 | 0.833 | 0.737 |
| Qwen/Qwen2-7B-Instruct | 0.395 | 0.544 | 0.229 | 0.639 | 0.316 | 0.27 | 0.446 | 0.6 |
| claude-3-5-sonnet-20240620 | 0.505 | 0.851 | 0.216 | 0.496 | 0.181 | 0.333 | 0.944 | 0.851 |
| google/gemma2-9b-it | 0.305 | 0.609 | 0.304 | 0.779 | 0.3 | 0.36 | 0.787 | 0.702 |
| gpt-3.5-turbo | 0.035 | 0.432 | 0.343 | 0.681 | 0.36 | 0.314 | 0.366 | 0.589 |
| gpt-4-turbo | 0.135 | 0.75 | 0.377 | 0.777 | 0.399 | 0.348 | 0.828 | 0.811 |
| gpt-4o | 0.31 | 0.771 | 0.349 | 0.752 | 0.431 | 0.35 | 0.893 | 0.823 |
| gpt-4o-mini | 0.24 | 0.733 | 0.341 | 0.734 | 0.389 | 0.335 | 0.85 | 0.781 |
| meta-llama/Meta-Llama-3-70B-Instruct | 0.37 | 0.542 | 0.326 | 0.794 | 0.254 | 0.358 | 0.738 | 0.68 |
| meta-llama/Meta-Llama-3.1-8B-Instruct | 0.035 | 0.462 | 0.301 | 0.734 | 0.29 | 0.303 | 0.409 | 0.537 |
| meta-llama/Meta-Llama-3-8B-Instruct | 0.215 | 0.481 | 0.282 | 0.704 | 0.251 | 0.09 | 0.342 | 0.497 |
| universitytehran/PersianMind-v1.0 | 0 | 0.011 | 0.296 | 0.5 | 0.26 | 0.359 | 0.313 | 0.376 |

The original `config` entries also carry `model_dtype` and `model_sha` fields, both of which are empty strings for every model listed above.
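The raw entries are nested JSON pairs of `config` and `results`, with each task mapping to a single `{metric_name: score}` dict. Below is a minimal sketch, assuming those pairs are available as Python dicts, of flattening them into a pandas DataFrame for side-by-side comparison. The two entries shown are copied from the table above (with only a subset of tasks), and the use of pandas is an assumption, not something specified by this page.

```python
# Hedged sketch: flatten (config, results) pairs into one comparison table.
# The data below is a partial copy of two rows from the table above;
# extend `rows` with the remaining entries as needed.
import pandas as pd

rows = [
    (
        {"model_dtype": "", "model_name": "CohereForAI/aya-23-8B", "model_sha": ""},
        {
            "Persian MMLU": {"Exact Match": 0.285},
            "FarsTail Entailment": {"Exact Match": 0.467},
            "Persian Math": {"Math Equivalence": 0.36},
        },
    ),
    (
        {"model_dtype": "", "model_name": "gpt-4o", "model_sha": ""},
        {
            "Persian MMLU": {"Exact Match": 0.31},
            "FarsTail Entailment": {"Exact Match": 0.893},
            "Persian Math": {"Math Equivalence": 0.823},
        },
    ),
]

records = []
for config, results in rows:
    record = {"model_name": config["model_name"]}
    # Each task holds one {metric: score}; name the column "task (metric)".
    for task, metrics in results.items():
        for metric, score in metrics.items():
            record[f"{task} ({metric})"] = score
    records.append(record)

df = pd.DataFrame(records).set_index("model_name")
print(df.sort_values("Persian MMLU (Exact Match)", ascending=False))
```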