results/rhysjones/phi-2-orange-v2/results_2024-03-18 18:02:17.780287.json
Upload rhysjones/phi-2-orange-v2/results_2024-03-18 18:02:17.780287.json with huggingface_hub (commit 6128ac9)
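The upload note above records that this file was pushed with huggingface_hub. A minimal sketch of fetching it locally with the same library, assuming the id of the hosting dataset repository (the real id is not shown here, so a placeholder is used):

from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual dataset repository that hosts these results.
local_path = hf_hub_download(
    repo_id="your-org/your-results-dataset",
    filename="rhysjones/phi-2-orange-v2/results_2024-03-18 18:02:17.780287.json",
    repo_type="dataset",
)
print(local_path)  # local cache path of the downloaded JSON file

The JSON itself contains the per-benchmark accuracies and standard errors: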
{
"results": {
"pubmedqa": {
"acc,none": 0.734,
"acc_stderr,none": 0.019780559675655462,
"alias": "pubmedqa"
},
"mmlu_professional_medicine": {
"alias": "professional_medicine",
"acc,none": 0.48161764705882354,
"acc_stderr,none": 0.030352303395351964
},
"mmlu_medical_genetics": {
"alias": "medical_genetics",
"acc,none": 0.63,
"acc_stderr,none": 0.048523658709391
},
"mmlu_college_medicine": {
"alias": "college_medicine",
"acc,none": 0.5433526011560693,
"acc_stderr,none": 0.03798106566014499
},
"mmlu_college_biology": {
"alias": "college_biology",
"acc,none": 0.5763888888888888,
"acc_stderr,none": 0.04132125019723369
},
"mmlu_clinical_knowledge": {
"alias": "clinical_knowledge",
"acc,none": 0.5584905660377358,
"acc_stderr,none": 0.03056159042673184
},
"mmlu_anatomy": {
"alias": "anatomy",
"acc,none": 0.45185185185185184,
"acc_stderr,none": 0.04299268905480864
},
"medqa_4options": {
"acc,none": 0.39591516103692065,
"acc_stderr,none": 0.013712179154041873,
"acc_norm,none": 0.39591516103692065,
"acc_norm_stderr,none": 0.013712179154041873,
"alias": "medqa_4options"
},
"medmcqa": {
"acc,none": 0.361224001912503,
"acc_stderr,none": 0.007427977511911366,
"acc_norm,none": 0.361224001912503,
"acc_norm_stderr,none": 0.007427977511911366,
"alias": "medmcqa"
}
},
"config": {
"model_dtype": "float32",
"model_name": "rhysjones/phi-2-orange-v2",
"model_sha": "main"
}
}
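A minimal sketch of parsing the file above and printing each benchmark's accuracy with its standard error, assuming the JSON has been saved locally under the file name from the upload path (only the Python standard library is used):

import json

# Assumed local file name, taken from the upload path above; adjust to your download location.
RESULTS_PATH = "results_2024-03-18 18:02:17.780287.json"

with open(RESULTS_PATH, encoding="utf-8") as f:
    data = json.load(f)

config = data["config"]
print(f"Model: {config['model_name']} (dtype: {config['model_dtype']}, revision: {config['model_sha']})")

# Each entry under "results" carries an accuracy and its standard error
# in the "acc,none" / "acc_stderr,none" fields, plus a short "alias".
for task, metrics in data["results"].items():
    alias = metrics.get("alias", task)
    acc = metrics["acc,none"]
    stderr = metrics["acc_stderr,none"]
    print(f"{alias:>25}: acc = {acc:.3f} ± {stderr:.3f}")

For this file, the loop would print, for example, pubmedqa with acc ≈ 0.734 ± 0.020.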