{
    "config": {
        "model_dtype": "torch.float16",
        "model_name": "tolgadev/llama-2-7b-ruyallm"
    },
    "results": {
        "MMLU": {
            "metric_name": 0.3403830510981291
        },
        "Truthful_qa": {
            "metric_name": 0.42003020789928697
        },
        "ARC": {
            "metric_name": 0.33361774744027306
        },
        "HellaSwag": {
            "metric_name": 0.41910353392796657
        },
        "GSM8K": {
            "metric_name": 0.01442672741078208
        },
        "Winogrande": {
            "metric_name": 0.5410742496050553
        }
    }
}