{
  "config": {
    "model_dtype": "torch.float16",
    "model_name": "rdhawk/TR_TaskSpesificLM"
  },
  "results": {
    "MMLU": {
      "metric_name": 0.5402
    },
    "Truthful_qa": {
      "metric_name": 0.5175
    },
    "ARC": {
      "metric_name": 0.4531
    },
    "HellaSwag": {
      "metric_name": 0.4486
    },
    "GSM8K": {
      "metric_name": 0.4700
    },
    "GSM1K": {
      "metric_name": 0.4700
    },
    "Winogrande": {
      "metric_name": 0.5237
    }
  }
}