{
  "results": {
    "truthfulqa_vi": {
      "mc1": 0.32229299363057323,
      "mc1_stderr": 0.016691231807266282,
      "mc2": 0.4771011274491321,
      "mc2_stderr": 0.015678688985042436
    }
  },
  "versions": {
    "truthfulqa_vi": 1
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=vilm/vietcuna-7b-v3",
    "num_fewshot": 0,
    "batch_size": "auto",
    "batch_sizes": [
      64
    ],
    "device": "cuda:0",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}