{
"results": {
"anli_r1": {
"acc": 0.312,
"acc_stderr": 0.014658474370509005
},
"anli_r2": {
"acc": 0.328,
"acc_stderr": 0.014853842487270336
},
"anli_r3": {
"acc": 0.32666666666666666,
"acc_stderr": 0.013544340907003665
},
"cb": {
"acc": 0.5,
"acc_stderr": 0.06741998624632421,
"f1": 0.4627446995868048
},
"copa": {
"acc": 0.71,
"acc_stderr": 0.04560480215720684
},
"hellaswag": {
"acc": 0.3833897629954192,
"acc_stderr": 0.0048521826212742526,
"acc_norm": 0.47769368651663013,
"acc_norm_stderr": 0.00498481339101621
},
"rte": {
"acc": 0.4657039711191336,
"acc_stderr": 0.030025579819366426
},
"winogrande": {
"acc": 0.5706393054459353,
"acc_stderr": 0.01391153749996917
},
"storycloze_2016": {
"acc": 0.6456440406199893,
"acc_stderr": 0.011061031791615487
},
"boolq": {
"acc": 0.5324159021406728,
"acc_stderr": 0.008726657178723137
},
"arc_easy": {
"acc": 0.6094276094276094,
"acc_stderr": 0.01001105911206424,
"acc_norm": 0.5631313131313131,
"acc_norm_stderr": 0.010177672928157695
},
"arc_challenge": {
"acc": 0.2832764505119454,
"acc_stderr": 0.013167478735134575,
"acc_norm": 0.29436860068259385,
"acc_norm_stderr": 0.013318528460539422
},
"sciq": {
"acc": 0.84,
"acc_stderr": 0.011598902298689004,
"acc_norm": 0.795,
"acc_norm_stderr": 0.012772554096113118
},
"piqa": {
"acc": 0.750816104461371,
"acc_stderr": 0.010091882770120216,
"acc_norm": 0.7584330794341676,
"acc_norm_stderr": 0.009986718001804439
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}