{
"results": {
"anli_r1": {
"acc": 0.343,
"acc_stderr": 0.015019206922356953
},
"anli_r2": {
"acc": 0.318,
"acc_stderr": 0.014734079309311901
},
"anli_r3": {
"acc": 0.325,
"acc_stderr": 0.013526454480351028
},
"cb": {
"acc": 0.42857142857142855,
"acc_stderr": 0.06672848092813058,
"f1": 0.3058470764617691
},
"copa": {
"acc": 0.78,
"acc_stderr": 0.04163331998932263
},
"hellaswag": {
"acc": 0.45727942640908187,
"acc_stderr": 0.004971534874389935,
"acc_norm": 0.602867954590719,
"acc_norm_stderr": 0.004883037758919964
},
"rte": {
"acc": 0.48736462093862815,
"acc_stderr": 0.030086851767188564
},
"winogrande": {
"acc": 0.5808997632202052,
"acc_stderr": 0.013867325192210116
},
"storycloze_2016": {
"acc": 0.7215392838054516,
"acc_stderr": 0.010365521460604415
},
"boolq": {
"acc": 0.5489296636085627,
"acc_stderr": 0.008703080962379622
},
"arc_easy": {
"acc": 0.6325757575757576,
"acc_stderr": 0.009892552616211558,
"acc_norm": 0.617003367003367,
"acc_norm_stderr": 0.009974920384536479
},
"arc_challenge": {
"acc": 0.2901023890784983,
"acc_stderr": 0.013261573677520759,
"acc_norm": 0.31313993174061433,
"acc_norm_stderr": 0.013552671543623496
},
"sciq": {
"acc": 0.906,
"acc_stderr": 0.009233052000787738,
"acc_norm": 0.891,
"acc_norm_stderr": 0.009859828407037186
},
"piqa": {
"acc": 0.7540805223068553,
"acc_stderr": 0.010047331865625194,
"acc_norm": 0.7698585418933623,
"acc_norm_stderr": 0.009820832826839796
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}