nicholasKluge
committed on
Commit ee42b7e
Parent(s): ef9ddb3
Upload evals-dpo.yaml with huggingface_hub
evals-dpo.yaml +15 -0
evals-dpo.yaml ADDED
@@ -0,0 +1,15 @@
+ arc_pt: 32.05
+ assin2_rte: 43.39
+ assin2_sts: 6.31
+ bluex: 22.81
+ calame_pt: 57.66
+ enem: 20.43
+ faquad_nli: 43.97
+ hatebr: 27.70
+ hatespeech_pt: 29.18
+ hellaswag_pt: 48.28
+ lambada_pt: 39.92
+ oab_exams: 24.83
+ step: 12700
+ truthfulqa: 38.44
+ tweet_br: 43.11
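The commit message indicates the file was pushed programmatically with huggingface_hub. Below is a minimal sketch of such an upload using the library's HfApi.upload_file method; the repo_id value is a placeholder assumption, not the actual repository name from this commit.

```python
# Minimal sketch: uploading evals-dpo.yaml to a Hub repo with huggingface_hub.
# The repo_id below is a placeholder; substitute the real model repository.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token saved by `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="evals-dpo.yaml",          # local YAML file with the eval scores
    path_in_repo="evals-dpo.yaml",             # destination path inside the repo
    repo_id="your-username/your-model-repo",   # placeholder repo id (assumption)
    repo_type="model",
    commit_message="Upload evals-dpo.yaml with huggingface_hub",
)
```

Running this creates a single commit on the target repo that adds the file, which is consistent with the +15 -0 diff shown above.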