---
license: apache-2.0
tags:
- merge
- OpenPipe/mistral-ft-optimized-1218
- mlabonne/NeuralHermes-2.5-Mistral-7B
---

# mistral-7b-merged-slerp

mistral-7b-merged-slerp is a SLERP merge, built with [mergekit](https://github.com/cg123/mergekit), of the following models:
* [OpenPipe/mistral-ft-optimized-1218](https://huggingface.co/OpenPipe/mistral-ft-optimized-1218)
* [mlabonne/NeuralHermes-2.5-Mistral-7B](https://huggingface.co/mlabonne/NeuralHermes-2.5-Mistral-7B)

## 🧩 Configuration

```yaml
slices:
  - sources:
      - model: OpenPipe/mistral-ft-optimized-1218
        layer_range: [0, 32]
      - model: mlabonne/NeuralHermes-2.5-Mistral-7B
        layer_range: [0, 32]
merge_method: slerp
base_model: OpenPipe/mistral-ft-optimized-1218
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5
dtype: bfloat16
```
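
In this configuration, `t` is the interpolation factor between the two models (roughly, `t = 0` keeps the base model's weights and `t = 1` the other model's): the five values in each list form a gradient across the 32 layers, so self-attention tensors lean toward mistral-ft-optimized-1218 early and NeuralHermes-2.5 late, the MLP tensors follow the mirrored schedule, and all remaining tensors use a flat 0.5. For intuition, here is a minimal sketch of spherical linear interpolation between two weight tensors; it is illustrative only, and real merge tooling handles more edge cases:

```python
import torch

def slerp(t: float, a: torch.Tensor, b: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Illustrative SLERP between two weight tensors, treated as flat vectors."""
    a_flat, b_flat = a.flatten().float(), b.flatten().float()
    a_dir = a_flat / (a_flat.norm() + eps)
    b_dir = b_flat / (b_flat.norm() + eps)
    # Angle between the two weight vectors' directions
    omega = torch.arccos(torch.clamp(torch.dot(a_dir, b_dir), -1.0, 1.0))
    so = torch.sin(omega)
    if so.abs() < eps:
        # Nearly parallel: fall back to plain linear interpolation
        return (1 - t) * a + t * b
    coeff_a = torch.sin((1 - t) * omega) / so
    coeff_b = torch.sin(t * omega) / so
    return (coeff_a * a_flat + coeff_b * b_flat).reshape(a.shape).to(a.dtype)
```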

## Evaluation

Full results are available on the [Open LLM Leaderboard details dataset](https://huggingface.co/datasets/open-llm-leaderboard/details_mychen76__mistral-7b-merged-slerp).

Latest run: [results_2024-03-10T11-04-57.263703.json](https://huggingface.co/datasets/open-llm-leaderboard/details_mychen76__mistral-7b-merged-slerp/blob/main/results_2024-03-10T11-04-57.263703.json)
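
The full per-task breakdown is reproduced below. As a convenience, here is a minimal sketch that prints each task's headline metric, assuming the JSON excerpt is saved locally as `results.json` (a hypothetical filename):

```python
import json

# Print the headline metric for each task in the results excerpt
# (preferring acc_norm, then acc, then TruthfulQA's mc2).
with open("results.json") as f:
    scores = json.load(f)

for task, metrics in scores.items():
    value = metrics.get("acc_norm") or metrics.get("acc") or metrics.get("mc2")
    print(f"{task:55s} {value:.4f}")
```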

```json
{
    "all": {
        "acc": 0.6444688446653744,
        "acc_stderr": 0.03217564834975917,
        "acc_norm": 0.6448609553287138,
        "acc_norm_stderr": 0.032833467276313325,
        "mc1": 0.4283965728274174,
        "mc1_stderr": 0.017323088597314754,
        "mc2": 0.5985018412437423,
        "mc2_stderr": 0.01514980059720055
    },
    "harness|arc:challenge|25": {
        "acc": 0.6476109215017065,
        "acc_stderr": 0.013960142600598675,
        "acc_norm": 0.6774744027303754,
        "acc_norm_stderr": 0.013659980894277364
    },
    "harness|hellaswag|10": {
        "acc": 0.6700856403106951,
        "acc_stderr": 0.004692208279690595,
        "acc_norm": 0.8616809400517825,
        "acc_norm_stderr": 0.0034452899250117337
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.3,
        "acc_stderr": 0.046056618647183814,
        "acc_norm": 0.3,
        "acc_norm_stderr": 0.046056618647183814
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6074074074074074,
        "acc_stderr": 0.0421850621536888,
        "acc_norm": 0.6074074074074074,
        "acc_norm_stderr": 0.0421850621536888
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.7105263157894737,
        "acc_stderr": 0.03690677986137283,
        "acc_norm": 0.7105263157894737,
        "acc_norm_stderr": 0.03690677986137283
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.61,
        "acc_stderr": 0.04902071300001975,
        "acc_norm": 0.61,
        "acc_norm_stderr": 0.04902071300001975
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6830188679245283,
        "acc_stderr": 0.02863723563980089,
        "acc_norm": 0.6830188679245283,
        "acc_norm_stderr": 0.02863723563980089
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7777777777777778,
        "acc_stderr": 0.03476590104304134,
        "acc_norm": 0.7777777777777778,
        "acc_norm_stderr": 0.03476590104304134
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.45,
        "acc_stderr": 0.05,
        "acc_norm": 0.45,
        "acc_norm_stderr": 0.05
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.49,
        "acc_stderr": 0.05024183937956912,
        "acc_norm": 0.49,
        "acc_norm_stderr": 0.05024183937956912
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.29,
        "acc_stderr": 0.045604802157206845,
        "acc_norm": 0.29,
        "acc_norm_stderr": 0.045604802157206845
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6473988439306358,
        "acc_stderr": 0.036430371689585475,
        "acc_norm": 0.6473988439306358,
        "acc_norm_stderr": 0.036430371689585475
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.39215686274509803,
        "acc_stderr": 0.04858083574266345,
        "acc_norm": 0.39215686274509803,
        "acc_norm_stderr": 0.04858083574266345
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.74,
        "acc_stderr": 0.04408440022768078,
        "acc_norm": 0.74,
        "acc_norm_stderr": 0.04408440022768078
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5829787234042553,
        "acc_stderr": 0.03223276266711712,
        "acc_norm": 0.5829787234042553,
        "acc_norm_stderr": 0.03223276266711712
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.5,
        "acc_stderr": 0.047036043419179864,
        "acc_norm": 0.5,
        "acc_norm_stderr": 0.047036043419179864
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5448275862068965,
        "acc_stderr": 0.04149886942192117,
        "acc_norm": 0.5448275862068965,
        "acc_norm_stderr": 0.04149886942192117
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.41534391534391535,
        "acc_stderr": 0.025379524910778405,
        "acc_norm": 0.41534391534391535,
        "acc_norm_stderr": 0.025379524910778405
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.4603174603174603,
        "acc_stderr": 0.04458029125470973,
        "acc_norm": 0.4603174603174603,
        "acc_norm_stderr": 0.04458029125470973
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.38,
        "acc_stderr": 0.048783173121456316,
        "acc_norm": 0.38,
        "acc_norm_stderr": 0.048783173121456316
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7774193548387097,
        "acc_stderr": 0.023664216671642518,
        "acc_norm": 0.7774193548387097,
        "acc_norm_stderr": 0.023664216671642518
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.5073891625615764,
        "acc_stderr": 0.035176035403610105,
        "acc_norm": 0.5073891625615764,
        "acc_norm_stderr": 0.035176035403610105
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.69,
        "acc_stderr": 0.04648231987117316,
        "acc_norm": 0.69,
        "acc_norm_stderr": 0.04648231987117316
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7696969696969697,
        "acc_stderr": 0.0328766675860349,
        "acc_norm": 0.7696969696969697,
        "acc_norm_stderr": 0.0328766675860349
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.7878787878787878,
        "acc_stderr": 0.029126522834586818,
        "acc_norm": 0.7878787878787878,
        "acc_norm_stderr": 0.029126522834586818
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.9015544041450777,
        "acc_stderr": 0.02150024957603346,
        "acc_norm": 0.9015544041450777,
        "acc_norm_stderr": 0.02150024957603346
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6538461538461539,
        "acc_stderr": 0.02412112541694119,
        "acc_norm": 0.6538461538461539,
        "acc_norm_stderr": 0.02412112541694119
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.32222222222222224,
        "acc_stderr": 0.028493465091028593,
        "acc_norm": 0.32222222222222224,
        "acc_norm_stderr": 0.028493465091028593
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.6890756302521008,
        "acc_stderr": 0.03006676158297793,
        "acc_norm": 0.6890756302521008,
        "acc_norm_stderr": 0.03006676158297793
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.32450331125827814,
        "acc_stderr": 0.03822746937658752,
        "acc_norm": 0.32450331125827814,
        "acc_norm_stderr": 0.03822746937658752
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8550458715596331,
        "acc_stderr": 0.01509421569970048,
        "acc_norm": 0.8550458715596331,
        "acc_norm_stderr": 0.01509421569970048
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5185185185185185,
        "acc_stderr": 0.034076320938540516,
        "acc_norm": 0.5185185185185185,
        "acc_norm_stderr": 0.034076320938540516
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.8186274509803921,
        "acc_stderr": 0.027044621719474082,
        "acc_norm": 0.8186274509803921,
        "acc_norm_stderr": 0.027044621719474082
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.8059071729957806,
        "acc_stderr": 0.0257449025322909,
        "acc_norm": 0.8059071729957806,
        "acc_norm_stderr": 0.0257449025322909
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6905829596412556,
        "acc_stderr": 0.03102441174057221,
        "acc_norm": 0.6905829596412556,
        "acc_norm_stderr": 0.03102441174057221
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7786259541984732,
        "acc_stderr": 0.03641297081313729,
        "acc_norm": 0.7786259541984732,
        "acc_norm_stderr": 0.03641297081313729
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.8099173553719008,
        "acc_stderr": 0.03581796951709282,
        "acc_norm": 0.8099173553719008,
        "acc_norm_stderr": 0.03581796951709282
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7685185185185185,
        "acc_stderr": 0.04077494709252626,
        "acc_norm": 0.7685185185185185,
        "acc_norm_stderr": 0.04077494709252626
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7730061349693251,
        "acc_stderr": 0.03291099578615769,
        "acc_norm": 0.7730061349693251,
        "acc_norm_stderr": 0.03291099578615769
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.4642857142857143,
        "acc_stderr": 0.04733667890053756,
        "acc_norm": 0.4642857142857143,
        "acc_norm_stderr": 0.04733667890053756
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7572815533980582,
        "acc_stderr": 0.04245022486384495,
        "acc_norm": 0.7572815533980582,
        "acc_norm_stderr": 0.04245022486384495
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8547008547008547,
        "acc_stderr": 0.023086635086841407,
        "acc_norm": 0.8547008547008547,
        "acc_norm_stderr": 0.023086635086841407
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.7,
        "acc_stderr": 0.046056618647183814,
        "acc_norm": 0.7,
        "acc_norm_stderr": 0.046056618647183814
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8352490421455939,
        "acc_stderr": 0.013265346261323793,
        "acc_norm": 0.8352490421455939,
        "acc_norm_stderr": 0.013265346261323793
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7283236994219653,
        "acc_stderr": 0.023948512905468365,
        "acc_norm": 0.7283236994219653,
        "acc_norm_stderr": 0.023948512905468365
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.36312849162011174,
        "acc_stderr": 0.016083749986853697,
        "acc_norm": 0.36312849162011174,
        "acc_norm_stderr": 0.016083749986853697
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7450980392156863,
        "acc_stderr": 0.02495418432487991,
        "acc_norm": 0.7450980392156863,
        "acc_norm_stderr": 0.02495418432487991
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7106109324758842,
        "acc_stderr": 0.025755865922632945,
        "acc_norm": 0.7106109324758842,
        "acc_norm_stderr": 0.025755865922632945
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.7469135802469136,
        "acc_stderr": 0.024191808600712995,
        "acc_norm": 0.7469135802469136,
        "acc_norm_stderr": 0.024191808600712995
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.4787234042553192,
        "acc_stderr": 0.029800481645628693,
        "acc_norm": 0.4787234042553192,
        "acc_norm_stderr": 0.029800481645628693
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4726205997392438,
        "acc_stderr": 0.012751075788015058,
        "acc_norm": 0.4726205997392438,
        "acc_norm_stderr": 0.012751075788015058
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6948529411764706,
        "acc_stderr": 0.027971541370170598,
        "acc_norm": 0.6948529411764706,
        "acc_norm_stderr": 0.027971541370170598
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6748366013071896,
        "acc_stderr": 0.01895088677080631,
        "acc_norm": 0.6748366013071896,
        "acc_norm_stderr": 0.01895088677080631
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6636363636363637,
        "acc_stderr": 0.04525393596302506,
        "acc_norm": 0.6636363636363637,
        "acc_norm_stderr": 0.04525393596302506
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.746938775510204,
        "acc_stderr": 0.027833023871399673,
        "acc_norm": 0.746938775510204,
        "acc_norm_stderr": 0.027833023871399673
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.835820895522388,
        "acc_stderr": 0.026193923544454115,
        "acc_norm": 0.835820895522388,
        "acc_norm_stderr": 0.026193923544454115
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.85,
        "acc_stderr": 0.0358870281282637,
        "acc_norm": 0.85,
        "acc_norm_stderr": 0.0358870281282637
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5301204819277109,
        "acc_stderr": 0.03885425420866767,
        "acc_norm": 0.5301204819277109,
        "acc_norm_stderr": 0.03885425420866767
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8304093567251462,
        "acc_stderr": 0.02878210810540171,
        "acc_norm": 0.8304093567251462,
        "acc_norm_stderr": 0.02878210810540171
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.4283965728274174,
        "mc1_stderr": 0.017323088597314754,
        "mc2": 0.5985018412437423,
        "mc2_stderr": 0.01514980059720055
    },
    "harness|winogrande|5": {
        "acc": 0.8018942383583267,
        "acc_stderr": 0.01120186274448705
    },
    "harness|gsm8k|5": {
        "acc": 0.6853677028051555,
        "acc_stderr": 0.01279103722733604
    }
}
```
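
## 💻 Usage

A minimal sketch for running the merged model with 🤗 Transformers, assuming the repository id `mychen76/mistral-7b-merged-slerp` (inferred from the leaderboard links above) and that the tokenizer ships a chat template:

```python
import torch
from transformers import AutoTokenizer, pipeline

model_id = "mychen76/mistral-7b-merged-slerp"  # inferred repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)

messages = [{"role": "user", "content": "What is a large language model?"}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

pipe = pipeline(
    "text-generation",
    model=model_id,
    torch_dtype=torch.bfloat16,  # matches the merge dtype above
    device_map="auto",
)
out = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.95)
print(out[0]["generated_text"])
```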