{
  "best_metric": 0.7248261570930481,
  "best_model_checkpoint": "./output_v2/7b_cluster011_Nous-Hermes-llama-2-7b_partitioned_v3_standardized_011/checkpoint-800",
  "epoch": 1.8507807981492193,
  "global_step": 800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 0.0002,
      "loss": 0.7957,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0002,
      "loss": 0.718,
      "step": 20
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0002,
      "loss": 0.73,
      "step": 30
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0002,
      "loss": 0.7826,
      "step": 40
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0002,
      "loss": 0.7013,
      "step": 50
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0002,
      "loss": 0.7353,
      "step": 60
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0002,
      "loss": 0.6666,
      "step": 70
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0002,
      "loss": 0.7649,
      "step": 80
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0002,
      "loss": 0.7018,
      "step": 90
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0002,
      "loss": 0.7173,
      "step": 100
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0002,
      "loss": 0.7857,
      "step": 110
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0002,
      "loss": 0.7154,
      "step": 120
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0002,
      "loss": 0.7485,
      "step": 130
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0002,
      "loss": 0.7114,
      "step": 140
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0002,
      "loss": 0.7333,
      "step": 150
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0002,
      "loss": 0.6549,
      "step": 160
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0002,
      "loss": 0.6765,
      "step": 170
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0002,
      "loss": 0.677,
      "step": 180
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0002,
      "loss": 0.6763,
      "step": 190
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0002,
      "loss": 0.6638,
      "step": 200
    },
    {
      "epoch": 0.46,
      "eval_loss": 0.7327473163604736,
      "eval_runtime": 246.3779,
      "eval_samples_per_second": 4.059,
      "eval_steps_per_second": 2.029,
      "step": 200
    },
    {
      "epoch": 0.46,
      "mmlu_eval_accuracy": 0.4592376175825003,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.2727272727272727,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.4,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_geography": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.7333333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.5454545454545454,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.72,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.7093023255813954,
      "mmlu_eval_accuracy_moral_disputes": 0.39473684210526316,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
      "mmlu_eval_accuracy_philosophy": 0.38235294117647056,
      "mmlu_eval_accuracy_prehistory": 0.45714285714285713,
      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
      "mmlu_eval_accuracy_professional_law": 0.34705882352941175,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.391304347826087,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.7272727272727273,
      "mmlu_eval_accuracy_us_foreign_policy": 0.45454545454545453,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.173365458184683,
      "step": 200
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0002,
      "loss": 0.6849,
      "step": 210
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0002,
      "loss": 0.7275,
      "step": 220
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0002,
      "loss": 0.6976,
      "step": 230
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0002,
      "loss": 0.6896,
      "step": 240
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0002,
      "loss": 0.6831,
      "step": 250
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0002,
      "loss": 0.8049,
      "step": 260
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0002,
      "loss": 0.6878,
      "step": 270
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0002,
      "loss": 0.6679,
      "step": 280
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0002,
      "loss": 0.6808,
      "step": 290
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0002,
      "loss": 0.7648,
      "step": 300
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0002,
      "loss": 0.7605,
      "step": 310
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0002,
      "loss": 0.7504,
      "step": 320
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0002,
      "loss": 0.7853,
      "step": 330
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0002,
      "loss": 0.7272,
      "step": 340
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0002,
      "loss": 0.6934,
      "step": 350
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.0002,
      "loss": 0.7053,
      "step": 360
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0002,
      "loss": 0.7487,
      "step": 370
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0002,
      "loss": 0.668,
      "step": 380
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0002,
      "loss": 0.6899,
      "step": 390
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0002,
      "loss": 0.684,
      "step": 400
    },
    {
      "epoch": 0.93,
      "eval_loss": 0.7251922488212585,
      "eval_runtime": 246.5381,
      "eval_samples_per_second": 4.056,
      "eval_steps_per_second": 2.028,
      "step": 400
    },
    {
      "epoch": 0.93,
      "mmlu_eval_accuracy": 0.46184696708834644,
      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.2727272727272727,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.2727272727272727,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.5,
      "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
      "mmlu_eval_accuracy_formal_logic": 0.35714285714285715,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.75,
      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.5454545454545454,
      "mmlu_eval_accuracy_marketing": 0.72,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.5454545454545454,
      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
      "mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
      "mmlu_eval_accuracy_professional_law": 0.34705882352941175,
      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
      "mmlu_eval_accuracy_professional_psychology": 0.4057971014492754,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1016935974982016,
      "step": 400
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0002,
      "loss": 0.7535,
      "step": 410
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0002,
      "loss": 0.707,
      "step": 420
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0002,
      "loss": 0.7077,
      "step": 430
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0002,
      "loss": 0.6389,
      "step": 440
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002,
      "loss": 0.6701,
      "step": 450
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0002,
      "loss": 0.6462,
      "step": 460
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0002,
      "loss": 0.6421,
      "step": 470
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0002,
      "loss": 0.6822,
      "step": 480
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0002,
      "loss": 0.5916,
      "step": 490
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0002,
      "loss": 0.7141,
      "step": 500
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.0002,
      "loss": 0.679,
      "step": 510
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.0002,
      "loss": 0.5723,
      "step": 520
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0002,
      "loss": 0.6451,
      "step": 530
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002,
      "loss": 0.6802,
      "step": 540
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.0002,
      "loss": 0.5868,
      "step": 550
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.0002,
      "loss": 0.6386,
      "step": 560
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0002,
      "loss": 0.5967,
      "step": 570
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0002,
      "loss": 0.618,
      "step": 580
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.0002,
      "loss": 0.6294,
      "step": 590
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.0002,
      "loss": 0.6417,
      "step": 600
    },
    {
      "epoch": 1.39,
      "eval_loss": 0.7272388935089111,
      "eval_runtime": 247.55,
      "eval_samples_per_second": 4.04,
      "eval_steps_per_second": 2.02,
      "step": 600
    },
    {
      "epoch": 1.39,
      "mmlu_eval_accuracy": 0.4580576219014606,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.5,
      "mmlu_eval_accuracy_college_chemistry": 0.25,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.3181818181818182,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.4,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
      "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.5238095238095238,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
      "mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.7,
      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.2727272727272727,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.45454545454545453,
      "mmlu_eval_accuracy_marketing": 0.72,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
      "mmlu_eval_accuracy_professional_law": 0.3411764705882353,
      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
      "mmlu_eval_accuracy_professional_psychology": 0.42028985507246375,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.5555555555555556,
      "mmlu_eval_accuracy_sociology": 0.5454545454545454,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.069228487308125,
      "step": 600
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.0002,
      "loss": 0.6904,
      "step": 610
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0002,
      "loss": 0.6646,
      "step": 620
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.0002,
      "loss": 0.6827,
      "step": 630
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.0002,
      "loss": 0.6825,
      "step": 640
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.0002,
      "loss": 0.6406,
      "step": 650
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.0002,
      "loss": 0.6767,
      "step": 660
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.0002,
      "loss": 0.6222,
      "step": 670
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.0002,
      "loss": 0.5656,
      "step": 680
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0002,
      "loss": 0.6281,
      "step": 690
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.0002,
      "loss": 0.6449,
      "step": 700
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0002,
      "loss": 0.6087,
      "step": 710
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.0002,
      "loss": 0.6179,
      "step": 720
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.0002,
      "loss": 0.596,
      "step": 730
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.0002,
      "loss": 0.6577,
      "step": 740
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.0002,
      "loss": 0.65,
      "step": 750
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.0002,
      "loss": 0.5936,
      "step": 760
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.0002,
      "loss": 0.65,
      "step": 770
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.0002,
      "loss": 0.5787,
      "step": 780
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.0002,
      "loss": 0.6082,
      "step": 790
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.0002,
      "loss": 0.6129,
      "step": 800
    },
    {
      "epoch": 1.85,
      "eval_loss": 0.7248261570930481,
      "eval_runtime": 246.8878,
      "eval_samples_per_second": 4.05,
      "eval_steps_per_second": 2.025,
      "step": 800
    },
    {
      "epoch": 1.85,
      "mmlu_eval_accuracy": 0.4610550473708825,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.5,
      "mmlu_eval_accuracy_college_chemistry": 0.25,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.3181818181818182,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.18181818181818182,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
      "mmlu_eval_accuracy_high_school_geography": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.6833333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.6153846153846154,
      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.45454545454545453,
      "mmlu_eval_accuracy_marketing": 0.76,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
      "mmlu_eval_accuracy_moral_disputes": 0.42105263157894735,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.5151515151515151,
      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
      "mmlu_eval_accuracy_prehistory": 0.42857142857142855,
      "mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
      "mmlu_eval_accuracy_professional_law": 0.3411764705882353,
      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
      "mmlu_eval_accuracy_professional_psychology": 0.42028985507246375,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.5555555555555556,
      "mmlu_eval_accuracy_sociology": 0.5454545454545454,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.0681800356538427,
      "step": 800
    }
  ],
  "max_steps": 5000,
  "num_train_epochs": 12,
  "total_flos": 2.27317833196757e+17,
  "trial_name": null,
  "trial_params": null
}