diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,25345 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 4.0, + "eval_steps": 0, + "global_step": 3616, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0011061946902654867, + "grad_norm": 0.07275390625, + "learning_rate": 9.997234513274337e-06, + "loss": 1.3771, + "step": 1 + }, + { + "epoch": 0.0022123893805309734, + "grad_norm": 0.07080078125, + "learning_rate": 9.994469026548673e-06, + "loss": 1.3441, + "step": 2 + }, + { + "epoch": 0.00331858407079646, + "grad_norm": 0.07177734375, + "learning_rate": 9.99170353982301e-06, + "loss": 1.2924, + "step": 3 + }, + { + "epoch": 0.004424778761061947, + "grad_norm": 0.0751953125, + "learning_rate": 9.988938053097345e-06, + "loss": 1.4005, + "step": 4 + }, + { + "epoch": 0.0055309734513274336, + "grad_norm": 0.0751953125, + "learning_rate": 9.986172566371683e-06, + "loss": 1.392, + "step": 5 + }, + { + "epoch": 0.00663716814159292, + "grad_norm": 0.07666015625, + "learning_rate": 9.983407079646018e-06, + "loss": 1.3735, + "step": 6 + }, + { + "epoch": 0.007743362831858407, + "grad_norm": 0.07568359375, + "learning_rate": 9.980641592920355e-06, + "loss": 1.2995, + "step": 7 + }, + { + "epoch": 0.008849557522123894, + "grad_norm": 0.07861328125, + "learning_rate": 9.977876106194692e-06, + "loss": 1.3195, + "step": 8 + }, + { + "epoch": 0.00995575221238938, + "grad_norm": 0.08203125, + "learning_rate": 9.975110619469028e-06, + "loss": 1.2597, + "step": 9 + }, + { + "epoch": 0.011061946902654867, + "grad_norm": 0.08251953125, + "learning_rate": 9.972345132743364e-06, + "loss": 1.3033, + "step": 10 + }, + { + "epoch": 0.012168141592920354, + "grad_norm": 0.0927734375, + "learning_rate": 9.9695796460177e-06, + "loss": 1.4118, + "step": 11 + }, + { + "epoch": 0.01327433628318584, + "grad_norm": 0.09619140625, + "learning_rate": 9.966814159292036e-06, + "loss": 1.3647, + "step": 12 + }, + { + "epoch": 0.014380530973451327, + "grad_norm": 0.1015625, + "learning_rate": 9.964048672566372e-06, + "loss": 1.5081, + "step": 13 + }, + { + "epoch": 0.015486725663716814, + "grad_norm": 0.111328125, + "learning_rate": 9.961283185840708e-06, + "loss": 1.488, + "step": 14 + }, + { + "epoch": 0.016592920353982302, + "grad_norm": 0.1015625, + "learning_rate": 9.958517699115044e-06, + "loss": 1.342, + "step": 15 + }, + { + "epoch": 0.017699115044247787, + "grad_norm": 0.1005859375, + "learning_rate": 9.95575221238938e-06, + "loss": 1.325, + "step": 16 + }, + { + "epoch": 0.018805309734513276, + "grad_norm": 0.1044921875, + "learning_rate": 9.952986725663718e-06, + "loss": 1.2776, + "step": 17 + }, + { + "epoch": 0.01991150442477876, + "grad_norm": 0.11376953125, + "learning_rate": 9.950221238938054e-06, + "loss": 1.3764, + "step": 18 + }, + { + "epoch": 0.02101769911504425, + "grad_norm": 0.1220703125, + "learning_rate": 9.94745575221239e-06, + "loss": 1.3939, + "step": 19 + }, + { + "epoch": 0.022123893805309734, + "grad_norm": 0.134765625, + "learning_rate": 9.944690265486727e-06, + "loss": 1.2943, + "step": 20 + }, + { + "epoch": 0.023230088495575223, + "grad_norm": 0.1259765625, + "learning_rate": 9.941924778761063e-06, + "loss": 1.3372, + "step": 21 + }, + { + "epoch": 0.024336283185840708, + "grad_norm": 0.12109375, + "learning_rate": 9.939159292035399e-06, + "loss": 1.289, + "step": 22 + }, + { + "epoch": 0.025442477876106196, 
+ "grad_norm": 0.1259765625, + "learning_rate": 9.936393805309735e-06, + "loss": 1.2865, + "step": 23 + }, + { + "epoch": 0.02654867256637168, + "grad_norm": 0.1396484375, + "learning_rate": 9.933628318584071e-06, + "loss": 1.4118, + "step": 24 + }, + { + "epoch": 0.02765486725663717, + "grad_norm": 0.1376953125, + "learning_rate": 9.930862831858407e-06, + "loss": 1.3204, + "step": 25 + }, + { + "epoch": 0.028761061946902654, + "grad_norm": 0.146484375, + "learning_rate": 9.928097345132745e-06, + "loss": 1.4497, + "step": 26 + }, + { + "epoch": 0.029867256637168143, + "grad_norm": 0.158203125, + "learning_rate": 9.92533185840708e-06, + "loss": 1.4467, + "step": 27 + }, + { + "epoch": 0.030973451327433628, + "grad_norm": 0.1484375, + "learning_rate": 9.922566371681417e-06, + "loss": 1.3372, + "step": 28 + }, + { + "epoch": 0.032079646017699116, + "grad_norm": 0.1611328125, + "learning_rate": 9.919800884955752e-06, + "loss": 1.3878, + "step": 29 + }, + { + "epoch": 0.033185840707964605, + "grad_norm": 0.15625, + "learning_rate": 9.91703539823009e-06, + "loss": 1.3868, + "step": 30 + }, + { + "epoch": 0.034292035398230086, + "grad_norm": 0.1513671875, + "learning_rate": 9.914269911504426e-06, + "loss": 1.2612, + "step": 31 + }, + { + "epoch": 0.035398230088495575, + "grad_norm": 0.1591796875, + "learning_rate": 9.911504424778762e-06, + "loss": 1.3149, + "step": 32 + }, + { + "epoch": 0.03650442477876106, + "grad_norm": 0.177734375, + "learning_rate": 9.908738938053098e-06, + "loss": 1.4568, + "step": 33 + }, + { + "epoch": 0.03761061946902655, + "grad_norm": 0.1669921875, + "learning_rate": 9.905973451327434e-06, + "loss": 1.3127, + "step": 34 + }, + { + "epoch": 0.03871681415929203, + "grad_norm": 0.2001953125, + "learning_rate": 9.90320796460177e-06, + "loss": 1.249, + "step": 35 + }, + { + "epoch": 0.03982300884955752, + "grad_norm": 0.16796875, + "learning_rate": 9.900442477876106e-06, + "loss": 1.2219, + "step": 36 + }, + { + "epoch": 0.04092920353982301, + "grad_norm": 0.171875, + "learning_rate": 9.897676991150443e-06, + "loss": 1.3407, + "step": 37 + }, + { + "epoch": 0.0420353982300885, + "grad_norm": 0.1806640625, + "learning_rate": 9.89491150442478e-06, + "loss": 1.315, + "step": 38 + }, + { + "epoch": 0.04314159292035398, + "grad_norm": 0.1923828125, + "learning_rate": 9.892146017699117e-06, + "loss": 1.3925, + "step": 39 + }, + { + "epoch": 0.04424778761061947, + "grad_norm": 0.1767578125, + "learning_rate": 9.889380530973453e-06, + "loss": 1.2886, + "step": 40 + }, + { + "epoch": 0.04535398230088496, + "grad_norm": 0.171875, + "learning_rate": 9.886615044247789e-06, + "loss": 1.2282, + "step": 41 + }, + { + "epoch": 0.046460176991150445, + "grad_norm": 0.185546875, + "learning_rate": 9.883849557522125e-06, + "loss": 1.3511, + "step": 42 + }, + { + "epoch": 0.04756637168141593, + "grad_norm": 0.1845703125, + "learning_rate": 9.881084070796461e-06, + "loss": 1.2726, + "step": 43 + }, + { + "epoch": 0.048672566371681415, + "grad_norm": 0.1884765625, + "learning_rate": 9.878318584070797e-06, + "loss": 1.3065, + "step": 44 + }, + { + "epoch": 0.049778761061946904, + "grad_norm": 0.189453125, + "learning_rate": 9.875553097345133e-06, + "loss": 1.313, + "step": 45 + }, + { + "epoch": 0.05088495575221239, + "grad_norm": 0.1953125, + "learning_rate": 9.87278761061947e-06, + "loss": 1.3387, + "step": 46 + }, + { + "epoch": 0.051991150442477874, + "grad_norm": 0.1962890625, + "learning_rate": 9.870022123893807e-06, + "loss": 1.3307, + "step": 47 + }, + { + "epoch": 0.05309734513274336, + 
"grad_norm": 0.185546875, + "learning_rate": 9.867256637168142e-06, + "loss": 1.255, + "step": 48 + }, + { + "epoch": 0.05420353982300885, + "grad_norm": 0.1806640625, + "learning_rate": 9.86449115044248e-06, + "loss": 1.2238, + "step": 49 + }, + { + "epoch": 0.05530973451327434, + "grad_norm": 0.2021484375, + "learning_rate": 9.861725663716814e-06, + "loss": 1.2973, + "step": 50 + }, + { + "epoch": 0.05641592920353982, + "grad_norm": 0.189453125, + "learning_rate": 9.858960176991152e-06, + "loss": 1.2764, + "step": 51 + }, + { + "epoch": 0.05752212389380531, + "grad_norm": 0.1650390625, + "learning_rate": 9.856194690265488e-06, + "loss": 1.1427, + "step": 52 + }, + { + "epoch": 0.0586283185840708, + "grad_norm": 0.1982421875, + "learning_rate": 9.853429203539824e-06, + "loss": 1.3943, + "step": 53 + }, + { + "epoch": 0.059734513274336286, + "grad_norm": 0.1845703125, + "learning_rate": 9.85066371681416e-06, + "loss": 1.228, + "step": 54 + }, + { + "epoch": 0.06084070796460177, + "grad_norm": 0.19921875, + "learning_rate": 9.847898230088496e-06, + "loss": 1.3372, + "step": 55 + }, + { + "epoch": 0.061946902654867256, + "grad_norm": 0.1767578125, + "learning_rate": 9.845132743362832e-06, + "loss": 1.1932, + "step": 56 + }, + { + "epoch": 0.06305309734513274, + "grad_norm": 0.185546875, + "learning_rate": 9.842367256637168e-06, + "loss": 1.2614, + "step": 57 + }, + { + "epoch": 0.06415929203539823, + "grad_norm": 0.1904296875, + "learning_rate": 9.839601769911505e-06, + "loss": 1.3055, + "step": 58 + }, + { + "epoch": 0.06526548672566372, + "grad_norm": 0.18359375, + "learning_rate": 9.83683628318584e-06, + "loss": 1.2492, + "step": 59 + }, + { + "epoch": 0.06637168141592921, + "grad_norm": 0.193359375, + "learning_rate": 9.834070796460179e-06, + "loss": 1.2859, + "step": 60 + }, + { + "epoch": 0.06747787610619468, + "grad_norm": 0.18359375, + "learning_rate": 9.831305309734515e-06, + "loss": 1.2465, + "step": 61 + }, + { + "epoch": 0.06858407079646017, + "grad_norm": 0.1875, + "learning_rate": 9.82853982300885e-06, + "loss": 1.2437, + "step": 62 + }, + { + "epoch": 0.06969026548672566, + "grad_norm": 0.1875, + "learning_rate": 9.825774336283187e-06, + "loss": 1.2626, + "step": 63 + }, + { + "epoch": 0.07079646017699115, + "grad_norm": 0.181640625, + "learning_rate": 9.823008849557523e-06, + "loss": 1.2529, + "step": 64 + }, + { + "epoch": 0.07190265486725664, + "grad_norm": 0.1689453125, + "learning_rate": 9.82024336283186e-06, + "loss": 1.171, + "step": 65 + }, + { + "epoch": 0.07300884955752213, + "grad_norm": 0.185546875, + "learning_rate": 9.817477876106195e-06, + "loss": 1.2666, + "step": 66 + }, + { + "epoch": 0.07411504424778761, + "grad_norm": 0.1875, + "learning_rate": 9.814712389380531e-06, + "loss": 1.2449, + "step": 67 + }, + { + "epoch": 0.0752212389380531, + "grad_norm": 0.19140625, + "learning_rate": 9.81194690265487e-06, + "loss": 1.2663, + "step": 68 + }, + { + "epoch": 0.07632743362831858, + "grad_norm": 0.1767578125, + "learning_rate": 9.809181415929204e-06, + "loss": 1.2034, + "step": 69 + }, + { + "epoch": 0.07743362831858407, + "grad_norm": 0.16015625, + "learning_rate": 9.806415929203541e-06, + "loss": 1.1449, + "step": 70 + }, + { + "epoch": 0.07853982300884955, + "grad_norm": 0.189453125, + "learning_rate": 9.803650442477876e-06, + "loss": 1.278, + "step": 71 + }, + { + "epoch": 0.07964601769911504, + "grad_norm": 0.1650390625, + "learning_rate": 9.800884955752214e-06, + "loss": 1.1732, + "step": 72 + }, + { + "epoch": 0.08075221238938053, + "grad_norm": 
0.1591796875, + "learning_rate": 9.79811946902655e-06, + "loss": 1.0929, + "step": 73 + }, + { + "epoch": 0.08185840707964602, + "grad_norm": 0.1591796875, + "learning_rate": 9.795353982300886e-06, + "loss": 1.1319, + "step": 74 + }, + { + "epoch": 0.08296460176991151, + "grad_norm": 0.1865234375, + "learning_rate": 9.792588495575222e-06, + "loss": 1.2224, + "step": 75 + }, + { + "epoch": 0.084070796460177, + "grad_norm": 0.177734375, + "learning_rate": 9.789823008849558e-06, + "loss": 1.2126, + "step": 76 + }, + { + "epoch": 0.08517699115044247, + "grad_norm": 0.162109375, + "learning_rate": 9.787057522123894e-06, + "loss": 1.1341, + "step": 77 + }, + { + "epoch": 0.08628318584070796, + "grad_norm": 0.1865234375, + "learning_rate": 9.78429203539823e-06, + "loss": 1.1886, + "step": 78 + }, + { + "epoch": 0.08738938053097345, + "grad_norm": 0.177734375, + "learning_rate": 9.781526548672567e-06, + "loss": 1.2075, + "step": 79 + }, + { + "epoch": 0.08849557522123894, + "grad_norm": 0.1669921875, + "learning_rate": 9.778761061946903e-06, + "loss": 1.1555, + "step": 80 + }, + { + "epoch": 0.08960176991150443, + "grad_norm": 0.384765625, + "learning_rate": 9.77599557522124e-06, + "loss": 1.184, + "step": 81 + }, + { + "epoch": 0.09070796460176991, + "grad_norm": 0.146484375, + "learning_rate": 9.773230088495575e-06, + "loss": 1.1637, + "step": 82 + }, + { + "epoch": 0.0918141592920354, + "grad_norm": 0.150390625, + "learning_rate": 9.770464601769913e-06, + "loss": 1.0969, + "step": 83 + }, + { + "epoch": 0.09292035398230089, + "grad_norm": 0.158203125, + "learning_rate": 9.767699115044249e-06, + "loss": 1.1828, + "step": 84 + }, + { + "epoch": 0.09402654867256637, + "grad_norm": 0.15234375, + "learning_rate": 9.764933628318585e-06, + "loss": 1.1675, + "step": 85 + }, + { + "epoch": 0.09513274336283185, + "grad_norm": 0.142578125, + "learning_rate": 9.762168141592921e-06, + "loss": 1.0722, + "step": 86 + }, + { + "epoch": 0.09623893805309734, + "grad_norm": 0.1455078125, + "learning_rate": 9.759402654867257e-06, + "loss": 1.1185, + "step": 87 + }, + { + "epoch": 0.09734513274336283, + "grad_norm": 0.1630859375, + "learning_rate": 9.756637168141593e-06, + "loss": 1.243, + "step": 88 + }, + { + "epoch": 0.09845132743362832, + "grad_norm": 0.1484375, + "learning_rate": 9.75387168141593e-06, + "loss": 1.1313, + "step": 89 + }, + { + "epoch": 0.09955752212389381, + "grad_norm": 0.146484375, + "learning_rate": 9.751106194690266e-06, + "loss": 1.1367, + "step": 90 + }, + { + "epoch": 0.1006637168141593, + "grad_norm": 0.12890625, + "learning_rate": 9.748340707964604e-06, + "loss": 1.0047, + "step": 91 + }, + { + "epoch": 0.10176991150442478, + "grad_norm": 0.1494140625, + "learning_rate": 9.745575221238938e-06, + "loss": 1.1659, + "step": 92 + }, + { + "epoch": 0.10287610619469026, + "grad_norm": 0.146484375, + "learning_rate": 9.742809734513276e-06, + "loss": 1.1399, + "step": 93 + }, + { + "epoch": 0.10398230088495575, + "grad_norm": 0.1435546875, + "learning_rate": 9.740044247787612e-06, + "loss": 1.12, + "step": 94 + }, + { + "epoch": 0.10508849557522124, + "grad_norm": 0.1376953125, + "learning_rate": 9.737278761061948e-06, + "loss": 1.1036, + "step": 95 + }, + { + "epoch": 0.10619469026548672, + "grad_norm": 0.134765625, + "learning_rate": 9.734513274336284e-06, + "loss": 1.1025, + "step": 96 + }, + { + "epoch": 0.10730088495575221, + "grad_norm": 0.1416015625, + "learning_rate": 9.73174778761062e-06, + "loss": 1.1097, + "step": 97 + }, + { + "epoch": 0.1084070796460177, + "grad_norm": 
0.12451171875, + "learning_rate": 9.728982300884956e-06, + "loss": 1.0528, + "step": 98 + }, + { + "epoch": 0.10951327433628319, + "grad_norm": 0.130859375, + "learning_rate": 9.726216814159293e-06, + "loss": 1.1009, + "step": 99 + }, + { + "epoch": 0.11061946902654868, + "grad_norm": 0.1279296875, + "learning_rate": 9.72345132743363e-06, + "loss": 1.1063, + "step": 100 + }, + { + "epoch": 0.11172566371681415, + "grad_norm": 0.12451171875, + "learning_rate": 9.720685840707965e-06, + "loss": 1.0559, + "step": 101 + }, + { + "epoch": 0.11283185840707964, + "grad_norm": 0.162109375, + "learning_rate": 9.717920353982303e-06, + "loss": 1.2462, + "step": 102 + }, + { + "epoch": 0.11393805309734513, + "grad_norm": 0.134765625, + "learning_rate": 9.715154867256637e-06, + "loss": 1.1034, + "step": 103 + }, + { + "epoch": 0.11504424778761062, + "grad_norm": 0.140625, + "learning_rate": 9.712389380530975e-06, + "loss": 1.1577, + "step": 104 + }, + { + "epoch": 0.1161504424778761, + "grad_norm": 0.12451171875, + "learning_rate": 9.70962389380531e-06, + "loss": 1.0802, + "step": 105 + }, + { + "epoch": 0.1172566371681416, + "grad_norm": 0.12109375, + "learning_rate": 9.706858407079647e-06, + "loss": 1.0664, + "step": 106 + }, + { + "epoch": 0.11836283185840708, + "grad_norm": 0.12890625, + "learning_rate": 9.704092920353983e-06, + "loss": 1.0663, + "step": 107 + }, + { + "epoch": 0.11946902654867257, + "grad_norm": 0.12353515625, + "learning_rate": 9.70132743362832e-06, + "loss": 1.0972, + "step": 108 + }, + { + "epoch": 0.12057522123893805, + "grad_norm": 0.1337890625, + "learning_rate": 9.698561946902655e-06, + "loss": 1.1272, + "step": 109 + }, + { + "epoch": 0.12168141592920353, + "grad_norm": 0.130859375, + "learning_rate": 9.695796460176992e-06, + "loss": 1.06, + "step": 110 + }, + { + "epoch": 0.12278761061946902, + "grad_norm": 0.11376953125, + "learning_rate": 9.693030973451328e-06, + "loss": 1.054, + "step": 111 + }, + { + "epoch": 0.12389380530973451, + "grad_norm": 0.11865234375, + "learning_rate": 9.690265486725664e-06, + "loss": 1.1015, + "step": 112 + }, + { + "epoch": 0.125, + "grad_norm": 0.1416015625, + "learning_rate": 9.6875e-06, + "loss": 1.13, + "step": 113 + }, + { + "epoch": 0.1261061946902655, + "grad_norm": 0.130859375, + "learning_rate": 9.684734513274338e-06, + "loss": 1.1012, + "step": 114 + }, + { + "epoch": 0.12721238938053098, + "grad_norm": 0.11328125, + "learning_rate": 9.681969026548674e-06, + "loss": 1.0631, + "step": 115 + }, + { + "epoch": 0.12831858407079647, + "grad_norm": 0.12060546875, + "learning_rate": 9.67920353982301e-06, + "loss": 1.0734, + "step": 116 + }, + { + "epoch": 0.12942477876106195, + "grad_norm": 0.11669921875, + "learning_rate": 9.676438053097346e-06, + "loss": 1.0716, + "step": 117 + }, + { + "epoch": 0.13053097345132744, + "grad_norm": 0.11181640625, + "learning_rate": 9.673672566371682e-06, + "loss": 1.0607, + "step": 118 + }, + { + "epoch": 0.13163716814159293, + "grad_norm": 0.1318359375, + "learning_rate": 9.670907079646018e-06, + "loss": 1.0575, + "step": 119 + }, + { + "epoch": 0.13274336283185842, + "grad_norm": 0.142578125, + "learning_rate": 9.668141592920355e-06, + "loss": 1.0845, + "step": 120 + }, + { + "epoch": 0.1338495575221239, + "grad_norm": 0.1201171875, + "learning_rate": 9.665376106194692e-06, + "loss": 1.0203, + "step": 121 + }, + { + "epoch": 0.13495575221238937, + "grad_norm": 0.1240234375, + "learning_rate": 9.662610619469027e-06, + "loss": 1.0951, + "step": 122 + }, + { + "epoch": 0.13606194690265486, + "grad_norm": 
0.10888671875, + "learning_rate": 9.659845132743365e-06, + "loss": 1.0298, + "step": 123 + }, + { + "epoch": 0.13716814159292035, + "grad_norm": 0.109375, + "learning_rate": 9.657079646017699e-06, + "loss": 1.0098, + "step": 124 + }, + { + "epoch": 0.13827433628318583, + "grad_norm": 0.10888671875, + "learning_rate": 9.654314159292037e-06, + "loss": 0.9845, + "step": 125 + }, + { + "epoch": 0.13938053097345132, + "grad_norm": 0.119140625, + "learning_rate": 9.651548672566371e-06, + "loss": 1.0747, + "step": 126 + }, + { + "epoch": 0.1404867256637168, + "grad_norm": 0.1220703125, + "learning_rate": 9.648783185840709e-06, + "loss": 1.0437, + "step": 127 + }, + { + "epoch": 0.1415929203539823, + "grad_norm": 0.12158203125, + "learning_rate": 9.646017699115045e-06, + "loss": 1.0673, + "step": 128 + }, + { + "epoch": 0.1426991150442478, + "grad_norm": 0.126953125, + "learning_rate": 9.643252212389381e-06, + "loss": 1.0883, + "step": 129 + }, + { + "epoch": 0.14380530973451328, + "grad_norm": 0.10205078125, + "learning_rate": 9.640486725663718e-06, + "loss": 1.0187, + "step": 130 + }, + { + "epoch": 0.14491150442477876, + "grad_norm": 0.11474609375, + "learning_rate": 9.637721238938054e-06, + "loss": 0.9874, + "step": 131 + }, + { + "epoch": 0.14601769911504425, + "grad_norm": 0.11328125, + "learning_rate": 9.63495575221239e-06, + "loss": 1.0553, + "step": 132 + }, + { + "epoch": 0.14712389380530974, + "grad_norm": 0.0927734375, + "learning_rate": 9.632190265486726e-06, + "loss": 0.9862, + "step": 133 + }, + { + "epoch": 0.14823008849557523, + "grad_norm": 0.1279296875, + "learning_rate": 9.629424778761062e-06, + "loss": 1.0906, + "step": 134 + }, + { + "epoch": 0.14933628318584072, + "grad_norm": 0.10107421875, + "learning_rate": 9.626659292035398e-06, + "loss": 0.9907, + "step": 135 + }, + { + "epoch": 0.1504424778761062, + "grad_norm": 0.123046875, + "learning_rate": 9.623893805309736e-06, + "loss": 1.0838, + "step": 136 + }, + { + "epoch": 0.1515486725663717, + "grad_norm": 0.12060546875, + "learning_rate": 9.621128318584072e-06, + "loss": 1.0756, + "step": 137 + }, + { + "epoch": 0.15265486725663716, + "grad_norm": 0.10498046875, + "learning_rate": 9.618362831858408e-06, + "loss": 1.0093, + "step": 138 + }, + { + "epoch": 0.15376106194690264, + "grad_norm": 0.10107421875, + "learning_rate": 9.615597345132744e-06, + "loss": 0.9203, + "step": 139 + }, + { + "epoch": 0.15486725663716813, + "grad_norm": 0.10400390625, + "learning_rate": 9.61283185840708e-06, + "loss": 1.02, + "step": 140 + }, + { + "epoch": 0.15597345132743362, + "grad_norm": 0.10546875, + "learning_rate": 9.610066371681417e-06, + "loss": 0.9911, + "step": 141 + }, + { + "epoch": 0.1570796460176991, + "grad_norm": 0.1396484375, + "learning_rate": 9.607300884955753e-06, + "loss": 1.1058, + "step": 142 + }, + { + "epoch": 0.1581858407079646, + "grad_norm": 0.10595703125, + "learning_rate": 9.604535398230089e-06, + "loss": 1.007, + "step": 143 + }, + { + "epoch": 0.1592920353982301, + "grad_norm": 0.1357421875, + "learning_rate": 9.601769911504427e-06, + "loss": 1.1132, + "step": 144 + }, + { + "epoch": 0.16039823008849557, + "grad_norm": 0.107421875, + "learning_rate": 9.599004424778761e-06, + "loss": 1.0425, + "step": 145 + }, + { + "epoch": 0.16150442477876106, + "grad_norm": 0.11865234375, + "learning_rate": 9.596238938053099e-06, + "loss": 1.045, + "step": 146 + }, + { + "epoch": 0.16261061946902655, + "grad_norm": 0.1083984375, + "learning_rate": 9.593473451327433e-06, + "loss": 0.9718, + "step": 147 + }, + { + "epoch": 
0.16371681415929204, + "grad_norm": 0.0966796875, + "learning_rate": 9.590707964601771e-06, + "loss": 0.9833, + "step": 148 + }, + { + "epoch": 0.16482300884955753, + "grad_norm": 0.11328125, + "learning_rate": 9.587942477876107e-06, + "loss": 1.042, + "step": 149 + }, + { + "epoch": 0.16592920353982302, + "grad_norm": 0.09912109375, + "learning_rate": 9.585176991150443e-06, + "loss": 0.9759, + "step": 150 + }, + { + "epoch": 0.1670353982300885, + "grad_norm": 0.1103515625, + "learning_rate": 9.58241150442478e-06, + "loss": 1.0018, + "step": 151 + }, + { + "epoch": 0.168141592920354, + "grad_norm": 0.10400390625, + "learning_rate": 9.579646017699116e-06, + "loss": 0.9685, + "step": 152 + }, + { + "epoch": 0.16924778761061948, + "grad_norm": 0.10498046875, + "learning_rate": 9.576880530973452e-06, + "loss": 1.016, + "step": 153 + }, + { + "epoch": 0.17035398230088494, + "grad_norm": 0.11669921875, + "learning_rate": 9.574115044247788e-06, + "loss": 1.018, + "step": 154 + }, + { + "epoch": 0.17146017699115043, + "grad_norm": 0.103515625, + "learning_rate": 9.571349557522124e-06, + "loss": 0.9577, + "step": 155 + }, + { + "epoch": 0.17256637168141592, + "grad_norm": 0.09326171875, + "learning_rate": 9.56858407079646e-06, + "loss": 0.9991, + "step": 156 + }, + { + "epoch": 0.1736725663716814, + "grad_norm": 0.095703125, + "learning_rate": 9.565818584070798e-06, + "loss": 0.9418, + "step": 157 + }, + { + "epoch": 0.1747787610619469, + "grad_norm": 0.10693359375, + "learning_rate": 9.563053097345132e-06, + "loss": 0.9854, + "step": 158 + }, + { + "epoch": 0.17588495575221239, + "grad_norm": 0.10595703125, + "learning_rate": 9.56028761061947e-06, + "loss": 0.9415, + "step": 159 + }, + { + "epoch": 0.17699115044247787, + "grad_norm": 0.10693359375, + "learning_rate": 9.557522123893806e-06, + "loss": 0.9652, + "step": 160 + }, + { + "epoch": 0.17809734513274336, + "grad_norm": 0.1025390625, + "learning_rate": 9.554756637168143e-06, + "loss": 0.9492, + "step": 161 + }, + { + "epoch": 0.17920353982300885, + "grad_norm": 0.10546875, + "learning_rate": 9.551991150442479e-06, + "loss": 0.992, + "step": 162 + }, + { + "epoch": 0.18030973451327434, + "grad_norm": 0.130859375, + "learning_rate": 9.549225663716815e-06, + "loss": 1.0606, + "step": 163 + }, + { + "epoch": 0.18141592920353983, + "grad_norm": 0.09765625, + "learning_rate": 9.546460176991151e-06, + "loss": 0.9399, + "step": 164 + }, + { + "epoch": 0.18252212389380532, + "grad_norm": 0.11279296875, + "learning_rate": 9.543694690265487e-06, + "loss": 0.9926, + "step": 165 + }, + { + "epoch": 0.1836283185840708, + "grad_norm": 0.0966796875, + "learning_rate": 9.540929203539823e-06, + "loss": 0.9929, + "step": 166 + }, + { + "epoch": 0.1847345132743363, + "grad_norm": 0.11865234375, + "learning_rate": 9.538163716814161e-06, + "loss": 0.9743, + "step": 167 + }, + { + "epoch": 0.18584070796460178, + "grad_norm": 0.10009765625, + "learning_rate": 9.535398230088495e-06, + "loss": 0.9497, + "step": 168 + }, + { + "epoch": 0.18694690265486727, + "grad_norm": 0.12890625, + "learning_rate": 9.532632743362833e-06, + "loss": 0.9593, + "step": 169 + }, + { + "epoch": 0.18805309734513273, + "grad_norm": 0.0947265625, + "learning_rate": 9.52986725663717e-06, + "loss": 0.9257, + "step": 170 + }, + { + "epoch": 0.18915929203539822, + "grad_norm": 0.10302734375, + "learning_rate": 9.527101769911505e-06, + "loss": 0.9738, + "step": 171 + }, + { + "epoch": 0.1902654867256637, + "grad_norm": 0.11962890625, + "learning_rate": 9.524336283185842e-06, + "loss": 1.0004, + 
"step": 172 + }, + { + "epoch": 0.1913716814159292, + "grad_norm": 0.11279296875, + "learning_rate": 9.521570796460178e-06, + "loss": 0.987, + "step": 173 + }, + { + "epoch": 0.19247787610619468, + "grad_norm": 0.1259765625, + "learning_rate": 9.518805309734514e-06, + "loss": 0.9239, + "step": 174 + }, + { + "epoch": 0.19358407079646017, + "grad_norm": 0.09423828125, + "learning_rate": 9.51603982300885e-06, + "loss": 0.9693, + "step": 175 + }, + { + "epoch": 0.19469026548672566, + "grad_norm": 0.1181640625, + "learning_rate": 9.513274336283188e-06, + "loss": 1.0253, + "step": 176 + }, + { + "epoch": 0.19579646017699115, + "grad_norm": 0.1240234375, + "learning_rate": 9.510508849557522e-06, + "loss": 1.0259, + "step": 177 + }, + { + "epoch": 0.19690265486725664, + "grad_norm": 0.1328125, + "learning_rate": 9.50774336283186e-06, + "loss": 1.045, + "step": 178 + }, + { + "epoch": 0.19800884955752213, + "grad_norm": 0.103515625, + "learning_rate": 9.504977876106194e-06, + "loss": 0.939, + "step": 179 + }, + { + "epoch": 0.19911504424778761, + "grad_norm": 0.125, + "learning_rate": 9.502212389380532e-06, + "loss": 1.0235, + "step": 180 + }, + { + "epoch": 0.2002212389380531, + "grad_norm": 0.11083984375, + "learning_rate": 9.499446902654868e-06, + "loss": 0.9805, + "step": 181 + }, + { + "epoch": 0.2013274336283186, + "grad_norm": 0.103515625, + "learning_rate": 9.496681415929205e-06, + "loss": 0.9111, + "step": 182 + }, + { + "epoch": 0.20243362831858408, + "grad_norm": 0.1044921875, + "learning_rate": 9.49391592920354e-06, + "loss": 0.98, + "step": 183 + }, + { + "epoch": 0.20353982300884957, + "grad_norm": 0.103515625, + "learning_rate": 9.491150442477877e-06, + "loss": 0.97, + "step": 184 + }, + { + "epoch": 0.20464601769911506, + "grad_norm": 0.1103515625, + "learning_rate": 9.488384955752213e-06, + "loss": 1.0103, + "step": 185 + }, + { + "epoch": 0.20575221238938052, + "grad_norm": 0.099609375, + "learning_rate": 9.485619469026549e-06, + "loss": 0.9824, + "step": 186 + }, + { + "epoch": 0.206858407079646, + "grad_norm": 0.10205078125, + "learning_rate": 9.482853982300885e-06, + "loss": 0.9489, + "step": 187 + }, + { + "epoch": 0.2079646017699115, + "grad_norm": 0.09619140625, + "learning_rate": 9.480088495575221e-06, + "loss": 0.9815, + "step": 188 + }, + { + "epoch": 0.20907079646017698, + "grad_norm": 0.1015625, + "learning_rate": 9.477323008849557e-06, + "loss": 0.9694, + "step": 189 + }, + { + "epoch": 0.21017699115044247, + "grad_norm": 0.10546875, + "learning_rate": 9.474557522123895e-06, + "loss": 0.9392, + "step": 190 + }, + { + "epoch": 0.21128318584070796, + "grad_norm": 0.08984375, + "learning_rate": 9.471792035398231e-06, + "loss": 0.9339, + "step": 191 + }, + { + "epoch": 0.21238938053097345, + "grad_norm": 0.09326171875, + "learning_rate": 9.469026548672568e-06, + "loss": 0.9331, + "step": 192 + }, + { + "epoch": 0.21349557522123894, + "grad_norm": 0.091796875, + "learning_rate": 9.466261061946904e-06, + "loss": 0.898, + "step": 193 + }, + { + "epoch": 0.21460176991150443, + "grad_norm": 0.09033203125, + "learning_rate": 9.46349557522124e-06, + "loss": 0.8954, + "step": 194 + }, + { + "epoch": 0.2157079646017699, + "grad_norm": 0.09765625, + "learning_rate": 9.460730088495576e-06, + "loss": 0.933, + "step": 195 + }, + { + "epoch": 0.2168141592920354, + "grad_norm": 0.1025390625, + "learning_rate": 9.457964601769912e-06, + "loss": 0.9354, + "step": 196 + }, + { + "epoch": 0.2179203539823009, + "grad_norm": 0.109375, + "learning_rate": 9.45519911504425e-06, + "loss": 0.9548, 
+ "step": 197 + }, + { + "epoch": 0.21902654867256638, + "grad_norm": 0.0966796875, + "learning_rate": 9.452433628318584e-06, + "loss": 0.9656, + "step": 198 + }, + { + "epoch": 0.22013274336283187, + "grad_norm": 0.1103515625, + "learning_rate": 9.449668141592922e-06, + "loss": 1.0013, + "step": 199 + }, + { + "epoch": 0.22123893805309736, + "grad_norm": 0.09375, + "learning_rate": 9.446902654867257e-06, + "loss": 0.9249, + "step": 200 + }, + { + "epoch": 0.22234513274336284, + "grad_norm": 0.09033203125, + "learning_rate": 9.444137168141594e-06, + "loss": 0.887, + "step": 201 + }, + { + "epoch": 0.2234513274336283, + "grad_norm": 0.10400390625, + "learning_rate": 9.441371681415929e-06, + "loss": 0.9147, + "step": 202 + }, + { + "epoch": 0.2245575221238938, + "grad_norm": 0.09814453125, + "learning_rate": 9.438606194690267e-06, + "loss": 0.9193, + "step": 203 + }, + { + "epoch": 0.22566371681415928, + "grad_norm": 0.11181640625, + "learning_rate": 9.435840707964603e-06, + "loss": 0.9591, + "step": 204 + }, + { + "epoch": 0.22676991150442477, + "grad_norm": 0.1064453125, + "learning_rate": 9.433075221238939e-06, + "loss": 0.9349, + "step": 205 + }, + { + "epoch": 0.22787610619469026, + "grad_norm": 0.09619140625, + "learning_rate": 9.430309734513275e-06, + "loss": 0.936, + "step": 206 + }, + { + "epoch": 0.22898230088495575, + "grad_norm": 0.09619140625, + "learning_rate": 9.427544247787611e-06, + "loss": 0.9086, + "step": 207 + }, + { + "epoch": 0.23008849557522124, + "grad_norm": 0.0888671875, + "learning_rate": 9.424778761061947e-06, + "loss": 0.9234, + "step": 208 + }, + { + "epoch": 0.23119469026548672, + "grad_norm": 0.10888671875, + "learning_rate": 9.422013274336283e-06, + "loss": 0.9718, + "step": 209 + }, + { + "epoch": 0.2323008849557522, + "grad_norm": 0.09912109375, + "learning_rate": 9.41924778761062e-06, + "loss": 0.9336, + "step": 210 + }, + { + "epoch": 0.2334070796460177, + "grad_norm": 0.09619140625, + "learning_rate": 9.416482300884957e-06, + "loss": 0.878, + "step": 211 + }, + { + "epoch": 0.2345132743362832, + "grad_norm": 0.095703125, + "learning_rate": 9.413716814159293e-06, + "loss": 0.9051, + "step": 212 + }, + { + "epoch": 0.23561946902654868, + "grad_norm": 0.09716796875, + "learning_rate": 9.41095132743363e-06, + "loss": 0.8874, + "step": 213 + }, + { + "epoch": 0.23672566371681417, + "grad_norm": 0.125, + "learning_rate": 9.408185840707966e-06, + "loss": 0.9931, + "step": 214 + }, + { + "epoch": 0.23783185840707965, + "grad_norm": 0.09423828125, + "learning_rate": 9.405420353982302e-06, + "loss": 0.8991, + "step": 215 + }, + { + "epoch": 0.23893805309734514, + "grad_norm": 0.103515625, + "learning_rate": 9.402654867256638e-06, + "loss": 0.967, + "step": 216 + }, + { + "epoch": 0.24004424778761063, + "grad_norm": 0.10205078125, + "learning_rate": 9.399889380530974e-06, + "loss": 0.9487, + "step": 217 + }, + { + "epoch": 0.2411504424778761, + "grad_norm": 0.0966796875, + "learning_rate": 9.39712389380531e-06, + "loss": 0.8811, + "step": 218 + }, + { + "epoch": 0.24225663716814158, + "grad_norm": 0.09765625, + "learning_rate": 9.394358407079646e-06, + "loss": 0.9777, + "step": 219 + }, + { + "epoch": 0.24336283185840707, + "grad_norm": 0.126953125, + "learning_rate": 9.391592920353984e-06, + "loss": 0.9486, + "step": 220 + }, + { + "epoch": 0.24446902654867256, + "grad_norm": 0.10498046875, + "learning_rate": 9.388827433628319e-06, + "loss": 0.9567, + "step": 221 + }, + { + "epoch": 0.24557522123893805, + "grad_norm": 0.091796875, + "learning_rate": 
9.386061946902656e-06, + "loss": 0.9298, + "step": 222 + }, + { + "epoch": 0.24668141592920353, + "grad_norm": 0.080078125, + "learning_rate": 9.38329646017699e-06, + "loss": 0.8418, + "step": 223 + }, + { + "epoch": 0.24778761061946902, + "grad_norm": 0.0966796875, + "learning_rate": 9.380530973451329e-06, + "loss": 0.9089, + "step": 224 + }, + { + "epoch": 0.2488938053097345, + "grad_norm": 0.10205078125, + "learning_rate": 9.377765486725665e-06, + "loss": 0.9455, + "step": 225 + }, + { + "epoch": 0.25, + "grad_norm": 0.09814453125, + "learning_rate": 9.375000000000001e-06, + "loss": 0.9137, + "step": 226 + }, + { + "epoch": 0.25110619469026546, + "grad_norm": 0.123046875, + "learning_rate": 9.372234513274337e-06, + "loss": 0.9315, + "step": 227 + }, + { + "epoch": 0.252212389380531, + "grad_norm": 0.1220703125, + "learning_rate": 9.369469026548673e-06, + "loss": 0.9321, + "step": 228 + }, + { + "epoch": 0.25331858407079644, + "grad_norm": 0.1123046875, + "learning_rate": 9.36670353982301e-06, + "loss": 0.9468, + "step": 229 + }, + { + "epoch": 0.25442477876106195, + "grad_norm": 0.09228515625, + "learning_rate": 9.363938053097345e-06, + "loss": 0.9156, + "step": 230 + }, + { + "epoch": 0.2555309734513274, + "grad_norm": 0.09033203125, + "learning_rate": 9.361172566371683e-06, + "loss": 0.8977, + "step": 231 + }, + { + "epoch": 0.25663716814159293, + "grad_norm": 0.12158203125, + "learning_rate": 9.358407079646018e-06, + "loss": 0.9556, + "step": 232 + }, + { + "epoch": 0.2577433628318584, + "grad_norm": 0.10107421875, + "learning_rate": 9.355641592920355e-06, + "loss": 0.9364, + "step": 233 + }, + { + "epoch": 0.2588495575221239, + "grad_norm": 0.1015625, + "learning_rate": 9.352876106194692e-06, + "loss": 0.9383, + "step": 234 + }, + { + "epoch": 0.25995575221238937, + "grad_norm": 0.08984375, + "learning_rate": 9.350110619469028e-06, + "loss": 0.8955, + "step": 235 + }, + { + "epoch": 0.2610619469026549, + "grad_norm": 0.095703125, + "learning_rate": 9.347345132743364e-06, + "loss": 0.8868, + "step": 236 + }, + { + "epoch": 0.26216814159292035, + "grad_norm": 0.1181640625, + "learning_rate": 9.3445796460177e-06, + "loss": 0.9211, + "step": 237 + }, + { + "epoch": 0.26327433628318586, + "grad_norm": 0.10595703125, + "learning_rate": 9.341814159292036e-06, + "loss": 0.9522, + "step": 238 + }, + { + "epoch": 0.2643805309734513, + "grad_norm": 0.1103515625, + "learning_rate": 9.339048672566372e-06, + "loss": 0.8987, + "step": 239 + }, + { + "epoch": 0.26548672566371684, + "grad_norm": 0.10888671875, + "learning_rate": 9.336283185840708e-06, + "loss": 0.8889, + "step": 240 + }, + { + "epoch": 0.2665929203539823, + "grad_norm": 0.09375, + "learning_rate": 9.333517699115044e-06, + "loss": 0.8881, + "step": 241 + }, + { + "epoch": 0.2676991150442478, + "grad_norm": 0.09619140625, + "learning_rate": 9.33075221238938e-06, + "loss": 0.9134, + "step": 242 + }, + { + "epoch": 0.2688053097345133, + "grad_norm": 0.09814453125, + "learning_rate": 9.327986725663718e-06, + "loss": 0.9334, + "step": 243 + }, + { + "epoch": 0.26991150442477874, + "grad_norm": 0.10009765625, + "learning_rate": 9.325221238938053e-06, + "loss": 0.8891, + "step": 244 + }, + { + "epoch": 0.27101769911504425, + "grad_norm": 0.1826171875, + "learning_rate": 9.32245575221239e-06, + "loss": 0.9231, + "step": 245 + }, + { + "epoch": 0.2721238938053097, + "grad_norm": 0.0947265625, + "learning_rate": 9.319690265486727e-06, + "loss": 0.9043, + "step": 246 + }, + { + "epoch": 0.27323008849557523, + "grad_norm": 0.103515625, + 
"learning_rate": 9.316924778761063e-06, + "loss": 0.9049, + "step": 247 + }, + { + "epoch": 0.2743362831858407, + "grad_norm": 0.08740234375, + "learning_rate": 9.314159292035399e-06, + "loss": 0.8908, + "step": 248 + }, + { + "epoch": 0.2754424778761062, + "grad_norm": 0.109375, + "learning_rate": 9.311393805309735e-06, + "loss": 0.9422, + "step": 249 + }, + { + "epoch": 0.27654867256637167, + "grad_norm": 0.09521484375, + "learning_rate": 9.308628318584071e-06, + "loss": 0.8899, + "step": 250 + }, + { + "epoch": 0.2776548672566372, + "grad_norm": 0.11572265625, + "learning_rate": 9.305862831858407e-06, + "loss": 0.8903, + "step": 251 + }, + { + "epoch": 0.27876106194690264, + "grad_norm": 0.11962890625, + "learning_rate": 9.303097345132745e-06, + "loss": 0.9352, + "step": 252 + }, + { + "epoch": 0.27986725663716816, + "grad_norm": 0.10205078125, + "learning_rate": 9.30033185840708e-06, + "loss": 0.9226, + "step": 253 + }, + { + "epoch": 0.2809734513274336, + "grad_norm": 0.09716796875, + "learning_rate": 9.297566371681417e-06, + "loss": 0.8765, + "step": 254 + }, + { + "epoch": 0.28207964601769914, + "grad_norm": 0.0947265625, + "learning_rate": 9.294800884955752e-06, + "loss": 0.8705, + "step": 255 + }, + { + "epoch": 0.2831858407079646, + "grad_norm": 0.12060546875, + "learning_rate": 9.29203539823009e-06, + "loss": 0.9338, + "step": 256 + }, + { + "epoch": 0.2842920353982301, + "grad_norm": 0.1279296875, + "learning_rate": 9.289269911504426e-06, + "loss": 0.8753, + "step": 257 + }, + { + "epoch": 0.2853982300884956, + "grad_norm": 0.08203125, + "learning_rate": 9.286504424778762e-06, + "loss": 0.8488, + "step": 258 + }, + { + "epoch": 0.28650442477876104, + "grad_norm": 0.1103515625, + "learning_rate": 9.283738938053098e-06, + "loss": 0.9031, + "step": 259 + }, + { + "epoch": 0.28761061946902655, + "grad_norm": 0.10107421875, + "learning_rate": 9.280973451327434e-06, + "loss": 0.8952, + "step": 260 + }, + { + "epoch": 0.288716814159292, + "grad_norm": 0.09423828125, + "learning_rate": 9.27820796460177e-06, + "loss": 0.9301, + "step": 261 + }, + { + "epoch": 0.28982300884955753, + "grad_norm": 0.1123046875, + "learning_rate": 9.275442477876106e-06, + "loss": 0.9393, + "step": 262 + }, + { + "epoch": 0.290929203539823, + "grad_norm": 0.08984375, + "learning_rate": 9.272676991150443e-06, + "loss": 0.8957, + "step": 263 + }, + { + "epoch": 0.2920353982300885, + "grad_norm": 0.1376953125, + "learning_rate": 9.26991150442478e-06, + "loss": 0.981, + "step": 264 + }, + { + "epoch": 0.29314159292035397, + "grad_norm": 0.09521484375, + "learning_rate": 9.267146017699115e-06, + "loss": 0.8882, + "step": 265 + }, + { + "epoch": 0.2942477876106195, + "grad_norm": 0.1123046875, + "learning_rate": 9.264380530973453e-06, + "loss": 0.9222, + "step": 266 + }, + { + "epoch": 0.29535398230088494, + "grad_norm": 0.09033203125, + "learning_rate": 9.261615044247789e-06, + "loss": 0.8825, + "step": 267 + }, + { + "epoch": 0.29646017699115046, + "grad_norm": 0.1005859375, + "learning_rate": 9.258849557522125e-06, + "loss": 0.8807, + "step": 268 + }, + { + "epoch": 0.2975663716814159, + "grad_norm": 0.10498046875, + "learning_rate": 9.256084070796461e-06, + "loss": 0.8961, + "step": 269 + }, + { + "epoch": 0.29867256637168144, + "grad_norm": 0.08251953125, + "learning_rate": 9.253318584070797e-06, + "loss": 0.8597, + "step": 270 + }, + { + "epoch": 0.2997787610619469, + "grad_norm": 0.10791015625, + "learning_rate": 9.250553097345133e-06, + "loss": 0.9362, + "step": 271 + }, + { + "epoch": 0.3008849557522124, 
+ "grad_norm": 0.1025390625, + "learning_rate": 9.24778761061947e-06, + "loss": 0.8848, + "step": 272 + }, + { + "epoch": 0.3019911504424779, + "grad_norm": 0.0966796875, + "learning_rate": 9.245022123893807e-06, + "loss": 0.8649, + "step": 273 + }, + { + "epoch": 0.3030973451327434, + "grad_norm": 0.11328125, + "learning_rate": 9.242256637168142e-06, + "loss": 0.9254, + "step": 274 + }, + { + "epoch": 0.30420353982300885, + "grad_norm": 0.11474609375, + "learning_rate": 9.23949115044248e-06, + "loss": 0.8977, + "step": 275 + }, + { + "epoch": 0.3053097345132743, + "grad_norm": 0.1015625, + "learning_rate": 9.236725663716814e-06, + "loss": 0.8841, + "step": 276 + }, + { + "epoch": 0.3064159292035398, + "grad_norm": 0.0986328125, + "learning_rate": 9.233960176991152e-06, + "loss": 0.887, + "step": 277 + }, + { + "epoch": 0.3075221238938053, + "grad_norm": 0.09130859375, + "learning_rate": 9.231194690265486e-06, + "loss": 0.8988, + "step": 278 + }, + { + "epoch": 0.3086283185840708, + "grad_norm": 0.08935546875, + "learning_rate": 9.228429203539824e-06, + "loss": 0.8823, + "step": 279 + }, + { + "epoch": 0.30973451327433627, + "grad_norm": 0.1103515625, + "learning_rate": 9.22566371681416e-06, + "loss": 0.9079, + "step": 280 + }, + { + "epoch": 0.3108407079646018, + "grad_norm": 0.10009765625, + "learning_rate": 9.222898230088496e-06, + "loss": 0.8576, + "step": 281 + }, + { + "epoch": 0.31194690265486724, + "grad_norm": 0.091796875, + "learning_rate": 9.220132743362832e-06, + "loss": 0.8819, + "step": 282 + }, + { + "epoch": 0.31305309734513276, + "grad_norm": 0.10791015625, + "learning_rate": 9.217367256637169e-06, + "loss": 0.8883, + "step": 283 + }, + { + "epoch": 0.3141592920353982, + "grad_norm": 0.09423828125, + "learning_rate": 9.214601769911505e-06, + "loss": 0.8942, + "step": 284 + }, + { + "epoch": 0.31526548672566373, + "grad_norm": 0.095703125, + "learning_rate": 9.21183628318584e-06, + "loss": 0.8416, + "step": 285 + }, + { + "epoch": 0.3163716814159292, + "grad_norm": 0.1025390625, + "learning_rate": 9.209070796460179e-06, + "loss": 0.834, + "step": 286 + }, + { + "epoch": 0.3174778761061947, + "grad_norm": 0.10400390625, + "learning_rate": 9.206305309734515e-06, + "loss": 0.9043, + "step": 287 + }, + { + "epoch": 0.3185840707964602, + "grad_norm": 0.10498046875, + "learning_rate": 9.203539823008851e-06, + "loss": 0.9304, + "step": 288 + }, + { + "epoch": 0.3196902654867257, + "grad_norm": 0.09228515625, + "learning_rate": 9.200774336283187e-06, + "loss": 0.8676, + "step": 289 + }, + { + "epoch": 0.32079646017699115, + "grad_norm": 0.09716796875, + "learning_rate": 9.198008849557523e-06, + "loss": 0.8873, + "step": 290 + }, + { + "epoch": 0.3219026548672566, + "grad_norm": 0.0986328125, + "learning_rate": 9.19524336283186e-06, + "loss": 0.8884, + "step": 291 + }, + { + "epoch": 0.3230088495575221, + "grad_norm": 0.10986328125, + "learning_rate": 9.192477876106195e-06, + "loss": 0.9317, + "step": 292 + }, + { + "epoch": 0.3241150442477876, + "grad_norm": 0.10107421875, + "learning_rate": 9.189712389380531e-06, + "loss": 0.8607, + "step": 293 + }, + { + "epoch": 0.3252212389380531, + "grad_norm": 0.1123046875, + "learning_rate": 9.18694690265487e-06, + "loss": 0.9487, + "step": 294 + }, + { + "epoch": 0.32632743362831856, + "grad_norm": 0.09912109375, + "learning_rate": 9.184181415929204e-06, + "loss": 0.8641, + "step": 295 + }, + { + "epoch": 0.3274336283185841, + "grad_norm": 0.126953125, + "learning_rate": 9.181415929203542e-06, + "loss": 0.8736, + "step": 296 + }, + { + 
"epoch": 0.32853982300884954, + "grad_norm": 0.08984375, + "learning_rate": 9.178650442477876e-06, + "loss": 0.875, + "step": 297 + }, + { + "epoch": 0.32964601769911506, + "grad_norm": 0.087890625, + "learning_rate": 9.175884955752214e-06, + "loss": 0.8868, + "step": 298 + }, + { + "epoch": 0.3307522123893805, + "grad_norm": 0.09326171875, + "learning_rate": 9.173119469026548e-06, + "loss": 0.8791, + "step": 299 + }, + { + "epoch": 0.33185840707964603, + "grad_norm": 0.08935546875, + "learning_rate": 9.170353982300886e-06, + "loss": 0.886, + "step": 300 + }, + { + "epoch": 0.3329646017699115, + "grad_norm": 0.107421875, + "learning_rate": 9.167588495575222e-06, + "loss": 0.871, + "step": 301 + }, + { + "epoch": 0.334070796460177, + "grad_norm": 0.08984375, + "learning_rate": 9.164823008849558e-06, + "loss": 0.8676, + "step": 302 + }, + { + "epoch": 0.33517699115044247, + "grad_norm": 0.1123046875, + "learning_rate": 9.162057522123894e-06, + "loss": 0.9212, + "step": 303 + }, + { + "epoch": 0.336283185840708, + "grad_norm": 0.107421875, + "learning_rate": 9.15929203539823e-06, + "loss": 0.9535, + "step": 304 + }, + { + "epoch": 0.33738938053097345, + "grad_norm": 0.0927734375, + "learning_rate": 9.156526548672567e-06, + "loss": 0.8449, + "step": 305 + }, + { + "epoch": 0.33849557522123896, + "grad_norm": 0.09716796875, + "learning_rate": 9.153761061946903e-06, + "loss": 0.9084, + "step": 306 + }, + { + "epoch": 0.3396017699115044, + "grad_norm": 0.1025390625, + "learning_rate": 9.15099557522124e-06, + "loss": 0.8582, + "step": 307 + }, + { + "epoch": 0.3407079646017699, + "grad_norm": 0.109375, + "learning_rate": 9.148230088495575e-06, + "loss": 0.8501, + "step": 308 + }, + { + "epoch": 0.3418141592920354, + "grad_norm": 0.10791015625, + "learning_rate": 9.145464601769913e-06, + "loss": 0.8705, + "step": 309 + }, + { + "epoch": 0.34292035398230086, + "grad_norm": 0.095703125, + "learning_rate": 9.142699115044249e-06, + "loss": 0.8562, + "step": 310 + }, + { + "epoch": 0.3440265486725664, + "grad_norm": 0.12060546875, + "learning_rate": 9.139933628318585e-06, + "loss": 0.9101, + "step": 311 + }, + { + "epoch": 0.34513274336283184, + "grad_norm": 0.09716796875, + "learning_rate": 9.137168141592921e-06, + "loss": 0.8588, + "step": 312 + }, + { + "epoch": 0.34623893805309736, + "grad_norm": 0.09619140625, + "learning_rate": 9.134402654867257e-06, + "loss": 0.9012, + "step": 313 + }, + { + "epoch": 0.3473451327433628, + "grad_norm": 0.10009765625, + "learning_rate": 9.131637168141594e-06, + "loss": 0.8713, + "step": 314 + }, + { + "epoch": 0.34845132743362833, + "grad_norm": 0.109375, + "learning_rate": 9.12887168141593e-06, + "loss": 0.8954, + "step": 315 + }, + { + "epoch": 0.3495575221238938, + "grad_norm": 0.08837890625, + "learning_rate": 9.126106194690266e-06, + "loss": 0.8791, + "step": 316 + }, + { + "epoch": 0.3506637168141593, + "grad_norm": 0.08642578125, + "learning_rate": 9.123340707964604e-06, + "loss": 0.8327, + "step": 317 + }, + { + "epoch": 0.35176991150442477, + "grad_norm": 0.091796875, + "learning_rate": 9.120575221238938e-06, + "loss": 0.8861, + "step": 318 + }, + { + "epoch": 0.3528761061946903, + "grad_norm": 0.1337890625, + "learning_rate": 9.117809734513276e-06, + "loss": 0.9875, + "step": 319 + }, + { + "epoch": 0.35398230088495575, + "grad_norm": 0.1015625, + "learning_rate": 9.11504424778761e-06, + "loss": 0.9193, + "step": 320 + }, + { + "epoch": 0.35508849557522126, + "grad_norm": 0.1123046875, + "learning_rate": 9.112278761061948e-06, + "loss": 0.9456, + "step": 
321 + }, + { + "epoch": 0.3561946902654867, + "grad_norm": 0.103515625, + "learning_rate": 9.109513274336284e-06, + "loss": 0.8812, + "step": 322 + }, + { + "epoch": 0.3573008849557522, + "grad_norm": 0.09423828125, + "learning_rate": 9.10674778761062e-06, + "loss": 0.9022, + "step": 323 + }, + { + "epoch": 0.3584070796460177, + "grad_norm": 0.0859375, + "learning_rate": 9.103982300884956e-06, + "loss": 0.8448, + "step": 324 + }, + { + "epoch": 0.35951327433628316, + "grad_norm": 0.1123046875, + "learning_rate": 9.101216814159293e-06, + "loss": 0.8966, + "step": 325 + }, + { + "epoch": 0.3606194690265487, + "grad_norm": 0.0859375, + "learning_rate": 9.098451327433629e-06, + "loss": 0.8501, + "step": 326 + }, + { + "epoch": 0.36172566371681414, + "grad_norm": 0.10107421875, + "learning_rate": 9.095685840707965e-06, + "loss": 0.8781, + "step": 327 + }, + { + "epoch": 0.36283185840707965, + "grad_norm": 0.09521484375, + "learning_rate": 9.092920353982303e-06, + "loss": 0.8562, + "step": 328 + }, + { + "epoch": 0.3639380530973451, + "grad_norm": 0.0986328125, + "learning_rate": 9.090154867256637e-06, + "loss": 0.8992, + "step": 329 + }, + { + "epoch": 0.36504424778761063, + "grad_norm": 0.0927734375, + "learning_rate": 9.087389380530975e-06, + "loss": 0.8555, + "step": 330 + }, + { + "epoch": 0.3661504424778761, + "grad_norm": 0.0859375, + "learning_rate": 9.08462389380531e-06, + "loss": 0.8258, + "step": 331 + }, + { + "epoch": 0.3672566371681416, + "grad_norm": 0.140625, + "learning_rate": 9.081858407079647e-06, + "loss": 0.9688, + "step": 332 + }, + { + "epoch": 0.36836283185840707, + "grad_norm": 0.099609375, + "learning_rate": 9.079092920353983e-06, + "loss": 0.8857, + "step": 333 + }, + { + "epoch": 0.3694690265486726, + "grad_norm": 0.11376953125, + "learning_rate": 9.07632743362832e-06, + "loss": 0.9372, + "step": 334 + }, + { + "epoch": 0.37057522123893805, + "grad_norm": 0.1279296875, + "learning_rate": 9.073561946902656e-06, + "loss": 0.9147, + "step": 335 + }, + { + "epoch": 0.37168141592920356, + "grad_norm": 0.08349609375, + "learning_rate": 9.070796460176992e-06, + "loss": 0.8448, + "step": 336 + }, + { + "epoch": 0.372787610619469, + "grad_norm": 0.10595703125, + "learning_rate": 9.068030973451328e-06, + "loss": 0.8456, + "step": 337 + }, + { + "epoch": 0.37389380530973454, + "grad_norm": 0.09716796875, + "learning_rate": 9.065265486725664e-06, + "loss": 0.8809, + "step": 338 + }, + { + "epoch": 0.375, + "grad_norm": 0.0986328125, + "learning_rate": 9.0625e-06, + "loss": 0.8957, + "step": 339 + }, + { + "epoch": 0.37610619469026546, + "grad_norm": 0.08984375, + "learning_rate": 9.059734513274338e-06, + "loss": 0.8569, + "step": 340 + }, + { + "epoch": 0.377212389380531, + "grad_norm": 0.10791015625, + "learning_rate": 9.056969026548674e-06, + "loss": 0.853, + "step": 341 + }, + { + "epoch": 0.37831858407079644, + "grad_norm": 0.111328125, + "learning_rate": 9.05420353982301e-06, + "loss": 0.8779, + "step": 342 + }, + { + "epoch": 0.37942477876106195, + "grad_norm": 0.1123046875, + "learning_rate": 9.051438053097346e-06, + "loss": 0.9066, + "step": 343 + }, + { + "epoch": 0.3805309734513274, + "grad_norm": 0.115234375, + "learning_rate": 9.048672566371682e-06, + "loss": 0.8541, + "step": 344 + }, + { + "epoch": 0.38163716814159293, + "grad_norm": 0.1005859375, + "learning_rate": 9.045907079646019e-06, + "loss": 0.9062, + "step": 345 + }, + { + "epoch": 0.3827433628318584, + "grad_norm": 0.0859375, + "learning_rate": 9.043141592920355e-06, + "loss": 0.8774, + "step": 346 + }, + { 
+ "epoch": 0.3838495575221239, + "grad_norm": 0.0927734375, + "learning_rate": 9.04037610619469e-06, + "loss": 0.8294, + "step": 347 + }, + { + "epoch": 0.38495575221238937, + "grad_norm": 0.099609375, + "learning_rate": 9.037610619469027e-06, + "loss": 0.8268, + "step": 348 + }, + { + "epoch": 0.3860619469026549, + "grad_norm": 0.095703125, + "learning_rate": 9.034845132743365e-06, + "loss": 0.8794, + "step": 349 + }, + { + "epoch": 0.38716814159292035, + "grad_norm": 0.11376953125, + "learning_rate": 9.032079646017699e-06, + "loss": 0.8497, + "step": 350 + }, + { + "epoch": 0.38827433628318586, + "grad_norm": 0.09765625, + "learning_rate": 9.029314159292037e-06, + "loss": 0.8492, + "step": 351 + }, + { + "epoch": 0.3893805309734513, + "grad_norm": 0.09228515625, + "learning_rate": 9.026548672566371e-06, + "loss": 0.8651, + "step": 352 + }, + { + "epoch": 0.39048672566371684, + "grad_norm": 0.09619140625, + "learning_rate": 9.02378318584071e-06, + "loss": 0.8738, + "step": 353 + }, + { + "epoch": 0.3915929203539823, + "grad_norm": 0.1123046875, + "learning_rate": 9.021017699115044e-06, + "loss": 0.9052, + "step": 354 + }, + { + "epoch": 0.3926991150442478, + "grad_norm": 0.07861328125, + "learning_rate": 9.018252212389381e-06, + "loss": 0.8124, + "step": 355 + }, + { + "epoch": 0.3938053097345133, + "grad_norm": 0.08251953125, + "learning_rate": 9.015486725663718e-06, + "loss": 0.8442, + "step": 356 + }, + { + "epoch": 0.39491150442477874, + "grad_norm": 0.140625, + "learning_rate": 9.012721238938054e-06, + "loss": 0.8475, + "step": 357 + }, + { + "epoch": 0.39601769911504425, + "grad_norm": 0.09716796875, + "learning_rate": 9.00995575221239e-06, + "loss": 0.8577, + "step": 358 + }, + { + "epoch": 0.3971238938053097, + "grad_norm": 0.09765625, + "learning_rate": 9.007190265486726e-06, + "loss": 0.8789, + "step": 359 + }, + { + "epoch": 0.39823008849557523, + "grad_norm": 0.0888671875, + "learning_rate": 9.004424778761062e-06, + "loss": 0.7857, + "step": 360 + }, + { + "epoch": 0.3993362831858407, + "grad_norm": 0.0947265625, + "learning_rate": 9.001659292035398e-06, + "loss": 0.8318, + "step": 361 + }, + { + "epoch": 0.4004424778761062, + "grad_norm": 0.09521484375, + "learning_rate": 8.998893805309736e-06, + "loss": 0.8693, + "step": 362 + }, + { + "epoch": 0.40154867256637167, + "grad_norm": 0.10107421875, + "learning_rate": 8.996128318584072e-06, + "loss": 0.8363, + "step": 363 + }, + { + "epoch": 0.4026548672566372, + "grad_norm": 0.087890625, + "learning_rate": 8.993362831858408e-06, + "loss": 0.8348, + "step": 364 + }, + { + "epoch": 0.40376106194690264, + "grad_norm": 0.08544921875, + "learning_rate": 8.990597345132744e-06, + "loss": 0.8321, + "step": 365 + }, + { + "epoch": 0.40486725663716816, + "grad_norm": 0.10693359375, + "learning_rate": 8.98783185840708e-06, + "loss": 0.9001, + "step": 366 + }, + { + "epoch": 0.4059734513274336, + "grad_norm": 0.10400390625, + "learning_rate": 8.985066371681417e-06, + "loss": 0.8371, + "step": 367 + }, + { + "epoch": 0.40707964601769914, + "grad_norm": 0.083984375, + "learning_rate": 8.982300884955753e-06, + "loss": 0.8114, + "step": 368 + }, + { + "epoch": 0.4081858407079646, + "grad_norm": 0.09033203125, + "learning_rate": 8.979535398230089e-06, + "loss": 0.8845, + "step": 369 + }, + { + "epoch": 0.4092920353982301, + "grad_norm": 0.09375, + "learning_rate": 8.976769911504427e-06, + "loss": 0.8328, + "step": 370 + }, + { + "epoch": 0.4103982300884956, + "grad_norm": 0.08935546875, + "learning_rate": 8.974004424778761e-06, + "loss": 0.8149, 
+ "step": 371 + }, + { + "epoch": 0.41150442477876104, + "grad_norm": 0.10498046875, + "learning_rate": 8.971238938053099e-06, + "loss": 0.8859, + "step": 372 + }, + { + "epoch": 0.41261061946902655, + "grad_norm": 0.08837890625, + "learning_rate": 8.968473451327433e-06, + "loss": 0.8228, + "step": 373 + }, + { + "epoch": 0.413716814159292, + "grad_norm": 0.083984375, + "learning_rate": 8.965707964601771e-06, + "loss": 0.8487, + "step": 374 + }, + { + "epoch": 0.41482300884955753, + "grad_norm": 0.08544921875, + "learning_rate": 8.962942477876106e-06, + "loss": 0.7856, + "step": 375 + }, + { + "epoch": 0.415929203539823, + "grad_norm": 0.09716796875, + "learning_rate": 8.960176991150443e-06, + "loss": 0.8482, + "step": 376 + }, + { + "epoch": 0.4170353982300885, + "grad_norm": 0.0966796875, + "learning_rate": 8.95741150442478e-06, + "loss": 0.8308, + "step": 377 + }, + { + "epoch": 0.41814159292035397, + "grad_norm": 0.072265625, + "learning_rate": 8.954646017699116e-06, + "loss": 0.7822, + "step": 378 + }, + { + "epoch": 0.4192477876106195, + "grad_norm": 0.08837890625, + "learning_rate": 8.951880530973452e-06, + "loss": 0.8218, + "step": 379 + }, + { + "epoch": 0.42035398230088494, + "grad_norm": 0.0947265625, + "learning_rate": 8.949115044247788e-06, + "loss": 0.8155, + "step": 380 + }, + { + "epoch": 0.42146017699115046, + "grad_norm": 0.1015625, + "learning_rate": 8.946349557522124e-06, + "loss": 0.8144, + "step": 381 + }, + { + "epoch": 0.4225663716814159, + "grad_norm": 0.1083984375, + "learning_rate": 8.94358407079646e-06, + "loss": 0.8093, + "step": 382 + }, + { + "epoch": 0.42367256637168144, + "grad_norm": 0.09619140625, + "learning_rate": 8.940818584070798e-06, + "loss": 0.8792, + "step": 383 + }, + { + "epoch": 0.4247787610619469, + "grad_norm": 0.0947265625, + "learning_rate": 8.938053097345133e-06, + "loss": 0.8572, + "step": 384 + }, + { + "epoch": 0.4258849557522124, + "grad_norm": 0.095703125, + "learning_rate": 8.93528761061947e-06, + "loss": 0.823, + "step": 385 + }, + { + "epoch": 0.4269911504424779, + "grad_norm": 0.10205078125, + "learning_rate": 8.932522123893806e-06, + "loss": 0.8453, + "step": 386 + }, + { + "epoch": 0.4280973451327434, + "grad_norm": 0.10888671875, + "learning_rate": 8.929756637168143e-06, + "loss": 0.8318, + "step": 387 + }, + { + "epoch": 0.42920353982300885, + "grad_norm": 0.09326171875, + "learning_rate": 8.926991150442479e-06, + "loss": 0.8617, + "step": 388 + }, + { + "epoch": 0.4303097345132743, + "grad_norm": 0.0859375, + "learning_rate": 8.924225663716815e-06, + "loss": 0.8004, + "step": 389 + }, + { + "epoch": 0.4314159292035398, + "grad_norm": 0.09375, + "learning_rate": 8.921460176991151e-06, + "loss": 0.8504, + "step": 390 + }, + { + "epoch": 0.4325221238938053, + "grad_norm": 0.09765625, + "learning_rate": 8.918694690265487e-06, + "loss": 0.8461, + "step": 391 + }, + { + "epoch": 0.4336283185840708, + "grad_norm": 0.2470703125, + "learning_rate": 8.915929203539823e-06, + "loss": 0.7912, + "step": 392 + }, + { + "epoch": 0.43473451327433627, + "grad_norm": 0.1005859375, + "learning_rate": 8.913163716814161e-06, + "loss": 0.8467, + "step": 393 + }, + { + "epoch": 0.4358407079646018, + "grad_norm": 0.10009765625, + "learning_rate": 8.910398230088495e-06, + "loss": 0.8583, + "step": 394 + }, + { + "epoch": 0.43694690265486724, + "grad_norm": 0.107421875, + "learning_rate": 8.907632743362833e-06, + "loss": 0.8209, + "step": 395 + }, + { + "epoch": 0.43805309734513276, + "grad_norm": 0.09619140625, + "learning_rate": 8.90486725663717e-06, 
+ "loss": 0.8608, + "step": 396 + }, + { + "epoch": 0.4391592920353982, + "grad_norm": 0.09228515625, + "learning_rate": 8.902101769911506e-06, + "loss": 0.8511, + "step": 397 + }, + { + "epoch": 0.44026548672566373, + "grad_norm": 0.095703125, + "learning_rate": 8.899336283185842e-06, + "loss": 0.8158, + "step": 398 + }, + { + "epoch": 0.4413716814159292, + "grad_norm": 0.10302734375, + "learning_rate": 8.896570796460178e-06, + "loss": 0.8531, + "step": 399 + }, + { + "epoch": 0.4424778761061947, + "grad_norm": 0.10986328125, + "learning_rate": 8.893805309734514e-06, + "loss": 0.8699, + "step": 400 + }, + { + "epoch": 0.4435840707964602, + "grad_norm": 0.0908203125, + "learning_rate": 8.89103982300885e-06, + "loss": 0.8092, + "step": 401 + }, + { + "epoch": 0.4446902654867257, + "grad_norm": 0.08837890625, + "learning_rate": 8.888274336283186e-06, + "loss": 0.821, + "step": 402 + }, + { + "epoch": 0.44579646017699115, + "grad_norm": 0.09228515625, + "learning_rate": 8.885508849557522e-06, + "loss": 0.8509, + "step": 403 + }, + { + "epoch": 0.4469026548672566, + "grad_norm": 0.1064453125, + "learning_rate": 8.88274336283186e-06, + "loss": 0.8846, + "step": 404 + }, + { + "epoch": 0.4480088495575221, + "grad_norm": 0.10302734375, + "learning_rate": 8.879977876106195e-06, + "loss": 0.866, + "step": 405 + }, + { + "epoch": 0.4491150442477876, + "grad_norm": 0.09716796875, + "learning_rate": 8.877212389380532e-06, + "loss": 0.8918, + "step": 406 + }, + { + "epoch": 0.4502212389380531, + "grad_norm": 0.09814453125, + "learning_rate": 8.874446902654868e-06, + "loss": 0.8679, + "step": 407 + }, + { + "epoch": 0.45132743362831856, + "grad_norm": 0.10791015625, + "learning_rate": 8.871681415929205e-06, + "loss": 0.8862, + "step": 408 + }, + { + "epoch": 0.4524336283185841, + "grad_norm": 0.0810546875, + "learning_rate": 8.86891592920354e-06, + "loss": 0.8015, + "step": 409 + }, + { + "epoch": 0.45353982300884954, + "grad_norm": 0.099609375, + "learning_rate": 8.866150442477877e-06, + "loss": 0.8686, + "step": 410 + }, + { + "epoch": 0.45464601769911506, + "grad_norm": 0.095703125, + "learning_rate": 8.863384955752213e-06, + "loss": 0.8788, + "step": 411 + }, + { + "epoch": 0.4557522123893805, + "grad_norm": 0.0908203125, + "learning_rate": 8.860619469026549e-06, + "loss": 0.8277, + "step": 412 + }, + { + "epoch": 0.45685840707964603, + "grad_norm": 0.09423828125, + "learning_rate": 8.857853982300885e-06, + "loss": 0.8081, + "step": 413 + }, + { + "epoch": 0.4579646017699115, + "grad_norm": 0.1064453125, + "learning_rate": 8.855088495575221e-06, + "loss": 0.8373, + "step": 414 + }, + { + "epoch": 0.459070796460177, + "grad_norm": 0.10400390625, + "learning_rate": 8.852323008849557e-06, + "loss": 0.8426, + "step": 415 + }, + { + "epoch": 0.46017699115044247, + "grad_norm": 0.1396484375, + "learning_rate": 8.849557522123895e-06, + "loss": 0.8903, + "step": 416 + }, + { + "epoch": 0.461283185840708, + "grad_norm": 0.10205078125, + "learning_rate": 8.846792035398231e-06, + "loss": 0.8729, + "step": 417 + }, + { + "epoch": 0.46238938053097345, + "grad_norm": 0.09423828125, + "learning_rate": 8.844026548672568e-06, + "loss": 0.8694, + "step": 418 + }, + { + "epoch": 0.46349557522123896, + "grad_norm": 0.09033203125, + "learning_rate": 8.841261061946904e-06, + "loss": 0.8389, + "step": 419 + }, + { + "epoch": 0.4646017699115044, + "grad_norm": 0.0908203125, + "learning_rate": 8.83849557522124e-06, + "loss": 0.8533, + "step": 420 + }, + { + "epoch": 0.4657079646017699, + "grad_norm": 0.0810546875, + 
"learning_rate": 8.835730088495576e-06, + "loss": 0.8262, + "step": 421 + }, + { + "epoch": 0.4668141592920354, + "grad_norm": 0.11962890625, + "learning_rate": 8.832964601769912e-06, + "loss": 0.889, + "step": 422 + }, + { + "epoch": 0.46792035398230086, + "grad_norm": 0.09228515625, + "learning_rate": 8.830199115044248e-06, + "loss": 0.8181, + "step": 423 + }, + { + "epoch": 0.4690265486725664, + "grad_norm": 0.1123046875, + "learning_rate": 8.827433628318584e-06, + "loss": 0.8738, + "step": 424 + }, + { + "epoch": 0.47013274336283184, + "grad_norm": 0.171875, + "learning_rate": 8.824668141592922e-06, + "loss": 0.8676, + "step": 425 + }, + { + "epoch": 0.47123893805309736, + "grad_norm": 0.09765625, + "learning_rate": 8.821902654867257e-06, + "loss": 0.8099, + "step": 426 + }, + { + "epoch": 0.4723451327433628, + "grad_norm": 0.0859375, + "learning_rate": 8.819137168141594e-06, + "loss": 0.8439, + "step": 427 + }, + { + "epoch": 0.47345132743362833, + "grad_norm": 0.095703125, + "learning_rate": 8.816371681415929e-06, + "loss": 0.8671, + "step": 428 + }, + { + "epoch": 0.4745575221238938, + "grad_norm": 0.1025390625, + "learning_rate": 8.813606194690267e-06, + "loss": 0.8585, + "step": 429 + }, + { + "epoch": 0.4756637168141593, + "grad_norm": 0.09326171875, + "learning_rate": 8.810840707964603e-06, + "loss": 0.8208, + "step": 430 + }, + { + "epoch": 0.47676991150442477, + "grad_norm": 0.1083984375, + "learning_rate": 8.808075221238939e-06, + "loss": 0.8619, + "step": 431 + }, + { + "epoch": 0.4778761061946903, + "grad_norm": 0.0966796875, + "learning_rate": 8.805309734513275e-06, + "loss": 0.8331, + "step": 432 + }, + { + "epoch": 0.47898230088495575, + "grad_norm": 0.09033203125, + "learning_rate": 8.802544247787611e-06, + "loss": 0.8209, + "step": 433 + }, + { + "epoch": 0.48008849557522126, + "grad_norm": 0.1064453125, + "learning_rate": 8.799778761061947e-06, + "loss": 0.886, + "step": 434 + }, + { + "epoch": 0.4811946902654867, + "grad_norm": 0.1025390625, + "learning_rate": 8.797013274336283e-06, + "loss": 0.8304, + "step": 435 + }, + { + "epoch": 0.4823008849557522, + "grad_norm": 0.10009765625, + "learning_rate": 8.79424778761062e-06, + "loss": 0.8418, + "step": 436 + }, + { + "epoch": 0.4834070796460177, + "grad_norm": 0.09521484375, + "learning_rate": 8.791482300884957e-06, + "loss": 0.8284, + "step": 437 + }, + { + "epoch": 0.48451327433628316, + "grad_norm": 0.09814453125, + "learning_rate": 8.788716814159293e-06, + "loss": 0.8258, + "step": 438 + }, + { + "epoch": 0.4856194690265487, + "grad_norm": 0.09814453125, + "learning_rate": 8.78595132743363e-06, + "loss": 0.8641, + "step": 439 + }, + { + "epoch": 0.48672566371681414, + "grad_norm": 0.09765625, + "learning_rate": 8.783185840707966e-06, + "loss": 0.8718, + "step": 440 + }, + { + "epoch": 0.48783185840707965, + "grad_norm": 0.08447265625, + "learning_rate": 8.780420353982302e-06, + "loss": 0.8229, + "step": 441 + }, + { + "epoch": 0.4889380530973451, + "grad_norm": 0.0986328125, + "learning_rate": 8.777654867256638e-06, + "loss": 0.8347, + "step": 442 + }, + { + "epoch": 0.49004424778761063, + "grad_norm": 0.0791015625, + "learning_rate": 8.774889380530974e-06, + "loss": 0.7614, + "step": 443 + }, + { + "epoch": 0.4911504424778761, + "grad_norm": 0.08837890625, + "learning_rate": 8.77212389380531e-06, + "loss": 0.8231, + "step": 444 + }, + { + "epoch": 0.4922566371681416, + "grad_norm": 0.09423828125, + "learning_rate": 8.769358407079646e-06, + "loss": 0.8104, + "step": 445 + }, + { + "epoch": 0.49336283185840707, + 
"grad_norm": 0.0947265625, + "learning_rate": 8.766592920353984e-06, + "loss": 0.8738, + "step": 446 + }, + { + "epoch": 0.4944690265486726, + "grad_norm": 0.10107421875, + "learning_rate": 8.763827433628319e-06, + "loss": 0.8535, + "step": 447 + }, + { + "epoch": 0.49557522123893805, + "grad_norm": 0.09130859375, + "learning_rate": 8.761061946902656e-06, + "loss": 0.8282, + "step": 448 + }, + { + "epoch": 0.49668141592920356, + "grad_norm": 0.08935546875, + "learning_rate": 8.758296460176991e-06, + "loss": 0.8288, + "step": 449 + }, + { + "epoch": 0.497787610619469, + "grad_norm": 0.09716796875, + "learning_rate": 8.755530973451329e-06, + "loss": 0.8366, + "step": 450 + }, + { + "epoch": 0.49889380530973454, + "grad_norm": 0.10205078125, + "learning_rate": 8.752765486725663e-06, + "loss": 0.8576, + "step": 451 + }, + { + "epoch": 0.5, + "grad_norm": 0.1044921875, + "learning_rate": 8.750000000000001e-06, + "loss": 0.835, + "step": 452 + }, + { + "epoch": 0.5011061946902655, + "grad_norm": 0.111328125, + "learning_rate": 8.747234513274337e-06, + "loss": 0.8239, + "step": 453 + }, + { + "epoch": 0.5022123893805309, + "grad_norm": 0.09765625, + "learning_rate": 8.744469026548673e-06, + "loss": 0.8043, + "step": 454 + }, + { + "epoch": 0.5033185840707964, + "grad_norm": 0.09619140625, + "learning_rate": 8.74170353982301e-06, + "loss": 0.8495, + "step": 455 + }, + { + "epoch": 0.504424778761062, + "grad_norm": 0.10205078125, + "learning_rate": 8.738938053097345e-06, + "loss": 0.8298, + "step": 456 + }, + { + "epoch": 0.5055309734513275, + "grad_norm": 0.09912109375, + "learning_rate": 8.736172566371682e-06, + "loss": 0.8213, + "step": 457 + }, + { + "epoch": 0.5066371681415929, + "grad_norm": 0.07763671875, + "learning_rate": 8.733407079646018e-06, + "loss": 0.7884, + "step": 458 + }, + { + "epoch": 0.5077433628318584, + "grad_norm": 0.33203125, + "learning_rate": 8.730641592920356e-06, + "loss": 0.8921, + "step": 459 + }, + { + "epoch": 0.5088495575221239, + "grad_norm": 0.08447265625, + "learning_rate": 8.727876106194692e-06, + "loss": 0.8226, + "step": 460 + }, + { + "epoch": 0.5099557522123894, + "grad_norm": 0.1181640625, + "learning_rate": 8.725110619469028e-06, + "loss": 0.8441, + "step": 461 + }, + { + "epoch": 0.5110619469026548, + "grad_norm": 0.10595703125, + "learning_rate": 8.722345132743364e-06, + "loss": 0.8937, + "step": 462 + }, + { + "epoch": 0.5121681415929203, + "grad_norm": 0.11181640625, + "learning_rate": 8.7195796460177e-06, + "loss": 0.7763, + "step": 463 + }, + { + "epoch": 0.5132743362831859, + "grad_norm": 0.09765625, + "learning_rate": 8.716814159292036e-06, + "loss": 0.8601, + "step": 464 + }, + { + "epoch": 0.5143805309734514, + "grad_norm": 0.0849609375, + "learning_rate": 8.714048672566372e-06, + "loss": 0.8163, + "step": 465 + }, + { + "epoch": 0.5154867256637168, + "grad_norm": 0.09521484375, + "learning_rate": 8.711283185840708e-06, + "loss": 0.8157, + "step": 466 + }, + { + "epoch": 0.5165929203539823, + "grad_norm": 0.080078125, + "learning_rate": 8.708517699115045e-06, + "loss": 0.7975, + "step": 467 + }, + { + "epoch": 0.5176991150442478, + "grad_norm": 0.08984375, + "learning_rate": 8.70575221238938e-06, + "loss": 0.7654, + "step": 468 + }, + { + "epoch": 0.5188053097345132, + "grad_norm": 0.09228515625, + "learning_rate": 8.702986725663718e-06, + "loss": 0.8482, + "step": 469 + }, + { + "epoch": 0.5199115044247787, + "grad_norm": 0.0908203125, + "learning_rate": 8.700221238938053e-06, + "loss": 0.833, + "step": 470 + }, + { + "epoch": 
0.5210176991150443, + "grad_norm": 0.09912109375, + "learning_rate": 8.69745575221239e-06, + "loss": 0.8977, + "step": 471 + }, + { + "epoch": 0.5221238938053098, + "grad_norm": 0.1064453125, + "learning_rate": 8.694690265486727e-06, + "loss": 0.8902, + "step": 472 + }, + { + "epoch": 0.5232300884955752, + "grad_norm": 0.09912109375, + "learning_rate": 8.691924778761063e-06, + "loss": 0.8423, + "step": 473 + }, + { + "epoch": 0.5243362831858407, + "grad_norm": 0.1328125, + "learning_rate": 8.689159292035399e-06, + "loss": 0.8301, + "step": 474 + }, + { + "epoch": 0.5254424778761062, + "grad_norm": 0.11865234375, + "learning_rate": 8.686393805309735e-06, + "loss": 0.8679, + "step": 475 + }, + { + "epoch": 0.5265486725663717, + "grad_norm": 0.1025390625, + "learning_rate": 8.683628318584071e-06, + "loss": 0.8334, + "step": 476 + }, + { + "epoch": 0.5276548672566371, + "grad_norm": 0.091796875, + "learning_rate": 8.680862831858407e-06, + "loss": 0.814, + "step": 477 + }, + { + "epoch": 0.5287610619469026, + "grad_norm": 0.087890625, + "learning_rate": 8.678097345132744e-06, + "loss": 0.8571, + "step": 478 + }, + { + "epoch": 0.5298672566371682, + "grad_norm": 0.09375, + "learning_rate": 8.67533185840708e-06, + "loss": 0.7996, + "step": 479 + }, + { + "epoch": 0.5309734513274337, + "grad_norm": 0.09521484375, + "learning_rate": 8.672566371681418e-06, + "loss": 0.8152, + "step": 480 + }, + { + "epoch": 0.5320796460176991, + "grad_norm": 0.08544921875, + "learning_rate": 8.669800884955752e-06, + "loss": 0.8303, + "step": 481 + }, + { + "epoch": 0.5331858407079646, + "grad_norm": 0.10546875, + "learning_rate": 8.66703539823009e-06, + "loss": 0.8647, + "step": 482 + }, + { + "epoch": 0.5342920353982301, + "grad_norm": 0.09130859375, + "learning_rate": 8.664269911504426e-06, + "loss": 0.8207, + "step": 483 + }, + { + "epoch": 0.5353982300884956, + "grad_norm": 0.09716796875, + "learning_rate": 8.661504424778762e-06, + "loss": 0.8429, + "step": 484 + }, + { + "epoch": 0.536504424778761, + "grad_norm": 0.0947265625, + "learning_rate": 8.658738938053098e-06, + "loss": 0.8134, + "step": 485 + }, + { + "epoch": 0.5376106194690266, + "grad_norm": 0.10302734375, + "learning_rate": 8.655973451327434e-06, + "loss": 0.7738, + "step": 486 + }, + { + "epoch": 0.5387168141592921, + "grad_norm": 0.099609375, + "learning_rate": 8.65320796460177e-06, + "loss": 0.8431, + "step": 487 + }, + { + "epoch": 0.5398230088495575, + "grad_norm": 0.0888671875, + "learning_rate": 8.650442477876107e-06, + "loss": 0.792, + "step": 488 + }, + { + "epoch": 0.540929203539823, + "grad_norm": 0.0830078125, + "learning_rate": 8.647676991150443e-06, + "loss": 0.7863, + "step": 489 + }, + { + "epoch": 0.5420353982300885, + "grad_norm": 0.09716796875, + "learning_rate": 8.64491150442478e-06, + "loss": 0.7907, + "step": 490 + }, + { + "epoch": 0.543141592920354, + "grad_norm": 0.1015625, + "learning_rate": 8.642146017699115e-06, + "loss": 0.8102, + "step": 491 + }, + { + "epoch": 0.5442477876106194, + "grad_norm": 0.095703125, + "learning_rate": 8.639380530973453e-06, + "loss": 0.8298, + "step": 492 + }, + { + "epoch": 0.5453539823008849, + "grad_norm": 0.1005859375, + "learning_rate": 8.636615044247789e-06, + "loss": 0.8196, + "step": 493 + }, + { + "epoch": 0.5464601769911505, + "grad_norm": 0.0927734375, + "learning_rate": 8.633849557522125e-06, + "loss": 0.8665, + "step": 494 + }, + { + "epoch": 0.547566371681416, + "grad_norm": 0.10595703125, + "learning_rate": 8.631084070796461e-06, + "loss": 0.8216, + "step": 495 + }, + { + 
"epoch": 0.5486725663716814, + "grad_norm": 0.09326171875, + "learning_rate": 8.628318584070797e-06, + "loss": 0.7836, + "step": 496 + }, + { + "epoch": 0.5497787610619469, + "grad_norm": 0.11181640625, + "learning_rate": 8.625553097345133e-06, + "loss": 0.8403, + "step": 497 + }, + { + "epoch": 0.5508849557522124, + "grad_norm": 0.09130859375, + "learning_rate": 8.62278761061947e-06, + "loss": 0.7662, + "step": 498 + }, + { + "epoch": 0.5519911504424779, + "grad_norm": 0.09326171875, + "learning_rate": 8.620022123893806e-06, + "loss": 0.7539, + "step": 499 + }, + { + "epoch": 0.5530973451327433, + "grad_norm": 0.1083984375, + "learning_rate": 8.617256637168142e-06, + "loss": 0.8757, + "step": 500 + }, + { + "epoch": 0.5542035398230089, + "grad_norm": 0.095703125, + "learning_rate": 8.61449115044248e-06, + "loss": 0.8628, + "step": 501 + }, + { + "epoch": 0.5553097345132744, + "grad_norm": 0.10791015625, + "learning_rate": 8.611725663716814e-06, + "loss": 0.8246, + "step": 502 + }, + { + "epoch": 0.5564159292035398, + "grad_norm": 0.09033203125, + "learning_rate": 8.608960176991152e-06, + "loss": 0.8075, + "step": 503 + }, + { + "epoch": 0.5575221238938053, + "grad_norm": 0.11181640625, + "learning_rate": 8.606194690265486e-06, + "loss": 0.8745, + "step": 504 + }, + { + "epoch": 0.5586283185840708, + "grad_norm": 0.08447265625, + "learning_rate": 8.603429203539824e-06, + "loss": 0.7894, + "step": 505 + }, + { + "epoch": 0.5597345132743363, + "grad_norm": 0.11181640625, + "learning_rate": 8.60066371681416e-06, + "loss": 0.8169, + "step": 506 + }, + { + "epoch": 0.5608407079646017, + "grad_norm": 0.0869140625, + "learning_rate": 8.597898230088496e-06, + "loss": 0.844, + "step": 507 + }, + { + "epoch": 0.5619469026548672, + "grad_norm": 0.09130859375, + "learning_rate": 8.595132743362832e-06, + "loss": 0.8059, + "step": 508 + }, + { + "epoch": 0.5630530973451328, + "grad_norm": 0.10693359375, + "learning_rate": 8.592367256637169e-06, + "loss": 0.8305, + "step": 509 + }, + { + "epoch": 0.5641592920353983, + "grad_norm": 0.10595703125, + "learning_rate": 8.589601769911505e-06, + "loss": 0.8594, + "step": 510 + }, + { + "epoch": 0.5652654867256637, + "grad_norm": 0.09619140625, + "learning_rate": 8.58683628318584e-06, + "loss": 0.8155, + "step": 511 + }, + { + "epoch": 0.5663716814159292, + "grad_norm": 0.09814453125, + "learning_rate": 8.584070796460177e-06, + "loss": 0.8764, + "step": 512 + }, + { + "epoch": 0.5674778761061947, + "grad_norm": 0.08984375, + "learning_rate": 8.581305309734515e-06, + "loss": 0.8306, + "step": 513 + }, + { + "epoch": 0.5685840707964602, + "grad_norm": 0.10009765625, + "learning_rate": 8.578539823008851e-06, + "loss": 0.8944, + "step": 514 + }, + { + "epoch": 0.5696902654867256, + "grad_norm": 0.09375, + "learning_rate": 8.575774336283187e-06, + "loss": 0.8218, + "step": 515 + }, + { + "epoch": 0.5707964601769911, + "grad_norm": 0.1015625, + "learning_rate": 8.573008849557523e-06, + "loss": 0.8683, + "step": 516 + }, + { + "epoch": 0.5719026548672567, + "grad_norm": 0.10546875, + "learning_rate": 8.57024336283186e-06, + "loss": 0.8448, + "step": 517 + }, + { + "epoch": 0.5730088495575221, + "grad_norm": 0.087890625, + "learning_rate": 8.567477876106195e-06, + "loss": 0.7946, + "step": 518 + }, + { + "epoch": 0.5741150442477876, + "grad_norm": 0.109375, + "learning_rate": 8.564712389380532e-06, + "loss": 0.8277, + "step": 519 + }, + { + "epoch": 0.5752212389380531, + "grad_norm": 0.10693359375, + "learning_rate": 8.561946902654868e-06, + "loss": 0.8808, + "step": 
520 + }, + { + "epoch": 0.5763274336283186, + "grad_norm": 0.0947265625, + "learning_rate": 8.559181415929204e-06, + "loss": 0.8109, + "step": 521 + }, + { + "epoch": 0.577433628318584, + "grad_norm": 0.09423828125, + "learning_rate": 8.556415929203542e-06, + "loss": 0.7998, + "step": 522 + }, + { + "epoch": 0.5785398230088495, + "grad_norm": 0.10009765625, + "learning_rate": 8.553650442477876e-06, + "loss": 0.8062, + "step": 523 + }, + { + "epoch": 0.5796460176991151, + "grad_norm": 0.08837890625, + "learning_rate": 8.550884955752214e-06, + "loss": 0.7741, + "step": 524 + }, + { + "epoch": 0.5807522123893806, + "grad_norm": 0.10546875, + "learning_rate": 8.548119469026548e-06, + "loss": 0.8697, + "step": 525 + }, + { + "epoch": 0.581858407079646, + "grad_norm": 0.10302734375, + "learning_rate": 8.545353982300886e-06, + "loss": 0.8054, + "step": 526 + }, + { + "epoch": 0.5829646017699115, + "grad_norm": 0.10400390625, + "learning_rate": 8.542588495575222e-06, + "loss": 0.8275, + "step": 527 + }, + { + "epoch": 0.584070796460177, + "grad_norm": 0.0908203125, + "learning_rate": 8.539823008849558e-06, + "loss": 0.8137, + "step": 528 + }, + { + "epoch": 0.5851769911504425, + "grad_norm": 0.08740234375, + "learning_rate": 8.537057522123894e-06, + "loss": 0.7852, + "step": 529 + }, + { + "epoch": 0.5862831858407079, + "grad_norm": 0.087890625, + "learning_rate": 8.53429203539823e-06, + "loss": 0.809, + "step": 530 + }, + { + "epoch": 0.5873893805309734, + "grad_norm": 0.1201171875, + "learning_rate": 8.531526548672567e-06, + "loss": 0.8851, + "step": 531 + }, + { + "epoch": 0.588495575221239, + "grad_norm": 0.087890625, + "learning_rate": 8.528761061946903e-06, + "loss": 0.8105, + "step": 532 + }, + { + "epoch": 0.5896017699115044, + "grad_norm": 0.08544921875, + "learning_rate": 8.525995575221239e-06, + "loss": 0.8216, + "step": 533 + }, + { + "epoch": 0.5907079646017699, + "grad_norm": 0.08642578125, + "learning_rate": 8.523230088495575e-06, + "loss": 0.7709, + "step": 534 + }, + { + "epoch": 0.5918141592920354, + "grad_norm": 0.10107421875, + "learning_rate": 8.520464601769913e-06, + "loss": 0.8379, + "step": 535 + }, + { + "epoch": 0.5929203539823009, + "grad_norm": 0.09130859375, + "learning_rate": 8.517699115044249e-06, + "loss": 0.8057, + "step": 536 + }, + { + "epoch": 0.5940265486725663, + "grad_norm": 0.10693359375, + "learning_rate": 8.514933628318585e-06, + "loss": 0.8574, + "step": 537 + }, + { + "epoch": 0.5951327433628318, + "grad_norm": 0.09033203125, + "learning_rate": 8.512168141592921e-06, + "loss": 0.8196, + "step": 538 + }, + { + "epoch": 0.5962389380530974, + "grad_norm": 0.0986328125, + "learning_rate": 8.509402654867257e-06, + "loss": 0.7996, + "step": 539 + }, + { + "epoch": 0.5973451327433629, + "grad_norm": 0.119140625, + "learning_rate": 8.506637168141594e-06, + "loss": 0.8363, + "step": 540 + }, + { + "epoch": 0.5984513274336283, + "grad_norm": 0.10791015625, + "learning_rate": 8.50387168141593e-06, + "loss": 0.8636, + "step": 541 + }, + { + "epoch": 0.5995575221238938, + "grad_norm": 0.09765625, + "learning_rate": 8.501106194690266e-06, + "loss": 0.7922, + "step": 542 + }, + { + "epoch": 0.6006637168141593, + "grad_norm": 0.09130859375, + "learning_rate": 8.498340707964604e-06, + "loss": 0.8071, + "step": 543 + }, + { + "epoch": 0.6017699115044248, + "grad_norm": 0.10009765625, + "learning_rate": 8.495575221238938e-06, + "loss": 0.8367, + "step": 544 + }, + { + "epoch": 0.6028761061946902, + "grad_norm": 0.0947265625, + "learning_rate": 8.492809734513276e-06, + 
"loss": 0.8151, + "step": 545 + }, + { + "epoch": 0.6039823008849557, + "grad_norm": 0.11083984375, + "learning_rate": 8.49004424778761e-06, + "loss": 0.8159, + "step": 546 + }, + { + "epoch": 0.6050884955752213, + "grad_norm": 0.09814453125, + "learning_rate": 8.487278761061948e-06, + "loss": 0.8061, + "step": 547 + }, + { + "epoch": 0.6061946902654868, + "grad_norm": 0.091796875, + "learning_rate": 8.484513274336284e-06, + "loss": 0.8266, + "step": 548 + }, + { + "epoch": 0.6073008849557522, + "grad_norm": 0.10107421875, + "learning_rate": 8.48174778761062e-06, + "loss": 0.8631, + "step": 549 + }, + { + "epoch": 0.6084070796460177, + "grad_norm": 0.11474609375, + "learning_rate": 8.478982300884957e-06, + "loss": 0.8643, + "step": 550 + }, + { + "epoch": 0.6095132743362832, + "grad_norm": 0.1005859375, + "learning_rate": 8.476216814159293e-06, + "loss": 0.8369, + "step": 551 + }, + { + "epoch": 0.6106194690265486, + "grad_norm": 0.0966796875, + "learning_rate": 8.473451327433629e-06, + "loss": 0.8379, + "step": 552 + }, + { + "epoch": 0.6117256637168141, + "grad_norm": 0.1044921875, + "learning_rate": 8.470685840707965e-06, + "loss": 0.8289, + "step": 553 + }, + { + "epoch": 0.6128318584070797, + "grad_norm": 0.11181640625, + "learning_rate": 8.467920353982301e-06, + "loss": 0.8463, + "step": 554 + }, + { + "epoch": 0.6139380530973452, + "grad_norm": 0.1015625, + "learning_rate": 8.465154867256637e-06, + "loss": 0.8268, + "step": 555 + }, + { + "epoch": 0.6150442477876106, + "grad_norm": 0.09716796875, + "learning_rate": 8.462389380530975e-06, + "loss": 0.8208, + "step": 556 + }, + { + "epoch": 0.6161504424778761, + "grad_norm": 0.1279296875, + "learning_rate": 8.45962389380531e-06, + "loss": 0.8201, + "step": 557 + }, + { + "epoch": 0.6172566371681416, + "grad_norm": 0.10107421875, + "learning_rate": 8.456858407079647e-06, + "loss": 0.7752, + "step": 558 + }, + { + "epoch": 0.6183628318584071, + "grad_norm": 0.1015625, + "learning_rate": 8.454092920353983e-06, + "loss": 0.883, + "step": 559 + }, + { + "epoch": 0.6194690265486725, + "grad_norm": 0.10791015625, + "learning_rate": 8.45132743362832e-06, + "loss": 0.8121, + "step": 560 + }, + { + "epoch": 0.620575221238938, + "grad_norm": 0.09716796875, + "learning_rate": 8.448561946902656e-06, + "loss": 0.79, + "step": 561 + }, + { + "epoch": 0.6216814159292036, + "grad_norm": 0.09228515625, + "learning_rate": 8.445796460176992e-06, + "loss": 0.7949, + "step": 562 + }, + { + "epoch": 0.6227876106194691, + "grad_norm": 0.10009765625, + "learning_rate": 8.443030973451328e-06, + "loss": 0.7928, + "step": 563 + }, + { + "epoch": 0.6238938053097345, + "grad_norm": 0.1044921875, + "learning_rate": 8.440265486725664e-06, + "loss": 0.8075, + "step": 564 + }, + { + "epoch": 0.625, + "grad_norm": 0.09228515625, + "learning_rate": 8.4375e-06, + "loss": 0.835, + "step": 565 + }, + { + "epoch": 0.6261061946902655, + "grad_norm": 0.095703125, + "learning_rate": 8.434734513274338e-06, + "loss": 0.8108, + "step": 566 + }, + { + "epoch": 0.6272123893805309, + "grad_norm": 0.11962890625, + "learning_rate": 8.431969026548672e-06, + "loss": 0.8396, + "step": 567 + }, + { + "epoch": 0.6283185840707964, + "grad_norm": 0.1015625, + "learning_rate": 8.42920353982301e-06, + "loss": 0.8253, + "step": 568 + }, + { + "epoch": 0.629424778761062, + "grad_norm": 0.1064453125, + "learning_rate": 8.426438053097346e-06, + "loss": 0.8263, + "step": 569 + }, + { + "epoch": 0.6305309734513275, + "grad_norm": 0.10546875, + "learning_rate": 8.423672566371682e-06, + "loss": 
0.8499, + "step": 570 + }, + { + "epoch": 0.6316371681415929, + "grad_norm": 0.10107421875, + "learning_rate": 8.420907079646019e-06, + "loss": 0.7957, + "step": 571 + }, + { + "epoch": 0.6327433628318584, + "grad_norm": 0.0908203125, + "learning_rate": 8.418141592920355e-06, + "loss": 0.7729, + "step": 572 + }, + { + "epoch": 0.6338495575221239, + "grad_norm": 0.09716796875, + "learning_rate": 8.41537610619469e-06, + "loss": 0.7931, + "step": 573 + }, + { + "epoch": 0.6349557522123894, + "grad_norm": 0.1005859375, + "learning_rate": 8.412610619469027e-06, + "loss": 0.8112, + "step": 574 + }, + { + "epoch": 0.6360619469026548, + "grad_norm": 0.1064453125, + "learning_rate": 8.409845132743363e-06, + "loss": 0.8417, + "step": 575 + }, + { + "epoch": 0.6371681415929203, + "grad_norm": 0.0869140625, + "learning_rate": 8.4070796460177e-06, + "loss": 0.789, + "step": 576 + }, + { + "epoch": 0.6382743362831859, + "grad_norm": 0.111328125, + "learning_rate": 8.404314159292037e-06, + "loss": 0.8299, + "step": 577 + }, + { + "epoch": 0.6393805309734514, + "grad_norm": 0.087890625, + "learning_rate": 8.401548672566371e-06, + "loss": 0.8088, + "step": 578 + }, + { + "epoch": 0.6404867256637168, + "grad_norm": 0.10009765625, + "learning_rate": 8.39878318584071e-06, + "loss": 0.8277, + "step": 579 + }, + { + "epoch": 0.6415929203539823, + "grad_norm": 0.140625, + "learning_rate": 8.396017699115044e-06, + "loss": 0.8481, + "step": 580 + }, + { + "epoch": 0.6426991150442478, + "grad_norm": 0.10009765625, + "learning_rate": 8.393252212389382e-06, + "loss": 0.829, + "step": 581 + }, + { + "epoch": 0.6438053097345132, + "grad_norm": 0.09423828125, + "learning_rate": 8.390486725663718e-06, + "loss": 0.7889, + "step": 582 + }, + { + "epoch": 0.6449115044247787, + "grad_norm": 0.09375, + "learning_rate": 8.387721238938054e-06, + "loss": 0.8297, + "step": 583 + }, + { + "epoch": 0.6460176991150443, + "grad_norm": 0.09619140625, + "learning_rate": 8.38495575221239e-06, + "loss": 0.8153, + "step": 584 + }, + { + "epoch": 0.6471238938053098, + "grad_norm": 0.107421875, + "learning_rate": 8.382190265486726e-06, + "loss": 0.7944, + "step": 585 + }, + { + "epoch": 0.6482300884955752, + "grad_norm": 0.1005859375, + "learning_rate": 8.379424778761062e-06, + "loss": 0.801, + "step": 586 + }, + { + "epoch": 0.6493362831858407, + "grad_norm": 0.11865234375, + "learning_rate": 8.376659292035398e-06, + "loss": 0.8954, + "step": 587 + }, + { + "epoch": 0.6504424778761062, + "grad_norm": 0.11083984375, + "learning_rate": 8.373893805309734e-06, + "loss": 0.8318, + "step": 588 + }, + { + "epoch": 0.6515486725663717, + "grad_norm": 0.09912109375, + "learning_rate": 8.371128318584072e-06, + "loss": 0.7723, + "step": 589 + }, + { + "epoch": 0.6526548672566371, + "grad_norm": 0.0947265625, + "learning_rate": 8.368362831858408e-06, + "loss": 0.8451, + "step": 590 + }, + { + "epoch": 0.6537610619469026, + "grad_norm": 0.09326171875, + "learning_rate": 8.365597345132744e-06, + "loss": 0.8182, + "step": 591 + }, + { + "epoch": 0.6548672566371682, + "grad_norm": 0.08837890625, + "learning_rate": 8.36283185840708e-06, + "loss": 0.7703, + "step": 592 + }, + { + "epoch": 0.6559734513274337, + "grad_norm": 0.09912109375, + "learning_rate": 8.360066371681417e-06, + "loss": 0.7927, + "step": 593 + }, + { + "epoch": 0.6570796460176991, + "grad_norm": 0.099609375, + "learning_rate": 8.357300884955753e-06, + "loss": 0.8104, + "step": 594 + }, + { + "epoch": 0.6581858407079646, + "grad_norm": 0.1064453125, + "learning_rate": 8.354535398230089e-06, 
+ "loss": 0.7873, + "step": 595 + }, + { + "epoch": 0.6592920353982301, + "grad_norm": 0.1279296875, + "learning_rate": 8.351769911504427e-06, + "loss": 0.8329, + "step": 596 + }, + { + "epoch": 0.6603982300884956, + "grad_norm": 0.0947265625, + "learning_rate": 8.349004424778761e-06, + "loss": 0.7717, + "step": 597 + }, + { + "epoch": 0.661504424778761, + "grad_norm": 0.0849609375, + "learning_rate": 8.346238938053099e-06, + "loss": 0.7815, + "step": 598 + }, + { + "epoch": 0.6626106194690266, + "grad_norm": 0.10107421875, + "learning_rate": 8.343473451327433e-06, + "loss": 0.8017, + "step": 599 + }, + { + "epoch": 0.6637168141592921, + "grad_norm": 0.09716796875, + "learning_rate": 8.340707964601771e-06, + "loss": 0.7866, + "step": 600 + }, + { + "epoch": 0.6648230088495575, + "grad_norm": 0.0927734375, + "learning_rate": 8.337942477876106e-06, + "loss": 0.8006, + "step": 601 + }, + { + "epoch": 0.665929203539823, + "grad_norm": 0.087890625, + "learning_rate": 8.335176991150444e-06, + "loss": 0.7718, + "step": 602 + }, + { + "epoch": 0.6670353982300885, + "grad_norm": 0.0927734375, + "learning_rate": 8.33241150442478e-06, + "loss": 0.7884, + "step": 603 + }, + { + "epoch": 0.668141592920354, + "grad_norm": 0.11181640625, + "learning_rate": 8.329646017699116e-06, + "loss": 0.8159, + "step": 604 + }, + { + "epoch": 0.6692477876106194, + "grad_norm": 0.10107421875, + "learning_rate": 8.326880530973452e-06, + "loss": 0.7906, + "step": 605 + }, + { + "epoch": 0.6703539823008849, + "grad_norm": 0.09423828125, + "learning_rate": 8.324115044247788e-06, + "loss": 0.7802, + "step": 606 + }, + { + "epoch": 0.6714601769911505, + "grad_norm": 0.1328125, + "learning_rate": 8.321349557522124e-06, + "loss": 0.878, + "step": 607 + }, + { + "epoch": 0.672566371681416, + "grad_norm": 0.09228515625, + "learning_rate": 8.31858407079646e-06, + "loss": 0.8006, + "step": 608 + }, + { + "epoch": 0.6736725663716814, + "grad_norm": 0.1064453125, + "learning_rate": 8.315818584070796e-06, + "loss": 0.847, + "step": 609 + }, + { + "epoch": 0.6747787610619469, + "grad_norm": 0.1015625, + "learning_rate": 8.313053097345133e-06, + "loss": 0.8437, + "step": 610 + }, + { + "epoch": 0.6758849557522124, + "grad_norm": 0.11376953125, + "learning_rate": 8.31028761061947e-06, + "loss": 0.8285, + "step": 611 + }, + { + "epoch": 0.6769911504424779, + "grad_norm": 0.11181640625, + "learning_rate": 8.307522123893807e-06, + "loss": 0.8781, + "step": 612 + }, + { + "epoch": 0.6780973451327433, + "grad_norm": 0.09912109375, + "learning_rate": 8.304756637168143e-06, + "loss": 0.8166, + "step": 613 + }, + { + "epoch": 0.6792035398230089, + "grad_norm": 0.1318359375, + "learning_rate": 8.301991150442479e-06, + "loss": 0.8639, + "step": 614 + }, + { + "epoch": 0.6803097345132744, + "grad_norm": 0.123046875, + "learning_rate": 8.299225663716815e-06, + "loss": 0.808, + "step": 615 + }, + { + "epoch": 0.6814159292035398, + "grad_norm": 0.09765625, + "learning_rate": 8.296460176991151e-06, + "loss": 0.7733, + "step": 616 + }, + { + "epoch": 0.6825221238938053, + "grad_norm": 0.11767578125, + "learning_rate": 8.293694690265487e-06, + "loss": 0.857, + "step": 617 + }, + { + "epoch": 0.6836283185840708, + "grad_norm": 0.09765625, + "learning_rate": 8.290929203539823e-06, + "loss": 0.8342, + "step": 618 + }, + { + "epoch": 0.6847345132743363, + "grad_norm": 0.1015625, + "learning_rate": 8.288163716814161e-06, + "loss": 0.7707, + "step": 619 + }, + { + "epoch": 0.6858407079646017, + "grad_norm": 0.08984375, + "learning_rate": 
8.285398230088496e-06, + "loss": 0.7488, + "step": 620 + }, + { + "epoch": 0.6869469026548672, + "grad_norm": 0.0947265625, + "learning_rate": 8.282632743362833e-06, + "loss": 0.794, + "step": 621 + }, + { + "epoch": 0.6880530973451328, + "grad_norm": 0.1103515625, + "learning_rate": 8.279867256637168e-06, + "loss": 0.831, + "step": 622 + }, + { + "epoch": 0.6891592920353983, + "grad_norm": 0.1044921875, + "learning_rate": 8.277101769911506e-06, + "loss": 0.8474, + "step": 623 + }, + { + "epoch": 0.6902654867256637, + "grad_norm": 0.146484375, + "learning_rate": 8.274336283185842e-06, + "loss": 0.8716, + "step": 624 + }, + { + "epoch": 0.6913716814159292, + "grad_norm": 0.0986328125, + "learning_rate": 8.271570796460178e-06, + "loss": 0.8067, + "step": 625 + }, + { + "epoch": 0.6924778761061947, + "grad_norm": 0.10498046875, + "learning_rate": 8.268805309734514e-06, + "loss": 0.7858, + "step": 626 + }, + { + "epoch": 0.6935840707964602, + "grad_norm": 0.115234375, + "learning_rate": 8.26603982300885e-06, + "loss": 0.8209, + "step": 627 + }, + { + "epoch": 0.6946902654867256, + "grad_norm": 0.09375, + "learning_rate": 8.263274336283186e-06, + "loss": 0.8252, + "step": 628 + }, + { + "epoch": 0.6957964601769911, + "grad_norm": 0.1455078125, + "learning_rate": 8.260508849557522e-06, + "loss": 0.8108, + "step": 629 + }, + { + "epoch": 0.6969026548672567, + "grad_norm": 0.0849609375, + "learning_rate": 8.257743362831858e-06, + "loss": 0.763, + "step": 630 + }, + { + "epoch": 0.6980088495575221, + "grad_norm": 0.11865234375, + "learning_rate": 8.254977876106195e-06, + "loss": 0.8399, + "step": 631 + }, + { + "epoch": 0.6991150442477876, + "grad_norm": 0.10595703125, + "learning_rate": 8.252212389380532e-06, + "loss": 0.8725, + "step": 632 + }, + { + "epoch": 0.7002212389380531, + "grad_norm": 0.10205078125, + "learning_rate": 8.249446902654869e-06, + "loss": 0.8419, + "step": 633 + }, + { + "epoch": 0.7013274336283186, + "grad_norm": 0.09716796875, + "learning_rate": 8.246681415929205e-06, + "loss": 0.7878, + "step": 634 + }, + { + "epoch": 0.702433628318584, + "grad_norm": 0.0947265625, + "learning_rate": 8.24391592920354e-06, + "loss": 0.7929, + "step": 635 + }, + { + "epoch": 0.7035398230088495, + "grad_norm": 0.09765625, + "learning_rate": 8.241150442477877e-06, + "loss": 0.787, + "step": 636 + }, + { + "epoch": 0.7046460176991151, + "grad_norm": 0.10009765625, + "learning_rate": 8.238384955752213e-06, + "loss": 0.7962, + "step": 637 + }, + { + "epoch": 0.7057522123893806, + "grad_norm": 0.1015625, + "learning_rate": 8.23561946902655e-06, + "loss": 0.8427, + "step": 638 + }, + { + "epoch": 0.706858407079646, + "grad_norm": 0.0859375, + "learning_rate": 8.232853982300885e-06, + "loss": 0.7975, + "step": 639 + }, + { + "epoch": 0.7079646017699115, + "grad_norm": 0.1005859375, + "learning_rate": 8.230088495575221e-06, + "loss": 0.8305, + "step": 640 + }, + { + "epoch": 0.709070796460177, + "grad_norm": 0.1123046875, + "learning_rate": 8.227323008849558e-06, + "loss": 0.8375, + "step": 641 + }, + { + "epoch": 0.7101769911504425, + "grad_norm": 0.1015625, + "learning_rate": 8.224557522123895e-06, + "loss": 0.8371, + "step": 642 + }, + { + "epoch": 0.7112831858407079, + "grad_norm": 0.10205078125, + "learning_rate": 8.22179203539823e-06, + "loss": 0.8304, + "step": 643 + }, + { + "epoch": 0.7123893805309734, + "grad_norm": 0.1103515625, + "learning_rate": 8.219026548672568e-06, + "loss": 0.7761, + "step": 644 + }, + { + "epoch": 0.713495575221239, + "grad_norm": 0.10009765625, + "learning_rate": 
8.216261061946904e-06, + "loss": 0.7781, + "step": 645 + }, + { + "epoch": 0.7146017699115044, + "grad_norm": 0.10302734375, + "learning_rate": 8.21349557522124e-06, + "loss": 0.8181, + "step": 646 + }, + { + "epoch": 0.7157079646017699, + "grad_norm": 0.11083984375, + "learning_rate": 8.210730088495576e-06, + "loss": 0.8242, + "step": 647 + }, + { + "epoch": 0.7168141592920354, + "grad_norm": 0.10302734375, + "learning_rate": 8.207964601769912e-06, + "loss": 0.8388, + "step": 648 + }, + { + "epoch": 0.7179203539823009, + "grad_norm": 0.11669921875, + "learning_rate": 8.205199115044248e-06, + "loss": 0.8223, + "step": 649 + }, + { + "epoch": 0.7190265486725663, + "grad_norm": 0.123046875, + "learning_rate": 8.202433628318584e-06, + "loss": 0.845, + "step": 650 + }, + { + "epoch": 0.7201327433628318, + "grad_norm": 0.1259765625, + "learning_rate": 8.199668141592922e-06, + "loss": 0.8311, + "step": 651 + }, + { + "epoch": 0.7212389380530974, + "grad_norm": 0.1005859375, + "learning_rate": 8.196902654867257e-06, + "loss": 0.8016, + "step": 652 + }, + { + "epoch": 0.7223451327433629, + "grad_norm": 0.09130859375, + "learning_rate": 8.194137168141594e-06, + "loss": 0.8173, + "step": 653 + }, + { + "epoch": 0.7234513274336283, + "grad_norm": 0.099609375, + "learning_rate": 8.191371681415929e-06, + "loss": 0.8066, + "step": 654 + }, + { + "epoch": 0.7245575221238938, + "grad_norm": 0.09716796875, + "learning_rate": 8.188606194690267e-06, + "loss": 0.8023, + "step": 655 + }, + { + "epoch": 0.7256637168141593, + "grad_norm": 0.09375, + "learning_rate": 8.185840707964603e-06, + "loss": 0.8087, + "step": 656 + }, + { + "epoch": 0.7267699115044248, + "grad_norm": 0.0859375, + "learning_rate": 8.183075221238939e-06, + "loss": 0.7654, + "step": 657 + }, + { + "epoch": 0.7278761061946902, + "grad_norm": 0.10205078125, + "learning_rate": 8.180309734513275e-06, + "loss": 0.8418, + "step": 658 + }, + { + "epoch": 0.7289823008849557, + "grad_norm": 0.0986328125, + "learning_rate": 8.177544247787611e-06, + "loss": 0.8419, + "step": 659 + }, + { + "epoch": 0.7300884955752213, + "grad_norm": 0.205078125, + "learning_rate": 8.174778761061947e-06, + "loss": 0.7717, + "step": 660 + }, + { + "epoch": 0.7311946902654868, + "grad_norm": 0.09521484375, + "learning_rate": 8.172013274336283e-06, + "loss": 0.7719, + "step": 661 + }, + { + "epoch": 0.7323008849557522, + "grad_norm": 0.09765625, + "learning_rate": 8.16924778761062e-06, + "loss": 0.821, + "step": 662 + }, + { + "epoch": 0.7334070796460177, + "grad_norm": 0.09326171875, + "learning_rate": 8.166482300884957e-06, + "loss": 0.7988, + "step": 663 + }, + { + "epoch": 0.7345132743362832, + "grad_norm": 0.09521484375, + "learning_rate": 8.163716814159292e-06, + "loss": 0.7823, + "step": 664 + }, + { + "epoch": 0.7356194690265486, + "grad_norm": 0.091796875, + "learning_rate": 8.16095132743363e-06, + "loss": 0.7912, + "step": 665 + }, + { + "epoch": 0.7367256637168141, + "grad_norm": 0.09814453125, + "learning_rate": 8.158185840707966e-06, + "loss": 0.8055, + "step": 666 + }, + { + "epoch": 0.7378318584070797, + "grad_norm": 0.1591796875, + "learning_rate": 8.155420353982302e-06, + "loss": 0.823, + "step": 667 + }, + { + "epoch": 0.7389380530973452, + "grad_norm": 0.107421875, + "learning_rate": 8.152654867256638e-06, + "loss": 0.7857, + "step": 668 + }, + { + "epoch": 0.7400442477876106, + "grad_norm": 0.12109375, + "learning_rate": 8.149889380530974e-06, + "loss": 0.824, + "step": 669 + }, + { + "epoch": 0.7411504424778761, + "grad_norm": 0.10791015625, + 
"learning_rate": 8.14712389380531e-06, + "loss": 0.8127, + "step": 670 + }, + { + "epoch": 0.7422566371681416, + "grad_norm": 0.10791015625, + "learning_rate": 8.144358407079646e-06, + "loss": 0.7991, + "step": 671 + }, + { + "epoch": 0.7433628318584071, + "grad_norm": 0.09375, + "learning_rate": 8.141592920353984e-06, + "loss": 0.7724, + "step": 672 + }, + { + "epoch": 0.7444690265486725, + "grad_norm": 0.10546875, + "learning_rate": 8.138827433628319e-06, + "loss": 0.7891, + "step": 673 + }, + { + "epoch": 0.745575221238938, + "grad_norm": 0.09130859375, + "learning_rate": 8.136061946902656e-06, + "loss": 0.7939, + "step": 674 + }, + { + "epoch": 0.7466814159292036, + "grad_norm": 0.0908203125, + "learning_rate": 8.133296460176991e-06, + "loss": 0.8113, + "step": 675 + }, + { + "epoch": 0.7477876106194691, + "grad_norm": 0.10791015625, + "learning_rate": 8.130530973451329e-06, + "loss": 0.7954, + "step": 676 + }, + { + "epoch": 0.7488938053097345, + "grad_norm": 0.09765625, + "learning_rate": 8.127765486725663e-06, + "loss": 0.8043, + "step": 677 + }, + { + "epoch": 0.75, + "grad_norm": 0.11279296875, + "learning_rate": 8.125000000000001e-06, + "loss": 0.8213, + "step": 678 + }, + { + "epoch": 0.7511061946902655, + "grad_norm": 0.091796875, + "learning_rate": 8.122234513274337e-06, + "loss": 0.7792, + "step": 679 + }, + { + "epoch": 0.7522123893805309, + "grad_norm": 0.0986328125, + "learning_rate": 8.119469026548673e-06, + "loss": 0.8366, + "step": 680 + }, + { + "epoch": 0.7533185840707964, + "grad_norm": 0.0986328125, + "learning_rate": 8.11670353982301e-06, + "loss": 0.8216, + "step": 681 + }, + { + "epoch": 0.754424778761062, + "grad_norm": 0.08837890625, + "learning_rate": 8.113938053097345e-06, + "loss": 0.7821, + "step": 682 + }, + { + "epoch": 0.7555309734513275, + "grad_norm": 0.0986328125, + "learning_rate": 8.111172566371682e-06, + "loss": 0.7935, + "step": 683 + }, + { + "epoch": 0.7566371681415929, + "grad_norm": 0.10302734375, + "learning_rate": 8.108407079646018e-06, + "loss": 0.8147, + "step": 684 + }, + { + "epoch": 0.7577433628318584, + "grad_norm": 0.10009765625, + "learning_rate": 8.105641592920354e-06, + "loss": 0.8314, + "step": 685 + }, + { + "epoch": 0.7588495575221239, + "grad_norm": 0.1015625, + "learning_rate": 8.102876106194692e-06, + "loss": 0.8017, + "step": 686 + }, + { + "epoch": 0.7599557522123894, + "grad_norm": 0.091796875, + "learning_rate": 8.100110619469028e-06, + "loss": 0.7776, + "step": 687 + }, + { + "epoch": 0.7610619469026548, + "grad_norm": 0.1044921875, + "learning_rate": 8.097345132743364e-06, + "loss": 0.8183, + "step": 688 + }, + { + "epoch": 0.7621681415929203, + "grad_norm": 0.0947265625, + "learning_rate": 8.0945796460177e-06, + "loss": 0.7659, + "step": 689 + }, + { + "epoch": 0.7632743362831859, + "grad_norm": 0.0966796875, + "learning_rate": 8.091814159292036e-06, + "loss": 0.82, + "step": 690 + }, + { + "epoch": 0.7643805309734514, + "grad_norm": 0.10791015625, + "learning_rate": 8.089048672566372e-06, + "loss": 0.8103, + "step": 691 + }, + { + "epoch": 0.7654867256637168, + "grad_norm": 0.10791015625, + "learning_rate": 8.086283185840708e-06, + "loss": 0.794, + "step": 692 + }, + { + "epoch": 0.7665929203539823, + "grad_norm": 0.09619140625, + "learning_rate": 8.083517699115045e-06, + "loss": 0.8052, + "step": 693 + }, + { + "epoch": 0.7676991150442478, + "grad_norm": 0.10302734375, + "learning_rate": 8.08075221238938e-06, + "loss": 0.8367, + "step": 694 + }, + { + "epoch": 0.7688053097345132, + "grad_norm": 0.09912109375, + 
"learning_rate": 8.077986725663719e-06, + "loss": 0.7813, + "step": 695 + }, + { + "epoch": 0.7699115044247787, + "grad_norm": 0.091796875, + "learning_rate": 8.075221238938053e-06, + "loss": 0.7647, + "step": 696 + }, + { + "epoch": 0.7710176991150443, + "grad_norm": 0.1015625, + "learning_rate": 8.07245575221239e-06, + "loss": 0.7917, + "step": 697 + }, + { + "epoch": 0.7721238938053098, + "grad_norm": 0.09619140625, + "learning_rate": 8.069690265486725e-06, + "loss": 0.8125, + "step": 698 + }, + { + "epoch": 0.7732300884955752, + "grad_norm": 0.09521484375, + "learning_rate": 8.066924778761063e-06, + "loss": 0.7879, + "step": 699 + }, + { + "epoch": 0.7743362831858407, + "grad_norm": 0.109375, + "learning_rate": 8.064159292035399e-06, + "loss": 0.8156, + "step": 700 + }, + { + "epoch": 0.7754424778761062, + "grad_norm": 0.1220703125, + "learning_rate": 8.061393805309735e-06, + "loss": 0.7847, + "step": 701 + }, + { + "epoch": 0.7765486725663717, + "grad_norm": 0.10107421875, + "learning_rate": 8.058628318584071e-06, + "loss": 0.823, + "step": 702 + }, + { + "epoch": 0.7776548672566371, + "grad_norm": 0.08984375, + "learning_rate": 8.055862831858408e-06, + "loss": 0.7435, + "step": 703 + }, + { + "epoch": 0.7787610619469026, + "grad_norm": 0.12158203125, + "learning_rate": 8.053097345132744e-06, + "loss": 0.7767, + "step": 704 + }, + { + "epoch": 0.7798672566371682, + "grad_norm": 0.09375, + "learning_rate": 8.05033185840708e-06, + "loss": 0.7688, + "step": 705 + }, + { + "epoch": 0.7809734513274337, + "grad_norm": 0.1015625, + "learning_rate": 8.047566371681416e-06, + "loss": 0.7321, + "step": 706 + }, + { + "epoch": 0.7820796460176991, + "grad_norm": 0.10595703125, + "learning_rate": 8.044800884955752e-06, + "loss": 0.8054, + "step": 707 + }, + { + "epoch": 0.7831858407079646, + "grad_norm": 0.1064453125, + "learning_rate": 8.04203539823009e-06, + "loss": 0.7813, + "step": 708 + }, + { + "epoch": 0.7842920353982301, + "grad_norm": 0.091796875, + "learning_rate": 8.039269911504426e-06, + "loss": 0.7788, + "step": 709 + }, + { + "epoch": 0.7853982300884956, + "grad_norm": 0.11279296875, + "learning_rate": 8.036504424778762e-06, + "loss": 0.7836, + "step": 710 + }, + { + "epoch": 0.786504424778761, + "grad_norm": 0.09765625, + "learning_rate": 8.033738938053098e-06, + "loss": 0.8177, + "step": 711 + }, + { + "epoch": 0.7876106194690266, + "grad_norm": 0.08544921875, + "learning_rate": 8.030973451327434e-06, + "loss": 0.8077, + "step": 712 + }, + { + "epoch": 0.7887168141592921, + "grad_norm": 0.1044921875, + "learning_rate": 8.02820796460177e-06, + "loss": 0.8156, + "step": 713 + }, + { + "epoch": 0.7898230088495575, + "grad_norm": 0.09912109375, + "learning_rate": 8.025442477876107e-06, + "loss": 0.7796, + "step": 714 + }, + { + "epoch": 0.790929203539823, + "grad_norm": 0.099609375, + "learning_rate": 8.022676991150443e-06, + "loss": 0.7771, + "step": 715 + }, + { + "epoch": 0.7920353982300885, + "grad_norm": 0.0986328125, + "learning_rate": 8.01991150442478e-06, + "loss": 0.7921, + "step": 716 + }, + { + "epoch": 0.793141592920354, + "grad_norm": 0.1064453125, + "learning_rate": 8.017146017699115e-06, + "loss": 0.8193, + "step": 717 + }, + { + "epoch": 0.7942477876106194, + "grad_norm": 0.09765625, + "learning_rate": 8.014380530973453e-06, + "loss": 0.7846, + "step": 718 + }, + { + "epoch": 0.7953539823008849, + "grad_norm": 0.1181640625, + "learning_rate": 8.011615044247787e-06, + "loss": 0.8789, + "step": 719 + }, + { + "epoch": 0.7964601769911505, + "grad_norm": 0.10302734375, + 
"learning_rate": 8.008849557522125e-06, + "loss": 0.77, + "step": 720 + }, + { + "epoch": 0.797566371681416, + "grad_norm": 0.0927734375, + "learning_rate": 8.006084070796461e-06, + "loss": 0.7953, + "step": 721 + }, + { + "epoch": 0.7986725663716814, + "grad_norm": 0.09423828125, + "learning_rate": 8.003318584070797e-06, + "loss": 0.7882, + "step": 722 + }, + { + "epoch": 0.7997787610619469, + "grad_norm": 0.1533203125, + "learning_rate": 8.000553097345133e-06, + "loss": 0.8293, + "step": 723 + }, + { + "epoch": 0.8008849557522124, + "grad_norm": 0.115234375, + "learning_rate": 7.99778761061947e-06, + "loss": 0.7941, + "step": 724 + }, + { + "epoch": 0.8019911504424779, + "grad_norm": 0.10205078125, + "learning_rate": 7.995022123893806e-06, + "loss": 0.7618, + "step": 725 + }, + { + "epoch": 0.8030973451327433, + "grad_norm": 0.10693359375, + "learning_rate": 7.992256637168142e-06, + "loss": 0.825, + "step": 726 + }, + { + "epoch": 0.8042035398230089, + "grad_norm": 0.1015625, + "learning_rate": 7.98949115044248e-06, + "loss": 0.7839, + "step": 727 + }, + { + "epoch": 0.8053097345132744, + "grad_norm": 0.10693359375, + "learning_rate": 7.986725663716814e-06, + "loss": 0.7806, + "step": 728 + }, + { + "epoch": 0.8064159292035398, + "grad_norm": 0.099609375, + "learning_rate": 7.983960176991152e-06, + "loss": 0.8017, + "step": 729 + }, + { + "epoch": 0.8075221238938053, + "grad_norm": 0.111328125, + "learning_rate": 7.981194690265486e-06, + "loss": 0.8704, + "step": 730 + }, + { + "epoch": 0.8086283185840708, + "grad_norm": 0.1220703125, + "learning_rate": 7.978429203539824e-06, + "loss": 0.8549, + "step": 731 + }, + { + "epoch": 0.8097345132743363, + "grad_norm": 0.09423828125, + "learning_rate": 7.97566371681416e-06, + "loss": 0.7918, + "step": 732 + }, + { + "epoch": 0.8108407079646017, + "grad_norm": 0.10302734375, + "learning_rate": 7.972898230088496e-06, + "loss": 0.7863, + "step": 733 + }, + { + "epoch": 0.8119469026548672, + "grad_norm": 0.1171875, + "learning_rate": 7.970132743362833e-06, + "loss": 0.8723, + "step": 734 + }, + { + "epoch": 0.8130530973451328, + "grad_norm": 0.107421875, + "learning_rate": 7.967367256637169e-06, + "loss": 0.8013, + "step": 735 + }, + { + "epoch": 0.8141592920353983, + "grad_norm": 0.1005859375, + "learning_rate": 7.964601769911505e-06, + "loss": 0.8097, + "step": 736 + }, + { + "epoch": 0.8152654867256637, + "grad_norm": 0.1044921875, + "learning_rate": 7.961836283185841e-06, + "loss": 0.8117, + "step": 737 + }, + { + "epoch": 0.8163716814159292, + "grad_norm": 0.09228515625, + "learning_rate": 7.959070796460177e-06, + "loss": 0.7894, + "step": 738 + }, + { + "epoch": 0.8174778761061947, + "grad_norm": 0.09765625, + "learning_rate": 7.956305309734515e-06, + "loss": 0.8117, + "step": 739 + }, + { + "epoch": 0.8185840707964602, + "grad_norm": 0.1044921875, + "learning_rate": 7.95353982300885e-06, + "loss": 0.814, + "step": 740 + }, + { + "epoch": 0.8196902654867256, + "grad_norm": 0.1259765625, + "learning_rate": 7.950774336283187e-06, + "loss": 0.8425, + "step": 741 + }, + { + "epoch": 0.8207964601769911, + "grad_norm": 0.09521484375, + "learning_rate": 7.948008849557523e-06, + "loss": 0.7641, + "step": 742 + }, + { + "epoch": 0.8219026548672567, + "grad_norm": 0.1025390625, + "learning_rate": 7.94524336283186e-06, + "loss": 0.7799, + "step": 743 + }, + { + "epoch": 0.8230088495575221, + "grad_norm": 0.09814453125, + "learning_rate": 7.942477876106195e-06, + "loss": 0.8097, + "step": 744 + }, + { + "epoch": 0.8241150442477876, + "grad_norm": 
0.10302734375, + "learning_rate": 7.939712389380532e-06, + "loss": 0.7934, + "step": 745 + }, + { + "epoch": 0.8252212389380531, + "grad_norm": 0.1064453125, + "learning_rate": 7.936946902654868e-06, + "loss": 0.8074, + "step": 746 + }, + { + "epoch": 0.8263274336283186, + "grad_norm": 0.1064453125, + "learning_rate": 7.934181415929204e-06, + "loss": 0.8232, + "step": 747 + }, + { + "epoch": 0.827433628318584, + "grad_norm": 0.10400390625, + "learning_rate": 7.931415929203542e-06, + "loss": 0.7956, + "step": 748 + }, + { + "epoch": 0.8285398230088495, + "grad_norm": 0.09521484375, + "learning_rate": 7.928650442477876e-06, + "loss": 0.7751, + "step": 749 + }, + { + "epoch": 0.8296460176991151, + "grad_norm": 0.10302734375, + "learning_rate": 7.925884955752214e-06, + "loss": 0.7923, + "step": 750 + }, + { + "epoch": 0.8307522123893806, + "grad_norm": 0.10009765625, + "learning_rate": 7.923119469026548e-06, + "loss": 0.7879, + "step": 751 + }, + { + "epoch": 0.831858407079646, + "grad_norm": 0.09423828125, + "learning_rate": 7.920353982300886e-06, + "loss": 0.7493, + "step": 752 + }, + { + "epoch": 0.8329646017699115, + "grad_norm": 0.123046875, + "learning_rate": 7.91758849557522e-06, + "loss": 0.862, + "step": 753 + }, + { + "epoch": 0.834070796460177, + "grad_norm": 0.1103515625, + "learning_rate": 7.914823008849558e-06, + "loss": 0.7811, + "step": 754 + }, + { + "epoch": 0.8351769911504425, + "grad_norm": 0.0927734375, + "learning_rate": 7.912057522123895e-06, + "loss": 0.7316, + "step": 755 + }, + { + "epoch": 0.8362831858407079, + "grad_norm": 0.0947265625, + "learning_rate": 7.90929203539823e-06, + "loss": 0.768, + "step": 756 + }, + { + "epoch": 0.8373893805309734, + "grad_norm": 0.0927734375, + "learning_rate": 7.906526548672567e-06, + "loss": 0.7744, + "step": 757 + }, + { + "epoch": 0.838495575221239, + "grad_norm": 0.1337890625, + "learning_rate": 7.903761061946903e-06, + "loss": 0.7967, + "step": 758 + }, + { + "epoch": 0.8396017699115044, + "grad_norm": 0.09814453125, + "learning_rate": 7.900995575221239e-06, + "loss": 0.7906, + "step": 759 + }, + { + "epoch": 0.8407079646017699, + "grad_norm": 0.1318359375, + "learning_rate": 7.898230088495575e-06, + "loss": 0.7468, + "step": 760 + }, + { + "epoch": 0.8418141592920354, + "grad_norm": 0.10546875, + "learning_rate": 7.895464601769911e-06, + "loss": 0.8613, + "step": 761 + }, + { + "epoch": 0.8429203539823009, + "grad_norm": 0.10302734375, + "learning_rate": 7.892699115044249e-06, + "loss": 0.8098, + "step": 762 + }, + { + "epoch": 0.8440265486725663, + "grad_norm": 0.1044921875, + "learning_rate": 7.889933628318585e-06, + "loss": 0.7901, + "step": 763 + }, + { + "epoch": 0.8451327433628318, + "grad_norm": 0.103515625, + "learning_rate": 7.887168141592921e-06, + "loss": 0.8162, + "step": 764 + }, + { + "epoch": 0.8462389380530974, + "grad_norm": 0.09375, + "learning_rate": 7.884402654867257e-06, + "loss": 0.7757, + "step": 765 + }, + { + "epoch": 0.8473451327433629, + "grad_norm": 0.10498046875, + "learning_rate": 7.881637168141594e-06, + "loss": 0.8093, + "step": 766 + }, + { + "epoch": 0.8484513274336283, + "grad_norm": 0.0966796875, + "learning_rate": 7.87887168141593e-06, + "loss": 0.7795, + "step": 767 + }, + { + "epoch": 0.8495575221238938, + "grad_norm": 0.10595703125, + "learning_rate": 7.876106194690266e-06, + "loss": 0.812, + "step": 768 + }, + { + "epoch": 0.8506637168141593, + "grad_norm": 0.09521484375, + "learning_rate": 7.873340707964604e-06, + "loss": 0.7725, + "step": 769 + }, + { + "epoch": 0.8517699115044248, + 
"grad_norm": 0.09814453125, + "learning_rate": 7.870575221238938e-06, + "loss": 0.7831, + "step": 770 + }, + { + "epoch": 0.8528761061946902, + "grad_norm": 0.111328125, + "learning_rate": 7.867809734513276e-06, + "loss": 0.8032, + "step": 771 + }, + { + "epoch": 0.8539823008849557, + "grad_norm": 0.1064453125, + "learning_rate": 7.86504424778761e-06, + "loss": 0.8097, + "step": 772 + }, + { + "epoch": 0.8550884955752213, + "grad_norm": 0.0927734375, + "learning_rate": 7.862278761061948e-06, + "loss": 0.8034, + "step": 773 + }, + { + "epoch": 0.8561946902654868, + "grad_norm": 0.109375, + "learning_rate": 7.859513274336283e-06, + "loss": 0.7932, + "step": 774 + }, + { + "epoch": 0.8573008849557522, + "grad_norm": 0.1005859375, + "learning_rate": 7.85674778761062e-06, + "loss": 0.787, + "step": 775 + }, + { + "epoch": 0.8584070796460177, + "grad_norm": 0.09619140625, + "learning_rate": 7.853982300884957e-06, + "loss": 0.7583, + "step": 776 + }, + { + "epoch": 0.8595132743362832, + "grad_norm": 0.10986328125, + "learning_rate": 7.851216814159293e-06, + "loss": 0.794, + "step": 777 + }, + { + "epoch": 0.8606194690265486, + "grad_norm": 0.1103515625, + "learning_rate": 7.848451327433629e-06, + "loss": 0.7937, + "step": 778 + }, + { + "epoch": 0.8617256637168141, + "grad_norm": 0.10986328125, + "learning_rate": 7.845685840707965e-06, + "loss": 0.8503, + "step": 779 + }, + { + "epoch": 0.8628318584070797, + "grad_norm": 0.11328125, + "learning_rate": 7.842920353982301e-06, + "loss": 0.8032, + "step": 780 + }, + { + "epoch": 0.8639380530973452, + "grad_norm": 0.158203125, + "learning_rate": 7.840154867256637e-06, + "loss": 0.7588, + "step": 781 + }, + { + "epoch": 0.8650442477876106, + "grad_norm": 0.1240234375, + "learning_rate": 7.837389380530975e-06, + "loss": 0.8995, + "step": 782 + }, + { + "epoch": 0.8661504424778761, + "grad_norm": 0.10302734375, + "learning_rate": 7.83462389380531e-06, + "loss": 0.7923, + "step": 783 + }, + { + "epoch": 0.8672566371681416, + "grad_norm": 0.08935546875, + "learning_rate": 7.831858407079647e-06, + "loss": 0.7528, + "step": 784 + }, + { + "epoch": 0.8683628318584071, + "grad_norm": 0.10205078125, + "learning_rate": 7.829092920353983e-06, + "loss": 0.7906, + "step": 785 + }, + { + "epoch": 0.8694690265486725, + "grad_norm": 0.1240234375, + "learning_rate": 7.82632743362832e-06, + "loss": 0.8124, + "step": 786 + }, + { + "epoch": 0.870575221238938, + "grad_norm": 0.1015625, + "learning_rate": 7.823561946902656e-06, + "loss": 0.765, + "step": 787 + }, + { + "epoch": 0.8716814159292036, + "grad_norm": 0.0966796875, + "learning_rate": 7.820796460176992e-06, + "loss": 0.8097, + "step": 788 + }, + { + "epoch": 0.8727876106194691, + "grad_norm": 0.1044921875, + "learning_rate": 7.818030973451328e-06, + "loss": 0.7986, + "step": 789 + }, + { + "epoch": 0.8738938053097345, + "grad_norm": 0.10595703125, + "learning_rate": 7.815265486725664e-06, + "loss": 0.7865, + "step": 790 + }, + { + "epoch": 0.875, + "grad_norm": 0.0947265625, + "learning_rate": 7.8125e-06, + "loss": 0.7789, + "step": 791 + }, + { + "epoch": 0.8761061946902655, + "grad_norm": 0.1044921875, + "learning_rate": 7.809734513274338e-06, + "loss": 0.8168, + "step": 792 + }, + { + "epoch": 0.8772123893805309, + "grad_norm": 0.11083984375, + "learning_rate": 7.806969026548672e-06, + "loss": 0.758, + "step": 793 + }, + { + "epoch": 0.8783185840707964, + "grad_norm": 0.09326171875, + "learning_rate": 7.80420353982301e-06, + "loss": 0.7455, + "step": 794 + }, + { + "epoch": 0.879424778761062, + "grad_norm": 
0.09716796875, + "learning_rate": 7.801438053097345e-06, + "loss": 0.7781, + "step": 795 + }, + { + "epoch": 0.8805309734513275, + "grad_norm": 0.09912109375, + "learning_rate": 7.798672566371682e-06, + "loss": 0.806, + "step": 796 + }, + { + "epoch": 0.8816371681415929, + "grad_norm": 0.099609375, + "learning_rate": 7.795907079646019e-06, + "loss": 0.7764, + "step": 797 + }, + { + "epoch": 0.8827433628318584, + "grad_norm": 0.11328125, + "learning_rate": 7.793141592920355e-06, + "loss": 0.8144, + "step": 798 + }, + { + "epoch": 0.8838495575221239, + "grad_norm": 0.09619140625, + "learning_rate": 7.790376106194691e-06, + "loss": 0.7941, + "step": 799 + }, + { + "epoch": 0.8849557522123894, + "grad_norm": 0.09375, + "learning_rate": 7.787610619469027e-06, + "loss": 0.792, + "step": 800 + }, + { + "epoch": 0.8860619469026548, + "grad_norm": 0.13671875, + "learning_rate": 7.784845132743363e-06, + "loss": 0.8884, + "step": 801 + }, + { + "epoch": 0.8871681415929203, + "grad_norm": 0.10107421875, + "learning_rate": 7.7820796460177e-06, + "loss": 0.7957, + "step": 802 + }, + { + "epoch": 0.8882743362831859, + "grad_norm": 0.0947265625, + "learning_rate": 7.779314159292037e-06, + "loss": 0.7722, + "step": 803 + }, + { + "epoch": 0.8893805309734514, + "grad_norm": 0.09765625, + "learning_rate": 7.776548672566371e-06, + "loss": 0.7912, + "step": 804 + }, + { + "epoch": 0.8904867256637168, + "grad_norm": 0.103515625, + "learning_rate": 7.77378318584071e-06, + "loss": 0.7921, + "step": 805 + }, + { + "epoch": 0.8915929203539823, + "grad_norm": 0.09619140625, + "learning_rate": 7.771017699115044e-06, + "loss": 0.7901, + "step": 806 + }, + { + "epoch": 0.8926991150442478, + "grad_norm": 0.0986328125, + "learning_rate": 7.768252212389382e-06, + "loss": 0.7974, + "step": 807 + }, + { + "epoch": 0.8938053097345132, + "grad_norm": 0.09375, + "learning_rate": 7.765486725663718e-06, + "loss": 0.7852, + "step": 808 + }, + { + "epoch": 0.8949115044247787, + "grad_norm": 0.1650390625, + "learning_rate": 7.762721238938054e-06, + "loss": 0.8432, + "step": 809 + }, + { + "epoch": 0.8960176991150443, + "grad_norm": 0.11279296875, + "learning_rate": 7.75995575221239e-06, + "loss": 0.7849, + "step": 810 + }, + { + "epoch": 0.8971238938053098, + "grad_norm": 0.0966796875, + "learning_rate": 7.757190265486726e-06, + "loss": 0.7753, + "step": 811 + }, + { + "epoch": 0.8982300884955752, + "grad_norm": 0.099609375, + "learning_rate": 7.754424778761062e-06, + "loss": 0.7861, + "step": 812 + }, + { + "epoch": 0.8993362831858407, + "grad_norm": 0.09228515625, + "learning_rate": 7.751659292035398e-06, + "loss": 0.785, + "step": 813 + }, + { + "epoch": 0.9004424778761062, + "grad_norm": 0.10205078125, + "learning_rate": 7.748893805309734e-06, + "loss": 0.7624, + "step": 814 + }, + { + "epoch": 0.9015486725663717, + "grad_norm": 0.099609375, + "learning_rate": 7.746128318584072e-06, + "loss": 0.7716, + "step": 815 + }, + { + "epoch": 0.9026548672566371, + "grad_norm": 0.10791015625, + "learning_rate": 7.743362831858407e-06, + "loss": 0.7948, + "step": 816 + }, + { + "epoch": 0.9037610619469026, + "grad_norm": 0.1025390625, + "learning_rate": 7.740597345132745e-06, + "loss": 0.7978, + "step": 817 + }, + { + "epoch": 0.9048672566371682, + "grad_norm": 0.1044921875, + "learning_rate": 7.73783185840708e-06, + "loss": 0.8226, + "step": 818 + }, + { + "epoch": 0.9059734513274337, + "grad_norm": 0.1025390625, + "learning_rate": 7.735066371681417e-06, + "loss": 0.8104, + "step": 819 + }, + { + "epoch": 0.9070796460176991, + 
"grad_norm": 0.11083984375, + "learning_rate": 7.732300884955753e-06, + "loss": 0.7997, + "step": 820 + }, + { + "epoch": 0.9081858407079646, + "grad_norm": 0.10009765625, + "learning_rate": 7.729535398230089e-06, + "loss": 0.7535, + "step": 821 + }, + { + "epoch": 0.9092920353982301, + "grad_norm": 0.10986328125, + "learning_rate": 7.726769911504425e-06, + "loss": 0.8201, + "step": 822 + }, + { + "epoch": 0.9103982300884956, + "grad_norm": 0.09326171875, + "learning_rate": 7.724004424778761e-06, + "loss": 0.7601, + "step": 823 + }, + { + "epoch": 0.911504424778761, + "grad_norm": 0.103515625, + "learning_rate": 7.721238938053099e-06, + "loss": 0.7835, + "step": 824 + }, + { + "epoch": 0.9126106194690266, + "grad_norm": 0.10302734375, + "learning_rate": 7.718473451327434e-06, + "loss": 0.8018, + "step": 825 + }, + { + "epoch": 0.9137168141592921, + "grad_norm": 0.1552734375, + "learning_rate": 7.715707964601771e-06, + "loss": 0.8618, + "step": 826 + }, + { + "epoch": 0.9148230088495575, + "grad_norm": 0.10546875, + "learning_rate": 7.712942477876106e-06, + "loss": 0.7904, + "step": 827 + }, + { + "epoch": 0.915929203539823, + "grad_norm": 0.0927734375, + "learning_rate": 7.710176991150444e-06, + "loss": 0.7919, + "step": 828 + }, + { + "epoch": 0.9170353982300885, + "grad_norm": 0.10400390625, + "learning_rate": 7.70741150442478e-06, + "loss": 0.7709, + "step": 829 + }, + { + "epoch": 0.918141592920354, + "grad_norm": 0.0966796875, + "learning_rate": 7.704646017699116e-06, + "loss": 0.7925, + "step": 830 + }, + { + "epoch": 0.9192477876106194, + "grad_norm": 0.09716796875, + "learning_rate": 7.701880530973452e-06, + "loss": 0.7625, + "step": 831 + }, + { + "epoch": 0.9203539823008849, + "grad_norm": 0.10888671875, + "learning_rate": 7.699115044247788e-06, + "loss": 0.8081, + "step": 832 + }, + { + "epoch": 0.9214601769911505, + "grad_norm": 0.10107421875, + "learning_rate": 7.696349557522124e-06, + "loss": 0.7613, + "step": 833 + }, + { + "epoch": 0.922566371681416, + "grad_norm": 0.09130859375, + "learning_rate": 7.69358407079646e-06, + "loss": 0.7466, + "step": 834 + }, + { + "epoch": 0.9236725663716814, + "grad_norm": 0.1025390625, + "learning_rate": 7.690818584070796e-06, + "loss": 0.7835, + "step": 835 + }, + { + "epoch": 0.9247787610619469, + "grad_norm": 0.10107421875, + "learning_rate": 7.688053097345133e-06, + "loss": 0.7788, + "step": 836 + }, + { + "epoch": 0.9258849557522124, + "grad_norm": 0.11181640625, + "learning_rate": 7.68528761061947e-06, + "loss": 0.7883, + "step": 837 + }, + { + "epoch": 0.9269911504424779, + "grad_norm": 0.0986328125, + "learning_rate": 7.682522123893807e-06, + "loss": 0.8219, + "step": 838 + }, + { + "epoch": 0.9280973451327433, + "grad_norm": 0.09228515625, + "learning_rate": 7.679756637168143e-06, + "loss": 0.7471, + "step": 839 + }, + { + "epoch": 0.9292035398230089, + "grad_norm": 0.11962890625, + "learning_rate": 7.676991150442479e-06, + "loss": 0.8081, + "step": 840 + }, + { + "epoch": 0.9303097345132744, + "grad_norm": 0.09228515625, + "learning_rate": 7.674225663716815e-06, + "loss": 0.738, + "step": 841 + }, + { + "epoch": 0.9314159292035398, + "grad_norm": 0.099609375, + "learning_rate": 7.671460176991151e-06, + "loss": 0.7876, + "step": 842 + }, + { + "epoch": 0.9325221238938053, + "grad_norm": 0.08984375, + "learning_rate": 7.668694690265487e-06, + "loss": 0.7718, + "step": 843 + }, + { + "epoch": 0.9336283185840708, + "grad_norm": 0.09765625, + "learning_rate": 7.665929203539823e-06, + "loss": 0.8037, + "step": 844 + }, + { + "epoch": 
0.9347345132743363, + "grad_norm": 0.10693359375, + "learning_rate": 7.663163716814161e-06, + "loss": 0.8095, + "step": 845 + }, + { + "epoch": 0.9358407079646017, + "grad_norm": 0.1005859375, + "learning_rate": 7.660398230088496e-06, + "loss": 0.7987, + "step": 846 + }, + { + "epoch": 0.9369469026548672, + "grad_norm": 0.1181640625, + "learning_rate": 7.657632743362833e-06, + "loss": 0.8356, + "step": 847 + }, + { + "epoch": 0.9380530973451328, + "grad_norm": 0.1171875, + "learning_rate": 7.654867256637168e-06, + "loss": 0.7923, + "step": 848 + }, + { + "epoch": 0.9391592920353983, + "grad_norm": 0.091796875, + "learning_rate": 7.652101769911506e-06, + "loss": 0.7622, + "step": 849 + }, + { + "epoch": 0.9402654867256637, + "grad_norm": 0.10400390625, + "learning_rate": 7.64933628318584e-06, + "loss": 0.7848, + "step": 850 + }, + { + "epoch": 0.9413716814159292, + "grad_norm": 0.0888671875, + "learning_rate": 7.646570796460178e-06, + "loss": 0.7463, + "step": 851 + }, + { + "epoch": 0.9424778761061947, + "grad_norm": 0.10009765625, + "learning_rate": 7.643805309734514e-06, + "loss": 0.7997, + "step": 852 + }, + { + "epoch": 0.9435840707964602, + "grad_norm": 0.1259765625, + "learning_rate": 7.64103982300885e-06, + "loss": 0.7875, + "step": 853 + }, + { + "epoch": 0.9446902654867256, + "grad_norm": 0.1015625, + "learning_rate": 7.638274336283186e-06, + "loss": 0.7782, + "step": 854 + }, + { + "epoch": 0.9457964601769911, + "grad_norm": 0.1142578125, + "learning_rate": 7.635508849557522e-06, + "loss": 0.8038, + "step": 855 + }, + { + "epoch": 0.9469026548672567, + "grad_norm": 0.095703125, + "learning_rate": 7.632743362831859e-06, + "loss": 0.7642, + "step": 856 + }, + { + "epoch": 0.9480088495575221, + "grad_norm": 0.12890625, + "learning_rate": 7.629977876106195e-06, + "loss": 0.7684, + "step": 857 + }, + { + "epoch": 0.9491150442477876, + "grad_norm": 0.09423828125, + "learning_rate": 7.627212389380532e-06, + "loss": 0.7846, + "step": 858 + }, + { + "epoch": 0.9502212389380531, + "grad_norm": 0.1220703125, + "learning_rate": 7.624446902654869e-06, + "loss": 0.8284, + "step": 859 + }, + { + "epoch": 0.9513274336283186, + "grad_norm": 0.08984375, + "learning_rate": 7.621681415929204e-06, + "loss": 0.7767, + "step": 860 + }, + { + "epoch": 0.952433628318584, + "grad_norm": 0.1123046875, + "learning_rate": 7.618915929203541e-06, + "loss": 0.8293, + "step": 861 + }, + { + "epoch": 0.9535398230088495, + "grad_norm": 0.10400390625, + "learning_rate": 7.616150442477876e-06, + "loss": 0.7763, + "step": 862 + }, + { + "epoch": 0.9546460176991151, + "grad_norm": 0.09130859375, + "learning_rate": 7.613384955752213e-06, + "loss": 0.7531, + "step": 863 + }, + { + "epoch": 0.9557522123893806, + "grad_norm": 0.10546875, + "learning_rate": 7.610619469026549e-06, + "loss": 0.7928, + "step": 864 + }, + { + "epoch": 0.956858407079646, + "grad_norm": 0.111328125, + "learning_rate": 7.607853982300885e-06, + "loss": 0.7837, + "step": 865 + }, + { + "epoch": 0.9579646017699115, + "grad_norm": 0.09326171875, + "learning_rate": 7.6050884955752215e-06, + "loss": 0.7642, + "step": 866 + }, + { + "epoch": 0.959070796460177, + "grad_norm": 0.0986328125, + "learning_rate": 7.6023230088495584e-06, + "loss": 0.8056, + "step": 867 + }, + { + "epoch": 0.9601769911504425, + "grad_norm": 0.1298828125, + "learning_rate": 7.5995575221238946e-06, + "loss": 0.789, + "step": 868 + }, + { + "epoch": 0.9612831858407079, + "grad_norm": 0.1064453125, + "learning_rate": 7.596792035398231e-06, + "loss": 0.7686, + "step": 869 + }, + { + 
"epoch": 0.9623893805309734, + "grad_norm": 0.10302734375, + "learning_rate": 7.594026548672568e-06, + "loss": 0.7833, + "step": 870 + }, + { + "epoch": 0.963495575221239, + "grad_norm": 0.10791015625, + "learning_rate": 7.591261061946903e-06, + "loss": 0.8023, + "step": 871 + }, + { + "epoch": 0.9646017699115044, + "grad_norm": 0.1298828125, + "learning_rate": 7.58849557522124e-06, + "loss": 0.8321, + "step": 872 + }, + { + "epoch": 0.9657079646017699, + "grad_norm": 0.140625, + "learning_rate": 7.585730088495575e-06, + "loss": 0.772, + "step": 873 + }, + { + "epoch": 0.9668141592920354, + "grad_norm": 0.0986328125, + "learning_rate": 7.582964601769912e-06, + "loss": 0.7741, + "step": 874 + }, + { + "epoch": 0.9679203539823009, + "grad_norm": 0.10400390625, + "learning_rate": 7.580199115044249e-06, + "loss": 0.7791, + "step": 875 + }, + { + "epoch": 0.9690265486725663, + "grad_norm": 0.09375, + "learning_rate": 7.5774336283185844e-06, + "loss": 0.7798, + "step": 876 + }, + { + "epoch": 0.9701327433628318, + "grad_norm": 0.09619140625, + "learning_rate": 7.574668141592921e-06, + "loss": 0.7987, + "step": 877 + }, + { + "epoch": 0.9712389380530974, + "grad_norm": 0.1044921875, + "learning_rate": 7.571902654867257e-06, + "loss": 0.7682, + "step": 878 + }, + { + "epoch": 0.9723451327433629, + "grad_norm": 0.111328125, + "learning_rate": 7.569137168141594e-06, + "loss": 0.8299, + "step": 879 + }, + { + "epoch": 0.9734513274336283, + "grad_norm": 0.10986328125, + "learning_rate": 7.56637168141593e-06, + "loss": 0.7879, + "step": 880 + }, + { + "epoch": 0.9745575221238938, + "grad_norm": 0.10302734375, + "learning_rate": 7.563606194690266e-06, + "loss": 0.7566, + "step": 881 + }, + { + "epoch": 0.9756637168141593, + "grad_norm": 0.1376953125, + "learning_rate": 7.560840707964603e-06, + "loss": 0.772, + "step": 882 + }, + { + "epoch": 0.9767699115044248, + "grad_norm": 0.10205078125, + "learning_rate": 7.558075221238938e-06, + "loss": 0.7672, + "step": 883 + }, + { + "epoch": 0.9778761061946902, + "grad_norm": 0.10986328125, + "learning_rate": 7.555309734513275e-06, + "loss": 0.8005, + "step": 884 + }, + { + "epoch": 0.9789823008849557, + "grad_norm": 0.095703125, + "learning_rate": 7.552544247787611e-06, + "loss": 0.7374, + "step": 885 + }, + { + "epoch": 0.9800884955752213, + "grad_norm": 0.1015625, + "learning_rate": 7.549778761061947e-06, + "loss": 0.7848, + "step": 886 + }, + { + "epoch": 0.9811946902654868, + "grad_norm": 0.1103515625, + "learning_rate": 7.5470132743362835e-06, + "loss": 0.8065, + "step": 887 + }, + { + "epoch": 0.9823008849557522, + "grad_norm": 0.12109375, + "learning_rate": 7.5442477876106205e-06, + "loss": 0.781, + "step": 888 + }, + { + "epoch": 0.9834070796460177, + "grad_norm": 0.107421875, + "learning_rate": 7.541482300884957e-06, + "loss": 0.7816, + "step": 889 + }, + { + "epoch": 0.9845132743362832, + "grad_norm": 0.10595703125, + "learning_rate": 7.538716814159293e-06, + "loss": 0.8076, + "step": 890 + }, + { + "epoch": 0.9856194690265486, + "grad_norm": 0.10009765625, + "learning_rate": 7.53595132743363e-06, + "loss": 0.7779, + "step": 891 + }, + { + "epoch": 0.9867256637168141, + "grad_norm": 0.09521484375, + "learning_rate": 7.533185840707965e-06, + "loss": 0.7624, + "step": 892 + }, + { + "epoch": 0.9878318584070797, + "grad_norm": 0.109375, + "learning_rate": 7.530420353982302e-06, + "loss": 0.8129, + "step": 893 + }, + { + "epoch": 0.9889380530973452, + "grad_norm": 0.1005859375, + "learning_rate": 7.527654867256637e-06, + "loss": 0.792, + "step": 894 + }, 
+ { + "epoch": 0.9900442477876106, + "grad_norm": 0.1142578125, + "learning_rate": 7.524889380530974e-06, + "loss": 0.789, + "step": 895 + }, + { + "epoch": 0.9911504424778761, + "grad_norm": 0.10791015625, + "learning_rate": 7.5221238938053095e-06, + "loss": 0.8057, + "step": 896 + }, + { + "epoch": 0.9922566371681416, + "grad_norm": 0.10302734375, + "learning_rate": 7.5193584070796465e-06, + "loss": 0.8332, + "step": 897 + }, + { + "epoch": 0.9933628318584071, + "grad_norm": 0.099609375, + "learning_rate": 7.5165929203539834e-06, + "loss": 0.803, + "step": 898 + }, + { + "epoch": 0.9944690265486725, + "grad_norm": 0.11865234375, + "learning_rate": 7.513827433628319e-06, + "loss": 0.7568, + "step": 899 + }, + { + "epoch": 0.995575221238938, + "grad_norm": 0.1025390625, + "learning_rate": 7.511061946902656e-06, + "loss": 0.7369, + "step": 900 + }, + { + "epoch": 0.9966814159292036, + "grad_norm": 0.11181640625, + "learning_rate": 7.508296460176992e-06, + "loss": 0.8018, + "step": 901 + }, + { + "epoch": 0.9977876106194691, + "grad_norm": 0.09814453125, + "learning_rate": 7.505530973451328e-06, + "loss": 0.7925, + "step": 902 + }, + { + "epoch": 0.9988938053097345, + "grad_norm": 0.107421875, + "learning_rate": 7.502765486725664e-06, + "loss": 0.7932, + "step": 903 + }, + { + "epoch": 1.0, + "grad_norm": 0.1259765625, + "learning_rate": 7.500000000000001e-06, + "loss": 0.7496, + "step": 904 + }, + { + "epoch": 1.0011061946902655, + "grad_norm": 0.10205078125, + "learning_rate": 7.497234513274337e-06, + "loss": 0.7853, + "step": 905 + }, + { + "epoch": 1.002212389380531, + "grad_norm": 0.095703125, + "learning_rate": 7.494469026548673e-06, + "loss": 0.7467, + "step": 906 + }, + { + "epoch": 1.0033185840707965, + "grad_norm": 0.10693359375, + "learning_rate": 7.491703539823009e-06, + "loss": 0.7909, + "step": 907 + }, + { + "epoch": 1.0044247787610618, + "grad_norm": 0.10595703125, + "learning_rate": 7.4889380530973455e-06, + "loss": 0.8124, + "step": 908 + }, + { + "epoch": 1.0055309734513274, + "grad_norm": 0.10107421875, + "learning_rate": 7.4861725663716825e-06, + "loss": 0.7509, + "step": 909 + }, + { + "epoch": 1.0066371681415929, + "grad_norm": 0.099609375, + "learning_rate": 7.483407079646018e-06, + "loss": 0.757, + "step": 910 + }, + { + "epoch": 1.0077433628318584, + "grad_norm": 0.10546875, + "learning_rate": 7.480641592920355e-06, + "loss": 0.8034, + "step": 911 + }, + { + "epoch": 1.008849557522124, + "grad_norm": 0.0966796875, + "learning_rate": 7.477876106194692e-06, + "loss": 0.7688, + "step": 912 + }, + { + "epoch": 1.0099557522123894, + "grad_norm": 0.1142578125, + "learning_rate": 7.475110619469027e-06, + "loss": 0.8436, + "step": 913 + }, + { + "epoch": 1.011061946902655, + "grad_norm": 0.1025390625, + "learning_rate": 7.472345132743364e-06, + "loss": 0.7479, + "step": 914 + }, + { + "epoch": 1.0121681415929205, + "grad_norm": 0.1044921875, + "learning_rate": 7.469579646017699e-06, + "loss": 0.7936, + "step": 915 + }, + { + "epoch": 1.0132743362831858, + "grad_norm": 0.10498046875, + "learning_rate": 7.466814159292036e-06, + "loss": 0.7491, + "step": 916 + }, + { + "epoch": 1.0143805309734513, + "grad_norm": 0.09765625, + "learning_rate": 7.4640486725663715e-06, + "loss": 0.7553, + "step": 917 + }, + { + "epoch": 1.0154867256637168, + "grad_norm": 0.10107421875, + "learning_rate": 7.4612831858407085e-06, + "loss": 0.766, + "step": 918 + }, + { + "epoch": 1.0165929203539823, + "grad_norm": 0.1015625, + "learning_rate": 7.458517699115045e-06, + "loss": 0.7985, + "step": 919 
+ }, + { + "epoch": 1.0176991150442478, + "grad_norm": 0.1669921875, + "learning_rate": 7.455752212389381e-06, + "loss": 0.7635, + "step": 920 + }, + { + "epoch": 1.0188053097345133, + "grad_norm": 0.1142578125, + "learning_rate": 7.452986725663718e-06, + "loss": 0.7886, + "step": 921 + }, + { + "epoch": 1.0199115044247788, + "grad_norm": 0.091796875, + "learning_rate": 7.450221238938054e-06, + "loss": 0.7448, + "step": 922 + }, + { + "epoch": 1.0210176991150441, + "grad_norm": 0.1025390625, + "learning_rate": 7.44745575221239e-06, + "loss": 0.7628, + "step": 923 + }, + { + "epoch": 1.0221238938053097, + "grad_norm": 0.0966796875, + "learning_rate": 7.444690265486726e-06, + "loss": 0.7261, + "step": 924 + }, + { + "epoch": 1.0232300884955752, + "grad_norm": 0.09765625, + "learning_rate": 7.441924778761063e-06, + "loss": 0.7558, + "step": 925 + }, + { + "epoch": 1.0243362831858407, + "grad_norm": 0.1376953125, + "learning_rate": 7.439159292035398e-06, + "loss": 0.8527, + "step": 926 + }, + { + "epoch": 1.0254424778761062, + "grad_norm": 0.1064453125, + "learning_rate": 7.436393805309735e-06, + "loss": 0.8012, + "step": 927 + }, + { + "epoch": 1.0265486725663717, + "grad_norm": 0.0986328125, + "learning_rate": 7.4336283185840714e-06, + "loss": 0.7719, + "step": 928 + }, + { + "epoch": 1.0276548672566372, + "grad_norm": 0.11962890625, + "learning_rate": 7.4308628318584076e-06, + "loss": 0.8055, + "step": 929 + }, + { + "epoch": 1.0287610619469028, + "grad_norm": 0.10791015625, + "learning_rate": 7.4280973451327445e-06, + "loss": 0.7986, + "step": 930 + }, + { + "epoch": 1.029867256637168, + "grad_norm": 0.09521484375, + "learning_rate": 7.42533185840708e-06, + "loss": 0.753, + "step": 931 + }, + { + "epoch": 1.0309734513274336, + "grad_norm": 0.1025390625, + "learning_rate": 7.422566371681417e-06, + "loss": 0.7751, + "step": 932 + }, + { + "epoch": 1.032079646017699, + "grad_norm": 0.09765625, + "learning_rate": 7.419800884955752e-06, + "loss": 0.7807, + "step": 933 + }, + { + "epoch": 1.0331858407079646, + "grad_norm": 0.10107421875, + "learning_rate": 7.417035398230089e-06, + "loss": 0.7548, + "step": 934 + }, + { + "epoch": 1.0342920353982301, + "grad_norm": 0.1171875, + "learning_rate": 7.414269911504426e-06, + "loss": 0.8274, + "step": 935 + }, + { + "epoch": 1.0353982300884956, + "grad_norm": 0.126953125, + "learning_rate": 7.411504424778761e-06, + "loss": 0.8909, + "step": 936 + }, + { + "epoch": 1.0365044247787611, + "grad_norm": 0.109375, + "learning_rate": 7.408738938053098e-06, + "loss": 0.7474, + "step": 937 + }, + { + "epoch": 1.0376106194690267, + "grad_norm": 0.1005859375, + "learning_rate": 7.4059734513274336e-06, + "loss": 0.813, + "step": 938 + }, + { + "epoch": 1.038716814159292, + "grad_norm": 0.1025390625, + "learning_rate": 7.4032079646017705e-06, + "loss": 0.7967, + "step": 939 + }, + { + "epoch": 1.0398230088495575, + "grad_norm": 0.1162109375, + "learning_rate": 7.400442477876107e-06, + "loss": 0.8367, + "step": 940 + }, + { + "epoch": 1.040929203539823, + "grad_norm": 0.1083984375, + "learning_rate": 7.397676991150443e-06, + "loss": 0.7872, + "step": 941 + }, + { + "epoch": 1.0420353982300885, + "grad_norm": 0.09619140625, + "learning_rate": 7.39491150442478e-06, + "loss": 0.7758, + "step": 942 + }, + { + "epoch": 1.043141592920354, + "grad_norm": 0.0927734375, + "learning_rate": 7.392146017699116e-06, + "loss": 0.7607, + "step": 943 + }, + { + "epoch": 1.0442477876106195, + "grad_norm": 0.0947265625, + "learning_rate": 7.389380530973452e-06, + "loss": 0.7459, + 
"step": 944 + }, + { + "epoch": 1.045353982300885, + "grad_norm": 0.10595703125, + "learning_rate": 7.386615044247788e-06, + "loss": 0.7642, + "step": 945 + }, + { + "epoch": 1.0464601769911503, + "grad_norm": 0.130859375, + "learning_rate": 7.383849557522125e-06, + "loss": 0.8003, + "step": 946 + }, + { + "epoch": 1.0475663716814159, + "grad_norm": 0.119140625, + "learning_rate": 7.38108407079646e-06, + "loss": 0.8492, + "step": 947 + }, + { + "epoch": 1.0486725663716814, + "grad_norm": 0.0927734375, + "learning_rate": 7.378318584070797e-06, + "loss": 0.7293, + "step": 948 + }, + { + "epoch": 1.049778761061947, + "grad_norm": 0.1142578125, + "learning_rate": 7.375553097345133e-06, + "loss": 0.7819, + "step": 949 + }, + { + "epoch": 1.0508849557522124, + "grad_norm": 0.140625, + "learning_rate": 7.37278761061947e-06, + "loss": 0.773, + "step": 950 + }, + { + "epoch": 1.051991150442478, + "grad_norm": 0.10400390625, + "learning_rate": 7.370022123893807e-06, + "loss": 0.798, + "step": 951 + }, + { + "epoch": 1.0530973451327434, + "grad_norm": 0.095703125, + "learning_rate": 7.367256637168142e-06, + "loss": 0.7665, + "step": 952 + }, + { + "epoch": 1.0542035398230087, + "grad_norm": 0.107421875, + "learning_rate": 7.364491150442479e-06, + "loss": 0.8183, + "step": 953 + }, + { + "epoch": 1.0553097345132743, + "grad_norm": 0.12451171875, + "learning_rate": 7.361725663716814e-06, + "loss": 0.8717, + "step": 954 + }, + { + "epoch": 1.0564159292035398, + "grad_norm": 0.09912109375, + "learning_rate": 7.358960176991151e-06, + "loss": 0.7684, + "step": 955 + }, + { + "epoch": 1.0575221238938053, + "grad_norm": 0.10009765625, + "learning_rate": 7.356194690265487e-06, + "loss": 0.7748, + "step": 956 + }, + { + "epoch": 1.0586283185840708, + "grad_norm": 0.0966796875, + "learning_rate": 7.353429203539823e-06, + "loss": 0.7566, + "step": 957 + }, + { + "epoch": 1.0597345132743363, + "grad_norm": 0.09326171875, + "learning_rate": 7.35066371681416e-06, + "loss": 0.7521, + "step": 958 + }, + { + "epoch": 1.0608407079646018, + "grad_norm": 0.115234375, + "learning_rate": 7.347898230088496e-06, + "loss": 0.792, + "step": 959 + }, + { + "epoch": 1.0619469026548674, + "grad_norm": 0.1220703125, + "learning_rate": 7.3451327433628326e-06, + "loss": 0.833, + "step": 960 + }, + { + "epoch": 1.0630530973451326, + "grad_norm": 0.09326171875, + "learning_rate": 7.342367256637169e-06, + "loss": 0.7339, + "step": 961 + }, + { + "epoch": 1.0641592920353982, + "grad_norm": 0.0908203125, + "learning_rate": 7.339601769911505e-06, + "loss": 0.7248, + "step": 962 + }, + { + "epoch": 1.0652654867256637, + "grad_norm": 0.10888671875, + "learning_rate": 7.336836283185841e-06, + "loss": 0.8279, + "step": 963 + }, + { + "epoch": 1.0663716814159292, + "grad_norm": 0.103515625, + "learning_rate": 7.334070796460178e-06, + "loss": 0.7756, + "step": 964 + }, + { + "epoch": 1.0674778761061947, + "grad_norm": 0.1123046875, + "learning_rate": 7.331305309734514e-06, + "loss": 0.7877, + "step": 965 + }, + { + "epoch": 1.0685840707964602, + "grad_norm": 0.1337890625, + "learning_rate": 7.32853982300885e-06, + "loss": 0.7643, + "step": 966 + }, + { + "epoch": 1.0696902654867257, + "grad_norm": 0.10693359375, + "learning_rate": 7.325774336283187e-06, + "loss": 0.8051, + "step": 967 + }, + { + "epoch": 1.0707964601769913, + "grad_norm": 0.11767578125, + "learning_rate": 7.323008849557522e-06, + "loss": 0.7831, + "step": 968 + }, + { + "epoch": 1.0719026548672566, + "grad_norm": 0.1064453125, + "learning_rate": 7.320243362831859e-06, + "loss": 
0.7633, + "step": 969 + }, + { + "epoch": 1.073008849557522, + "grad_norm": 0.09375, + "learning_rate": 7.317477876106195e-06, + "loss": 0.7842, + "step": 970 + }, + { + "epoch": 1.0741150442477876, + "grad_norm": 0.1220703125, + "learning_rate": 7.314712389380532e-06, + "loss": 0.7961, + "step": 971 + }, + { + "epoch": 1.075221238938053, + "grad_norm": 0.10205078125, + "learning_rate": 7.311946902654869e-06, + "loss": 0.7743, + "step": 972 + }, + { + "epoch": 1.0763274336283186, + "grad_norm": 0.10986328125, + "learning_rate": 7.309181415929204e-06, + "loss": 0.7785, + "step": 973 + }, + { + "epoch": 1.0774336283185841, + "grad_norm": 0.10595703125, + "learning_rate": 7.306415929203541e-06, + "loss": 0.7534, + "step": 974 + }, + { + "epoch": 1.0785398230088497, + "grad_norm": 0.1064453125, + "learning_rate": 7.303650442477876e-06, + "loss": 0.756, + "step": 975 + }, + { + "epoch": 1.079646017699115, + "grad_norm": 0.1103515625, + "learning_rate": 7.300884955752213e-06, + "loss": 0.7888, + "step": 976 + }, + { + "epoch": 1.0807522123893805, + "grad_norm": 0.09423828125, + "learning_rate": 7.298119469026549e-06, + "loss": 0.738, + "step": 977 + }, + { + "epoch": 1.081858407079646, + "grad_norm": 0.11474609375, + "learning_rate": 7.295353982300885e-06, + "loss": 0.8398, + "step": 978 + }, + { + "epoch": 1.0829646017699115, + "grad_norm": 0.11962890625, + "learning_rate": 7.2925884955752215e-06, + "loss": 0.8137, + "step": 979 + }, + { + "epoch": 1.084070796460177, + "grad_norm": 0.09716796875, + "learning_rate": 7.2898230088495585e-06, + "loss": 0.785, + "step": 980 + }, + { + "epoch": 1.0851769911504425, + "grad_norm": 0.0966796875, + "learning_rate": 7.287057522123895e-06, + "loss": 0.755, + "step": 981 + }, + { + "epoch": 1.086283185840708, + "grad_norm": 0.10791015625, + "learning_rate": 7.284292035398231e-06, + "loss": 0.8098, + "step": 982 + }, + { + "epoch": 1.0873893805309733, + "grad_norm": 0.10986328125, + "learning_rate": 7.281526548672567e-06, + "loss": 0.796, + "step": 983 + }, + { + "epoch": 1.0884955752212389, + "grad_norm": 0.11181640625, + "learning_rate": 7.278761061946903e-06, + "loss": 0.7861, + "step": 984 + }, + { + "epoch": 1.0896017699115044, + "grad_norm": 0.10595703125, + "learning_rate": 7.27599557522124e-06, + "loss": 0.7847, + "step": 985 + }, + { + "epoch": 1.0907079646017699, + "grad_norm": 0.09814453125, + "learning_rate": 7.273230088495575e-06, + "loss": 0.7623, + "step": 986 + }, + { + "epoch": 1.0918141592920354, + "grad_norm": 0.10498046875, + "learning_rate": 7.270464601769912e-06, + "loss": 0.7581, + "step": 987 + }, + { + "epoch": 1.092920353982301, + "grad_norm": 0.08935546875, + "learning_rate": 7.267699115044249e-06, + "loss": 0.7309, + "step": 988 + }, + { + "epoch": 1.0940265486725664, + "grad_norm": 0.10595703125, + "learning_rate": 7.2649336283185845e-06, + "loss": 0.7691, + "step": 989 + }, + { + "epoch": 1.095132743362832, + "grad_norm": 0.10693359375, + "learning_rate": 7.2621681415929214e-06, + "loss": 0.7982, + "step": 990 + }, + { + "epoch": 1.0962389380530972, + "grad_norm": 0.10986328125, + "learning_rate": 7.259402654867257e-06, + "loss": 0.7879, + "step": 991 + }, + { + "epoch": 1.0973451327433628, + "grad_norm": 0.08935546875, + "learning_rate": 7.256637168141594e-06, + "loss": 0.7759, + "step": 992 + }, + { + "epoch": 1.0984513274336283, + "grad_norm": 0.1025390625, + "learning_rate": 7.253871681415929e-06, + "loss": 0.7768, + "step": 993 + }, + { + "epoch": 1.0995575221238938, + "grad_norm": 0.10498046875, + "learning_rate": 
7.251106194690266e-06, + "loss": 0.7541, + "step": 994 + }, + { + "epoch": 1.1006637168141593, + "grad_norm": 0.11572265625, + "learning_rate": 7.248340707964603e-06, + "loss": 0.7765, + "step": 995 + }, + { + "epoch": 1.1017699115044248, + "grad_norm": 0.1005859375, + "learning_rate": 7.245575221238938e-06, + "loss": 0.7628, + "step": 996 + }, + { + "epoch": 1.1028761061946903, + "grad_norm": 0.09814453125, + "learning_rate": 7.242809734513275e-06, + "loss": 0.7752, + "step": 997 + }, + { + "epoch": 1.1039823008849559, + "grad_norm": 0.154296875, + "learning_rate": 7.240044247787611e-06, + "loss": 0.7609, + "step": 998 + }, + { + "epoch": 1.1050884955752212, + "grad_norm": 0.11572265625, + "learning_rate": 7.237278761061947e-06, + "loss": 0.8013, + "step": 999 + }, + { + "epoch": 1.1061946902654867, + "grad_norm": 0.1025390625, + "learning_rate": 7.2345132743362835e-06, + "loss": 0.7972, + "step": 1000 + }, + { + "epoch": 1.1073008849557522, + "grad_norm": 0.11474609375, + "learning_rate": 7.2317477876106205e-06, + "loss": 0.8247, + "step": 1001 + }, + { + "epoch": 1.1084070796460177, + "grad_norm": 0.0966796875, + "learning_rate": 7.228982300884957e-06, + "loss": 0.7551, + "step": 1002 + }, + { + "epoch": 1.1095132743362832, + "grad_norm": 0.1103515625, + "learning_rate": 7.226216814159293e-06, + "loss": 0.8104, + "step": 1003 + }, + { + "epoch": 1.1106194690265487, + "grad_norm": 0.10888671875, + "learning_rate": 7.22345132743363e-06, + "loss": 0.7784, + "step": 1004 + }, + { + "epoch": 1.1117256637168142, + "grad_norm": 0.10595703125, + "learning_rate": 7.220685840707965e-06, + "loss": 0.8033, + "step": 1005 + }, + { + "epoch": 1.1128318584070795, + "grad_norm": 0.109375, + "learning_rate": 7.217920353982302e-06, + "loss": 0.7409, + "step": 1006 + }, + { + "epoch": 1.113938053097345, + "grad_norm": 0.099609375, + "learning_rate": 7.215154867256637e-06, + "loss": 0.7638, + "step": 1007 + }, + { + "epoch": 1.1150442477876106, + "grad_norm": 0.1015625, + "learning_rate": 7.212389380530974e-06, + "loss": 0.762, + "step": 1008 + }, + { + "epoch": 1.116150442477876, + "grad_norm": 0.09619140625, + "learning_rate": 7.2096238938053095e-06, + "loss": 0.7857, + "step": 1009 + }, + { + "epoch": 1.1172566371681416, + "grad_norm": 0.1025390625, + "learning_rate": 7.2068584070796465e-06, + "loss": 0.7387, + "step": 1010 + }, + { + "epoch": 1.1183628318584071, + "grad_norm": 0.10595703125, + "learning_rate": 7.2040929203539835e-06, + "loss": 0.7699, + "step": 1011 + }, + { + "epoch": 1.1194690265486726, + "grad_norm": 0.0986328125, + "learning_rate": 7.201327433628319e-06, + "loss": 0.7591, + "step": 1012 + }, + { + "epoch": 1.120575221238938, + "grad_norm": 0.1162109375, + "learning_rate": 7.198561946902656e-06, + "loss": 0.8649, + "step": 1013 + }, + { + "epoch": 1.1216814159292035, + "grad_norm": 0.09619140625, + "learning_rate": 7.195796460176991e-06, + "loss": 0.7655, + "step": 1014 + }, + { + "epoch": 1.122787610619469, + "grad_norm": 0.0966796875, + "learning_rate": 7.193030973451328e-06, + "loss": 0.7609, + "step": 1015 + }, + { + "epoch": 1.1238938053097345, + "grad_norm": 0.11572265625, + "learning_rate": 7.190265486725664e-06, + "loss": 0.7901, + "step": 1016 + }, + { + "epoch": 1.125, + "grad_norm": 0.1357421875, + "learning_rate": 7.1875e-06, + "loss": 0.8171, + "step": 1017 + }, + { + "epoch": 1.1261061946902655, + "grad_norm": 0.11279296875, + "learning_rate": 7.184734513274337e-06, + "loss": 0.742, + "step": 1018 + }, + { + "epoch": 1.127212389380531, + "grad_norm": 0.1025390625, + 
"learning_rate": 7.181969026548673e-06, + "loss": 0.7832, + "step": 1019 + }, + { + "epoch": 1.1283185840707965, + "grad_norm": 0.11083984375, + "learning_rate": 7.1792035398230094e-06, + "loss": 0.7978, + "step": 1020 + }, + { + "epoch": 1.129424778761062, + "grad_norm": 0.1162109375, + "learning_rate": 7.1764380530973456e-06, + "loss": 0.7898, + "step": 1021 + }, + { + "epoch": 1.1305309734513274, + "grad_norm": 0.10693359375, + "learning_rate": 7.1736725663716825e-06, + "loss": 0.7851, + "step": 1022 + }, + { + "epoch": 1.1316371681415929, + "grad_norm": 0.12255859375, + "learning_rate": 7.170907079646018e-06, + "loss": 0.8118, + "step": 1023 + }, + { + "epoch": 1.1327433628318584, + "grad_norm": 0.1005859375, + "learning_rate": 7.168141592920355e-06, + "loss": 0.7745, + "step": 1024 + }, + { + "epoch": 1.133849557522124, + "grad_norm": 0.146484375, + "learning_rate": 7.165376106194692e-06, + "loss": 0.7858, + "step": 1025 + }, + { + "epoch": 1.1349557522123894, + "grad_norm": 0.11572265625, + "learning_rate": 7.162610619469027e-06, + "loss": 0.7637, + "step": 1026 + }, + { + "epoch": 1.136061946902655, + "grad_norm": 0.10595703125, + "learning_rate": 7.159845132743364e-06, + "loss": 0.7615, + "step": 1027 + }, + { + "epoch": 1.1371681415929205, + "grad_norm": 0.11181640625, + "learning_rate": 7.157079646017699e-06, + "loss": 0.803, + "step": 1028 + }, + { + "epoch": 1.1382743362831858, + "grad_norm": 0.10693359375, + "learning_rate": 7.154314159292036e-06, + "loss": 0.7915, + "step": 1029 + }, + { + "epoch": 1.1393805309734513, + "grad_norm": 0.11474609375, + "learning_rate": 7.1515486725663715e-06, + "loss": 0.7648, + "step": 1030 + }, + { + "epoch": 1.1404867256637168, + "grad_norm": 0.08642578125, + "learning_rate": 7.1487831858407085e-06, + "loss": 0.7349, + "step": 1031 + }, + { + "epoch": 1.1415929203539823, + "grad_norm": 0.1064453125, + "learning_rate": 7.146017699115045e-06, + "loss": 0.7658, + "step": 1032 + }, + { + "epoch": 1.1426991150442478, + "grad_norm": 0.1357421875, + "learning_rate": 7.143252212389381e-06, + "loss": 0.8175, + "step": 1033 + }, + { + "epoch": 1.1438053097345133, + "grad_norm": 0.09423828125, + "learning_rate": 7.140486725663718e-06, + "loss": 0.7667, + "step": 1034 + }, + { + "epoch": 1.1449115044247788, + "grad_norm": 0.0947265625, + "learning_rate": 7.137721238938054e-06, + "loss": 0.7798, + "step": 1035 + }, + { + "epoch": 1.1460176991150441, + "grad_norm": 0.103515625, + "learning_rate": 7.13495575221239e-06, + "loss": 0.7808, + "step": 1036 + }, + { + "epoch": 1.1471238938053097, + "grad_norm": 0.10009765625, + "learning_rate": 7.132190265486726e-06, + "loss": 0.7687, + "step": 1037 + }, + { + "epoch": 1.1482300884955752, + "grad_norm": 0.1064453125, + "learning_rate": 7.129424778761062e-06, + "loss": 0.7826, + "step": 1038 + }, + { + "epoch": 1.1493362831858407, + "grad_norm": 0.1044921875, + "learning_rate": 7.126659292035398e-06, + "loss": 0.7974, + "step": 1039 + }, + { + "epoch": 1.1504424778761062, + "grad_norm": 0.1015625, + "learning_rate": 7.123893805309735e-06, + "loss": 0.7749, + "step": 1040 + }, + { + "epoch": 1.1515486725663717, + "grad_norm": 0.111328125, + "learning_rate": 7.1211283185840715e-06, + "loss": 0.842, + "step": 1041 + }, + { + "epoch": 1.1526548672566372, + "grad_norm": 0.10595703125, + "learning_rate": 7.118362831858408e-06, + "loss": 0.7612, + "step": 1042 + }, + { + "epoch": 1.1537610619469025, + "grad_norm": 0.09765625, + "learning_rate": 7.1155973451327446e-06, + "loss": 0.7619, + "step": 1043 + }, + { + "epoch": 
1.154867256637168, + "grad_norm": 0.11669921875, + "learning_rate": 7.11283185840708e-06, + "loss": 0.8538, + "step": 1044 + }, + { + "epoch": 1.1559734513274336, + "grad_norm": 0.1328125, + "learning_rate": 7.110066371681417e-06, + "loss": 0.8343, + "step": 1045 + }, + { + "epoch": 1.157079646017699, + "grad_norm": 0.1220703125, + "learning_rate": 7.107300884955752e-06, + "loss": 0.8183, + "step": 1046 + }, + { + "epoch": 1.1581858407079646, + "grad_norm": 0.1044921875, + "learning_rate": 7.104535398230089e-06, + "loss": 0.78, + "step": 1047 + }, + { + "epoch": 1.1592920353982301, + "grad_norm": 0.11376953125, + "learning_rate": 7.101769911504426e-06, + "loss": 0.801, + "step": 1048 + }, + { + "epoch": 1.1603982300884956, + "grad_norm": 0.11572265625, + "learning_rate": 7.099004424778761e-06, + "loss": 0.8024, + "step": 1049 + }, + { + "epoch": 1.1615044247787611, + "grad_norm": 0.12109375, + "learning_rate": 7.096238938053098e-06, + "loss": 0.8094, + "step": 1050 + }, + { + "epoch": 1.1626106194690267, + "grad_norm": 0.10791015625, + "learning_rate": 7.093473451327434e-06, + "loss": 0.8084, + "step": 1051 + }, + { + "epoch": 1.163716814159292, + "grad_norm": 0.1123046875, + "learning_rate": 7.0907079646017706e-06, + "loss": 0.7784, + "step": 1052 + }, + { + "epoch": 1.1648230088495575, + "grad_norm": 0.1083984375, + "learning_rate": 7.087942477876107e-06, + "loss": 0.764, + "step": 1053 + }, + { + "epoch": 1.165929203539823, + "grad_norm": 0.10888671875, + "learning_rate": 7.085176991150443e-06, + "loss": 0.7925, + "step": 1054 + }, + { + "epoch": 1.1670353982300885, + "grad_norm": 0.1435546875, + "learning_rate": 7.08241150442478e-06, + "loss": 0.8845, + "step": 1055 + }, + { + "epoch": 1.168141592920354, + "grad_norm": 0.1279296875, + "learning_rate": 7.079646017699116e-06, + "loss": 0.8728, + "step": 1056 + }, + { + "epoch": 1.1692477876106195, + "grad_norm": 0.10986328125, + "learning_rate": 7.076880530973452e-06, + "loss": 0.8038, + "step": 1057 + }, + { + "epoch": 1.170353982300885, + "grad_norm": 0.11083984375, + "learning_rate": 7.074115044247788e-06, + "loss": 0.806, + "step": 1058 + }, + { + "epoch": 1.1714601769911503, + "grad_norm": 0.09326171875, + "learning_rate": 7.071349557522124e-06, + "loss": 0.7484, + "step": 1059 + }, + { + "epoch": 1.1725663716814159, + "grad_norm": 0.09423828125, + "learning_rate": 7.06858407079646e-06, + "loss": 0.7393, + "step": 1060 + }, + { + "epoch": 1.1736725663716814, + "grad_norm": 0.09228515625, + "learning_rate": 7.065818584070797e-06, + "loss": 0.7423, + "step": 1061 + }, + { + "epoch": 1.174778761061947, + "grad_norm": 0.10546875, + "learning_rate": 7.063053097345133e-06, + "loss": 0.786, + "step": 1062 + }, + { + "epoch": 1.1758849557522124, + "grad_norm": 0.09228515625, + "learning_rate": 7.06028761061947e-06, + "loss": 0.7567, + "step": 1063 + }, + { + "epoch": 1.176991150442478, + "grad_norm": 0.1044921875, + "learning_rate": 7.057522123893807e-06, + "loss": 0.7849, + "step": 1064 + }, + { + "epoch": 1.1780973451327434, + "grad_norm": 0.12109375, + "learning_rate": 7.054756637168142e-06, + "loss": 0.7922, + "step": 1065 + }, + { + "epoch": 1.1792035398230087, + "grad_norm": 0.103515625, + "learning_rate": 7.051991150442479e-06, + "loss": 0.7846, + "step": 1066 + }, + { + "epoch": 1.1803097345132743, + "grad_norm": 0.1015625, + "learning_rate": 7.049225663716814e-06, + "loss": 0.7832, + "step": 1067 + }, + { + "epoch": 1.1814159292035398, + "grad_norm": 0.115234375, + "learning_rate": 7.046460176991151e-06, + "loss": 0.765, + "step": 
1068 + }, + { + "epoch": 1.1825221238938053, + "grad_norm": 0.09765625, + "learning_rate": 7.043694690265486e-06, + "loss": 0.7346, + "step": 1069 + }, + { + "epoch": 1.1836283185840708, + "grad_norm": 0.10009765625, + "learning_rate": 7.040929203539823e-06, + "loss": 0.7891, + "step": 1070 + }, + { + "epoch": 1.1847345132743363, + "grad_norm": 0.11572265625, + "learning_rate": 7.03816371681416e-06, + "loss": 0.8092, + "step": 1071 + }, + { + "epoch": 1.1858407079646018, + "grad_norm": 0.0986328125, + "learning_rate": 7.035398230088496e-06, + "loss": 0.763, + "step": 1072 + }, + { + "epoch": 1.1869469026548674, + "grad_norm": 0.1025390625, + "learning_rate": 7.032632743362833e-06, + "loss": 0.7644, + "step": 1073 + }, + { + "epoch": 1.1880530973451326, + "grad_norm": 0.1103515625, + "learning_rate": 7.029867256637169e-06, + "loss": 0.7774, + "step": 1074 + }, + { + "epoch": 1.1891592920353982, + "grad_norm": 0.10693359375, + "learning_rate": 7.027101769911505e-06, + "loss": 0.7798, + "step": 1075 + }, + { + "epoch": 1.1902654867256637, + "grad_norm": 0.103515625, + "learning_rate": 7.024336283185841e-06, + "loss": 0.8048, + "step": 1076 + }, + { + "epoch": 1.1913716814159292, + "grad_norm": 0.1015625, + "learning_rate": 7.021570796460178e-06, + "loss": 0.7817, + "step": 1077 + }, + { + "epoch": 1.1924778761061947, + "grad_norm": 0.109375, + "learning_rate": 7.018805309734514e-06, + "loss": 0.7504, + "step": 1078 + }, + { + "epoch": 1.1935840707964602, + "grad_norm": 0.08984375, + "learning_rate": 7.01603982300885e-06, + "loss": 0.7687, + "step": 1079 + }, + { + "epoch": 1.1946902654867257, + "grad_norm": 0.130859375, + "learning_rate": 7.013274336283187e-06, + "loss": 0.8765, + "step": 1080 + }, + { + "epoch": 1.1957964601769913, + "grad_norm": 0.10302734375, + "learning_rate": 7.0105088495575224e-06, + "loss": 0.7717, + "step": 1081 + }, + { + "epoch": 1.1969026548672566, + "grad_norm": 0.111328125, + "learning_rate": 7.007743362831859e-06, + "loss": 0.7628, + "step": 1082 + }, + { + "epoch": 1.198008849557522, + "grad_norm": 0.107421875, + "learning_rate": 7.004977876106195e-06, + "loss": 0.8009, + "step": 1083 + }, + { + "epoch": 1.1991150442477876, + "grad_norm": 0.111328125, + "learning_rate": 7.002212389380532e-06, + "loss": 0.7531, + "step": 1084 + }, + { + "epoch": 1.200221238938053, + "grad_norm": 0.10107421875, + "learning_rate": 6.999446902654869e-06, + "loss": 0.7927, + "step": 1085 + }, + { + "epoch": 1.2013274336283186, + "grad_norm": 0.1015625, + "learning_rate": 6.996681415929204e-06, + "loss": 0.7699, + "step": 1086 + }, + { + "epoch": 1.2024336283185841, + "grad_norm": 0.10888671875, + "learning_rate": 6.993915929203541e-06, + "loss": 0.8373, + "step": 1087 + }, + { + "epoch": 1.2035398230088497, + "grad_norm": 0.1318359375, + "learning_rate": 6.991150442477876e-06, + "loss": 0.8341, + "step": 1088 + }, + { + "epoch": 1.204646017699115, + "grad_norm": 0.10498046875, + "learning_rate": 6.988384955752213e-06, + "loss": 0.7701, + "step": 1089 + }, + { + "epoch": 1.2057522123893805, + "grad_norm": 0.0966796875, + "learning_rate": 6.985619469026549e-06, + "loss": 0.7496, + "step": 1090 + }, + { + "epoch": 1.206858407079646, + "grad_norm": 0.10693359375, + "learning_rate": 6.982853982300885e-06, + "loss": 0.7633, + "step": 1091 + }, + { + "epoch": 1.2079646017699115, + "grad_norm": 0.095703125, + "learning_rate": 6.9800884955752215e-06, + "loss": 0.7704, + "step": 1092 + }, + { + "epoch": 1.209070796460177, + "grad_norm": 0.1357421875, + "learning_rate": 6.977323008849558e-06, 
+ "loss": 0.7652, + "step": 1093 + }, + { + "epoch": 1.2101769911504425, + "grad_norm": 0.10205078125, + "learning_rate": 6.974557522123895e-06, + "loss": 0.7599, + "step": 1094 + }, + { + "epoch": 1.211283185840708, + "grad_norm": 0.125, + "learning_rate": 6.971792035398231e-06, + "loss": 0.7608, + "step": 1095 + }, + { + "epoch": 1.2123893805309733, + "grad_norm": 0.1103515625, + "learning_rate": 6.969026548672567e-06, + "loss": 0.8105, + "step": 1096 + }, + { + "epoch": 1.2134955752212389, + "grad_norm": 0.11279296875, + "learning_rate": 6.966261061946903e-06, + "loss": 0.78, + "step": 1097 + }, + { + "epoch": 1.2146017699115044, + "grad_norm": 0.09716796875, + "learning_rate": 6.96349557522124e-06, + "loss": 0.7612, + "step": 1098 + }, + { + "epoch": 1.2157079646017699, + "grad_norm": 0.10302734375, + "learning_rate": 6.960730088495575e-06, + "loss": 0.8058, + "step": 1099 + }, + { + "epoch": 1.2168141592920354, + "grad_norm": 0.1015625, + "learning_rate": 6.957964601769912e-06, + "loss": 0.7551, + "step": 1100 + }, + { + "epoch": 1.217920353982301, + "grad_norm": 0.109375, + "learning_rate": 6.955199115044249e-06, + "loss": 0.757, + "step": 1101 + }, + { + "epoch": 1.2190265486725664, + "grad_norm": 0.10986328125, + "learning_rate": 6.9524336283185845e-06, + "loss": 0.788, + "step": 1102 + }, + { + "epoch": 1.220132743362832, + "grad_norm": 0.10400390625, + "learning_rate": 6.9496681415929215e-06, + "loss": 0.7949, + "step": 1103 + }, + { + "epoch": 1.2212389380530975, + "grad_norm": 0.1474609375, + "learning_rate": 6.946902654867257e-06, + "loss": 0.8009, + "step": 1104 + }, + { + "epoch": 1.2223451327433628, + "grad_norm": 0.123046875, + "learning_rate": 6.944137168141594e-06, + "loss": 0.8713, + "step": 1105 + }, + { + "epoch": 1.2234513274336283, + "grad_norm": 0.10498046875, + "learning_rate": 6.941371681415929e-06, + "loss": 0.7359, + "step": 1106 + }, + { + "epoch": 1.2245575221238938, + "grad_norm": 0.10400390625, + "learning_rate": 6.938606194690266e-06, + "loss": 0.8229, + "step": 1107 + }, + { + "epoch": 1.2256637168141593, + "grad_norm": 0.10693359375, + "learning_rate": 6.935840707964603e-06, + "loss": 0.7369, + "step": 1108 + }, + { + "epoch": 1.2267699115044248, + "grad_norm": 0.10546875, + "learning_rate": 6.933075221238938e-06, + "loss": 0.793, + "step": 1109 + }, + { + "epoch": 1.2278761061946903, + "grad_norm": 0.1005859375, + "learning_rate": 6.930309734513275e-06, + "loss": 0.7647, + "step": 1110 + }, + { + "epoch": 1.2289823008849559, + "grad_norm": 0.10791015625, + "learning_rate": 6.927544247787611e-06, + "loss": 0.7669, + "step": 1111 + }, + { + "epoch": 1.2300884955752212, + "grad_norm": 0.1181640625, + "learning_rate": 6.9247787610619474e-06, + "loss": 0.7999, + "step": 1112 + }, + { + "epoch": 1.2311946902654867, + "grad_norm": 0.11572265625, + "learning_rate": 6.9220132743362836e-06, + "loss": 0.7727, + "step": 1113 + }, + { + "epoch": 1.2323008849557522, + "grad_norm": 0.09912109375, + "learning_rate": 6.91924778761062e-06, + "loss": 0.7928, + "step": 1114 + }, + { + "epoch": 1.2334070796460177, + "grad_norm": 0.1015625, + "learning_rate": 6.916482300884957e-06, + "loss": 0.7367, + "step": 1115 + }, + { + "epoch": 1.2345132743362832, + "grad_norm": 0.11083984375, + "learning_rate": 6.913716814159293e-06, + "loss": 0.8032, + "step": 1116 + }, + { + "epoch": 1.2356194690265487, + "grad_norm": 0.09814453125, + "learning_rate": 6.910951327433629e-06, + "loss": 0.7747, + "step": 1117 + }, + { + "epoch": 1.2367256637168142, + "grad_norm": 0.09716796875, + 
"learning_rate": 6.908185840707965e-06, + "loss": 0.7694, + "step": 1118 + }, + { + "epoch": 1.2378318584070795, + "grad_norm": 0.25390625, + "learning_rate": 6.905420353982302e-06, + "loss": 0.7554, + "step": 1119 + }, + { + "epoch": 1.238938053097345, + "grad_norm": 0.1025390625, + "learning_rate": 6.902654867256637e-06, + "loss": 0.7731, + "step": 1120 + }, + { + "epoch": 1.2400442477876106, + "grad_norm": 0.0947265625, + "learning_rate": 6.899889380530974e-06, + "loss": 0.7791, + "step": 1121 + }, + { + "epoch": 1.241150442477876, + "grad_norm": 0.10009765625, + "learning_rate": 6.8971238938053095e-06, + "loss": 0.7727, + "step": 1122 + }, + { + "epoch": 1.2422566371681416, + "grad_norm": 0.12451171875, + "learning_rate": 6.8943584070796465e-06, + "loss": 0.7843, + "step": 1123 + }, + { + "epoch": 1.2433628318584071, + "grad_norm": 0.0927734375, + "learning_rate": 6.8915929203539835e-06, + "loss": 0.7487, + "step": 1124 + }, + { + "epoch": 1.2444690265486726, + "grad_norm": 0.115234375, + "learning_rate": 6.888827433628319e-06, + "loss": 0.8183, + "step": 1125 + }, + { + "epoch": 1.245575221238938, + "grad_norm": 0.11572265625, + "learning_rate": 6.886061946902656e-06, + "loss": 0.7943, + "step": 1126 + }, + { + "epoch": 1.2466814159292035, + "grad_norm": 0.12060546875, + "learning_rate": 6.883296460176991e-06, + "loss": 0.7965, + "step": 1127 + }, + { + "epoch": 1.247787610619469, + "grad_norm": 0.10546875, + "learning_rate": 6.880530973451328e-06, + "loss": 0.7322, + "step": 1128 + }, + { + "epoch": 1.2488938053097345, + "grad_norm": 0.11669921875, + "learning_rate": 6.877765486725664e-06, + "loss": 0.8306, + "step": 1129 + }, + { + "epoch": 1.25, + "grad_norm": 0.1083984375, + "learning_rate": 6.875e-06, + "loss": 0.7835, + "step": 1130 + }, + { + "epoch": 1.2511061946902655, + "grad_norm": 0.10205078125, + "learning_rate": 6.872234513274337e-06, + "loss": 0.7938, + "step": 1131 + }, + { + "epoch": 1.252212389380531, + "grad_norm": 0.1064453125, + "learning_rate": 6.869469026548673e-06, + "loss": 0.7784, + "step": 1132 + }, + { + "epoch": 1.2533185840707963, + "grad_norm": 0.10205078125, + "learning_rate": 6.8667035398230095e-06, + "loss": 0.7628, + "step": 1133 + }, + { + "epoch": 1.254424778761062, + "grad_norm": 0.1123046875, + "learning_rate": 6.863938053097346e-06, + "loss": 0.7955, + "step": 1134 + }, + { + "epoch": 1.2555309734513274, + "grad_norm": 0.11669921875, + "learning_rate": 6.8611725663716826e-06, + "loss": 0.7898, + "step": 1135 + }, + { + "epoch": 1.2566371681415929, + "grad_norm": 0.11572265625, + "learning_rate": 6.858407079646018e-06, + "loss": 0.8178, + "step": 1136 + }, + { + "epoch": 1.2577433628318584, + "grad_norm": 0.1044921875, + "learning_rate": 6.855641592920355e-06, + "loss": 0.785, + "step": 1137 + }, + { + "epoch": 1.258849557522124, + "grad_norm": 0.1044921875, + "learning_rate": 6.852876106194691e-06, + "loss": 0.7692, + "step": 1138 + }, + { + "epoch": 1.2599557522123894, + "grad_norm": 0.1123046875, + "learning_rate": 6.850110619469027e-06, + "loss": 0.7774, + "step": 1139 + }, + { + "epoch": 1.261061946902655, + "grad_norm": 0.10205078125, + "learning_rate": 6.847345132743364e-06, + "loss": 0.7896, + "step": 1140 + }, + { + "epoch": 1.2621681415929205, + "grad_norm": 0.11376953125, + "learning_rate": 6.844579646017699e-06, + "loss": 0.8065, + "step": 1141 + }, + { + "epoch": 1.2632743362831858, + "grad_norm": 0.1015625, + "learning_rate": 6.841814159292036e-06, + "loss": 0.7661, + "step": 1142 + }, + { + "epoch": 1.2643805309734513, + 
"grad_norm": 0.11669921875, + "learning_rate": 6.839048672566372e-06, + "loss": 0.7448, + "step": 1143 + }, + { + "epoch": 1.2654867256637168, + "grad_norm": 0.0986328125, + "learning_rate": 6.8362831858407086e-06, + "loss": 0.7713, + "step": 1144 + }, + { + "epoch": 1.2665929203539823, + "grad_norm": 0.10107421875, + "learning_rate": 6.833517699115045e-06, + "loss": 0.7594, + "step": 1145 + }, + { + "epoch": 1.2676991150442478, + "grad_norm": 0.099609375, + "learning_rate": 6.830752212389381e-06, + "loss": 0.7739, + "step": 1146 + }, + { + "epoch": 1.2688053097345133, + "grad_norm": 0.11083984375, + "learning_rate": 6.827986725663718e-06, + "loss": 0.7515, + "step": 1147 + }, + { + "epoch": 1.2699115044247788, + "grad_norm": 0.123046875, + "learning_rate": 6.825221238938053e-06, + "loss": 0.7894, + "step": 1148 + }, + { + "epoch": 1.2710176991150441, + "grad_norm": 0.11181640625, + "learning_rate": 6.82245575221239e-06, + "loss": 0.7775, + "step": 1149 + }, + { + "epoch": 1.2721238938053097, + "grad_norm": 0.10693359375, + "learning_rate": 6.819690265486726e-06, + "loss": 0.7895, + "step": 1150 + }, + { + "epoch": 1.2732300884955752, + "grad_norm": 0.12255859375, + "learning_rate": 6.816924778761062e-06, + "loss": 0.836, + "step": 1151 + }, + { + "epoch": 1.2743362831858407, + "grad_norm": 0.10498046875, + "learning_rate": 6.814159292035398e-06, + "loss": 0.8022, + "step": 1152 + }, + { + "epoch": 1.2754424778761062, + "grad_norm": 0.10302734375, + "learning_rate": 6.811393805309735e-06, + "loss": 0.7869, + "step": 1153 + }, + { + "epoch": 1.2765486725663717, + "grad_norm": 0.103515625, + "learning_rate": 6.8086283185840715e-06, + "loss": 0.7703, + "step": 1154 + }, + { + "epoch": 1.2776548672566372, + "grad_norm": 0.11474609375, + "learning_rate": 6.805862831858408e-06, + "loss": 0.7715, + "step": 1155 + }, + { + "epoch": 1.2787610619469025, + "grad_norm": 0.10498046875, + "learning_rate": 6.803097345132745e-06, + "loss": 0.7691, + "step": 1156 + }, + { + "epoch": 1.2798672566371683, + "grad_norm": 0.1064453125, + "learning_rate": 6.80033185840708e-06, + "loss": 0.7627, + "step": 1157 + }, + { + "epoch": 1.2809734513274336, + "grad_norm": 0.158203125, + "learning_rate": 6.797566371681417e-06, + "loss": 0.7931, + "step": 1158 + }, + { + "epoch": 1.282079646017699, + "grad_norm": 0.10693359375, + "learning_rate": 6.794800884955752e-06, + "loss": 0.7756, + "step": 1159 + }, + { + "epoch": 1.2831858407079646, + "grad_norm": 0.10546875, + "learning_rate": 6.792035398230089e-06, + "loss": 0.7569, + "step": 1160 + }, + { + "epoch": 1.2842920353982301, + "grad_norm": 0.11181640625, + "learning_rate": 6.789269911504426e-06, + "loss": 0.7585, + "step": 1161 + }, + { + "epoch": 1.2853982300884956, + "grad_norm": 0.1435546875, + "learning_rate": 6.786504424778761e-06, + "loss": 0.8527, + "step": 1162 + }, + { + "epoch": 1.286504424778761, + "grad_norm": 0.109375, + "learning_rate": 6.783738938053098e-06, + "loss": 0.7866, + "step": 1163 + }, + { + "epoch": 1.2876106194690267, + "grad_norm": 0.103515625, + "learning_rate": 6.780973451327434e-06, + "loss": 0.7519, + "step": 1164 + }, + { + "epoch": 1.288716814159292, + "grad_norm": 0.10009765625, + "learning_rate": 6.778207964601771e-06, + "loss": 0.7773, + "step": 1165 + }, + { + "epoch": 1.2898230088495575, + "grad_norm": 0.12890625, + "learning_rate": 6.775442477876107e-06, + "loss": 0.8457, + "step": 1166 + }, + { + "epoch": 1.290929203539823, + "grad_norm": 0.1162109375, + "learning_rate": 6.772676991150443e-06, + "loss": 0.8189, + "step": 1167 + 
}, + { + "epoch": 1.2920353982300885, + "grad_norm": 0.10205078125, + "learning_rate": 6.76991150442478e-06, + "loss": 0.7468, + "step": 1168 + }, + { + "epoch": 1.293141592920354, + "grad_norm": 0.1025390625, + "learning_rate": 6.767146017699115e-06, + "loss": 0.7546, + "step": 1169 + }, + { + "epoch": 1.2942477876106195, + "grad_norm": 0.0966796875, + "learning_rate": 6.764380530973452e-06, + "loss": 0.7815, + "step": 1170 + }, + { + "epoch": 1.295353982300885, + "grad_norm": 0.1484375, + "learning_rate": 6.761615044247788e-06, + "loss": 0.8779, + "step": 1171 + }, + { + "epoch": 1.2964601769911503, + "grad_norm": 0.10546875, + "learning_rate": 6.758849557522124e-06, + "loss": 0.72, + "step": 1172 + }, + { + "epoch": 1.2975663716814159, + "grad_norm": 0.09375, + "learning_rate": 6.7560840707964604e-06, + "loss": 0.7391, + "step": 1173 + }, + { + "epoch": 1.2986725663716814, + "grad_norm": 0.10302734375, + "learning_rate": 6.753318584070797e-06, + "loss": 0.7184, + "step": 1174 + }, + { + "epoch": 1.299778761061947, + "grad_norm": 0.1171875, + "learning_rate": 6.750553097345133e-06, + "loss": 0.7913, + "step": 1175 + }, + { + "epoch": 1.3008849557522124, + "grad_norm": 0.0986328125, + "learning_rate": 6.74778761061947e-06, + "loss": 0.7679, + "step": 1176 + }, + { + "epoch": 1.301991150442478, + "grad_norm": 0.107421875, + "learning_rate": 6.745022123893807e-06, + "loss": 0.792, + "step": 1177 + }, + { + "epoch": 1.3030973451327434, + "grad_norm": 0.1142578125, + "learning_rate": 6.742256637168142e-06, + "loss": 0.7582, + "step": 1178 + }, + { + "epoch": 1.3042035398230087, + "grad_norm": 0.10546875, + "learning_rate": 6.739491150442479e-06, + "loss": 0.7699, + "step": 1179 + }, + { + "epoch": 1.3053097345132743, + "grad_norm": 0.11083984375, + "learning_rate": 6.736725663716814e-06, + "loss": 0.7616, + "step": 1180 + }, + { + "epoch": 1.3064159292035398, + "grad_norm": 0.1162109375, + "learning_rate": 6.733960176991151e-06, + "loss": 0.8144, + "step": 1181 + }, + { + "epoch": 1.3075221238938053, + "grad_norm": 0.111328125, + "learning_rate": 6.731194690265486e-06, + "loss": 0.7795, + "step": 1182 + }, + { + "epoch": 1.3086283185840708, + "grad_norm": 0.10791015625, + "learning_rate": 6.728429203539823e-06, + "loss": 0.812, + "step": 1183 + }, + { + "epoch": 1.3097345132743363, + "grad_norm": 0.11376953125, + "learning_rate": 6.72566371681416e-06, + "loss": 0.7692, + "step": 1184 + }, + { + "epoch": 1.3108407079646018, + "grad_norm": 0.103515625, + "learning_rate": 6.722898230088496e-06, + "loss": 0.7729, + "step": 1185 + }, + { + "epoch": 1.3119469026548671, + "grad_norm": 0.1044921875, + "learning_rate": 6.720132743362833e-06, + "loss": 0.7648, + "step": 1186 + }, + { + "epoch": 1.3130530973451329, + "grad_norm": 0.111328125, + "learning_rate": 6.717367256637169e-06, + "loss": 0.7385, + "step": 1187 + }, + { + "epoch": 1.3141592920353982, + "grad_norm": 0.10595703125, + "learning_rate": 6.714601769911505e-06, + "loss": 0.7913, + "step": 1188 + }, + { + "epoch": 1.3152654867256637, + "grad_norm": 0.1103515625, + "learning_rate": 6.711836283185841e-06, + "loss": 0.7711, + "step": 1189 + }, + { + "epoch": 1.3163716814159292, + "grad_norm": 0.1064453125, + "learning_rate": 6.709070796460178e-06, + "loss": 0.7872, + "step": 1190 + }, + { + "epoch": 1.3174778761061947, + "grad_norm": 0.1103515625, + "learning_rate": 6.706305309734514e-06, + "loss": 0.8227, + "step": 1191 + }, + { + "epoch": 1.3185840707964602, + "grad_norm": 0.1005859375, + "learning_rate": 6.70353982300885e-06, + "loss": 
0.7868, + "step": 1192 + }, + { + "epoch": 1.3196902654867257, + "grad_norm": 0.099609375, + "learning_rate": 6.700774336283186e-06, + "loss": 0.7612, + "step": 1193 + }, + { + "epoch": 1.3207964601769913, + "grad_norm": 0.1005859375, + "learning_rate": 6.6980088495575225e-06, + "loss": 0.7857, + "step": 1194 + }, + { + "epoch": 1.3219026548672566, + "grad_norm": 0.1103515625, + "learning_rate": 6.6952433628318594e-06, + "loss": 0.7508, + "step": 1195 + }, + { + "epoch": 1.323008849557522, + "grad_norm": 0.10546875, + "learning_rate": 6.692477876106195e-06, + "loss": 0.7526, + "step": 1196 + }, + { + "epoch": 1.3241150442477876, + "grad_norm": 0.103515625, + "learning_rate": 6.689712389380532e-06, + "loss": 0.8166, + "step": 1197 + }, + { + "epoch": 1.325221238938053, + "grad_norm": 0.10986328125, + "learning_rate": 6.686946902654869e-06, + "loss": 0.7383, + "step": 1198 + }, + { + "epoch": 1.3263274336283186, + "grad_norm": 0.09716796875, + "learning_rate": 6.684181415929204e-06, + "loss": 0.741, + "step": 1199 + }, + { + "epoch": 1.3274336283185841, + "grad_norm": 0.10595703125, + "learning_rate": 6.681415929203541e-06, + "loss": 0.7676, + "step": 1200 + }, + { + "epoch": 1.3285398230088497, + "grad_norm": 0.10693359375, + "learning_rate": 6.678650442477876e-06, + "loss": 0.7933, + "step": 1201 + }, + { + "epoch": 1.329646017699115, + "grad_norm": 0.103515625, + "learning_rate": 6.675884955752213e-06, + "loss": 0.7664, + "step": 1202 + }, + { + "epoch": 1.3307522123893805, + "grad_norm": 0.09375, + "learning_rate": 6.6731194690265485e-06, + "loss": 0.7462, + "step": 1203 + }, + { + "epoch": 1.331858407079646, + "grad_norm": 0.10009765625, + "learning_rate": 6.6703539823008854e-06, + "loss": 0.7726, + "step": 1204 + }, + { + "epoch": 1.3329646017699115, + "grad_norm": 0.10302734375, + "learning_rate": 6.6675884955752216e-06, + "loss": 0.7598, + "step": 1205 + }, + { + "epoch": 1.334070796460177, + "grad_norm": 0.10791015625, + "learning_rate": 6.664823008849558e-06, + "loss": 0.7645, + "step": 1206 + }, + { + "epoch": 1.3351769911504425, + "grad_norm": 0.099609375, + "learning_rate": 6.662057522123895e-06, + "loss": 0.7764, + "step": 1207 + }, + { + "epoch": 1.336283185840708, + "grad_norm": 0.1103515625, + "learning_rate": 6.659292035398231e-06, + "loss": 0.7874, + "step": 1208 + }, + { + "epoch": 1.3373893805309733, + "grad_norm": 0.1123046875, + "learning_rate": 6.656526548672567e-06, + "loss": 0.7558, + "step": 1209 + }, + { + "epoch": 1.338495575221239, + "grad_norm": 0.1298828125, + "learning_rate": 6.653761061946903e-06, + "loss": 0.7827, + "step": 1210 + }, + { + "epoch": 1.3396017699115044, + "grad_norm": 0.10400390625, + "learning_rate": 6.65099557522124e-06, + "loss": 0.7479, + "step": 1211 + }, + { + "epoch": 1.3407079646017699, + "grad_norm": 0.11181640625, + "learning_rate": 6.648230088495575e-06, + "loss": 0.832, + "step": 1212 + }, + { + "epoch": 1.3418141592920354, + "grad_norm": 0.0986328125, + "learning_rate": 6.645464601769912e-06, + "loss": 0.7491, + "step": 1213 + }, + { + "epoch": 1.342920353982301, + "grad_norm": 0.1279296875, + "learning_rate": 6.642699115044248e-06, + "loss": 0.7928, + "step": 1214 + }, + { + "epoch": 1.3440265486725664, + "grad_norm": 0.109375, + "learning_rate": 6.6399336283185845e-06, + "loss": 0.7811, + "step": 1215 + }, + { + "epoch": 1.3451327433628317, + "grad_norm": 0.11328125, + "learning_rate": 6.6371681415929215e-06, + "loss": 0.7844, + "step": 1216 + }, + { + "epoch": 1.3462389380530975, + "grad_norm": 0.10986328125, + 
"learning_rate": 6.634402654867257e-06, + "loss": 0.7757, + "step": 1217 + }, + { + "epoch": 1.3473451327433628, + "grad_norm": 0.0986328125, + "learning_rate": 6.631637168141594e-06, + "loss": 0.7721, + "step": 1218 + }, + { + "epoch": 1.3484513274336283, + "grad_norm": 0.1357421875, + "learning_rate": 6.628871681415929e-06, + "loss": 0.8306, + "step": 1219 + }, + { + "epoch": 1.3495575221238938, + "grad_norm": 0.107421875, + "learning_rate": 6.626106194690266e-06, + "loss": 0.7593, + "step": 1220 + }, + { + "epoch": 1.3506637168141593, + "grad_norm": 0.1103515625, + "learning_rate": 6.623340707964603e-06, + "loss": 0.7918, + "step": 1221 + }, + { + "epoch": 1.3517699115044248, + "grad_norm": 0.1279296875, + "learning_rate": 6.620575221238938e-06, + "loss": 0.8604, + "step": 1222 + }, + { + "epoch": 1.3528761061946903, + "grad_norm": 0.1123046875, + "learning_rate": 6.617809734513275e-06, + "loss": 0.7875, + "step": 1223 + }, + { + "epoch": 1.3539823008849559, + "grad_norm": 0.095703125, + "learning_rate": 6.6150442477876105e-06, + "loss": 0.7454, + "step": 1224 + }, + { + "epoch": 1.3550884955752212, + "grad_norm": 0.1279296875, + "learning_rate": 6.6122787610619475e-06, + "loss": 0.8187, + "step": 1225 + }, + { + "epoch": 1.3561946902654867, + "grad_norm": 0.12109375, + "learning_rate": 6.609513274336284e-06, + "loss": 0.8003, + "step": 1226 + }, + { + "epoch": 1.3573008849557522, + "grad_norm": 0.12353515625, + "learning_rate": 6.60674778761062e-06, + "loss": 0.8318, + "step": 1227 + }, + { + "epoch": 1.3584070796460177, + "grad_norm": 0.09814453125, + "learning_rate": 6.603982300884957e-06, + "loss": 0.7791, + "step": 1228 + }, + { + "epoch": 1.3595132743362832, + "grad_norm": 0.10205078125, + "learning_rate": 6.601216814159293e-06, + "loss": 0.7794, + "step": 1229 + }, + { + "epoch": 1.3606194690265487, + "grad_norm": 0.12451171875, + "learning_rate": 6.598451327433629e-06, + "loss": 0.7748, + "step": 1230 + }, + { + "epoch": 1.3617256637168142, + "grad_norm": 0.1083984375, + "learning_rate": 6.595685840707965e-06, + "loss": 0.7927, + "step": 1231 + }, + { + "epoch": 1.3628318584070795, + "grad_norm": 0.1513671875, + "learning_rate": 6.592920353982302e-06, + "loss": 0.7595, + "step": 1232 + }, + { + "epoch": 1.363938053097345, + "grad_norm": 0.10888671875, + "learning_rate": 6.590154867256637e-06, + "loss": 0.7952, + "step": 1233 + }, + { + "epoch": 1.3650442477876106, + "grad_norm": 0.107421875, + "learning_rate": 6.587389380530974e-06, + "loss": 0.7888, + "step": 1234 + }, + { + "epoch": 1.366150442477876, + "grad_norm": 0.1025390625, + "learning_rate": 6.5846238938053096e-06, + "loss": 0.7755, + "step": 1235 + }, + { + "epoch": 1.3672566371681416, + "grad_norm": 0.1083984375, + "learning_rate": 6.5818584070796465e-06, + "loss": 0.7885, + "step": 1236 + }, + { + "epoch": 1.3683628318584071, + "grad_norm": 0.10302734375, + "learning_rate": 6.5790929203539835e-06, + "loss": 0.7607, + "step": 1237 + }, + { + "epoch": 1.3694690265486726, + "grad_norm": 0.10302734375, + "learning_rate": 6.576327433628319e-06, + "loss": 0.771, + "step": 1238 + }, + { + "epoch": 1.370575221238938, + "grad_norm": 0.10009765625, + "learning_rate": 6.573561946902656e-06, + "loss": 0.7422, + "step": 1239 + }, + { + "epoch": 1.3716814159292037, + "grad_norm": 0.11376953125, + "learning_rate": 6.570796460176991e-06, + "loss": 0.7917, + "step": 1240 + }, + { + "epoch": 1.372787610619469, + "grad_norm": 0.10205078125, + "learning_rate": 6.568030973451328e-06, + "loss": 0.7519, + "step": 1241 + }, + { + "epoch": 
1.3738938053097345, + "grad_norm": 0.1103515625, + "learning_rate": 6.565265486725664e-06, + "loss": 0.7201, + "step": 1242 + }, + { + "epoch": 1.375, + "grad_norm": 0.10205078125, + "learning_rate": 6.5625e-06, + "loss": 0.7777, + "step": 1243 + }, + { + "epoch": 1.3761061946902655, + "grad_norm": 0.123046875, + "learning_rate": 6.559734513274337e-06, + "loss": 0.7796, + "step": 1244 + }, + { + "epoch": 1.377212389380531, + "grad_norm": 0.10546875, + "learning_rate": 6.556969026548673e-06, + "loss": 0.7346, + "step": 1245 + }, + { + "epoch": 1.3783185840707963, + "grad_norm": 0.123046875, + "learning_rate": 6.5542035398230095e-06, + "loss": 0.7469, + "step": 1246 + }, + { + "epoch": 1.379424778761062, + "grad_norm": 0.10986328125, + "learning_rate": 6.551438053097346e-06, + "loss": 0.7363, + "step": 1247 + }, + { + "epoch": 1.3805309734513274, + "grad_norm": 0.111328125, + "learning_rate": 6.548672566371682e-06, + "loss": 0.7895, + "step": 1248 + }, + { + "epoch": 1.3816371681415929, + "grad_norm": 0.10107421875, + "learning_rate": 6.545907079646018e-06, + "loss": 0.742, + "step": 1249 + }, + { + "epoch": 1.3827433628318584, + "grad_norm": 0.10546875, + "learning_rate": 6.543141592920355e-06, + "loss": 0.7676, + "step": 1250 + }, + { + "epoch": 1.383849557522124, + "grad_norm": 0.09765625, + "learning_rate": 6.540376106194691e-06, + "loss": 0.7516, + "step": 1251 + }, + { + "epoch": 1.3849557522123894, + "grad_norm": 0.1083984375, + "learning_rate": 6.537610619469027e-06, + "loss": 0.8043, + "step": 1252 + }, + { + "epoch": 1.386061946902655, + "grad_norm": 0.130859375, + "learning_rate": 6.534845132743364e-06, + "loss": 0.8382, + "step": 1253 + }, + { + "epoch": 1.3871681415929205, + "grad_norm": 0.10009765625, + "learning_rate": 6.532079646017699e-06, + "loss": 0.7522, + "step": 1254 + }, + { + "epoch": 1.3882743362831858, + "grad_norm": 0.11279296875, + "learning_rate": 6.529314159292036e-06, + "loss": 0.8026, + "step": 1255 + }, + { + "epoch": 1.3893805309734513, + "grad_norm": 0.1083984375, + "learning_rate": 6.526548672566372e-06, + "loss": 0.7833, + "step": 1256 + }, + { + "epoch": 1.3904867256637168, + "grad_norm": 0.1142578125, + "learning_rate": 6.523783185840709e-06, + "loss": 0.799, + "step": 1257 + }, + { + "epoch": 1.3915929203539823, + "grad_norm": 0.099609375, + "learning_rate": 6.521017699115044e-06, + "loss": 0.7723, + "step": 1258 + }, + { + "epoch": 1.3926991150442478, + "grad_norm": 0.11181640625, + "learning_rate": 6.518252212389381e-06, + "loss": 0.7997, + "step": 1259 + }, + { + "epoch": 1.3938053097345133, + "grad_norm": 0.11279296875, + "learning_rate": 6.515486725663718e-06, + "loss": 0.7701, + "step": 1260 + }, + { + "epoch": 1.3949115044247788, + "grad_norm": 0.11572265625, + "learning_rate": 6.512721238938053e-06, + "loss": 0.7566, + "step": 1261 + }, + { + "epoch": 1.3960176991150441, + "grad_norm": 0.1025390625, + "learning_rate": 6.50995575221239e-06, + "loss": 0.7776, + "step": 1262 + }, + { + "epoch": 1.3971238938053097, + "grad_norm": 0.09375, + "learning_rate": 6.507190265486726e-06, + "loss": 0.7708, + "step": 1263 + }, + { + "epoch": 1.3982300884955752, + "grad_norm": 0.10302734375, + "learning_rate": 6.504424778761062e-06, + "loss": 0.7764, + "step": 1264 + }, + { + "epoch": 1.3993362831858407, + "grad_norm": 0.11181640625, + "learning_rate": 6.5016592920353984e-06, + "loss": 0.8041, + "step": 1265 + }, + { + "epoch": 1.4004424778761062, + "grad_norm": 0.1015625, + "learning_rate": 6.498893805309735e-06, + "loss": 0.7704, + "step": 1266 + }, + { + 
"epoch": 1.4015486725663717, + "grad_norm": 0.1201171875, + "learning_rate": 6.4961283185840715e-06, + "loss": 0.8234, + "step": 1267 + }, + { + "epoch": 1.4026548672566372, + "grad_norm": 0.10791015625, + "learning_rate": 6.493362831858408e-06, + "loss": 0.7865, + "step": 1268 + }, + { + "epoch": 1.4037610619469025, + "grad_norm": 0.10400390625, + "learning_rate": 6.490597345132744e-06, + "loss": 0.7973, + "step": 1269 + }, + { + "epoch": 1.4048672566371683, + "grad_norm": 0.166015625, + "learning_rate": 6.48783185840708e-06, + "loss": 0.734, + "step": 1270 + }, + { + "epoch": 1.4059734513274336, + "grad_norm": 0.1435546875, + "learning_rate": 6.485066371681417e-06, + "loss": 0.7328, + "step": 1271 + }, + { + "epoch": 1.407079646017699, + "grad_norm": 0.10009765625, + "learning_rate": 6.482300884955752e-06, + "loss": 0.7525, + "step": 1272 + }, + { + "epoch": 1.4081858407079646, + "grad_norm": 0.115234375, + "learning_rate": 6.479535398230089e-06, + "loss": 0.8237, + "step": 1273 + }, + { + "epoch": 1.4092920353982301, + "grad_norm": 0.1611328125, + "learning_rate": 6.476769911504426e-06, + "loss": 0.7651, + "step": 1274 + }, + { + "epoch": 1.4103982300884956, + "grad_norm": 0.10791015625, + "learning_rate": 6.474004424778761e-06, + "loss": 0.7534, + "step": 1275 + }, + { + "epoch": 1.411504424778761, + "grad_norm": 0.1103515625, + "learning_rate": 6.471238938053098e-06, + "loss": 0.7881, + "step": 1276 + }, + { + "epoch": 1.4126106194690267, + "grad_norm": 0.10595703125, + "learning_rate": 6.468473451327434e-06, + "loss": 0.7812, + "step": 1277 + }, + { + "epoch": 1.413716814159292, + "grad_norm": 0.107421875, + "learning_rate": 6.465707964601771e-06, + "loss": 0.742, + "step": 1278 + }, + { + "epoch": 1.4148230088495575, + "grad_norm": 0.10595703125, + "learning_rate": 6.462942477876106e-06, + "loss": 0.7834, + "step": 1279 + }, + { + "epoch": 1.415929203539823, + "grad_norm": 0.1435546875, + "learning_rate": 6.460176991150443e-06, + "loss": 0.7579, + "step": 1280 + }, + { + "epoch": 1.4170353982300885, + "grad_norm": 0.09619140625, + "learning_rate": 6.45741150442478e-06, + "loss": 0.7651, + "step": 1281 + }, + { + "epoch": 1.418141592920354, + "grad_norm": 0.11572265625, + "learning_rate": 6.454646017699115e-06, + "loss": 0.7925, + "step": 1282 + }, + { + "epoch": 1.4192477876106195, + "grad_norm": 0.12451171875, + "learning_rate": 6.451880530973452e-06, + "loss": 0.862, + "step": 1283 + }, + { + "epoch": 1.420353982300885, + "grad_norm": 0.1298828125, + "learning_rate": 6.449115044247788e-06, + "loss": 0.8362, + "step": 1284 + }, + { + "epoch": 1.4214601769911503, + "grad_norm": 0.10693359375, + "learning_rate": 6.446349557522124e-06, + "loss": 0.7326, + "step": 1285 + }, + { + "epoch": 1.4225663716814159, + "grad_norm": 0.1142578125, + "learning_rate": 6.4435840707964605e-06, + "loss": 0.7883, + "step": 1286 + }, + { + "epoch": 1.4236725663716814, + "grad_norm": 0.11083984375, + "learning_rate": 6.4408185840707974e-06, + "loss": 0.8145, + "step": 1287 + }, + { + "epoch": 1.424778761061947, + "grad_norm": 0.11865234375, + "learning_rate": 6.438053097345133e-06, + "loss": 0.8031, + "step": 1288 + }, + { + "epoch": 1.4258849557522124, + "grad_norm": 0.1064453125, + "learning_rate": 6.43528761061947e-06, + "loss": 0.79, + "step": 1289 + }, + { + "epoch": 1.426991150442478, + "grad_norm": 0.09423828125, + "learning_rate": 6.432522123893807e-06, + "loss": 0.7141, + "step": 1290 + }, + { + "epoch": 1.4280973451327434, + "grad_norm": 0.103515625, + "learning_rate": 6.429756637168142e-06, + 
"loss": 0.8021, + "step": 1291 + }, + { + "epoch": 1.4292035398230087, + "grad_norm": 0.1025390625, + "learning_rate": 6.426991150442479e-06, + "loss": 0.7981, + "step": 1292 + }, + { + "epoch": 1.4303097345132743, + "grad_norm": 0.09765625, + "learning_rate": 6.424225663716814e-06, + "loss": 0.7301, + "step": 1293 + }, + { + "epoch": 1.4314159292035398, + "grad_norm": 0.1416015625, + "learning_rate": 6.421460176991151e-06, + "loss": 0.7971, + "step": 1294 + }, + { + "epoch": 1.4325221238938053, + "grad_norm": 0.1123046875, + "learning_rate": 6.4186946902654864e-06, + "loss": 0.7934, + "step": 1295 + }, + { + "epoch": 1.4336283185840708, + "grad_norm": 0.10498046875, + "learning_rate": 6.415929203539823e-06, + "loss": 0.7381, + "step": 1296 + }, + { + "epoch": 1.4347345132743363, + "grad_norm": 0.111328125, + "learning_rate": 6.41316371681416e-06, + "loss": 0.7928, + "step": 1297 + }, + { + "epoch": 1.4358407079646018, + "grad_norm": 0.107421875, + "learning_rate": 6.410398230088496e-06, + "loss": 0.7714, + "step": 1298 + }, + { + "epoch": 1.4369469026548671, + "grad_norm": 0.11376953125, + "learning_rate": 6.407632743362833e-06, + "loss": 0.7851, + "step": 1299 + }, + { + "epoch": 1.4380530973451329, + "grad_norm": 0.09765625, + "learning_rate": 6.404867256637169e-06, + "loss": 0.7426, + "step": 1300 + }, + { + "epoch": 1.4391592920353982, + "grad_norm": 0.1025390625, + "learning_rate": 6.402101769911505e-06, + "loss": 0.7766, + "step": 1301 + }, + { + "epoch": 1.4402654867256637, + "grad_norm": 0.11376953125, + "learning_rate": 6.399336283185841e-06, + "loss": 0.7509, + "step": 1302 + }, + { + "epoch": 1.4413716814159292, + "grad_norm": 0.1025390625, + "learning_rate": 6.396570796460177e-06, + "loss": 0.7581, + "step": 1303 + }, + { + "epoch": 1.4424778761061947, + "grad_norm": 0.11181640625, + "learning_rate": 6.393805309734514e-06, + "loss": 0.7491, + "step": 1304 + }, + { + "epoch": 1.4435840707964602, + "grad_norm": 0.0986328125, + "learning_rate": 6.39103982300885e-06, + "loss": 0.7274, + "step": 1305 + }, + { + "epoch": 1.4446902654867257, + "grad_norm": 0.09814453125, + "learning_rate": 6.388274336283186e-06, + "loss": 0.7639, + "step": 1306 + }, + { + "epoch": 1.4457964601769913, + "grad_norm": 0.11328125, + "learning_rate": 6.3855088495575225e-06, + "loss": 0.8163, + "step": 1307 + }, + { + "epoch": 1.4469026548672566, + "grad_norm": 0.09716796875, + "learning_rate": 6.3827433628318595e-06, + "loss": 0.7313, + "step": 1308 + }, + { + "epoch": 1.448008849557522, + "grad_norm": 0.103515625, + "learning_rate": 6.379977876106195e-06, + "loss": 0.771, + "step": 1309 + }, + { + "epoch": 1.4491150442477876, + "grad_norm": 0.09912109375, + "learning_rate": 6.377212389380532e-06, + "loss": 0.7736, + "step": 1310 + }, + { + "epoch": 1.450221238938053, + "grad_norm": 0.0986328125, + "learning_rate": 6.374446902654869e-06, + "loss": 0.7662, + "step": 1311 + }, + { + "epoch": 1.4513274336283186, + "grad_norm": 0.10400390625, + "learning_rate": 6.371681415929204e-06, + "loss": 0.784, + "step": 1312 + }, + { + "epoch": 1.4524336283185841, + "grad_norm": 0.12109375, + "learning_rate": 6.368915929203541e-06, + "loss": 0.8322, + "step": 1313 + }, + { + "epoch": 1.4535398230088497, + "grad_norm": 0.1064453125, + "learning_rate": 6.366150442477876e-06, + "loss": 0.7751, + "step": 1314 + }, + { + "epoch": 1.454646017699115, + "grad_norm": 0.09619140625, + "learning_rate": 6.363384955752213e-06, + "loss": 0.7712, + "step": 1315 + }, + { + "epoch": 1.4557522123893805, + "grad_norm": 0.11083984375, + 
"learning_rate": 6.3606194690265485e-06, + "loss": 0.7461, + "step": 1316 + }, + { + "epoch": 1.456858407079646, + "grad_norm": 0.1318359375, + "learning_rate": 6.3578539823008855e-06, + "loss": 0.7939, + "step": 1317 + }, + { + "epoch": 1.4579646017699115, + "grad_norm": 0.11474609375, + "learning_rate": 6.355088495575222e-06, + "loss": 0.7745, + "step": 1318 + }, + { + "epoch": 1.459070796460177, + "grad_norm": 0.1201171875, + "learning_rate": 6.352323008849558e-06, + "loss": 0.8152, + "step": 1319 + }, + { + "epoch": 1.4601769911504425, + "grad_norm": 0.125, + "learning_rate": 6.349557522123895e-06, + "loss": 0.8529, + "step": 1320 + }, + { + "epoch": 1.461283185840708, + "grad_norm": 0.1171875, + "learning_rate": 6.346792035398231e-06, + "loss": 0.8006, + "step": 1321 + }, + { + "epoch": 1.4623893805309733, + "grad_norm": 0.1083984375, + "learning_rate": 6.344026548672567e-06, + "loss": 0.789, + "step": 1322 + }, + { + "epoch": 1.463495575221239, + "grad_norm": 0.1142578125, + "learning_rate": 6.341261061946903e-06, + "loss": 0.806, + "step": 1323 + }, + { + "epoch": 1.4646017699115044, + "grad_norm": 0.1162109375, + "learning_rate": 6.338495575221239e-06, + "loss": 0.7686, + "step": 1324 + }, + { + "epoch": 1.4657079646017699, + "grad_norm": 0.1005859375, + "learning_rate": 6.335730088495575e-06, + "loss": 0.7296, + "step": 1325 + }, + { + "epoch": 1.4668141592920354, + "grad_norm": 0.11865234375, + "learning_rate": 6.332964601769912e-06, + "loss": 0.831, + "step": 1326 + }, + { + "epoch": 1.467920353982301, + "grad_norm": 0.10888671875, + "learning_rate": 6.330199115044248e-06, + "loss": 0.7542, + "step": 1327 + }, + { + "epoch": 1.4690265486725664, + "grad_norm": 0.1044921875, + "learning_rate": 6.3274336283185845e-06, + "loss": 0.7748, + "step": 1328 + }, + { + "epoch": 1.4701327433628317, + "grad_norm": 0.11865234375, + "learning_rate": 6.3246681415929215e-06, + "loss": 0.7787, + "step": 1329 + }, + { + "epoch": 1.4712389380530975, + "grad_norm": 0.1064453125, + "learning_rate": 6.321902654867257e-06, + "loss": 0.7621, + "step": 1330 + }, + { + "epoch": 1.4723451327433628, + "grad_norm": 0.11572265625, + "learning_rate": 6.319137168141594e-06, + "loss": 0.7829, + "step": 1331 + }, + { + "epoch": 1.4734513274336283, + "grad_norm": 0.10693359375, + "learning_rate": 6.316371681415929e-06, + "loss": 0.8228, + "step": 1332 + }, + { + "epoch": 1.4745575221238938, + "grad_norm": 0.10546875, + "learning_rate": 6.313606194690266e-06, + "loss": 0.8048, + "step": 1333 + }, + { + "epoch": 1.4756637168141593, + "grad_norm": 0.1552734375, + "learning_rate": 6.310840707964603e-06, + "loss": 0.8119, + "step": 1334 + }, + { + "epoch": 1.4767699115044248, + "grad_norm": 0.11083984375, + "learning_rate": 6.308075221238938e-06, + "loss": 0.7564, + "step": 1335 + }, + { + "epoch": 1.4778761061946903, + "grad_norm": 0.11669921875, + "learning_rate": 6.305309734513275e-06, + "loss": 0.796, + "step": 1336 + }, + { + "epoch": 1.4789823008849559, + "grad_norm": 0.099609375, + "learning_rate": 6.3025442477876105e-06, + "loss": 0.7367, + "step": 1337 + }, + { + "epoch": 1.4800884955752212, + "grad_norm": 0.10302734375, + "learning_rate": 6.2997787610619475e-06, + "loss": 0.7695, + "step": 1338 + }, + { + "epoch": 1.4811946902654867, + "grad_norm": 0.10791015625, + "learning_rate": 6.297013274336284e-06, + "loss": 0.761, + "step": 1339 + }, + { + "epoch": 1.4823008849557522, + "grad_norm": 0.1123046875, + "learning_rate": 6.29424778761062e-06, + "loss": 0.7709, + "step": 1340 + }, + { + "epoch": 
1.4834070796460177, + "grad_norm": 0.11279296875, + "learning_rate": 6.291482300884957e-06, + "loss": 0.8421, + "step": 1341 + }, + { + "epoch": 1.4845132743362832, + "grad_norm": 0.11279296875, + "learning_rate": 6.288716814159293e-06, + "loss": 0.7971, + "step": 1342 + }, + { + "epoch": 1.4856194690265487, + "grad_norm": 0.115234375, + "learning_rate": 6.285951327433629e-06, + "loss": 0.8255, + "step": 1343 + }, + { + "epoch": 1.4867256637168142, + "grad_norm": 0.10693359375, + "learning_rate": 6.283185840707965e-06, + "loss": 0.7843, + "step": 1344 + }, + { + "epoch": 1.4878318584070795, + "grad_norm": 0.10498046875, + "learning_rate": 6.280420353982302e-06, + "loss": 0.767, + "step": 1345 + }, + { + "epoch": 1.488938053097345, + "grad_norm": 0.10791015625, + "learning_rate": 6.277654867256637e-06, + "loss": 0.77, + "step": 1346 + }, + { + "epoch": 1.4900442477876106, + "grad_norm": 0.11767578125, + "learning_rate": 6.274889380530974e-06, + "loss": 0.7652, + "step": 1347 + }, + { + "epoch": 1.491150442477876, + "grad_norm": 0.09716796875, + "learning_rate": 6.27212389380531e-06, + "loss": 0.7472, + "step": 1348 + }, + { + "epoch": 1.4922566371681416, + "grad_norm": 0.10595703125, + "learning_rate": 6.2693584070796466e-06, + "loss": 0.8007, + "step": 1349 + }, + { + "epoch": 1.4933628318584071, + "grad_norm": 0.107421875, + "learning_rate": 6.2665929203539835e-06, + "loss": 0.7702, + "step": 1350 + }, + { + "epoch": 1.4944690265486726, + "grad_norm": 0.11572265625, + "learning_rate": 6.263827433628319e-06, + "loss": 0.7775, + "step": 1351 + }, + { + "epoch": 1.495575221238938, + "grad_norm": 0.10400390625, + "learning_rate": 6.261061946902656e-06, + "loss": 0.7493, + "step": 1352 + }, + { + "epoch": 1.4966814159292037, + "grad_norm": 0.115234375, + "learning_rate": 6.258296460176991e-06, + "loss": 0.8142, + "step": 1353 + }, + { + "epoch": 1.497787610619469, + "grad_norm": 0.11669921875, + "learning_rate": 6.255530973451328e-06, + "loss": 0.7946, + "step": 1354 + }, + { + "epoch": 1.4988938053097345, + "grad_norm": 0.1201171875, + "learning_rate": 6.252765486725663e-06, + "loss": 0.8088, + "step": 1355 + }, + { + "epoch": 1.5, + "grad_norm": 0.115234375, + "learning_rate": 6.25e-06, + "loss": 0.7917, + "step": 1356 + }, + { + "epoch": 1.5011061946902655, + "grad_norm": 0.10693359375, + "learning_rate": 6.247234513274337e-06, + "loss": 0.7767, + "step": 1357 + }, + { + "epoch": 1.502212389380531, + "grad_norm": 0.099609375, + "learning_rate": 6.2444690265486726e-06, + "loss": 0.7521, + "step": 1358 + }, + { + "epoch": 1.5033185840707963, + "grad_norm": 0.10546875, + "learning_rate": 6.2417035398230095e-06, + "loss": 0.7443, + "step": 1359 + }, + { + "epoch": 1.504424778761062, + "grad_norm": 0.1044921875, + "learning_rate": 6.238938053097346e-06, + "loss": 0.8354, + "step": 1360 + }, + { + "epoch": 1.5055309734513274, + "grad_norm": 0.10400390625, + "learning_rate": 6.236172566371682e-06, + "loss": 0.769, + "step": 1361 + }, + { + "epoch": 1.5066371681415929, + "grad_norm": 0.1181640625, + "learning_rate": 6.233407079646018e-06, + "loss": 0.8175, + "step": 1362 + }, + { + "epoch": 1.5077433628318584, + "grad_norm": 0.1064453125, + "learning_rate": 6.230641592920355e-06, + "loss": 0.8051, + "step": 1363 + }, + { + "epoch": 1.508849557522124, + "grad_norm": 0.10498046875, + "learning_rate": 6.227876106194691e-06, + "loss": 0.7656, + "step": 1364 + }, + { + "epoch": 1.5099557522123894, + "grad_norm": 0.10791015625, + "learning_rate": 6.225110619469027e-06, + "loss": 0.7638, + "step": 1365 + 
}, + { + "epoch": 1.5110619469026547, + "grad_norm": 0.1044921875, + "learning_rate": 6.222345132743364e-06, + "loss": 0.7911, + "step": 1366 + }, + { + "epoch": 1.5121681415929205, + "grad_norm": 0.11669921875, + "learning_rate": 6.219579646017699e-06, + "loss": 0.8184, + "step": 1367 + }, + { + "epoch": 1.5132743362831858, + "grad_norm": 0.1259765625, + "learning_rate": 6.216814159292036e-06, + "loss": 0.7467, + "step": 1368 + }, + { + "epoch": 1.5143805309734515, + "grad_norm": 0.1181640625, + "learning_rate": 6.214048672566372e-06, + "loss": 0.7946, + "step": 1369 + }, + { + "epoch": 1.5154867256637168, + "grad_norm": 0.1005859375, + "learning_rate": 6.211283185840709e-06, + "loss": 0.7438, + "step": 1370 + }, + { + "epoch": 1.5165929203539823, + "grad_norm": 0.1044921875, + "learning_rate": 6.208517699115044e-06, + "loss": 0.7792, + "step": 1371 + }, + { + "epoch": 1.5176991150442478, + "grad_norm": 0.0986328125, + "learning_rate": 6.205752212389381e-06, + "loss": 0.7757, + "step": 1372 + }, + { + "epoch": 1.518805309734513, + "grad_norm": 0.10986328125, + "learning_rate": 6.202986725663718e-06, + "loss": 0.7736, + "step": 1373 + }, + { + "epoch": 1.5199115044247788, + "grad_norm": 0.1103515625, + "learning_rate": 6.200221238938053e-06, + "loss": 0.7904, + "step": 1374 + }, + { + "epoch": 1.5210176991150441, + "grad_norm": 0.1015625, + "learning_rate": 6.19745575221239e-06, + "loss": 0.7462, + "step": 1375 + }, + { + "epoch": 1.5221238938053099, + "grad_norm": 0.099609375, + "learning_rate": 6.194690265486726e-06, + "loss": 0.7541, + "step": 1376 + }, + { + "epoch": 1.5232300884955752, + "grad_norm": 0.125, + "learning_rate": 6.191924778761062e-06, + "loss": 0.8348, + "step": 1377 + }, + { + "epoch": 1.5243362831858407, + "grad_norm": 0.1025390625, + "learning_rate": 6.1891592920353985e-06, + "loss": 0.7517, + "step": 1378 + }, + { + "epoch": 1.5254424778761062, + "grad_norm": 0.1171875, + "learning_rate": 6.186393805309735e-06, + "loss": 0.7829, + "step": 1379 + }, + { + "epoch": 1.5265486725663717, + "grad_norm": 0.109375, + "learning_rate": 6.1836283185840716e-06, + "loss": 0.7696, + "step": 1380 + }, + { + "epoch": 1.5276548672566372, + "grad_norm": 0.10693359375, + "learning_rate": 6.180862831858408e-06, + "loss": 0.7842, + "step": 1381 + }, + { + "epoch": 1.5287610619469025, + "grad_norm": 0.1298828125, + "learning_rate": 6.178097345132744e-06, + "loss": 0.8161, + "step": 1382 + }, + { + "epoch": 1.5298672566371683, + "grad_norm": 0.10302734375, + "learning_rate": 6.17533185840708e-06, + "loss": 0.7831, + "step": 1383 + }, + { + "epoch": 1.5309734513274336, + "grad_norm": 0.11474609375, + "learning_rate": 6.172566371681417e-06, + "loss": 0.7904, + "step": 1384 + }, + { + "epoch": 1.532079646017699, + "grad_norm": 0.099609375, + "learning_rate": 6.169800884955752e-06, + "loss": 0.783, + "step": 1385 + }, + { + "epoch": 1.5331858407079646, + "grad_norm": 0.111328125, + "learning_rate": 6.167035398230089e-06, + "loss": 0.7514, + "step": 1386 + }, + { + "epoch": 1.5342920353982301, + "grad_norm": 0.10595703125, + "learning_rate": 6.164269911504426e-06, + "loss": 0.7837, + "step": 1387 + }, + { + "epoch": 1.5353982300884956, + "grad_norm": 0.103515625, + "learning_rate": 6.161504424778761e-06, + "loss": 0.7558, + "step": 1388 + }, + { + "epoch": 1.536504424778761, + "grad_norm": 0.1240234375, + "learning_rate": 6.158738938053098e-06, + "loss": 0.8057, + "step": 1389 + }, + { + "epoch": 1.5376106194690267, + "grad_norm": 0.1015625, + "learning_rate": 6.155973451327434e-06, + "loss": 
0.7409, + "step": 1390 + }, + { + "epoch": 1.538716814159292, + "grad_norm": 0.1015625, + "learning_rate": 6.153207964601771e-06, + "loss": 0.7506, + "step": 1391 + }, + { + "epoch": 1.5398230088495575, + "grad_norm": 0.1171875, + "learning_rate": 6.150442477876106e-06, + "loss": 0.7677, + "step": 1392 + }, + { + "epoch": 1.540929203539823, + "grad_norm": 0.11083984375, + "learning_rate": 6.147676991150443e-06, + "loss": 0.785, + "step": 1393 + }, + { + "epoch": 1.5420353982300885, + "grad_norm": 0.11376953125, + "learning_rate": 6.14491150442478e-06, + "loss": 0.7919, + "step": 1394 + }, + { + "epoch": 1.543141592920354, + "grad_norm": 0.1376953125, + "learning_rate": 6.142146017699115e-06, + "loss": 0.7928, + "step": 1395 + }, + { + "epoch": 1.5442477876106193, + "grad_norm": 0.10498046875, + "learning_rate": 6.139380530973452e-06, + "loss": 0.7403, + "step": 1396 + }, + { + "epoch": 1.545353982300885, + "grad_norm": 0.10400390625, + "learning_rate": 6.136615044247788e-06, + "loss": 0.7423, + "step": 1397 + }, + { + "epoch": 1.5464601769911503, + "grad_norm": 0.10546875, + "learning_rate": 6.133849557522124e-06, + "loss": 0.7626, + "step": 1398 + }, + { + "epoch": 1.547566371681416, + "grad_norm": 0.10595703125, + "learning_rate": 6.1310840707964605e-06, + "loss": 0.7716, + "step": 1399 + }, + { + "epoch": 1.5486725663716814, + "grad_norm": 0.10302734375, + "learning_rate": 6.1283185840707975e-06, + "loss": 0.7424, + "step": 1400 + }, + { + "epoch": 1.549778761061947, + "grad_norm": 0.09521484375, + "learning_rate": 6.125553097345133e-06, + "loss": 0.7253, + "step": 1401 + }, + { + "epoch": 1.5508849557522124, + "grad_norm": 0.09619140625, + "learning_rate": 6.12278761061947e-06, + "loss": 0.7797, + "step": 1402 + }, + { + "epoch": 1.551991150442478, + "grad_norm": 0.12353515625, + "learning_rate": 6.120022123893806e-06, + "loss": 0.7897, + "step": 1403 + }, + { + "epoch": 1.5530973451327434, + "grad_norm": 0.10595703125, + "learning_rate": 6.117256637168142e-06, + "loss": 0.7841, + "step": 1404 + }, + { + "epoch": 1.5542035398230087, + "grad_norm": 0.10986328125, + "learning_rate": 6.114491150442479e-06, + "loss": 0.7442, + "step": 1405 + }, + { + "epoch": 1.5553097345132745, + "grad_norm": 0.10400390625, + "learning_rate": 6.111725663716814e-06, + "loss": 0.7515, + "step": 1406 + }, + { + "epoch": 1.5564159292035398, + "grad_norm": 0.11767578125, + "learning_rate": 6.108960176991151e-06, + "loss": 0.7788, + "step": 1407 + }, + { + "epoch": 1.5575221238938053, + "grad_norm": 0.1220703125, + "learning_rate": 6.1061946902654865e-06, + "loss": 0.8172, + "step": 1408 + }, + { + "epoch": 1.5586283185840708, + "grad_norm": 0.10791015625, + "learning_rate": 6.1034292035398234e-06, + "loss": 0.8012, + "step": 1409 + }, + { + "epoch": 1.5597345132743363, + "grad_norm": 0.10791015625, + "learning_rate": 6.10066371681416e-06, + "loss": 0.7796, + "step": 1410 + }, + { + "epoch": 1.5608407079646018, + "grad_norm": 0.0986328125, + "learning_rate": 6.097898230088496e-06, + "loss": 0.7448, + "step": 1411 + }, + { + "epoch": 1.5619469026548671, + "grad_norm": 0.1123046875, + "learning_rate": 6.095132743362833e-06, + "loss": 0.7877, + "step": 1412 + }, + { + "epoch": 1.5630530973451329, + "grad_norm": 0.1162109375, + "learning_rate": 6.092367256637168e-06, + "loss": 0.7464, + "step": 1413 + }, + { + "epoch": 1.5641592920353982, + "grad_norm": 0.1005859375, + "learning_rate": 6.089601769911505e-06, + "loss": 0.7515, + "step": 1414 + }, + { + "epoch": 1.5652654867256637, + "grad_norm": 0.10009765625, + 
"learning_rate": 6.086836283185841e-06, + "loss": 0.7774, + "step": 1415 + }, + { + "epoch": 1.5663716814159292, + "grad_norm": 0.1572265625, + "learning_rate": 6.084070796460177e-06, + "loss": 0.9205, + "step": 1416 + }, + { + "epoch": 1.5674778761061947, + "grad_norm": 0.11767578125, + "learning_rate": 6.081305309734514e-06, + "loss": 0.7856, + "step": 1417 + }, + { + "epoch": 1.5685840707964602, + "grad_norm": 0.1328125, + "learning_rate": 6.07853982300885e-06, + "loss": 0.7921, + "step": 1418 + }, + { + "epoch": 1.5696902654867255, + "grad_norm": 0.10595703125, + "learning_rate": 6.075774336283186e-06, + "loss": 0.7577, + "step": 1419 + }, + { + "epoch": 1.5707964601769913, + "grad_norm": 0.10888671875, + "learning_rate": 6.0730088495575225e-06, + "loss": 0.7669, + "step": 1420 + }, + { + "epoch": 1.5719026548672566, + "grad_norm": 0.1064453125, + "learning_rate": 6.0702433628318595e-06, + "loss": 0.7907, + "step": 1421 + }, + { + "epoch": 1.573008849557522, + "grad_norm": 0.10888671875, + "learning_rate": 6.067477876106195e-06, + "loss": 0.7543, + "step": 1422 + }, + { + "epoch": 1.5741150442477876, + "grad_norm": 0.1103515625, + "learning_rate": 6.064712389380532e-06, + "loss": 0.8084, + "step": 1423 + }, + { + "epoch": 1.575221238938053, + "grad_norm": 0.10009765625, + "learning_rate": 6.061946902654868e-06, + "loss": 0.7494, + "step": 1424 + }, + { + "epoch": 1.5763274336283186, + "grad_norm": 0.1083984375, + "learning_rate": 6.059181415929204e-06, + "loss": 0.7702, + "step": 1425 + }, + { + "epoch": 1.577433628318584, + "grad_norm": 0.10546875, + "learning_rate": 6.056415929203541e-06, + "loss": 0.7991, + "step": 1426 + }, + { + "epoch": 1.5785398230088497, + "grad_norm": 0.09765625, + "learning_rate": 6.053650442477876e-06, + "loss": 0.7315, + "step": 1427 + }, + { + "epoch": 1.579646017699115, + "grad_norm": 0.1162109375, + "learning_rate": 6.050884955752213e-06, + "loss": 0.7738, + "step": 1428 + }, + { + "epoch": 1.5807522123893807, + "grad_norm": 0.11572265625, + "learning_rate": 6.0481194690265485e-06, + "loss": 0.7326, + "step": 1429 + }, + { + "epoch": 1.581858407079646, + "grad_norm": 0.10107421875, + "learning_rate": 6.0453539823008855e-06, + "loss": 0.7502, + "step": 1430 + }, + { + "epoch": 1.5829646017699115, + "grad_norm": 0.12060546875, + "learning_rate": 6.042588495575222e-06, + "loss": 0.8144, + "step": 1431 + }, + { + "epoch": 1.584070796460177, + "grad_norm": 0.12451171875, + "learning_rate": 6.039823008849558e-06, + "loss": 0.8158, + "step": 1432 + }, + { + "epoch": 1.5851769911504425, + "grad_norm": 0.123046875, + "learning_rate": 6.037057522123895e-06, + "loss": 0.7925, + "step": 1433 + }, + { + "epoch": 1.586283185840708, + "grad_norm": 0.11865234375, + "learning_rate": 6.03429203539823e-06, + "loss": 0.785, + "step": 1434 + }, + { + "epoch": 1.5873893805309733, + "grad_norm": 0.11669921875, + "learning_rate": 6.031526548672567e-06, + "loss": 0.8454, + "step": 1435 + }, + { + "epoch": 1.588495575221239, + "grad_norm": 0.10791015625, + "learning_rate": 6.028761061946903e-06, + "loss": 0.7438, + "step": 1436 + }, + { + "epoch": 1.5896017699115044, + "grad_norm": 0.1201171875, + "learning_rate": 6.025995575221239e-06, + "loss": 0.7923, + "step": 1437 + }, + { + "epoch": 1.5907079646017699, + "grad_norm": 0.10205078125, + "learning_rate": 6.023230088495575e-06, + "loss": 0.7397, + "step": 1438 + }, + { + "epoch": 1.5918141592920354, + "grad_norm": 0.130859375, + "learning_rate": 6.020464601769912e-06, + "loss": 0.8359, + "step": 1439 + }, + { + "epoch": 
1.592920353982301, + "grad_norm": 0.0986328125, + "learning_rate": 6.0176991150442484e-06, + "loss": 0.7321, + "step": 1440 + }, + { + "epoch": 1.5940265486725664, + "grad_norm": 0.10888671875, + "learning_rate": 6.0149336283185846e-06, + "loss": 0.7625, + "step": 1441 + }, + { + "epoch": 1.5951327433628317, + "grad_norm": 0.1064453125, + "learning_rate": 6.0121681415929215e-06, + "loss": 0.7821, + "step": 1442 + }, + { + "epoch": 1.5962389380530975, + "grad_norm": 0.1083984375, + "learning_rate": 6.009402654867257e-06, + "loss": 0.7619, + "step": 1443 + }, + { + "epoch": 1.5973451327433628, + "grad_norm": 0.10107421875, + "learning_rate": 6.006637168141594e-06, + "loss": 0.7333, + "step": 1444 + }, + { + "epoch": 1.5984513274336283, + "grad_norm": 0.1015625, + "learning_rate": 6.003871681415929e-06, + "loss": 0.7691, + "step": 1445 + }, + { + "epoch": 1.5995575221238938, + "grad_norm": 0.10302734375, + "learning_rate": 6.001106194690266e-06, + "loss": 0.7728, + "step": 1446 + }, + { + "epoch": 1.6006637168141593, + "grad_norm": 0.1064453125, + "learning_rate": 5.998340707964603e-06, + "loss": 0.7761, + "step": 1447 + }, + { + "epoch": 1.6017699115044248, + "grad_norm": 0.10888671875, + "learning_rate": 5.995575221238938e-06, + "loss": 0.7417, + "step": 1448 + }, + { + "epoch": 1.6028761061946901, + "grad_norm": 0.123046875, + "learning_rate": 5.992809734513275e-06, + "loss": 0.7742, + "step": 1449 + }, + { + "epoch": 1.6039823008849559, + "grad_norm": 0.10693359375, + "learning_rate": 5.9900442477876105e-06, + "loss": 0.7954, + "step": 1450 + }, + { + "epoch": 1.6050884955752212, + "grad_norm": 0.11572265625, + "learning_rate": 5.9872787610619475e-06, + "loss": 0.8015, + "step": 1451 + }, + { + "epoch": 1.606194690265487, + "grad_norm": 0.16015625, + "learning_rate": 5.984513274336284e-06, + "loss": 0.7438, + "step": 1452 + }, + { + "epoch": 1.6073008849557522, + "grad_norm": 0.10693359375, + "learning_rate": 5.98174778761062e-06, + "loss": 0.7816, + "step": 1453 + }, + { + "epoch": 1.6084070796460177, + "grad_norm": 0.11328125, + "learning_rate": 5.978982300884957e-06, + "loss": 0.7403, + "step": 1454 + }, + { + "epoch": 1.6095132743362832, + "grad_norm": 0.11865234375, + "learning_rate": 5.976216814159293e-06, + "loss": 0.7814, + "step": 1455 + }, + { + "epoch": 1.6106194690265485, + "grad_norm": 0.126953125, + "learning_rate": 5.973451327433629e-06, + "loss": 0.7485, + "step": 1456 + }, + { + "epoch": 1.6117256637168142, + "grad_norm": 0.1474609375, + "learning_rate": 5.970685840707965e-06, + "loss": 0.8125, + "step": 1457 + }, + { + "epoch": 1.6128318584070795, + "grad_norm": 0.107421875, + "learning_rate": 5.967920353982301e-06, + "loss": 0.7774, + "step": 1458 + }, + { + "epoch": 1.6139380530973453, + "grad_norm": 0.10595703125, + "learning_rate": 5.965154867256637e-06, + "loss": 0.7866, + "step": 1459 + }, + { + "epoch": 1.6150442477876106, + "grad_norm": 0.11669921875, + "learning_rate": 5.962389380530974e-06, + "loss": 0.8027, + "step": 1460 + }, + { + "epoch": 1.616150442477876, + "grad_norm": 0.1123046875, + "learning_rate": 5.95962389380531e-06, + "loss": 0.7799, + "step": 1461 + }, + { + "epoch": 1.6172566371681416, + "grad_norm": 0.1337890625, + "learning_rate": 5.956858407079647e-06, + "loss": 0.8049, + "step": 1462 + }, + { + "epoch": 1.6183628318584071, + "grad_norm": 0.11083984375, + "learning_rate": 5.9540929203539836e-06, + "loss": 0.7752, + "step": 1463 + }, + { + "epoch": 1.6194690265486726, + "grad_norm": 0.10400390625, + "learning_rate": 5.951327433628319e-06, + 
"loss": 0.7555, + "step": 1464 + }, + { + "epoch": 1.620575221238938, + "grad_norm": 0.1142578125, + "learning_rate": 5.948561946902656e-06, + "loss": 0.7794, + "step": 1465 + }, + { + "epoch": 1.6216814159292037, + "grad_norm": 0.150390625, + "learning_rate": 5.945796460176991e-06, + "loss": 0.8217, + "step": 1466 + }, + { + "epoch": 1.622787610619469, + "grad_norm": 0.11181640625, + "learning_rate": 5.943030973451328e-06, + "loss": 0.748, + "step": 1467 + }, + { + "epoch": 1.6238938053097345, + "grad_norm": 0.107421875, + "learning_rate": 5.940265486725663e-06, + "loss": 0.7414, + "step": 1468 + }, + { + "epoch": 1.625, + "grad_norm": 0.10009765625, + "learning_rate": 5.9375e-06, + "loss": 0.7372, + "step": 1469 + }, + { + "epoch": 1.6261061946902655, + "grad_norm": 0.1025390625, + "learning_rate": 5.934734513274337e-06, + "loss": 0.7652, + "step": 1470 + }, + { + "epoch": 1.627212389380531, + "grad_norm": 0.1064453125, + "learning_rate": 5.931969026548673e-06, + "loss": 0.7592, + "step": 1471 + }, + { + "epoch": 1.6283185840707963, + "grad_norm": 0.1123046875, + "learning_rate": 5.9292035398230096e-06, + "loss": 0.7615, + "step": 1472 + }, + { + "epoch": 1.629424778761062, + "grad_norm": 0.10888671875, + "learning_rate": 5.926438053097346e-06, + "loss": 0.7545, + "step": 1473 + }, + { + "epoch": 1.6305309734513274, + "grad_norm": 0.12109375, + "learning_rate": 5.923672566371682e-06, + "loss": 0.793, + "step": 1474 + }, + { + "epoch": 1.6316371681415929, + "grad_norm": 0.10546875, + "learning_rate": 5.920907079646018e-06, + "loss": 0.754, + "step": 1475 + }, + { + "epoch": 1.6327433628318584, + "grad_norm": 0.125, + "learning_rate": 5.918141592920355e-06, + "loss": 0.8105, + "step": 1476 + }, + { + "epoch": 1.633849557522124, + "grad_norm": 0.11474609375, + "learning_rate": 5.915376106194691e-06, + "loss": 0.7868, + "step": 1477 + }, + { + "epoch": 1.6349557522123894, + "grad_norm": 0.107421875, + "learning_rate": 5.912610619469027e-06, + "loss": 0.7598, + "step": 1478 + }, + { + "epoch": 1.6360619469026547, + "grad_norm": 0.1064453125, + "learning_rate": 5.909845132743363e-06, + "loss": 0.7796, + "step": 1479 + }, + { + "epoch": 1.6371681415929205, + "grad_norm": 0.1181640625, + "learning_rate": 5.907079646017699e-06, + "loss": 0.7882, + "step": 1480 + }, + { + "epoch": 1.6382743362831858, + "grad_norm": 0.10693359375, + "learning_rate": 5.904314159292036e-06, + "loss": 0.7824, + "step": 1481 + }, + { + "epoch": 1.6393805309734515, + "grad_norm": 0.126953125, + "learning_rate": 5.901548672566372e-06, + "loss": 0.7533, + "step": 1482 + }, + { + "epoch": 1.6404867256637168, + "grad_norm": 0.12353515625, + "learning_rate": 5.898783185840709e-06, + "loss": 0.7913, + "step": 1483 + }, + { + "epoch": 1.6415929203539823, + "grad_norm": 0.1162109375, + "learning_rate": 5.896017699115044e-06, + "loss": 0.8106, + "step": 1484 + }, + { + "epoch": 1.6426991150442478, + "grad_norm": 0.10546875, + "learning_rate": 5.893252212389381e-06, + "loss": 0.7783, + "step": 1485 + }, + { + "epoch": 1.643805309734513, + "grad_norm": 0.1044921875, + "learning_rate": 5.890486725663718e-06, + "loss": 0.7747, + "step": 1486 + }, + { + "epoch": 1.6449115044247788, + "grad_norm": 0.1025390625, + "learning_rate": 5.887721238938053e-06, + "loss": 0.748, + "step": 1487 + }, + { + "epoch": 1.6460176991150441, + "grad_norm": 0.1279296875, + "learning_rate": 5.88495575221239e-06, + "loss": 0.795, + "step": 1488 + }, + { + "epoch": 1.6471238938053099, + "grad_norm": 0.1044921875, + "learning_rate": 5.882190265486725e-06, + 
"loss": 0.7913, + "step": 1489 + }, + { + "epoch": 1.6482300884955752, + "grad_norm": 0.10498046875, + "learning_rate": 5.879424778761062e-06, + "loss": 0.8064, + "step": 1490 + }, + { + "epoch": 1.6493362831858407, + "grad_norm": 0.1005859375, + "learning_rate": 5.8766592920353985e-06, + "loss": 0.745, + "step": 1491 + }, + { + "epoch": 1.6504424778761062, + "grad_norm": 0.11572265625, + "learning_rate": 5.873893805309735e-06, + "loss": 0.8044, + "step": 1492 + }, + { + "epoch": 1.6515486725663717, + "grad_norm": 0.11083984375, + "learning_rate": 5.871128318584072e-06, + "loss": 0.7558, + "step": 1493 + }, + { + "epoch": 1.6526548672566372, + "grad_norm": 0.11181640625, + "learning_rate": 5.868362831858408e-06, + "loss": 0.7956, + "step": 1494 + }, + { + "epoch": 1.6537610619469025, + "grad_norm": 0.1064453125, + "learning_rate": 5.865597345132744e-06, + "loss": 0.7605, + "step": 1495 + }, + { + "epoch": 1.6548672566371683, + "grad_norm": 0.10986328125, + "learning_rate": 5.86283185840708e-06, + "loss": 0.7831, + "step": 1496 + }, + { + "epoch": 1.6559734513274336, + "grad_norm": 0.10302734375, + "learning_rate": 5.860066371681417e-06, + "loss": 0.7522, + "step": 1497 + }, + { + "epoch": 1.657079646017699, + "grad_norm": 0.1064453125, + "learning_rate": 5.857300884955752e-06, + "loss": 0.7712, + "step": 1498 + }, + { + "epoch": 1.6581858407079646, + "grad_norm": 0.1162109375, + "learning_rate": 5.854535398230089e-06, + "loss": 0.792, + "step": 1499 + }, + { + "epoch": 1.6592920353982301, + "grad_norm": 0.109375, + "learning_rate": 5.851769911504426e-06, + "loss": 0.792, + "step": 1500 + }, + { + "epoch": 1.6603982300884956, + "grad_norm": 0.1064453125, + "learning_rate": 5.8490044247787614e-06, + "loss": 0.7801, + "step": 1501 + }, + { + "epoch": 1.661504424778761, + "grad_norm": 0.11181640625, + "learning_rate": 5.846238938053098e-06, + "loss": 0.7714, + "step": 1502 + }, + { + "epoch": 1.6626106194690267, + "grad_norm": 0.1083984375, + "learning_rate": 5.843473451327434e-06, + "loss": 0.7745, + "step": 1503 + }, + { + "epoch": 1.663716814159292, + "grad_norm": 0.10498046875, + "learning_rate": 5.840707964601771e-06, + "loss": 0.7309, + "step": 1504 + }, + { + "epoch": 1.6648230088495575, + "grad_norm": 0.1142578125, + "learning_rate": 5.837942477876106e-06, + "loss": 0.7794, + "step": 1505 + }, + { + "epoch": 1.665929203539823, + "grad_norm": 0.12060546875, + "learning_rate": 5.835176991150443e-06, + "loss": 0.8119, + "step": 1506 + }, + { + "epoch": 1.6670353982300885, + "grad_norm": 0.1064453125, + "learning_rate": 5.83241150442478e-06, + "loss": 0.749, + "step": 1507 + }, + { + "epoch": 1.668141592920354, + "grad_norm": 0.10302734375, + "learning_rate": 5.829646017699115e-06, + "loss": 0.7454, + "step": 1508 + }, + { + "epoch": 1.6692477876106193, + "grad_norm": 0.11376953125, + "learning_rate": 5.826880530973452e-06, + "loss": 0.7814, + "step": 1509 + }, + { + "epoch": 1.670353982300885, + "grad_norm": 0.1064453125, + "learning_rate": 5.824115044247787e-06, + "loss": 0.8073, + "step": 1510 + }, + { + "epoch": 1.6714601769911503, + "grad_norm": 0.11962890625, + "learning_rate": 5.821349557522124e-06, + "loss": 0.8017, + "step": 1511 + }, + { + "epoch": 1.672566371681416, + "grad_norm": 0.130859375, + "learning_rate": 5.8185840707964605e-06, + "loss": 0.8499, + "step": 1512 + }, + { + "epoch": 1.6736725663716814, + "grad_norm": 0.11572265625, + "learning_rate": 5.815818584070797e-06, + "loss": 0.7858, + "step": 1513 + }, + { + "epoch": 1.674778761061947, + "grad_norm": 0.11083984375, 
+ "learning_rate": 5.813053097345133e-06, + "loss": 0.7649, + "step": 1514 + }, + { + "epoch": 1.6758849557522124, + "grad_norm": 0.1015625, + "learning_rate": 5.81028761061947e-06, + "loss": 0.7746, + "step": 1515 + }, + { + "epoch": 1.676991150442478, + "grad_norm": 0.10986328125, + "learning_rate": 5.807522123893806e-06, + "loss": 0.7676, + "step": 1516 + }, + { + "epoch": 1.6780973451327434, + "grad_norm": 0.09912109375, + "learning_rate": 5.804756637168142e-06, + "loss": 0.7542, + "step": 1517 + }, + { + "epoch": 1.6792035398230087, + "grad_norm": 0.099609375, + "learning_rate": 5.801991150442479e-06, + "loss": 0.7562, + "step": 1518 + }, + { + "epoch": 1.6803097345132745, + "grad_norm": 0.11376953125, + "learning_rate": 5.799225663716814e-06, + "loss": 0.8017, + "step": 1519 + }, + { + "epoch": 1.6814159292035398, + "grad_norm": 0.1025390625, + "learning_rate": 5.796460176991151e-06, + "loss": 0.7577, + "step": 1520 + }, + { + "epoch": 1.6825221238938053, + "grad_norm": 0.12109375, + "learning_rate": 5.7936946902654865e-06, + "loss": 0.8063, + "step": 1521 + }, + { + "epoch": 1.6836283185840708, + "grad_norm": 0.1123046875, + "learning_rate": 5.7909292035398235e-06, + "loss": 0.7456, + "step": 1522 + }, + { + "epoch": 1.6847345132743363, + "grad_norm": 0.119140625, + "learning_rate": 5.7881637168141604e-06, + "loss": 0.823, + "step": 1523 + }, + { + "epoch": 1.6858407079646018, + "grad_norm": 0.10546875, + "learning_rate": 5.785398230088496e-06, + "loss": 0.774, + "step": 1524 + }, + { + "epoch": 1.6869469026548671, + "grad_norm": 0.1044921875, + "learning_rate": 5.782632743362833e-06, + "loss": 0.7451, + "step": 1525 + }, + { + "epoch": 1.6880530973451329, + "grad_norm": 0.0986328125, + "learning_rate": 5.779867256637168e-06, + "loss": 0.7807, + "step": 1526 + }, + { + "epoch": 1.6891592920353982, + "grad_norm": 0.1044921875, + "learning_rate": 5.777101769911505e-06, + "loss": 0.7594, + "step": 1527 + }, + { + "epoch": 1.6902654867256637, + "grad_norm": 0.1298828125, + "learning_rate": 5.774336283185841e-06, + "loss": 0.7831, + "step": 1528 + }, + { + "epoch": 1.6913716814159292, + "grad_norm": 0.11865234375, + "learning_rate": 5.771570796460177e-06, + "loss": 0.7465, + "step": 1529 + }, + { + "epoch": 1.6924778761061947, + "grad_norm": 0.107421875, + "learning_rate": 5.768805309734514e-06, + "loss": 0.7637, + "step": 1530 + }, + { + "epoch": 1.6935840707964602, + "grad_norm": 0.111328125, + "learning_rate": 5.76603982300885e-06, + "loss": 0.7798, + "step": 1531 + }, + { + "epoch": 1.6946902654867255, + "grad_norm": 0.11181640625, + "learning_rate": 5.7632743362831864e-06, + "loss": 0.7606, + "step": 1532 + }, + { + "epoch": 1.6957964601769913, + "grad_norm": 0.11328125, + "learning_rate": 5.7605088495575226e-06, + "loss": 0.7665, + "step": 1533 + }, + { + "epoch": 1.6969026548672566, + "grad_norm": 0.10693359375, + "learning_rate": 5.757743362831859e-06, + "loss": 0.7594, + "step": 1534 + }, + { + "epoch": 1.698008849557522, + "grad_norm": 0.10546875, + "learning_rate": 5.754977876106195e-06, + "loss": 0.7501, + "step": 1535 + }, + { + "epoch": 1.6991150442477876, + "grad_norm": 0.11376953125, + "learning_rate": 5.752212389380532e-06, + "loss": 0.7838, + "step": 1536 + }, + { + "epoch": 1.700221238938053, + "grad_norm": 0.1240234375, + "learning_rate": 5.749446902654868e-06, + "loss": 0.801, + "step": 1537 + }, + { + "epoch": 1.7013274336283186, + "grad_norm": 0.1376953125, + "learning_rate": 5.746681415929204e-06, + "loss": 0.7628, + "step": 1538 + }, + { + "epoch": 
1.702433628318584, + "grad_norm": 0.12353515625, + "learning_rate": 5.743915929203541e-06, + "loss": 0.7814, + "step": 1539 + }, + { + "epoch": 1.7035398230088497, + "grad_norm": 0.11865234375, + "learning_rate": 5.741150442477876e-06, + "loss": 0.7673, + "step": 1540 + }, + { + "epoch": 1.704646017699115, + "grad_norm": 0.12255859375, + "learning_rate": 5.738384955752213e-06, + "loss": 0.8113, + "step": 1541 + }, + { + "epoch": 1.7057522123893807, + "grad_norm": 0.1123046875, + "learning_rate": 5.7356194690265485e-06, + "loss": 0.791, + "step": 1542 + }, + { + "epoch": 1.706858407079646, + "grad_norm": 0.1259765625, + "learning_rate": 5.7328539823008855e-06, + "loss": 0.7841, + "step": 1543 + }, + { + "epoch": 1.7079646017699115, + "grad_norm": 0.10546875, + "learning_rate": 5.730088495575221e-06, + "loss": 0.7863, + "step": 1544 + }, + { + "epoch": 1.709070796460177, + "grad_norm": 0.10888671875, + "learning_rate": 5.727323008849558e-06, + "loss": 0.7586, + "step": 1545 + }, + { + "epoch": 1.7101769911504425, + "grad_norm": 0.1083984375, + "learning_rate": 5.724557522123895e-06, + "loss": 0.7462, + "step": 1546 + }, + { + "epoch": 1.711283185840708, + "grad_norm": 0.1044921875, + "learning_rate": 5.72179203539823e-06, + "loss": 0.7541, + "step": 1547 + }, + { + "epoch": 1.7123893805309733, + "grad_norm": 0.12109375, + "learning_rate": 5.719026548672567e-06, + "loss": 0.7956, + "step": 1548 + }, + { + "epoch": 1.713495575221239, + "grad_norm": 0.1201171875, + "learning_rate": 5.716261061946903e-06, + "loss": 0.7797, + "step": 1549 + }, + { + "epoch": 1.7146017699115044, + "grad_norm": 0.10546875, + "learning_rate": 5.713495575221239e-06, + "loss": 0.7608, + "step": 1550 + }, + { + "epoch": 1.7157079646017699, + "grad_norm": 0.1123046875, + "learning_rate": 5.710730088495575e-06, + "loss": 0.7717, + "step": 1551 + }, + { + "epoch": 1.7168141592920354, + "grad_norm": 0.1416015625, + "learning_rate": 5.707964601769912e-06, + "loss": 0.8648, + "step": 1552 + }, + { + "epoch": 1.717920353982301, + "grad_norm": 0.1328125, + "learning_rate": 5.7051991150442485e-06, + "loss": 0.8262, + "step": 1553 + }, + { + "epoch": 1.7190265486725664, + "grad_norm": 0.1044921875, + "learning_rate": 5.702433628318585e-06, + "loss": 0.7534, + "step": 1554 + }, + { + "epoch": 1.7201327433628317, + "grad_norm": 0.1142578125, + "learning_rate": 5.6996681415929216e-06, + "loss": 0.7555, + "step": 1555 + }, + { + "epoch": 1.7212389380530975, + "grad_norm": 0.1337890625, + "learning_rate": 5.696902654867257e-06, + "loss": 0.8664, + "step": 1556 + }, + { + "epoch": 1.7223451327433628, + "grad_norm": 0.1259765625, + "learning_rate": 5.694137168141594e-06, + "loss": 0.8388, + "step": 1557 + }, + { + "epoch": 1.7234513274336283, + "grad_norm": 0.11474609375, + "learning_rate": 5.691371681415929e-06, + "loss": 0.7471, + "step": 1558 + }, + { + "epoch": 1.7245575221238938, + "grad_norm": 0.134765625, + "learning_rate": 5.688606194690266e-06, + "loss": 0.8793, + "step": 1559 + }, + { + "epoch": 1.7256637168141593, + "grad_norm": 0.10302734375, + "learning_rate": 5.685840707964603e-06, + "loss": 0.7368, + "step": 1560 + }, + { + "epoch": 1.7267699115044248, + "grad_norm": 0.1064453125, + "learning_rate": 5.683075221238938e-06, + "loss": 0.7666, + "step": 1561 + }, + { + "epoch": 1.7278761061946901, + "grad_norm": 0.11572265625, + "learning_rate": 5.680309734513275e-06, + "loss": 0.7793, + "step": 1562 + }, + { + "epoch": 1.7289823008849559, + "grad_norm": 0.1064453125, + "learning_rate": 5.6775442477876106e-06, + "loss": 
0.766, + "step": 1563 + }, + { + "epoch": 1.7300884955752212, + "grad_norm": 0.10888671875, + "learning_rate": 5.6747787610619475e-06, + "loss": 0.7562, + "step": 1564 + }, + { + "epoch": 1.731194690265487, + "grad_norm": 0.09912109375, + "learning_rate": 5.672013274336283e-06, + "loss": 0.7627, + "step": 1565 + }, + { + "epoch": 1.7323008849557522, + "grad_norm": 0.1298828125, + "learning_rate": 5.66924778761062e-06, + "loss": 0.7863, + "step": 1566 + }, + { + "epoch": 1.7334070796460177, + "grad_norm": 0.1103515625, + "learning_rate": 5.666482300884957e-06, + "loss": 0.7467, + "step": 1567 + }, + { + "epoch": 1.7345132743362832, + "grad_norm": 0.10400390625, + "learning_rate": 5.663716814159292e-06, + "loss": 0.7464, + "step": 1568 + }, + { + "epoch": 1.7356194690265485, + "grad_norm": 0.11669921875, + "learning_rate": 5.660951327433629e-06, + "loss": 0.7199, + "step": 1569 + }, + { + "epoch": 1.7367256637168142, + "grad_norm": 0.11669921875, + "learning_rate": 5.658185840707965e-06, + "loss": 0.784, + "step": 1570 + }, + { + "epoch": 1.7378318584070795, + "grad_norm": 0.10888671875, + "learning_rate": 5.655420353982301e-06, + "loss": 0.7578, + "step": 1571 + }, + { + "epoch": 1.7389380530973453, + "grad_norm": 0.123046875, + "learning_rate": 5.652654867256637e-06, + "loss": 0.8062, + "step": 1572 + }, + { + "epoch": 1.7400442477876106, + "grad_norm": 0.1337890625, + "learning_rate": 5.649889380530974e-06, + "loss": 0.7389, + "step": 1573 + }, + { + "epoch": 1.741150442477876, + "grad_norm": 0.115234375, + "learning_rate": 5.64712389380531e-06, + "loss": 0.7942, + "step": 1574 + }, + { + "epoch": 1.7422566371681416, + "grad_norm": 0.1552734375, + "learning_rate": 5.644358407079647e-06, + "loss": 0.7869, + "step": 1575 + }, + { + "epoch": 1.7433628318584071, + "grad_norm": 0.11083984375, + "learning_rate": 5.641592920353984e-06, + "loss": 0.7519, + "step": 1576 + }, + { + "epoch": 1.7444690265486726, + "grad_norm": 0.1044921875, + "learning_rate": 5.638827433628319e-06, + "loss": 0.7825, + "step": 1577 + }, + { + "epoch": 1.745575221238938, + "grad_norm": 0.10400390625, + "learning_rate": 5.636061946902656e-06, + "loss": 0.8034, + "step": 1578 + }, + { + "epoch": 1.7466814159292037, + "grad_norm": 0.119140625, + "learning_rate": 5.633296460176991e-06, + "loss": 0.7751, + "step": 1579 + }, + { + "epoch": 1.747787610619469, + "grad_norm": 0.09619140625, + "learning_rate": 5.630530973451328e-06, + "loss": 0.7219, + "step": 1580 + }, + { + "epoch": 1.7488938053097345, + "grad_norm": 0.1015625, + "learning_rate": 5.627765486725663e-06, + "loss": 0.7529, + "step": 1581 + }, + { + "epoch": 1.75, + "grad_norm": 0.1611328125, + "learning_rate": 5.625e-06, + "loss": 0.7434, + "step": 1582 + }, + { + "epoch": 1.7511061946902655, + "grad_norm": 0.10302734375, + "learning_rate": 5.622234513274337e-06, + "loss": 0.7684, + "step": 1583 + }, + { + "epoch": 1.752212389380531, + "grad_norm": 0.10107421875, + "learning_rate": 5.619469026548673e-06, + "loss": 0.7453, + "step": 1584 + }, + { + "epoch": 1.7533185840707963, + "grad_norm": 0.11572265625, + "learning_rate": 5.61670353982301e-06, + "loss": 0.8349, + "step": 1585 + }, + { + "epoch": 1.754424778761062, + "grad_norm": 0.107421875, + "learning_rate": 5.613938053097346e-06, + "loss": 0.7575, + "step": 1586 + }, + { + "epoch": 1.7555309734513274, + "grad_norm": 0.107421875, + "learning_rate": 5.611172566371682e-06, + "loss": 0.8107, + "step": 1587 + }, + { + "epoch": 1.7566371681415929, + "grad_norm": 0.11865234375, + "learning_rate": 
5.608407079646018e-06, + "loss": 0.7631, + "step": 1588 + }, + { + "epoch": 1.7577433628318584, + "grad_norm": 0.11865234375, + "learning_rate": 5.605641592920354e-06, + "loss": 0.7943, + "step": 1589 + }, + { + "epoch": 1.758849557522124, + "grad_norm": 0.10546875, + "learning_rate": 5.602876106194691e-06, + "loss": 0.7595, + "step": 1590 + }, + { + "epoch": 1.7599557522123894, + "grad_norm": 0.1259765625, + "learning_rate": 5.600110619469027e-06, + "loss": 0.8044, + "step": 1591 + }, + { + "epoch": 1.7610619469026547, + "grad_norm": 0.11083984375, + "learning_rate": 5.597345132743363e-06, + "loss": 0.7731, + "step": 1592 + }, + { + "epoch": 1.7621681415929205, + "grad_norm": 0.11962890625, + "learning_rate": 5.5945796460176994e-06, + "loss": 0.8202, + "step": 1593 + }, + { + "epoch": 1.7632743362831858, + "grad_norm": 0.111328125, + "learning_rate": 5.591814159292036e-06, + "loss": 0.7363, + "step": 1594 + }, + { + "epoch": 1.7643805309734515, + "grad_norm": 0.1044921875, + "learning_rate": 5.589048672566372e-06, + "loss": 0.7608, + "step": 1595 + }, + { + "epoch": 1.7654867256637168, + "grad_norm": 0.11474609375, + "learning_rate": 5.586283185840709e-06, + "loss": 0.7677, + "step": 1596 + }, + { + "epoch": 1.7665929203539823, + "grad_norm": 0.11767578125, + "learning_rate": 5.583517699115044e-06, + "loss": 0.7597, + "step": 1597 + }, + { + "epoch": 1.7676991150442478, + "grad_norm": 0.1005859375, + "learning_rate": 5.580752212389381e-06, + "loss": 0.703, + "step": 1598 + }, + { + "epoch": 1.768805309734513, + "grad_norm": 0.1171875, + "learning_rate": 5.577986725663718e-06, + "loss": 0.7977, + "step": 1599 + }, + { + "epoch": 1.7699115044247788, + "grad_norm": 0.11279296875, + "learning_rate": 5.575221238938053e-06, + "loss": 0.79, + "step": 1600 + }, + { + "epoch": 1.7710176991150441, + "grad_norm": 0.1044921875, + "learning_rate": 5.57245575221239e-06, + "loss": 0.7494, + "step": 1601 + }, + { + "epoch": 1.7721238938053099, + "grad_norm": 0.1103515625, + "learning_rate": 5.569690265486725e-06, + "loss": 0.766, + "step": 1602 + }, + { + "epoch": 1.7732300884955752, + "grad_norm": 0.10888671875, + "learning_rate": 5.566924778761062e-06, + "loss": 0.7384, + "step": 1603 + }, + { + "epoch": 1.7743362831858407, + "grad_norm": 0.103515625, + "learning_rate": 5.5641592920353985e-06, + "loss": 0.768, + "step": 1604 + }, + { + "epoch": 1.7754424778761062, + "grad_norm": 0.10546875, + "learning_rate": 5.561393805309735e-06, + "loss": 0.7971, + "step": 1605 + }, + { + "epoch": 1.7765486725663717, + "grad_norm": 0.12255859375, + "learning_rate": 5.558628318584072e-06, + "loss": 0.7716, + "step": 1606 + }, + { + "epoch": 1.7776548672566372, + "grad_norm": 0.123046875, + "learning_rate": 5.555862831858408e-06, + "loss": 0.8003, + "step": 1607 + }, + { + "epoch": 1.7787610619469025, + "grad_norm": 0.1005859375, + "learning_rate": 5.553097345132744e-06, + "loss": 0.7576, + "step": 1608 + }, + { + "epoch": 1.7798672566371683, + "grad_norm": 0.11083984375, + "learning_rate": 5.55033185840708e-06, + "loss": 0.7489, + "step": 1609 + }, + { + "epoch": 1.7809734513274336, + "grad_norm": 0.10009765625, + "learning_rate": 5.547566371681416e-06, + "loss": 0.772, + "step": 1610 + }, + { + "epoch": 1.782079646017699, + "grad_norm": 0.125, + "learning_rate": 5.544800884955752e-06, + "loss": 0.7939, + "step": 1611 + }, + { + "epoch": 1.7831858407079646, + "grad_norm": 0.107421875, + "learning_rate": 5.542035398230089e-06, + "loss": 0.8179, + "step": 1612 + }, + { + "epoch": 1.7842920353982301, + "grad_norm": 
0.111328125, + "learning_rate": 5.539269911504425e-06, + "loss": 0.7844, + "step": 1613 + }, + { + "epoch": 1.7853982300884956, + "grad_norm": 0.109375, + "learning_rate": 5.5365044247787615e-06, + "loss": 0.7914, + "step": 1614 + }, + { + "epoch": 1.786504424778761, + "grad_norm": 0.1064453125, + "learning_rate": 5.5337389380530984e-06, + "loss": 0.7814, + "step": 1615 + }, + { + "epoch": 1.7876106194690267, + "grad_norm": 0.10205078125, + "learning_rate": 5.530973451327434e-06, + "loss": 0.7672, + "step": 1616 + }, + { + "epoch": 1.788716814159292, + "grad_norm": 0.1025390625, + "learning_rate": 5.528207964601771e-06, + "loss": 0.778, + "step": 1617 + }, + { + "epoch": 1.7898230088495575, + "grad_norm": 0.10888671875, + "learning_rate": 5.525442477876106e-06, + "loss": 0.7963, + "step": 1618 + }, + { + "epoch": 1.790929203539823, + "grad_norm": 0.12158203125, + "learning_rate": 5.522676991150443e-06, + "loss": 0.7816, + "step": 1619 + }, + { + "epoch": 1.7920353982300885, + "grad_norm": 0.103515625, + "learning_rate": 5.51991150442478e-06, + "loss": 0.7291, + "step": 1620 + }, + { + "epoch": 1.793141592920354, + "grad_norm": 0.119140625, + "learning_rate": 5.517146017699115e-06, + "loss": 0.793, + "step": 1621 + }, + { + "epoch": 1.7942477876106193, + "grad_norm": 0.10107421875, + "learning_rate": 5.514380530973452e-06, + "loss": 0.7313, + "step": 1622 + }, + { + "epoch": 1.795353982300885, + "grad_norm": 0.09765625, + "learning_rate": 5.5116150442477875e-06, + "loss": 0.7659, + "step": 1623 + }, + { + "epoch": 1.7964601769911503, + "grad_norm": 0.11181640625, + "learning_rate": 5.508849557522124e-06, + "loss": 0.8055, + "step": 1624 + }, + { + "epoch": 1.797566371681416, + "grad_norm": 0.1083984375, + "learning_rate": 5.5060840707964605e-06, + "loss": 0.7291, + "step": 1625 + }, + { + "epoch": 1.7986725663716814, + "grad_norm": 0.10302734375, + "learning_rate": 5.503318584070797e-06, + "loss": 0.7242, + "step": 1626 + }, + { + "epoch": 1.799778761061947, + "grad_norm": 0.10546875, + "learning_rate": 5.500553097345133e-06, + "loss": 0.7585, + "step": 1627 + }, + { + "epoch": 1.8008849557522124, + "grad_norm": 0.1123046875, + "learning_rate": 5.49778761061947e-06, + "loss": 0.797, + "step": 1628 + }, + { + "epoch": 1.801991150442478, + "grad_norm": 0.09765625, + "learning_rate": 5.495022123893806e-06, + "loss": 0.7372, + "step": 1629 + }, + { + "epoch": 1.8030973451327434, + "grad_norm": 0.10107421875, + "learning_rate": 5.492256637168142e-06, + "loss": 0.7616, + "step": 1630 + }, + { + "epoch": 1.8042035398230087, + "grad_norm": 0.107421875, + "learning_rate": 5.489491150442479e-06, + "loss": 0.7653, + "step": 1631 + }, + { + "epoch": 1.8053097345132745, + "grad_norm": 0.1279296875, + "learning_rate": 5.486725663716814e-06, + "loss": 0.8818, + "step": 1632 + }, + { + "epoch": 1.8064159292035398, + "grad_norm": 0.10302734375, + "learning_rate": 5.483960176991151e-06, + "loss": 0.7661, + "step": 1633 + }, + { + "epoch": 1.8075221238938053, + "grad_norm": 0.10595703125, + "learning_rate": 5.4811946902654865e-06, + "loss": 0.7653, + "step": 1634 + }, + { + "epoch": 1.8086283185840708, + "grad_norm": 0.1103515625, + "learning_rate": 5.4784292035398235e-06, + "loss": 0.7461, + "step": 1635 + }, + { + "epoch": 1.8097345132743363, + "grad_norm": 0.1044921875, + "learning_rate": 5.4756637168141605e-06, + "loss": 0.7205, + "step": 1636 + }, + { + "epoch": 1.8108407079646018, + "grad_norm": 0.10888671875, + "learning_rate": 5.472898230088496e-06, + "loss": 0.7747, + "step": 1637 + }, + { + "epoch": 
1.8119469026548671, + "grad_norm": 0.125, + "learning_rate": 5.470132743362833e-06, + "loss": 0.8055, + "step": 1638 + }, + { + "epoch": 1.8130530973451329, + "grad_norm": 0.10888671875, + "learning_rate": 5.467367256637168e-06, + "loss": 0.7803, + "step": 1639 + }, + { + "epoch": 1.8141592920353982, + "grad_norm": 0.10400390625, + "learning_rate": 5.464601769911505e-06, + "loss": 0.6922, + "step": 1640 + }, + { + "epoch": 1.8152654867256637, + "grad_norm": 0.142578125, + "learning_rate": 5.461836283185841e-06, + "loss": 0.8039, + "step": 1641 + }, + { + "epoch": 1.8163716814159292, + "grad_norm": 0.10888671875, + "learning_rate": 5.459070796460177e-06, + "loss": 0.816, + "step": 1642 + }, + { + "epoch": 1.8174778761061947, + "grad_norm": 0.1103515625, + "learning_rate": 5.456305309734514e-06, + "loss": 0.7618, + "step": 1643 + }, + { + "epoch": 1.8185840707964602, + "grad_norm": 0.10595703125, + "learning_rate": 5.4535398230088495e-06, + "loss": 0.7482, + "step": 1644 + }, + { + "epoch": 1.8196902654867255, + "grad_norm": 0.111328125, + "learning_rate": 5.4507743362831865e-06, + "loss": 0.779, + "step": 1645 + }, + { + "epoch": 1.8207964601769913, + "grad_norm": 0.10498046875, + "learning_rate": 5.448008849557523e-06, + "loss": 0.7587, + "step": 1646 + }, + { + "epoch": 1.8219026548672566, + "grad_norm": 0.09765625, + "learning_rate": 5.445243362831859e-06, + "loss": 0.7882, + "step": 1647 + }, + { + "epoch": 1.823008849557522, + "grad_norm": 0.1025390625, + "learning_rate": 5.442477876106195e-06, + "loss": 0.7451, + "step": 1648 + }, + { + "epoch": 1.8241150442477876, + "grad_norm": 0.1123046875, + "learning_rate": 5.439712389380532e-06, + "loss": 0.7763, + "step": 1649 + }, + { + "epoch": 1.825221238938053, + "grad_norm": 0.1357421875, + "learning_rate": 5.436946902654868e-06, + "loss": 0.7994, + "step": 1650 + }, + { + "epoch": 1.8263274336283186, + "grad_norm": 0.12060546875, + "learning_rate": 5.434181415929204e-06, + "loss": 0.7784, + "step": 1651 + }, + { + "epoch": 1.827433628318584, + "grad_norm": 0.10791015625, + "learning_rate": 5.431415929203541e-06, + "loss": 0.7398, + "step": 1652 + }, + { + "epoch": 1.8285398230088497, + "grad_norm": 0.1083984375, + "learning_rate": 5.428650442477876e-06, + "loss": 0.7499, + "step": 1653 + }, + { + "epoch": 1.829646017699115, + "grad_norm": 0.103515625, + "learning_rate": 5.425884955752213e-06, + "loss": 0.7481, + "step": 1654 + }, + { + "epoch": 1.8307522123893807, + "grad_norm": 0.10986328125, + "learning_rate": 5.4231194690265486e-06, + "loss": 0.7382, + "step": 1655 + }, + { + "epoch": 1.831858407079646, + "grad_norm": 0.10791015625, + "learning_rate": 5.4203539823008855e-06, + "loss": 0.7804, + "step": 1656 + }, + { + "epoch": 1.8329646017699115, + "grad_norm": 0.10986328125, + "learning_rate": 5.417588495575221e-06, + "loss": 0.7561, + "step": 1657 + }, + { + "epoch": 1.834070796460177, + "grad_norm": 0.10693359375, + "learning_rate": 5.414823008849558e-06, + "loss": 0.7346, + "step": 1658 + }, + { + "epoch": 1.8351769911504425, + "grad_norm": 0.10595703125, + "learning_rate": 5.412057522123895e-06, + "loss": 0.7562, + "step": 1659 + }, + { + "epoch": 1.836283185840708, + "grad_norm": 0.10888671875, + "learning_rate": 5.40929203539823e-06, + "loss": 0.7989, + "step": 1660 + }, + { + "epoch": 1.8373893805309733, + "grad_norm": 0.0966796875, + "learning_rate": 5.406526548672567e-06, + "loss": 0.7063, + "step": 1661 + }, + { + "epoch": 1.838495575221239, + "grad_norm": 0.10546875, + "learning_rate": 5.403761061946903e-06, + "loss": 
0.7859, + "step": 1662 + }, + { + "epoch": 1.8396017699115044, + "grad_norm": 0.1162109375, + "learning_rate": 5.400995575221239e-06, + "loss": 0.8041, + "step": 1663 + }, + { + "epoch": 1.8407079646017699, + "grad_norm": 0.10546875, + "learning_rate": 5.398230088495575e-06, + "loss": 0.725, + "step": 1664 + }, + { + "epoch": 1.8418141592920354, + "grad_norm": 0.1064453125, + "learning_rate": 5.3954646017699115e-06, + "loss": 0.7809, + "step": 1665 + }, + { + "epoch": 1.842920353982301, + "grad_norm": 0.11669921875, + "learning_rate": 5.3926991150442485e-06, + "loss": 0.7783, + "step": 1666 + }, + { + "epoch": 1.8440265486725664, + "grad_norm": 0.115234375, + "learning_rate": 5.389933628318585e-06, + "loss": 0.8061, + "step": 1667 + }, + { + "epoch": 1.8451327433628317, + "grad_norm": 0.10498046875, + "learning_rate": 5.387168141592921e-06, + "loss": 0.7714, + "step": 1668 + }, + { + "epoch": 1.8462389380530975, + "grad_norm": 0.10791015625, + "learning_rate": 5.384402654867257e-06, + "loss": 0.7685, + "step": 1669 + }, + { + "epoch": 1.8473451327433628, + "grad_norm": 0.10546875, + "learning_rate": 5.381637168141594e-06, + "loss": 0.7653, + "step": 1670 + }, + { + "epoch": 1.8484513274336283, + "grad_norm": 0.11474609375, + "learning_rate": 5.378871681415929e-06, + "loss": 0.8039, + "step": 1671 + }, + { + "epoch": 1.8495575221238938, + "grad_norm": 0.134765625, + "learning_rate": 5.376106194690266e-06, + "loss": 0.7866, + "step": 1672 + }, + { + "epoch": 1.8506637168141593, + "grad_norm": 0.11328125, + "learning_rate": 5.373340707964603e-06, + "loss": 0.7821, + "step": 1673 + }, + { + "epoch": 1.8517699115044248, + "grad_norm": 0.109375, + "learning_rate": 5.370575221238938e-06, + "loss": 0.7915, + "step": 1674 + }, + { + "epoch": 1.8528761061946901, + "grad_norm": 0.111328125, + "learning_rate": 5.367809734513275e-06, + "loss": 0.7781, + "step": 1675 + }, + { + "epoch": 1.8539823008849559, + "grad_norm": 0.12353515625, + "learning_rate": 5.365044247787611e-06, + "loss": 0.7975, + "step": 1676 + }, + { + "epoch": 1.8550884955752212, + "grad_norm": 0.1103515625, + "learning_rate": 5.3622787610619476e-06, + "loss": 0.7934, + "step": 1677 + }, + { + "epoch": 1.856194690265487, + "grad_norm": 0.111328125, + "learning_rate": 5.359513274336283e-06, + "loss": 0.7418, + "step": 1678 + }, + { + "epoch": 1.8573008849557522, + "grad_norm": 0.0986328125, + "learning_rate": 5.35674778761062e-06, + "loss": 0.7796, + "step": 1679 + }, + { + "epoch": 1.8584070796460177, + "grad_norm": 0.1083984375, + "learning_rate": 5.353982300884957e-06, + "loss": 0.7647, + "step": 1680 + }, + { + "epoch": 1.8595132743362832, + "grad_norm": 0.11279296875, + "learning_rate": 5.351216814159292e-06, + "loss": 0.75, + "step": 1681 + }, + { + "epoch": 1.8606194690265485, + "grad_norm": 0.1015625, + "learning_rate": 5.348451327433629e-06, + "loss": 0.8062, + "step": 1682 + }, + { + "epoch": 1.8617256637168142, + "grad_norm": 0.10888671875, + "learning_rate": 5.345685840707965e-06, + "loss": 0.7335, + "step": 1683 + }, + { + "epoch": 1.8628318584070795, + "grad_norm": 0.11767578125, + "learning_rate": 5.342920353982301e-06, + "loss": 0.7592, + "step": 1684 + }, + { + "epoch": 1.8639380530973453, + "grad_norm": 0.1162109375, + "learning_rate": 5.3401548672566374e-06, + "loss": 0.8063, + "step": 1685 + }, + { + "epoch": 1.8650442477876106, + "grad_norm": 0.109375, + "learning_rate": 5.337389380530974e-06, + "loss": 0.7972, + "step": 1686 + }, + { + "epoch": 1.866150442477876, + "grad_norm": 0.111328125, + "learning_rate": 
5.33462389380531e-06, + "loss": 0.7724, + "step": 1687 + }, + { + "epoch": 1.8672566371681416, + "grad_norm": 0.12060546875, + "learning_rate": 5.331858407079647e-06, + "loss": 0.7316, + "step": 1688 + }, + { + "epoch": 1.8683628318584071, + "grad_norm": 0.0986328125, + "learning_rate": 5.329092920353983e-06, + "loss": 0.762, + "step": 1689 + }, + { + "epoch": 1.8694690265486726, + "grad_norm": 0.1044921875, + "learning_rate": 5.326327433628319e-06, + "loss": 0.7526, + "step": 1690 + }, + { + "epoch": 1.870575221238938, + "grad_norm": 0.10107421875, + "learning_rate": 5.323561946902656e-06, + "loss": 0.7266, + "step": 1691 + }, + { + "epoch": 1.8716814159292037, + "grad_norm": 0.1083984375, + "learning_rate": 5.320796460176991e-06, + "loss": 0.7636, + "step": 1692 + }, + { + "epoch": 1.872787610619469, + "grad_norm": 0.10498046875, + "learning_rate": 5.318030973451328e-06, + "loss": 0.7351, + "step": 1693 + }, + { + "epoch": 1.8738938053097345, + "grad_norm": 0.10546875, + "learning_rate": 5.315265486725663e-06, + "loss": 0.7794, + "step": 1694 + }, + { + "epoch": 1.875, + "grad_norm": 0.103515625, + "learning_rate": 5.3125e-06, + "loss": 0.7087, + "step": 1695 + }, + { + "epoch": 1.8761061946902655, + "grad_norm": 0.103515625, + "learning_rate": 5.309734513274337e-06, + "loss": 0.7563, + "step": 1696 + }, + { + "epoch": 1.877212389380531, + "grad_norm": 0.1123046875, + "learning_rate": 5.306969026548673e-06, + "loss": 0.8121, + "step": 1697 + }, + { + "epoch": 1.8783185840707963, + "grad_norm": 0.1259765625, + "learning_rate": 5.30420353982301e-06, + "loss": 0.7873, + "step": 1698 + }, + { + "epoch": 1.879424778761062, + "grad_norm": 0.107421875, + "learning_rate": 5.301438053097345e-06, + "loss": 0.7744, + "step": 1699 + }, + { + "epoch": 1.8805309734513274, + "grad_norm": 0.1005859375, + "learning_rate": 5.298672566371682e-06, + "loss": 0.7798, + "step": 1700 + }, + { + "epoch": 1.8816371681415929, + "grad_norm": 0.11376953125, + "learning_rate": 5.295907079646018e-06, + "loss": 0.7847, + "step": 1701 + }, + { + "epoch": 1.8827433628318584, + "grad_norm": 0.11181640625, + "learning_rate": 5.293141592920354e-06, + "loss": 0.7935, + "step": 1702 + }, + { + "epoch": 1.883849557522124, + "grad_norm": 0.1044921875, + "learning_rate": 5.290376106194691e-06, + "loss": 0.7791, + "step": 1703 + }, + { + "epoch": 1.8849557522123894, + "grad_norm": 0.10595703125, + "learning_rate": 5.287610619469027e-06, + "loss": 0.7798, + "step": 1704 + }, + { + "epoch": 1.8860619469026547, + "grad_norm": 0.107421875, + "learning_rate": 5.284845132743363e-06, + "loss": 0.7475, + "step": 1705 + }, + { + "epoch": 1.8871681415929205, + "grad_norm": 0.1162109375, + "learning_rate": 5.2820796460176995e-06, + "loss": 0.7799, + "step": 1706 + }, + { + "epoch": 1.8882743362831858, + "grad_norm": 0.1064453125, + "learning_rate": 5.2793141592920364e-06, + "loss": 0.7435, + "step": 1707 + }, + { + "epoch": 1.8893805309734515, + "grad_norm": 0.1171875, + "learning_rate": 5.276548672566372e-06, + "loss": 0.753, + "step": 1708 + }, + { + "epoch": 1.8904867256637168, + "grad_norm": 0.1083984375, + "learning_rate": 5.273783185840709e-06, + "loss": 0.8119, + "step": 1709 + }, + { + "epoch": 1.8915929203539823, + "grad_norm": 0.1103515625, + "learning_rate": 5.271017699115044e-06, + "loss": 0.7779, + "step": 1710 + }, + { + "epoch": 1.8926991150442478, + "grad_norm": 0.10400390625, + "learning_rate": 5.268252212389381e-06, + "loss": 0.7643, + "step": 1711 + }, + { + "epoch": 1.893805309734513, + "grad_norm": 0.109375, + 
"learning_rate": 5.265486725663718e-06, + "loss": 0.782, + "step": 1712 + }, + { + "epoch": 1.8949115044247788, + "grad_norm": 0.1025390625, + "learning_rate": 5.262721238938053e-06, + "loss": 0.7611, + "step": 1713 + }, + { + "epoch": 1.8960176991150441, + "grad_norm": 0.115234375, + "learning_rate": 5.25995575221239e-06, + "loss": 0.8026, + "step": 1714 + }, + { + "epoch": 1.8971238938053099, + "grad_norm": 0.11474609375, + "learning_rate": 5.2571902654867254e-06, + "loss": 0.7635, + "step": 1715 + }, + { + "epoch": 1.8982300884955752, + "grad_norm": 0.1123046875, + "learning_rate": 5.254424778761062e-06, + "loss": 0.7746, + "step": 1716 + }, + { + "epoch": 1.8993362831858407, + "grad_norm": 0.09716796875, + "learning_rate": 5.2516592920353985e-06, + "loss": 0.7742, + "step": 1717 + }, + { + "epoch": 1.9004424778761062, + "grad_norm": 0.10107421875, + "learning_rate": 5.248893805309735e-06, + "loss": 0.7733, + "step": 1718 + }, + { + "epoch": 1.9015486725663717, + "grad_norm": 0.0986328125, + "learning_rate": 5.246128318584072e-06, + "loss": 0.7698, + "step": 1719 + }, + { + "epoch": 1.9026548672566372, + "grad_norm": 0.10546875, + "learning_rate": 5.243362831858407e-06, + "loss": 0.7838, + "step": 1720 + }, + { + "epoch": 1.9037610619469025, + "grad_norm": 0.11083984375, + "learning_rate": 5.240597345132744e-06, + "loss": 0.7286, + "step": 1721 + }, + { + "epoch": 1.9048672566371683, + "grad_norm": 0.1298828125, + "learning_rate": 5.23783185840708e-06, + "loss": 0.7633, + "step": 1722 + }, + { + "epoch": 1.9059734513274336, + "grad_norm": 0.125, + "learning_rate": 5.235066371681416e-06, + "loss": 0.8256, + "step": 1723 + }, + { + "epoch": 1.907079646017699, + "grad_norm": 0.12353515625, + "learning_rate": 5.232300884955752e-06, + "loss": 0.7846, + "step": 1724 + }, + { + "epoch": 1.9081858407079646, + "grad_norm": 0.11572265625, + "learning_rate": 5.229535398230089e-06, + "loss": 0.7704, + "step": 1725 + }, + { + "epoch": 1.9092920353982301, + "grad_norm": 0.1376953125, + "learning_rate": 5.226769911504425e-06, + "loss": 0.796, + "step": 1726 + }, + { + "epoch": 1.9103982300884956, + "grad_norm": 0.1103515625, + "learning_rate": 5.2240044247787615e-06, + "loss": 0.7357, + "step": 1727 + }, + { + "epoch": 1.911504424778761, + "grad_norm": 0.1064453125, + "learning_rate": 5.2212389380530985e-06, + "loss": 0.8218, + "step": 1728 + }, + { + "epoch": 1.9126106194690267, + "grad_norm": 0.111328125, + "learning_rate": 5.218473451327434e-06, + "loss": 0.8054, + "step": 1729 + }, + { + "epoch": 1.913716814159292, + "grad_norm": 0.1201171875, + "learning_rate": 5.215707964601771e-06, + "loss": 0.7697, + "step": 1730 + }, + { + "epoch": 1.9148230088495575, + "grad_norm": 0.1181640625, + "learning_rate": 5.212942477876106e-06, + "loss": 0.7932, + "step": 1731 + }, + { + "epoch": 1.915929203539823, + "grad_norm": 0.1015625, + "learning_rate": 5.210176991150443e-06, + "loss": 0.7608, + "step": 1732 + }, + { + "epoch": 1.9170353982300885, + "grad_norm": 0.12451171875, + "learning_rate": 5.20741150442478e-06, + "loss": 0.7856, + "step": 1733 + }, + { + "epoch": 1.918141592920354, + "grad_norm": 0.1015625, + "learning_rate": 5.204646017699115e-06, + "loss": 0.7109, + "step": 1734 + }, + { + "epoch": 1.9192477876106193, + "grad_norm": 0.09765625, + "learning_rate": 5.201880530973452e-06, + "loss": 0.7529, + "step": 1735 + }, + { + "epoch": 1.920353982300885, + "grad_norm": 0.10791015625, + "learning_rate": 5.1991150442477875e-06, + "loss": 0.801, + "step": 1736 + }, + { + "epoch": 1.9214601769911503, + 
"grad_norm": 0.10888671875, + "learning_rate": 5.1963495575221245e-06, + "loss": 0.7589, + "step": 1737 + }, + { + "epoch": 1.922566371681416, + "grad_norm": 0.115234375, + "learning_rate": 5.193584070796461e-06, + "loss": 0.8137, + "step": 1738 + }, + { + "epoch": 1.9236725663716814, + "grad_norm": 0.1005859375, + "learning_rate": 5.190818584070797e-06, + "loss": 0.7658, + "step": 1739 + }, + { + "epoch": 1.924778761061947, + "grad_norm": 0.1083984375, + "learning_rate": 5.188053097345133e-06, + "loss": 0.759, + "step": 1740 + }, + { + "epoch": 1.9258849557522124, + "grad_norm": 0.10888671875, + "learning_rate": 5.18528761061947e-06, + "loss": 0.7571, + "step": 1741 + }, + { + "epoch": 1.926991150442478, + "grad_norm": 0.130859375, + "learning_rate": 5.182522123893806e-06, + "loss": 0.7533, + "step": 1742 + }, + { + "epoch": 1.9280973451327434, + "grad_norm": 0.103515625, + "learning_rate": 5.179756637168142e-06, + "loss": 0.7209, + "step": 1743 + }, + { + "epoch": 1.9292035398230087, + "grad_norm": 0.10498046875, + "learning_rate": 5.176991150442478e-06, + "loss": 0.7581, + "step": 1744 + }, + { + "epoch": 1.9303097345132745, + "grad_norm": 0.10546875, + "learning_rate": 5.174225663716814e-06, + "loss": 0.7405, + "step": 1745 + }, + { + "epoch": 1.9314159292035398, + "grad_norm": 0.11767578125, + "learning_rate": 5.171460176991151e-06, + "loss": 0.7855, + "step": 1746 + }, + { + "epoch": 1.9325221238938053, + "grad_norm": 0.1025390625, + "learning_rate": 5.1686946902654866e-06, + "loss": 0.7399, + "step": 1747 + }, + { + "epoch": 1.9336283185840708, + "grad_norm": 0.10791015625, + "learning_rate": 5.1659292035398235e-06, + "loss": 0.7756, + "step": 1748 + }, + { + "epoch": 1.9347345132743363, + "grad_norm": 0.1142578125, + "learning_rate": 5.1631637168141605e-06, + "loss": 0.7713, + "step": 1749 + }, + { + "epoch": 1.9358407079646018, + "grad_norm": 0.103515625, + "learning_rate": 5.160398230088496e-06, + "loss": 0.778, + "step": 1750 + }, + { + "epoch": 1.9369469026548671, + "grad_norm": 0.12255859375, + "learning_rate": 5.157632743362833e-06, + "loss": 0.8, + "step": 1751 + }, + { + "epoch": 1.9380530973451329, + "grad_norm": 0.099609375, + "learning_rate": 5.154867256637168e-06, + "loss": 0.7415, + "step": 1752 + }, + { + "epoch": 1.9391592920353982, + "grad_norm": 0.1171875, + "learning_rate": 5.152101769911505e-06, + "loss": 0.8, + "step": 1753 + }, + { + "epoch": 1.9402654867256637, + "grad_norm": 0.10791015625, + "learning_rate": 5.14933628318584e-06, + "loss": 0.7532, + "step": 1754 + }, + { + "epoch": 1.9413716814159292, + "grad_norm": 0.103515625, + "learning_rate": 5.146570796460177e-06, + "loss": 0.7722, + "step": 1755 + }, + { + "epoch": 1.9424778761061947, + "grad_norm": 0.1123046875, + "learning_rate": 5.143805309734514e-06, + "loss": 0.7343, + "step": 1756 + }, + { + "epoch": 1.9435840707964602, + "grad_norm": 0.11669921875, + "learning_rate": 5.1410398230088495e-06, + "loss": 0.7747, + "step": 1757 + }, + { + "epoch": 1.9446902654867255, + "grad_norm": 0.107421875, + "learning_rate": 5.1382743362831865e-06, + "loss": 0.7826, + "step": 1758 + }, + { + "epoch": 1.9457964601769913, + "grad_norm": 0.107421875, + "learning_rate": 5.135508849557523e-06, + "loss": 0.7741, + "step": 1759 + }, + { + "epoch": 1.9469026548672566, + "grad_norm": 0.09521484375, + "learning_rate": 5.132743362831859e-06, + "loss": 0.7476, + "step": 1760 + }, + { + "epoch": 1.948008849557522, + "grad_norm": 0.1396484375, + "learning_rate": 5.129977876106195e-06, + "loss": 0.8713, + "step": 1761 + }, + { 
+ "epoch": 1.9491150442477876, + "grad_norm": 0.09716796875, + "learning_rate": 5.127212389380532e-06, + "loss": 0.7594, + "step": 1762 + }, + { + "epoch": 1.950221238938053, + "grad_norm": 0.11572265625, + "learning_rate": 5.124446902654868e-06, + "loss": 0.7727, + "step": 1763 + }, + { + "epoch": 1.9513274336283186, + "grad_norm": 0.09716796875, + "learning_rate": 5.121681415929204e-06, + "loss": 0.7307, + "step": 1764 + }, + { + "epoch": 1.952433628318584, + "grad_norm": 0.10595703125, + "learning_rate": 5.11891592920354e-06, + "loss": 0.7514, + "step": 1765 + }, + { + "epoch": 1.9535398230088497, + "grad_norm": 0.10400390625, + "learning_rate": 5.116150442477876e-06, + "loss": 0.7722, + "step": 1766 + }, + { + "epoch": 1.954646017699115, + "grad_norm": 0.11474609375, + "learning_rate": 5.113384955752213e-06, + "loss": 0.7512, + "step": 1767 + }, + { + "epoch": 1.9557522123893807, + "grad_norm": 0.11376953125, + "learning_rate": 5.110619469026549e-06, + "loss": 0.7894, + "step": 1768 + }, + { + "epoch": 1.956858407079646, + "grad_norm": 0.130859375, + "learning_rate": 5.1078539823008856e-06, + "loss": 0.7474, + "step": 1769 + }, + { + "epoch": 1.9579646017699115, + "grad_norm": 0.1171875, + "learning_rate": 5.105088495575221e-06, + "loss": 0.7485, + "step": 1770 + }, + { + "epoch": 1.959070796460177, + "grad_norm": 0.1181640625, + "learning_rate": 5.102323008849558e-06, + "loss": 0.832, + "step": 1771 + }, + { + "epoch": 1.9601769911504425, + "grad_norm": 0.10791015625, + "learning_rate": 5.099557522123895e-06, + "loss": 0.7507, + "step": 1772 + }, + { + "epoch": 1.961283185840708, + "grad_norm": 0.11376953125, + "learning_rate": 5.09679203539823e-06, + "loss": 0.7685, + "step": 1773 + }, + { + "epoch": 1.9623893805309733, + "grad_norm": 0.10400390625, + "learning_rate": 5.094026548672567e-06, + "loss": 0.7377, + "step": 1774 + }, + { + "epoch": 1.963495575221239, + "grad_norm": 0.107421875, + "learning_rate": 5.091261061946902e-06, + "loss": 0.7624, + "step": 1775 + }, + { + "epoch": 1.9646017699115044, + "grad_norm": 0.1142578125, + "learning_rate": 5.088495575221239e-06, + "loss": 0.748, + "step": 1776 + }, + { + "epoch": 1.9657079646017699, + "grad_norm": 0.1171875, + "learning_rate": 5.085730088495575e-06, + "loss": 0.7834, + "step": 1777 + }, + { + "epoch": 1.9668141592920354, + "grad_norm": 0.11474609375, + "learning_rate": 5.0829646017699115e-06, + "loss": 0.757, + "step": 1778 + }, + { + "epoch": 1.967920353982301, + "grad_norm": 0.1142578125, + "learning_rate": 5.0801991150442485e-06, + "loss": 0.7936, + "step": 1779 + }, + { + "epoch": 1.9690265486725664, + "grad_norm": 0.09814453125, + "learning_rate": 5.077433628318585e-06, + "loss": 0.751, + "step": 1780 + }, + { + "epoch": 1.9701327433628317, + "grad_norm": 0.10205078125, + "learning_rate": 5.074668141592921e-06, + "loss": 0.7482, + "step": 1781 + }, + { + "epoch": 1.9712389380530975, + "grad_norm": 0.1044921875, + "learning_rate": 5.071902654867257e-06, + "loss": 0.7505, + "step": 1782 + }, + { + "epoch": 1.9723451327433628, + "grad_norm": 0.2216796875, + "learning_rate": 5.069137168141594e-06, + "loss": 0.7741, + "step": 1783 + }, + { + "epoch": 1.9734513274336283, + "grad_norm": 0.11376953125, + "learning_rate": 5.066371681415929e-06, + "loss": 0.773, + "step": 1784 + }, + { + "epoch": 1.9745575221238938, + "grad_norm": 0.11865234375, + "learning_rate": 5.063606194690266e-06, + "loss": 0.8268, + "step": 1785 + }, + { + "epoch": 1.9756637168141593, + "grad_norm": 0.11572265625, + "learning_rate": 5.060840707964603e-06, 
+ "loss": 0.7862, + "step": 1786 + }, + { + "epoch": 1.9767699115044248, + "grad_norm": 0.1015625, + "learning_rate": 5.058075221238938e-06, + "loss": 0.7046, + "step": 1787 + }, + { + "epoch": 1.9778761061946901, + "grad_norm": 0.1162109375, + "learning_rate": 5.055309734513275e-06, + "loss": 0.7902, + "step": 1788 + }, + { + "epoch": 1.9789823008849559, + "grad_norm": 0.119140625, + "learning_rate": 5.052544247787611e-06, + "loss": 0.745, + "step": 1789 + }, + { + "epoch": 1.9800884955752212, + "grad_norm": 0.11279296875, + "learning_rate": 5.049778761061948e-06, + "loss": 0.8201, + "step": 1790 + }, + { + "epoch": 1.981194690265487, + "grad_norm": 0.1318359375, + "learning_rate": 5.047013274336283e-06, + "loss": 0.7484, + "step": 1791 + }, + { + "epoch": 1.9823008849557522, + "grad_norm": 0.1123046875, + "learning_rate": 5.04424778761062e-06, + "loss": 0.7657, + "step": 1792 + }, + { + "epoch": 1.9834070796460177, + "grad_norm": 0.11962890625, + "learning_rate": 5.041482300884957e-06, + "loss": 0.8077, + "step": 1793 + }, + { + "epoch": 1.9845132743362832, + "grad_norm": 0.11376953125, + "learning_rate": 5.038716814159292e-06, + "loss": 0.7629, + "step": 1794 + }, + { + "epoch": 1.9856194690265485, + "grad_norm": 0.103515625, + "learning_rate": 5.035951327433629e-06, + "loss": 0.8018, + "step": 1795 + }, + { + "epoch": 1.9867256637168142, + "grad_norm": 0.111328125, + "learning_rate": 5.033185840707965e-06, + "loss": 0.7589, + "step": 1796 + }, + { + "epoch": 1.9878318584070795, + "grad_norm": 0.1220703125, + "learning_rate": 5.030420353982301e-06, + "loss": 0.7971, + "step": 1797 + }, + { + "epoch": 1.9889380530973453, + "grad_norm": 0.11474609375, + "learning_rate": 5.0276548672566375e-06, + "loss": 0.8354, + "step": 1798 + }, + { + "epoch": 1.9900442477876106, + "grad_norm": 0.11962890625, + "learning_rate": 5.024889380530974e-06, + "loss": 0.7938, + "step": 1799 + }, + { + "epoch": 1.991150442477876, + "grad_norm": 0.12451171875, + "learning_rate": 5.02212389380531e-06, + "loss": 0.8132, + "step": 1800 + }, + { + "epoch": 1.9922566371681416, + "grad_norm": 0.12255859375, + "learning_rate": 5.019358407079647e-06, + "loss": 0.8051, + "step": 1801 + }, + { + "epoch": 1.9933628318584071, + "grad_norm": 0.11181640625, + "learning_rate": 5.016592920353983e-06, + "loss": 0.7749, + "step": 1802 + }, + { + "epoch": 1.9944690265486726, + "grad_norm": 0.099609375, + "learning_rate": 5.013827433628319e-06, + "loss": 0.7691, + "step": 1803 + }, + { + "epoch": 1.995575221238938, + "grad_norm": 0.10791015625, + "learning_rate": 5.011061946902656e-06, + "loss": 0.7471, + "step": 1804 + }, + { + "epoch": 1.9966814159292037, + "grad_norm": 0.10205078125, + "learning_rate": 5.008296460176991e-06, + "loss": 0.7659, + "step": 1805 + }, + { + "epoch": 1.997787610619469, + "grad_norm": 0.111328125, + "learning_rate": 5.005530973451328e-06, + "loss": 0.7529, + "step": 1806 + }, + { + "epoch": 1.9988938053097345, + "grad_norm": 0.11474609375, + "learning_rate": 5.0027654867256634e-06, + "loss": 0.7637, + "step": 1807 + }, + { + "epoch": 2.0, + "grad_norm": 0.1220703125, + "learning_rate": 5e-06, + "loss": 0.799, + "step": 1808 + }, + { + "epoch": 2.0011061946902653, + "grad_norm": 0.130859375, + "learning_rate": 4.9972345132743365e-06, + "loss": 0.7357, + "step": 1809 + }, + { + "epoch": 2.002212389380531, + "grad_norm": 0.11376953125, + "learning_rate": 4.994469026548673e-06, + "loss": 0.7745, + "step": 1810 + }, + { + "epoch": 2.0033185840707963, + "grad_norm": 0.1162109375, + "learning_rate": 
4.991703539823009e-06, + "loss": 0.7839, + "step": 1811 + }, + { + "epoch": 2.004424778761062, + "grad_norm": 0.10791015625, + "learning_rate": 4.988938053097346e-06, + "loss": 0.7809, + "step": 1812 + }, + { + "epoch": 2.0055309734513274, + "grad_norm": 0.10986328125, + "learning_rate": 4.986172566371682e-06, + "loss": 0.7667, + "step": 1813 + }, + { + "epoch": 2.006637168141593, + "grad_norm": 0.1162109375, + "learning_rate": 4.983407079646018e-06, + "loss": 0.7629, + "step": 1814 + }, + { + "epoch": 2.0077433628318584, + "grad_norm": 0.12255859375, + "learning_rate": 4.980641592920354e-06, + "loss": 0.816, + "step": 1815 + }, + { + "epoch": 2.0088495575221237, + "grad_norm": 0.119140625, + "learning_rate": 4.97787610619469e-06, + "loss": 0.7724, + "step": 1816 + }, + { + "epoch": 2.0099557522123894, + "grad_norm": 0.10498046875, + "learning_rate": 4.975110619469027e-06, + "loss": 0.7728, + "step": 1817 + }, + { + "epoch": 2.0110619469026547, + "grad_norm": 0.1240234375, + "learning_rate": 4.972345132743363e-06, + "loss": 0.7711, + "step": 1818 + }, + { + "epoch": 2.0121681415929205, + "grad_norm": 0.11279296875, + "learning_rate": 4.9695796460176995e-06, + "loss": 0.7921, + "step": 1819 + }, + { + "epoch": 2.0132743362831858, + "grad_norm": 0.125, + "learning_rate": 4.966814159292036e-06, + "loss": 0.7754, + "step": 1820 + }, + { + "epoch": 2.0143805309734515, + "grad_norm": 0.11669921875, + "learning_rate": 4.964048672566373e-06, + "loss": 0.7914, + "step": 1821 + }, + { + "epoch": 2.015486725663717, + "grad_norm": 0.11572265625, + "learning_rate": 4.961283185840709e-06, + "loss": 0.7801, + "step": 1822 + }, + { + "epoch": 2.0165929203539825, + "grad_norm": 0.099609375, + "learning_rate": 4.958517699115045e-06, + "loss": 0.7433, + "step": 1823 + }, + { + "epoch": 2.017699115044248, + "grad_norm": 0.11328125, + "learning_rate": 4.955752212389381e-06, + "loss": 0.7993, + "step": 1824 + }, + { + "epoch": 2.018805309734513, + "grad_norm": 0.1142578125, + "learning_rate": 4.952986725663717e-06, + "loss": 0.8008, + "step": 1825 + }, + { + "epoch": 2.019911504424779, + "grad_norm": 0.10546875, + "learning_rate": 4.950221238938053e-06, + "loss": 0.7929, + "step": 1826 + }, + { + "epoch": 2.021017699115044, + "grad_norm": 0.11279296875, + "learning_rate": 4.94745575221239e-06, + "loss": 0.774, + "step": 1827 + }, + { + "epoch": 2.02212389380531, + "grad_norm": 0.11083984375, + "learning_rate": 4.944690265486726e-06, + "loss": 0.7876, + "step": 1828 + }, + { + "epoch": 2.023230088495575, + "grad_norm": 0.1376953125, + "learning_rate": 4.9419247787610624e-06, + "loss": 0.818, + "step": 1829 + }, + { + "epoch": 2.024336283185841, + "grad_norm": 0.1162109375, + "learning_rate": 4.9391592920353986e-06, + "loss": 0.7626, + "step": 1830 + }, + { + "epoch": 2.025442477876106, + "grad_norm": 0.162109375, + "learning_rate": 4.936393805309735e-06, + "loss": 0.7437, + "step": 1831 + }, + { + "epoch": 2.0265486725663715, + "grad_norm": 0.1103515625, + "learning_rate": 4.933628318584071e-06, + "loss": 0.7631, + "step": 1832 + }, + { + "epoch": 2.0276548672566372, + "grad_norm": 0.11962890625, + "learning_rate": 4.930862831858407e-06, + "loss": 0.7907, + "step": 1833 + }, + { + "epoch": 2.0287610619469025, + "grad_norm": 0.1630859375, + "learning_rate": 4.928097345132744e-06, + "loss": 0.8103, + "step": 1834 + }, + { + "epoch": 2.0298672566371683, + "grad_norm": 0.1025390625, + "learning_rate": 4.92533185840708e-06, + "loss": 0.7289, + "step": 1835 + }, + { + "epoch": 2.0309734513274336, + "grad_norm": 
0.1396484375, + "learning_rate": 4.922566371681416e-06, + "loss": 0.8503, + "step": 1836 + }, + { + "epoch": 2.0320796460176993, + "grad_norm": 0.12060546875, + "learning_rate": 4.919800884955752e-06, + "loss": 0.7663, + "step": 1837 + }, + { + "epoch": 2.0331858407079646, + "grad_norm": 0.10205078125, + "learning_rate": 4.917035398230089e-06, + "loss": 0.7767, + "step": 1838 + }, + { + "epoch": 2.03429203539823, + "grad_norm": 0.09765625, + "learning_rate": 4.914269911504425e-06, + "loss": 0.7541, + "step": 1839 + }, + { + "epoch": 2.0353982300884956, + "grad_norm": 0.11669921875, + "learning_rate": 4.9115044247787615e-06, + "loss": 0.785, + "step": 1840 + }, + { + "epoch": 2.036504424778761, + "grad_norm": 0.10498046875, + "learning_rate": 4.908738938053098e-06, + "loss": 0.771, + "step": 1841 + }, + { + "epoch": 2.0376106194690267, + "grad_norm": 0.1123046875, + "learning_rate": 4.905973451327435e-06, + "loss": 0.8014, + "step": 1842 + }, + { + "epoch": 2.038716814159292, + "grad_norm": 0.10888671875, + "learning_rate": 4.903207964601771e-06, + "loss": 0.7512, + "step": 1843 + }, + { + "epoch": 2.0398230088495577, + "grad_norm": 0.1015625, + "learning_rate": 4.900442477876107e-06, + "loss": 0.788, + "step": 1844 + }, + { + "epoch": 2.040929203539823, + "grad_norm": 0.12060546875, + "learning_rate": 4.897676991150443e-06, + "loss": 0.7955, + "step": 1845 + }, + { + "epoch": 2.0420353982300883, + "grad_norm": 0.1279296875, + "learning_rate": 4.894911504424779e-06, + "loss": 0.8227, + "step": 1846 + }, + { + "epoch": 2.043141592920354, + "grad_norm": 0.10107421875, + "learning_rate": 4.892146017699115e-06, + "loss": 0.7276, + "step": 1847 + }, + { + "epoch": 2.0442477876106193, + "grad_norm": 0.109375, + "learning_rate": 4.889380530973451e-06, + "loss": 0.7395, + "step": 1848 + }, + { + "epoch": 2.045353982300885, + "grad_norm": 0.0986328125, + "learning_rate": 4.8866150442477875e-06, + "loss": 0.7525, + "step": 1849 + }, + { + "epoch": 2.0464601769911503, + "grad_norm": 0.10693359375, + "learning_rate": 4.8838495575221245e-06, + "loss": 0.7671, + "step": 1850 + }, + { + "epoch": 2.047566371681416, + "grad_norm": 0.1162109375, + "learning_rate": 4.881084070796461e-06, + "loss": 0.7164, + "step": 1851 + }, + { + "epoch": 2.0486725663716814, + "grad_norm": 0.1142578125, + "learning_rate": 4.878318584070797e-06, + "loss": 0.7568, + "step": 1852 + }, + { + "epoch": 2.049778761061947, + "grad_norm": 0.11767578125, + "learning_rate": 4.875553097345133e-06, + "loss": 0.8035, + "step": 1853 + }, + { + "epoch": 2.0508849557522124, + "grad_norm": 0.10595703125, + "learning_rate": 4.872787610619469e-06, + "loss": 0.7511, + "step": 1854 + }, + { + "epoch": 2.0519911504424777, + "grad_norm": 0.1728515625, + "learning_rate": 4.870022123893806e-06, + "loss": 0.7601, + "step": 1855 + }, + { + "epoch": 2.0530973451327434, + "grad_norm": 0.11376953125, + "learning_rate": 4.867256637168142e-06, + "loss": 0.7728, + "step": 1856 + }, + { + "epoch": 2.0542035398230087, + "grad_norm": 0.11376953125, + "learning_rate": 4.864491150442478e-06, + "loss": 0.7757, + "step": 1857 + }, + { + "epoch": 2.0553097345132745, + "grad_norm": 0.111328125, + "learning_rate": 4.861725663716815e-06, + "loss": 0.7742, + "step": 1858 + }, + { + "epoch": 2.0564159292035398, + "grad_norm": 0.1025390625, + "learning_rate": 4.858960176991151e-06, + "loss": 0.7539, + "step": 1859 + }, + { + "epoch": 2.0575221238938055, + "grad_norm": 0.123046875, + "learning_rate": 4.8561946902654874e-06, + "loss": 0.7929, + "step": 1860 + }, + { + 
"epoch": 2.058628318584071, + "grad_norm": 0.1162109375, + "learning_rate": 4.8534292035398236e-06, + "loss": 0.7565, + "step": 1861 + }, + { + "epoch": 2.059734513274336, + "grad_norm": 0.1044921875, + "learning_rate": 4.85066371681416e-06, + "loss": 0.772, + "step": 1862 + }, + { + "epoch": 2.060840707964602, + "grad_norm": 0.10009765625, + "learning_rate": 4.847898230088496e-06, + "loss": 0.7779, + "step": 1863 + }, + { + "epoch": 2.061946902654867, + "grad_norm": 0.12109375, + "learning_rate": 4.845132743362832e-06, + "loss": 0.7598, + "step": 1864 + }, + { + "epoch": 2.063053097345133, + "grad_norm": 0.1044921875, + "learning_rate": 4.842367256637169e-06, + "loss": 0.7551, + "step": 1865 + }, + { + "epoch": 2.064159292035398, + "grad_norm": 0.11181640625, + "learning_rate": 4.839601769911505e-06, + "loss": 0.7632, + "step": 1866 + }, + { + "epoch": 2.065265486725664, + "grad_norm": 0.10595703125, + "learning_rate": 4.836836283185841e-06, + "loss": 0.7393, + "step": 1867 + }, + { + "epoch": 2.066371681415929, + "grad_norm": 0.1044921875, + "learning_rate": 4.834070796460177e-06, + "loss": 0.7961, + "step": 1868 + }, + { + "epoch": 2.0674778761061945, + "grad_norm": 0.12109375, + "learning_rate": 4.831305309734513e-06, + "loss": 0.7352, + "step": 1869 + }, + { + "epoch": 2.0685840707964602, + "grad_norm": 0.107421875, + "learning_rate": 4.8285398230088495e-06, + "loss": 0.7647, + "step": 1870 + }, + { + "epoch": 2.0696902654867255, + "grad_norm": 0.158203125, + "learning_rate": 4.825774336283186e-06, + "loss": 0.8431, + "step": 1871 + }, + { + "epoch": 2.0707964601769913, + "grad_norm": 0.1005859375, + "learning_rate": 4.823008849557523e-06, + "loss": 0.7508, + "step": 1872 + }, + { + "epoch": 2.0719026548672566, + "grad_norm": 0.10986328125, + "learning_rate": 4.820243362831859e-06, + "loss": 0.7759, + "step": 1873 + }, + { + "epoch": 2.0730088495575223, + "grad_norm": 0.126953125, + "learning_rate": 4.817477876106195e-06, + "loss": 0.7535, + "step": 1874 + }, + { + "epoch": 2.0741150442477876, + "grad_norm": 0.109375, + "learning_rate": 4.814712389380531e-06, + "loss": 0.7337, + "step": 1875 + }, + { + "epoch": 2.0752212389380533, + "grad_norm": 0.10986328125, + "learning_rate": 4.811946902654868e-06, + "loss": 0.7979, + "step": 1876 + }, + { + "epoch": 2.0763274336283186, + "grad_norm": 0.095703125, + "learning_rate": 4.809181415929204e-06, + "loss": 0.7282, + "step": 1877 + }, + { + "epoch": 2.077433628318584, + "grad_norm": 0.1220703125, + "learning_rate": 4.80641592920354e-06, + "loss": 0.758, + "step": 1878 + }, + { + "epoch": 2.0785398230088497, + "grad_norm": 0.12890625, + "learning_rate": 4.803650442477876e-06, + "loss": 0.7796, + "step": 1879 + }, + { + "epoch": 2.079646017699115, + "grad_norm": 0.12890625, + "learning_rate": 4.800884955752213e-06, + "loss": 0.7693, + "step": 1880 + }, + { + "epoch": 2.0807522123893807, + "grad_norm": 0.119140625, + "learning_rate": 4.7981194690265495e-06, + "loss": 0.7598, + "step": 1881 + }, + { + "epoch": 2.081858407079646, + "grad_norm": 0.109375, + "learning_rate": 4.795353982300886e-06, + "loss": 0.8005, + "step": 1882 + }, + { + "epoch": 2.0829646017699117, + "grad_norm": 0.10693359375, + "learning_rate": 4.792588495575222e-06, + "loss": 0.7557, + "step": 1883 + }, + { + "epoch": 2.084070796460177, + "grad_norm": 0.111328125, + "learning_rate": 4.789823008849558e-06, + "loss": 0.796, + "step": 1884 + }, + { + "epoch": 2.0851769911504423, + "grad_norm": 0.1103515625, + "learning_rate": 4.787057522123894e-06, + "loss": 0.7727, + "step": 
1885 + }, + { + "epoch": 2.086283185840708, + "grad_norm": 0.130859375, + "learning_rate": 4.78429203539823e-06, + "loss": 0.8011, + "step": 1886 + }, + { + "epoch": 2.0873893805309733, + "grad_norm": 0.1015625, + "learning_rate": 4.781526548672566e-06, + "loss": 0.7165, + "step": 1887 + }, + { + "epoch": 2.088495575221239, + "grad_norm": 0.1279296875, + "learning_rate": 4.778761061946903e-06, + "loss": 0.7757, + "step": 1888 + }, + { + "epoch": 2.0896017699115044, + "grad_norm": 0.109375, + "learning_rate": 4.775995575221239e-06, + "loss": 0.7542, + "step": 1889 + }, + { + "epoch": 2.09070796460177, + "grad_norm": 0.10205078125, + "learning_rate": 4.7732300884955754e-06, + "loss": 0.7627, + "step": 1890 + }, + { + "epoch": 2.0918141592920354, + "grad_norm": 0.1279296875, + "learning_rate": 4.7704646017699116e-06, + "loss": 0.8335, + "step": 1891 + }, + { + "epoch": 2.0929203539823007, + "grad_norm": 0.1650390625, + "learning_rate": 4.767699115044248e-06, + "loss": 0.7735, + "step": 1892 + }, + { + "epoch": 2.0940265486725664, + "grad_norm": 0.12109375, + "learning_rate": 4.764933628318585e-06, + "loss": 0.7839, + "step": 1893 + }, + { + "epoch": 2.0951327433628317, + "grad_norm": 0.09814453125, + "learning_rate": 4.762168141592921e-06, + "loss": 0.7094, + "step": 1894 + }, + { + "epoch": 2.0962389380530975, + "grad_norm": 0.1103515625, + "learning_rate": 4.759402654867257e-06, + "loss": 0.7571, + "step": 1895 + }, + { + "epoch": 2.0973451327433628, + "grad_norm": 0.107421875, + "learning_rate": 4.756637168141594e-06, + "loss": 0.7712, + "step": 1896 + }, + { + "epoch": 2.0984513274336285, + "grad_norm": 0.10595703125, + "learning_rate": 4.75387168141593e-06, + "loss": 0.7228, + "step": 1897 + }, + { + "epoch": 2.099557522123894, + "grad_norm": 0.11474609375, + "learning_rate": 4.751106194690266e-06, + "loss": 0.7637, + "step": 1898 + }, + { + "epoch": 2.100663716814159, + "grad_norm": 0.109375, + "learning_rate": 4.748340707964602e-06, + "loss": 0.7724, + "step": 1899 + }, + { + "epoch": 2.101769911504425, + "grad_norm": 0.10498046875, + "learning_rate": 4.745575221238938e-06, + "loss": 0.7791, + "step": 1900 + }, + { + "epoch": 2.10287610619469, + "grad_norm": 0.119140625, + "learning_rate": 4.7428097345132745e-06, + "loss": 0.8164, + "step": 1901 + }, + { + "epoch": 2.103982300884956, + "grad_norm": 0.11181640625, + "learning_rate": 4.740044247787611e-06, + "loss": 0.7508, + "step": 1902 + }, + { + "epoch": 2.105088495575221, + "grad_norm": 0.1181640625, + "learning_rate": 4.737278761061948e-06, + "loss": 0.8579, + "step": 1903 + }, + { + "epoch": 2.106194690265487, + "grad_norm": 0.1044921875, + "learning_rate": 4.734513274336284e-06, + "loss": 0.741, + "step": 1904 + }, + { + "epoch": 2.107300884955752, + "grad_norm": 0.107421875, + "learning_rate": 4.73174778761062e-06, + "loss": 0.785, + "step": 1905 + }, + { + "epoch": 2.1084070796460175, + "grad_norm": 0.1259765625, + "learning_rate": 4.728982300884956e-06, + "loss": 0.8063, + "step": 1906 + }, + { + "epoch": 2.109513274336283, + "grad_norm": 0.10888671875, + "learning_rate": 4.726216814159292e-06, + "loss": 0.8016, + "step": 1907 + }, + { + "epoch": 2.1106194690265485, + "grad_norm": 0.1318359375, + "learning_rate": 4.723451327433628e-06, + "loss": 0.7909, + "step": 1908 + }, + { + "epoch": 2.1117256637168142, + "grad_norm": 0.10888671875, + "learning_rate": 4.720685840707964e-06, + "loss": 0.7564, + "step": 1909 + }, + { + "epoch": 2.1128318584070795, + "grad_norm": 0.10107421875, + "learning_rate": 4.717920353982301e-06, + 
"loss": 0.7536, + "step": 1910 + }, + { + "epoch": 2.1139380530973453, + "grad_norm": 0.10595703125, + "learning_rate": 4.7151548672566375e-06, + "loss": 0.7689, + "step": 1911 + }, + { + "epoch": 2.1150442477876106, + "grad_norm": 0.10791015625, + "learning_rate": 4.712389380530974e-06, + "loss": 0.7882, + "step": 1912 + }, + { + "epoch": 2.1161504424778763, + "grad_norm": 0.0947265625, + "learning_rate": 4.70962389380531e-06, + "loss": 0.7441, + "step": 1913 + }, + { + "epoch": 2.1172566371681416, + "grad_norm": 0.11865234375, + "learning_rate": 4.706858407079647e-06, + "loss": 0.7633, + "step": 1914 + }, + { + "epoch": 2.118362831858407, + "grad_norm": 0.10302734375, + "learning_rate": 4.704092920353983e-06, + "loss": 0.7686, + "step": 1915 + }, + { + "epoch": 2.1194690265486726, + "grad_norm": 0.10546875, + "learning_rate": 4.701327433628319e-06, + "loss": 0.7717, + "step": 1916 + }, + { + "epoch": 2.120575221238938, + "grad_norm": 0.109375, + "learning_rate": 4.698561946902655e-06, + "loss": 0.7931, + "step": 1917 + }, + { + "epoch": 2.1216814159292037, + "grad_norm": 0.1103515625, + "learning_rate": 4.695796460176992e-06, + "loss": 0.7794, + "step": 1918 + }, + { + "epoch": 2.122787610619469, + "grad_norm": 0.10205078125, + "learning_rate": 4.693030973451328e-06, + "loss": 0.8011, + "step": 1919 + }, + { + "epoch": 2.1238938053097347, + "grad_norm": 0.11376953125, + "learning_rate": 4.690265486725664e-06, + "loss": 0.8103, + "step": 1920 + }, + { + "epoch": 2.125, + "grad_norm": 0.11669921875, + "learning_rate": 4.6875000000000004e-06, + "loss": 0.7779, + "step": 1921 + }, + { + "epoch": 2.1261061946902653, + "grad_norm": 0.1171875, + "learning_rate": 4.6847345132743366e-06, + "loss": 0.7945, + "step": 1922 + }, + { + "epoch": 2.127212389380531, + "grad_norm": 0.1123046875, + "learning_rate": 4.681969026548673e-06, + "loss": 0.7627, + "step": 1923 + }, + { + "epoch": 2.1283185840707963, + "grad_norm": 0.111328125, + "learning_rate": 4.679203539823009e-06, + "loss": 0.7554, + "step": 1924 + }, + { + "epoch": 2.129424778761062, + "grad_norm": 0.109375, + "learning_rate": 4.676438053097346e-06, + "loss": 0.7083, + "step": 1925 + }, + { + "epoch": 2.1305309734513274, + "grad_norm": 0.1298828125, + "learning_rate": 4.673672566371682e-06, + "loss": 0.8026, + "step": 1926 + }, + { + "epoch": 2.131637168141593, + "grad_norm": 0.11279296875, + "learning_rate": 4.670907079646018e-06, + "loss": 0.7678, + "step": 1927 + }, + { + "epoch": 2.1327433628318584, + "grad_norm": 0.1171875, + "learning_rate": 4.668141592920354e-06, + "loss": 0.785, + "step": 1928 + }, + { + "epoch": 2.133849557522124, + "grad_norm": 0.12109375, + "learning_rate": 4.66537610619469e-06, + "loss": 0.7682, + "step": 1929 + }, + { + "epoch": 2.1349557522123894, + "grad_norm": 0.10205078125, + "learning_rate": 4.662610619469026e-06, + "loss": 0.7742, + "step": 1930 + }, + { + "epoch": 2.1360619469026547, + "grad_norm": 0.09716796875, + "learning_rate": 4.659845132743363e-06, + "loss": 0.7559, + "step": 1931 + }, + { + "epoch": 2.1371681415929205, + "grad_norm": 0.1201171875, + "learning_rate": 4.6570796460176995e-06, + "loss": 0.7707, + "step": 1932 + }, + { + "epoch": 2.1382743362831858, + "grad_norm": 0.11865234375, + "learning_rate": 4.654314159292036e-06, + "loss": 0.782, + "step": 1933 + }, + { + "epoch": 2.1393805309734515, + "grad_norm": 0.1376953125, + "learning_rate": 4.651548672566373e-06, + "loss": 0.8201, + "step": 1934 + }, + { + "epoch": 2.140486725663717, + "grad_norm": 0.10595703125, + "learning_rate": 
4.648783185840709e-06, + "loss": 0.7498, + "step": 1935 + }, + { + "epoch": 2.1415929203539825, + "grad_norm": 0.09228515625, + "learning_rate": 4.646017699115045e-06, + "loss": 0.7004, + "step": 1936 + }, + { + "epoch": 2.142699115044248, + "grad_norm": 0.11669921875, + "learning_rate": 4.643252212389381e-06, + "loss": 0.7553, + "step": 1937 + }, + { + "epoch": 2.143805309734513, + "grad_norm": 0.11328125, + "learning_rate": 4.640486725663717e-06, + "loss": 0.8024, + "step": 1938 + }, + { + "epoch": 2.144911504424779, + "grad_norm": 0.11083984375, + "learning_rate": 4.637721238938053e-06, + "loss": 0.8008, + "step": 1939 + }, + { + "epoch": 2.146017699115044, + "grad_norm": 0.1123046875, + "learning_rate": 4.63495575221239e-06, + "loss": 0.7655, + "step": 1940 + }, + { + "epoch": 2.14712389380531, + "grad_norm": 0.12158203125, + "learning_rate": 4.632190265486726e-06, + "loss": 0.7681, + "step": 1941 + }, + { + "epoch": 2.148230088495575, + "grad_norm": 0.125, + "learning_rate": 4.6294247787610625e-06, + "loss": 0.8482, + "step": 1942 + }, + { + "epoch": 2.149336283185841, + "grad_norm": 0.1201171875, + "learning_rate": 4.626659292035399e-06, + "loss": 0.7423, + "step": 1943 + }, + { + "epoch": 2.150442477876106, + "grad_norm": 0.10498046875, + "learning_rate": 4.623893805309735e-06, + "loss": 0.765, + "step": 1944 + }, + { + "epoch": 2.1515486725663715, + "grad_norm": 0.10400390625, + "learning_rate": 4.621128318584071e-06, + "loss": 0.7763, + "step": 1945 + }, + { + "epoch": 2.1526548672566372, + "grad_norm": 0.09716796875, + "learning_rate": 4.618362831858407e-06, + "loss": 0.7192, + "step": 1946 + }, + { + "epoch": 2.1537610619469025, + "grad_norm": 0.15625, + "learning_rate": 4.615597345132743e-06, + "loss": 0.7764, + "step": 1947 + }, + { + "epoch": 2.1548672566371683, + "grad_norm": 0.11474609375, + "learning_rate": 4.61283185840708e-06, + "loss": 0.7815, + "step": 1948 + }, + { + "epoch": 2.1559734513274336, + "grad_norm": 0.11376953125, + "learning_rate": 4.610066371681416e-06, + "loss": 0.7518, + "step": 1949 + }, + { + "epoch": 2.1570796460176993, + "grad_norm": 0.115234375, + "learning_rate": 4.607300884955752e-06, + "loss": 0.7722, + "step": 1950 + }, + { + "epoch": 2.1581858407079646, + "grad_norm": 0.1044921875, + "learning_rate": 4.604535398230089e-06, + "loss": 0.7735, + "step": 1951 + }, + { + "epoch": 2.15929203539823, + "grad_norm": 0.103515625, + "learning_rate": 4.6017699115044254e-06, + "loss": 0.7367, + "step": 1952 + }, + { + "epoch": 2.1603982300884956, + "grad_norm": 0.09912109375, + "learning_rate": 4.5990044247787616e-06, + "loss": 0.762, + "step": 1953 + }, + { + "epoch": 2.161504424778761, + "grad_norm": 0.11279296875, + "learning_rate": 4.596238938053098e-06, + "loss": 0.7805, + "step": 1954 + }, + { + "epoch": 2.1626106194690267, + "grad_norm": 0.1064453125, + "learning_rate": 4.593473451327435e-06, + "loss": 0.7555, + "step": 1955 + }, + { + "epoch": 2.163716814159292, + "grad_norm": 0.13671875, + "learning_rate": 4.590707964601771e-06, + "loss": 0.7772, + "step": 1956 + }, + { + "epoch": 2.1648230088495577, + "grad_norm": 0.09814453125, + "learning_rate": 4.587942477876107e-06, + "loss": 0.6967, + "step": 1957 + }, + { + "epoch": 2.165929203539823, + "grad_norm": 0.1396484375, + "learning_rate": 4.585176991150443e-06, + "loss": 0.7812, + "step": 1958 + }, + { + "epoch": 2.1670353982300883, + "grad_norm": 0.1025390625, + "learning_rate": 4.582411504424779e-06, + "loss": 0.7716, + "step": 1959 + }, + { + "epoch": 2.168141592920354, + "grad_norm": 
0.12890625, + "learning_rate": 4.579646017699115e-06, + "loss": 0.8072, + "step": 1960 + }, + { + "epoch": 2.1692477876106193, + "grad_norm": 0.1259765625, + "learning_rate": 4.576880530973451e-06, + "loss": 0.8168, + "step": 1961 + }, + { + "epoch": 2.170353982300885, + "grad_norm": 0.11376953125, + "learning_rate": 4.5741150442477875e-06, + "loss": 0.7819, + "step": 1962 + }, + { + "epoch": 2.1714601769911503, + "grad_norm": 0.138671875, + "learning_rate": 4.5713495575221245e-06, + "loss": 0.7469, + "step": 1963 + }, + { + "epoch": 2.172566371681416, + "grad_norm": 0.11474609375, + "learning_rate": 4.568584070796461e-06, + "loss": 0.7749, + "step": 1964 + }, + { + "epoch": 2.1736725663716814, + "grad_norm": 0.11083984375, + "learning_rate": 4.565818584070797e-06, + "loss": 0.7678, + "step": 1965 + }, + { + "epoch": 2.1747787610619467, + "grad_norm": 0.1044921875, + "learning_rate": 4.563053097345133e-06, + "loss": 0.7462, + "step": 1966 + }, + { + "epoch": 2.1758849557522124, + "grad_norm": 0.11376953125, + "learning_rate": 4.560287610619469e-06, + "loss": 0.7856, + "step": 1967 + }, + { + "epoch": 2.1769911504424777, + "grad_norm": 0.11181640625, + "learning_rate": 4.557522123893805e-06, + "loss": 0.781, + "step": 1968 + }, + { + "epoch": 2.1780973451327434, + "grad_norm": 0.1083984375, + "learning_rate": 4.554756637168142e-06, + "loss": 0.782, + "step": 1969 + }, + { + "epoch": 2.1792035398230087, + "grad_norm": 0.1123046875, + "learning_rate": 4.551991150442478e-06, + "loss": 0.7558, + "step": 1970 + }, + { + "epoch": 2.1803097345132745, + "grad_norm": 0.1103515625, + "learning_rate": 4.549225663716814e-06, + "loss": 0.7557, + "step": 1971 + }, + { + "epoch": 2.1814159292035398, + "grad_norm": 0.10791015625, + "learning_rate": 4.546460176991151e-06, + "loss": 0.7661, + "step": 1972 + }, + { + "epoch": 2.1825221238938055, + "grad_norm": 0.12353515625, + "learning_rate": 4.5436946902654875e-06, + "loss": 0.8595, + "step": 1973 + }, + { + "epoch": 2.183628318584071, + "grad_norm": 0.11181640625, + "learning_rate": 4.540929203539824e-06, + "loss": 0.7621, + "step": 1974 + }, + { + "epoch": 2.184734513274336, + "grad_norm": 0.1123046875, + "learning_rate": 4.53816371681416e-06, + "loss": 0.7753, + "step": 1975 + }, + { + "epoch": 2.185840707964602, + "grad_norm": 0.12109375, + "learning_rate": 4.535398230088496e-06, + "loss": 0.7684, + "step": 1976 + }, + { + "epoch": 2.186946902654867, + "grad_norm": 0.11328125, + "learning_rate": 4.532632743362832e-06, + "loss": 0.7306, + "step": 1977 + }, + { + "epoch": 2.188053097345133, + "grad_norm": 0.10205078125, + "learning_rate": 4.529867256637169e-06, + "loss": 0.7549, + "step": 1978 + }, + { + "epoch": 2.189159292035398, + "grad_norm": 0.10546875, + "learning_rate": 4.527101769911505e-06, + "loss": 0.7767, + "step": 1979 + }, + { + "epoch": 2.190265486725664, + "grad_norm": 0.12109375, + "learning_rate": 4.524336283185841e-06, + "loss": 0.7612, + "step": 1980 + }, + { + "epoch": 2.191371681415929, + "grad_norm": 0.11083984375, + "learning_rate": 4.521570796460177e-06, + "loss": 0.7704, + "step": 1981 + }, + { + "epoch": 2.1924778761061945, + "grad_norm": 0.11083984375, + "learning_rate": 4.5188053097345134e-06, + "loss": 0.7826, + "step": 1982 + }, + { + "epoch": 2.1935840707964602, + "grad_norm": 0.10498046875, + "learning_rate": 4.5160398230088496e-06, + "loss": 0.77, + "step": 1983 + }, + { + "epoch": 2.1946902654867255, + "grad_norm": 0.1044921875, + "learning_rate": 4.513274336283186e-06, + "loss": 0.7158, + "step": 1984 + }, + { + 
"epoch": 2.1957964601769913, + "grad_norm": 0.10888671875, + "learning_rate": 4.510508849557522e-06, + "loss": 0.7333, + "step": 1985 + }, + { + "epoch": 2.1969026548672566, + "grad_norm": 0.12158203125, + "learning_rate": 4.507743362831859e-06, + "loss": 0.7719, + "step": 1986 + }, + { + "epoch": 2.1980088495575223, + "grad_norm": 0.1318359375, + "learning_rate": 4.504977876106195e-06, + "loss": 0.7861, + "step": 1987 + }, + { + "epoch": 2.1991150442477876, + "grad_norm": 0.1328125, + "learning_rate": 4.502212389380531e-06, + "loss": 0.7752, + "step": 1988 + }, + { + "epoch": 2.2002212389380533, + "grad_norm": 0.10595703125, + "learning_rate": 4.499446902654868e-06, + "loss": 0.7643, + "step": 1989 + }, + { + "epoch": 2.2013274336283186, + "grad_norm": 0.10498046875, + "learning_rate": 4.496681415929204e-06, + "loss": 0.737, + "step": 1990 + }, + { + "epoch": 2.202433628318584, + "grad_norm": 0.09912109375, + "learning_rate": 4.49391592920354e-06, + "loss": 0.7514, + "step": 1991 + }, + { + "epoch": 2.2035398230088497, + "grad_norm": 0.123046875, + "learning_rate": 4.491150442477876e-06, + "loss": 0.8073, + "step": 1992 + }, + { + "epoch": 2.204646017699115, + "grad_norm": 0.1328125, + "learning_rate": 4.488384955752213e-06, + "loss": 0.8034, + "step": 1993 + }, + { + "epoch": 2.2057522123893807, + "grad_norm": 0.1083984375, + "learning_rate": 4.4856194690265495e-06, + "loss": 0.7767, + "step": 1994 + }, + { + "epoch": 2.206858407079646, + "grad_norm": 0.1103515625, + "learning_rate": 4.482853982300886e-06, + "loss": 0.7686, + "step": 1995 + }, + { + "epoch": 2.2079646017699117, + "grad_norm": 0.1064453125, + "learning_rate": 4.480088495575222e-06, + "loss": 0.7836, + "step": 1996 + }, + { + "epoch": 2.209070796460177, + "grad_norm": 0.1044921875, + "learning_rate": 4.477323008849558e-06, + "loss": 0.7569, + "step": 1997 + }, + { + "epoch": 2.2101769911504423, + "grad_norm": 0.10986328125, + "learning_rate": 4.474557522123894e-06, + "loss": 0.7561, + "step": 1998 + }, + { + "epoch": 2.211283185840708, + "grad_norm": 0.11474609375, + "learning_rate": 4.47179203539823e-06, + "loss": 0.7796, + "step": 1999 + }, + { + "epoch": 2.2123893805309733, + "grad_norm": 0.10302734375, + "learning_rate": 4.469026548672566e-06, + "loss": 0.7688, + "step": 2000 + }, + { + "epoch": 2.213495575221239, + "grad_norm": 0.111328125, + "learning_rate": 4.466261061946903e-06, + "loss": 0.7861, + "step": 2001 + }, + { + "epoch": 2.2146017699115044, + "grad_norm": 0.10498046875, + "learning_rate": 4.463495575221239e-06, + "loss": 0.7472, + "step": 2002 + }, + { + "epoch": 2.21570796460177, + "grad_norm": 0.10791015625, + "learning_rate": 4.4607300884955755e-06, + "loss": 0.7388, + "step": 2003 + }, + { + "epoch": 2.2168141592920354, + "grad_norm": 0.10986328125, + "learning_rate": 4.457964601769912e-06, + "loss": 0.7941, + "step": 2004 + }, + { + "epoch": 2.2179203539823007, + "grad_norm": 0.099609375, + "learning_rate": 4.455199115044248e-06, + "loss": 0.7751, + "step": 2005 + }, + { + "epoch": 2.2190265486725664, + "grad_norm": 0.1298828125, + "learning_rate": 4.452433628318585e-06, + "loss": 0.8149, + "step": 2006 + }, + { + "epoch": 2.2201327433628317, + "grad_norm": 0.107421875, + "learning_rate": 4.449668141592921e-06, + "loss": 0.7543, + "step": 2007 + }, + { + "epoch": 2.2212389380530975, + "grad_norm": 0.1064453125, + "learning_rate": 4.446902654867257e-06, + "loss": 0.7625, + "step": 2008 + }, + { + "epoch": 2.2223451327433628, + "grad_norm": 0.109375, + "learning_rate": 4.444137168141593e-06, + "loss": 
0.7727, + "step": 2009 + }, + { + "epoch": 2.2234513274336285, + "grad_norm": 0.11279296875, + "learning_rate": 4.44137168141593e-06, + "loss": 0.7738, + "step": 2010 + }, + { + "epoch": 2.224557522123894, + "grad_norm": 0.111328125, + "learning_rate": 4.438606194690266e-06, + "loss": 0.7872, + "step": 2011 + }, + { + "epoch": 2.225663716814159, + "grad_norm": 0.1552734375, + "learning_rate": 4.435840707964602e-06, + "loss": 0.782, + "step": 2012 + }, + { + "epoch": 2.226769911504425, + "grad_norm": 0.12158203125, + "learning_rate": 4.4330752212389384e-06, + "loss": 0.8091, + "step": 2013 + }, + { + "epoch": 2.22787610619469, + "grad_norm": 0.123046875, + "learning_rate": 4.4303097345132746e-06, + "loss": 0.746, + "step": 2014 + }, + { + "epoch": 2.228982300884956, + "grad_norm": 0.111328125, + "learning_rate": 4.427544247787611e-06, + "loss": 0.791, + "step": 2015 + }, + { + "epoch": 2.230088495575221, + "grad_norm": 0.1025390625, + "learning_rate": 4.424778761061948e-06, + "loss": 0.7684, + "step": 2016 + }, + { + "epoch": 2.231194690265487, + "grad_norm": 0.1591796875, + "learning_rate": 4.422013274336284e-06, + "loss": 0.781, + "step": 2017 + }, + { + "epoch": 2.232300884955752, + "grad_norm": 0.10546875, + "learning_rate": 4.41924778761062e-06, + "loss": 0.7574, + "step": 2018 + }, + { + "epoch": 2.2334070796460175, + "grad_norm": 0.10302734375, + "learning_rate": 4.416482300884956e-06, + "loss": 0.7444, + "step": 2019 + }, + { + "epoch": 2.234513274336283, + "grad_norm": 0.10546875, + "learning_rate": 4.413716814159292e-06, + "loss": 0.7728, + "step": 2020 + }, + { + "epoch": 2.2356194690265485, + "grad_norm": 0.10986328125, + "learning_rate": 4.410951327433628e-06, + "loss": 0.7731, + "step": 2021 + }, + { + "epoch": 2.2367256637168142, + "grad_norm": 0.12353515625, + "learning_rate": 4.408185840707964e-06, + "loss": 0.8071, + "step": 2022 + }, + { + "epoch": 2.2378318584070795, + "grad_norm": 0.1259765625, + "learning_rate": 4.405420353982301e-06, + "loss": 0.7655, + "step": 2023 + }, + { + "epoch": 2.2389380530973453, + "grad_norm": 0.10888671875, + "learning_rate": 4.4026548672566375e-06, + "loss": 0.7738, + "step": 2024 + }, + { + "epoch": 2.2400442477876106, + "grad_norm": 0.1044921875, + "learning_rate": 4.399889380530974e-06, + "loss": 0.7674, + "step": 2025 + }, + { + "epoch": 2.241150442477876, + "grad_norm": 0.10107421875, + "learning_rate": 4.39712389380531e-06, + "loss": 0.7415, + "step": 2026 + }, + { + "epoch": 2.2422566371681416, + "grad_norm": 0.1123046875, + "learning_rate": 4.394358407079647e-06, + "loss": 0.8308, + "step": 2027 + }, + { + "epoch": 2.243362831858407, + "grad_norm": 0.1064453125, + "learning_rate": 4.391592920353983e-06, + "loss": 0.7658, + "step": 2028 + }, + { + "epoch": 2.2444690265486726, + "grad_norm": 0.119140625, + "learning_rate": 4.388827433628319e-06, + "loss": 0.7998, + "step": 2029 + }, + { + "epoch": 2.245575221238938, + "grad_norm": 0.1474609375, + "learning_rate": 4.386061946902655e-06, + "loss": 0.7467, + "step": 2030 + }, + { + "epoch": 2.2466814159292037, + "grad_norm": 0.12158203125, + "learning_rate": 4.383296460176992e-06, + "loss": 0.8377, + "step": 2031 + }, + { + "epoch": 2.247787610619469, + "grad_norm": 0.12353515625, + "learning_rate": 4.380530973451328e-06, + "loss": 0.7349, + "step": 2032 + }, + { + "epoch": 2.2488938053097347, + "grad_norm": 0.107421875, + "learning_rate": 4.377765486725664e-06, + "loss": 0.7675, + "step": 2033 + }, + { + "epoch": 2.25, + "grad_norm": 0.10595703125, + "learning_rate": 
4.3750000000000005e-06, + "loss": 0.7239, + "step": 2034 + }, + { + "epoch": 2.2511061946902653, + "grad_norm": 0.12060546875, + "learning_rate": 4.372234513274337e-06, + "loss": 0.7601, + "step": 2035 + }, + { + "epoch": 2.252212389380531, + "grad_norm": 0.09521484375, + "learning_rate": 4.369469026548673e-06, + "loss": 0.7186, + "step": 2036 + }, + { + "epoch": 2.2533185840707963, + "grad_norm": 0.103515625, + "learning_rate": 4.366703539823009e-06, + "loss": 0.7354, + "step": 2037 + }, + { + "epoch": 2.254424778761062, + "grad_norm": 0.1005859375, + "learning_rate": 4.363938053097346e-06, + "loss": 0.793, + "step": 2038 + }, + { + "epoch": 2.2555309734513274, + "grad_norm": 0.119140625, + "learning_rate": 4.361172566371682e-06, + "loss": 0.7781, + "step": 2039 + }, + { + "epoch": 2.256637168141593, + "grad_norm": 0.107421875, + "learning_rate": 4.358407079646018e-06, + "loss": 0.7597, + "step": 2040 + }, + { + "epoch": 2.2577433628318584, + "grad_norm": 0.11865234375, + "learning_rate": 4.355641592920354e-06, + "loss": 0.7915, + "step": 2041 + }, + { + "epoch": 2.258849557522124, + "grad_norm": 0.14453125, + "learning_rate": 4.35287610619469e-06, + "loss": 0.786, + "step": 2042 + }, + { + "epoch": 2.2599557522123894, + "grad_norm": 0.12060546875, + "learning_rate": 4.3501106194690264e-06, + "loss": 0.8299, + "step": 2043 + }, + { + "epoch": 2.2610619469026547, + "grad_norm": 0.11279296875, + "learning_rate": 4.347345132743363e-06, + "loss": 0.778, + "step": 2044 + }, + { + "epoch": 2.2621681415929205, + "grad_norm": 0.11962890625, + "learning_rate": 4.3445796460176995e-06, + "loss": 0.8183, + "step": 2045 + }, + { + "epoch": 2.2632743362831858, + "grad_norm": 0.11328125, + "learning_rate": 4.341814159292036e-06, + "loss": 0.8094, + "step": 2046 + }, + { + "epoch": 2.2643805309734515, + "grad_norm": 0.11279296875, + "learning_rate": 4.339048672566372e-06, + "loss": 0.7763, + "step": 2047 + }, + { + "epoch": 2.265486725663717, + "grad_norm": 0.1279296875, + "learning_rate": 4.336283185840709e-06, + "loss": 0.7934, + "step": 2048 + }, + { + "epoch": 2.2665929203539825, + "grad_norm": 0.125, + "learning_rate": 4.333517699115045e-06, + "loss": 0.7863, + "step": 2049 + }, + { + "epoch": 2.267699115044248, + "grad_norm": 0.1103515625, + "learning_rate": 4.330752212389381e-06, + "loss": 0.7578, + "step": 2050 + }, + { + "epoch": 2.268805309734513, + "grad_norm": 0.10888671875, + "learning_rate": 4.327986725663717e-06, + "loss": 0.7653, + "step": 2051 + }, + { + "epoch": 2.269911504424779, + "grad_norm": 0.126953125, + "learning_rate": 4.325221238938053e-06, + "loss": 0.8119, + "step": 2052 + }, + { + "epoch": 2.271017699115044, + "grad_norm": 0.11865234375, + "learning_rate": 4.32245575221239e-06, + "loss": 0.7836, + "step": 2053 + }, + { + "epoch": 2.27212389380531, + "grad_norm": 0.10986328125, + "learning_rate": 4.319690265486726e-06, + "loss": 0.7355, + "step": 2054 + }, + { + "epoch": 2.273230088495575, + "grad_norm": 0.1103515625, + "learning_rate": 4.3169247787610625e-06, + "loss": 0.792, + "step": 2055 + }, + { + "epoch": 2.274336283185841, + "grad_norm": 0.10888671875, + "learning_rate": 4.314159292035399e-06, + "loss": 0.772, + "step": 2056 + }, + { + "epoch": 2.275442477876106, + "grad_norm": 0.11376953125, + "learning_rate": 4.311393805309735e-06, + "loss": 0.7733, + "step": 2057 + }, + { + "epoch": 2.2765486725663715, + "grad_norm": 0.099609375, + "learning_rate": 4.308628318584071e-06, + "loss": 0.7409, + "step": 2058 + }, + { + "epoch": 2.2776548672566372, + "grad_norm": 
0.10400390625, + "learning_rate": 4.305862831858407e-06, + "loss": 0.7212, + "step": 2059 + }, + { + "epoch": 2.2787610619469025, + "grad_norm": 0.115234375, + "learning_rate": 4.303097345132743e-06, + "loss": 0.8494, + "step": 2060 + }, + { + "epoch": 2.2798672566371683, + "grad_norm": 0.11572265625, + "learning_rate": 4.30033185840708e-06, + "loss": 0.7942, + "step": 2061 + }, + { + "epoch": 2.2809734513274336, + "grad_norm": 0.09521484375, + "learning_rate": 4.297566371681416e-06, + "loss": 0.7371, + "step": 2062 + }, + { + "epoch": 2.2820796460176993, + "grad_norm": 0.1142578125, + "learning_rate": 4.294800884955752e-06, + "loss": 0.7888, + "step": 2063 + }, + { + "epoch": 2.2831858407079646, + "grad_norm": 0.1220703125, + "learning_rate": 4.2920353982300885e-06, + "loss": 0.7833, + "step": 2064 + }, + { + "epoch": 2.28429203539823, + "grad_norm": 0.111328125, + "learning_rate": 4.2892699115044255e-06, + "loss": 0.7913, + "step": 2065 + }, + { + "epoch": 2.2853982300884956, + "grad_norm": 0.115234375, + "learning_rate": 4.286504424778762e-06, + "loss": 0.7786, + "step": 2066 + }, + { + "epoch": 2.286504424778761, + "grad_norm": 0.11767578125, + "learning_rate": 4.283738938053098e-06, + "loss": 0.8374, + "step": 2067 + }, + { + "epoch": 2.2876106194690267, + "grad_norm": 0.130859375, + "learning_rate": 4.280973451327434e-06, + "loss": 0.754, + "step": 2068 + }, + { + "epoch": 2.288716814159292, + "grad_norm": 0.11181640625, + "learning_rate": 4.278207964601771e-06, + "loss": 0.7698, + "step": 2069 + }, + { + "epoch": 2.2898230088495577, + "grad_norm": 0.10693359375, + "learning_rate": 4.275442477876107e-06, + "loss": 0.776, + "step": 2070 + }, + { + "epoch": 2.290929203539823, + "grad_norm": 0.10400390625, + "learning_rate": 4.272676991150443e-06, + "loss": 0.7286, + "step": 2071 + }, + { + "epoch": 2.2920353982300883, + "grad_norm": 0.11279296875, + "learning_rate": 4.269911504424779e-06, + "loss": 0.7595, + "step": 2072 + }, + { + "epoch": 2.293141592920354, + "grad_norm": 0.1162109375, + "learning_rate": 4.267146017699115e-06, + "loss": 0.7683, + "step": 2073 + }, + { + "epoch": 2.2942477876106193, + "grad_norm": 0.09814453125, + "learning_rate": 4.2643805309734514e-06, + "loss": 0.7527, + "step": 2074 + }, + { + "epoch": 2.295353982300885, + "grad_norm": 0.10546875, + "learning_rate": 4.2616150442477876e-06, + "loss": 0.7537, + "step": 2075 + }, + { + "epoch": 2.2964601769911503, + "grad_norm": 0.138671875, + "learning_rate": 4.2588495575221245e-06, + "loss": 0.7695, + "step": 2076 + }, + { + "epoch": 2.297566371681416, + "grad_norm": 0.10400390625, + "learning_rate": 4.256084070796461e-06, + "loss": 0.763, + "step": 2077 + }, + { + "epoch": 2.2986725663716814, + "grad_norm": 0.1064453125, + "learning_rate": 4.253318584070797e-06, + "loss": 0.7412, + "step": 2078 + }, + { + "epoch": 2.2997787610619467, + "grad_norm": 0.0947265625, + "learning_rate": 4.250553097345133e-06, + "loss": 0.6837, + "step": 2079 + }, + { + "epoch": 2.3008849557522124, + "grad_norm": 0.10400390625, + "learning_rate": 4.247787610619469e-06, + "loss": 0.7184, + "step": 2080 + }, + { + "epoch": 2.3019911504424777, + "grad_norm": 0.1103515625, + "learning_rate": 4.245022123893805e-06, + "loss": 0.7952, + "step": 2081 + }, + { + "epoch": 2.3030973451327434, + "grad_norm": 0.10546875, + "learning_rate": 4.242256637168142e-06, + "loss": 0.7417, + "step": 2082 + }, + { + "epoch": 2.3042035398230087, + "grad_norm": 0.1279296875, + "learning_rate": 4.239491150442478e-06, + "loss": 0.7988, + "step": 2083 + }, + { + 
"epoch": 2.3053097345132745, + "grad_norm": 0.1298828125, + "learning_rate": 4.236725663716814e-06, + "loss": 0.7763, + "step": 2084 + }, + { + "epoch": 2.3064159292035398, + "grad_norm": 0.10205078125, + "learning_rate": 4.2339601769911505e-06, + "loss": 0.7539, + "step": 2085 + }, + { + "epoch": 2.307522123893805, + "grad_norm": 0.10498046875, + "learning_rate": 4.2311946902654875e-06, + "loss": 0.7693, + "step": 2086 + }, + { + "epoch": 2.308628318584071, + "grad_norm": 0.1171875, + "learning_rate": 4.228429203539824e-06, + "loss": 0.8046, + "step": 2087 + }, + { + "epoch": 2.309734513274336, + "grad_norm": 0.10107421875, + "learning_rate": 4.22566371681416e-06, + "loss": 0.766, + "step": 2088 + }, + { + "epoch": 2.310840707964602, + "grad_norm": 0.11474609375, + "learning_rate": 4.222898230088496e-06, + "loss": 0.8059, + "step": 2089 + }, + { + "epoch": 2.311946902654867, + "grad_norm": 0.1025390625, + "learning_rate": 4.220132743362832e-06, + "loss": 0.7334, + "step": 2090 + }, + { + "epoch": 2.313053097345133, + "grad_norm": 0.109375, + "learning_rate": 4.217367256637169e-06, + "loss": 0.801, + "step": 2091 + }, + { + "epoch": 2.314159292035398, + "grad_norm": 0.11669921875, + "learning_rate": 4.214601769911505e-06, + "loss": 0.7761, + "step": 2092 + }, + { + "epoch": 2.315265486725664, + "grad_norm": 0.12158203125, + "learning_rate": 4.211836283185841e-06, + "loss": 0.7543, + "step": 2093 + }, + { + "epoch": 2.316371681415929, + "grad_norm": 0.12158203125, + "learning_rate": 4.209070796460177e-06, + "loss": 0.7484, + "step": 2094 + }, + { + "epoch": 2.317477876106195, + "grad_norm": 0.11962890625, + "learning_rate": 4.2063053097345135e-06, + "loss": 0.819, + "step": 2095 + }, + { + "epoch": 2.3185840707964602, + "grad_norm": 0.10498046875, + "learning_rate": 4.20353982300885e-06, + "loss": 0.7714, + "step": 2096 + }, + { + "epoch": 2.3196902654867255, + "grad_norm": 0.1240234375, + "learning_rate": 4.200774336283186e-06, + "loss": 0.7243, + "step": 2097 + }, + { + "epoch": 2.3207964601769913, + "grad_norm": 0.109375, + "learning_rate": 4.198008849557522e-06, + "loss": 0.8015, + "step": 2098 + }, + { + "epoch": 2.3219026548672566, + "grad_norm": 0.1103515625, + "learning_rate": 4.195243362831859e-06, + "loss": 0.7713, + "step": 2099 + }, + { + "epoch": 2.3230088495575223, + "grad_norm": 0.11572265625, + "learning_rate": 4.192477876106195e-06, + "loss": 0.7632, + "step": 2100 + }, + { + "epoch": 2.3241150442477876, + "grad_norm": 0.111328125, + "learning_rate": 4.189712389380531e-06, + "loss": 0.7823, + "step": 2101 + }, + { + "epoch": 2.3252212389380533, + "grad_norm": 0.1044921875, + "learning_rate": 4.186946902654867e-06, + "loss": 0.7629, + "step": 2102 + }, + { + "epoch": 2.3263274336283186, + "grad_norm": 0.12451171875, + "learning_rate": 4.184181415929204e-06, + "loss": 0.7823, + "step": 2103 + }, + { + "epoch": 2.327433628318584, + "grad_norm": 0.12158203125, + "learning_rate": 4.18141592920354e-06, + "loss": 0.7994, + "step": 2104 + }, + { + "epoch": 2.3285398230088497, + "grad_norm": 0.10888671875, + "learning_rate": 4.178650442477876e-06, + "loss": 0.7742, + "step": 2105 + }, + { + "epoch": 2.329646017699115, + "grad_norm": 0.10595703125, + "learning_rate": 4.175884955752213e-06, + "loss": 0.778, + "step": 2106 + }, + { + "epoch": 2.3307522123893807, + "grad_norm": 0.11181640625, + "learning_rate": 4.1731194690265495e-06, + "loss": 0.7618, + "step": 2107 + }, + { + "epoch": 2.331858407079646, + "grad_norm": 0.10498046875, + "learning_rate": 4.170353982300886e-06, + "loss": 
0.7421, + "step": 2108 + }, + { + "epoch": 2.3329646017699117, + "grad_norm": 0.1435546875, + "learning_rate": 4.167588495575222e-06, + "loss": 0.8516, + "step": 2109 + }, + { + "epoch": 2.334070796460177, + "grad_norm": 0.11572265625, + "learning_rate": 4.164823008849558e-06, + "loss": 0.7567, + "step": 2110 + }, + { + "epoch": 2.3351769911504423, + "grad_norm": 0.11279296875, + "learning_rate": 4.162057522123894e-06, + "loss": 0.7531, + "step": 2111 + }, + { + "epoch": 2.336283185840708, + "grad_norm": 0.111328125, + "learning_rate": 4.15929203539823e-06, + "loss": 0.7477, + "step": 2112 + }, + { + "epoch": 2.3373893805309733, + "grad_norm": 0.10693359375, + "learning_rate": 4.156526548672566e-06, + "loss": 0.75, + "step": 2113 + }, + { + "epoch": 2.338495575221239, + "grad_norm": 0.11962890625, + "learning_rate": 4.153761061946903e-06, + "loss": 0.7808, + "step": 2114 + }, + { + "epoch": 2.3396017699115044, + "grad_norm": 0.10205078125, + "learning_rate": 4.150995575221239e-06, + "loss": 0.7435, + "step": 2115 + }, + { + "epoch": 2.34070796460177, + "grad_norm": 0.10693359375, + "learning_rate": 4.1482300884955755e-06, + "loss": 0.7744, + "step": 2116 + }, + { + "epoch": 2.3418141592920354, + "grad_norm": 0.11279296875, + "learning_rate": 4.145464601769912e-06, + "loss": 0.7774, + "step": 2117 + }, + { + "epoch": 2.3429203539823007, + "grad_norm": 0.11328125, + "learning_rate": 4.142699115044248e-06, + "loss": 0.7737, + "step": 2118 + }, + { + "epoch": 2.3440265486725664, + "grad_norm": 0.1171875, + "learning_rate": 4.139933628318584e-06, + "loss": 0.7803, + "step": 2119 + }, + { + "epoch": 2.3451327433628317, + "grad_norm": 0.11865234375, + "learning_rate": 4.137168141592921e-06, + "loss": 0.7661, + "step": 2120 + }, + { + "epoch": 2.3462389380530975, + "grad_norm": 0.10302734375, + "learning_rate": 4.134402654867257e-06, + "loss": 0.7429, + "step": 2121 + }, + { + "epoch": 2.3473451327433628, + "grad_norm": 0.11376953125, + "learning_rate": 4.131637168141593e-06, + "loss": 0.8049, + "step": 2122 + }, + { + "epoch": 2.3484513274336285, + "grad_norm": 0.09619140625, + "learning_rate": 4.128871681415929e-06, + "loss": 0.7612, + "step": 2123 + }, + { + "epoch": 2.349557522123894, + "grad_norm": 0.1123046875, + "learning_rate": 4.126106194690266e-06, + "loss": 0.7807, + "step": 2124 + }, + { + "epoch": 2.350663716814159, + "grad_norm": 0.11181640625, + "learning_rate": 4.123340707964602e-06, + "loss": 0.7756, + "step": 2125 + }, + { + "epoch": 2.351769911504425, + "grad_norm": 0.0966796875, + "learning_rate": 4.1205752212389385e-06, + "loss": 0.7361, + "step": 2126 + }, + { + "epoch": 2.35287610619469, + "grad_norm": 0.1103515625, + "learning_rate": 4.117809734513275e-06, + "loss": 0.8198, + "step": 2127 + }, + { + "epoch": 2.353982300884956, + "grad_norm": 0.11474609375, + "learning_rate": 4.115044247787611e-06, + "loss": 0.7218, + "step": 2128 + }, + { + "epoch": 2.355088495575221, + "grad_norm": 0.11181640625, + "learning_rate": 4.112278761061948e-06, + "loss": 0.7758, + "step": 2129 + }, + { + "epoch": 2.356194690265487, + "grad_norm": 0.11865234375, + "learning_rate": 4.109513274336284e-06, + "loss": 0.8069, + "step": 2130 + }, + { + "epoch": 2.357300884955752, + "grad_norm": 0.11376953125, + "learning_rate": 4.10674778761062e-06, + "loss": 0.751, + "step": 2131 + }, + { + "epoch": 2.3584070796460175, + "grad_norm": 0.10546875, + "learning_rate": 4.103982300884956e-06, + "loss": 0.7745, + "step": 2132 + }, + { + "epoch": 2.359513274336283, + "grad_norm": 0.10302734375, + 
"learning_rate": 4.101216814159292e-06, + "loss": 0.7856, + "step": 2133 + }, + { + "epoch": 2.3606194690265485, + "grad_norm": 0.1103515625, + "learning_rate": 4.098451327433628e-06, + "loss": 0.8163, + "step": 2134 + }, + { + "epoch": 2.3617256637168142, + "grad_norm": 0.1025390625, + "learning_rate": 4.0956858407079644e-06, + "loss": 0.7529, + "step": 2135 + }, + { + "epoch": 2.3628318584070795, + "grad_norm": 0.107421875, + "learning_rate": 4.092920353982301e-06, + "loss": 0.7598, + "step": 2136 + }, + { + "epoch": 2.3639380530973453, + "grad_norm": 0.1162109375, + "learning_rate": 4.0901548672566375e-06, + "loss": 0.7892, + "step": 2137 + }, + { + "epoch": 2.3650442477876106, + "grad_norm": 0.1083984375, + "learning_rate": 4.087389380530974e-06, + "loss": 0.7952, + "step": 2138 + }, + { + "epoch": 2.366150442477876, + "grad_norm": 0.10888671875, + "learning_rate": 4.08462389380531e-06, + "loss": 0.7613, + "step": 2139 + }, + { + "epoch": 2.3672566371681416, + "grad_norm": 0.1162109375, + "learning_rate": 4.081858407079646e-06, + "loss": 0.7926, + "step": 2140 + }, + { + "epoch": 2.368362831858407, + "grad_norm": 0.103515625, + "learning_rate": 4.079092920353983e-06, + "loss": 0.7445, + "step": 2141 + }, + { + "epoch": 2.3694690265486726, + "grad_norm": 0.10546875, + "learning_rate": 4.076327433628319e-06, + "loss": 0.784, + "step": 2142 + }, + { + "epoch": 2.370575221238938, + "grad_norm": 0.1181640625, + "learning_rate": 4.073561946902655e-06, + "loss": 0.7774, + "step": 2143 + }, + { + "epoch": 2.3716814159292037, + "grad_norm": 0.11376953125, + "learning_rate": 4.070796460176992e-06, + "loss": 0.7834, + "step": 2144 + }, + { + "epoch": 2.372787610619469, + "grad_norm": 0.11279296875, + "learning_rate": 4.068030973451328e-06, + "loss": 0.7923, + "step": 2145 + }, + { + "epoch": 2.3738938053097347, + "grad_norm": 0.1220703125, + "learning_rate": 4.065265486725664e-06, + "loss": 0.7481, + "step": 2146 + }, + { + "epoch": 2.375, + "grad_norm": 0.109375, + "learning_rate": 4.0625000000000005e-06, + "loss": 0.7544, + "step": 2147 + }, + { + "epoch": 2.3761061946902653, + "grad_norm": 0.1162109375, + "learning_rate": 4.059734513274337e-06, + "loss": 0.7754, + "step": 2148 + }, + { + "epoch": 2.377212389380531, + "grad_norm": 0.111328125, + "learning_rate": 4.056969026548673e-06, + "loss": 0.7807, + "step": 2149 + }, + { + "epoch": 2.3783185840707963, + "grad_norm": 0.11572265625, + "learning_rate": 4.054203539823009e-06, + "loss": 0.7597, + "step": 2150 + }, + { + "epoch": 2.379424778761062, + "grad_norm": 0.12890625, + "learning_rate": 4.051438053097346e-06, + "loss": 0.7413, + "step": 2151 + }, + { + "epoch": 2.3805309734513274, + "grad_norm": 0.10205078125, + "learning_rate": 4.048672566371682e-06, + "loss": 0.7698, + "step": 2152 + }, + { + "epoch": 2.381637168141593, + "grad_norm": 0.10693359375, + "learning_rate": 4.045907079646018e-06, + "loss": 0.7583, + "step": 2153 + }, + { + "epoch": 2.3827433628318584, + "grad_norm": 0.11767578125, + "learning_rate": 4.043141592920354e-06, + "loss": 0.7902, + "step": 2154 + }, + { + "epoch": 2.383849557522124, + "grad_norm": 0.09912109375, + "learning_rate": 4.04037610619469e-06, + "loss": 0.7444, + "step": 2155 + }, + { + "epoch": 2.3849557522123894, + "grad_norm": 0.11376953125, + "learning_rate": 4.0376106194690265e-06, + "loss": 0.7725, + "step": 2156 + }, + { + "epoch": 2.3860619469026547, + "grad_norm": 0.11083984375, + "learning_rate": 4.034845132743363e-06, + "loss": 0.7878, + "step": 2157 + }, + { + "epoch": 2.3871681415929205, + 
"grad_norm": 0.09765625, + "learning_rate": 4.0320796460176996e-06, + "loss": 0.7435, + "step": 2158 + }, + { + "epoch": 2.3882743362831858, + "grad_norm": 0.11279296875, + "learning_rate": 4.029314159292036e-06, + "loss": 0.8202, + "step": 2159 + }, + { + "epoch": 2.3893805309734515, + "grad_norm": 0.11181640625, + "learning_rate": 4.026548672566372e-06, + "loss": 0.7327, + "step": 2160 + }, + { + "epoch": 2.390486725663717, + "grad_norm": 0.119140625, + "learning_rate": 4.023783185840708e-06, + "loss": 0.7678, + "step": 2161 + }, + { + "epoch": 2.3915929203539825, + "grad_norm": 0.11669921875, + "learning_rate": 4.021017699115045e-06, + "loss": 0.7748, + "step": 2162 + }, + { + "epoch": 2.392699115044248, + "grad_norm": 0.10986328125, + "learning_rate": 4.018252212389381e-06, + "loss": 0.7778, + "step": 2163 + }, + { + "epoch": 2.393805309734513, + "grad_norm": 0.1123046875, + "learning_rate": 4.015486725663717e-06, + "loss": 0.7402, + "step": 2164 + }, + { + "epoch": 2.394911504424779, + "grad_norm": 0.10498046875, + "learning_rate": 4.012721238938053e-06, + "loss": 0.7566, + "step": 2165 + }, + { + "epoch": 2.396017699115044, + "grad_norm": 0.12158203125, + "learning_rate": 4.00995575221239e-06, + "loss": 0.7851, + "step": 2166 + }, + { + "epoch": 2.39712389380531, + "grad_norm": 0.11328125, + "learning_rate": 4.007190265486726e-06, + "loss": 0.7562, + "step": 2167 + }, + { + "epoch": 2.398230088495575, + "grad_norm": 0.11376953125, + "learning_rate": 4.0044247787610625e-06, + "loss": 0.7639, + "step": 2168 + }, + { + "epoch": 2.399336283185841, + "grad_norm": 0.10546875, + "learning_rate": 4.001659292035399e-06, + "loss": 0.7501, + "step": 2169 + }, + { + "epoch": 2.400442477876106, + "grad_norm": 0.1142578125, + "learning_rate": 3.998893805309735e-06, + "loss": 0.7584, + "step": 2170 + }, + { + "epoch": 2.4015486725663715, + "grad_norm": 0.10595703125, + "learning_rate": 3.996128318584071e-06, + "loss": 0.7279, + "step": 2171 + }, + { + "epoch": 2.4026548672566372, + "grad_norm": 0.130859375, + "learning_rate": 3.993362831858407e-06, + "loss": 0.7815, + "step": 2172 + }, + { + "epoch": 2.4037610619469025, + "grad_norm": 0.10693359375, + "learning_rate": 3.990597345132743e-06, + "loss": 0.7499, + "step": 2173 + }, + { + "epoch": 2.4048672566371683, + "grad_norm": 0.1123046875, + "learning_rate": 3.98783185840708e-06, + "loss": 0.7765, + "step": 2174 + }, + { + "epoch": 2.4059734513274336, + "grad_norm": 0.1181640625, + "learning_rate": 3.985066371681416e-06, + "loss": 0.804, + "step": 2175 + }, + { + "epoch": 2.4070796460176993, + "grad_norm": 0.1064453125, + "learning_rate": 3.982300884955752e-06, + "loss": 0.7739, + "step": 2176 + }, + { + "epoch": 2.4081858407079646, + "grad_norm": 0.103515625, + "learning_rate": 3.9795353982300885e-06, + "loss": 0.7588, + "step": 2177 + }, + { + "epoch": 2.40929203539823, + "grad_norm": 0.1025390625, + "learning_rate": 3.976769911504425e-06, + "loss": 0.7367, + "step": 2178 + }, + { + "epoch": 2.4103982300884956, + "grad_norm": 0.109375, + "learning_rate": 3.974004424778762e-06, + "loss": 0.7083, + "step": 2179 + }, + { + "epoch": 2.411504424778761, + "grad_norm": 0.10400390625, + "learning_rate": 3.971238938053098e-06, + "loss": 0.7451, + "step": 2180 + }, + { + "epoch": 2.4126106194690267, + "grad_norm": 0.12353515625, + "learning_rate": 3.968473451327434e-06, + "loss": 0.7788, + "step": 2181 + }, + { + "epoch": 2.413716814159292, + "grad_norm": 0.1181640625, + "learning_rate": 3.965707964601771e-06, + "loss": 0.7641, + "step": 2182 + }, + { + 
"epoch": 2.4148230088495577, + "grad_norm": 0.1103515625, + "learning_rate": 3.962942477876107e-06, + "loss": 0.7446, + "step": 2183 + }, + { + "epoch": 2.415929203539823, + "grad_norm": 0.10986328125, + "learning_rate": 3.960176991150443e-06, + "loss": 0.7799, + "step": 2184 + }, + { + "epoch": 2.4170353982300883, + "grad_norm": 0.11279296875, + "learning_rate": 3.957411504424779e-06, + "loss": 0.7336, + "step": 2185 + }, + { + "epoch": 2.418141592920354, + "grad_norm": 0.12109375, + "learning_rate": 3.954646017699115e-06, + "loss": 0.7699, + "step": 2186 + }, + { + "epoch": 2.4192477876106193, + "grad_norm": 0.10302734375, + "learning_rate": 3.9518805309734515e-06, + "loss": 0.7411, + "step": 2187 + }, + { + "epoch": 2.420353982300885, + "grad_norm": 0.10009765625, + "learning_rate": 3.949115044247788e-06, + "loss": 0.7705, + "step": 2188 + }, + { + "epoch": 2.4214601769911503, + "grad_norm": 0.11279296875, + "learning_rate": 3.9463495575221246e-06, + "loss": 0.7487, + "step": 2189 + }, + { + "epoch": 2.422566371681416, + "grad_norm": 0.107421875, + "learning_rate": 3.943584070796461e-06, + "loss": 0.7946, + "step": 2190 + }, + { + "epoch": 2.4236725663716814, + "grad_norm": 0.10400390625, + "learning_rate": 3.940818584070797e-06, + "loss": 0.7598, + "step": 2191 + }, + { + "epoch": 2.4247787610619467, + "grad_norm": 0.109375, + "learning_rate": 3.938053097345133e-06, + "loss": 0.791, + "step": 2192 + }, + { + "epoch": 2.4258849557522124, + "grad_norm": 0.11181640625, + "learning_rate": 3.935287610619469e-06, + "loss": 0.789, + "step": 2193 + }, + { + "epoch": 2.4269911504424777, + "grad_norm": 0.1142578125, + "learning_rate": 3.932522123893805e-06, + "loss": 0.7758, + "step": 2194 + }, + { + "epoch": 2.4280973451327434, + "grad_norm": 0.126953125, + "learning_rate": 3.929756637168141e-06, + "loss": 0.8354, + "step": 2195 + }, + { + "epoch": 2.4292035398230087, + "grad_norm": 0.10400390625, + "learning_rate": 3.926991150442478e-06, + "loss": 0.7728, + "step": 2196 + }, + { + "epoch": 2.4303097345132745, + "grad_norm": 0.11376953125, + "learning_rate": 3.924225663716814e-06, + "loss": 0.7867, + "step": 2197 + }, + { + "epoch": 2.4314159292035398, + "grad_norm": 0.1064453125, + "learning_rate": 3.9214601769911505e-06, + "loss": 0.747, + "step": 2198 + }, + { + "epoch": 2.432522123893805, + "grad_norm": 0.11083984375, + "learning_rate": 3.9186946902654875e-06, + "loss": 0.7275, + "step": 2199 + }, + { + "epoch": 2.433628318584071, + "grad_norm": 0.1181640625, + "learning_rate": 3.915929203539824e-06, + "loss": 0.7894, + "step": 2200 + }, + { + "epoch": 2.434734513274336, + "grad_norm": 0.11181640625, + "learning_rate": 3.91316371681416e-06, + "loss": 0.7704, + "step": 2201 + }, + { + "epoch": 2.435840707964602, + "grad_norm": 0.1357421875, + "learning_rate": 3.910398230088496e-06, + "loss": 0.8445, + "step": 2202 + }, + { + "epoch": 2.436946902654867, + "grad_norm": 0.11572265625, + "learning_rate": 3.907632743362832e-06, + "loss": 0.8033, + "step": 2203 + }, + { + "epoch": 2.438053097345133, + "grad_norm": 0.1083984375, + "learning_rate": 3.904867256637169e-06, + "loss": 0.7817, + "step": 2204 + }, + { + "epoch": 2.439159292035398, + "grad_norm": 0.1455078125, + "learning_rate": 3.902101769911505e-06, + "loss": 0.8007, + "step": 2205 + }, + { + "epoch": 2.440265486725664, + "grad_norm": 0.11083984375, + "learning_rate": 3.899336283185841e-06, + "loss": 0.7688, + "step": 2206 + }, + { + "epoch": 2.441371681415929, + "grad_norm": 0.10693359375, + "learning_rate": 3.896570796460177e-06, + 
"loss": 0.7746, + "step": 2207 + }, + { + "epoch": 2.442477876106195, + "grad_norm": 0.10009765625, + "learning_rate": 3.8938053097345135e-06, + "loss": 0.7497, + "step": 2208 + }, + { + "epoch": 2.4435840707964602, + "grad_norm": 0.10791015625, + "learning_rate": 3.89103982300885e-06, + "loss": 0.7858, + "step": 2209 + }, + { + "epoch": 2.4446902654867255, + "grad_norm": 0.107421875, + "learning_rate": 3.888274336283186e-06, + "loss": 0.778, + "step": 2210 + }, + { + "epoch": 2.4457964601769913, + "grad_norm": 0.10986328125, + "learning_rate": 3.885508849557522e-06, + "loss": 0.7778, + "step": 2211 + }, + { + "epoch": 2.4469026548672566, + "grad_norm": 0.10595703125, + "learning_rate": 3.882743362831859e-06, + "loss": 0.7368, + "step": 2212 + }, + { + "epoch": 2.4480088495575223, + "grad_norm": 0.111328125, + "learning_rate": 3.879977876106195e-06, + "loss": 0.775, + "step": 2213 + }, + { + "epoch": 2.4491150442477876, + "grad_norm": 0.11572265625, + "learning_rate": 3.877212389380531e-06, + "loss": 0.7285, + "step": 2214 + }, + { + "epoch": 2.4502212389380533, + "grad_norm": 0.1220703125, + "learning_rate": 3.874446902654867e-06, + "loss": 0.8135, + "step": 2215 + }, + { + "epoch": 2.4513274336283186, + "grad_norm": 0.1162109375, + "learning_rate": 3.871681415929203e-06, + "loss": 0.7797, + "step": 2216 + }, + { + "epoch": 2.452433628318584, + "grad_norm": 0.12060546875, + "learning_rate": 3.86891592920354e-06, + "loss": 0.777, + "step": 2217 + }, + { + "epoch": 2.4535398230088497, + "grad_norm": 0.11328125, + "learning_rate": 3.8661504424778764e-06, + "loss": 0.8014, + "step": 2218 + }, + { + "epoch": 2.454646017699115, + "grad_norm": 0.1142578125, + "learning_rate": 3.8633849557522126e-06, + "loss": 0.7906, + "step": 2219 + }, + { + "epoch": 2.4557522123893807, + "grad_norm": 0.138671875, + "learning_rate": 3.8606194690265495e-06, + "loss": 0.7628, + "step": 2220 + }, + { + "epoch": 2.456858407079646, + "grad_norm": 0.111328125, + "learning_rate": 3.857853982300886e-06, + "loss": 0.7668, + "step": 2221 + }, + { + "epoch": 2.4579646017699117, + "grad_norm": 0.11181640625, + "learning_rate": 3.855088495575222e-06, + "loss": 0.7817, + "step": 2222 + }, + { + "epoch": 2.459070796460177, + "grad_norm": 0.1064453125, + "learning_rate": 3.852323008849558e-06, + "loss": 0.7711, + "step": 2223 + }, + { + "epoch": 2.4601769911504423, + "grad_norm": 0.1044921875, + "learning_rate": 3.849557522123894e-06, + "loss": 0.7411, + "step": 2224 + }, + { + "epoch": 2.461283185840708, + "grad_norm": 0.10205078125, + "learning_rate": 3.84679203539823e-06, + "loss": 0.7766, + "step": 2225 + }, + { + "epoch": 2.4623893805309733, + "grad_norm": 0.1064453125, + "learning_rate": 3.844026548672566e-06, + "loss": 0.7806, + "step": 2226 + }, + { + "epoch": 2.463495575221239, + "grad_norm": 0.10302734375, + "learning_rate": 3.841261061946903e-06, + "loss": 0.7713, + "step": 2227 + }, + { + "epoch": 2.4646017699115044, + "grad_norm": 0.1025390625, + "learning_rate": 3.838495575221239e-06, + "loss": 0.7208, + "step": 2228 + }, + { + "epoch": 2.46570796460177, + "grad_norm": 0.1142578125, + "learning_rate": 3.8357300884955755e-06, + "loss": 0.76, + "step": 2229 + }, + { + "epoch": 2.4668141592920354, + "grad_norm": 0.11279296875, + "learning_rate": 3.832964601769912e-06, + "loss": 0.7487, + "step": 2230 + }, + { + "epoch": 2.4679203539823007, + "grad_norm": 0.1572265625, + "learning_rate": 3.830199115044248e-06, + "loss": 0.7623, + "step": 2231 + }, + { + "epoch": 2.4690265486725664, + "grad_norm": 0.1650390625, + 
"learning_rate": 3.827433628318584e-06, + "loss": 0.8855, + "step": 2232 + }, + { + "epoch": 2.4701327433628317, + "grad_norm": 0.1103515625, + "learning_rate": 3.82466814159292e-06, + "loss": 0.8102, + "step": 2233 + }, + { + "epoch": 2.4712389380530975, + "grad_norm": 0.1220703125, + "learning_rate": 3.821902654867257e-06, + "loss": 0.7549, + "step": 2234 + }, + { + "epoch": 2.4723451327433628, + "grad_norm": 0.1259765625, + "learning_rate": 3.819137168141593e-06, + "loss": 0.8482, + "step": 2235 + }, + { + "epoch": 2.4734513274336285, + "grad_norm": 0.11767578125, + "learning_rate": 3.816371681415929e-06, + "loss": 0.8275, + "step": 2236 + }, + { + "epoch": 2.474557522123894, + "grad_norm": 0.11474609375, + "learning_rate": 3.813606194690266e-06, + "loss": 0.7511, + "step": 2237 + }, + { + "epoch": 2.475663716814159, + "grad_norm": 0.10986328125, + "learning_rate": 3.810840707964602e-06, + "loss": 0.7424, + "step": 2238 + }, + { + "epoch": 2.476769911504425, + "grad_norm": 0.11572265625, + "learning_rate": 3.808075221238938e-06, + "loss": 0.7898, + "step": 2239 + }, + { + "epoch": 2.47787610619469, + "grad_norm": 0.11279296875, + "learning_rate": 3.8053097345132746e-06, + "loss": 0.8169, + "step": 2240 + }, + { + "epoch": 2.478982300884956, + "grad_norm": 0.11767578125, + "learning_rate": 3.8025442477876107e-06, + "loss": 0.7463, + "step": 2241 + }, + { + "epoch": 2.480088495575221, + "grad_norm": 0.10107421875, + "learning_rate": 3.7997787610619473e-06, + "loss": 0.743, + "step": 2242 + }, + { + "epoch": 2.481194690265487, + "grad_norm": 0.1298828125, + "learning_rate": 3.797013274336284e-06, + "loss": 0.7448, + "step": 2243 + }, + { + "epoch": 2.482300884955752, + "grad_norm": 0.1083984375, + "learning_rate": 3.79424778761062e-06, + "loss": 0.758, + "step": 2244 + }, + { + "epoch": 2.4834070796460175, + "grad_norm": 0.11767578125, + "learning_rate": 3.791482300884956e-06, + "loss": 0.7918, + "step": 2245 + }, + { + "epoch": 2.484513274336283, + "grad_norm": 0.1220703125, + "learning_rate": 3.7887168141592922e-06, + "loss": 0.7895, + "step": 2246 + }, + { + "epoch": 2.4856194690265485, + "grad_norm": 0.10888671875, + "learning_rate": 3.7859513274336283e-06, + "loss": 0.7391, + "step": 2247 + }, + { + "epoch": 2.4867256637168142, + "grad_norm": 0.12255859375, + "learning_rate": 3.783185840707965e-06, + "loss": 0.7993, + "step": 2248 + }, + { + "epoch": 2.4878318584070795, + "grad_norm": 0.107421875, + "learning_rate": 3.7804203539823014e-06, + "loss": 0.7699, + "step": 2249 + }, + { + "epoch": 2.4889380530973453, + "grad_norm": 0.10693359375, + "learning_rate": 3.7776548672566376e-06, + "loss": 0.76, + "step": 2250 + }, + { + "epoch": 2.4900442477876106, + "grad_norm": 0.11767578125, + "learning_rate": 3.7748893805309737e-06, + "loss": 0.7614, + "step": 2251 + }, + { + "epoch": 2.491150442477876, + "grad_norm": 0.10498046875, + "learning_rate": 3.7721238938053102e-06, + "loss": 0.7936, + "step": 2252 + }, + { + "epoch": 2.4922566371681416, + "grad_norm": 0.12109375, + "learning_rate": 3.7693584070796464e-06, + "loss": 0.8247, + "step": 2253 + }, + { + "epoch": 2.493362831858407, + "grad_norm": 0.1083984375, + "learning_rate": 3.7665929203539825e-06, + "loss": 0.8, + "step": 2254 + }, + { + "epoch": 2.4944690265486726, + "grad_norm": 0.10791015625, + "learning_rate": 3.7638274336283186e-06, + "loss": 0.7685, + "step": 2255 + }, + { + "epoch": 2.495575221238938, + "grad_norm": 0.12060546875, + "learning_rate": 3.7610619469026547e-06, + "loss": 0.7786, + "step": 2256 + }, + { + "epoch": 
2.4966814159292037, + "grad_norm": 0.1123046875, + "learning_rate": 3.7582964601769917e-06, + "loss": 0.7732, + "step": 2257 + }, + { + "epoch": 2.497787610619469, + "grad_norm": 0.1123046875, + "learning_rate": 3.755530973451328e-06, + "loss": 0.7763, + "step": 2258 + }, + { + "epoch": 2.4988938053097347, + "grad_norm": 0.0966796875, + "learning_rate": 3.752765486725664e-06, + "loss": 0.7181, + "step": 2259 + }, + { + "epoch": 2.5, + "grad_norm": 0.1259765625, + "learning_rate": 3.7500000000000005e-06, + "loss": 0.8055, + "step": 2260 + }, + { + "epoch": 2.5011061946902657, + "grad_norm": 0.126953125, + "learning_rate": 3.7472345132743366e-06, + "loss": 0.8143, + "step": 2261 + }, + { + "epoch": 2.502212389380531, + "grad_norm": 0.11669921875, + "learning_rate": 3.7444690265486728e-06, + "loss": 0.7515, + "step": 2262 + }, + { + "epoch": 2.5033185840707963, + "grad_norm": 0.125, + "learning_rate": 3.741703539823009e-06, + "loss": 0.8439, + "step": 2263 + }, + { + "epoch": 2.504424778761062, + "grad_norm": 0.11181640625, + "learning_rate": 3.738938053097346e-06, + "loss": 0.7767, + "step": 2264 + }, + { + "epoch": 2.5055309734513274, + "grad_norm": 0.1181640625, + "learning_rate": 3.736172566371682e-06, + "loss": 0.7788, + "step": 2265 + }, + { + "epoch": 2.5066371681415927, + "grad_norm": 0.10791015625, + "learning_rate": 3.733407079646018e-06, + "loss": 0.7687, + "step": 2266 + }, + { + "epoch": 2.5077433628318584, + "grad_norm": 0.1357421875, + "learning_rate": 3.7306415929203542e-06, + "loss": 0.8525, + "step": 2267 + }, + { + "epoch": 2.508849557522124, + "grad_norm": 0.10791015625, + "learning_rate": 3.7278761061946904e-06, + "loss": 0.7555, + "step": 2268 + }, + { + "epoch": 2.5099557522123894, + "grad_norm": 0.126953125, + "learning_rate": 3.725110619469027e-06, + "loss": 0.802, + "step": 2269 + }, + { + "epoch": 2.5110619469026547, + "grad_norm": 0.11181640625, + "learning_rate": 3.722345132743363e-06, + "loss": 0.7642, + "step": 2270 + }, + { + "epoch": 2.5121681415929205, + "grad_norm": 0.11474609375, + "learning_rate": 3.719579646017699e-06, + "loss": 0.7416, + "step": 2271 + }, + { + "epoch": 2.5132743362831858, + "grad_norm": 0.1083984375, + "learning_rate": 3.7168141592920357e-06, + "loss": 0.7791, + "step": 2272 + }, + { + "epoch": 2.5143805309734515, + "grad_norm": 0.1103515625, + "learning_rate": 3.7140486725663723e-06, + "loss": 0.7638, + "step": 2273 + }, + { + "epoch": 2.515486725663717, + "grad_norm": 0.10986328125, + "learning_rate": 3.7112831858407084e-06, + "loss": 0.7954, + "step": 2274 + }, + { + "epoch": 2.5165929203539825, + "grad_norm": 0.10546875, + "learning_rate": 3.7085176991150445e-06, + "loss": 0.7605, + "step": 2275 + }, + { + "epoch": 2.517699115044248, + "grad_norm": 0.11083984375, + "learning_rate": 3.7057522123893807e-06, + "loss": 0.704, + "step": 2276 + }, + { + "epoch": 2.518805309734513, + "grad_norm": 0.10546875, + "learning_rate": 3.7029867256637168e-06, + "loss": 0.7748, + "step": 2277 + }, + { + "epoch": 2.519911504424779, + "grad_norm": 0.10546875, + "learning_rate": 3.7002212389380533e-06, + "loss": 0.7408, + "step": 2278 + }, + { + "epoch": 2.521017699115044, + "grad_norm": 0.11279296875, + "learning_rate": 3.69745575221239e-06, + "loss": 0.7759, + "step": 2279 + }, + { + "epoch": 2.52212389380531, + "grad_norm": 0.171875, + "learning_rate": 3.694690265486726e-06, + "loss": 0.7699, + "step": 2280 + }, + { + "epoch": 2.523230088495575, + "grad_norm": 0.1142578125, + "learning_rate": 3.6919247787610626e-06, + "loss": 0.8192, + "step": 2281 
+ }, + { + "epoch": 2.524336283185841, + "grad_norm": 0.09716796875, + "learning_rate": 3.6891592920353987e-06, + "loss": 0.7173, + "step": 2282 + }, + { + "epoch": 2.525442477876106, + "grad_norm": 0.10546875, + "learning_rate": 3.686393805309735e-06, + "loss": 0.8052, + "step": 2283 + }, + { + "epoch": 2.5265486725663715, + "grad_norm": 0.1015625, + "learning_rate": 3.683628318584071e-06, + "loss": 0.7501, + "step": 2284 + }, + { + "epoch": 2.5276548672566372, + "grad_norm": 0.1044921875, + "learning_rate": 3.680862831858407e-06, + "loss": 0.7899, + "step": 2285 + }, + { + "epoch": 2.5287610619469025, + "grad_norm": 0.119140625, + "learning_rate": 3.6780973451327436e-06, + "loss": 0.7858, + "step": 2286 + }, + { + "epoch": 2.5298672566371683, + "grad_norm": 0.1181640625, + "learning_rate": 3.67533185840708e-06, + "loss": 0.776, + "step": 2287 + }, + { + "epoch": 2.5309734513274336, + "grad_norm": 0.1416015625, + "learning_rate": 3.6725663716814163e-06, + "loss": 0.8493, + "step": 2288 + }, + { + "epoch": 2.5320796460176993, + "grad_norm": 0.1025390625, + "learning_rate": 3.6698008849557524e-06, + "loss": 0.7302, + "step": 2289 + }, + { + "epoch": 2.5331858407079646, + "grad_norm": 0.103515625, + "learning_rate": 3.667035398230089e-06, + "loss": 0.7547, + "step": 2290 + }, + { + "epoch": 2.53429203539823, + "grad_norm": 0.1171875, + "learning_rate": 3.664269911504425e-06, + "loss": 0.7894, + "step": 2291 + }, + { + "epoch": 2.5353982300884956, + "grad_norm": 0.12109375, + "learning_rate": 3.661504424778761e-06, + "loss": 0.8005, + "step": 2292 + }, + { + "epoch": 2.536504424778761, + "grad_norm": 0.10546875, + "learning_rate": 3.6587389380530973e-06, + "loss": 0.6834, + "step": 2293 + }, + { + "epoch": 2.5376106194690267, + "grad_norm": 0.1044921875, + "learning_rate": 3.6559734513274343e-06, + "loss": 0.7679, + "step": 2294 + }, + { + "epoch": 2.538716814159292, + "grad_norm": 0.103515625, + "learning_rate": 3.6532079646017704e-06, + "loss": 0.741, + "step": 2295 + }, + { + "epoch": 2.5398230088495577, + "grad_norm": 0.10400390625, + "learning_rate": 3.6504424778761066e-06, + "loss": 0.7697, + "step": 2296 + }, + { + "epoch": 2.540929203539823, + "grad_norm": 0.11865234375, + "learning_rate": 3.6476769911504427e-06, + "loss": 0.749, + "step": 2297 + }, + { + "epoch": 2.5420353982300883, + "grad_norm": 0.1328125, + "learning_rate": 3.6449115044247792e-06, + "loss": 0.788, + "step": 2298 + }, + { + "epoch": 2.543141592920354, + "grad_norm": 0.107421875, + "learning_rate": 3.6421460176991154e-06, + "loss": 0.7912, + "step": 2299 + }, + { + "epoch": 2.5442477876106193, + "grad_norm": 0.1064453125, + "learning_rate": 3.6393805309734515e-06, + "loss": 0.7613, + "step": 2300 + }, + { + "epoch": 2.545353982300885, + "grad_norm": 0.11376953125, + "learning_rate": 3.6366150442477876e-06, + "loss": 0.7685, + "step": 2301 + }, + { + "epoch": 2.5464601769911503, + "grad_norm": 0.123046875, + "learning_rate": 3.6338495575221246e-06, + "loss": 0.8087, + "step": 2302 + }, + { + "epoch": 2.547566371681416, + "grad_norm": 0.1572265625, + "learning_rate": 3.6310840707964607e-06, + "loss": 0.7709, + "step": 2303 + }, + { + "epoch": 2.5486725663716814, + "grad_norm": 0.1005859375, + "learning_rate": 3.628318584070797e-06, + "loss": 0.7379, + "step": 2304 + }, + { + "epoch": 2.5497787610619467, + "grad_norm": 0.11181640625, + "learning_rate": 3.625553097345133e-06, + "loss": 0.7988, + "step": 2305 + }, + { + "epoch": 2.5508849557522124, + "grad_norm": 0.10302734375, + "learning_rate": 3.622787610619469e-06, + 
"loss": 0.7546, + "step": 2306 + }, + { + "epoch": 2.551991150442478, + "grad_norm": 0.0986328125, + "learning_rate": 3.6200221238938056e-06, + "loss": 0.7329, + "step": 2307 + }, + { + "epoch": 2.5530973451327434, + "grad_norm": 0.1171875, + "learning_rate": 3.6172566371681418e-06, + "loss": 0.8194, + "step": 2308 + }, + { + "epoch": 2.5542035398230087, + "grad_norm": 0.10595703125, + "learning_rate": 3.6144911504424783e-06, + "loss": 0.7745, + "step": 2309 + }, + { + "epoch": 2.5553097345132745, + "grad_norm": 0.12158203125, + "learning_rate": 3.611725663716815e-06, + "loss": 0.7692, + "step": 2310 + }, + { + "epoch": 2.5564159292035398, + "grad_norm": 0.1142578125, + "learning_rate": 3.608960176991151e-06, + "loss": 0.7851, + "step": 2311 + }, + { + "epoch": 2.557522123893805, + "grad_norm": 0.1484375, + "learning_rate": 3.606194690265487e-06, + "loss": 0.7717, + "step": 2312 + }, + { + "epoch": 2.558628318584071, + "grad_norm": 0.11474609375, + "learning_rate": 3.6034292035398232e-06, + "loss": 0.8096, + "step": 2313 + }, + { + "epoch": 2.5597345132743365, + "grad_norm": 0.10546875, + "learning_rate": 3.6006637168141594e-06, + "loss": 0.762, + "step": 2314 + }, + { + "epoch": 2.560840707964602, + "grad_norm": 0.1015625, + "learning_rate": 3.5978982300884955e-06, + "loss": 0.7342, + "step": 2315 + }, + { + "epoch": 2.561946902654867, + "grad_norm": 0.1142578125, + "learning_rate": 3.595132743362832e-06, + "loss": 0.7764, + "step": 2316 + }, + { + "epoch": 2.563053097345133, + "grad_norm": 0.0986328125, + "learning_rate": 3.5923672566371686e-06, + "loss": 0.7275, + "step": 2317 + }, + { + "epoch": 2.564159292035398, + "grad_norm": 0.1142578125, + "learning_rate": 3.5896017699115047e-06, + "loss": 0.7446, + "step": 2318 + }, + { + "epoch": 2.5652654867256635, + "grad_norm": 0.126953125, + "learning_rate": 3.5868362831858413e-06, + "loss": 0.7688, + "step": 2319 + }, + { + "epoch": 2.566371681415929, + "grad_norm": 0.1005859375, + "learning_rate": 3.5840707964601774e-06, + "loss": 0.7119, + "step": 2320 + }, + { + "epoch": 2.567477876106195, + "grad_norm": 0.11279296875, + "learning_rate": 3.5813053097345135e-06, + "loss": 0.7609, + "step": 2321 + }, + { + "epoch": 2.5685840707964602, + "grad_norm": 0.1103515625, + "learning_rate": 3.5785398230088496e-06, + "loss": 0.7727, + "step": 2322 + }, + { + "epoch": 2.5696902654867255, + "grad_norm": 0.2158203125, + "learning_rate": 3.5757743362831858e-06, + "loss": 0.7741, + "step": 2323 + }, + { + "epoch": 2.5707964601769913, + "grad_norm": 0.1259765625, + "learning_rate": 3.5730088495575223e-06, + "loss": 0.7593, + "step": 2324 + }, + { + "epoch": 2.5719026548672566, + "grad_norm": 0.11328125, + "learning_rate": 3.570243362831859e-06, + "loss": 0.7676, + "step": 2325 + }, + { + "epoch": 2.573008849557522, + "grad_norm": 0.1103515625, + "learning_rate": 3.567477876106195e-06, + "loss": 0.7472, + "step": 2326 + }, + { + "epoch": 2.5741150442477876, + "grad_norm": 0.1123046875, + "learning_rate": 3.564712389380531e-06, + "loss": 0.8022, + "step": 2327 + }, + { + "epoch": 2.5752212389380533, + "grad_norm": 0.111328125, + "learning_rate": 3.5619469026548677e-06, + "loss": 0.767, + "step": 2328 + }, + { + "epoch": 2.5763274336283186, + "grad_norm": 0.138671875, + "learning_rate": 3.559181415929204e-06, + "loss": 0.7512, + "step": 2329 + }, + { + "epoch": 2.577433628318584, + "grad_norm": 0.130859375, + "learning_rate": 3.55641592920354e-06, + "loss": 0.8395, + "step": 2330 + }, + { + "epoch": 2.5785398230088497, + "grad_norm": 0.11328125, + 
"learning_rate": 3.553650442477876e-06, + "loss": 0.7185, + "step": 2331 + }, + { + "epoch": 2.579646017699115, + "grad_norm": 0.109375, + "learning_rate": 3.550884955752213e-06, + "loss": 0.7754, + "step": 2332 + }, + { + "epoch": 2.5807522123893807, + "grad_norm": 0.1015625, + "learning_rate": 3.548119469026549e-06, + "loss": 0.7446, + "step": 2333 + }, + { + "epoch": 2.581858407079646, + "grad_norm": 0.1083984375, + "learning_rate": 3.5453539823008853e-06, + "loss": 0.7691, + "step": 2334 + }, + { + "epoch": 2.5829646017699117, + "grad_norm": 0.107421875, + "learning_rate": 3.5425884955752214e-06, + "loss": 0.7447, + "step": 2335 + }, + { + "epoch": 2.584070796460177, + "grad_norm": 0.1162109375, + "learning_rate": 3.539823008849558e-06, + "loss": 0.8414, + "step": 2336 + }, + { + "epoch": 2.5851769911504423, + "grad_norm": 0.12353515625, + "learning_rate": 3.537057522123894e-06, + "loss": 0.7883, + "step": 2337 + }, + { + "epoch": 2.586283185840708, + "grad_norm": 0.1552734375, + "learning_rate": 3.53429203539823e-06, + "loss": 0.7557, + "step": 2338 + }, + { + "epoch": 2.5873893805309733, + "grad_norm": 0.11865234375, + "learning_rate": 3.5315265486725663e-06, + "loss": 0.7514, + "step": 2339 + }, + { + "epoch": 2.588495575221239, + "grad_norm": 0.111328125, + "learning_rate": 3.5287610619469033e-06, + "loss": 0.7463, + "step": 2340 + }, + { + "epoch": 2.5896017699115044, + "grad_norm": 0.11181640625, + "learning_rate": 3.5259955752212394e-06, + "loss": 0.7747, + "step": 2341 + }, + { + "epoch": 2.59070796460177, + "grad_norm": 0.1171875, + "learning_rate": 3.5232300884955756e-06, + "loss": 0.7773, + "step": 2342 + }, + { + "epoch": 2.5918141592920354, + "grad_norm": 0.1533203125, + "learning_rate": 3.5204646017699117e-06, + "loss": 0.8396, + "step": 2343 + }, + { + "epoch": 2.5929203539823007, + "grad_norm": 0.11279296875, + "learning_rate": 3.517699115044248e-06, + "loss": 0.7764, + "step": 2344 + }, + { + "epoch": 2.5940265486725664, + "grad_norm": 0.119140625, + "learning_rate": 3.5149336283185844e-06, + "loss": 0.7856, + "step": 2345 + }, + { + "epoch": 2.5951327433628317, + "grad_norm": 0.1142578125, + "learning_rate": 3.5121681415929205e-06, + "loss": 0.7651, + "step": 2346 + }, + { + "epoch": 2.5962389380530975, + "grad_norm": 0.1025390625, + "learning_rate": 3.509402654867257e-06, + "loss": 0.7326, + "step": 2347 + }, + { + "epoch": 2.5973451327433628, + "grad_norm": 0.10693359375, + "learning_rate": 3.5066371681415936e-06, + "loss": 0.749, + "step": 2348 + }, + { + "epoch": 2.5984513274336285, + "grad_norm": 0.11328125, + "learning_rate": 3.5038716814159297e-06, + "loss": 0.8155, + "step": 2349 + }, + { + "epoch": 2.599557522123894, + "grad_norm": 0.10986328125, + "learning_rate": 3.501106194690266e-06, + "loss": 0.759, + "step": 2350 + }, + { + "epoch": 2.600663716814159, + "grad_norm": 0.1142578125, + "learning_rate": 3.498340707964602e-06, + "loss": 0.7975, + "step": 2351 + }, + { + "epoch": 2.601769911504425, + "grad_norm": 0.11572265625, + "learning_rate": 3.495575221238938e-06, + "loss": 0.7684, + "step": 2352 + }, + { + "epoch": 2.60287610619469, + "grad_norm": 0.10986328125, + "learning_rate": 3.4928097345132746e-06, + "loss": 0.8249, + "step": 2353 + }, + { + "epoch": 2.603982300884956, + "grad_norm": 0.1171875, + "learning_rate": 3.4900442477876108e-06, + "loss": 0.7311, + "step": 2354 + }, + { + "epoch": 2.605088495575221, + "grad_norm": 0.1318359375, + "learning_rate": 3.4872787610619473e-06, + "loss": 0.7686, + "step": 2355 + }, + { + "epoch": 2.606194690265487, 
+ "grad_norm": 0.10498046875, + "learning_rate": 3.4845132743362834e-06, + "loss": 0.755, + "step": 2356 + }, + { + "epoch": 2.607300884955752, + "grad_norm": 0.1181640625, + "learning_rate": 3.48174778761062e-06, + "loss": 0.7885, + "step": 2357 + }, + { + "epoch": 2.6084070796460175, + "grad_norm": 0.1328125, + "learning_rate": 3.478982300884956e-06, + "loss": 0.7568, + "step": 2358 + }, + { + "epoch": 2.609513274336283, + "grad_norm": 0.107421875, + "learning_rate": 3.4762168141592922e-06, + "loss": 0.7547, + "step": 2359 + }, + { + "epoch": 2.6106194690265485, + "grad_norm": 0.1005859375, + "learning_rate": 3.4734513274336284e-06, + "loss": 0.7532, + "step": 2360 + }, + { + "epoch": 2.6117256637168142, + "grad_norm": 0.1123046875, + "learning_rate": 3.4706858407079645e-06, + "loss": 0.7883, + "step": 2361 + }, + { + "epoch": 2.6128318584070795, + "grad_norm": 0.11572265625, + "learning_rate": 3.4679203539823015e-06, + "loss": 0.7652, + "step": 2362 + }, + { + "epoch": 2.6139380530973453, + "grad_norm": 0.11767578125, + "learning_rate": 3.4651548672566376e-06, + "loss": 0.8039, + "step": 2363 + }, + { + "epoch": 2.6150442477876106, + "grad_norm": 0.12158203125, + "learning_rate": 3.4623893805309737e-06, + "loss": 0.8056, + "step": 2364 + }, + { + "epoch": 2.616150442477876, + "grad_norm": 0.1298828125, + "learning_rate": 3.45962389380531e-06, + "loss": 0.8017, + "step": 2365 + }, + { + "epoch": 2.6172566371681416, + "grad_norm": 0.115234375, + "learning_rate": 3.4568584070796464e-06, + "loss": 0.7157, + "step": 2366 + }, + { + "epoch": 2.6183628318584073, + "grad_norm": 0.10986328125, + "learning_rate": 3.4540929203539825e-06, + "loss": 0.7503, + "step": 2367 + }, + { + "epoch": 2.6194690265486726, + "grad_norm": 0.1064453125, + "learning_rate": 3.4513274336283186e-06, + "loss": 0.8025, + "step": 2368 + }, + { + "epoch": 2.620575221238938, + "grad_norm": 0.1044921875, + "learning_rate": 3.4485619469026548e-06, + "loss": 0.7418, + "step": 2369 + }, + { + "epoch": 2.6216814159292037, + "grad_norm": 0.11865234375, + "learning_rate": 3.4457964601769917e-06, + "loss": 0.7625, + "step": 2370 + }, + { + "epoch": 2.622787610619469, + "grad_norm": 0.1083984375, + "learning_rate": 3.443030973451328e-06, + "loss": 0.7686, + "step": 2371 + }, + { + "epoch": 2.6238938053097343, + "grad_norm": 0.1142578125, + "learning_rate": 3.440265486725664e-06, + "loss": 0.8117, + "step": 2372 + }, + { + "epoch": 2.625, + "grad_norm": 0.115234375, + "learning_rate": 3.4375e-06, + "loss": 0.8006, + "step": 2373 + }, + { + "epoch": 2.6261061946902657, + "grad_norm": 0.10986328125, + "learning_rate": 3.4347345132743367e-06, + "loss": 0.7841, + "step": 2374 + }, + { + "epoch": 2.627212389380531, + "grad_norm": 0.1171875, + "learning_rate": 3.431969026548673e-06, + "loss": 0.7664, + "step": 2375 + }, + { + "epoch": 2.6283185840707963, + "grad_norm": 0.11181640625, + "learning_rate": 3.429203539823009e-06, + "loss": 0.7188, + "step": 2376 + }, + { + "epoch": 2.629424778761062, + "grad_norm": 0.10986328125, + "learning_rate": 3.4264380530973455e-06, + "loss": 0.7688, + "step": 2377 + }, + { + "epoch": 2.6305309734513274, + "grad_norm": 0.1123046875, + "learning_rate": 3.423672566371682e-06, + "loss": 0.7756, + "step": 2378 + }, + { + "epoch": 2.6316371681415927, + "grad_norm": 0.10302734375, + "learning_rate": 3.420907079646018e-06, + "loss": 0.7922, + "step": 2379 + }, + { + "epoch": 2.6327433628318584, + "grad_norm": 0.10546875, + "learning_rate": 3.4181415929203543e-06, + "loss": 0.7619, + "step": 2380 + }, + { + 
"epoch": 2.633849557522124, + "grad_norm": 0.10400390625, + "learning_rate": 3.4153761061946904e-06, + "loss": 0.7587, + "step": 2381 + }, + { + "epoch": 2.6349557522123894, + "grad_norm": 0.11279296875, + "learning_rate": 3.4126106194690265e-06, + "loss": 0.7529, + "step": 2382 + }, + { + "epoch": 2.6360619469026547, + "grad_norm": 0.1015625, + "learning_rate": 3.409845132743363e-06, + "loss": 0.7296, + "step": 2383 + }, + { + "epoch": 2.6371681415929205, + "grad_norm": 0.1181640625, + "learning_rate": 3.407079646017699e-06, + "loss": 0.7776, + "step": 2384 + }, + { + "epoch": 2.6382743362831858, + "grad_norm": 0.12353515625, + "learning_rate": 3.4043141592920358e-06, + "loss": 0.8025, + "step": 2385 + }, + { + "epoch": 2.6393805309734515, + "grad_norm": 0.1220703125, + "learning_rate": 3.4015486725663723e-06, + "loss": 0.7827, + "step": 2386 + }, + { + "epoch": 2.640486725663717, + "grad_norm": 0.12890625, + "learning_rate": 3.3987831858407084e-06, + "loss": 0.773, + "step": 2387 + }, + { + "epoch": 2.6415929203539825, + "grad_norm": 0.11669921875, + "learning_rate": 3.3960176991150446e-06, + "loss": 0.7548, + "step": 2388 + }, + { + "epoch": 2.642699115044248, + "grad_norm": 0.1318359375, + "learning_rate": 3.3932522123893807e-06, + "loss": 0.7402, + "step": 2389 + }, + { + "epoch": 2.643805309734513, + "grad_norm": 0.12060546875, + "learning_rate": 3.390486725663717e-06, + "loss": 0.7699, + "step": 2390 + }, + { + "epoch": 2.644911504424779, + "grad_norm": 0.115234375, + "learning_rate": 3.3877212389380534e-06, + "loss": 0.796, + "step": 2391 + }, + { + "epoch": 2.646017699115044, + "grad_norm": 0.10791015625, + "learning_rate": 3.38495575221239e-06, + "loss": 0.7598, + "step": 2392 + }, + { + "epoch": 2.64712389380531, + "grad_norm": 0.11181640625, + "learning_rate": 3.382190265486726e-06, + "loss": 0.7687, + "step": 2393 + }, + { + "epoch": 2.648230088495575, + "grad_norm": 0.1162109375, + "learning_rate": 3.379424778761062e-06, + "loss": 0.806, + "step": 2394 + }, + { + "epoch": 2.649336283185841, + "grad_norm": 0.140625, + "learning_rate": 3.3766592920353987e-06, + "loss": 0.8095, + "step": 2395 + }, + { + "epoch": 2.650442477876106, + "grad_norm": 0.10009765625, + "learning_rate": 3.373893805309735e-06, + "loss": 0.7476, + "step": 2396 + }, + { + "epoch": 2.6515486725663715, + "grad_norm": 0.134765625, + "learning_rate": 3.371128318584071e-06, + "loss": 0.7408, + "step": 2397 + }, + { + "epoch": 2.6526548672566372, + "grad_norm": 0.1103515625, + "learning_rate": 3.368362831858407e-06, + "loss": 0.7591, + "step": 2398 + }, + { + "epoch": 2.6537610619469025, + "grad_norm": 0.11279296875, + "learning_rate": 3.365597345132743e-06, + "loss": 0.761, + "step": 2399 + }, + { + "epoch": 2.6548672566371683, + "grad_norm": 0.1103515625, + "learning_rate": 3.36283185840708e-06, + "loss": 0.7572, + "step": 2400 + }, + { + "epoch": 2.6559734513274336, + "grad_norm": 0.140625, + "learning_rate": 3.3600663716814163e-06, + "loss": 0.8115, + "step": 2401 + }, + { + "epoch": 2.6570796460176993, + "grad_norm": 0.10595703125, + "learning_rate": 3.3573008849557524e-06, + "loss": 0.752, + "step": 2402 + }, + { + "epoch": 2.6581858407079646, + "grad_norm": 0.10791015625, + "learning_rate": 3.354535398230089e-06, + "loss": 0.7666, + "step": 2403 + }, + { + "epoch": 2.65929203539823, + "grad_norm": 0.11572265625, + "learning_rate": 3.351769911504425e-06, + "loss": 0.7992, + "step": 2404 + }, + { + "epoch": 2.6603982300884956, + "grad_norm": 0.125, + "learning_rate": 3.3490044247787612e-06, + "loss": 
0.7463, + "step": 2405 + }, + { + "epoch": 2.661504424778761, + "grad_norm": 0.111328125, + "learning_rate": 3.3462389380530974e-06, + "loss": 0.7937, + "step": 2406 + }, + { + "epoch": 2.6626106194690267, + "grad_norm": 0.10302734375, + "learning_rate": 3.3434734513274343e-06, + "loss": 0.7559, + "step": 2407 + }, + { + "epoch": 2.663716814159292, + "grad_norm": 0.158203125, + "learning_rate": 3.3407079646017705e-06, + "loss": 0.8587, + "step": 2408 + }, + { + "epoch": 2.6648230088495577, + "grad_norm": 0.11474609375, + "learning_rate": 3.3379424778761066e-06, + "loss": 0.7538, + "step": 2409 + }, + { + "epoch": 2.665929203539823, + "grad_norm": 0.123046875, + "learning_rate": 3.3351769911504427e-06, + "loss": 0.7253, + "step": 2410 + }, + { + "epoch": 2.6670353982300883, + "grad_norm": 0.11669921875, + "learning_rate": 3.332411504424779e-06, + "loss": 0.7538, + "step": 2411 + }, + { + "epoch": 2.668141592920354, + "grad_norm": 0.11083984375, + "learning_rate": 3.3296460176991154e-06, + "loss": 0.7951, + "step": 2412 + }, + { + "epoch": 2.6692477876106193, + "grad_norm": 0.10986328125, + "learning_rate": 3.3268805309734515e-06, + "loss": 0.7451, + "step": 2413 + }, + { + "epoch": 2.670353982300885, + "grad_norm": 0.11474609375, + "learning_rate": 3.3241150442477876e-06, + "loss": 0.7847, + "step": 2414 + }, + { + "epoch": 2.6714601769911503, + "grad_norm": 0.11083984375, + "learning_rate": 3.321349557522124e-06, + "loss": 0.7929, + "step": 2415 + }, + { + "epoch": 2.672566371681416, + "grad_norm": 0.11328125, + "learning_rate": 3.3185840707964607e-06, + "loss": 0.7695, + "step": 2416 + }, + { + "epoch": 2.6736725663716814, + "grad_norm": 0.10107421875, + "learning_rate": 3.315818584070797e-06, + "loss": 0.7645, + "step": 2417 + }, + { + "epoch": 2.6747787610619467, + "grad_norm": 0.1240234375, + "learning_rate": 3.313053097345133e-06, + "loss": 0.8093, + "step": 2418 + }, + { + "epoch": 2.6758849557522124, + "grad_norm": 0.162109375, + "learning_rate": 3.310287610619469e-06, + "loss": 0.7731, + "step": 2419 + }, + { + "epoch": 2.676991150442478, + "grad_norm": 0.1279296875, + "learning_rate": 3.3075221238938052e-06, + "loss": 0.7986, + "step": 2420 + }, + { + "epoch": 2.6780973451327434, + "grad_norm": 0.1044921875, + "learning_rate": 3.304756637168142e-06, + "loss": 0.7538, + "step": 2421 + }, + { + "epoch": 2.6792035398230087, + "grad_norm": 0.1083984375, + "learning_rate": 3.3019911504424783e-06, + "loss": 0.764, + "step": 2422 + }, + { + "epoch": 2.6803097345132745, + "grad_norm": 0.10107421875, + "learning_rate": 3.2992256637168145e-06, + "loss": 0.7712, + "step": 2423 + }, + { + "epoch": 2.6814159292035398, + "grad_norm": 0.1171875, + "learning_rate": 3.296460176991151e-06, + "loss": 0.8029, + "step": 2424 + }, + { + "epoch": 2.682522123893805, + "grad_norm": 0.11328125, + "learning_rate": 3.293694690265487e-06, + "loss": 0.7788, + "step": 2425 + }, + { + "epoch": 2.683628318584071, + "grad_norm": 0.1640625, + "learning_rate": 3.2909292035398233e-06, + "loss": 0.7485, + "step": 2426 + }, + { + "epoch": 2.6847345132743365, + "grad_norm": 0.10595703125, + "learning_rate": 3.2881637168141594e-06, + "loss": 0.7477, + "step": 2427 + }, + { + "epoch": 2.685840707964602, + "grad_norm": 0.12255859375, + "learning_rate": 3.2853982300884955e-06, + "loss": 0.7926, + "step": 2428 + }, + { + "epoch": 2.686946902654867, + "grad_norm": 0.12890625, + "learning_rate": 3.282632743362832e-06, + "loss": 0.7902, + "step": 2429 + }, + { + "epoch": 2.688053097345133, + "grad_norm": 0.1083984375, + 
"learning_rate": 3.2798672566371686e-06, + "loss": 0.7606, + "step": 2430 + }, + { + "epoch": 2.689159292035398, + "grad_norm": 0.12255859375, + "learning_rate": 3.2771017699115047e-06, + "loss": 0.7647, + "step": 2431 + }, + { + "epoch": 2.6902654867256635, + "grad_norm": 0.1201171875, + "learning_rate": 3.274336283185841e-06, + "loss": 0.8484, + "step": 2432 + }, + { + "epoch": 2.691371681415929, + "grad_norm": 0.1005859375, + "learning_rate": 3.2715707964601774e-06, + "loss": 0.7867, + "step": 2433 + }, + { + "epoch": 2.692477876106195, + "grad_norm": 0.1220703125, + "learning_rate": 3.2688053097345136e-06, + "loss": 0.8294, + "step": 2434 + }, + { + "epoch": 2.6935840707964602, + "grad_norm": 0.11279296875, + "learning_rate": 3.2660398230088497e-06, + "loss": 0.7912, + "step": 2435 + }, + { + "epoch": 2.6946902654867255, + "grad_norm": 0.1181640625, + "learning_rate": 3.263274336283186e-06, + "loss": 0.7663, + "step": 2436 + }, + { + "epoch": 2.6957964601769913, + "grad_norm": 0.1259765625, + "learning_rate": 3.260508849557522e-06, + "loss": 0.7467, + "step": 2437 + }, + { + "epoch": 2.6969026548672566, + "grad_norm": 0.1328125, + "learning_rate": 3.257743362831859e-06, + "loss": 0.8142, + "step": 2438 + }, + { + "epoch": 2.698008849557522, + "grad_norm": 0.1015625, + "learning_rate": 3.254977876106195e-06, + "loss": 0.7561, + "step": 2439 + }, + { + "epoch": 2.6991150442477876, + "grad_norm": 0.1396484375, + "learning_rate": 3.252212389380531e-06, + "loss": 0.8329, + "step": 2440 + }, + { + "epoch": 2.7002212389380533, + "grad_norm": 0.11572265625, + "learning_rate": 3.2494469026548677e-06, + "loss": 0.7774, + "step": 2441 + }, + { + "epoch": 2.7013274336283186, + "grad_norm": 0.1396484375, + "learning_rate": 3.246681415929204e-06, + "loss": 0.7814, + "step": 2442 + }, + { + "epoch": 2.702433628318584, + "grad_norm": 0.11767578125, + "learning_rate": 3.24391592920354e-06, + "loss": 0.7653, + "step": 2443 + }, + { + "epoch": 2.7035398230088497, + "grad_norm": 0.10400390625, + "learning_rate": 3.241150442477876e-06, + "loss": 0.7786, + "step": 2444 + }, + { + "epoch": 2.704646017699115, + "grad_norm": 0.11083984375, + "learning_rate": 3.238384955752213e-06, + "loss": 0.7432, + "step": 2445 + }, + { + "epoch": 2.7057522123893807, + "grad_norm": 0.12060546875, + "learning_rate": 3.235619469026549e-06, + "loss": 0.8105, + "step": 2446 + }, + { + "epoch": 2.706858407079646, + "grad_norm": 0.1220703125, + "learning_rate": 3.2328539823008853e-06, + "loss": 0.7337, + "step": 2447 + }, + { + "epoch": 2.7079646017699117, + "grad_norm": 0.1201171875, + "learning_rate": 3.2300884955752214e-06, + "loss": 0.738, + "step": 2448 + }, + { + "epoch": 2.709070796460177, + "grad_norm": 0.1181640625, + "learning_rate": 3.2273230088495576e-06, + "loss": 0.8209, + "step": 2449 + }, + { + "epoch": 2.7101769911504423, + "grad_norm": 0.109375, + "learning_rate": 3.224557522123894e-06, + "loss": 0.7394, + "step": 2450 + }, + { + "epoch": 2.711283185840708, + "grad_norm": 0.10791015625, + "learning_rate": 3.2217920353982302e-06, + "loss": 0.7909, + "step": 2451 + }, + { + "epoch": 2.7123893805309733, + "grad_norm": 0.11181640625, + "learning_rate": 3.2190265486725664e-06, + "loss": 0.748, + "step": 2452 + }, + { + "epoch": 2.713495575221239, + "grad_norm": 0.115234375, + "learning_rate": 3.2162610619469033e-06, + "loss": 0.7764, + "step": 2453 + }, + { + "epoch": 2.7146017699115044, + "grad_norm": 0.0986328125, + "learning_rate": 3.2134955752212395e-06, + "loss": 0.7376, + "step": 2454 + }, + { + "epoch": 
2.71570796460177, + "grad_norm": 0.111328125, + "learning_rate": 3.2107300884955756e-06, + "loss": 0.7906, + "step": 2455 + }, + { + "epoch": 2.7168141592920354, + "grad_norm": 0.10498046875, + "learning_rate": 3.2079646017699117e-06, + "loss": 0.7244, + "step": 2456 + }, + { + "epoch": 2.7179203539823007, + "grad_norm": 0.1171875, + "learning_rate": 3.205199115044248e-06, + "loss": 0.7889, + "step": 2457 + }, + { + "epoch": 2.7190265486725664, + "grad_norm": 0.1171875, + "learning_rate": 3.2024336283185844e-06, + "loss": 0.7765, + "step": 2458 + }, + { + "epoch": 2.7201327433628317, + "grad_norm": 0.10888671875, + "learning_rate": 3.1996681415929205e-06, + "loss": 0.7752, + "step": 2459 + }, + { + "epoch": 2.7212389380530975, + "grad_norm": 0.115234375, + "learning_rate": 3.196902654867257e-06, + "loss": 0.7337, + "step": 2460 + }, + { + "epoch": 2.7223451327433628, + "grad_norm": 0.1201171875, + "learning_rate": 3.194137168141593e-06, + "loss": 0.7912, + "step": 2461 + }, + { + "epoch": 2.7234513274336285, + "grad_norm": 0.11328125, + "learning_rate": 3.1913716814159297e-06, + "loss": 0.7822, + "step": 2462 + }, + { + "epoch": 2.724557522123894, + "grad_norm": 0.10791015625, + "learning_rate": 3.188606194690266e-06, + "loss": 0.795, + "step": 2463 + }, + { + "epoch": 2.725663716814159, + "grad_norm": 0.11083984375, + "learning_rate": 3.185840707964602e-06, + "loss": 0.7607, + "step": 2464 + }, + { + "epoch": 2.726769911504425, + "grad_norm": 0.1064453125, + "learning_rate": 3.183075221238938e-06, + "loss": 0.7267, + "step": 2465 + }, + { + "epoch": 2.72787610619469, + "grad_norm": 0.1318359375, + "learning_rate": 3.1803097345132742e-06, + "loss": 0.8173, + "step": 2466 + }, + { + "epoch": 2.728982300884956, + "grad_norm": 0.1162109375, + "learning_rate": 3.177544247787611e-06, + "loss": 0.7931, + "step": 2467 + }, + { + "epoch": 2.730088495575221, + "grad_norm": 0.1181640625, + "learning_rate": 3.1747787610619473e-06, + "loss": 0.7707, + "step": 2468 + }, + { + "epoch": 2.731194690265487, + "grad_norm": 0.10888671875, + "learning_rate": 3.1720132743362835e-06, + "loss": 0.7684, + "step": 2469 + }, + { + "epoch": 2.732300884955752, + "grad_norm": 0.1650390625, + "learning_rate": 3.1692477876106196e-06, + "loss": 0.823, + "step": 2470 + }, + { + "epoch": 2.7334070796460175, + "grad_norm": 0.115234375, + "learning_rate": 3.166482300884956e-06, + "loss": 0.7913, + "step": 2471 + }, + { + "epoch": 2.734513274336283, + "grad_norm": 0.10400390625, + "learning_rate": 3.1637168141592923e-06, + "loss": 0.7604, + "step": 2472 + }, + { + "epoch": 2.7356194690265485, + "grad_norm": 0.11669921875, + "learning_rate": 3.1609513274336284e-06, + "loss": 0.7729, + "step": 2473 + }, + { + "epoch": 2.7367256637168142, + "grad_norm": 0.11181640625, + "learning_rate": 3.1581858407079645e-06, + "loss": 0.782, + "step": 2474 + }, + { + "epoch": 2.7378318584070795, + "grad_norm": 0.10986328125, + "learning_rate": 3.1554203539823015e-06, + "loss": 0.7738, + "step": 2475 + }, + { + "epoch": 2.7389380530973453, + "grad_norm": 0.119140625, + "learning_rate": 3.1526548672566376e-06, + "loss": 0.8098, + "step": 2476 + }, + { + "epoch": 2.7400442477876106, + "grad_norm": 0.10595703125, + "learning_rate": 3.1498893805309737e-06, + "loss": 0.7716, + "step": 2477 + }, + { + "epoch": 2.741150442477876, + "grad_norm": 0.1015625, + "learning_rate": 3.14712389380531e-06, + "loss": 0.7059, + "step": 2478 + }, + { + "epoch": 2.7422566371681416, + "grad_norm": 0.11962890625, + "learning_rate": 3.1443584070796464e-06, + "loss": 
0.7795, + "step": 2479 + }, + { + "epoch": 2.7433628318584073, + "grad_norm": 0.12158203125, + "learning_rate": 3.1415929203539825e-06, + "loss": 0.8453, + "step": 2480 + }, + { + "epoch": 2.7444690265486726, + "grad_norm": 0.1298828125, + "learning_rate": 3.1388274336283187e-06, + "loss": 0.7756, + "step": 2481 + }, + { + "epoch": 2.745575221238938, + "grad_norm": 0.1123046875, + "learning_rate": 3.136061946902655e-06, + "loss": 0.7622, + "step": 2482 + }, + { + "epoch": 2.7466814159292037, + "grad_norm": 0.1669921875, + "learning_rate": 3.1332964601769918e-06, + "loss": 0.8567, + "step": 2483 + }, + { + "epoch": 2.747787610619469, + "grad_norm": 0.1103515625, + "learning_rate": 3.130530973451328e-06, + "loss": 0.7552, + "step": 2484 + }, + { + "epoch": 2.7488938053097343, + "grad_norm": 0.1083984375, + "learning_rate": 3.127765486725664e-06, + "loss": 0.7517, + "step": 2485 + }, + { + "epoch": 2.75, + "grad_norm": 0.1142578125, + "learning_rate": 3.125e-06, + "loss": 0.7711, + "step": 2486 + }, + { + "epoch": 2.7511061946902657, + "grad_norm": 0.10302734375, + "learning_rate": 3.1222345132743363e-06, + "loss": 0.7731, + "step": 2487 + }, + { + "epoch": 2.752212389380531, + "grad_norm": 0.12255859375, + "learning_rate": 3.119469026548673e-06, + "loss": 0.7377, + "step": 2488 + }, + { + "epoch": 2.7533185840707963, + "grad_norm": 0.10498046875, + "learning_rate": 3.116703539823009e-06, + "loss": 0.7707, + "step": 2489 + }, + { + "epoch": 2.754424778761062, + "grad_norm": 0.111328125, + "learning_rate": 3.1139380530973455e-06, + "loss": 0.7778, + "step": 2490 + }, + { + "epoch": 2.7555309734513274, + "grad_norm": 0.10693359375, + "learning_rate": 3.111172566371682e-06, + "loss": 0.74, + "step": 2491 + }, + { + "epoch": 2.7566371681415927, + "grad_norm": 0.1162109375, + "learning_rate": 3.108407079646018e-06, + "loss": 0.8017, + "step": 2492 + }, + { + "epoch": 2.7577433628318584, + "grad_norm": 0.10888671875, + "learning_rate": 3.1056415929203543e-06, + "loss": 0.7959, + "step": 2493 + }, + { + "epoch": 2.758849557522124, + "grad_norm": 0.109375, + "learning_rate": 3.1028761061946904e-06, + "loss": 0.784, + "step": 2494 + }, + { + "epoch": 2.7599557522123894, + "grad_norm": 0.10546875, + "learning_rate": 3.1001106194690266e-06, + "loss": 0.7573, + "step": 2495 + }, + { + "epoch": 2.7610619469026547, + "grad_norm": 0.099609375, + "learning_rate": 3.097345132743363e-06, + "loss": 0.7235, + "step": 2496 + }, + { + "epoch": 2.7621681415929205, + "grad_norm": 0.107421875, + "learning_rate": 3.0945796460176992e-06, + "loss": 0.7746, + "step": 2497 + }, + { + "epoch": 2.7632743362831858, + "grad_norm": 0.1064453125, + "learning_rate": 3.0918141592920358e-06, + "loss": 0.737, + "step": 2498 + }, + { + "epoch": 2.7643805309734515, + "grad_norm": 0.1337890625, + "learning_rate": 3.089048672566372e-06, + "loss": 0.7887, + "step": 2499 + }, + { + "epoch": 2.765486725663717, + "grad_norm": 0.1083984375, + "learning_rate": 3.0862831858407085e-06, + "loss": 0.7436, + "step": 2500 + }, + { + "epoch": 2.7665929203539825, + "grad_norm": 0.11376953125, + "learning_rate": 3.0835176991150446e-06, + "loss": 0.768, + "step": 2501 + }, + { + "epoch": 2.767699115044248, + "grad_norm": 0.103515625, + "learning_rate": 3.0807522123893807e-06, + "loss": 0.7283, + "step": 2502 + }, + { + "epoch": 2.768805309734513, + "grad_norm": 0.10888671875, + "learning_rate": 3.077986725663717e-06, + "loss": 0.7945, + "step": 2503 + }, + { + "epoch": 2.769911504424779, + "grad_norm": 0.10400390625, + "learning_rate": 
3.075221238938053e-06, + "loss": 0.7602, + "step": 2504 + }, + { + "epoch": 2.771017699115044, + "grad_norm": 0.1083984375, + "learning_rate": 3.07245575221239e-06, + "loss": 0.7633, + "step": 2505 + }, + { + "epoch": 2.77212389380531, + "grad_norm": 0.1044921875, + "learning_rate": 3.069690265486726e-06, + "loss": 0.7445, + "step": 2506 + }, + { + "epoch": 2.773230088495575, + "grad_norm": 0.11962890625, + "learning_rate": 3.066924778761062e-06, + "loss": 0.7845, + "step": 2507 + }, + { + "epoch": 2.774336283185841, + "grad_norm": 0.10986328125, + "learning_rate": 3.0641592920353987e-06, + "loss": 0.7766, + "step": 2508 + }, + { + "epoch": 2.775442477876106, + "grad_norm": 0.1298828125, + "learning_rate": 3.061393805309735e-06, + "loss": 0.749, + "step": 2509 + }, + { + "epoch": 2.7765486725663715, + "grad_norm": 0.10546875, + "learning_rate": 3.058628318584071e-06, + "loss": 0.7598, + "step": 2510 + }, + { + "epoch": 2.7776548672566372, + "grad_norm": 0.1064453125, + "learning_rate": 3.055862831858407e-06, + "loss": 0.758, + "step": 2511 + }, + { + "epoch": 2.7787610619469025, + "grad_norm": 0.10595703125, + "learning_rate": 3.0530973451327432e-06, + "loss": 0.8028, + "step": 2512 + }, + { + "epoch": 2.7798672566371683, + "grad_norm": 0.103515625, + "learning_rate": 3.05033185840708e-06, + "loss": 0.7597, + "step": 2513 + }, + { + "epoch": 2.7809734513274336, + "grad_norm": 0.119140625, + "learning_rate": 3.0475663716814163e-06, + "loss": 0.796, + "step": 2514 + }, + { + "epoch": 2.7820796460176993, + "grad_norm": 0.1083984375, + "learning_rate": 3.0448008849557525e-06, + "loss": 0.8348, + "step": 2515 + }, + { + "epoch": 2.7831858407079646, + "grad_norm": 0.10791015625, + "learning_rate": 3.0420353982300886e-06, + "loss": 0.7594, + "step": 2516 + }, + { + "epoch": 2.78429203539823, + "grad_norm": 0.193359375, + "learning_rate": 3.039269911504425e-06, + "loss": 0.7736, + "step": 2517 + }, + { + "epoch": 2.7853982300884956, + "grad_norm": 0.09912109375, + "learning_rate": 3.0365044247787613e-06, + "loss": 0.7082, + "step": 2518 + }, + { + "epoch": 2.786504424778761, + "grad_norm": 0.111328125, + "learning_rate": 3.0337389380530974e-06, + "loss": 0.8234, + "step": 2519 + }, + { + "epoch": 2.7876106194690267, + "grad_norm": 0.111328125, + "learning_rate": 3.030973451327434e-06, + "loss": 0.7222, + "step": 2520 + }, + { + "epoch": 2.788716814159292, + "grad_norm": 0.1298828125, + "learning_rate": 3.0282079646017705e-06, + "loss": 0.782, + "step": 2521 + }, + { + "epoch": 2.7898230088495577, + "grad_norm": 0.10888671875, + "learning_rate": 3.0254424778761066e-06, + "loss": 0.76, + "step": 2522 + }, + { + "epoch": 2.790929203539823, + "grad_norm": 0.115234375, + "learning_rate": 3.0226769911504427e-06, + "loss": 0.7949, + "step": 2523 + }, + { + "epoch": 2.7920353982300883, + "grad_norm": 0.1005859375, + "learning_rate": 3.019911504424779e-06, + "loss": 0.7327, + "step": 2524 + }, + { + "epoch": 2.793141592920354, + "grad_norm": 0.10009765625, + "learning_rate": 3.017146017699115e-06, + "loss": 0.7711, + "step": 2525 + }, + { + "epoch": 2.7942477876106193, + "grad_norm": 0.10302734375, + "learning_rate": 3.0143805309734515e-06, + "loss": 0.7194, + "step": 2526 + }, + { + "epoch": 2.795353982300885, + "grad_norm": 0.10791015625, + "learning_rate": 3.0116150442477877e-06, + "loss": 0.752, + "step": 2527 + }, + { + "epoch": 2.7964601769911503, + "grad_norm": 0.109375, + "learning_rate": 3.0088495575221242e-06, + "loss": 0.7692, + "step": 2528 + }, + { + "epoch": 2.797566371681416, + "grad_norm": 
0.11181640625, + "learning_rate": 3.0060840707964608e-06, + "loss": 0.7849, + "step": 2529 + }, + { + "epoch": 2.7986725663716814, + "grad_norm": 0.109375, + "learning_rate": 3.003318584070797e-06, + "loss": 0.7531, + "step": 2530 + }, + { + "epoch": 2.7997787610619467, + "grad_norm": 0.12255859375, + "learning_rate": 3.000553097345133e-06, + "loss": 0.8197, + "step": 2531 + }, + { + "epoch": 2.8008849557522124, + "grad_norm": 0.107421875, + "learning_rate": 2.997787610619469e-06, + "loss": 0.7467, + "step": 2532 + }, + { + "epoch": 2.801991150442478, + "grad_norm": 0.1220703125, + "learning_rate": 2.9950221238938053e-06, + "loss": 0.7392, + "step": 2533 + }, + { + "epoch": 2.8030973451327434, + "grad_norm": 0.1005859375, + "learning_rate": 2.992256637168142e-06, + "loss": 0.7336, + "step": 2534 + }, + { + "epoch": 2.8042035398230087, + "grad_norm": 0.11962890625, + "learning_rate": 2.9894911504424784e-06, + "loss": 0.8075, + "step": 2535 + }, + { + "epoch": 2.8053097345132745, + "grad_norm": 0.12158203125, + "learning_rate": 2.9867256637168145e-06, + "loss": 0.7802, + "step": 2536 + }, + { + "epoch": 2.8064159292035398, + "grad_norm": 0.10107421875, + "learning_rate": 2.9839601769911506e-06, + "loss": 0.7427, + "step": 2537 + }, + { + "epoch": 2.807522123893805, + "grad_norm": 0.1220703125, + "learning_rate": 2.981194690265487e-06, + "loss": 0.8185, + "step": 2538 + }, + { + "epoch": 2.808628318584071, + "grad_norm": 0.10498046875, + "learning_rate": 2.9784292035398233e-06, + "loss": 0.7892, + "step": 2539 + }, + { + "epoch": 2.8097345132743365, + "grad_norm": 0.115234375, + "learning_rate": 2.9756637168141594e-06, + "loss": 0.7736, + "step": 2540 + }, + { + "epoch": 2.810840707964602, + "grad_norm": 0.11083984375, + "learning_rate": 2.9728982300884956e-06, + "loss": 0.7489, + "step": 2541 + }, + { + "epoch": 2.811946902654867, + "grad_norm": 0.11572265625, + "learning_rate": 2.9701327433628317e-06, + "loss": 0.7665, + "step": 2542 + }, + { + "epoch": 2.813053097345133, + "grad_norm": 0.1083984375, + "learning_rate": 2.9673672566371686e-06, + "loss": 0.7592, + "step": 2543 + }, + { + "epoch": 2.814159292035398, + "grad_norm": 0.11279296875, + "learning_rate": 2.9646017699115048e-06, + "loss": 0.7802, + "step": 2544 + }, + { + "epoch": 2.8152654867256635, + "grad_norm": 0.10205078125, + "learning_rate": 2.961836283185841e-06, + "loss": 0.7823, + "step": 2545 + }, + { + "epoch": 2.816371681415929, + "grad_norm": 0.115234375, + "learning_rate": 2.9590707964601775e-06, + "loss": 0.7607, + "step": 2546 + }, + { + "epoch": 2.817477876106195, + "grad_norm": 0.115234375, + "learning_rate": 2.9563053097345136e-06, + "loss": 0.7916, + "step": 2547 + }, + { + "epoch": 2.8185840707964602, + "grad_norm": 0.1455078125, + "learning_rate": 2.9535398230088497e-06, + "loss": 0.8373, + "step": 2548 + }, + { + "epoch": 2.8196902654867255, + "grad_norm": 0.10693359375, + "learning_rate": 2.950774336283186e-06, + "loss": 0.7805, + "step": 2549 + }, + { + "epoch": 2.8207964601769913, + "grad_norm": 0.1171875, + "learning_rate": 2.948008849557522e-06, + "loss": 0.8358, + "step": 2550 + }, + { + "epoch": 2.8219026548672566, + "grad_norm": 0.1103515625, + "learning_rate": 2.945243362831859e-06, + "loss": 0.7752, + "step": 2551 + }, + { + "epoch": 2.823008849557522, + "grad_norm": 0.111328125, + "learning_rate": 2.942477876106195e-06, + "loss": 0.794, + "step": 2552 + }, + { + "epoch": 2.8241150442477876, + "grad_norm": 0.109375, + "learning_rate": 2.939712389380531e-06, + "loss": 0.7589, + "step": 2553 + }, + { + 
"epoch": 2.8252212389380533, + "grad_norm": 0.1162109375, + "learning_rate": 2.9369469026548673e-06, + "loss": 0.7685, + "step": 2554 + }, + { + "epoch": 2.8263274336283186, + "grad_norm": 0.1123046875, + "learning_rate": 2.934181415929204e-06, + "loss": 0.7513, + "step": 2555 + }, + { + "epoch": 2.827433628318584, + "grad_norm": 0.10791015625, + "learning_rate": 2.93141592920354e-06, + "loss": 0.743, + "step": 2556 + }, + { + "epoch": 2.8285398230088497, + "grad_norm": 0.10595703125, + "learning_rate": 2.928650442477876e-06, + "loss": 0.7491, + "step": 2557 + }, + { + "epoch": 2.829646017699115, + "grad_norm": 0.11181640625, + "learning_rate": 2.925884955752213e-06, + "loss": 0.7363, + "step": 2558 + }, + { + "epoch": 2.8307522123893807, + "grad_norm": 0.09716796875, + "learning_rate": 2.923119469026549e-06, + "loss": 0.7487, + "step": 2559 + }, + { + "epoch": 2.831858407079646, + "grad_norm": 0.10498046875, + "learning_rate": 2.9203539823008853e-06, + "loss": 0.7554, + "step": 2560 + }, + { + "epoch": 2.8329646017699117, + "grad_norm": 0.12060546875, + "learning_rate": 2.9175884955752215e-06, + "loss": 0.7933, + "step": 2561 + }, + { + "epoch": 2.834070796460177, + "grad_norm": 0.126953125, + "learning_rate": 2.9148230088495576e-06, + "loss": 0.7904, + "step": 2562 + }, + { + "epoch": 2.8351769911504423, + "grad_norm": 0.10498046875, + "learning_rate": 2.9120575221238937e-06, + "loss": 0.758, + "step": 2563 + }, + { + "epoch": 2.836283185840708, + "grad_norm": 0.1279296875, + "learning_rate": 2.9092920353982303e-06, + "loss": 0.7974, + "step": 2564 + }, + { + "epoch": 2.8373893805309733, + "grad_norm": 0.10546875, + "learning_rate": 2.9065265486725664e-06, + "loss": 0.7383, + "step": 2565 + }, + { + "epoch": 2.838495575221239, + "grad_norm": 0.140625, + "learning_rate": 2.903761061946903e-06, + "loss": 0.8321, + "step": 2566 + }, + { + "epoch": 2.8396017699115044, + "grad_norm": 0.12353515625, + "learning_rate": 2.9009955752212395e-06, + "loss": 0.7976, + "step": 2567 + }, + { + "epoch": 2.84070796460177, + "grad_norm": 0.1103515625, + "learning_rate": 2.8982300884955756e-06, + "loss": 0.7554, + "step": 2568 + }, + { + "epoch": 2.8418141592920354, + "grad_norm": 0.12451171875, + "learning_rate": 2.8954646017699117e-06, + "loss": 0.7654, + "step": 2569 + }, + { + "epoch": 2.8429203539823007, + "grad_norm": 0.103515625, + "learning_rate": 2.892699115044248e-06, + "loss": 0.7178, + "step": 2570 + }, + { + "epoch": 2.8440265486725664, + "grad_norm": 0.1162109375, + "learning_rate": 2.889933628318584e-06, + "loss": 0.8131, + "step": 2571 + }, + { + "epoch": 2.8451327433628317, + "grad_norm": 0.11376953125, + "learning_rate": 2.8871681415929205e-06, + "loss": 0.7606, + "step": 2572 + }, + { + "epoch": 2.8462389380530975, + "grad_norm": 0.1142578125, + "learning_rate": 2.884402654867257e-06, + "loss": 0.8261, + "step": 2573 + }, + { + "epoch": 2.8473451327433628, + "grad_norm": 0.10791015625, + "learning_rate": 2.8816371681415932e-06, + "loss": 0.7768, + "step": 2574 + }, + { + "epoch": 2.8484513274336285, + "grad_norm": 0.119140625, + "learning_rate": 2.8788716814159293e-06, + "loss": 0.7861, + "step": 2575 + }, + { + "epoch": 2.849557522123894, + "grad_norm": 0.1259765625, + "learning_rate": 2.876106194690266e-06, + "loss": 0.7568, + "step": 2576 + }, + { + "epoch": 2.850663716814159, + "grad_norm": 0.109375, + "learning_rate": 2.873340707964602e-06, + "loss": 0.7969, + "step": 2577 + }, + { + "epoch": 2.851769911504425, + "grad_norm": 0.111328125, + "learning_rate": 2.870575221238938e-06, + 
"loss": 0.8032, + "step": 2578 + }, + { + "epoch": 2.85287610619469, + "grad_norm": 0.1181640625, + "learning_rate": 2.8678097345132743e-06, + "loss": 0.7372, + "step": 2579 + }, + { + "epoch": 2.853982300884956, + "grad_norm": 0.11376953125, + "learning_rate": 2.8650442477876104e-06, + "loss": 0.7744, + "step": 2580 + }, + { + "epoch": 2.855088495575221, + "grad_norm": 0.1123046875, + "learning_rate": 2.8622787610619474e-06, + "loss": 0.7524, + "step": 2581 + }, + { + "epoch": 2.856194690265487, + "grad_norm": 0.111328125, + "learning_rate": 2.8595132743362835e-06, + "loss": 0.8006, + "step": 2582 + }, + { + "epoch": 2.857300884955752, + "grad_norm": 0.1083984375, + "learning_rate": 2.8567477876106196e-06, + "loss": 0.7525, + "step": 2583 + }, + { + "epoch": 2.8584070796460175, + "grad_norm": 0.1484375, + "learning_rate": 2.853982300884956e-06, + "loss": 0.8799, + "step": 2584 + }, + { + "epoch": 2.859513274336283, + "grad_norm": 0.1201171875, + "learning_rate": 2.8512168141592923e-06, + "loss": 0.7757, + "step": 2585 + }, + { + "epoch": 2.8606194690265485, + "grad_norm": 0.11376953125, + "learning_rate": 2.8484513274336284e-06, + "loss": 0.7772, + "step": 2586 + }, + { + "epoch": 2.8617256637168142, + "grad_norm": 0.11376953125, + "learning_rate": 2.8456858407079645e-06, + "loss": 0.7849, + "step": 2587 + }, + { + "epoch": 2.8628318584070795, + "grad_norm": 0.1240234375, + "learning_rate": 2.8429203539823015e-06, + "loss": 0.7975, + "step": 2588 + }, + { + "epoch": 2.8639380530973453, + "grad_norm": 0.1630859375, + "learning_rate": 2.8401548672566376e-06, + "loss": 0.7774, + "step": 2589 + }, + { + "epoch": 2.8650442477876106, + "grad_norm": 0.10986328125, + "learning_rate": 2.8373893805309738e-06, + "loss": 0.7873, + "step": 2590 + }, + { + "epoch": 2.866150442477876, + "grad_norm": 0.12353515625, + "learning_rate": 2.83462389380531e-06, + "loss": 0.7961, + "step": 2591 + }, + { + "epoch": 2.8672566371681416, + "grad_norm": 0.10595703125, + "learning_rate": 2.831858407079646e-06, + "loss": 0.763, + "step": 2592 + }, + { + "epoch": 2.8683628318584073, + "grad_norm": 0.1123046875, + "learning_rate": 2.8290929203539826e-06, + "loss": 0.7591, + "step": 2593 + }, + { + "epoch": 2.8694690265486726, + "grad_norm": 0.10986328125, + "learning_rate": 2.8263274336283187e-06, + "loss": 0.7583, + "step": 2594 + }, + { + "epoch": 2.870575221238938, + "grad_norm": 0.11669921875, + "learning_rate": 2.823561946902655e-06, + "loss": 0.8039, + "step": 2595 + }, + { + "epoch": 2.8716814159292037, + "grad_norm": 0.10400390625, + "learning_rate": 2.820796460176992e-06, + "loss": 0.7299, + "step": 2596 + }, + { + "epoch": 2.872787610619469, + "grad_norm": 0.10107421875, + "learning_rate": 2.818030973451328e-06, + "loss": 0.7747, + "step": 2597 + }, + { + "epoch": 2.8738938053097343, + "grad_norm": 0.1044921875, + "learning_rate": 2.815265486725664e-06, + "loss": 0.7524, + "step": 2598 + }, + { + "epoch": 2.875, + "grad_norm": 0.1142578125, + "learning_rate": 2.8125e-06, + "loss": 0.7915, + "step": 2599 + }, + { + "epoch": 2.8761061946902657, + "grad_norm": 0.109375, + "learning_rate": 2.8097345132743363e-06, + "loss": 0.7859, + "step": 2600 + }, + { + "epoch": 2.877212389380531, + "grad_norm": 0.10546875, + "learning_rate": 2.806969026548673e-06, + "loss": 0.7501, + "step": 2601 + }, + { + "epoch": 2.8783185840707963, + "grad_norm": 0.10888671875, + "learning_rate": 2.804203539823009e-06, + "loss": 0.7767, + "step": 2602 + }, + { + "epoch": 2.879424778761062, + "grad_norm": 0.1181640625, + "learning_rate": 
2.8014380530973455e-06, + "loss": 0.7602, + "step": 2603 + }, + { + "epoch": 2.8805309734513274, + "grad_norm": 0.1123046875, + "learning_rate": 2.7986725663716817e-06, + "loss": 0.7582, + "step": 2604 + }, + { + "epoch": 2.8816371681415927, + "grad_norm": 0.09912109375, + "learning_rate": 2.795907079646018e-06, + "loss": 0.775, + "step": 2605 + }, + { + "epoch": 2.8827433628318584, + "grad_norm": 0.11328125, + "learning_rate": 2.7931415929203543e-06, + "loss": 0.7454, + "step": 2606 + }, + { + "epoch": 2.883849557522124, + "grad_norm": 0.1044921875, + "learning_rate": 2.7903761061946905e-06, + "loss": 0.7724, + "step": 2607 + }, + { + "epoch": 2.8849557522123894, + "grad_norm": 0.11669921875, + "learning_rate": 2.7876106194690266e-06, + "loss": 0.7716, + "step": 2608 + }, + { + "epoch": 2.8860619469026547, + "grad_norm": 0.1064453125, + "learning_rate": 2.7848451327433627e-06, + "loss": 0.7485, + "step": 2609 + }, + { + "epoch": 2.8871681415929205, + "grad_norm": 0.12890625, + "learning_rate": 2.7820796460176993e-06, + "loss": 0.7318, + "step": 2610 + }, + { + "epoch": 2.8882743362831858, + "grad_norm": 0.11279296875, + "learning_rate": 2.779314159292036e-06, + "loss": 0.7535, + "step": 2611 + }, + { + "epoch": 2.8893805309734515, + "grad_norm": 0.1376953125, + "learning_rate": 2.776548672566372e-06, + "loss": 0.7504, + "step": 2612 + }, + { + "epoch": 2.890486725663717, + "grad_norm": 0.11279296875, + "learning_rate": 2.773783185840708e-06, + "loss": 0.7657, + "step": 2613 + }, + { + "epoch": 2.8915929203539825, + "grad_norm": 0.12451171875, + "learning_rate": 2.7710176991150446e-06, + "loss": 0.7919, + "step": 2614 + }, + { + "epoch": 2.892699115044248, + "grad_norm": 0.1328125, + "learning_rate": 2.7682522123893807e-06, + "loss": 0.8021, + "step": 2615 + }, + { + "epoch": 2.893805309734513, + "grad_norm": 0.1318359375, + "learning_rate": 2.765486725663717e-06, + "loss": 0.802, + "step": 2616 + }, + { + "epoch": 2.894911504424779, + "grad_norm": 0.1015625, + "learning_rate": 2.762721238938053e-06, + "loss": 0.7401, + "step": 2617 + }, + { + "epoch": 2.896017699115044, + "grad_norm": 0.12890625, + "learning_rate": 2.75995575221239e-06, + "loss": 0.8255, + "step": 2618 + }, + { + "epoch": 2.89712389380531, + "grad_norm": 0.11279296875, + "learning_rate": 2.757190265486726e-06, + "loss": 0.783, + "step": 2619 + }, + { + "epoch": 2.898230088495575, + "grad_norm": 0.099609375, + "learning_rate": 2.754424778761062e-06, + "loss": 0.7362, + "step": 2620 + }, + { + "epoch": 2.899336283185841, + "grad_norm": 0.11181640625, + "learning_rate": 2.7516592920353983e-06, + "loss": 0.8153, + "step": 2621 + }, + { + "epoch": 2.900442477876106, + "grad_norm": 0.10791015625, + "learning_rate": 2.748893805309735e-06, + "loss": 0.772, + "step": 2622 + }, + { + "epoch": 2.9015486725663715, + "grad_norm": 0.1162109375, + "learning_rate": 2.746128318584071e-06, + "loss": 0.7682, + "step": 2623 + }, + { + "epoch": 2.9026548672566372, + "grad_norm": 0.10693359375, + "learning_rate": 2.743362831858407e-06, + "loss": 0.7462, + "step": 2624 + }, + { + "epoch": 2.9037610619469025, + "grad_norm": 0.11376953125, + "learning_rate": 2.7405973451327433e-06, + "loss": 0.7736, + "step": 2625 + }, + { + "epoch": 2.9048672566371683, + "grad_norm": 0.10791015625, + "learning_rate": 2.7378318584070802e-06, + "loss": 0.7347, + "step": 2626 + }, + { + "epoch": 2.9059734513274336, + "grad_norm": 0.12158203125, + "learning_rate": 2.7350663716814164e-06, + "loss": 0.8035, + "step": 2627 + }, + { + "epoch": 2.9070796460176993, + 
"grad_norm": 0.10498046875, + "learning_rate": 2.7323008849557525e-06, + "loss": 0.7663, + "step": 2628 + }, + { + "epoch": 2.9081858407079646, + "grad_norm": 0.1123046875, + "learning_rate": 2.7295353982300886e-06, + "loss": 0.7724, + "step": 2629 + }, + { + "epoch": 2.90929203539823, + "grad_norm": 0.103515625, + "learning_rate": 2.7267699115044247e-06, + "loss": 0.7701, + "step": 2630 + }, + { + "epoch": 2.9103982300884956, + "grad_norm": 0.11865234375, + "learning_rate": 2.7240044247787613e-06, + "loss": 0.7918, + "step": 2631 + }, + { + "epoch": 2.911504424778761, + "grad_norm": 0.10986328125, + "learning_rate": 2.7212389380530974e-06, + "loss": 0.762, + "step": 2632 + }, + { + "epoch": 2.9126106194690267, + "grad_norm": 0.126953125, + "learning_rate": 2.718473451327434e-06, + "loss": 0.7591, + "step": 2633 + }, + { + "epoch": 2.913716814159292, + "grad_norm": 0.1171875, + "learning_rate": 2.7157079646017705e-06, + "loss": 0.766, + "step": 2634 + }, + { + "epoch": 2.9148230088495577, + "grad_norm": 0.11328125, + "learning_rate": 2.7129424778761066e-06, + "loss": 0.7912, + "step": 2635 + }, + { + "epoch": 2.915929203539823, + "grad_norm": 0.1279296875, + "learning_rate": 2.7101769911504428e-06, + "loss": 0.8579, + "step": 2636 + }, + { + "epoch": 2.9170353982300883, + "grad_norm": 0.1806640625, + "learning_rate": 2.707411504424779e-06, + "loss": 0.7368, + "step": 2637 + }, + { + "epoch": 2.918141592920354, + "grad_norm": 0.134765625, + "learning_rate": 2.704646017699115e-06, + "loss": 0.7491, + "step": 2638 + }, + { + "epoch": 2.9192477876106193, + "grad_norm": 0.12060546875, + "learning_rate": 2.7018805309734516e-06, + "loss": 0.8138, + "step": 2639 + }, + { + "epoch": 2.920353982300885, + "grad_norm": 0.119140625, + "learning_rate": 2.6991150442477877e-06, + "loss": 0.7566, + "step": 2640 + }, + { + "epoch": 2.9214601769911503, + "grad_norm": 0.111328125, + "learning_rate": 2.6963495575221242e-06, + "loss": 0.7863, + "step": 2641 + }, + { + "epoch": 2.922566371681416, + "grad_norm": 0.11962890625, + "learning_rate": 2.6935840707964604e-06, + "loss": 0.8069, + "step": 2642 + }, + { + "epoch": 2.9236725663716814, + "grad_norm": 0.10791015625, + "learning_rate": 2.690818584070797e-06, + "loss": 0.7207, + "step": 2643 + }, + { + "epoch": 2.9247787610619467, + "grad_norm": 0.10888671875, + "learning_rate": 2.688053097345133e-06, + "loss": 0.7555, + "step": 2644 + }, + { + "epoch": 2.9258849557522124, + "grad_norm": 0.109375, + "learning_rate": 2.685287610619469e-06, + "loss": 0.7643, + "step": 2645 + }, + { + "epoch": 2.926991150442478, + "grad_norm": 0.1064453125, + "learning_rate": 2.6825221238938053e-06, + "loss": 0.7705, + "step": 2646 + }, + { + "epoch": 2.9280973451327434, + "grad_norm": 0.1357421875, + "learning_rate": 2.6797566371681414e-06, + "loss": 0.84, + "step": 2647 + }, + { + "epoch": 2.9292035398230087, + "grad_norm": 0.109375, + "learning_rate": 2.6769911504424784e-06, + "loss": 0.7756, + "step": 2648 + }, + { + "epoch": 2.9303097345132745, + "grad_norm": 0.1015625, + "learning_rate": 2.6742256637168145e-06, + "loss": 0.7423, + "step": 2649 + }, + { + "epoch": 2.9314159292035398, + "grad_norm": 0.125, + "learning_rate": 2.6714601769911507e-06, + "loss": 0.7659, + "step": 2650 + }, + { + "epoch": 2.932522123893805, + "grad_norm": 0.12109375, + "learning_rate": 2.668694690265487e-06, + "loss": 0.7525, + "step": 2651 + }, + { + "epoch": 2.933628318584071, + "grad_norm": 0.1083984375, + "learning_rate": 2.6659292035398233e-06, + "loss": 0.7784, + "step": 2652 + }, + { + 
"epoch": 2.9347345132743365, + "grad_norm": 0.10498046875, + "learning_rate": 2.6631637168141595e-06, + "loss": 0.7236, + "step": 2653 + }, + { + "epoch": 2.935840707964602, + "grad_norm": 0.1240234375, + "learning_rate": 2.6603982300884956e-06, + "loss": 0.8299, + "step": 2654 + }, + { + "epoch": 2.936946902654867, + "grad_norm": 0.1123046875, + "learning_rate": 2.6576327433628317e-06, + "loss": 0.7475, + "step": 2655 + }, + { + "epoch": 2.938053097345133, + "grad_norm": 0.1962890625, + "learning_rate": 2.6548672566371687e-06, + "loss": 0.7602, + "step": 2656 + }, + { + "epoch": 2.939159292035398, + "grad_norm": 0.123046875, + "learning_rate": 2.652101769911505e-06, + "loss": 0.7974, + "step": 2657 + }, + { + "epoch": 2.9402654867256635, + "grad_norm": 0.107421875, + "learning_rate": 2.649336283185841e-06, + "loss": 0.7683, + "step": 2658 + }, + { + "epoch": 2.941371681415929, + "grad_norm": 0.1064453125, + "learning_rate": 2.646570796460177e-06, + "loss": 0.7651, + "step": 2659 + }, + { + "epoch": 2.942477876106195, + "grad_norm": 0.1279296875, + "learning_rate": 2.6438053097345136e-06, + "loss": 0.7807, + "step": 2660 + }, + { + "epoch": 2.9435840707964602, + "grad_norm": 0.11962890625, + "learning_rate": 2.6410398230088497e-06, + "loss": 0.7892, + "step": 2661 + }, + { + "epoch": 2.9446902654867255, + "grad_norm": 0.11181640625, + "learning_rate": 2.638274336283186e-06, + "loss": 0.7492, + "step": 2662 + }, + { + "epoch": 2.9457964601769913, + "grad_norm": 0.1015625, + "learning_rate": 2.635508849557522e-06, + "loss": 0.7247, + "step": 2663 + }, + { + "epoch": 2.9469026548672566, + "grad_norm": 0.11376953125, + "learning_rate": 2.632743362831859e-06, + "loss": 0.7487, + "step": 2664 + }, + { + "epoch": 2.948008849557522, + "grad_norm": 0.10595703125, + "learning_rate": 2.629977876106195e-06, + "loss": 0.7853, + "step": 2665 + }, + { + "epoch": 2.9491150442477876, + "grad_norm": 0.11083984375, + "learning_rate": 2.627212389380531e-06, + "loss": 0.8029, + "step": 2666 + }, + { + "epoch": 2.9502212389380533, + "grad_norm": 0.11083984375, + "learning_rate": 2.6244469026548673e-06, + "loss": 0.7528, + "step": 2667 + }, + { + "epoch": 2.9513274336283186, + "grad_norm": 0.1142578125, + "learning_rate": 2.6216814159292035e-06, + "loss": 0.744, + "step": 2668 + }, + { + "epoch": 2.952433628318584, + "grad_norm": 0.1240234375, + "learning_rate": 2.61891592920354e-06, + "loss": 0.7763, + "step": 2669 + }, + { + "epoch": 2.9535398230088497, + "grad_norm": 0.12353515625, + "learning_rate": 2.616150442477876e-06, + "loss": 0.7355, + "step": 2670 + }, + { + "epoch": 2.954646017699115, + "grad_norm": 0.11669921875, + "learning_rate": 2.6133849557522127e-06, + "loss": 0.7574, + "step": 2671 + }, + { + "epoch": 2.9557522123893807, + "grad_norm": 0.1064453125, + "learning_rate": 2.6106194690265492e-06, + "loss": 0.7428, + "step": 2672 + }, + { + "epoch": 2.956858407079646, + "grad_norm": 0.1044921875, + "learning_rate": 2.6078539823008854e-06, + "loss": 0.7437, + "step": 2673 + }, + { + "epoch": 2.9579646017699117, + "grad_norm": 0.1162109375, + "learning_rate": 2.6050884955752215e-06, + "loss": 0.796, + "step": 2674 + }, + { + "epoch": 2.959070796460177, + "grad_norm": 0.109375, + "learning_rate": 2.6023230088495576e-06, + "loss": 0.8005, + "step": 2675 + }, + { + "epoch": 2.9601769911504423, + "grad_norm": 0.123046875, + "learning_rate": 2.5995575221238937e-06, + "loss": 0.7542, + "step": 2676 + }, + { + "epoch": 2.961283185840708, + "grad_norm": 0.109375, + "learning_rate": 2.5967920353982303e-06, + 
"loss": 0.7926, + "step": 2677 + }, + { + "epoch": 2.9623893805309733, + "grad_norm": 0.123046875, + "learning_rate": 2.5940265486725664e-06, + "loss": 0.772, + "step": 2678 + }, + { + "epoch": 2.963495575221239, + "grad_norm": 0.109375, + "learning_rate": 2.591261061946903e-06, + "loss": 0.7729, + "step": 2679 + }, + { + "epoch": 2.9646017699115044, + "grad_norm": 0.1572265625, + "learning_rate": 2.588495575221239e-06, + "loss": 0.8041, + "step": 2680 + }, + { + "epoch": 2.96570796460177, + "grad_norm": 0.1064453125, + "learning_rate": 2.5857300884955756e-06, + "loss": 0.7746, + "step": 2681 + }, + { + "epoch": 2.9668141592920354, + "grad_norm": 0.107421875, + "learning_rate": 2.5829646017699118e-06, + "loss": 0.7561, + "step": 2682 + }, + { + "epoch": 2.9679203539823007, + "grad_norm": 0.123046875, + "learning_rate": 2.580199115044248e-06, + "loss": 0.7789, + "step": 2683 + }, + { + "epoch": 2.9690265486725664, + "grad_norm": 0.10546875, + "learning_rate": 2.577433628318584e-06, + "loss": 0.7476, + "step": 2684 + }, + { + "epoch": 2.9701327433628317, + "grad_norm": 0.109375, + "learning_rate": 2.57466814159292e-06, + "loss": 0.7733, + "step": 2685 + }, + { + "epoch": 2.9712389380530975, + "grad_norm": 0.103515625, + "learning_rate": 2.571902654867257e-06, + "loss": 0.7434, + "step": 2686 + }, + { + "epoch": 2.9723451327433628, + "grad_norm": 0.1162109375, + "learning_rate": 2.5691371681415932e-06, + "loss": 0.8044, + "step": 2687 + }, + { + "epoch": 2.9734513274336285, + "grad_norm": 0.10888671875, + "learning_rate": 2.5663716814159294e-06, + "loss": 0.783, + "step": 2688 + }, + { + "epoch": 2.974557522123894, + "grad_norm": 0.12255859375, + "learning_rate": 2.563606194690266e-06, + "loss": 0.7533, + "step": 2689 + }, + { + "epoch": 2.975663716814159, + "grad_norm": 0.1162109375, + "learning_rate": 2.560840707964602e-06, + "loss": 0.7351, + "step": 2690 + }, + { + "epoch": 2.976769911504425, + "grad_norm": 0.1240234375, + "learning_rate": 2.558075221238938e-06, + "loss": 0.75, + "step": 2691 + }, + { + "epoch": 2.97787610619469, + "grad_norm": 0.107421875, + "learning_rate": 2.5553097345132743e-06, + "loss": 0.7659, + "step": 2692 + }, + { + "epoch": 2.978982300884956, + "grad_norm": 0.12109375, + "learning_rate": 2.5525442477876104e-06, + "loss": 0.7907, + "step": 2693 + }, + { + "epoch": 2.980088495575221, + "grad_norm": 0.115234375, + "learning_rate": 2.5497787610619474e-06, + "loss": 0.7494, + "step": 2694 + }, + { + "epoch": 2.981194690265487, + "grad_norm": 0.1103515625, + "learning_rate": 2.5470132743362835e-06, + "loss": 0.7548, + "step": 2695 + }, + { + "epoch": 2.982300884955752, + "grad_norm": 0.0986328125, + "learning_rate": 2.5442477876106196e-06, + "loss": 0.7395, + "step": 2696 + }, + { + "epoch": 2.9834070796460175, + "grad_norm": 0.107421875, + "learning_rate": 2.5414823008849558e-06, + "loss": 0.783, + "step": 2697 + }, + { + "epoch": 2.984513274336283, + "grad_norm": 0.10693359375, + "learning_rate": 2.5387168141592923e-06, + "loss": 0.7398, + "step": 2698 + }, + { + "epoch": 2.9856194690265485, + "grad_norm": 0.12109375, + "learning_rate": 2.5359513274336284e-06, + "loss": 0.8038, + "step": 2699 + }, + { + "epoch": 2.9867256637168142, + "grad_norm": 0.11669921875, + "learning_rate": 2.5331858407079646e-06, + "loss": 0.7897, + "step": 2700 + }, + { + "epoch": 2.9878318584070795, + "grad_norm": 0.1083984375, + "learning_rate": 2.5304203539823015e-06, + "loss": 0.7616, + "step": 2701 + }, + { + "epoch": 2.9889380530973453, + "grad_norm": 0.10888671875, + "learning_rate": 
2.5276548672566377e-06, + "loss": 0.7309, + "step": 2702 + }, + { + "epoch": 2.9900442477876106, + "grad_norm": 0.1171875, + "learning_rate": 2.524889380530974e-06, + "loss": 0.7755, + "step": 2703 + }, + { + "epoch": 2.991150442477876, + "grad_norm": 0.10400390625, + "learning_rate": 2.52212389380531e-06, + "loss": 0.7615, + "step": 2704 + }, + { + "epoch": 2.9922566371681416, + "grad_norm": 0.10595703125, + "learning_rate": 2.519358407079646e-06, + "loss": 0.7519, + "step": 2705 + }, + { + "epoch": 2.9933628318584073, + "grad_norm": 0.109375, + "learning_rate": 2.5165929203539826e-06, + "loss": 0.7537, + "step": 2706 + }, + { + "epoch": 2.9944690265486726, + "grad_norm": 0.11279296875, + "learning_rate": 2.5138274336283187e-06, + "loss": 0.7562, + "step": 2707 + }, + { + "epoch": 2.995575221238938, + "grad_norm": 0.1064453125, + "learning_rate": 2.511061946902655e-06, + "loss": 0.7406, + "step": 2708 + }, + { + "epoch": 2.9966814159292037, + "grad_norm": 0.640625, + "learning_rate": 2.5082964601769914e-06, + "loss": 0.7834, + "step": 2709 + }, + { + "epoch": 2.997787610619469, + "grad_norm": 0.11962890625, + "learning_rate": 2.505530973451328e-06, + "loss": 0.7617, + "step": 2710 + }, + { + "epoch": 2.9988938053097343, + "grad_norm": 0.107421875, + "learning_rate": 2.502765486725664e-06, + "loss": 0.7631, + "step": 2711 + }, + { + "epoch": 3.0, + "grad_norm": 0.12451171875, + "learning_rate": 2.5e-06, + "loss": 0.8323, + "step": 2712 + }, + { + "epoch": 3.0011061946902653, + "grad_norm": 0.10205078125, + "learning_rate": 2.4972345132743363e-06, + "loss": 0.7353, + "step": 2713 + }, + { + "epoch": 3.002212389380531, + "grad_norm": 0.1171875, + "learning_rate": 2.494469026548673e-06, + "loss": 0.7365, + "step": 2714 + }, + { + "epoch": 3.0033185840707963, + "grad_norm": 0.11474609375, + "learning_rate": 2.491703539823009e-06, + "loss": 0.8122, + "step": 2715 + }, + { + "epoch": 3.004424778761062, + "grad_norm": 0.11376953125, + "learning_rate": 2.488938053097345e-06, + "loss": 0.7858, + "step": 2716 + }, + { + "epoch": 3.0055309734513274, + "grad_norm": 0.1259765625, + "learning_rate": 2.4861725663716817e-06, + "loss": 0.8404, + "step": 2717 + }, + { + "epoch": 3.006637168141593, + "grad_norm": 0.10986328125, + "learning_rate": 2.483407079646018e-06, + "loss": 0.7802, + "step": 2718 + }, + { + "epoch": 3.0077433628318584, + "grad_norm": 0.11962890625, + "learning_rate": 2.4806415929203544e-06, + "loss": 0.7585, + "step": 2719 + }, + { + "epoch": 3.0088495575221237, + "grad_norm": 0.1083984375, + "learning_rate": 2.4778761061946905e-06, + "loss": 0.7818, + "step": 2720 + }, + { + "epoch": 3.0099557522123894, + "grad_norm": 0.1123046875, + "learning_rate": 2.4751106194690266e-06, + "loss": 0.7688, + "step": 2721 + }, + { + "epoch": 3.0110619469026547, + "grad_norm": 0.115234375, + "learning_rate": 2.472345132743363e-06, + "loss": 0.7416, + "step": 2722 + }, + { + "epoch": 3.0121681415929205, + "grad_norm": 0.10888671875, + "learning_rate": 2.4695796460176993e-06, + "loss": 0.7626, + "step": 2723 + }, + { + "epoch": 3.0132743362831858, + "grad_norm": 0.11083984375, + "learning_rate": 2.4668141592920354e-06, + "loss": 0.7587, + "step": 2724 + }, + { + "epoch": 3.0143805309734515, + "grad_norm": 0.11279296875, + "learning_rate": 2.464048672566372e-06, + "loss": 0.7763, + "step": 2725 + }, + { + "epoch": 3.015486725663717, + "grad_norm": 0.103515625, + "learning_rate": 2.461283185840708e-06, + "loss": 0.7405, + "step": 2726 + }, + { + "epoch": 3.0165929203539825, + "grad_norm": 0.11962890625, + 
"learning_rate": 2.4585176991150446e-06, + "loss": 0.7701, + "step": 2727 + }, + { + "epoch": 3.017699115044248, + "grad_norm": 0.11083984375, + "learning_rate": 2.4557522123893808e-06, + "loss": 0.7994, + "step": 2728 + }, + { + "epoch": 3.018805309734513, + "grad_norm": 0.10888671875, + "learning_rate": 2.4529867256637173e-06, + "loss": 0.7854, + "step": 2729 + }, + { + "epoch": 3.019911504424779, + "grad_norm": 0.11083984375, + "learning_rate": 2.4502212389380534e-06, + "loss": 0.7875, + "step": 2730 + }, + { + "epoch": 3.021017699115044, + "grad_norm": 0.11962890625, + "learning_rate": 2.4474557522123896e-06, + "loss": 0.7883, + "step": 2731 + }, + { + "epoch": 3.02212389380531, + "grad_norm": 0.11572265625, + "learning_rate": 2.4446902654867257e-06, + "loss": 0.7275, + "step": 2732 + }, + { + "epoch": 3.023230088495575, + "grad_norm": 0.0986328125, + "learning_rate": 2.4419247787610622e-06, + "loss": 0.7312, + "step": 2733 + }, + { + "epoch": 3.024336283185841, + "grad_norm": 0.134765625, + "learning_rate": 2.4391592920353984e-06, + "loss": 0.7491, + "step": 2734 + }, + { + "epoch": 3.025442477876106, + "grad_norm": 0.10498046875, + "learning_rate": 2.4363938053097345e-06, + "loss": 0.7622, + "step": 2735 + }, + { + "epoch": 3.0265486725663715, + "grad_norm": 0.1259765625, + "learning_rate": 2.433628318584071e-06, + "loss": 0.7987, + "step": 2736 + }, + { + "epoch": 3.0276548672566372, + "grad_norm": 0.0986328125, + "learning_rate": 2.4308628318584076e-06, + "loss": 0.7522, + "step": 2737 + }, + { + "epoch": 3.0287610619469025, + "grad_norm": 0.1044921875, + "learning_rate": 2.4280973451327437e-06, + "loss": 0.7829, + "step": 2738 + }, + { + "epoch": 3.0298672566371683, + "grad_norm": 0.11181640625, + "learning_rate": 2.42533185840708e-06, + "loss": 0.7701, + "step": 2739 + }, + { + "epoch": 3.0309734513274336, + "grad_norm": 0.109375, + "learning_rate": 2.422566371681416e-06, + "loss": 0.7666, + "step": 2740 + }, + { + "epoch": 3.0320796460176993, + "grad_norm": 0.1064453125, + "learning_rate": 2.4198008849557525e-06, + "loss": 0.7525, + "step": 2741 + }, + { + "epoch": 3.0331858407079646, + "grad_norm": 0.138671875, + "learning_rate": 2.4170353982300886e-06, + "loss": 0.7576, + "step": 2742 + }, + { + "epoch": 3.03429203539823, + "grad_norm": 0.10546875, + "learning_rate": 2.4142699115044248e-06, + "loss": 0.7514, + "step": 2743 + }, + { + "epoch": 3.0353982300884956, + "grad_norm": 0.11181640625, + "learning_rate": 2.4115044247787613e-06, + "loss": 0.7925, + "step": 2744 + }, + { + "epoch": 3.036504424778761, + "grad_norm": 0.103515625, + "learning_rate": 2.4087389380530974e-06, + "loss": 0.7516, + "step": 2745 + }, + { + "epoch": 3.0376106194690267, + "grad_norm": 0.1279296875, + "learning_rate": 2.405973451327434e-06, + "loss": 0.7799, + "step": 2746 + }, + { + "epoch": 3.038716814159292, + "grad_norm": 0.09814453125, + "learning_rate": 2.40320796460177e-06, + "loss": 0.721, + "step": 2747 + }, + { + "epoch": 3.0398230088495577, + "grad_norm": 0.1025390625, + "learning_rate": 2.4004424778761067e-06, + "loss": 0.7176, + "step": 2748 + }, + { + "epoch": 3.040929203539823, + "grad_norm": 0.1220703125, + "learning_rate": 2.397676991150443e-06, + "loss": 0.8004, + "step": 2749 + }, + { + "epoch": 3.0420353982300883, + "grad_norm": 0.11572265625, + "learning_rate": 2.394911504424779e-06, + "loss": 0.7476, + "step": 2750 + }, + { + "epoch": 3.043141592920354, + "grad_norm": 0.12890625, + "learning_rate": 2.392146017699115e-06, + "loss": 0.8049, + "step": 2751 + }, + { + "epoch": 
3.0442477876106193, + "grad_norm": 0.111328125, + "learning_rate": 2.3893805309734516e-06, + "loss": 0.7859, + "step": 2752 + }, + { + "epoch": 3.045353982300885, + "grad_norm": 0.1162109375, + "learning_rate": 2.3866150442477877e-06, + "loss": 0.7638, + "step": 2753 + }, + { + "epoch": 3.0464601769911503, + "grad_norm": 0.109375, + "learning_rate": 2.383849557522124e-06, + "loss": 0.759, + "step": 2754 + }, + { + "epoch": 3.047566371681416, + "grad_norm": 0.1201171875, + "learning_rate": 2.3810840707964604e-06, + "loss": 0.8131, + "step": 2755 + }, + { + "epoch": 3.0486725663716814, + "grad_norm": 0.10546875, + "learning_rate": 2.378318584070797e-06, + "loss": 0.7922, + "step": 2756 + }, + { + "epoch": 3.049778761061947, + "grad_norm": 0.1162109375, + "learning_rate": 2.375553097345133e-06, + "loss": 0.7695, + "step": 2757 + }, + { + "epoch": 3.0508849557522124, + "grad_norm": 0.111328125, + "learning_rate": 2.372787610619469e-06, + "loss": 0.7616, + "step": 2758 + }, + { + "epoch": 3.0519911504424777, + "grad_norm": 0.12890625, + "learning_rate": 2.3700221238938053e-06, + "loss": 0.7728, + "step": 2759 + }, + { + "epoch": 3.0530973451327434, + "grad_norm": 0.10693359375, + "learning_rate": 2.367256637168142e-06, + "loss": 0.7438, + "step": 2760 + }, + { + "epoch": 3.0542035398230087, + "grad_norm": 0.11474609375, + "learning_rate": 2.364491150442478e-06, + "loss": 0.8006, + "step": 2761 + }, + { + "epoch": 3.0553097345132745, + "grad_norm": 0.12109375, + "learning_rate": 2.361725663716814e-06, + "loss": 0.8048, + "step": 2762 + }, + { + "epoch": 3.0564159292035398, + "grad_norm": 0.11865234375, + "learning_rate": 2.3589601769911507e-06, + "loss": 0.7804, + "step": 2763 + }, + { + "epoch": 3.0575221238938055, + "grad_norm": 0.1025390625, + "learning_rate": 2.356194690265487e-06, + "loss": 0.768, + "step": 2764 + }, + { + "epoch": 3.058628318584071, + "grad_norm": 0.11083984375, + "learning_rate": 2.3534292035398234e-06, + "loss": 0.7316, + "step": 2765 + }, + { + "epoch": 3.059734513274336, + "grad_norm": 0.1044921875, + "learning_rate": 2.3506637168141595e-06, + "loss": 0.7479, + "step": 2766 + }, + { + "epoch": 3.060840707964602, + "grad_norm": 0.1220703125, + "learning_rate": 2.347898230088496e-06, + "loss": 0.8223, + "step": 2767 + }, + { + "epoch": 3.061946902654867, + "grad_norm": 0.10986328125, + "learning_rate": 2.345132743362832e-06, + "loss": 0.8086, + "step": 2768 + }, + { + "epoch": 3.063053097345133, + "grad_norm": 0.11328125, + "learning_rate": 2.3423672566371683e-06, + "loss": 0.7698, + "step": 2769 + }, + { + "epoch": 3.064159292035398, + "grad_norm": 0.111328125, + "learning_rate": 2.3396017699115044e-06, + "loss": 0.7939, + "step": 2770 + }, + { + "epoch": 3.065265486725664, + "grad_norm": 0.1142578125, + "learning_rate": 2.336836283185841e-06, + "loss": 0.8147, + "step": 2771 + }, + { + "epoch": 3.066371681415929, + "grad_norm": 0.11083984375, + "learning_rate": 2.334070796460177e-06, + "loss": 0.7843, + "step": 2772 + }, + { + "epoch": 3.0674778761061945, + "grad_norm": 0.1220703125, + "learning_rate": 2.331305309734513e-06, + "loss": 0.7709, + "step": 2773 + }, + { + "epoch": 3.0685840707964602, + "grad_norm": 0.109375, + "learning_rate": 2.3285398230088498e-06, + "loss": 0.7551, + "step": 2774 + }, + { + "epoch": 3.0696902654867255, + "grad_norm": 0.1171875, + "learning_rate": 2.3257743362831863e-06, + "loss": 0.7861, + "step": 2775 + }, + { + "epoch": 3.0707964601769913, + "grad_norm": 0.11669921875, + "learning_rate": 2.3230088495575224e-06, + "loss": 0.781, + 
"step": 2776 + }, + { + "epoch": 3.0719026548672566, + "grad_norm": 0.1103515625, + "learning_rate": 2.3202433628318586e-06, + "loss": 0.7659, + "step": 2777 + }, + { + "epoch": 3.0730088495575223, + "grad_norm": 0.11474609375, + "learning_rate": 2.317477876106195e-06, + "loss": 0.811, + "step": 2778 + }, + { + "epoch": 3.0741150442477876, + "grad_norm": 0.10546875, + "learning_rate": 2.3147123893805312e-06, + "loss": 0.7508, + "step": 2779 + }, + { + "epoch": 3.0752212389380533, + "grad_norm": 0.11181640625, + "learning_rate": 2.3119469026548674e-06, + "loss": 0.7642, + "step": 2780 + }, + { + "epoch": 3.0763274336283186, + "grad_norm": 0.11376953125, + "learning_rate": 2.3091814159292035e-06, + "loss": 0.7858, + "step": 2781 + }, + { + "epoch": 3.077433628318584, + "grad_norm": 0.1142578125, + "learning_rate": 2.30641592920354e-06, + "loss": 0.8014, + "step": 2782 + }, + { + "epoch": 3.0785398230088497, + "grad_norm": 0.123046875, + "learning_rate": 2.303650442477876e-06, + "loss": 0.8055, + "step": 2783 + }, + { + "epoch": 3.079646017699115, + "grad_norm": 0.1279296875, + "learning_rate": 2.3008849557522127e-06, + "loss": 0.8464, + "step": 2784 + }, + { + "epoch": 3.0807522123893807, + "grad_norm": 0.11328125, + "learning_rate": 2.298119469026549e-06, + "loss": 0.7589, + "step": 2785 + }, + { + "epoch": 3.081858407079646, + "grad_norm": 0.11328125, + "learning_rate": 2.2953539823008854e-06, + "loss": 0.8282, + "step": 2786 + }, + { + "epoch": 3.0829646017699117, + "grad_norm": 0.1083984375, + "learning_rate": 2.2925884955752215e-06, + "loss": 0.7437, + "step": 2787 + }, + { + "epoch": 3.084070796460177, + "grad_norm": 0.1171875, + "learning_rate": 2.2898230088495576e-06, + "loss": 0.7701, + "step": 2788 + }, + { + "epoch": 3.0851769911504423, + "grad_norm": 0.10595703125, + "learning_rate": 2.2870575221238938e-06, + "loss": 0.7392, + "step": 2789 + }, + { + "epoch": 3.086283185840708, + "grad_norm": 0.11376953125, + "learning_rate": 2.2842920353982303e-06, + "loss": 0.8021, + "step": 2790 + }, + { + "epoch": 3.0873893805309733, + "grad_norm": 0.11181640625, + "learning_rate": 2.2815265486725664e-06, + "loss": 0.7547, + "step": 2791 + }, + { + "epoch": 3.088495575221239, + "grad_norm": 0.1064453125, + "learning_rate": 2.2787610619469026e-06, + "loss": 0.7846, + "step": 2792 + }, + { + "epoch": 3.0896017699115044, + "grad_norm": 0.10888671875, + "learning_rate": 2.275995575221239e-06, + "loss": 0.7786, + "step": 2793 + }, + { + "epoch": 3.09070796460177, + "grad_norm": 0.10888671875, + "learning_rate": 2.2732300884955757e-06, + "loss": 0.7572, + "step": 2794 + }, + { + "epoch": 3.0918141592920354, + "grad_norm": 0.1630859375, + "learning_rate": 2.270464601769912e-06, + "loss": 0.7836, + "step": 2795 + }, + { + "epoch": 3.0929203539823007, + "grad_norm": 0.1474609375, + "learning_rate": 2.267699115044248e-06, + "loss": 0.8459, + "step": 2796 + }, + { + "epoch": 3.0940265486725664, + "grad_norm": 0.126953125, + "learning_rate": 2.2649336283185845e-06, + "loss": 0.7843, + "step": 2797 + }, + { + "epoch": 3.0951327433628317, + "grad_norm": 0.10498046875, + "learning_rate": 2.2621681415929206e-06, + "loss": 0.7609, + "step": 2798 + }, + { + "epoch": 3.0962389380530975, + "grad_norm": 0.10595703125, + "learning_rate": 2.2594026548672567e-06, + "loss": 0.7579, + "step": 2799 + }, + { + "epoch": 3.0973451327433628, + "grad_norm": 0.1083984375, + "learning_rate": 2.256637168141593e-06, + "loss": 0.7803, + "step": 2800 + }, + { + "epoch": 3.0984513274336285, + "grad_norm": 0.1259765625, + 
"learning_rate": 2.2538716814159294e-06, + "loss": 0.7708, + "step": 2801 + }, + { + "epoch": 3.099557522123894, + "grad_norm": 0.10888671875, + "learning_rate": 2.2511061946902655e-06, + "loss": 0.7491, + "step": 2802 + }, + { + "epoch": 3.100663716814159, + "grad_norm": 0.10546875, + "learning_rate": 2.248340707964602e-06, + "loss": 0.739, + "step": 2803 + }, + { + "epoch": 3.101769911504425, + "grad_norm": 0.1201171875, + "learning_rate": 2.245575221238938e-06, + "loss": 0.7812, + "step": 2804 + }, + { + "epoch": 3.10287610619469, + "grad_norm": 0.11669921875, + "learning_rate": 2.2428097345132747e-06, + "loss": 0.7738, + "step": 2805 + }, + { + "epoch": 3.103982300884956, + "grad_norm": 0.1142578125, + "learning_rate": 2.240044247787611e-06, + "loss": 0.7838, + "step": 2806 + }, + { + "epoch": 3.105088495575221, + "grad_norm": 0.12353515625, + "learning_rate": 2.237278761061947e-06, + "loss": 0.7915, + "step": 2807 + }, + { + "epoch": 3.106194690265487, + "grad_norm": 0.1181640625, + "learning_rate": 2.234513274336283e-06, + "loss": 0.7902, + "step": 2808 + }, + { + "epoch": 3.107300884955752, + "grad_norm": 0.11572265625, + "learning_rate": 2.2317477876106197e-06, + "loss": 0.734, + "step": 2809 + }, + { + "epoch": 3.1084070796460175, + "grad_norm": 0.1025390625, + "learning_rate": 2.228982300884956e-06, + "loss": 0.7441, + "step": 2810 + }, + { + "epoch": 3.109513274336283, + "grad_norm": 0.1064453125, + "learning_rate": 2.2262168141592924e-06, + "loss": 0.7977, + "step": 2811 + }, + { + "epoch": 3.1106194690265485, + "grad_norm": 0.109375, + "learning_rate": 2.2234513274336285e-06, + "loss": 0.7591, + "step": 2812 + }, + { + "epoch": 3.1117256637168142, + "grad_norm": 0.1103515625, + "learning_rate": 2.220685840707965e-06, + "loss": 0.7634, + "step": 2813 + }, + { + "epoch": 3.1128318584070795, + "grad_norm": 0.10791015625, + "learning_rate": 2.217920353982301e-06, + "loss": 0.7796, + "step": 2814 + }, + { + "epoch": 3.1139380530973453, + "grad_norm": 0.1220703125, + "learning_rate": 2.2151548672566373e-06, + "loss": 0.8021, + "step": 2815 + }, + { + "epoch": 3.1150442477876106, + "grad_norm": 0.123046875, + "learning_rate": 2.212389380530974e-06, + "loss": 0.781, + "step": 2816 + }, + { + "epoch": 3.1161504424778763, + "grad_norm": 0.109375, + "learning_rate": 2.20962389380531e-06, + "loss": 0.7094, + "step": 2817 + }, + { + "epoch": 3.1172566371681416, + "grad_norm": 0.10009765625, + "learning_rate": 2.206858407079646e-06, + "loss": 0.7593, + "step": 2818 + }, + { + "epoch": 3.118362831858407, + "grad_norm": 0.1259765625, + "learning_rate": 2.204092920353982e-06, + "loss": 0.7821, + "step": 2819 + }, + { + "epoch": 3.1194690265486726, + "grad_norm": 0.12060546875, + "learning_rate": 2.2013274336283188e-06, + "loss": 0.7766, + "step": 2820 + }, + { + "epoch": 3.120575221238938, + "grad_norm": 0.1142578125, + "learning_rate": 2.198561946902655e-06, + "loss": 0.776, + "step": 2821 + }, + { + "epoch": 3.1216814159292037, + "grad_norm": 0.13671875, + "learning_rate": 2.1957964601769914e-06, + "loss": 0.784, + "step": 2822 + }, + { + "epoch": 3.122787610619469, + "grad_norm": 0.10302734375, + "learning_rate": 2.1930309734513276e-06, + "loss": 0.8007, + "step": 2823 + }, + { + "epoch": 3.1238938053097347, + "grad_norm": 0.1201171875, + "learning_rate": 2.190265486725664e-06, + "loss": 0.8002, + "step": 2824 + }, + { + "epoch": 3.125, + "grad_norm": 0.1240234375, + "learning_rate": 2.1875000000000002e-06, + "loss": 0.7038, + "step": 2825 + }, + { + "epoch": 3.1261061946902653, + 
"grad_norm": 0.109375, + "learning_rate": 2.1847345132743364e-06, + "loss": 0.7475, + "step": 2826 + }, + { + "epoch": 3.127212389380531, + "grad_norm": 0.11767578125, + "learning_rate": 2.181969026548673e-06, + "loss": 0.8141, + "step": 2827 + }, + { + "epoch": 3.1283185840707963, + "grad_norm": 0.10498046875, + "learning_rate": 2.179203539823009e-06, + "loss": 0.7839, + "step": 2828 + }, + { + "epoch": 3.129424778761062, + "grad_norm": 0.12158203125, + "learning_rate": 2.176438053097345e-06, + "loss": 0.7681, + "step": 2829 + }, + { + "epoch": 3.1305309734513274, + "grad_norm": 0.16015625, + "learning_rate": 2.1736725663716817e-06, + "loss": 0.7944, + "step": 2830 + }, + { + "epoch": 3.131637168141593, + "grad_norm": 0.111328125, + "learning_rate": 2.170907079646018e-06, + "loss": 0.7803, + "step": 2831 + }, + { + "epoch": 3.1327433628318584, + "grad_norm": 0.1259765625, + "learning_rate": 2.1681415929203544e-06, + "loss": 0.7756, + "step": 2832 + }, + { + "epoch": 3.133849557522124, + "grad_norm": 0.11181640625, + "learning_rate": 2.1653761061946905e-06, + "loss": 0.7811, + "step": 2833 + }, + { + "epoch": 3.1349557522123894, + "grad_norm": 0.1181640625, + "learning_rate": 2.1626106194690266e-06, + "loss": 0.7733, + "step": 2834 + }, + { + "epoch": 3.1360619469026547, + "grad_norm": 0.111328125, + "learning_rate": 2.159845132743363e-06, + "loss": 0.7738, + "step": 2835 + }, + { + "epoch": 3.1371681415929205, + "grad_norm": 0.11279296875, + "learning_rate": 2.1570796460176993e-06, + "loss": 0.7946, + "step": 2836 + }, + { + "epoch": 3.1382743362831858, + "grad_norm": 0.1435546875, + "learning_rate": 2.1543141592920354e-06, + "loss": 0.7766, + "step": 2837 + }, + { + "epoch": 3.1393805309734515, + "grad_norm": 0.1103515625, + "learning_rate": 2.1515486725663716e-06, + "loss": 0.7893, + "step": 2838 + }, + { + "epoch": 3.140486725663717, + "grad_norm": 0.1279296875, + "learning_rate": 2.148783185840708e-06, + "loss": 0.7826, + "step": 2839 + }, + { + "epoch": 3.1415929203539825, + "grad_norm": 0.1162109375, + "learning_rate": 2.1460176991150442e-06, + "loss": 0.7852, + "step": 2840 + }, + { + "epoch": 3.142699115044248, + "grad_norm": 0.1181640625, + "learning_rate": 2.143252212389381e-06, + "loss": 0.7772, + "step": 2841 + }, + { + "epoch": 3.143805309734513, + "grad_norm": 0.1123046875, + "learning_rate": 2.140486725663717e-06, + "loss": 0.8138, + "step": 2842 + }, + { + "epoch": 3.144911504424779, + "grad_norm": 0.11474609375, + "learning_rate": 2.1377212389380535e-06, + "loss": 0.7907, + "step": 2843 + }, + { + "epoch": 3.146017699115044, + "grad_norm": 0.1455078125, + "learning_rate": 2.1349557522123896e-06, + "loss": 0.8189, + "step": 2844 + }, + { + "epoch": 3.14712389380531, + "grad_norm": 0.1201171875, + "learning_rate": 2.1321902654867257e-06, + "loss": 0.7855, + "step": 2845 + }, + { + "epoch": 3.148230088495575, + "grad_norm": 0.12890625, + "learning_rate": 2.1294247787610623e-06, + "loss": 0.7913, + "step": 2846 + }, + { + "epoch": 3.149336283185841, + "grad_norm": 0.1083984375, + "learning_rate": 2.1266592920353984e-06, + "loss": 0.7784, + "step": 2847 + }, + { + "epoch": 3.150442477876106, + "grad_norm": 0.1025390625, + "learning_rate": 2.1238938053097345e-06, + "loss": 0.7606, + "step": 2848 + }, + { + "epoch": 3.1515486725663715, + "grad_norm": 0.125, + "learning_rate": 2.121128318584071e-06, + "loss": 0.8237, + "step": 2849 + }, + { + "epoch": 3.1526548672566372, + "grad_norm": 0.11376953125, + "learning_rate": 2.118362831858407e-06, + "loss": 0.7448, + "step": 2850 + }, 
+ { + "epoch": 3.1537610619469025, + "grad_norm": 0.1201171875, + "learning_rate": 2.1155973451327437e-06, + "loss": 0.8151, + "step": 2851 + }, + { + "epoch": 3.1548672566371683, + "grad_norm": 0.1259765625, + "learning_rate": 2.11283185840708e-06, + "loss": 0.7845, + "step": 2852 + }, + { + "epoch": 3.1559734513274336, + "grad_norm": 0.1279296875, + "learning_rate": 2.110066371681416e-06, + "loss": 0.7629, + "step": 2853 + }, + { + "epoch": 3.1570796460176993, + "grad_norm": 0.1796875, + "learning_rate": 2.1073008849557525e-06, + "loss": 0.8094, + "step": 2854 + }, + { + "epoch": 3.1581858407079646, + "grad_norm": 0.1064453125, + "learning_rate": 2.1045353982300887e-06, + "loss": 0.8006, + "step": 2855 + }, + { + "epoch": 3.15929203539823, + "grad_norm": 0.1220703125, + "learning_rate": 2.101769911504425e-06, + "loss": 0.773, + "step": 2856 + }, + { + "epoch": 3.1603982300884956, + "grad_norm": 0.1181640625, + "learning_rate": 2.099004424778761e-06, + "loss": 0.737, + "step": 2857 + }, + { + "epoch": 3.161504424778761, + "grad_norm": 0.11279296875, + "learning_rate": 2.0962389380530975e-06, + "loss": 0.7619, + "step": 2858 + }, + { + "epoch": 3.1626106194690267, + "grad_norm": 0.166015625, + "learning_rate": 2.0934734513274336e-06, + "loss": 0.7561, + "step": 2859 + }, + { + "epoch": 3.163716814159292, + "grad_norm": 0.10546875, + "learning_rate": 2.09070796460177e-06, + "loss": 0.737, + "step": 2860 + }, + { + "epoch": 3.1648230088495577, + "grad_norm": 0.1083984375, + "learning_rate": 2.0879424778761067e-06, + "loss": 0.7534, + "step": 2861 + }, + { + "epoch": 3.165929203539823, + "grad_norm": 0.1669921875, + "learning_rate": 2.085176991150443e-06, + "loss": 0.8883, + "step": 2862 + }, + { + "epoch": 3.1670353982300883, + "grad_norm": 0.1083984375, + "learning_rate": 2.082411504424779e-06, + "loss": 0.7557, + "step": 2863 + }, + { + "epoch": 3.168141592920354, + "grad_norm": 0.1259765625, + "learning_rate": 2.079646017699115e-06, + "loss": 0.7934, + "step": 2864 + }, + { + "epoch": 3.1692477876106193, + "grad_norm": 0.11083984375, + "learning_rate": 2.0768805309734516e-06, + "loss": 0.767, + "step": 2865 + }, + { + "epoch": 3.170353982300885, + "grad_norm": 0.11083984375, + "learning_rate": 2.0741150442477878e-06, + "loss": 0.7835, + "step": 2866 + }, + { + "epoch": 3.1714601769911503, + "grad_norm": 0.11767578125, + "learning_rate": 2.071349557522124e-06, + "loss": 0.8144, + "step": 2867 + }, + { + "epoch": 3.172566371681416, + "grad_norm": 0.11279296875, + "learning_rate": 2.0685840707964604e-06, + "loss": 0.7783, + "step": 2868 + }, + { + "epoch": 3.1736725663716814, + "grad_norm": 0.1162109375, + "learning_rate": 2.0658185840707966e-06, + "loss": 0.8025, + "step": 2869 + }, + { + "epoch": 3.1747787610619467, + "grad_norm": 0.11279296875, + "learning_rate": 2.063053097345133e-06, + "loss": 0.7893, + "step": 2870 + }, + { + "epoch": 3.1758849557522124, + "grad_norm": 0.11328125, + "learning_rate": 2.0602876106194692e-06, + "loss": 0.7664, + "step": 2871 + }, + { + "epoch": 3.1769911504424777, + "grad_norm": 0.1162109375, + "learning_rate": 2.0575221238938054e-06, + "loss": 0.717, + "step": 2872 + }, + { + "epoch": 3.1780973451327434, + "grad_norm": 0.10107421875, + "learning_rate": 2.054756637168142e-06, + "loss": 0.7099, + "step": 2873 + }, + { + "epoch": 3.1792035398230087, + "grad_norm": 0.1103515625, + "learning_rate": 2.051991150442478e-06, + "loss": 0.7594, + "step": 2874 + }, + { + "epoch": 3.1803097345132745, + "grad_norm": 0.11865234375, + "learning_rate": 
2.049225663716814e-06, + "loss": 0.7685, + "step": 2875 + }, + { + "epoch": 3.1814159292035398, + "grad_norm": 0.10498046875, + "learning_rate": 2.0464601769911507e-06, + "loss": 0.7627, + "step": 2876 + }, + { + "epoch": 3.1825221238938055, + "grad_norm": 0.125, + "learning_rate": 2.043694690265487e-06, + "loss": 0.8107, + "step": 2877 + }, + { + "epoch": 3.183628318584071, + "grad_norm": 0.10400390625, + "learning_rate": 2.040929203539823e-06, + "loss": 0.7667, + "step": 2878 + }, + { + "epoch": 3.184734513274336, + "grad_norm": 0.12451171875, + "learning_rate": 2.0381637168141595e-06, + "loss": 0.809, + "step": 2879 + }, + { + "epoch": 3.185840707964602, + "grad_norm": 0.1044921875, + "learning_rate": 2.035398230088496e-06, + "loss": 0.7528, + "step": 2880 + }, + { + "epoch": 3.186946902654867, + "grad_norm": 0.10400390625, + "learning_rate": 2.032632743362832e-06, + "loss": 0.7706, + "step": 2881 + }, + { + "epoch": 3.188053097345133, + "grad_norm": 0.10888671875, + "learning_rate": 2.0298672566371683e-06, + "loss": 0.764, + "step": 2882 + }, + { + "epoch": 3.189159292035398, + "grad_norm": 0.11767578125, + "learning_rate": 2.0271017699115044e-06, + "loss": 0.789, + "step": 2883 + }, + { + "epoch": 3.190265486725664, + "grad_norm": 0.1005859375, + "learning_rate": 2.024336283185841e-06, + "loss": 0.7503, + "step": 2884 + }, + { + "epoch": 3.191371681415929, + "grad_norm": 0.1064453125, + "learning_rate": 2.021570796460177e-06, + "loss": 0.7369, + "step": 2885 + }, + { + "epoch": 3.1924778761061945, + "grad_norm": 0.1220703125, + "learning_rate": 2.0188053097345132e-06, + "loss": 0.7999, + "step": 2886 + }, + { + "epoch": 3.1935840707964602, + "grad_norm": 0.12060546875, + "learning_rate": 2.0160398230088498e-06, + "loss": 0.7879, + "step": 2887 + }, + { + "epoch": 3.1946902654867255, + "grad_norm": 0.10986328125, + "learning_rate": 2.013274336283186e-06, + "loss": 0.7614, + "step": 2888 + }, + { + "epoch": 3.1957964601769913, + "grad_norm": 0.115234375, + "learning_rate": 2.0105088495575225e-06, + "loss": 0.7465, + "step": 2889 + }, + { + "epoch": 3.1969026548672566, + "grad_norm": 0.12158203125, + "learning_rate": 2.0077433628318586e-06, + "loss": 0.7801, + "step": 2890 + }, + { + "epoch": 3.1980088495575223, + "grad_norm": 0.1259765625, + "learning_rate": 2.004977876106195e-06, + "loss": 0.803, + "step": 2891 + }, + { + "epoch": 3.1991150442477876, + "grad_norm": 0.109375, + "learning_rate": 2.0022123893805313e-06, + "loss": 0.7847, + "step": 2892 + }, + { + "epoch": 3.2002212389380533, + "grad_norm": 0.1064453125, + "learning_rate": 1.9994469026548674e-06, + "loss": 0.786, + "step": 2893 + }, + { + "epoch": 3.2013274336283186, + "grad_norm": 0.126953125, + "learning_rate": 1.9966814159292035e-06, + "loss": 0.8162, + "step": 2894 + }, + { + "epoch": 3.202433628318584, + "grad_norm": 0.1142578125, + "learning_rate": 1.99391592920354e-06, + "loss": 0.7222, + "step": 2895 + }, + { + "epoch": 3.2035398230088497, + "grad_norm": 0.10791015625, + "learning_rate": 1.991150442477876e-06, + "loss": 0.7881, + "step": 2896 + }, + { + "epoch": 3.204646017699115, + "grad_norm": 0.10986328125, + "learning_rate": 1.9883849557522123e-06, + "loss": 0.7555, + "step": 2897 + }, + { + "epoch": 3.2057522123893807, + "grad_norm": 0.11328125, + "learning_rate": 1.985619469026549e-06, + "loss": 0.761, + "step": 2898 + }, + { + "epoch": 3.206858407079646, + "grad_norm": 0.11669921875, + "learning_rate": 1.9828539823008854e-06, + "loss": 0.7351, + "step": 2899 + }, + { + "epoch": 3.2079646017699117, + 
"grad_norm": 0.1025390625, + "learning_rate": 1.9800884955752215e-06, + "loss": 0.7738, + "step": 2900 + }, + { + "epoch": 3.209070796460177, + "grad_norm": 0.11279296875, + "learning_rate": 1.9773230088495577e-06, + "loss": 0.7453, + "step": 2901 + }, + { + "epoch": 3.2101769911504423, + "grad_norm": 0.11181640625, + "learning_rate": 1.974557522123894e-06, + "loss": 0.757, + "step": 2902 + }, + { + "epoch": 3.211283185840708, + "grad_norm": 0.10986328125, + "learning_rate": 1.9717920353982303e-06, + "loss": 0.7606, + "step": 2903 + }, + { + "epoch": 3.2123893805309733, + "grad_norm": 0.11865234375, + "learning_rate": 1.9690265486725665e-06, + "loss": 0.7194, + "step": 2904 + }, + { + "epoch": 3.213495575221239, + "grad_norm": 0.10986328125, + "learning_rate": 1.9662610619469026e-06, + "loss": 0.7648, + "step": 2905 + }, + { + "epoch": 3.2146017699115044, + "grad_norm": 0.10498046875, + "learning_rate": 1.963495575221239e-06, + "loss": 0.7249, + "step": 2906 + }, + { + "epoch": 3.21570796460177, + "grad_norm": 0.11474609375, + "learning_rate": 1.9607300884955753e-06, + "loss": 0.7637, + "step": 2907 + }, + { + "epoch": 3.2168141592920354, + "grad_norm": 0.12353515625, + "learning_rate": 1.957964601769912e-06, + "loss": 0.7764, + "step": 2908 + }, + { + "epoch": 3.2179203539823007, + "grad_norm": 0.10888671875, + "learning_rate": 1.955199115044248e-06, + "loss": 0.7902, + "step": 2909 + }, + { + "epoch": 3.2190265486725664, + "grad_norm": 0.11083984375, + "learning_rate": 1.9524336283185845e-06, + "loss": 0.7839, + "step": 2910 + }, + { + "epoch": 3.2201327433628317, + "grad_norm": 0.10986328125, + "learning_rate": 1.9496681415929206e-06, + "loss": 0.7768, + "step": 2911 + }, + { + "epoch": 3.2212389380530975, + "grad_norm": 0.10302734375, + "learning_rate": 1.9469026548672567e-06, + "loss": 0.7338, + "step": 2912 + }, + { + "epoch": 3.2223451327433628, + "grad_norm": 0.10791015625, + "learning_rate": 1.944137168141593e-06, + "loss": 0.7739, + "step": 2913 + }, + { + "epoch": 3.2234513274336285, + "grad_norm": 0.11376953125, + "learning_rate": 1.9413716814159294e-06, + "loss": 0.7928, + "step": 2914 + }, + { + "epoch": 3.224557522123894, + "grad_norm": 0.119140625, + "learning_rate": 1.9386061946902655e-06, + "loss": 0.7403, + "step": 2915 + }, + { + "epoch": 3.225663716814159, + "grad_norm": 0.11181640625, + "learning_rate": 1.9358407079646017e-06, + "loss": 0.78, + "step": 2916 + }, + { + "epoch": 3.226769911504425, + "grad_norm": 0.109375, + "learning_rate": 1.9330752212389382e-06, + "loss": 0.7632, + "step": 2917 + }, + { + "epoch": 3.22787610619469, + "grad_norm": 0.10595703125, + "learning_rate": 1.9303097345132748e-06, + "loss": 0.7885, + "step": 2918 + }, + { + "epoch": 3.228982300884956, + "grad_norm": 0.11376953125, + "learning_rate": 1.927544247787611e-06, + "loss": 0.7794, + "step": 2919 + }, + { + "epoch": 3.230088495575221, + "grad_norm": 0.11181640625, + "learning_rate": 1.924778761061947e-06, + "loss": 0.7903, + "step": 2920 + }, + { + "epoch": 3.231194690265487, + "grad_norm": 0.107421875, + "learning_rate": 1.922013274336283e-06, + "loss": 0.7684, + "step": 2921 + }, + { + "epoch": 3.232300884955752, + "grad_norm": 0.1064453125, + "learning_rate": 1.9192477876106197e-06, + "loss": 0.7718, + "step": 2922 + }, + { + "epoch": 3.2334070796460175, + "grad_norm": 0.11181640625, + "learning_rate": 1.916482300884956e-06, + "loss": 0.7546, + "step": 2923 + }, + { + "epoch": 3.234513274336283, + "grad_norm": 0.11328125, + "learning_rate": 1.913716814159292e-06, + "loss": 0.7633, + 
"step": 2924 + }, + { + "epoch": 3.2356194690265485, + "grad_norm": 0.10400390625, + "learning_rate": 1.9109513274336285e-06, + "loss": 0.7436, + "step": 2925 + }, + { + "epoch": 3.2367256637168142, + "grad_norm": 0.11962890625, + "learning_rate": 1.9081858407079646e-06, + "loss": 0.7871, + "step": 2926 + }, + { + "epoch": 3.2378318584070795, + "grad_norm": 0.130859375, + "learning_rate": 1.905420353982301e-06, + "loss": 0.8066, + "step": 2927 + }, + { + "epoch": 3.2389380530973453, + "grad_norm": 0.11865234375, + "learning_rate": 1.9026548672566373e-06, + "loss": 0.7831, + "step": 2928 + }, + { + "epoch": 3.2400442477876106, + "grad_norm": 0.099609375, + "learning_rate": 1.8998893805309736e-06, + "loss": 0.7719, + "step": 2929 + }, + { + "epoch": 3.241150442477876, + "grad_norm": 0.1123046875, + "learning_rate": 1.89712389380531e-06, + "loss": 0.7883, + "step": 2930 + }, + { + "epoch": 3.2422566371681416, + "grad_norm": 0.11279296875, + "learning_rate": 1.8943584070796461e-06, + "loss": 0.8061, + "step": 2931 + }, + { + "epoch": 3.243362831858407, + "grad_norm": 0.138671875, + "learning_rate": 1.8915929203539824e-06, + "loss": 0.7544, + "step": 2932 + }, + { + "epoch": 3.2444690265486726, + "grad_norm": 0.109375, + "learning_rate": 1.8888274336283188e-06, + "loss": 0.7397, + "step": 2933 + }, + { + "epoch": 3.245575221238938, + "grad_norm": 0.12060546875, + "learning_rate": 1.8860619469026551e-06, + "loss": 0.7969, + "step": 2934 + }, + { + "epoch": 3.2466814159292037, + "grad_norm": 0.1162109375, + "learning_rate": 1.8832964601769912e-06, + "loss": 0.738, + "step": 2935 + }, + { + "epoch": 3.247787610619469, + "grad_norm": 0.1142578125, + "learning_rate": 1.8805309734513274e-06, + "loss": 0.7945, + "step": 2936 + }, + { + "epoch": 3.2488938053097347, + "grad_norm": 0.1005859375, + "learning_rate": 1.877765486725664e-06, + "loss": 0.7263, + "step": 2937 + }, + { + "epoch": 3.25, + "grad_norm": 0.1083984375, + "learning_rate": 1.8750000000000003e-06, + "loss": 0.7602, + "step": 2938 + }, + { + "epoch": 3.2511061946902653, + "grad_norm": 0.10546875, + "learning_rate": 1.8722345132743364e-06, + "loss": 0.7344, + "step": 2939 + }, + { + "epoch": 3.252212389380531, + "grad_norm": 0.107421875, + "learning_rate": 1.869469026548673e-06, + "loss": 0.7388, + "step": 2940 + }, + { + "epoch": 3.2533185840707963, + "grad_norm": 0.1044921875, + "learning_rate": 1.866703539823009e-06, + "loss": 0.774, + "step": 2941 + }, + { + "epoch": 3.254424778761062, + "grad_norm": 0.11962890625, + "learning_rate": 1.8639380530973452e-06, + "loss": 0.7454, + "step": 2942 + }, + { + "epoch": 3.2555309734513274, + "grad_norm": 0.10693359375, + "learning_rate": 1.8611725663716815e-06, + "loss": 0.7708, + "step": 2943 + }, + { + "epoch": 3.256637168141593, + "grad_norm": 0.10693359375, + "learning_rate": 1.8584070796460179e-06, + "loss": 0.7667, + "step": 2944 + }, + { + "epoch": 3.2577433628318584, + "grad_norm": 0.10888671875, + "learning_rate": 1.8556415929203542e-06, + "loss": 0.7468, + "step": 2945 + }, + { + "epoch": 3.258849557522124, + "grad_norm": 0.11328125, + "learning_rate": 1.8528761061946903e-06, + "loss": 0.7674, + "step": 2946 + }, + { + "epoch": 3.2599557522123894, + "grad_norm": 0.1279296875, + "learning_rate": 1.8501106194690267e-06, + "loss": 0.7551, + "step": 2947 + }, + { + "epoch": 3.2610619469026547, + "grad_norm": 0.1083984375, + "learning_rate": 1.847345132743363e-06, + "loss": 0.7462, + "step": 2948 + }, + { + "epoch": 3.2621681415929205, + "grad_norm": 0.11083984375, + "learning_rate": 
1.8445796460176993e-06, + "loss": 0.7763, + "step": 2949 + }, + { + "epoch": 3.2632743362831858, + "grad_norm": 0.10302734375, + "learning_rate": 1.8418141592920355e-06, + "loss": 0.7796, + "step": 2950 + }, + { + "epoch": 3.2643805309734515, + "grad_norm": 0.12060546875, + "learning_rate": 1.8390486725663718e-06, + "loss": 0.7605, + "step": 2951 + }, + { + "epoch": 3.265486725663717, + "grad_norm": 0.1181640625, + "learning_rate": 1.8362831858407081e-06, + "loss": 0.7623, + "step": 2952 + }, + { + "epoch": 3.2665929203539825, + "grad_norm": 0.138671875, + "learning_rate": 1.8335176991150445e-06, + "loss": 0.7485, + "step": 2953 + }, + { + "epoch": 3.267699115044248, + "grad_norm": 0.10888671875, + "learning_rate": 1.8307522123893806e-06, + "loss": 0.7708, + "step": 2954 + }, + { + "epoch": 3.268805309734513, + "grad_norm": 0.130859375, + "learning_rate": 1.8279867256637172e-06, + "loss": 0.802, + "step": 2955 + }, + { + "epoch": 3.269911504424779, + "grad_norm": 0.1015625, + "learning_rate": 1.8252212389380533e-06, + "loss": 0.7382, + "step": 2956 + }, + { + "epoch": 3.271017699115044, + "grad_norm": 0.10400390625, + "learning_rate": 1.8224557522123896e-06, + "loss": 0.7506, + "step": 2957 + }, + { + "epoch": 3.27212389380531, + "grad_norm": 0.11328125, + "learning_rate": 1.8196902654867257e-06, + "loss": 0.8172, + "step": 2958 + }, + { + "epoch": 3.273230088495575, + "grad_norm": 0.109375, + "learning_rate": 1.8169247787610623e-06, + "loss": 0.7783, + "step": 2959 + }, + { + "epoch": 3.274336283185841, + "grad_norm": 0.10791015625, + "learning_rate": 1.8141592920353984e-06, + "loss": 0.7499, + "step": 2960 + }, + { + "epoch": 3.275442477876106, + "grad_norm": 0.107421875, + "learning_rate": 1.8113938053097345e-06, + "loss": 0.767, + "step": 2961 + }, + { + "epoch": 3.2765486725663715, + "grad_norm": 0.10791015625, + "learning_rate": 1.8086283185840709e-06, + "loss": 0.7402, + "step": 2962 + }, + { + "epoch": 3.2776548672566372, + "grad_norm": 0.11669921875, + "learning_rate": 1.8058628318584074e-06, + "loss": 0.7699, + "step": 2963 + }, + { + "epoch": 3.2787610619469025, + "grad_norm": 0.11474609375, + "learning_rate": 1.8030973451327436e-06, + "loss": 0.7677, + "step": 2964 + }, + { + "epoch": 3.2798672566371683, + "grad_norm": 0.10205078125, + "learning_rate": 1.8003318584070797e-06, + "loss": 0.7644, + "step": 2965 + }, + { + "epoch": 3.2809734513274336, + "grad_norm": 0.11767578125, + "learning_rate": 1.797566371681416e-06, + "loss": 0.8009, + "step": 2966 + }, + { + "epoch": 3.2820796460176993, + "grad_norm": 0.1083984375, + "learning_rate": 1.7948008849557524e-06, + "loss": 0.7547, + "step": 2967 + }, + { + "epoch": 3.2831858407079646, + "grad_norm": 0.1142578125, + "learning_rate": 1.7920353982300887e-06, + "loss": 0.7688, + "step": 2968 + }, + { + "epoch": 3.28429203539823, + "grad_norm": 0.09765625, + "learning_rate": 1.7892699115044248e-06, + "loss": 0.7168, + "step": 2969 + }, + { + "epoch": 3.2853982300884956, + "grad_norm": 0.12060546875, + "learning_rate": 1.7865044247787612e-06, + "loss": 0.7853, + "step": 2970 + }, + { + "epoch": 3.286504424778761, + "grad_norm": 0.125, + "learning_rate": 1.7837389380530975e-06, + "loss": 0.7943, + "step": 2971 + }, + { + "epoch": 3.2876106194690267, + "grad_norm": 0.10791015625, + "learning_rate": 1.7809734513274338e-06, + "loss": 0.7802, + "step": 2972 + }, + { + "epoch": 3.288716814159292, + "grad_norm": 0.11279296875, + "learning_rate": 1.77820796460177e-06, + "loss": 0.7915, + "step": 2973 + }, + { + "epoch": 3.2898230088495577, + 
"grad_norm": 0.1103515625, + "learning_rate": 1.7754424778761065e-06, + "loss": 0.7523, + "step": 2974 + }, + { + "epoch": 3.290929203539823, + "grad_norm": 0.12890625, + "learning_rate": 1.7726769911504426e-06, + "loss": 0.8403, + "step": 2975 + }, + { + "epoch": 3.2920353982300883, + "grad_norm": 0.11279296875, + "learning_rate": 1.769911504424779e-06, + "loss": 0.7436, + "step": 2976 + }, + { + "epoch": 3.293141592920354, + "grad_norm": 0.10302734375, + "learning_rate": 1.767146017699115e-06, + "loss": 0.7487, + "step": 2977 + }, + { + "epoch": 3.2942477876106193, + "grad_norm": 0.115234375, + "learning_rate": 1.7643805309734517e-06, + "loss": 0.7969, + "step": 2978 + }, + { + "epoch": 3.295353982300885, + "grad_norm": 0.126953125, + "learning_rate": 1.7616150442477878e-06, + "loss": 0.7556, + "step": 2979 + }, + { + "epoch": 3.2964601769911503, + "grad_norm": 0.0947265625, + "learning_rate": 1.758849557522124e-06, + "loss": 0.7382, + "step": 2980 + }, + { + "epoch": 3.297566371681416, + "grad_norm": 0.115234375, + "learning_rate": 1.7560840707964602e-06, + "loss": 0.7446, + "step": 2981 + }, + { + "epoch": 3.2986725663716814, + "grad_norm": 0.115234375, + "learning_rate": 1.7533185840707968e-06, + "loss": 0.7206, + "step": 2982 + }, + { + "epoch": 3.2997787610619467, + "grad_norm": 0.125, + "learning_rate": 1.750553097345133e-06, + "loss": 0.8439, + "step": 2983 + }, + { + "epoch": 3.3008849557522124, + "grad_norm": 0.11181640625, + "learning_rate": 1.747787610619469e-06, + "loss": 0.7918, + "step": 2984 + }, + { + "epoch": 3.3019911504424777, + "grad_norm": 0.10009765625, + "learning_rate": 1.7450221238938054e-06, + "loss": 0.7106, + "step": 2985 + }, + { + "epoch": 3.3030973451327434, + "grad_norm": 0.1064453125, + "learning_rate": 1.7422566371681417e-06, + "loss": 0.764, + "step": 2986 + }, + { + "epoch": 3.3042035398230087, + "grad_norm": 0.11474609375, + "learning_rate": 1.739491150442478e-06, + "loss": 0.7522, + "step": 2987 + }, + { + "epoch": 3.3053097345132745, + "grad_norm": 0.10107421875, + "learning_rate": 1.7367256637168142e-06, + "loss": 0.746, + "step": 2988 + }, + { + "epoch": 3.3064159292035398, + "grad_norm": 0.1044921875, + "learning_rate": 1.7339601769911507e-06, + "loss": 0.7917, + "step": 2989 + }, + { + "epoch": 3.307522123893805, + "grad_norm": 0.10498046875, + "learning_rate": 1.7311946902654869e-06, + "loss": 0.7682, + "step": 2990 + }, + { + "epoch": 3.308628318584071, + "grad_norm": 0.111328125, + "learning_rate": 1.7284292035398232e-06, + "loss": 0.7659, + "step": 2991 + }, + { + "epoch": 3.309734513274336, + "grad_norm": 0.10986328125, + "learning_rate": 1.7256637168141593e-06, + "loss": 0.7351, + "step": 2992 + }, + { + "epoch": 3.310840707964602, + "grad_norm": 0.1162109375, + "learning_rate": 1.7228982300884959e-06, + "loss": 0.756, + "step": 2993 + }, + { + "epoch": 3.311946902654867, + "grad_norm": 0.11669921875, + "learning_rate": 1.720132743362832e-06, + "loss": 0.8222, + "step": 2994 + }, + { + "epoch": 3.313053097345133, + "grad_norm": 0.1181640625, + "learning_rate": 1.7173672566371683e-06, + "loss": 0.7635, + "step": 2995 + }, + { + "epoch": 3.314159292035398, + "grad_norm": 0.12060546875, + "learning_rate": 1.7146017699115045e-06, + "loss": 0.7967, + "step": 2996 + }, + { + "epoch": 3.315265486725664, + "grad_norm": 0.1044921875, + "learning_rate": 1.711836283185841e-06, + "loss": 0.7629, + "step": 2997 + }, + { + "epoch": 3.316371681415929, + "grad_norm": 0.111328125, + "learning_rate": 1.7090707964601771e-06, + "loss": 0.7521, + "step": 2998 
+ }, + { + "epoch": 3.317477876106195, + "grad_norm": 0.10302734375, + "learning_rate": 1.7063053097345133e-06, + "loss": 0.7631, + "step": 2999 + }, + { + "epoch": 3.3185840707964602, + "grad_norm": 0.12109375, + "learning_rate": 1.7035398230088496e-06, + "loss": 0.8189, + "step": 3000 + }, + { + "epoch": 3.3196902654867255, + "grad_norm": 0.10595703125, + "learning_rate": 1.7007743362831862e-06, + "loss": 0.7463, + "step": 3001 + }, + { + "epoch": 3.3207964601769913, + "grad_norm": 0.10693359375, + "learning_rate": 1.6980088495575223e-06, + "loss": 0.7441, + "step": 3002 + }, + { + "epoch": 3.3219026548672566, + "grad_norm": 0.11279296875, + "learning_rate": 1.6952433628318584e-06, + "loss": 0.7705, + "step": 3003 + }, + { + "epoch": 3.3230088495575223, + "grad_norm": 0.1083984375, + "learning_rate": 1.692477876106195e-06, + "loss": 0.7763, + "step": 3004 + }, + { + "epoch": 3.3241150442477876, + "grad_norm": 0.10888671875, + "learning_rate": 1.689712389380531e-06, + "loss": 0.7616, + "step": 3005 + }, + { + "epoch": 3.3252212389380533, + "grad_norm": 0.1044921875, + "learning_rate": 1.6869469026548674e-06, + "loss": 0.7593, + "step": 3006 + }, + { + "epoch": 3.3263274336283186, + "grad_norm": 0.10595703125, + "learning_rate": 1.6841814159292035e-06, + "loss": 0.7687, + "step": 3007 + }, + { + "epoch": 3.327433628318584, + "grad_norm": 0.1171875, + "learning_rate": 1.68141592920354e-06, + "loss": 0.7595, + "step": 3008 + }, + { + "epoch": 3.3285398230088497, + "grad_norm": 0.11962890625, + "learning_rate": 1.6786504424778762e-06, + "loss": 0.8121, + "step": 3009 + }, + { + "epoch": 3.329646017699115, + "grad_norm": 0.1240234375, + "learning_rate": 1.6758849557522126e-06, + "loss": 0.8113, + "step": 3010 + }, + { + "epoch": 3.3307522123893807, + "grad_norm": 0.1494140625, + "learning_rate": 1.6731194690265487e-06, + "loss": 0.8839, + "step": 3011 + }, + { + "epoch": 3.331858407079646, + "grad_norm": 0.12158203125, + "learning_rate": 1.6703539823008852e-06, + "loss": 0.7943, + "step": 3012 + }, + { + "epoch": 3.3329646017699117, + "grad_norm": 0.1103515625, + "learning_rate": 1.6675884955752214e-06, + "loss": 0.7401, + "step": 3013 + }, + { + "epoch": 3.334070796460177, + "grad_norm": 0.10791015625, + "learning_rate": 1.6648230088495577e-06, + "loss": 0.7391, + "step": 3014 + }, + { + "epoch": 3.3351769911504423, + "grad_norm": 0.10205078125, + "learning_rate": 1.6620575221238938e-06, + "loss": 0.7725, + "step": 3015 + }, + { + "epoch": 3.336283185840708, + "grad_norm": 0.11572265625, + "learning_rate": 1.6592920353982304e-06, + "loss": 0.8269, + "step": 3016 + }, + { + "epoch": 3.3373893805309733, + "grad_norm": 0.11767578125, + "learning_rate": 1.6565265486725665e-06, + "loss": 0.8016, + "step": 3017 + }, + { + "epoch": 3.338495575221239, + "grad_norm": 0.11279296875, + "learning_rate": 1.6537610619469026e-06, + "loss": 0.7656, + "step": 3018 + }, + { + "epoch": 3.3396017699115044, + "grad_norm": 0.12060546875, + "learning_rate": 1.6509955752212392e-06, + "loss": 0.8293, + "step": 3019 + }, + { + "epoch": 3.34070796460177, + "grad_norm": 0.1083984375, + "learning_rate": 1.6482300884955755e-06, + "loss": 0.7724, + "step": 3020 + }, + { + "epoch": 3.3418141592920354, + "grad_norm": 0.1337890625, + "learning_rate": 1.6454646017699116e-06, + "loss": 0.8, + "step": 3021 + }, + { + "epoch": 3.3429203539823007, + "grad_norm": 0.10302734375, + "learning_rate": 1.6426991150442478e-06, + "loss": 0.7376, + "step": 3022 + }, + { + "epoch": 3.3440265486725664, + "grad_norm": 0.1044921875, + 
"learning_rate": 1.6399336283185843e-06, + "loss": 0.7594, + "step": 3023 + }, + { + "epoch": 3.3451327433628317, + "grad_norm": 0.10009765625, + "learning_rate": 1.6371681415929204e-06, + "loss": 0.7712, + "step": 3024 + }, + { + "epoch": 3.3462389380530975, + "grad_norm": 0.142578125, + "learning_rate": 1.6344026548672568e-06, + "loss": 0.7826, + "step": 3025 + }, + { + "epoch": 3.3473451327433628, + "grad_norm": 0.130859375, + "learning_rate": 1.631637168141593e-06, + "loss": 0.8136, + "step": 3026 + }, + { + "epoch": 3.3484513274336285, + "grad_norm": 0.10791015625, + "learning_rate": 1.6288716814159295e-06, + "loss": 0.7503, + "step": 3027 + }, + { + "epoch": 3.349557522123894, + "grad_norm": 0.11572265625, + "learning_rate": 1.6261061946902656e-06, + "loss": 0.7463, + "step": 3028 + }, + { + "epoch": 3.350663716814159, + "grad_norm": 0.107421875, + "learning_rate": 1.623340707964602e-06, + "loss": 0.7692, + "step": 3029 + }, + { + "epoch": 3.351769911504425, + "grad_norm": 0.1240234375, + "learning_rate": 1.620575221238938e-06, + "loss": 0.7478, + "step": 3030 + }, + { + "epoch": 3.35287610619469, + "grad_norm": 0.11083984375, + "learning_rate": 1.6178097345132746e-06, + "loss": 0.8248, + "step": 3031 + }, + { + "epoch": 3.353982300884956, + "grad_norm": 0.19140625, + "learning_rate": 1.6150442477876107e-06, + "loss": 0.8745, + "step": 3032 + }, + { + "epoch": 3.355088495575221, + "grad_norm": 0.10986328125, + "learning_rate": 1.612278761061947e-06, + "loss": 0.7555, + "step": 3033 + }, + { + "epoch": 3.356194690265487, + "grad_norm": 0.11767578125, + "learning_rate": 1.6095132743362832e-06, + "loss": 0.7665, + "step": 3034 + }, + { + "epoch": 3.357300884955752, + "grad_norm": 0.1103515625, + "learning_rate": 1.6067477876106197e-06, + "loss": 0.7723, + "step": 3035 + }, + { + "epoch": 3.3584070796460175, + "grad_norm": 0.10205078125, + "learning_rate": 1.6039823008849559e-06, + "loss": 0.7564, + "step": 3036 + }, + { + "epoch": 3.359513274336283, + "grad_norm": 0.09912109375, + "learning_rate": 1.6012168141592922e-06, + "loss": 0.7207, + "step": 3037 + }, + { + "epoch": 3.3606194690265485, + "grad_norm": 0.109375, + "learning_rate": 1.5984513274336285e-06, + "loss": 0.7483, + "step": 3038 + }, + { + "epoch": 3.3617256637168142, + "grad_norm": 0.111328125, + "learning_rate": 1.5956858407079649e-06, + "loss": 0.7782, + "step": 3039 + }, + { + "epoch": 3.3628318584070795, + "grad_norm": 0.11767578125, + "learning_rate": 1.592920353982301e-06, + "loss": 0.7785, + "step": 3040 + }, + { + "epoch": 3.3639380530973453, + "grad_norm": 0.11767578125, + "learning_rate": 1.5901548672566371e-06, + "loss": 0.7876, + "step": 3041 + }, + { + "epoch": 3.3650442477876106, + "grad_norm": 0.119140625, + "learning_rate": 1.5873893805309737e-06, + "loss": 0.8199, + "step": 3042 + }, + { + "epoch": 3.366150442477876, + "grad_norm": 0.11962890625, + "learning_rate": 1.5846238938053098e-06, + "loss": 0.8004, + "step": 3043 + }, + { + "epoch": 3.3672566371681416, + "grad_norm": 0.11767578125, + "learning_rate": 1.5818584070796461e-06, + "loss": 0.7821, + "step": 3044 + }, + { + "epoch": 3.368362831858407, + "grad_norm": 0.119140625, + "learning_rate": 1.5790929203539823e-06, + "loss": 0.8074, + "step": 3045 + }, + { + "epoch": 3.3694690265486726, + "grad_norm": 0.1083984375, + "learning_rate": 1.5763274336283188e-06, + "loss": 0.7672, + "step": 3046 + }, + { + "epoch": 3.370575221238938, + "grad_norm": 0.11376953125, + "learning_rate": 1.573561946902655e-06, + "loss": 0.7739, + "step": 3047 + }, + { + 
"epoch": 3.3716814159292037, + "grad_norm": 0.11279296875, + "learning_rate": 1.5707964601769913e-06, + "loss": 0.7628, + "step": 3048 + }, + { + "epoch": 3.372787610619469, + "grad_norm": 0.11328125, + "learning_rate": 1.5680309734513274e-06, + "loss": 0.7639, + "step": 3049 + }, + { + "epoch": 3.3738938053097347, + "grad_norm": 0.10400390625, + "learning_rate": 1.565265486725664e-06, + "loss": 0.7579, + "step": 3050 + }, + { + "epoch": 3.375, + "grad_norm": 0.1162109375, + "learning_rate": 1.5625e-06, + "loss": 0.7965, + "step": 3051 + }, + { + "epoch": 3.3761061946902653, + "grad_norm": 0.11669921875, + "learning_rate": 1.5597345132743364e-06, + "loss": 0.8198, + "step": 3052 + }, + { + "epoch": 3.377212389380531, + "grad_norm": 0.13671875, + "learning_rate": 1.5569690265486728e-06, + "loss": 0.7436, + "step": 3053 + }, + { + "epoch": 3.3783185840707963, + "grad_norm": 0.138671875, + "learning_rate": 1.554203539823009e-06, + "loss": 0.7583, + "step": 3054 + }, + { + "epoch": 3.379424778761062, + "grad_norm": 0.111328125, + "learning_rate": 1.5514380530973452e-06, + "loss": 0.751, + "step": 3055 + }, + { + "epoch": 3.3805309734513274, + "grad_norm": 0.11181640625, + "learning_rate": 1.5486725663716816e-06, + "loss": 0.8007, + "step": 3056 + }, + { + "epoch": 3.381637168141593, + "grad_norm": 0.10546875, + "learning_rate": 1.5459070796460179e-06, + "loss": 0.7488, + "step": 3057 + }, + { + "epoch": 3.3827433628318584, + "grad_norm": 0.1298828125, + "learning_rate": 1.5431415929203542e-06, + "loss": 0.7603, + "step": 3058 + }, + { + "epoch": 3.383849557522124, + "grad_norm": 0.10595703125, + "learning_rate": 1.5403761061946904e-06, + "loss": 0.767, + "step": 3059 + }, + { + "epoch": 3.3849557522123894, + "grad_norm": 0.10498046875, + "learning_rate": 1.5376106194690265e-06, + "loss": 0.7882, + "step": 3060 + }, + { + "epoch": 3.3860619469026547, + "grad_norm": 0.1015625, + "learning_rate": 1.534845132743363e-06, + "loss": 0.7316, + "step": 3061 + }, + { + "epoch": 3.3871681415929205, + "grad_norm": 0.11962890625, + "learning_rate": 1.5320796460176994e-06, + "loss": 0.7543, + "step": 3062 + }, + { + "epoch": 3.3882743362831858, + "grad_norm": 0.12353515625, + "learning_rate": 1.5293141592920355e-06, + "loss": 0.818, + "step": 3063 + }, + { + "epoch": 3.3893805309734515, + "grad_norm": 0.12060546875, + "learning_rate": 1.5265486725663716e-06, + "loss": 0.7762, + "step": 3064 + }, + { + "epoch": 3.390486725663717, + "grad_norm": 0.119140625, + "learning_rate": 1.5237831858407082e-06, + "loss": 0.7873, + "step": 3065 + }, + { + "epoch": 3.3915929203539825, + "grad_norm": 0.1259765625, + "learning_rate": 1.5210176991150443e-06, + "loss": 0.7977, + "step": 3066 + }, + { + "epoch": 3.392699115044248, + "grad_norm": 0.109375, + "learning_rate": 1.5182522123893806e-06, + "loss": 0.8028, + "step": 3067 + }, + { + "epoch": 3.393805309734513, + "grad_norm": 0.11181640625, + "learning_rate": 1.515486725663717e-06, + "loss": 0.7551, + "step": 3068 + }, + { + "epoch": 3.394911504424779, + "grad_norm": 0.12109375, + "learning_rate": 1.5127212389380533e-06, + "loss": 0.7861, + "step": 3069 + }, + { + "epoch": 3.396017699115044, + "grad_norm": 0.11669921875, + "learning_rate": 1.5099557522123894e-06, + "loss": 0.8023, + "step": 3070 + }, + { + "epoch": 3.39712389380531, + "grad_norm": 0.11279296875, + "learning_rate": 1.5071902654867258e-06, + "loss": 0.7769, + "step": 3071 + }, + { + "epoch": 3.398230088495575, + "grad_norm": 0.1279296875, + "learning_rate": 1.5044247787610621e-06, + "loss": 0.8126, + 
"step": 3072 + }, + { + "epoch": 3.399336283185841, + "grad_norm": 0.12109375, + "learning_rate": 1.5016592920353984e-06, + "loss": 0.6925, + "step": 3073 + }, + { + "epoch": 3.400442477876106, + "grad_norm": 0.103515625, + "learning_rate": 1.4988938053097346e-06, + "loss": 0.7322, + "step": 3074 + }, + { + "epoch": 3.4015486725663715, + "grad_norm": 0.1123046875, + "learning_rate": 1.496128318584071e-06, + "loss": 0.7938, + "step": 3075 + }, + { + "epoch": 3.4026548672566372, + "grad_norm": 0.109375, + "learning_rate": 1.4933628318584072e-06, + "loss": 0.7724, + "step": 3076 + }, + { + "epoch": 3.4037610619469025, + "grad_norm": 0.11181640625, + "learning_rate": 1.4905973451327436e-06, + "loss": 0.7471, + "step": 3077 + }, + { + "epoch": 3.4048672566371683, + "grad_norm": 0.1044921875, + "learning_rate": 1.4878318584070797e-06, + "loss": 0.7635, + "step": 3078 + }, + { + "epoch": 3.4059734513274336, + "grad_norm": 0.1162109375, + "learning_rate": 1.4850663716814158e-06, + "loss": 0.8145, + "step": 3079 + }, + { + "epoch": 3.4070796460176993, + "grad_norm": 0.1171875, + "learning_rate": 1.4823008849557524e-06, + "loss": 0.7641, + "step": 3080 + }, + { + "epoch": 3.4081858407079646, + "grad_norm": 0.10546875, + "learning_rate": 1.4795353982300887e-06, + "loss": 0.7644, + "step": 3081 + }, + { + "epoch": 3.40929203539823, + "grad_norm": 0.10595703125, + "learning_rate": 1.4767699115044249e-06, + "loss": 0.7885, + "step": 3082 + }, + { + "epoch": 3.4103982300884956, + "grad_norm": 0.1259765625, + "learning_rate": 1.474004424778761e-06, + "loss": 0.7944, + "step": 3083 + }, + { + "epoch": 3.411504424778761, + "grad_norm": 0.11376953125, + "learning_rate": 1.4712389380530975e-06, + "loss": 0.7514, + "step": 3084 + }, + { + "epoch": 3.4126106194690267, + "grad_norm": 0.1142578125, + "learning_rate": 1.4684734513274337e-06, + "loss": 0.787, + "step": 3085 + }, + { + "epoch": 3.413716814159292, + "grad_norm": 0.1640625, + "learning_rate": 1.46570796460177e-06, + "loss": 0.7894, + "step": 3086 + }, + { + "epoch": 3.4148230088495577, + "grad_norm": 0.09912109375, + "learning_rate": 1.4629424778761065e-06, + "loss": 0.7489, + "step": 3087 + }, + { + "epoch": 3.415929203539823, + "grad_norm": 0.11181640625, + "learning_rate": 1.4601769911504427e-06, + "loss": 0.7375, + "step": 3088 + }, + { + "epoch": 3.4170353982300883, + "grad_norm": 0.1123046875, + "learning_rate": 1.4574115044247788e-06, + "loss": 0.7296, + "step": 3089 + }, + { + "epoch": 3.418141592920354, + "grad_norm": 0.1162109375, + "learning_rate": 1.4546460176991151e-06, + "loss": 0.7683, + "step": 3090 + }, + { + "epoch": 3.4192477876106193, + "grad_norm": 0.1162109375, + "learning_rate": 1.4518805309734515e-06, + "loss": 0.7462, + "step": 3091 + }, + { + "epoch": 3.420353982300885, + "grad_norm": 0.1162109375, + "learning_rate": 1.4491150442477878e-06, + "loss": 0.7746, + "step": 3092 + }, + { + "epoch": 3.4214601769911503, + "grad_norm": 0.125, + "learning_rate": 1.446349557522124e-06, + "loss": 0.7948, + "step": 3093 + }, + { + "epoch": 3.422566371681416, + "grad_norm": 0.1064453125, + "learning_rate": 1.4435840707964603e-06, + "loss": 0.7759, + "step": 3094 + }, + { + "epoch": 3.4236725663716814, + "grad_norm": 0.1171875, + "learning_rate": 1.4408185840707966e-06, + "loss": 0.7402, + "step": 3095 + }, + { + "epoch": 3.4247787610619467, + "grad_norm": 0.1005859375, + "learning_rate": 1.438053097345133e-06, + "loss": 0.7578, + "step": 3096 + }, + { + "epoch": 3.4258849557522124, + "grad_norm": 0.130859375, + "learning_rate": 
1.435287610619469e-06, + "loss": 0.788, + "step": 3097 + }, + { + "epoch": 3.4269911504424777, + "grad_norm": 0.11669921875, + "learning_rate": 1.4325221238938052e-06, + "loss": 0.7669, + "step": 3098 + }, + { + "epoch": 3.4280973451327434, + "grad_norm": 0.1181640625, + "learning_rate": 1.4297566371681417e-06, + "loss": 0.783, + "step": 3099 + }, + { + "epoch": 3.4292035398230087, + "grad_norm": 0.10693359375, + "learning_rate": 1.426991150442478e-06, + "loss": 0.7729, + "step": 3100 + }, + { + "epoch": 3.4303097345132745, + "grad_norm": 0.11474609375, + "learning_rate": 1.4242256637168142e-06, + "loss": 0.7259, + "step": 3101 + }, + { + "epoch": 3.4314159292035398, + "grad_norm": 0.111328125, + "learning_rate": 1.4214601769911508e-06, + "loss": 0.7694, + "step": 3102 + }, + { + "epoch": 3.432522123893805, + "grad_norm": 0.10595703125, + "learning_rate": 1.4186946902654869e-06, + "loss": 0.7564, + "step": 3103 + }, + { + "epoch": 3.433628318584071, + "grad_norm": 0.1103515625, + "learning_rate": 1.415929203539823e-06, + "loss": 0.7785, + "step": 3104 + }, + { + "epoch": 3.434734513274336, + "grad_norm": 0.11669921875, + "learning_rate": 1.4131637168141594e-06, + "loss": 0.7685, + "step": 3105 + }, + { + "epoch": 3.435840707964602, + "grad_norm": 0.10888671875, + "learning_rate": 1.410398230088496e-06, + "loss": 0.7482, + "step": 3106 + }, + { + "epoch": 3.436946902654867, + "grad_norm": 0.1220703125, + "learning_rate": 1.407632743362832e-06, + "loss": 0.7776, + "step": 3107 + }, + { + "epoch": 3.438053097345133, + "grad_norm": 0.1162109375, + "learning_rate": 1.4048672566371682e-06, + "loss": 0.8133, + "step": 3108 + }, + { + "epoch": 3.439159292035398, + "grad_norm": 0.11181640625, + "learning_rate": 1.4021017699115045e-06, + "loss": 0.7506, + "step": 3109 + }, + { + "epoch": 3.440265486725664, + "grad_norm": 0.1162109375, + "learning_rate": 1.3993362831858408e-06, + "loss": 0.774, + "step": 3110 + }, + { + "epoch": 3.441371681415929, + "grad_norm": 0.11767578125, + "learning_rate": 1.3965707964601772e-06, + "loss": 0.8146, + "step": 3111 + }, + { + "epoch": 3.442477876106195, + "grad_norm": 0.1162109375, + "learning_rate": 1.3938053097345133e-06, + "loss": 0.7578, + "step": 3112 + }, + { + "epoch": 3.4435840707964602, + "grad_norm": 0.11376953125, + "learning_rate": 1.3910398230088496e-06, + "loss": 0.7311, + "step": 3113 + }, + { + "epoch": 3.4446902654867255, + "grad_norm": 0.1328125, + "learning_rate": 1.388274336283186e-06, + "loss": 0.753, + "step": 3114 + }, + { + "epoch": 3.4457964601769913, + "grad_norm": 0.10009765625, + "learning_rate": 1.3855088495575223e-06, + "loss": 0.7302, + "step": 3115 + }, + { + "epoch": 3.4469026548672566, + "grad_norm": 0.115234375, + "learning_rate": 1.3827433628318584e-06, + "loss": 0.7934, + "step": 3116 + }, + { + "epoch": 3.4480088495575223, + "grad_norm": 0.103515625, + "learning_rate": 1.379977876106195e-06, + "loss": 0.7162, + "step": 3117 + }, + { + "epoch": 3.4491150442477876, + "grad_norm": 0.11865234375, + "learning_rate": 1.377212389380531e-06, + "loss": 0.789, + "step": 3118 + }, + { + "epoch": 3.4502212389380533, + "grad_norm": 0.1044921875, + "learning_rate": 1.3744469026548674e-06, + "loss": 0.796, + "step": 3119 + }, + { + "epoch": 3.4513274336283186, + "grad_norm": 0.10791015625, + "learning_rate": 1.3716814159292036e-06, + "loss": 0.7788, + "step": 3120 + }, + { + "epoch": 3.452433628318584, + "grad_norm": 0.11376953125, + "learning_rate": 1.3689159292035401e-06, + "loss": 0.8145, + "step": 3121 + }, + { + "epoch": 
3.4535398230088497, + "grad_norm": 0.11669921875, + "learning_rate": 1.3661504424778762e-06, + "loss": 0.7648, + "step": 3122 + }, + { + "epoch": 3.454646017699115, + "grad_norm": 0.09716796875, + "learning_rate": 1.3633849557522124e-06, + "loss": 0.7182, + "step": 3123 + }, + { + "epoch": 3.4557522123893807, + "grad_norm": 0.10888671875, + "learning_rate": 1.3606194690265487e-06, + "loss": 0.7394, + "step": 3124 + }, + { + "epoch": 3.456858407079646, + "grad_norm": 0.1357421875, + "learning_rate": 1.3578539823008853e-06, + "loss": 0.7572, + "step": 3125 + }, + { + "epoch": 3.4579646017699117, + "grad_norm": 0.11083984375, + "learning_rate": 1.3550884955752214e-06, + "loss": 0.7561, + "step": 3126 + }, + { + "epoch": 3.459070796460177, + "grad_norm": 0.119140625, + "learning_rate": 1.3523230088495575e-06, + "loss": 0.7885, + "step": 3127 + }, + { + "epoch": 3.4601769911504423, + "grad_norm": 0.11669921875, + "learning_rate": 1.3495575221238938e-06, + "loss": 0.7643, + "step": 3128 + }, + { + "epoch": 3.461283185840708, + "grad_norm": 0.1171875, + "learning_rate": 1.3467920353982302e-06, + "loss": 0.744, + "step": 3129 + }, + { + "epoch": 3.4623893805309733, + "grad_norm": 0.10400390625, + "learning_rate": 1.3440265486725665e-06, + "loss": 0.728, + "step": 3130 + }, + { + "epoch": 3.463495575221239, + "grad_norm": 0.12060546875, + "learning_rate": 1.3412610619469026e-06, + "loss": 0.7464, + "step": 3131 + }, + { + "epoch": 3.4646017699115044, + "grad_norm": 0.1328125, + "learning_rate": 1.3384955752212392e-06, + "loss": 0.7714, + "step": 3132 + }, + { + "epoch": 3.46570796460177, + "grad_norm": 0.11279296875, + "learning_rate": 1.3357300884955753e-06, + "loss": 0.7504, + "step": 3133 + }, + { + "epoch": 3.4668141592920354, + "grad_norm": 0.11279296875, + "learning_rate": 1.3329646017699117e-06, + "loss": 0.7745, + "step": 3134 + }, + { + "epoch": 3.4679203539823007, + "grad_norm": 0.111328125, + "learning_rate": 1.3301991150442478e-06, + "loss": 0.7698, + "step": 3135 + }, + { + "epoch": 3.4690265486725664, + "grad_norm": 0.12158203125, + "learning_rate": 1.3274336283185843e-06, + "loss": 0.7525, + "step": 3136 + }, + { + "epoch": 3.4701327433628317, + "grad_norm": 0.09912109375, + "learning_rate": 1.3246681415929205e-06, + "loss": 0.7358, + "step": 3137 + }, + { + "epoch": 3.4712389380530975, + "grad_norm": 0.10595703125, + "learning_rate": 1.3219026548672568e-06, + "loss": 0.7834, + "step": 3138 + }, + { + "epoch": 3.4723451327433628, + "grad_norm": 0.115234375, + "learning_rate": 1.319137168141593e-06, + "loss": 0.7735, + "step": 3139 + }, + { + "epoch": 3.4734513274336285, + "grad_norm": 0.11181640625, + "learning_rate": 1.3163716814159295e-06, + "loss": 0.7444, + "step": 3140 + }, + { + "epoch": 3.474557522123894, + "grad_norm": 0.1064453125, + "learning_rate": 1.3136061946902656e-06, + "loss": 0.7934, + "step": 3141 + }, + { + "epoch": 3.475663716814159, + "grad_norm": 0.11669921875, + "learning_rate": 1.3108407079646017e-06, + "loss": 0.8013, + "step": 3142 + }, + { + "epoch": 3.476769911504425, + "grad_norm": 0.138671875, + "learning_rate": 1.308075221238938e-06, + "loss": 0.8086, + "step": 3143 + }, + { + "epoch": 3.47787610619469, + "grad_norm": 0.1103515625, + "learning_rate": 1.3053097345132746e-06, + "loss": 0.8043, + "step": 3144 + }, + { + "epoch": 3.478982300884956, + "grad_norm": 0.1123046875, + "learning_rate": 1.3025442477876107e-06, + "loss": 0.7868, + "step": 3145 + }, + { + "epoch": 3.480088495575221, + "grad_norm": 0.11181640625, + "learning_rate": 
1.2997787610619469e-06, + "loss": 0.7846, + "step": 3146 + }, + { + "epoch": 3.481194690265487, + "grad_norm": 0.11083984375, + "learning_rate": 1.2970132743362832e-06, + "loss": 0.7728, + "step": 3147 + }, + { + "epoch": 3.482300884955752, + "grad_norm": 0.099609375, + "learning_rate": 1.2942477876106195e-06, + "loss": 0.7356, + "step": 3148 + }, + { + "epoch": 3.4834070796460175, + "grad_norm": 0.1162109375, + "learning_rate": 1.2914823008849559e-06, + "loss": 0.7895, + "step": 3149 + }, + { + "epoch": 3.484513274336283, + "grad_norm": 0.10791015625, + "learning_rate": 1.288716814159292e-06, + "loss": 0.7684, + "step": 3150 + }, + { + "epoch": 3.4856194690265485, + "grad_norm": 0.10791015625, + "learning_rate": 1.2859513274336286e-06, + "loss": 0.7624, + "step": 3151 + }, + { + "epoch": 3.4867256637168142, + "grad_norm": 0.11279296875, + "learning_rate": 1.2831858407079647e-06, + "loss": 0.7947, + "step": 3152 + }, + { + "epoch": 3.4878318584070795, + "grad_norm": 0.10986328125, + "learning_rate": 1.280420353982301e-06, + "loss": 0.7658, + "step": 3153 + }, + { + "epoch": 3.4889380530973453, + "grad_norm": 0.171875, + "learning_rate": 1.2776548672566371e-06, + "loss": 0.7711, + "step": 3154 + }, + { + "epoch": 3.4900442477876106, + "grad_norm": 0.10205078125, + "learning_rate": 1.2748893805309737e-06, + "loss": 0.7283, + "step": 3155 + }, + { + "epoch": 3.491150442477876, + "grad_norm": 0.1103515625, + "learning_rate": 1.2721238938053098e-06, + "loss": 0.7852, + "step": 3156 + }, + { + "epoch": 3.4922566371681416, + "grad_norm": 0.1044921875, + "learning_rate": 1.2693584070796462e-06, + "loss": 0.7342, + "step": 3157 + }, + { + "epoch": 3.493362831858407, + "grad_norm": 0.14453125, + "learning_rate": 1.2665929203539823e-06, + "loss": 0.7884, + "step": 3158 + }, + { + "epoch": 3.4944690265486726, + "grad_norm": 0.115234375, + "learning_rate": 1.2638274336283188e-06, + "loss": 0.7812, + "step": 3159 + }, + { + "epoch": 3.495575221238938, + "grad_norm": 0.10791015625, + "learning_rate": 1.261061946902655e-06, + "loss": 0.7284, + "step": 3160 + }, + { + "epoch": 3.4966814159292037, + "grad_norm": 0.10546875, + "learning_rate": 1.2582964601769913e-06, + "loss": 0.7506, + "step": 3161 + }, + { + "epoch": 3.497787610619469, + "grad_norm": 0.11669921875, + "learning_rate": 1.2555309734513274e-06, + "loss": 0.7465, + "step": 3162 + }, + { + "epoch": 3.4988938053097347, + "grad_norm": 0.10888671875, + "learning_rate": 1.252765486725664e-06, + "loss": 0.7455, + "step": 3163 + }, + { + "epoch": 3.5, + "grad_norm": 0.10400390625, + "learning_rate": 1.25e-06, + "loss": 0.7328, + "step": 3164 + }, + { + "epoch": 3.5011061946902657, + "grad_norm": 0.1171875, + "learning_rate": 1.2472345132743364e-06, + "loss": 0.7815, + "step": 3165 + }, + { + "epoch": 3.502212389380531, + "grad_norm": 0.12109375, + "learning_rate": 1.2444690265486726e-06, + "loss": 0.7559, + "step": 3166 + }, + { + "epoch": 3.5033185840707963, + "grad_norm": 0.11474609375, + "learning_rate": 1.241703539823009e-06, + "loss": 0.7893, + "step": 3167 + }, + { + "epoch": 3.504424778761062, + "grad_norm": 0.1279296875, + "learning_rate": 1.2389380530973452e-06, + "loss": 0.8277, + "step": 3168 + }, + { + "epoch": 3.5055309734513274, + "grad_norm": 0.10595703125, + "learning_rate": 1.2361725663716816e-06, + "loss": 0.784, + "step": 3169 + }, + { + "epoch": 3.5066371681415927, + "grad_norm": 0.11474609375, + "learning_rate": 1.2334070796460177e-06, + "loss": 0.7988, + "step": 3170 + }, + { + "epoch": 3.5077433628318584, + "grad_norm": 
0.11669921875, + "learning_rate": 1.230641592920354e-06, + "loss": 0.7549, + "step": 3171 + }, + { + "epoch": 3.508849557522124, + "grad_norm": 0.10205078125, + "learning_rate": 1.2278761061946904e-06, + "loss": 0.7492, + "step": 3172 + }, + { + "epoch": 3.5099557522123894, + "grad_norm": 0.11669921875, + "learning_rate": 1.2251106194690267e-06, + "loss": 0.8062, + "step": 3173 + }, + { + "epoch": 3.5110619469026547, + "grad_norm": 0.10107421875, + "learning_rate": 1.2223451327433628e-06, + "loss": 0.7559, + "step": 3174 + }, + { + "epoch": 3.5121681415929205, + "grad_norm": 0.10302734375, + "learning_rate": 1.2195796460176992e-06, + "loss": 0.7651, + "step": 3175 + }, + { + "epoch": 3.5132743362831858, + "grad_norm": 0.115234375, + "learning_rate": 1.2168141592920355e-06, + "loss": 0.775, + "step": 3176 + }, + { + "epoch": 3.5143805309734515, + "grad_norm": 0.10400390625, + "learning_rate": 1.2140486725663719e-06, + "loss": 0.7448, + "step": 3177 + }, + { + "epoch": 3.515486725663717, + "grad_norm": 0.11962890625, + "learning_rate": 1.211283185840708e-06, + "loss": 0.7734, + "step": 3178 + }, + { + "epoch": 3.5165929203539825, + "grad_norm": 0.10693359375, + "learning_rate": 1.2085176991150443e-06, + "loss": 0.7844, + "step": 3179 + }, + { + "epoch": 3.517699115044248, + "grad_norm": 0.0966796875, + "learning_rate": 1.2057522123893807e-06, + "loss": 0.7549, + "step": 3180 + }, + { + "epoch": 3.518805309734513, + "grad_norm": 0.1025390625, + "learning_rate": 1.202986725663717e-06, + "loss": 0.7611, + "step": 3181 + }, + { + "epoch": 3.519911504424779, + "grad_norm": 0.109375, + "learning_rate": 1.2002212389380533e-06, + "loss": 0.7597, + "step": 3182 + }, + { + "epoch": 3.521017699115044, + "grad_norm": 0.099609375, + "learning_rate": 1.1974557522123895e-06, + "loss": 0.7502, + "step": 3183 + }, + { + "epoch": 3.52212389380531, + "grad_norm": 0.150390625, + "learning_rate": 1.1946902654867258e-06, + "loss": 0.7852, + "step": 3184 + }, + { + "epoch": 3.523230088495575, + "grad_norm": 0.11669921875, + "learning_rate": 1.191924778761062e-06, + "loss": 0.7705, + "step": 3185 + }, + { + "epoch": 3.524336283185841, + "grad_norm": 0.1279296875, + "learning_rate": 1.1891592920353985e-06, + "loss": 0.8095, + "step": 3186 + }, + { + "epoch": 3.525442477876106, + "grad_norm": 0.107421875, + "learning_rate": 1.1863938053097346e-06, + "loss": 0.7799, + "step": 3187 + }, + { + "epoch": 3.5265486725663715, + "grad_norm": 0.1103515625, + "learning_rate": 1.183628318584071e-06, + "loss": 0.7736, + "step": 3188 + }, + { + "epoch": 3.5276548672566372, + "grad_norm": 0.11279296875, + "learning_rate": 1.180862831858407e-06, + "loss": 0.814, + "step": 3189 + }, + { + "epoch": 3.5287610619469025, + "grad_norm": 0.10107421875, + "learning_rate": 1.1780973451327434e-06, + "loss": 0.7429, + "step": 3190 + }, + { + "epoch": 3.5298672566371683, + "grad_norm": 0.1103515625, + "learning_rate": 1.1753318584070797e-06, + "loss": 0.7459, + "step": 3191 + }, + { + "epoch": 3.5309734513274336, + "grad_norm": 0.1474609375, + "learning_rate": 1.172566371681416e-06, + "loss": 0.8166, + "step": 3192 + }, + { + "epoch": 3.5320796460176993, + "grad_norm": 0.10986328125, + "learning_rate": 1.1698008849557522e-06, + "loss": 0.7658, + "step": 3193 + }, + { + "epoch": 3.5331858407079646, + "grad_norm": 0.10107421875, + "learning_rate": 1.1670353982300885e-06, + "loss": 0.7665, + "step": 3194 + }, + { + "epoch": 3.53429203539823, + "grad_norm": 0.11669921875, + "learning_rate": 1.1642699115044249e-06, + "loss": 0.7663, + "step": 3195 
+ }, + { + "epoch": 3.5353982300884956, + "grad_norm": 0.10986328125, + "learning_rate": 1.1615044247787612e-06, + "loss": 0.7568, + "step": 3196 + }, + { + "epoch": 3.536504424778761, + "grad_norm": 0.123046875, + "learning_rate": 1.1587389380530976e-06, + "loss": 0.8013, + "step": 3197 + }, + { + "epoch": 3.5376106194690267, + "grad_norm": 0.11767578125, + "learning_rate": 1.1559734513274337e-06, + "loss": 0.7748, + "step": 3198 + }, + { + "epoch": 3.538716814159292, + "grad_norm": 0.09912109375, + "learning_rate": 1.15320796460177e-06, + "loss": 0.7326, + "step": 3199 + }, + { + "epoch": 3.5398230088495577, + "grad_norm": 0.1015625, + "learning_rate": 1.1504424778761064e-06, + "loss": 0.7426, + "step": 3200 + }, + { + "epoch": 3.540929203539823, + "grad_norm": 0.10498046875, + "learning_rate": 1.1476769911504427e-06, + "loss": 0.7614, + "step": 3201 + }, + { + "epoch": 3.5420353982300883, + "grad_norm": 0.10302734375, + "learning_rate": 1.1449115044247788e-06, + "loss": 0.7352, + "step": 3202 + }, + { + "epoch": 3.543141592920354, + "grad_norm": 0.111328125, + "learning_rate": 1.1421460176991152e-06, + "loss": 0.7816, + "step": 3203 + }, + { + "epoch": 3.5442477876106193, + "grad_norm": 0.12890625, + "learning_rate": 1.1393805309734513e-06, + "loss": 0.8013, + "step": 3204 + }, + { + "epoch": 3.545353982300885, + "grad_norm": 0.11572265625, + "learning_rate": 1.1366150442477878e-06, + "loss": 0.7381, + "step": 3205 + }, + { + "epoch": 3.5464601769911503, + "grad_norm": 0.1142578125, + "learning_rate": 1.133849557522124e-06, + "loss": 0.8078, + "step": 3206 + }, + { + "epoch": 3.547566371681416, + "grad_norm": 0.11376953125, + "learning_rate": 1.1310840707964603e-06, + "loss": 0.7874, + "step": 3207 + }, + { + "epoch": 3.5486725663716814, + "grad_norm": 0.12451171875, + "learning_rate": 1.1283185840707964e-06, + "loss": 0.8247, + "step": 3208 + }, + { + "epoch": 3.5497787610619467, + "grad_norm": 0.14453125, + "learning_rate": 1.1255530973451328e-06, + "loss": 0.8038, + "step": 3209 + }, + { + "epoch": 3.5508849557522124, + "grad_norm": 0.10595703125, + "learning_rate": 1.122787610619469e-06, + "loss": 0.7503, + "step": 3210 + }, + { + "epoch": 3.551991150442478, + "grad_norm": 0.1298828125, + "learning_rate": 1.1200221238938054e-06, + "loss": 0.8698, + "step": 3211 + }, + { + "epoch": 3.5530973451327434, + "grad_norm": 0.10595703125, + "learning_rate": 1.1172566371681416e-06, + "loss": 0.7221, + "step": 3212 + }, + { + "epoch": 3.5542035398230087, + "grad_norm": 0.11328125, + "learning_rate": 1.114491150442478e-06, + "loss": 0.7487, + "step": 3213 + }, + { + "epoch": 3.5553097345132745, + "grad_norm": 0.10400390625, + "learning_rate": 1.1117256637168142e-06, + "loss": 0.7465, + "step": 3214 + }, + { + "epoch": 3.5564159292035398, + "grad_norm": 0.2177734375, + "learning_rate": 1.1089601769911506e-06, + "loss": 0.7584, + "step": 3215 + }, + { + "epoch": 3.557522123893805, + "grad_norm": 0.11962890625, + "learning_rate": 1.106194690265487e-06, + "loss": 0.797, + "step": 3216 + }, + { + "epoch": 3.558628318584071, + "grad_norm": 0.1083984375, + "learning_rate": 1.103429203539823e-06, + "loss": 0.772, + "step": 3217 + }, + { + "epoch": 3.5597345132743365, + "grad_norm": 0.1318359375, + "learning_rate": 1.1006637168141594e-06, + "loss": 0.805, + "step": 3218 + }, + { + "epoch": 3.560840707964602, + "grad_norm": 0.12060546875, + "learning_rate": 1.0978982300884957e-06, + "loss": 0.7804, + "step": 3219 + }, + { + "epoch": 3.561946902654867, + "grad_norm": 0.119140625, + "learning_rate": 
1.095132743362832e-06, + "loss": 0.7903, + "step": 3220 + }, + { + "epoch": 3.563053097345133, + "grad_norm": 0.12158203125, + "learning_rate": 1.0923672566371682e-06, + "loss": 0.7809, + "step": 3221 + }, + { + "epoch": 3.564159292035398, + "grad_norm": 0.125, + "learning_rate": 1.0896017699115045e-06, + "loss": 0.844, + "step": 3222 + }, + { + "epoch": 3.5652654867256635, + "grad_norm": 0.1015625, + "learning_rate": 1.0868362831858409e-06, + "loss": 0.7668, + "step": 3223 + }, + { + "epoch": 3.566371681415929, + "grad_norm": 0.111328125, + "learning_rate": 1.0840707964601772e-06, + "loss": 0.7811, + "step": 3224 + }, + { + "epoch": 3.567477876106195, + "grad_norm": 0.109375, + "learning_rate": 1.0813053097345133e-06, + "loss": 0.7853, + "step": 3225 + }, + { + "epoch": 3.5685840707964602, + "grad_norm": 0.109375, + "learning_rate": 1.0785398230088497e-06, + "loss": 0.7572, + "step": 3226 + }, + { + "epoch": 3.5696902654867255, + "grad_norm": 0.10205078125, + "learning_rate": 1.0757743362831858e-06, + "loss": 0.7584, + "step": 3227 + }, + { + "epoch": 3.5707964601769913, + "grad_norm": 0.142578125, + "learning_rate": 1.0730088495575221e-06, + "loss": 0.7527, + "step": 3228 + }, + { + "epoch": 3.5719026548672566, + "grad_norm": 0.119140625, + "learning_rate": 1.0702433628318585e-06, + "loss": 0.7381, + "step": 3229 + }, + { + "epoch": 3.573008849557522, + "grad_norm": 0.11669921875, + "learning_rate": 1.0674778761061948e-06, + "loss": 0.8225, + "step": 3230 + }, + { + "epoch": 3.5741150442477876, + "grad_norm": 0.10986328125, + "learning_rate": 1.0647123893805311e-06, + "loss": 0.7771, + "step": 3231 + }, + { + "epoch": 3.5752212389380533, + "grad_norm": 0.125, + "learning_rate": 1.0619469026548673e-06, + "loss": 0.7878, + "step": 3232 + }, + { + "epoch": 3.5763274336283186, + "grad_norm": 0.1279296875, + "learning_rate": 1.0591814159292036e-06, + "loss": 0.7325, + "step": 3233 + }, + { + "epoch": 3.577433628318584, + "grad_norm": 0.1005859375, + "learning_rate": 1.05641592920354e-06, + "loss": 0.7527, + "step": 3234 + }, + { + "epoch": 3.5785398230088497, + "grad_norm": 0.12060546875, + "learning_rate": 1.0536504424778763e-06, + "loss": 0.7746, + "step": 3235 + }, + { + "epoch": 3.579646017699115, + "grad_norm": 0.10888671875, + "learning_rate": 1.0508849557522124e-06, + "loss": 0.7669, + "step": 3236 + }, + { + "epoch": 3.5807522123893807, + "grad_norm": 0.10986328125, + "learning_rate": 1.0481194690265487e-06, + "loss": 0.7817, + "step": 3237 + }, + { + "epoch": 3.581858407079646, + "grad_norm": 0.10498046875, + "learning_rate": 1.045353982300885e-06, + "loss": 0.7405, + "step": 3238 + }, + { + "epoch": 3.5829646017699117, + "grad_norm": 0.10595703125, + "learning_rate": 1.0425884955752214e-06, + "loss": 0.7462, + "step": 3239 + }, + { + "epoch": 3.584070796460177, + "grad_norm": 0.13671875, + "learning_rate": 1.0398230088495575e-06, + "loss": 0.7862, + "step": 3240 + }, + { + "epoch": 3.5851769911504423, + "grad_norm": 0.115234375, + "learning_rate": 1.0370575221238939e-06, + "loss": 0.7976, + "step": 3241 + }, + { + "epoch": 3.586283185840708, + "grad_norm": 0.10986328125, + "learning_rate": 1.0342920353982302e-06, + "loss": 0.7655, + "step": 3242 + }, + { + "epoch": 3.5873893805309733, + "grad_norm": 0.11181640625, + "learning_rate": 1.0315265486725666e-06, + "loss": 0.7768, + "step": 3243 + }, + { + "epoch": 3.588495575221239, + "grad_norm": 0.11376953125, + "learning_rate": 1.0287610619469027e-06, + "loss": 0.7894, + "step": 3244 + }, + { + "epoch": 3.5896017699115044, + 
"grad_norm": 0.11279296875, + "learning_rate": 1.025995575221239e-06, + "loss": 0.7665, + "step": 3245 + }, + { + "epoch": 3.59070796460177, + "grad_norm": 0.11474609375, + "learning_rate": 1.0232300884955754e-06, + "loss": 0.7815, + "step": 3246 + }, + { + "epoch": 3.5918141592920354, + "grad_norm": 0.1103515625, + "learning_rate": 1.0204646017699115e-06, + "loss": 0.7578, + "step": 3247 + }, + { + "epoch": 3.5929203539823007, + "grad_norm": 0.1162109375, + "learning_rate": 1.017699115044248e-06, + "loss": 0.7622, + "step": 3248 + }, + { + "epoch": 3.5940265486725664, + "grad_norm": 0.12158203125, + "learning_rate": 1.0149336283185842e-06, + "loss": 0.741, + "step": 3249 + }, + { + "epoch": 3.5951327433628317, + "grad_norm": 0.109375, + "learning_rate": 1.0121681415929205e-06, + "loss": 0.7711, + "step": 3250 + }, + { + "epoch": 3.5962389380530975, + "grad_norm": 0.111328125, + "learning_rate": 1.0094026548672566e-06, + "loss": 0.7345, + "step": 3251 + }, + { + "epoch": 3.5973451327433628, + "grad_norm": 0.11328125, + "learning_rate": 1.006637168141593e-06, + "loss": 0.8154, + "step": 3252 + }, + { + "epoch": 3.5984513274336285, + "grad_norm": 0.11767578125, + "learning_rate": 1.0038716814159293e-06, + "loss": 0.8184, + "step": 3253 + }, + { + "epoch": 3.599557522123894, + "grad_norm": 0.1162109375, + "learning_rate": 1.0011061946902656e-06, + "loss": 0.7724, + "step": 3254 + }, + { + "epoch": 3.600663716814159, + "grad_norm": 0.1181640625, + "learning_rate": 9.983407079646018e-07, + "loss": 0.7719, + "step": 3255 + }, + { + "epoch": 3.601769911504425, + "grad_norm": 0.1064453125, + "learning_rate": 9.95575221238938e-07, + "loss": 0.7636, + "step": 3256 + }, + { + "epoch": 3.60287610619469, + "grad_norm": 0.09765625, + "learning_rate": 9.928097345132744e-07, + "loss": 0.7308, + "step": 3257 + }, + { + "epoch": 3.603982300884956, + "grad_norm": 0.099609375, + "learning_rate": 9.900442477876108e-07, + "loss": 0.7328, + "step": 3258 + }, + { + "epoch": 3.605088495575221, + "grad_norm": 0.10205078125, + "learning_rate": 9.87278761061947e-07, + "loss": 0.746, + "step": 3259 + }, + { + "epoch": 3.606194690265487, + "grad_norm": 0.11474609375, + "learning_rate": 9.845132743362832e-07, + "loss": 0.7477, + "step": 3260 + }, + { + "epoch": 3.607300884955752, + "grad_norm": 0.1171875, + "learning_rate": 9.817477876106196e-07, + "loss": 0.7759, + "step": 3261 + }, + { + "epoch": 3.6084070796460175, + "grad_norm": 0.115234375, + "learning_rate": 9.78982300884956e-07, + "loss": 0.7518, + "step": 3262 + }, + { + "epoch": 3.609513274336283, + "grad_norm": 0.10302734375, + "learning_rate": 9.762168141592922e-07, + "loss": 0.7493, + "step": 3263 + }, + { + "epoch": 3.6106194690265485, + "grad_norm": 0.1162109375, + "learning_rate": 9.734513274336284e-07, + "loss": 0.7691, + "step": 3264 + }, + { + "epoch": 3.6117256637168142, + "grad_norm": 0.134765625, + "learning_rate": 9.706858407079647e-07, + "loss": 0.7905, + "step": 3265 + }, + { + "epoch": 3.6128318584070795, + "grad_norm": 0.10546875, + "learning_rate": 9.679203539823008e-07, + "loss": 0.7734, + "step": 3266 + }, + { + "epoch": 3.6139380530973453, + "grad_norm": 0.11279296875, + "learning_rate": 9.651548672566374e-07, + "loss": 0.7727, + "step": 3267 + }, + { + "epoch": 3.6150442477876106, + "grad_norm": 0.11279296875, + "learning_rate": 9.623893805309735e-07, + "loss": 0.7749, + "step": 3268 + }, + { + "epoch": 3.616150442477876, + "grad_norm": 0.130859375, + "learning_rate": 9.596238938053099e-07, + "loss": 0.8299, + "step": 3269 + }, + { + 
"epoch": 3.6172566371681416, + "grad_norm": 0.14453125, + "learning_rate": 9.56858407079646e-07, + "loss": 0.8083, + "step": 3270 + }, + { + "epoch": 3.6183628318584073, + "grad_norm": 0.12158203125, + "learning_rate": 9.540929203539823e-07, + "loss": 0.7638, + "step": 3271 + }, + { + "epoch": 3.6194690265486726, + "grad_norm": 0.12451171875, + "learning_rate": 9.513274336283187e-07, + "loss": 0.7957, + "step": 3272 + }, + { + "epoch": 3.620575221238938, + "grad_norm": 0.107421875, + "learning_rate": 9.48561946902655e-07, + "loss": 0.7823, + "step": 3273 + }, + { + "epoch": 3.6216814159292037, + "grad_norm": 0.1142578125, + "learning_rate": 9.457964601769912e-07, + "loss": 0.7679, + "step": 3274 + }, + { + "epoch": 3.622787610619469, + "grad_norm": 0.11376953125, + "learning_rate": 9.430309734513276e-07, + "loss": 0.7591, + "step": 3275 + }, + { + "epoch": 3.6238938053097343, + "grad_norm": 0.11328125, + "learning_rate": 9.402654867256637e-07, + "loss": 0.8074, + "step": 3276 + }, + { + "epoch": 3.625, + "grad_norm": 0.11083984375, + "learning_rate": 9.375000000000001e-07, + "loss": 0.7747, + "step": 3277 + }, + { + "epoch": 3.6261061946902657, + "grad_norm": 0.1044921875, + "learning_rate": 9.347345132743365e-07, + "loss": 0.7266, + "step": 3278 + }, + { + "epoch": 3.627212389380531, + "grad_norm": 0.10888671875, + "learning_rate": 9.319690265486726e-07, + "loss": 0.7633, + "step": 3279 + }, + { + "epoch": 3.6283185840707963, + "grad_norm": 0.10888671875, + "learning_rate": 9.292035398230089e-07, + "loss": 0.7993, + "step": 3280 + }, + { + "epoch": 3.629424778761062, + "grad_norm": 0.1103515625, + "learning_rate": 9.264380530973452e-07, + "loss": 0.7843, + "step": 3281 + }, + { + "epoch": 3.6305309734513274, + "grad_norm": 0.10791015625, + "learning_rate": 9.236725663716815e-07, + "loss": 0.7541, + "step": 3282 + }, + { + "epoch": 3.6316371681415927, + "grad_norm": 0.11181640625, + "learning_rate": 9.209070796460177e-07, + "loss": 0.7793, + "step": 3283 + }, + { + "epoch": 3.6327433628318584, + "grad_norm": 0.1083984375, + "learning_rate": 9.181415929203541e-07, + "loss": 0.7286, + "step": 3284 + }, + { + "epoch": 3.633849557522124, + "grad_norm": 0.10546875, + "learning_rate": 9.153761061946903e-07, + "loss": 0.7659, + "step": 3285 + }, + { + "epoch": 3.6349557522123894, + "grad_norm": 0.11474609375, + "learning_rate": 9.126106194690266e-07, + "loss": 0.7862, + "step": 3286 + }, + { + "epoch": 3.6360619469026547, + "grad_norm": 0.1005859375, + "learning_rate": 9.098451327433629e-07, + "loss": 0.7268, + "step": 3287 + }, + { + "epoch": 3.6371681415929205, + "grad_norm": 0.103515625, + "learning_rate": 9.070796460176992e-07, + "loss": 0.7267, + "step": 3288 + }, + { + "epoch": 3.6382743362831858, + "grad_norm": 0.1142578125, + "learning_rate": 9.043141592920354e-07, + "loss": 0.7843, + "step": 3289 + }, + { + "epoch": 3.6393805309734515, + "grad_norm": 0.115234375, + "learning_rate": 9.015486725663718e-07, + "loss": 0.7737, + "step": 3290 + }, + { + "epoch": 3.640486725663717, + "grad_norm": 0.1083984375, + "learning_rate": 8.98783185840708e-07, + "loss": 0.78, + "step": 3291 + }, + { + "epoch": 3.6415929203539825, + "grad_norm": 0.11767578125, + "learning_rate": 8.960176991150443e-07, + "loss": 0.7591, + "step": 3292 + }, + { + "epoch": 3.642699115044248, + "grad_norm": 0.11083984375, + "learning_rate": 8.932522123893806e-07, + "loss": 0.7642, + "step": 3293 + }, + { + "epoch": 3.643805309734513, + "grad_norm": 0.1083984375, + "learning_rate": 8.904867256637169e-07, + "loss": 0.7467, + 
"step": 3294 + }, + { + "epoch": 3.644911504424779, + "grad_norm": 0.103515625, + "learning_rate": 8.877212389380533e-07, + "loss": 0.7281, + "step": 3295 + }, + { + "epoch": 3.646017699115044, + "grad_norm": 0.11865234375, + "learning_rate": 8.849557522123895e-07, + "loss": 0.7957, + "step": 3296 + }, + { + "epoch": 3.64712389380531, + "grad_norm": 0.150390625, + "learning_rate": 8.821902654867258e-07, + "loss": 0.8408, + "step": 3297 + }, + { + "epoch": 3.648230088495575, + "grad_norm": 0.09765625, + "learning_rate": 8.79424778761062e-07, + "loss": 0.7069, + "step": 3298 + }, + { + "epoch": 3.649336283185841, + "grad_norm": 0.11181640625, + "learning_rate": 8.766592920353984e-07, + "loss": 0.7436, + "step": 3299 + }, + { + "epoch": 3.650442477876106, + "grad_norm": 0.1455078125, + "learning_rate": 8.738938053097345e-07, + "loss": 0.7438, + "step": 3300 + }, + { + "epoch": 3.6515486725663715, + "grad_norm": 0.099609375, + "learning_rate": 8.711283185840709e-07, + "loss": 0.7164, + "step": 3301 + }, + { + "epoch": 3.6526548672566372, + "grad_norm": 0.1064453125, + "learning_rate": 8.683628318584071e-07, + "loss": 0.7624, + "step": 3302 + }, + { + "epoch": 3.6537610619469025, + "grad_norm": 0.10498046875, + "learning_rate": 8.655973451327434e-07, + "loss": 0.765, + "step": 3303 + }, + { + "epoch": 3.6548672566371683, + "grad_norm": 0.10693359375, + "learning_rate": 8.628318584070797e-07, + "loss": 0.7902, + "step": 3304 + }, + { + "epoch": 3.6559734513274336, + "grad_norm": 0.107421875, + "learning_rate": 8.60066371681416e-07, + "loss": 0.7641, + "step": 3305 + }, + { + "epoch": 3.6570796460176993, + "grad_norm": 0.134765625, + "learning_rate": 8.573008849557522e-07, + "loss": 0.7784, + "step": 3306 + }, + { + "epoch": 3.6581858407079646, + "grad_norm": 0.11279296875, + "learning_rate": 8.545353982300886e-07, + "loss": 0.7599, + "step": 3307 + }, + { + "epoch": 3.65929203539823, + "grad_norm": 0.10546875, + "learning_rate": 8.517699115044248e-07, + "loss": 0.7526, + "step": 3308 + }, + { + "epoch": 3.6603982300884956, + "grad_norm": 0.11279296875, + "learning_rate": 8.490044247787611e-07, + "loss": 0.8062, + "step": 3309 + }, + { + "epoch": 3.661504424778761, + "grad_norm": 0.1064453125, + "learning_rate": 8.462389380530975e-07, + "loss": 0.7701, + "step": 3310 + }, + { + "epoch": 3.6626106194690267, + "grad_norm": 0.126953125, + "learning_rate": 8.434734513274337e-07, + "loss": 0.8195, + "step": 3311 + }, + { + "epoch": 3.663716814159292, + "grad_norm": 0.10888671875, + "learning_rate": 8.4070796460177e-07, + "loss": 0.7711, + "step": 3312 + }, + { + "epoch": 3.6648230088495577, + "grad_norm": 0.1142578125, + "learning_rate": 8.379424778761063e-07, + "loss": 0.7768, + "step": 3313 + }, + { + "epoch": 3.665929203539823, + "grad_norm": 0.1357421875, + "learning_rate": 8.351769911504426e-07, + "loss": 0.7912, + "step": 3314 + }, + { + "epoch": 3.6670353982300883, + "grad_norm": 0.11669921875, + "learning_rate": 8.324115044247788e-07, + "loss": 0.7458, + "step": 3315 + }, + { + "epoch": 3.668141592920354, + "grad_norm": 0.1015625, + "learning_rate": 8.296460176991152e-07, + "loss": 0.7717, + "step": 3316 + }, + { + "epoch": 3.6692477876106193, + "grad_norm": 0.109375, + "learning_rate": 8.268805309734513e-07, + "loss": 0.7777, + "step": 3317 + }, + { + "epoch": 3.670353982300885, + "grad_norm": 0.11181640625, + "learning_rate": 8.241150442477878e-07, + "loss": 0.7946, + "step": 3318 + }, + { + "epoch": 3.6714601769911503, + "grad_norm": 0.1083984375, + "learning_rate": 8.213495575221239e-07, + 
"loss": 0.7773, + "step": 3319 + }, + { + "epoch": 3.672566371681416, + "grad_norm": 0.111328125, + "learning_rate": 8.185840707964602e-07, + "loss": 0.801, + "step": 3320 + }, + { + "epoch": 3.6736725663716814, + "grad_norm": 0.13671875, + "learning_rate": 8.158185840707965e-07, + "loss": 0.7738, + "step": 3321 + }, + { + "epoch": 3.6747787610619467, + "grad_norm": 0.10791015625, + "learning_rate": 8.130530973451328e-07, + "loss": 0.7937, + "step": 3322 + }, + { + "epoch": 3.6758849557522124, + "grad_norm": 0.134765625, + "learning_rate": 8.10287610619469e-07, + "loss": 0.7997, + "step": 3323 + }, + { + "epoch": 3.676991150442478, + "grad_norm": 0.12158203125, + "learning_rate": 8.075221238938054e-07, + "loss": 0.7684, + "step": 3324 + }, + { + "epoch": 3.6780973451327434, + "grad_norm": 0.11083984375, + "learning_rate": 8.047566371681416e-07, + "loss": 0.7853, + "step": 3325 + }, + { + "epoch": 3.6792035398230087, + "grad_norm": 0.1318359375, + "learning_rate": 8.019911504424779e-07, + "loss": 0.8065, + "step": 3326 + }, + { + "epoch": 3.6803097345132745, + "grad_norm": 0.1142578125, + "learning_rate": 7.992256637168143e-07, + "loss": 0.7538, + "step": 3327 + }, + { + "epoch": 3.6814159292035398, + "grad_norm": 0.1044921875, + "learning_rate": 7.964601769911505e-07, + "loss": 0.7674, + "step": 3328 + }, + { + "epoch": 3.682522123893805, + "grad_norm": 0.10888671875, + "learning_rate": 7.936946902654868e-07, + "loss": 0.7552, + "step": 3329 + }, + { + "epoch": 3.683628318584071, + "grad_norm": 0.10888671875, + "learning_rate": 7.909292035398231e-07, + "loss": 0.7853, + "step": 3330 + }, + { + "epoch": 3.6847345132743365, + "grad_norm": 0.1240234375, + "learning_rate": 7.881637168141594e-07, + "loss": 0.7873, + "step": 3331 + }, + { + "epoch": 3.685840707964602, + "grad_norm": 0.10205078125, + "learning_rate": 7.853982300884956e-07, + "loss": 0.7665, + "step": 3332 + }, + { + "epoch": 3.686946902654867, + "grad_norm": 0.11767578125, + "learning_rate": 7.82632743362832e-07, + "loss": 0.7901, + "step": 3333 + }, + { + "epoch": 3.688053097345133, + "grad_norm": 0.1083984375, + "learning_rate": 7.798672566371682e-07, + "loss": 0.7743, + "step": 3334 + }, + { + "epoch": 3.689159292035398, + "grad_norm": 0.10498046875, + "learning_rate": 7.771017699115045e-07, + "loss": 0.7681, + "step": 3335 + }, + { + "epoch": 3.6902654867256635, + "grad_norm": 0.1064453125, + "learning_rate": 7.743362831858408e-07, + "loss": 0.7793, + "step": 3336 + }, + { + "epoch": 3.691371681415929, + "grad_norm": 0.1083984375, + "learning_rate": 7.715707964601771e-07, + "loss": 0.7195, + "step": 3337 + }, + { + "epoch": 3.692477876106195, + "grad_norm": 0.10498046875, + "learning_rate": 7.688053097345132e-07, + "loss": 0.7547, + "step": 3338 + }, + { + "epoch": 3.6935840707964602, + "grad_norm": 0.10205078125, + "learning_rate": 7.660398230088497e-07, + "loss": 0.7535, + "step": 3339 + }, + { + "epoch": 3.6946902654867255, + "grad_norm": 0.12255859375, + "learning_rate": 7.632743362831858e-07, + "loss": 0.7995, + "step": 3340 + }, + { + "epoch": 3.6957964601769913, + "grad_norm": 0.10498046875, + "learning_rate": 7.605088495575221e-07, + "loss": 0.7566, + "step": 3341 + }, + { + "epoch": 3.6969026548672566, + "grad_norm": 0.11962890625, + "learning_rate": 7.577433628318585e-07, + "loss": 0.7889, + "step": 3342 + }, + { + "epoch": 3.698008849557522, + "grad_norm": 0.10693359375, + "learning_rate": 7.549778761061947e-07, + "loss": 0.7561, + "step": 3343 + }, + { + "epoch": 3.6991150442477876, + "grad_norm": 0.10400390625, + 
"learning_rate": 7.522123893805311e-07, + "loss": 0.7623, + "step": 3344 + }, + { + "epoch": 3.7002212389380533, + "grad_norm": 0.1181640625, + "learning_rate": 7.494469026548673e-07, + "loss": 0.7952, + "step": 3345 + }, + { + "epoch": 3.7013274336283186, + "grad_norm": 0.1044921875, + "learning_rate": 7.466814159292036e-07, + "loss": 0.7769, + "step": 3346 + }, + { + "epoch": 3.702433628318584, + "grad_norm": 0.115234375, + "learning_rate": 7.439159292035399e-07, + "loss": 0.7138, + "step": 3347 + }, + { + "epoch": 3.7035398230088497, + "grad_norm": 0.12158203125, + "learning_rate": 7.411504424778762e-07, + "loss": 0.8225, + "step": 3348 + }, + { + "epoch": 3.704646017699115, + "grad_norm": 0.11474609375, + "learning_rate": 7.383849557522124e-07, + "loss": 0.775, + "step": 3349 + }, + { + "epoch": 3.7057522123893807, + "grad_norm": 0.1025390625, + "learning_rate": 7.356194690265488e-07, + "loss": 0.7498, + "step": 3350 + }, + { + "epoch": 3.706858407079646, + "grad_norm": 0.1064453125, + "learning_rate": 7.32853982300885e-07, + "loss": 0.7434, + "step": 3351 + }, + { + "epoch": 3.7079646017699117, + "grad_norm": 0.1123046875, + "learning_rate": 7.300884955752213e-07, + "loss": 0.7705, + "step": 3352 + }, + { + "epoch": 3.709070796460177, + "grad_norm": 0.1376953125, + "learning_rate": 7.273230088495576e-07, + "loss": 0.8518, + "step": 3353 + }, + { + "epoch": 3.7101769911504423, + "grad_norm": 0.1064453125, + "learning_rate": 7.245575221238939e-07, + "loss": 0.7707, + "step": 3354 + }, + { + "epoch": 3.711283185840708, + "grad_norm": 0.1064453125, + "learning_rate": 7.217920353982301e-07, + "loss": 0.7706, + "step": 3355 + }, + { + "epoch": 3.7123893805309733, + "grad_norm": 0.1005859375, + "learning_rate": 7.190265486725665e-07, + "loss": 0.7248, + "step": 3356 + }, + { + "epoch": 3.713495575221239, + "grad_norm": 0.1220703125, + "learning_rate": 7.162610619469026e-07, + "loss": 0.8013, + "step": 3357 + }, + { + "epoch": 3.7146017699115044, + "grad_norm": 0.09912109375, + "learning_rate": 7.13495575221239e-07, + "loss": 0.7465, + "step": 3358 + }, + { + "epoch": 3.71570796460177, + "grad_norm": 0.1201171875, + "learning_rate": 7.107300884955754e-07, + "loss": 0.7879, + "step": 3359 + }, + { + "epoch": 3.7168141592920354, + "grad_norm": 0.158203125, + "learning_rate": 7.079646017699115e-07, + "loss": 0.7226, + "step": 3360 + }, + { + "epoch": 3.7179203539823007, + "grad_norm": 0.1015625, + "learning_rate": 7.05199115044248e-07, + "loss": 0.7876, + "step": 3361 + }, + { + "epoch": 3.7190265486725664, + "grad_norm": 0.1181640625, + "learning_rate": 7.024336283185841e-07, + "loss": 0.7764, + "step": 3362 + }, + { + "epoch": 3.7201327433628317, + "grad_norm": 0.10791015625, + "learning_rate": 6.996681415929204e-07, + "loss": 0.7867, + "step": 3363 + }, + { + "epoch": 3.7212389380530975, + "grad_norm": 0.11328125, + "learning_rate": 6.969026548672566e-07, + "loss": 0.7741, + "step": 3364 + }, + { + "epoch": 3.7223451327433628, + "grad_norm": 0.1015625, + "learning_rate": 6.94137168141593e-07, + "loss": 0.7408, + "step": 3365 + }, + { + "epoch": 3.7234513274336285, + "grad_norm": 0.10498046875, + "learning_rate": 6.913716814159292e-07, + "loss": 0.7406, + "step": 3366 + }, + { + "epoch": 3.724557522123894, + "grad_norm": 0.11572265625, + "learning_rate": 6.886061946902656e-07, + "loss": 0.7508, + "step": 3367 + }, + { + "epoch": 3.725663716814159, + "grad_norm": 0.11279296875, + "learning_rate": 6.858407079646018e-07, + "loss": 0.7754, + "step": 3368 + }, + { + "epoch": 3.726769911504425, + 
"grad_norm": 0.1376953125, + "learning_rate": 6.830752212389381e-07, + "loss": 0.7929, + "step": 3369 + }, + { + "epoch": 3.72787610619469, + "grad_norm": 0.10009765625, + "learning_rate": 6.803097345132744e-07, + "loss": 0.7341, + "step": 3370 + }, + { + "epoch": 3.728982300884956, + "grad_norm": 0.1142578125, + "learning_rate": 6.775442477876107e-07, + "loss": 0.7528, + "step": 3371 + }, + { + "epoch": 3.730088495575221, + "grad_norm": 0.1259765625, + "learning_rate": 6.747787610619469e-07, + "loss": 0.8679, + "step": 3372 + }, + { + "epoch": 3.731194690265487, + "grad_norm": 0.1005859375, + "learning_rate": 6.720132743362833e-07, + "loss": 0.7369, + "step": 3373 + }, + { + "epoch": 3.732300884955752, + "grad_norm": 0.1083984375, + "learning_rate": 6.692477876106196e-07, + "loss": 0.7855, + "step": 3374 + }, + { + "epoch": 3.7334070796460175, + "grad_norm": 0.11669921875, + "learning_rate": 6.664823008849558e-07, + "loss": 0.7358, + "step": 3375 + }, + { + "epoch": 3.734513274336283, + "grad_norm": 0.10498046875, + "learning_rate": 6.637168141592922e-07, + "loss": 0.7425, + "step": 3376 + }, + { + "epoch": 3.7356194690265485, + "grad_norm": 0.11572265625, + "learning_rate": 6.609513274336284e-07, + "loss": 0.7889, + "step": 3377 + }, + { + "epoch": 3.7367256637168142, + "grad_norm": 0.1533203125, + "learning_rate": 6.581858407079647e-07, + "loss": 0.8149, + "step": 3378 + }, + { + "epoch": 3.7378318584070795, + "grad_norm": 0.11962890625, + "learning_rate": 6.554203539823009e-07, + "loss": 0.8119, + "step": 3379 + }, + { + "epoch": 3.7389380530973453, + "grad_norm": 0.1201171875, + "learning_rate": 6.526548672566373e-07, + "loss": 0.7998, + "step": 3380 + }, + { + "epoch": 3.7400442477876106, + "grad_norm": 0.111328125, + "learning_rate": 6.498893805309734e-07, + "loss": 0.7325, + "step": 3381 + }, + { + "epoch": 3.741150442477876, + "grad_norm": 0.10888671875, + "learning_rate": 6.471238938053098e-07, + "loss": 0.7372, + "step": 3382 + }, + { + "epoch": 3.7422566371681416, + "grad_norm": 0.1865234375, + "learning_rate": 6.44358407079646e-07, + "loss": 0.7462, + "step": 3383 + }, + { + "epoch": 3.7433628318584073, + "grad_norm": 0.10302734375, + "learning_rate": 6.415929203539823e-07, + "loss": 0.7652, + "step": 3384 + }, + { + "epoch": 3.7444690265486726, + "grad_norm": 0.10888671875, + "learning_rate": 6.388274336283186e-07, + "loss": 0.7462, + "step": 3385 + }, + { + "epoch": 3.745575221238938, + "grad_norm": 0.1142578125, + "learning_rate": 6.360619469026549e-07, + "loss": 0.7574, + "step": 3386 + }, + { + "epoch": 3.7466814159292037, + "grad_norm": 0.1181640625, + "learning_rate": 6.332964601769911e-07, + "loss": 0.8215, + "step": 3387 + }, + { + "epoch": 3.747787610619469, + "grad_norm": 0.1123046875, + "learning_rate": 6.305309734513275e-07, + "loss": 0.7772, + "step": 3388 + }, + { + "epoch": 3.7488938053097343, + "grad_norm": 0.11279296875, + "learning_rate": 6.277654867256637e-07, + "loss": 0.7491, + "step": 3389 + }, + { + "epoch": 3.75, + "grad_norm": 0.109375, + "learning_rate": 6.25e-07, + "loss": 0.7195, + "step": 3390 + }, + { + "epoch": 3.7511061946902657, + "grad_norm": 0.1083984375, + "learning_rate": 6.222345132743363e-07, + "loss": 0.7868, + "step": 3391 + }, + { + "epoch": 3.752212389380531, + "grad_norm": 0.107421875, + "learning_rate": 6.194690265486726e-07, + "loss": 0.767, + "step": 3392 + }, + { + "epoch": 3.7533185840707963, + "grad_norm": 0.11767578125, + "learning_rate": 6.167035398230089e-07, + "loss": 0.7578, + "step": 3393 + }, + { + "epoch": 
3.754424778761062, + "grad_norm": 0.10693359375, + "learning_rate": 6.139380530973452e-07, + "loss": 0.7519, + "step": 3394 + }, + { + "epoch": 3.7555309734513274, + "grad_norm": 0.10498046875, + "learning_rate": 6.111725663716814e-07, + "loss": 0.786, + "step": 3395 + }, + { + "epoch": 3.7566371681415927, + "grad_norm": 0.1005859375, + "learning_rate": 6.084070796460178e-07, + "loss": 0.7297, + "step": 3396 + }, + { + "epoch": 3.7577433628318584, + "grad_norm": 0.11376953125, + "learning_rate": 6.05641592920354e-07, + "loss": 0.7849, + "step": 3397 + }, + { + "epoch": 3.758849557522124, + "grad_norm": 0.109375, + "learning_rate": 6.028761061946903e-07, + "loss": 0.793, + "step": 3398 + }, + { + "epoch": 3.7599557522123894, + "grad_norm": 0.10595703125, + "learning_rate": 6.001106194690267e-07, + "loss": 0.7566, + "step": 3399 + }, + { + "epoch": 3.7610619469026547, + "grad_norm": 0.12060546875, + "learning_rate": 5.973451327433629e-07, + "loss": 0.7404, + "step": 3400 + }, + { + "epoch": 3.7621681415929205, + "grad_norm": 0.123046875, + "learning_rate": 5.945796460176992e-07, + "loss": 0.7673, + "step": 3401 + }, + { + "epoch": 3.7632743362831858, + "grad_norm": 0.11474609375, + "learning_rate": 5.918141592920355e-07, + "loss": 0.7683, + "step": 3402 + }, + { + "epoch": 3.7643805309734515, + "grad_norm": 0.10986328125, + "learning_rate": 5.890486725663717e-07, + "loss": 0.7572, + "step": 3403 + }, + { + "epoch": 3.765486725663717, + "grad_norm": 0.1142578125, + "learning_rate": 5.86283185840708e-07, + "loss": 0.7719, + "step": 3404 + }, + { + "epoch": 3.7665929203539825, + "grad_norm": 0.11669921875, + "learning_rate": 5.835176991150443e-07, + "loss": 0.7635, + "step": 3405 + }, + { + "epoch": 3.767699115044248, + "grad_norm": 0.1220703125, + "learning_rate": 5.807522123893806e-07, + "loss": 0.8151, + "step": 3406 + }, + { + "epoch": 3.768805309734513, + "grad_norm": 0.10595703125, + "learning_rate": 5.779867256637168e-07, + "loss": 0.7845, + "step": 3407 + }, + { + "epoch": 3.769911504424779, + "grad_norm": 0.12353515625, + "learning_rate": 5.752212389380532e-07, + "loss": 0.7951, + "step": 3408 + }, + { + "epoch": 3.771017699115044, + "grad_norm": 0.10791015625, + "learning_rate": 5.724557522123894e-07, + "loss": 0.7838, + "step": 3409 + }, + { + "epoch": 3.77212389380531, + "grad_norm": 0.10888671875, + "learning_rate": 5.696902654867256e-07, + "loss": 0.6889, + "step": 3410 + }, + { + "epoch": 3.773230088495575, + "grad_norm": 0.111328125, + "learning_rate": 5.66924778761062e-07, + "loss": 0.7545, + "step": 3411 + }, + { + "epoch": 3.774336283185841, + "grad_norm": 0.1591796875, + "learning_rate": 5.641592920353982e-07, + "loss": 0.8127, + "step": 3412 + }, + { + "epoch": 3.775442477876106, + "grad_norm": 0.11767578125, + "learning_rate": 5.613938053097345e-07, + "loss": 0.7462, + "step": 3413 + }, + { + "epoch": 3.7765486725663715, + "grad_norm": 0.1025390625, + "learning_rate": 5.586283185840708e-07, + "loss": 0.7565, + "step": 3414 + }, + { + "epoch": 3.7776548672566372, + "grad_norm": 0.11669921875, + "learning_rate": 5.558628318584071e-07, + "loss": 0.7894, + "step": 3415 + }, + { + "epoch": 3.7787610619469025, + "grad_norm": 0.099609375, + "learning_rate": 5.530973451327435e-07, + "loss": 0.7328, + "step": 3416 + }, + { + "epoch": 3.7798672566371683, + "grad_norm": 0.107421875, + "learning_rate": 5.503318584070797e-07, + "loss": 0.7369, + "step": 3417 + }, + { + "epoch": 3.7809734513274336, + "grad_norm": 0.1103515625, + "learning_rate": 5.47566371681416e-07, + "loss": 0.7763, + 
"step": 3418 + }, + { + "epoch": 3.7820796460176993, + "grad_norm": 0.10205078125, + "learning_rate": 5.448008849557523e-07, + "loss": 0.7564, + "step": 3419 + }, + { + "epoch": 3.7831858407079646, + "grad_norm": 0.11279296875, + "learning_rate": 5.420353982300886e-07, + "loss": 0.8017, + "step": 3420 + }, + { + "epoch": 3.78429203539823, + "grad_norm": 0.123046875, + "learning_rate": 5.392699115044248e-07, + "loss": 0.7912, + "step": 3421 + }, + { + "epoch": 3.7853982300884956, + "grad_norm": 0.09912109375, + "learning_rate": 5.365044247787611e-07, + "loss": 0.7862, + "step": 3422 + }, + { + "epoch": 3.786504424778761, + "grad_norm": 0.1240234375, + "learning_rate": 5.337389380530974e-07, + "loss": 0.7979, + "step": 3423 + }, + { + "epoch": 3.7876106194690267, + "grad_norm": 0.12158203125, + "learning_rate": 5.309734513274336e-07, + "loss": 0.7697, + "step": 3424 + }, + { + "epoch": 3.788716814159292, + "grad_norm": 0.10546875, + "learning_rate": 5.2820796460177e-07, + "loss": 0.724, + "step": 3425 + }, + { + "epoch": 3.7898230088495577, + "grad_norm": 0.10546875, + "learning_rate": 5.254424778761062e-07, + "loss": 0.763, + "step": 3426 + }, + { + "epoch": 3.790929203539823, + "grad_norm": 0.1201171875, + "learning_rate": 5.226769911504425e-07, + "loss": 0.8013, + "step": 3427 + }, + { + "epoch": 3.7920353982300883, + "grad_norm": 0.1328125, + "learning_rate": 5.199115044247788e-07, + "loss": 0.8185, + "step": 3428 + }, + { + "epoch": 3.793141592920354, + "grad_norm": 0.10546875, + "learning_rate": 5.171460176991151e-07, + "loss": 0.7786, + "step": 3429 + }, + { + "epoch": 3.7942477876106193, + "grad_norm": 0.130859375, + "learning_rate": 5.143805309734513e-07, + "loss": 0.8212, + "step": 3430 + }, + { + "epoch": 3.795353982300885, + "grad_norm": 0.1162109375, + "learning_rate": 5.116150442477877e-07, + "loss": 0.7802, + "step": 3431 + }, + { + "epoch": 3.7964601769911503, + "grad_norm": 0.10791015625, + "learning_rate": 5.08849557522124e-07, + "loss": 0.7415, + "step": 3432 + }, + { + "epoch": 3.797566371681416, + "grad_norm": 0.10791015625, + "learning_rate": 5.060840707964602e-07, + "loss": 0.7847, + "step": 3433 + }, + { + "epoch": 3.7986725663716814, + "grad_norm": 0.1357421875, + "learning_rate": 5.033185840707965e-07, + "loss": 0.8451, + "step": 3434 + }, + { + "epoch": 3.7997787610619467, + "grad_norm": 0.10302734375, + "learning_rate": 5.005530973451328e-07, + "loss": 0.7168, + "step": 3435 + }, + { + "epoch": 3.8008849557522124, + "grad_norm": 0.1240234375, + "learning_rate": 4.97787610619469e-07, + "loss": 0.7505, + "step": 3436 + }, + { + "epoch": 3.801991150442478, + "grad_norm": 0.1044921875, + "learning_rate": 4.950221238938054e-07, + "loss": 0.7266, + "step": 3437 + }, + { + "epoch": 3.8030973451327434, + "grad_norm": 0.10986328125, + "learning_rate": 4.922566371681416e-07, + "loss": 0.7283, + "step": 3438 + }, + { + "epoch": 3.8042035398230087, + "grad_norm": 0.1162109375, + "learning_rate": 4.89491150442478e-07, + "loss": 0.7988, + "step": 3439 + }, + { + "epoch": 3.8053097345132745, + "grad_norm": 0.11328125, + "learning_rate": 4.867256637168142e-07, + "loss": 0.7461, + "step": 3440 + }, + { + "epoch": 3.8064159292035398, + "grad_norm": 0.109375, + "learning_rate": 4.839601769911504e-07, + "loss": 0.7763, + "step": 3441 + }, + { + "epoch": 3.807522123893805, + "grad_norm": 0.10400390625, + "learning_rate": 4.811946902654868e-07, + "loss": 0.7649, + "step": 3442 + }, + { + "epoch": 3.808628318584071, + "grad_norm": 0.11474609375, + "learning_rate": 4.78429203539823e-07, 
+ "loss": 0.7712, + "step": 3443 + }, + { + "epoch": 3.8097345132743365, + "grad_norm": 0.11328125, + "learning_rate": 4.756637168141593e-07, + "loss": 0.7553, + "step": 3444 + }, + { + "epoch": 3.810840707964602, + "grad_norm": 0.11181640625, + "learning_rate": 4.728982300884956e-07, + "loss": 0.7509, + "step": 3445 + }, + { + "epoch": 3.811946902654867, + "grad_norm": 0.111328125, + "learning_rate": 4.7013274336283184e-07, + "loss": 0.7929, + "step": 3446 + }, + { + "epoch": 3.813053097345133, + "grad_norm": 0.1064453125, + "learning_rate": 4.6736725663716823e-07, + "loss": 0.7374, + "step": 3447 + }, + { + "epoch": 3.814159292035398, + "grad_norm": 0.11767578125, + "learning_rate": 4.6460176991150447e-07, + "loss": 0.7663, + "step": 3448 + }, + { + "epoch": 3.8152654867256635, + "grad_norm": 0.11376953125, + "learning_rate": 4.6183628318584075e-07, + "loss": 0.7745, + "step": 3449 + }, + { + "epoch": 3.816371681415929, + "grad_norm": 0.10693359375, + "learning_rate": 4.5907079646017704e-07, + "loss": 0.7571, + "step": 3450 + }, + { + "epoch": 3.817477876106195, + "grad_norm": 0.1357421875, + "learning_rate": 4.563053097345133e-07, + "loss": 0.8059, + "step": 3451 + }, + { + "epoch": 3.8185840707964602, + "grad_norm": 0.1083984375, + "learning_rate": 4.535398230088496e-07, + "loss": 0.785, + "step": 3452 + }, + { + "epoch": 3.8196902654867255, + "grad_norm": 0.1142578125, + "learning_rate": 4.507743362831859e-07, + "loss": 0.8131, + "step": 3453 + }, + { + "epoch": 3.8207964601769913, + "grad_norm": 0.1044921875, + "learning_rate": 4.480088495575222e-07, + "loss": 0.8001, + "step": 3454 + }, + { + "epoch": 3.8219026548672566, + "grad_norm": 0.1064453125, + "learning_rate": 4.4524336283185846e-07, + "loss": 0.7726, + "step": 3455 + }, + { + "epoch": 3.823008849557522, + "grad_norm": 0.107421875, + "learning_rate": 4.4247787610619474e-07, + "loss": 0.7942, + "step": 3456 + }, + { + "epoch": 3.8241150442477876, + "grad_norm": 0.10986328125, + "learning_rate": 4.39712389380531e-07, + "loss": 0.7572, + "step": 3457 + }, + { + "epoch": 3.8252212389380533, + "grad_norm": 0.1279296875, + "learning_rate": 4.3694690265486726e-07, + "loss": 0.8153, + "step": 3458 + }, + { + "epoch": 3.8263274336283186, + "grad_norm": 0.10888671875, + "learning_rate": 4.3418141592920355e-07, + "loss": 0.7322, + "step": 3459 + }, + { + "epoch": 3.827433628318584, + "grad_norm": 0.1337890625, + "learning_rate": 4.3141592920353983e-07, + "loss": 0.7983, + "step": 3460 + }, + { + "epoch": 3.8285398230088497, + "grad_norm": 0.12890625, + "learning_rate": 4.286504424778761e-07, + "loss": 0.7811, + "step": 3461 + }, + { + "epoch": 3.829646017699115, + "grad_norm": 0.12109375, + "learning_rate": 4.258849557522124e-07, + "loss": 0.8147, + "step": 3462 + }, + { + "epoch": 3.8307522123893807, + "grad_norm": 0.115234375, + "learning_rate": 4.2311946902654874e-07, + "loss": 0.8276, + "step": 3463 + }, + { + "epoch": 3.831858407079646, + "grad_norm": 0.111328125, + "learning_rate": 4.20353982300885e-07, + "loss": 0.7804, + "step": 3464 + }, + { + "epoch": 3.8329646017699117, + "grad_norm": 0.12353515625, + "learning_rate": 4.175884955752213e-07, + "loss": 0.8356, + "step": 3465 + }, + { + "epoch": 3.834070796460177, + "grad_norm": 0.10107421875, + "learning_rate": 4.148230088495576e-07, + "loss": 0.7462, + "step": 3466 + }, + { + "epoch": 3.8351769911504423, + "grad_norm": 0.1328125, + "learning_rate": 4.120575221238939e-07, + "loss": 0.8482, + "step": 3467 + }, + { + "epoch": 3.836283185840708, + "grad_norm": 0.1416015625, + 
"learning_rate": 4.092920353982301e-07, + "loss": 0.7886, + "step": 3468 + }, + { + "epoch": 3.8373893805309733, + "grad_norm": 0.11083984375, + "learning_rate": 4.065265486725664e-07, + "loss": 0.7656, + "step": 3469 + }, + { + "epoch": 3.838495575221239, + "grad_norm": 0.0986328125, + "learning_rate": 4.037610619469027e-07, + "loss": 0.7435, + "step": 3470 + }, + { + "epoch": 3.8396017699115044, + "grad_norm": 0.1123046875, + "learning_rate": 4.0099557522123896e-07, + "loss": 0.7621, + "step": 3471 + }, + { + "epoch": 3.84070796460177, + "grad_norm": 0.11328125, + "learning_rate": 3.9823008849557525e-07, + "loss": 0.7819, + "step": 3472 + }, + { + "epoch": 3.8418141592920354, + "grad_norm": 0.10693359375, + "learning_rate": 3.9546460176991153e-07, + "loss": 0.7828, + "step": 3473 + }, + { + "epoch": 3.8429203539823007, + "grad_norm": 0.10986328125, + "learning_rate": 3.926991150442478e-07, + "loss": 0.7652, + "step": 3474 + }, + { + "epoch": 3.8440265486725664, + "grad_norm": 0.11865234375, + "learning_rate": 3.899336283185841e-07, + "loss": 0.7957, + "step": 3475 + }, + { + "epoch": 3.8451327433628317, + "grad_norm": 0.1044921875, + "learning_rate": 3.871681415929204e-07, + "loss": 0.7634, + "step": 3476 + }, + { + "epoch": 3.8462389380530975, + "grad_norm": 0.1025390625, + "learning_rate": 3.844026548672566e-07, + "loss": 0.7718, + "step": 3477 + }, + { + "epoch": 3.8473451327433628, + "grad_norm": 0.11669921875, + "learning_rate": 3.816371681415929e-07, + "loss": 0.7728, + "step": 3478 + }, + { + "epoch": 3.8484513274336285, + "grad_norm": 0.1630859375, + "learning_rate": 3.7887168141592924e-07, + "loss": 0.7569, + "step": 3479 + }, + { + "epoch": 3.849557522123894, + "grad_norm": 0.1103515625, + "learning_rate": 3.7610619469026553e-07, + "loss": 0.7745, + "step": 3480 + }, + { + "epoch": 3.850663716814159, + "grad_norm": 0.1611328125, + "learning_rate": 3.733407079646018e-07, + "loss": 0.8341, + "step": 3481 + }, + { + "epoch": 3.851769911504425, + "grad_norm": 0.109375, + "learning_rate": 3.705752212389381e-07, + "loss": 0.7309, + "step": 3482 + }, + { + "epoch": 3.85287610619469, + "grad_norm": 0.11279296875, + "learning_rate": 3.678097345132744e-07, + "loss": 0.7498, + "step": 3483 + }, + { + "epoch": 3.853982300884956, + "grad_norm": 0.10400390625, + "learning_rate": 3.6504424778761067e-07, + "loss": 0.7414, + "step": 3484 + }, + { + "epoch": 3.855088495575221, + "grad_norm": 0.125, + "learning_rate": 3.6227876106194695e-07, + "loss": 0.7439, + "step": 3485 + }, + { + "epoch": 3.856194690265487, + "grad_norm": 0.12060546875, + "learning_rate": 3.5951327433628324e-07, + "loss": 0.8091, + "step": 3486 + }, + { + "epoch": 3.857300884955752, + "grad_norm": 0.11572265625, + "learning_rate": 3.567477876106195e-07, + "loss": 0.7728, + "step": 3487 + }, + { + "epoch": 3.8584070796460175, + "grad_norm": 0.103515625, + "learning_rate": 3.5398230088495575e-07, + "loss": 0.7647, + "step": 3488 + }, + { + "epoch": 3.859513274336283, + "grad_norm": 0.119140625, + "learning_rate": 3.5121681415929204e-07, + "loss": 0.7726, + "step": 3489 + }, + { + "epoch": 3.8606194690265485, + "grad_norm": 0.11572265625, + "learning_rate": 3.484513274336283e-07, + "loss": 0.8064, + "step": 3490 + }, + { + "epoch": 3.8617256637168142, + "grad_norm": 0.10986328125, + "learning_rate": 3.456858407079646e-07, + "loss": 0.7224, + "step": 3491 + }, + { + "epoch": 3.8628318584070795, + "grad_norm": 0.10107421875, + "learning_rate": 3.429203539823009e-07, + "loss": 0.7417, + "step": 3492 + }, + { + "epoch": 
3.8639380530973453, + "grad_norm": 0.11181640625, + "learning_rate": 3.401548672566372e-07, + "loss": 0.7696, + "step": 3493 + }, + { + "epoch": 3.8650442477876106, + "grad_norm": 0.1162109375, + "learning_rate": 3.3738938053097346e-07, + "loss": 0.7677, + "step": 3494 + }, + { + "epoch": 3.866150442477876, + "grad_norm": 0.1103515625, + "learning_rate": 3.346238938053098e-07, + "loss": 0.7532, + "step": 3495 + }, + { + "epoch": 3.8672566371681416, + "grad_norm": 0.12255859375, + "learning_rate": 3.318584070796461e-07, + "loss": 0.7974, + "step": 3496 + }, + { + "epoch": 3.8683628318584073, + "grad_norm": 0.10595703125, + "learning_rate": 3.2909292035398237e-07, + "loss": 0.7544, + "step": 3497 + }, + { + "epoch": 3.8694690265486726, + "grad_norm": 0.10546875, + "learning_rate": 3.2632743362831865e-07, + "loss": 0.753, + "step": 3498 + }, + { + "epoch": 3.870575221238938, + "grad_norm": 0.1015625, + "learning_rate": 3.235619469026549e-07, + "loss": 0.7775, + "step": 3499 + }, + { + "epoch": 3.8716814159292037, + "grad_norm": 0.11279296875, + "learning_rate": 3.2079646017699117e-07, + "loss": 0.7652, + "step": 3500 + }, + { + "epoch": 3.872787610619469, + "grad_norm": 0.1435546875, + "learning_rate": 3.1803097345132746e-07, + "loss": 0.8548, + "step": 3501 + }, + { + "epoch": 3.8738938053097343, + "grad_norm": 0.10791015625, + "learning_rate": 3.1526548672566374e-07, + "loss": 0.777, + "step": 3502 + }, + { + "epoch": 3.875, + "grad_norm": 0.1318359375, + "learning_rate": 3.125e-07, + "loss": 0.8376, + "step": 3503 + }, + { + "epoch": 3.8761061946902657, + "grad_norm": 0.11083984375, + "learning_rate": 3.097345132743363e-07, + "loss": 0.7782, + "step": 3504 + }, + { + "epoch": 3.877212389380531, + "grad_norm": 0.10791015625, + "learning_rate": 3.069690265486726e-07, + "loss": 0.7666, + "step": 3505 + }, + { + "epoch": 3.8783185840707963, + "grad_norm": 0.1171875, + "learning_rate": 3.042035398230089e-07, + "loss": 0.7697, + "step": 3506 + }, + { + "epoch": 3.879424778761062, + "grad_norm": 0.134765625, + "learning_rate": 3.0143805309734516e-07, + "loss": 0.8063, + "step": 3507 + }, + { + "epoch": 3.8805309734513274, + "grad_norm": 0.16015625, + "learning_rate": 2.9867256637168145e-07, + "loss": 0.7782, + "step": 3508 + }, + { + "epoch": 3.8816371681415927, + "grad_norm": 0.12255859375, + "learning_rate": 2.9590707964601773e-07, + "loss": 0.775, + "step": 3509 + }, + { + "epoch": 3.8827433628318584, + "grad_norm": 0.111328125, + "learning_rate": 2.93141592920354e-07, + "loss": 0.7697, + "step": 3510 + }, + { + "epoch": 3.883849557522124, + "grad_norm": 0.10791015625, + "learning_rate": 2.903761061946903e-07, + "loss": 0.7791, + "step": 3511 + }, + { + "epoch": 3.8849557522123894, + "grad_norm": 0.10546875, + "learning_rate": 2.876106194690266e-07, + "loss": 0.7329, + "step": 3512 + }, + { + "epoch": 3.8860619469026547, + "grad_norm": 0.1044921875, + "learning_rate": 2.848451327433628e-07, + "loss": 0.7207, + "step": 3513 + }, + { + "epoch": 3.8871681415929205, + "grad_norm": 0.11474609375, + "learning_rate": 2.820796460176991e-07, + "loss": 0.7888, + "step": 3514 + }, + { + "epoch": 3.8882743362831858, + "grad_norm": 0.10986328125, + "learning_rate": 2.793141592920354e-07, + "loss": 0.7652, + "step": 3515 + }, + { + "epoch": 3.8893805309734515, + "grad_norm": 0.11083984375, + "learning_rate": 2.7654867256637173e-07, + "loss": 0.7872, + "step": 3516 + }, + { + "epoch": 3.890486725663717, + "grad_norm": 0.11474609375, + "learning_rate": 2.73783185840708e-07, + "loss": 0.8109, + "step": 3517 + 
}, + { + "epoch": 3.8915929203539825, + "grad_norm": 0.1298828125, + "learning_rate": 2.710176991150443e-07, + "loss": 0.8481, + "step": 3518 + }, + { + "epoch": 3.892699115044248, + "grad_norm": 0.11474609375, + "learning_rate": 2.6825221238938053e-07, + "loss": 0.7889, + "step": 3519 + }, + { + "epoch": 3.893805309734513, + "grad_norm": 0.109375, + "learning_rate": 2.654867256637168e-07, + "loss": 0.7418, + "step": 3520 + }, + { + "epoch": 3.894911504424779, + "grad_norm": 0.107421875, + "learning_rate": 2.627212389380531e-07, + "loss": 0.7506, + "step": 3521 + }, + { + "epoch": 3.896017699115044, + "grad_norm": 0.1083984375, + "learning_rate": 2.599557522123894e-07, + "loss": 0.78, + "step": 3522 + }, + { + "epoch": 3.89712389380531, + "grad_norm": 0.11376953125, + "learning_rate": 2.5719026548672567e-07, + "loss": 0.7726, + "step": 3523 + }, + { + "epoch": 3.898230088495575, + "grad_norm": 0.111328125, + "learning_rate": 2.54424778761062e-07, + "loss": 0.7595, + "step": 3524 + }, + { + "epoch": 3.899336283185841, + "grad_norm": 0.1123046875, + "learning_rate": 2.5165929203539824e-07, + "loss": 0.8189, + "step": 3525 + }, + { + "epoch": 3.900442477876106, + "grad_norm": 0.10888671875, + "learning_rate": 2.488938053097345e-07, + "loss": 0.7426, + "step": 3526 + }, + { + "epoch": 3.9015486725663715, + "grad_norm": 0.11865234375, + "learning_rate": 2.461283185840708e-07, + "loss": 0.7856, + "step": 3527 + }, + { + "epoch": 3.9026548672566372, + "grad_norm": 0.11474609375, + "learning_rate": 2.433628318584071e-07, + "loss": 0.7655, + "step": 3528 + }, + { + "epoch": 3.9037610619469025, + "grad_norm": 0.09912109375, + "learning_rate": 2.405973451327434e-07, + "loss": 0.7007, + "step": 3529 + }, + { + "epoch": 3.9048672566371683, + "grad_norm": 0.166015625, + "learning_rate": 2.3783185840707966e-07, + "loss": 0.7861, + "step": 3530 + }, + { + "epoch": 3.9059734513274336, + "grad_norm": 0.123046875, + "learning_rate": 2.3506637168141592e-07, + "loss": 0.816, + "step": 3531 + }, + { + "epoch": 3.9070796460176993, + "grad_norm": 0.11181640625, + "learning_rate": 2.3230088495575223e-07, + "loss": 0.7619, + "step": 3532 + }, + { + "epoch": 3.9081858407079646, + "grad_norm": 0.126953125, + "learning_rate": 2.2953539823008852e-07, + "loss": 0.8237, + "step": 3533 + }, + { + "epoch": 3.90929203539823, + "grad_norm": 0.10888671875, + "learning_rate": 2.267699115044248e-07, + "loss": 0.7601, + "step": 3534 + }, + { + "epoch": 3.9103982300884956, + "grad_norm": 0.1220703125, + "learning_rate": 2.240044247787611e-07, + "loss": 0.7206, + "step": 3535 + }, + { + "epoch": 3.911504424778761, + "grad_norm": 0.109375, + "learning_rate": 2.2123893805309737e-07, + "loss": 0.7475, + "step": 3536 + }, + { + "epoch": 3.9126106194690267, + "grad_norm": 0.10888671875, + "learning_rate": 2.1847345132743363e-07, + "loss": 0.7761, + "step": 3537 + }, + { + "epoch": 3.913716814159292, + "grad_norm": 0.166015625, + "learning_rate": 2.1570796460176992e-07, + "loss": 0.7445, + "step": 3538 + }, + { + "epoch": 3.9148230088495577, + "grad_norm": 0.1083984375, + "learning_rate": 2.129424778761062e-07, + "loss": 0.763, + "step": 3539 + }, + { + "epoch": 3.915929203539823, + "grad_norm": 0.1298828125, + "learning_rate": 2.101769911504425e-07, + "loss": 0.8248, + "step": 3540 + }, + { + "epoch": 3.9170353982300883, + "grad_norm": 0.10302734375, + "learning_rate": 2.074115044247788e-07, + "loss": 0.7363, + "step": 3541 + }, + { + "epoch": 3.918141592920354, + "grad_norm": 0.11865234375, + "learning_rate": 2.0464601769911505e-07, + 
"loss": 0.8015, + "step": 3542 + }, + { + "epoch": 3.9192477876106193, + "grad_norm": 0.1533203125, + "learning_rate": 2.0188053097345134e-07, + "loss": 0.7846, + "step": 3543 + }, + { + "epoch": 3.920353982300885, + "grad_norm": 0.10498046875, + "learning_rate": 1.9911504424778762e-07, + "loss": 0.7781, + "step": 3544 + }, + { + "epoch": 3.9214601769911503, + "grad_norm": 0.10302734375, + "learning_rate": 1.963495575221239e-07, + "loss": 0.7601, + "step": 3545 + }, + { + "epoch": 3.922566371681416, + "grad_norm": 0.1376953125, + "learning_rate": 1.935840707964602e-07, + "loss": 0.7387, + "step": 3546 + }, + { + "epoch": 3.9236725663716814, + "grad_norm": 0.10986328125, + "learning_rate": 1.9081858407079645e-07, + "loss": 0.743, + "step": 3547 + }, + { + "epoch": 3.9247787610619467, + "grad_norm": 0.11376953125, + "learning_rate": 1.8805309734513276e-07, + "loss": 0.7477, + "step": 3548 + }, + { + "epoch": 3.9258849557522124, + "grad_norm": 0.10302734375, + "learning_rate": 1.8528761061946905e-07, + "loss": 0.7312, + "step": 3549 + }, + { + "epoch": 3.926991150442478, + "grad_norm": 0.10791015625, + "learning_rate": 1.8252212389380533e-07, + "loss": 0.7648, + "step": 3550 + }, + { + "epoch": 3.9280973451327434, + "grad_norm": 0.11767578125, + "learning_rate": 1.7975663716814162e-07, + "loss": 0.7756, + "step": 3551 + }, + { + "epoch": 3.9292035398230087, + "grad_norm": 0.10498046875, + "learning_rate": 1.7699115044247788e-07, + "loss": 0.7435, + "step": 3552 + }, + { + "epoch": 3.9303097345132745, + "grad_norm": 0.1494140625, + "learning_rate": 1.7422566371681416e-07, + "loss": 0.7355, + "step": 3553 + }, + { + "epoch": 3.9314159292035398, + "grad_norm": 0.11669921875, + "learning_rate": 1.7146017699115045e-07, + "loss": 0.7788, + "step": 3554 + }, + { + "epoch": 3.932522123893805, + "grad_norm": 0.1337890625, + "learning_rate": 1.6869469026548673e-07, + "loss": 0.7776, + "step": 3555 + }, + { + "epoch": 3.933628318584071, + "grad_norm": 0.09716796875, + "learning_rate": 1.6592920353982304e-07, + "loss": 0.7268, + "step": 3556 + }, + { + "epoch": 3.9347345132743365, + "grad_norm": 0.1142578125, + "learning_rate": 1.6316371681415933e-07, + "loss": 0.7926, + "step": 3557 + }, + { + "epoch": 3.935840707964602, + "grad_norm": 0.10400390625, + "learning_rate": 1.6039823008849559e-07, + "loss": 0.7526, + "step": 3558 + }, + { + "epoch": 3.936946902654867, + "grad_norm": 0.111328125, + "learning_rate": 1.5763274336283187e-07, + "loss": 0.787, + "step": 3559 + }, + { + "epoch": 3.938053097345133, + "grad_norm": 0.1083984375, + "learning_rate": 1.5486725663716816e-07, + "loss": 0.7571, + "step": 3560 + }, + { + "epoch": 3.939159292035398, + "grad_norm": 0.109375, + "learning_rate": 1.5210176991150444e-07, + "loss": 0.7465, + "step": 3561 + }, + { + "epoch": 3.9402654867256635, + "grad_norm": 0.1123046875, + "learning_rate": 1.4933628318584072e-07, + "loss": 0.801, + "step": 3562 + }, + { + "epoch": 3.941371681415929, + "grad_norm": 0.11572265625, + "learning_rate": 1.46570796460177e-07, + "loss": 0.7511, + "step": 3563 + }, + { + "epoch": 3.942477876106195, + "grad_norm": 0.109375, + "learning_rate": 1.438053097345133e-07, + "loss": 0.7446, + "step": 3564 + }, + { + "epoch": 3.9435840707964602, + "grad_norm": 0.1083984375, + "learning_rate": 1.4103982300884955e-07, + "loss": 0.7334, + "step": 3565 + }, + { + "epoch": 3.9446902654867255, + "grad_norm": 0.1123046875, + "learning_rate": 1.3827433628318586e-07, + "loss": 0.7963, + "step": 3566 + }, + { + "epoch": 3.9457964601769913, + "grad_norm": 
0.111328125, + "learning_rate": 1.3550884955752215e-07, + "loss": 0.7607, + "step": 3567 + }, + { + "epoch": 3.9469026548672566, + "grad_norm": 0.1787109375, + "learning_rate": 1.327433628318584e-07, + "loss": 0.7825, + "step": 3568 + }, + { + "epoch": 3.948008849557522, + "grad_norm": 0.10791015625, + "learning_rate": 1.299778761061947e-07, + "loss": 0.7628, + "step": 3569 + }, + { + "epoch": 3.9491150442477876, + "grad_norm": 0.10595703125, + "learning_rate": 1.27212389380531e-07, + "loss": 0.7761, + "step": 3570 + }, + { + "epoch": 3.9502212389380533, + "grad_norm": 0.115234375, + "learning_rate": 1.2444690265486726e-07, + "loss": 0.75, + "step": 3571 + }, + { + "epoch": 3.9513274336283186, + "grad_norm": 0.1337890625, + "learning_rate": 1.2168141592920355e-07, + "loss": 0.7487, + "step": 3572 + }, + { + "epoch": 3.952433628318584, + "grad_norm": 0.166015625, + "learning_rate": 1.1891592920353983e-07, + "loss": 0.7636, + "step": 3573 + }, + { + "epoch": 3.9535398230088497, + "grad_norm": 0.12451171875, + "learning_rate": 1.1615044247787612e-07, + "loss": 0.7855, + "step": 3574 + }, + { + "epoch": 3.954646017699115, + "grad_norm": 0.12109375, + "learning_rate": 1.133849557522124e-07, + "loss": 0.7893, + "step": 3575 + }, + { + "epoch": 3.9557522123893807, + "grad_norm": 0.11083984375, + "learning_rate": 1.1061946902654869e-07, + "loss": 0.7756, + "step": 3576 + }, + { + "epoch": 3.956858407079646, + "grad_norm": 0.12060546875, + "learning_rate": 1.0785398230088496e-07, + "loss": 0.8081, + "step": 3577 + }, + { + "epoch": 3.9579646017699117, + "grad_norm": 0.109375, + "learning_rate": 1.0508849557522126e-07, + "loss": 0.7685, + "step": 3578 + }, + { + "epoch": 3.959070796460177, + "grad_norm": 0.1142578125, + "learning_rate": 1.0232300884955753e-07, + "loss": 0.7497, + "step": 3579 + }, + { + "epoch": 3.9601769911504423, + "grad_norm": 0.1123046875, + "learning_rate": 9.955752212389381e-08, + "loss": 0.7398, + "step": 3580 + }, + { + "epoch": 3.961283185840708, + "grad_norm": 0.11669921875, + "learning_rate": 9.67920353982301e-08, + "loss": 0.7704, + "step": 3581 + }, + { + "epoch": 3.9623893805309733, + "grad_norm": 0.1806640625, + "learning_rate": 9.402654867256638e-08, + "loss": 0.7609, + "step": 3582 + }, + { + "epoch": 3.963495575221239, + "grad_norm": 0.11328125, + "learning_rate": 9.126106194690267e-08, + "loss": 0.7699, + "step": 3583 + }, + { + "epoch": 3.9646017699115044, + "grad_norm": 0.10693359375, + "learning_rate": 8.849557522123894e-08, + "loss": 0.7692, + "step": 3584 + }, + { + "epoch": 3.96570796460177, + "grad_norm": 0.10498046875, + "learning_rate": 8.573008849557522e-08, + "loss": 0.731, + "step": 3585 + }, + { + "epoch": 3.9668141592920354, + "grad_norm": 0.109375, + "learning_rate": 8.296460176991152e-08, + "loss": 0.7784, + "step": 3586 + }, + { + "epoch": 3.9679203539823007, + "grad_norm": 0.11376953125, + "learning_rate": 8.019911504424779e-08, + "loss": 0.741, + "step": 3587 + }, + { + "epoch": 3.9690265486725664, + "grad_norm": 0.11083984375, + "learning_rate": 7.743362831858408e-08, + "loss": 0.7622, + "step": 3588 + }, + { + "epoch": 3.9701327433628317, + "grad_norm": 0.10986328125, + "learning_rate": 7.466814159292036e-08, + "loss": 0.7787, + "step": 3589 + }, + { + "epoch": 3.9712389380530975, + "grad_norm": 0.119140625, + "learning_rate": 7.190265486725665e-08, + "loss": 0.7842, + "step": 3590 + }, + { + "epoch": 3.9723451327433628, + "grad_norm": 0.1083984375, + "learning_rate": 6.913716814159293e-08, + "loss": 0.7662, + "step": 3591 + }, + { + "epoch": 
3.9734513274336285, + "grad_norm": 0.11181640625, + "learning_rate": 6.63716814159292e-08, + "loss": 0.7615, + "step": 3592 + }, + { + "epoch": 3.974557522123894, + "grad_norm": 0.1259765625, + "learning_rate": 6.36061946902655e-08, + "loss": 0.7459, + "step": 3593 + }, + { + "epoch": 3.975663716814159, + "grad_norm": 0.1103515625, + "learning_rate": 6.084070796460177e-08, + "loss": 0.7654, + "step": 3594 + }, + { + "epoch": 3.976769911504425, + "grad_norm": 0.10986328125, + "learning_rate": 5.807522123893806e-08, + "loss": 0.7868, + "step": 3595 + }, + { + "epoch": 3.97787610619469, + "grad_norm": 0.11279296875, + "learning_rate": 5.530973451327434e-08, + "loss": 0.7866, + "step": 3596 + }, + { + "epoch": 3.978982300884956, + "grad_norm": 0.134765625, + "learning_rate": 5.254424778761063e-08, + "loss": 0.8269, + "step": 3597 + }, + { + "epoch": 3.980088495575221, + "grad_norm": 0.11474609375, + "learning_rate": 4.9778761061946906e-08, + "loss": 0.7598, + "step": 3598 + }, + { + "epoch": 3.981194690265487, + "grad_norm": 0.10986328125, + "learning_rate": 4.701327433628319e-08, + "loss": 0.7803, + "step": 3599 + }, + { + "epoch": 3.982300884955752, + "grad_norm": 0.115234375, + "learning_rate": 4.424778761061947e-08, + "loss": 0.783, + "step": 3600 + }, + { + "epoch": 3.9834070796460175, + "grad_norm": 0.111328125, + "learning_rate": 4.148230088495576e-08, + "loss": 0.7567, + "step": 3601 + }, + { + "epoch": 3.984513274336283, + "grad_norm": 0.111328125, + "learning_rate": 3.871681415929204e-08, + "loss": 0.786, + "step": 3602 + }, + { + "epoch": 3.9856194690265485, + "grad_norm": 0.10302734375, + "learning_rate": 3.5951327433628324e-08, + "loss": 0.7732, + "step": 3603 + }, + { + "epoch": 3.9867256637168142, + "grad_norm": 0.1376953125, + "learning_rate": 3.31858407079646e-08, + "loss": 0.7544, + "step": 3604 + }, + { + "epoch": 3.9878318584070795, + "grad_norm": 0.1240234375, + "learning_rate": 3.0420353982300887e-08, + "loss": 0.7933, + "step": 3605 + }, + { + "epoch": 3.9889380530973453, + "grad_norm": 0.123046875, + "learning_rate": 2.765486725663717e-08, + "loss": 0.7583, + "step": 3606 + }, + { + "epoch": 3.9900442477876106, + "grad_norm": 0.1220703125, + "learning_rate": 2.4889380530973453e-08, + "loss": 0.8267, + "step": 3607 + }, + { + "epoch": 3.991150442477876, + "grad_norm": 0.10498046875, + "learning_rate": 2.2123893805309735e-08, + "loss": 0.7518, + "step": 3608 + }, + { + "epoch": 3.9922566371681416, + "grad_norm": 0.11279296875, + "learning_rate": 1.935840707964602e-08, + "loss": 0.7463, + "step": 3609 + }, + { + "epoch": 3.9933628318584073, + "grad_norm": 0.1103515625, + "learning_rate": 1.65929203539823e-08, + "loss": 0.7644, + "step": 3610 + }, + { + "epoch": 3.9944690265486726, + "grad_norm": 0.1083984375, + "learning_rate": 1.3827433628318586e-08, + "loss": 0.7414, + "step": 3611 + }, + { + "epoch": 3.995575221238938, + "grad_norm": 0.1494140625, + "learning_rate": 1.1061946902654867e-08, + "loss": 0.762, + "step": 3612 + }, + { + "epoch": 3.9966814159292037, + "grad_norm": 0.11376953125, + "learning_rate": 8.29646017699115e-09, + "loss": 0.7632, + "step": 3613 + }, + { + "epoch": 3.997787610619469, + "grad_norm": 0.1162109375, + "learning_rate": 5.530973451327434e-09, + "loss": 0.7431, + "step": 3614 + }, + { + "epoch": 3.9988938053097343, + "grad_norm": 0.1181640625, + "learning_rate": 2.765486725663717e-09, + "loss": 0.7647, + "step": 3615 + }, + { + "epoch": 4.0, + "grad_norm": 0.1162109375, + "learning_rate": 0.0, + "loss": 0.7439, + "step": 3616 + } + ], + 
"logging_steps": 1.0, + "max_steps": 3616, + "num_input_tokens_seen": 0, + "num_train_epochs": 4, + "save_steps": 0, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 1.1647918971822277e+19, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}