{ "best_metric": null, "best_model_checkpoint": null, "epoch": 0.2293314986813439, "eval_steps": 500, "global_step": 2000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 6.2779, "step": 1 }, { "epoch": 0.0, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 6.1581, "step": 2 }, { "epoch": 0.0, "grad_norm": 14.976716136383175, "learning_rate": 3.816793893129771e-06, "loss": 6.2063, "step": 3 }, { "epoch": 0.0, "grad_norm": 14.397488353328699, "learning_rate": 7.633587786259541e-06, "loss": 6.2989, "step": 4 }, { "epoch": 0.0, "grad_norm": 14.397488353328699, "learning_rate": 7.633587786259541e-06, "loss": 6.2908, "step": 5 }, { "epoch": 0.0, "grad_norm": 18.89007945034403, "learning_rate": 1.1450381679389314e-05, "loss": 6.0954, "step": 6 }, { "epoch": 0.0, "grad_norm": 18.89007945034403, "learning_rate": 1.1450381679389314e-05, "loss": 6.2841, "step": 7 }, { "epoch": 0.0, "grad_norm": 11.409346480686976, "learning_rate": 1.5267175572519083e-05, "loss": 6.3522, "step": 8 }, { "epoch": 0.0, "grad_norm": 28.346056360110595, "learning_rate": 1.9083969465648855e-05, "loss": 6.2796, "step": 9 }, { "epoch": 0.0, "grad_norm": 15.852481128362015, "learning_rate": 2.2900763358778628e-05, "loss": 6.2723, "step": 10 }, { "epoch": 0.0, "grad_norm": 15.852481128362015, "learning_rate": 2.2900763358778628e-05, "loss": 5.9952, "step": 11 }, { "epoch": 0.0, "grad_norm": 48.40977193887413, "learning_rate": 2.6717557251908397e-05, "loss": 6.1167, "step": 12 }, { "epoch": 0.0, "grad_norm": 9.259901239279529, "learning_rate": 3.0534351145038166e-05, "loss": 6.2158, "step": 13 }, { "epoch": 0.0, "grad_norm": 11.429600547441563, "learning_rate": 3.435114503816794e-05, "loss": 6.1169, "step": 14 }, { "epoch": 0.0, "grad_norm": 11.429600547441563, "learning_rate": 3.435114503816794e-05, "loss": 6.1389, "step": 15 }, { "epoch": 0.0, "grad_norm": 15.704367677180992, "learning_rate": 3.816793893129771e-05, "loss": 6.0987, "step": 16 }, { "epoch": 0.0, "grad_norm": 33.68032910168496, "learning_rate": 4.198473282442748e-05, "loss": 6.0965, "step": 17 }, { "epoch": 0.0, "grad_norm": 11.896102639886562, "learning_rate": 4.5801526717557256e-05, "loss": 5.6258, "step": 18 }, { "epoch": 0.0, "grad_norm": 15.481580862690418, "learning_rate": 4.9618320610687025e-05, "loss": 5.9343, "step": 19 }, { "epoch": 0.0, "grad_norm": 12.662713159465534, "learning_rate": 5.3435114503816794e-05, "loss": 5.7158, "step": 20 }, { "epoch": 0.0, "grad_norm": 15.649889306348113, "learning_rate": 5.725190839694656e-05, "loss": 5.8075, "step": 21 }, { "epoch": 0.0, "grad_norm": 65.94567816487748, "learning_rate": 6.106870229007633e-05, "loss": 5.6104, "step": 22 }, { "epoch": 0.0, "grad_norm": 13.500161461978536, "learning_rate": 6.488549618320611e-05, "loss": 5.7245, "step": 23 }, { "epoch": 0.0, "grad_norm": 39.63275193406311, "learning_rate": 6.870229007633588e-05, "loss": 5.4938, "step": 24 }, { "epoch": 0.0, "grad_norm": 25.79160793928747, "learning_rate": 7.251908396946565e-05, "loss": 5.8096, "step": 25 }, { "epoch": 0.0, "grad_norm": 8.833397229553675, "learning_rate": 7.633587786259542e-05, "loss": 5.6985, "step": 26 }, { "epoch": 0.0, "grad_norm": 7.395053164765045, "learning_rate": 8.015267175572518e-05, "loss": 5.6925, "step": 27 }, { "epoch": 0.0, "grad_norm": 41.89909059331258, "learning_rate": 8.396946564885496e-05, "loss": 5.4813, "step": 28 }, { "epoch": 0.0, "grad_norm": 10.966138043160779, 
"learning_rate": 8.778625954198472e-05, "loss": 5.6698, "step": 29 }, { "epoch": 0.0, "grad_norm": 12.7283315374098, "learning_rate": 9.160305343511451e-05, "loss": 5.6617, "step": 30 }, { "epoch": 0.0, "grad_norm": 11.675016207985045, "learning_rate": 9.541984732824429e-05, "loss": 5.2054, "step": 31 }, { "epoch": 0.0, "grad_norm": 9.324595112650963, "learning_rate": 9.923664122137405e-05, "loss": 5.2573, "step": 32 }, { "epoch": 0.0, "grad_norm": 5.462088289687755, "learning_rate": 0.00010305343511450383, "loss": 5.3966, "step": 33 }, { "epoch": 0.0, "grad_norm": 28.258431587421967, "learning_rate": 0.00010687022900763359, "loss": 5.5193, "step": 34 }, { "epoch": 0.0, "grad_norm": 8.847354229930158, "learning_rate": 0.00011068702290076336, "loss": 5.1725, "step": 35 }, { "epoch": 0.0, "grad_norm": 5.897059143557035, "learning_rate": 0.00011450381679389313, "loss": 5.1518, "step": 36 }, { "epoch": 0.0, "grad_norm": 5.112023703007317, "learning_rate": 0.0001183206106870229, "loss": 5.1591, "step": 37 }, { "epoch": 0.0, "grad_norm": 5.073706203205232, "learning_rate": 0.00012213740458015266, "loss": 5.0398, "step": 38 }, { "epoch": 0.0, "grad_norm": 10.989327102728486, "learning_rate": 0.00012595419847328244, "loss": 5.112, "step": 39 }, { "epoch": 0.0, "grad_norm": 8.494888378939828, "learning_rate": 0.00012977099236641222, "loss": 5.2983, "step": 40 }, { "epoch": 0.0, "grad_norm": 6.029727471334503, "learning_rate": 0.000133587786259542, "loss": 5.1413, "step": 41 }, { "epoch": 0.0, "grad_norm": 10.580341148041809, "learning_rate": 0.00013740458015267177, "loss": 5.3161, "step": 42 }, { "epoch": 0.0, "grad_norm": 7.861892307202481, "learning_rate": 0.00014122137404580154, "loss": 5.0632, "step": 43 }, { "epoch": 0.01, "grad_norm": 5.032287490494244, "learning_rate": 0.0001450381679389313, "loss": 5.0655, "step": 44 }, { "epoch": 0.01, "grad_norm": 5.226651239598461, "learning_rate": 0.00014885496183206107, "loss": 5.2151, "step": 45 }, { "epoch": 0.01, "grad_norm": 5.2839008800377965, "learning_rate": 0.00015267175572519084, "loss": 5.2635, "step": 46 }, { "epoch": 0.01, "grad_norm": 16.208329066599074, "learning_rate": 0.00015648854961832062, "loss": 5.2023, "step": 47 }, { "epoch": 0.01, "grad_norm": 13.549653768489149, "learning_rate": 0.00016030534351145037, "loss": 5.2272, "step": 48 }, { "epoch": 0.01, "grad_norm": 5.811721203312905, "learning_rate": 0.00016412213740458014, "loss": 4.9626, "step": 49 }, { "epoch": 0.01, "grad_norm": 6.572309930980864, "learning_rate": 0.00016793893129770992, "loss": 5.0291, "step": 50 }, { "epoch": 0.01, "grad_norm": 5.17916044761434, "learning_rate": 0.0001717557251908397, "loss": 5.1589, "step": 51 }, { "epoch": 0.01, "grad_norm": 7.194489680135254, "learning_rate": 0.00017557251908396944, "loss": 5.2516, "step": 52 }, { "epoch": 0.01, "grad_norm": 10.55005706873481, "learning_rate": 0.00017938931297709925, "loss": 5.0086, "step": 53 }, { "epoch": 0.01, "grad_norm": 3.6321119276045897, "learning_rate": 0.00018320610687022902, "loss": 5.0754, "step": 54 }, { "epoch": 0.01, "grad_norm": 7.810860089915512, "learning_rate": 0.0001870229007633588, "loss": 4.9908, "step": 55 }, { "epoch": 0.01, "grad_norm": 3.940902186870088, "learning_rate": 0.00019083969465648857, "loss": 4.9833, "step": 56 }, { "epoch": 0.01, "grad_norm": 4.611002381269936, "learning_rate": 0.00019465648854961832, "loss": 5.0436, "step": 57 }, { "epoch": 0.01, "grad_norm": 20.054002059058206, "learning_rate": 0.0001984732824427481, "loss": 5.0632, "step": 58 }, { "epoch": 0.01, 
"grad_norm": 3.911527035099265, "learning_rate": 0.00020229007633587788, "loss": 4.6758, "step": 59 }, { "epoch": 0.01, "grad_norm": 11.37665907428031, "learning_rate": 0.00020610687022900765, "loss": 5.1242, "step": 60 }, { "epoch": 0.01, "grad_norm": 3.704198024162129, "learning_rate": 0.0002099236641221374, "loss": 5.1245, "step": 61 }, { "epoch": 0.01, "grad_norm": 3.7871548009528184, "learning_rate": 0.00021374045801526718, "loss": 4.891, "step": 62 }, { "epoch": 0.01, "grad_norm": 3.136501595775406, "learning_rate": 0.00021755725190839695, "loss": 4.7869, "step": 63 }, { "epoch": 0.01, "grad_norm": 5.568374478750777, "learning_rate": 0.00022137404580152673, "loss": 4.8957, "step": 64 }, { "epoch": 0.01, "grad_norm": 3.697055076084128, "learning_rate": 0.00022519083969465648, "loss": 4.5216, "step": 65 }, { "epoch": 0.01, "grad_norm": 4.569269081902782, "learning_rate": 0.00022900763358778625, "loss": 4.9846, "step": 66 }, { "epoch": 0.01, "grad_norm": 6.716615480986148, "learning_rate": 0.00023282442748091603, "loss": 4.7787, "step": 67 }, { "epoch": 0.01, "grad_norm": 3.3323716057495862, "learning_rate": 0.0002366412213740458, "loss": 4.8606, "step": 68 }, { "epoch": 0.01, "grad_norm": 9.383012479834845, "learning_rate": 0.00024045801526717558, "loss": 4.7711, "step": 69 }, { "epoch": 0.01, "grad_norm": 4.032737783937775, "learning_rate": 0.00024427480916030533, "loss": 4.6477, "step": 70 }, { "epoch": 0.01, "grad_norm": 5.309847236686972, "learning_rate": 0.00024809160305343513, "loss": 4.7642, "step": 71 }, { "epoch": 0.01, "grad_norm": 5.573701930155391, "learning_rate": 0.0002519083969465649, "loss": 4.8587, "step": 72 }, { "epoch": 0.01, "grad_norm": 8.322277059585852, "learning_rate": 0.00025572519083969463, "loss": 4.5204, "step": 73 }, { "epoch": 0.01, "grad_norm": 5.665817232441201, "learning_rate": 0.00025954198473282443, "loss": 4.7847, "step": 74 }, { "epoch": 0.01, "grad_norm": 139.03167086367495, "learning_rate": 0.0002633587786259542, "loss": 4.6664, "step": 75 }, { "epoch": 0.01, "grad_norm": 17.6803224637041, "learning_rate": 0.000267175572519084, "loss": 5.0634, "step": 76 }, { "epoch": 0.01, "grad_norm": 22.524418929291922, "learning_rate": 0.00027099236641221373, "loss": 5.5552, "step": 77 }, { "epoch": 0.01, "grad_norm": 6.950517134284506, "learning_rate": 0.00027480916030534353, "loss": 5.3287, "step": 78 }, { "epoch": 0.01, "grad_norm": 15.379633137654181, "learning_rate": 0.0002786259541984733, "loss": 5.2802, "step": 79 }, { "epoch": 0.01, "grad_norm": 20.268995253997705, "learning_rate": 0.0002824427480916031, "loss": 5.0836, "step": 80 }, { "epoch": 0.01, "grad_norm": 29.259856808399377, "learning_rate": 0.0002862595419847328, "loss": 5.2734, "step": 81 }, { "epoch": 0.01, "grad_norm": 9.348142659709751, "learning_rate": 0.0002900763358778626, "loss": 5.0488, "step": 82 }, { "epoch": 0.01, "grad_norm": 14.07726663498388, "learning_rate": 0.0002938931297709924, "loss": 5.0828, "step": 83 }, { "epoch": 0.01, "grad_norm": 38.94377862083665, "learning_rate": 0.00029770992366412214, "loss": 5.059, "step": 84 }, { "epoch": 0.01, "grad_norm": 5.189460782597058, "learning_rate": 0.00030152671755725194, "loss": 4.9703, "step": 85 }, { "epoch": 0.01, "grad_norm": 11.055190070541768, "learning_rate": 0.0003053435114503817, "loss": 4.7264, "step": 86 }, { "epoch": 0.01, "grad_norm": 9.82024718477547, "learning_rate": 0.0003091603053435115, "loss": 5.0345, "step": 87 }, { "epoch": 0.01, "grad_norm": 3.978344495248824, "learning_rate": 0.00031297709923664124, "loss": 
4.7281, "step": 88 }, { "epoch": 0.01, "grad_norm": 5.101628294492602, "learning_rate": 0.000316793893129771, "loss": 4.6132, "step": 89 }, { "epoch": 0.01, "grad_norm": 5.145155139352889, "learning_rate": 0.00032061068702290074, "loss": 4.8203, "step": 90 }, { "epoch": 0.01, "grad_norm": 4.541789314626976, "learning_rate": 0.00032442748091603054, "loss": 4.9099, "step": 91 }, { "epoch": 0.01, "grad_norm": 22.161052158363848, "learning_rate": 0.0003282442748091603, "loss": 4.9713, "step": 92 }, { "epoch": 0.01, "grad_norm": 5.7331702462659, "learning_rate": 0.0003320610687022901, "loss": 4.8406, "step": 93 }, { "epoch": 0.01, "grad_norm": 5.288023385461357, "learning_rate": 0.00033587786259541984, "loss": 4.7399, "step": 94 }, { "epoch": 0.01, "grad_norm": 10.254748606389127, "learning_rate": 0.00033969465648854964, "loss": 4.6848, "step": 95 }, { "epoch": 0.01, "grad_norm": 9.771604247240735, "learning_rate": 0.0003435114503816794, "loss": 4.7795, "step": 96 }, { "epoch": 0.01, "grad_norm": 3.695161580609848, "learning_rate": 0.0003473282442748092, "loss": 4.5722, "step": 97 }, { "epoch": 0.01, "grad_norm": 4.980790618859492, "learning_rate": 0.0003511450381679389, "loss": 4.6186, "step": 98 }, { "epoch": 0.01, "grad_norm": 22.240432294680442, "learning_rate": 0.0003549618320610687, "loss": 4.825, "step": 99 }, { "epoch": 0.01, "grad_norm": 8.239680655788304, "learning_rate": 0.0003587786259541985, "loss": 4.6153, "step": 100 }, { "epoch": 0.01, "grad_norm": 7.551497396505139, "learning_rate": 0.00036259541984732824, "loss": 4.6794, "step": 101 }, { "epoch": 0.01, "grad_norm": 7.914826041030228, "learning_rate": 0.00036641221374045805, "loss": 4.6659, "step": 102 }, { "epoch": 0.01, "grad_norm": 5.029399027607916, "learning_rate": 0.0003702290076335878, "loss": 4.5387, "step": 103 }, { "epoch": 0.01, "grad_norm": 3.660324171777086, "learning_rate": 0.0003740458015267176, "loss": 4.5091, "step": 104 }, { "epoch": 0.01, "grad_norm": 7.040733663725759, "learning_rate": 0.00037786259541984735, "loss": 4.7529, "step": 105 }, { "epoch": 0.01, "grad_norm": 4.8592159543896996, "learning_rate": 0.00038167938931297715, "loss": 4.7159, "step": 106 }, { "epoch": 0.01, "grad_norm": 4.6294828525314395, "learning_rate": 0.00038549618320610684, "loss": 4.7294, "step": 107 }, { "epoch": 0.01, "grad_norm": 3.979668727923852, "learning_rate": 0.00038931297709923665, "loss": 4.3714, "step": 108 }, { "epoch": 0.01, "grad_norm": 5.581717881378127, "learning_rate": 0.0003931297709923664, "loss": 4.6298, "step": 109 }, { "epoch": 0.01, "grad_norm": 4.448372221263443, "learning_rate": 0.0003969465648854962, "loss": 4.7571, "step": 110 }, { "epoch": 0.01, "grad_norm": 3.2948860235700668, "learning_rate": 0.00040076335877862595, "loss": 4.3805, "step": 111 }, { "epoch": 0.01, "grad_norm": 3.9831330136880223, "learning_rate": 0.00040458015267175575, "loss": 4.5136, "step": 112 }, { "epoch": 0.01, "grad_norm": 2.6498001940669886, "learning_rate": 0.0004083969465648855, "loss": 4.49, "step": 113 }, { "epoch": 0.01, "grad_norm": 3.3364447260683896, "learning_rate": 0.0004122137404580153, "loss": 4.5943, "step": 114 }, { "epoch": 0.01, "grad_norm": 2.991466654429088, "learning_rate": 0.00041603053435114505, "loss": 4.52, "step": 115 }, { "epoch": 0.01, "grad_norm": 5.082429278038854, "learning_rate": 0.0004198473282442748, "loss": 4.4996, "step": 116 }, { "epoch": 0.01, "grad_norm": 52.230357543862944, "learning_rate": 0.00042366412213740455, "loss": 4.7485, "step": 117 }, { "epoch": 0.01, "grad_norm": 4.21748080112374, 
"learning_rate": 0.00042748091603053435, "loss": 4.4279, "step": 118 }, { "epoch": 0.01, "grad_norm": 4.91623677075748, "learning_rate": 0.00043129770992366415, "loss": 4.3156, "step": 119 }, { "epoch": 0.01, "grad_norm": 3.450892416973839, "learning_rate": 0.0004351145038167939, "loss": 4.3349, "step": 120 }, { "epoch": 0.01, "grad_norm": 4.104235347067771, "learning_rate": 0.0004389312977099237, "loss": 4.7161, "step": 121 }, { "epoch": 0.01, "grad_norm": 2.0993732703424324, "learning_rate": 0.00044274809160305345, "loss": 4.4699, "step": 122 }, { "epoch": 0.01, "grad_norm": 2.960992832423532, "learning_rate": 0.00044656488549618326, "loss": 4.3197, "step": 123 }, { "epoch": 0.01, "grad_norm": 3.212027498483674, "learning_rate": 0.00045038167938931295, "loss": 4.5707, "step": 124 }, { "epoch": 0.01, "grad_norm": 2.4869173074265962, "learning_rate": 0.00045419847328244275, "loss": 4.5839, "step": 125 }, { "epoch": 0.01, "grad_norm": 4.181475099579808, "learning_rate": 0.0004580152671755725, "loss": 4.5064, "step": 126 }, { "epoch": 0.01, "grad_norm": 2.1942828551524878, "learning_rate": 0.0004618320610687023, "loss": 4.3979, "step": 127 }, { "epoch": 0.01, "grad_norm": 6.115485717946443, "learning_rate": 0.00046564885496183206, "loss": 4.3856, "step": 128 }, { "epoch": 0.01, "grad_norm": 3.141524556275388, "learning_rate": 0.00046946564885496186, "loss": 4.5029, "step": 129 }, { "epoch": 0.01, "grad_norm": 2.382205404063897, "learning_rate": 0.0004732824427480916, "loss": 4.3509, "step": 130 }, { "epoch": 0.02, "grad_norm": 5.5162684169269705, "learning_rate": 0.0004770992366412214, "loss": 4.4551, "step": 131 }, { "epoch": 0.02, "grad_norm": 4.9351584500250985, "learning_rate": 0.00048091603053435116, "loss": 4.5373, "step": 132 }, { "epoch": 0.02, "grad_norm": 1.9141791947655433, "learning_rate": 0.0004847328244274809, "loss": 4.29, "step": 133 }, { "epoch": 0.02, "grad_norm": 2.124128141029574, "learning_rate": 0.0004885496183206107, "loss": 4.2364, "step": 134 }, { "epoch": 0.02, "grad_norm": 1.9783730396723103, "learning_rate": 0.0004923664122137404, "loss": 4.6155, "step": 135 }, { "epoch": 0.02, "grad_norm": 4.07325353745457, "learning_rate": 0.0004961832061068703, "loss": 4.5372, "step": 136 }, { "epoch": 0.02, "grad_norm": 2.8313079291733185, "learning_rate": 0.0005, "loss": 4.3088, "step": 137 }, { "epoch": 0.02, "grad_norm": 2.152865710451047, "learning_rate": 0.0005038167938931298, "loss": 4.6045, "step": 138 }, { "epoch": 0.02, "grad_norm": 3.3967945768183245, "learning_rate": 0.0005076335877862596, "loss": 4.2817, "step": 139 }, { "epoch": 0.02, "grad_norm": 2.4644266643742863, "learning_rate": 0.0005114503816793893, "loss": 4.4599, "step": 140 }, { "epoch": 0.02, "grad_norm": 1.544944406113682, "learning_rate": 0.0005152671755725191, "loss": 4.1953, "step": 141 }, { "epoch": 0.02, "grad_norm": 6.150666133802115, "learning_rate": 0.0005190839694656489, "loss": 4.2241, "step": 142 }, { "epoch": 0.02, "grad_norm": 2.705915106052184, "learning_rate": 0.0005229007633587787, "loss": 4.2867, "step": 143 }, { "epoch": 0.02, "grad_norm": 2.1087313750685692, "learning_rate": 0.0005267175572519084, "loss": 4.2412, "step": 144 }, { "epoch": 0.02, "grad_norm": 3.289305669092866, "learning_rate": 0.0005305343511450382, "loss": 4.3098, "step": 145 }, { "epoch": 0.02, "grad_norm": 1.4885574223852382, "learning_rate": 0.000534351145038168, "loss": 4.4438, "step": 146 }, { "epoch": 0.02, "grad_norm": 3.087139734866523, "learning_rate": 0.0005381679389312977, "loss": 4.2856, "step": 147 }, { 
"epoch": 0.02, "grad_norm": 2.6985085546222614, "learning_rate": 0.0005419847328244275, "loss": 4.4142, "step": 148 }, { "epoch": 0.02, "grad_norm": 1.727728121506411, "learning_rate": 0.0005458015267175572, "loss": 4.4135, "step": 149 }, { "epoch": 0.02, "grad_norm": 1.948736840465955, "learning_rate": 0.0005496183206106871, "loss": 4.5285, "step": 150 }, { "epoch": 0.02, "grad_norm": 2.4424225307169785, "learning_rate": 0.0005534351145038168, "loss": 4.0518, "step": 151 }, { "epoch": 0.02, "grad_norm": 2.2461109523508633, "learning_rate": 0.0005572519083969466, "loss": 4.3069, "step": 152 }, { "epoch": 0.02, "grad_norm": 2.0770259323732496, "learning_rate": 0.0005610687022900763, "loss": 4.3466, "step": 153 }, { "epoch": 0.02, "grad_norm": 1.5353662806779629, "learning_rate": 0.0005648854961832062, "loss": 4.2371, "step": 154 }, { "epoch": 0.02, "grad_norm": 2.0783920823522064, "learning_rate": 0.0005687022900763359, "loss": 4.44, "step": 155 }, { "epoch": 0.02, "grad_norm": 2.106928793234778, "learning_rate": 0.0005725190839694656, "loss": 4.3789, "step": 156 }, { "epoch": 0.02, "grad_norm": 3.3997577002285153, "learning_rate": 0.0005763358778625954, "loss": 4.2723, "step": 157 }, { "epoch": 0.02, "grad_norm": 125.74215330630898, "learning_rate": 0.0005801526717557252, "loss": 4.5233, "step": 158 }, { "epoch": 0.02, "grad_norm": 2.0252204462249193, "learning_rate": 0.000583969465648855, "loss": 4.3092, "step": 159 }, { "epoch": 0.02, "grad_norm": 3.0938463664760625, "learning_rate": 0.0005877862595419848, "loss": 4.4223, "step": 160 }, { "epoch": 0.02, "grad_norm": 5.469689179908854, "learning_rate": 0.0005916030534351145, "loss": 4.4815, "step": 161 }, { "epoch": 0.02, "grad_norm": 16.71766273367972, "learning_rate": 0.0005954198473282443, "loss": 4.3158, "step": 162 }, { "epoch": 0.02, "grad_norm": 9.235621807932343, "learning_rate": 0.0005992366412213741, "loss": 4.5763, "step": 163 }, { "epoch": 0.02, "grad_norm": 5.078315302431695, "learning_rate": 0.0006030534351145039, "loss": 4.6196, "step": 164 }, { "epoch": 0.02, "grad_norm": 4.1877889855812995, "learning_rate": 0.0006068702290076335, "loss": 4.427, "step": 165 }, { "epoch": 0.02, "grad_norm": 29.50152144195882, "learning_rate": 0.0006106870229007634, "loss": 4.3172, "step": 166 }, { "epoch": 0.02, "grad_norm": 3.7986073777991605, "learning_rate": 0.0006145038167938931, "loss": 4.487, "step": 167 }, { "epoch": 0.02, "grad_norm": 3.3206921118180057, "learning_rate": 0.000618320610687023, "loss": 4.5846, "step": 168 }, { "epoch": 0.02, "grad_norm": 2.9789890885059767, "learning_rate": 0.0006221374045801526, "loss": 4.4977, "step": 169 }, { "epoch": 0.02, "grad_norm": 2.7208789856933224, "learning_rate": 0.0006259541984732825, "loss": 4.3776, "step": 170 }, { "epoch": 0.02, "grad_norm": 2.529932794834876, "learning_rate": 0.0006297709923664122, "loss": 4.5379, "step": 171 }, { "epoch": 0.02, "grad_norm": 5.069807212846386, "learning_rate": 0.000633587786259542, "loss": 4.538, "step": 172 }, { "epoch": 0.02, "grad_norm": 2.843360393508488, "learning_rate": 0.0006374045801526717, "loss": 4.4068, "step": 173 }, { "epoch": 0.02, "grad_norm": 3.979015124661183, "learning_rate": 0.0006412213740458015, "loss": 4.3093, "step": 174 }, { "epoch": 0.02, "grad_norm": 23.118008024134554, "learning_rate": 0.0006450381679389313, "loss": 4.5523, "step": 175 }, { "epoch": 0.02, "grad_norm": 2.676702098627031, "learning_rate": 0.0006488549618320611, "loss": 4.1649, "step": 176 }, { "epoch": 0.02, "grad_norm": 5.48001509237802, "learning_rate": 
0.0006526717557251909, "loss": 4.1914, "step": 177 }, { "epoch": 0.02, "grad_norm": 2.164314292544173, "learning_rate": 0.0006564885496183206, "loss": 4.5253, "step": 178 }, { "epoch": 0.02, "grad_norm": 2.797441546838409, "learning_rate": 0.0006603053435114504, "loss": 4.5076, "step": 179 }, { "epoch": 0.02, "grad_norm": 2.0967654547103542, "learning_rate": 0.0006641221374045802, "loss": 4.3244, "step": 180 }, { "epoch": 0.02, "grad_norm": 3.6853708103680716, "learning_rate": 0.0006679389312977099, "loss": 4.2549, "step": 181 }, { "epoch": 0.02, "grad_norm": 3.5984768315593856, "learning_rate": 0.0006717557251908397, "loss": 4.3358, "step": 182 }, { "epoch": 0.02, "grad_norm": 2.1474846376523535, "learning_rate": 0.0006755725190839694, "loss": 4.3137, "step": 183 }, { "epoch": 0.02, "grad_norm": 1.7440657356867424, "learning_rate": 0.0006793893129770993, "loss": 4.6206, "step": 184 }, { "epoch": 0.02, "grad_norm": 1.7374322470508698, "learning_rate": 0.000683206106870229, "loss": 4.0924, "step": 185 }, { "epoch": 0.02, "grad_norm": 1.552721310519864, "learning_rate": 0.0006870229007633588, "loss": 4.2106, "step": 186 }, { "epoch": 0.02, "grad_norm": 1.639952888552088, "learning_rate": 0.0006908396946564885, "loss": 4.4707, "step": 187 }, { "epoch": 0.02, "grad_norm": 1.4401389809364398, "learning_rate": 0.0006946564885496184, "loss": 4.4782, "step": 188 }, { "epoch": 0.02, "grad_norm": 1.693920501101157, "learning_rate": 0.0006984732824427481, "loss": 4.3477, "step": 189 }, { "epoch": 0.02, "grad_norm": 1.820752534444456, "learning_rate": 0.0007022900763358778, "loss": 4.3333, "step": 190 }, { "epoch": 0.02, "grad_norm": 4.454411472460561, "learning_rate": 0.0007061068702290076, "loss": 4.3562, "step": 191 }, { "epoch": 0.02, "grad_norm": 2.339729032032386, "learning_rate": 0.0007099236641221374, "loss": 4.3534, "step": 192 }, { "epoch": 0.02, "grad_norm": 1.7533030617578518, "learning_rate": 0.0007137404580152672, "loss": 4.1145, "step": 193 }, { "epoch": 0.02, "grad_norm": 1.5959634267068015, "learning_rate": 0.000717557251908397, "loss": 4.503, "step": 194 }, { "epoch": 0.02, "grad_norm": 1.8956991079901098, "learning_rate": 0.0007213740458015267, "loss": 4.552, "step": 195 }, { "epoch": 0.02, "grad_norm": 1.4517139263532806, "learning_rate": 0.0007251908396946565, "loss": 4.5581, "step": 196 }, { "epoch": 0.02, "grad_norm": 1.5816501245363008, "learning_rate": 0.0007290076335877863, "loss": 4.1946, "step": 197 }, { "epoch": 0.02, "grad_norm": 1.2445169239722784, "learning_rate": 0.0007328244274809161, "loss": 4.2591, "step": 198 }, { "epoch": 0.02, "grad_norm": 1.2714761168683029, "learning_rate": 0.0007366412213740457, "loss": 4.4627, "step": 199 }, { "epoch": 0.02, "grad_norm": 1.5303754696273073, "learning_rate": 0.0007404580152671756, "loss": 4.4048, "step": 200 }, { "epoch": 0.02, "grad_norm": 2.2085041225393867, "learning_rate": 0.0007442748091603053, "loss": 4.2643, "step": 201 }, { "epoch": 0.02, "grad_norm": 1.9112308799012625, "learning_rate": 0.0007480916030534352, "loss": 4.2295, "step": 202 }, { "epoch": 0.02, "grad_norm": 1.5908188587207266, "learning_rate": 0.0007519083969465648, "loss": 4.2813, "step": 203 }, { "epoch": 0.02, "grad_norm": 1.1982689595146219, "learning_rate": 0.0007557251908396947, "loss": 4.247, "step": 204 }, { "epoch": 0.02, "grad_norm": 2.3881318187911207, "learning_rate": 0.0007595419847328244, "loss": 4.3874, "step": 205 }, { "epoch": 0.02, "grad_norm": 1.3609424524329823, "learning_rate": 0.0007633587786259543, "loss": 4.4435, "step": 206 }, { 
"epoch": 0.02, "grad_norm": 1.1937703208585233, "learning_rate": 0.0007671755725190839, "loss": 4.5193, "step": 207 }, { "epoch": 0.02, "grad_norm": 2.4920614705406754, "learning_rate": 0.0007709923664122137, "loss": 4.3129, "step": 208 }, { "epoch": 0.02, "grad_norm": 7.240631868415255, "learning_rate": 0.0007748091603053435, "loss": 4.3302, "step": 209 }, { "epoch": 0.02, "grad_norm": 1.653358300173251, "learning_rate": 0.0007786259541984733, "loss": 4.3083, "step": 210 }, { "epoch": 0.02, "grad_norm": 1.6839357956548613, "learning_rate": 0.000782442748091603, "loss": 4.3758, "step": 211 }, { "epoch": 0.02, "grad_norm": 1.3938372295633423, "learning_rate": 0.0007862595419847328, "loss": 4.4719, "step": 212 }, { "epoch": 0.02, "grad_norm": 1.324803896504514, "learning_rate": 0.0007900763358778626, "loss": 4.3297, "step": 213 }, { "epoch": 0.02, "grad_norm": 1.2204173773929294, "learning_rate": 0.0007938931297709924, "loss": 4.389, "step": 214 }, { "epoch": 0.02, "grad_norm": 11.811043448698852, "learning_rate": 0.0007977099236641223, "loss": 4.3733, "step": 215 }, { "epoch": 0.02, "grad_norm": 2.0040900817656526, "learning_rate": 0.0008015267175572519, "loss": 4.0515, "step": 216 }, { "epoch": 0.02, "grad_norm": 1.9331803662548388, "learning_rate": 0.0008053435114503816, "loss": 4.4991, "step": 217 }, { "epoch": 0.02, "grad_norm": 1.4537028726656207, "learning_rate": 0.0008091603053435115, "loss": 4.2445, "step": 218 }, { "epoch": 0.03, "grad_norm": 5.53463013377651, "learning_rate": 0.0008129770992366412, "loss": 4.1712, "step": 219 }, { "epoch": 0.03, "grad_norm": 2.5106546596987123, "learning_rate": 0.000816793893129771, "loss": 4.3001, "step": 220 }, { "epoch": 0.03, "grad_norm": 1.9745634275850867, "learning_rate": 0.0008206106870229007, "loss": 4.2108, "step": 221 }, { "epoch": 0.03, "grad_norm": 1.268432076903743, "learning_rate": 0.0008244274809160306, "loss": 4.5328, "step": 222 }, { "epoch": 0.03, "grad_norm": 2.1903152686894933, "learning_rate": 0.0008282442748091604, "loss": 4.2968, "step": 223 }, { "epoch": 0.03, "grad_norm": 1.9380468409358829, "learning_rate": 0.0008320610687022901, "loss": 4.4417, "step": 224 }, { "epoch": 0.03, "grad_norm": 1.3963413799534377, "learning_rate": 0.0008358778625954198, "loss": 4.2871, "step": 225 }, { "epoch": 0.03, "grad_norm": 2.43582208783276, "learning_rate": 0.0008396946564885496, "loss": 4.2819, "step": 226 }, { "epoch": 0.03, "grad_norm": 1.5940462815400278, "learning_rate": 0.0008435114503816795, "loss": 4.4052, "step": 227 }, { "epoch": 0.03, "grad_norm": 1.2051802193213408, "learning_rate": 0.0008473282442748091, "loss": 4.2222, "step": 228 }, { "epoch": 0.03, "grad_norm": 2.0333850802718665, "learning_rate": 0.000851145038167939, "loss": 4.4192, "step": 229 }, { "epoch": 0.03, "grad_norm": 3.400417782835079, "learning_rate": 0.0008549618320610687, "loss": 4.4592, "step": 230 }, { "epoch": 0.03, "grad_norm": 1.7577865572295235, "learning_rate": 0.0008587786259541986, "loss": 4.2213, "step": 231 }, { "epoch": 0.03, "grad_norm": 1.4174422384212568, "learning_rate": 0.0008625954198473283, "loss": 4.4664, "step": 232 }, { "epoch": 0.03, "grad_norm": 2.055376126295404, "learning_rate": 0.0008664122137404581, "loss": 4.2514, "step": 233 }, { "epoch": 0.03, "grad_norm": 2.504414708093824, "learning_rate": 0.0008702290076335878, "loss": 4.0844, "step": 234 }, { "epoch": 0.03, "grad_norm": 1.4343526319105533, "learning_rate": 0.0008740458015267176, "loss": 4.2079, "step": 235 }, { "epoch": 0.03, "grad_norm": 1.2562820442089322, 
"learning_rate": 0.0008778625954198474, "loss": 4.4373, "step": 236 }, { "epoch": 0.03, "grad_norm": 3.7457012916108225, "learning_rate": 0.000881679389312977, "loss": 4.1786, "step": 237 }, { "epoch": 0.03, "grad_norm": 1.1790063308907726, "learning_rate": 0.0008854961832061069, "loss": 4.2874, "step": 238 }, { "epoch": 0.03, "grad_norm": 1.2991863547672804, "learning_rate": 0.0008893129770992367, "loss": 4.1354, "step": 239 }, { "epoch": 0.03, "grad_norm": 2.5286489750714036, "learning_rate": 0.0008931297709923665, "loss": 4.3006, "step": 240 }, { "epoch": 0.03, "grad_norm": 1.558510226836997, "learning_rate": 0.0008969465648854962, "loss": 4.0331, "step": 241 }, { "epoch": 0.03, "grad_norm": 1.4903547154898784, "learning_rate": 0.0009007633587786259, "loss": 4.1535, "step": 242 }, { "epoch": 0.03, "grad_norm": 1.2714238683862837, "learning_rate": 0.0009045801526717558, "loss": 4.1045, "step": 243 }, { "epoch": 0.03, "grad_norm": 8.463507600954246, "learning_rate": 0.0009083969465648855, "loss": 4.2937, "step": 244 }, { "epoch": 0.03, "grad_norm": 1.3496143257230424, "learning_rate": 0.0009122137404580153, "loss": 4.2393, "step": 245 }, { "epoch": 0.03, "grad_norm": 2.0867375064306093, "learning_rate": 0.000916030534351145, "loss": 4.2803, "step": 246 }, { "epoch": 0.03, "grad_norm": 1.6739263840471548, "learning_rate": 0.0009198473282442749, "loss": 4.147, "step": 247 }, { "epoch": 0.03, "grad_norm": 1.627907574888252, "learning_rate": 0.0009236641221374046, "loss": 4.1545, "step": 248 }, { "epoch": 0.03, "grad_norm": 1.6288505381343623, "learning_rate": 0.0009274809160305345, "loss": 4.2188, "step": 249 }, { "epoch": 0.03, "grad_norm": 1.5053658945789288, "learning_rate": 0.0009312977099236641, "loss": 4.2284, "step": 250 }, { "epoch": 0.03, "grad_norm": 1.9751834803714665, "learning_rate": 0.0009351145038167939, "loss": 4.3051, "step": 251 }, { "epoch": 0.03, "grad_norm": 1.635500123712402, "learning_rate": 0.0009389312977099237, "loss": 4.1021, "step": 252 }, { "epoch": 0.03, "grad_norm": 3.18029868298015, "learning_rate": 0.0009427480916030535, "loss": 4.323, "step": 253 }, { "epoch": 0.03, "grad_norm": 2.294919077766256, "learning_rate": 0.0009465648854961832, "loss": 4.2792, "step": 254 }, { "epoch": 0.03, "grad_norm": 3.4364356766600967, "learning_rate": 0.000950381679389313, "loss": 4.4129, "step": 255 }, { "epoch": 0.03, "grad_norm": 6.557641748511485, "learning_rate": 0.0009541984732824428, "loss": 4.529, "step": 256 }, { "epoch": 0.03, "grad_norm": 8.226298705783996, "learning_rate": 0.0009580152671755726, "loss": 3.8596, "step": 257 }, { "epoch": 0.03, "grad_norm": 1.8150668785318773, "learning_rate": 0.0009618320610687023, "loss": 4.2955, "step": 258 }, { "epoch": 0.03, "grad_norm": 1.4614750127674727, "learning_rate": 0.0009656488549618321, "loss": 4.1755, "step": 259 }, { "epoch": 0.03, "grad_norm": 1.7379610237004675, "learning_rate": 0.0009694656488549618, "loss": 3.9348, "step": 260 }, { "epoch": 0.03, "grad_norm": 3.2592173885528375, "learning_rate": 0.0009732824427480917, "loss": 4.3136, "step": 261 }, { "epoch": 0.03, "grad_norm": 2.211981835728531, "learning_rate": 0.0009770992366412213, "loss": 4.2098, "step": 262 }, { "epoch": 0.03, "grad_norm": 1.577569854133963, "learning_rate": 0.0009809160305343512, "loss": 4.2233, "step": 263 }, { "epoch": 0.03, "grad_norm": 3.9361841564074918, "learning_rate": 0.0009847328244274808, "loss": 4.3255, "step": 264 }, { "epoch": 0.03, "grad_norm": 2.0544551880751634, "learning_rate": 0.0009885496183206107, "loss": 4.26, "step": 
265 }, { "epoch": 0.03, "grad_norm": 1.6958226014336415, "learning_rate": 0.0009923664122137405, "loss": 4.2677, "step": 266 }, { "epoch": 0.03, "grad_norm": 1.7074348821947696, "learning_rate": 0.0009961832061068704, "loss": 3.9851, "step": 267 }, { "epoch": 0.03, "grad_norm": 1.299170018256827, "learning_rate": 0.001, "loss": 4.26, "step": 268 }, { "epoch": 0.03, "grad_norm": 1.814824816036798, "learning_rate": 0.0009999999655172654, "loss": 4.0256, "step": 269 }, { "epoch": 0.03, "grad_norm": 3.8008135241767156, "learning_rate": 0.0009999998620690664, "loss": 4.2543, "step": 270 }, { "epoch": 0.03, "grad_norm": 4.301365760272003, "learning_rate": 0.0009999996896554175, "loss": 4.0231, "step": 271 }, { "epoch": 0.03, "grad_norm": 1.6300346529640273, "learning_rate": 0.0009999994482763422, "loss": 4.1036, "step": 272 }, { "epoch": 0.03, "grad_norm": 8.685751962780564, "learning_rate": 0.0009999991379318737, "loss": 4.1403, "step": 273 }, { "epoch": 0.03, "grad_norm": 2.2498080971780685, "learning_rate": 0.000999998758622055, "loss": 4.2029, "step": 274 }, { "epoch": 0.03, "grad_norm": 1.8047538088429735, "learning_rate": 0.0009999983103469385, "loss": 4.2558, "step": 275 }, { "epoch": 0.03, "grad_norm": 1.937065425249134, "learning_rate": 0.0009999977931065857, "loss": 4.1651, "step": 276 }, { "epoch": 0.03, "grad_norm": 1.437196394731803, "learning_rate": 0.0009999972069010686, "loss": 3.9668, "step": 277 }, { "epoch": 0.03, "grad_norm": 2.8630403951444725, "learning_rate": 0.0009999965517304673, "loss": 4.2141, "step": 278 }, { "epoch": 0.03, "grad_norm": 1.4378229128006432, "learning_rate": 0.0009999958275948725, "loss": 4.1225, "step": 279 }, { "epoch": 0.03, "grad_norm": 6.68608702787061, "learning_rate": 0.0009999950344943842, "loss": 4.2859, "step": 280 }, { "epoch": 0.03, "grad_norm": 4.067793589494028, "learning_rate": 0.0009999941724291115, "loss": 4.1785, "step": 281 }, { "epoch": 0.03, "grad_norm": 2.3218467147143906, "learning_rate": 0.0009999932413991737, "loss": 4.1513, "step": 282 }, { "epoch": 0.03, "grad_norm": 2.232099383205669, "learning_rate": 0.0009999922414046986, "loss": 4.2677, "step": 283 }, { "epoch": 0.03, "grad_norm": 2.184228211822442, "learning_rate": 0.0009999911724458248, "loss": 4.2871, "step": 284 }, { "epoch": 0.03, "grad_norm": 19.99359159829405, "learning_rate": 0.0009999900345226994, "loss": 4.2853, "step": 285 }, { "epoch": 0.03, "grad_norm": 1.7875740824340312, "learning_rate": 0.0009999888276354795, "loss": 4.2122, "step": 286 }, { "epoch": 0.03, "grad_norm": 6.093503870880048, "learning_rate": 0.0009999875517843315, "loss": 4.1125, "step": 287 }, { "epoch": 0.03, "grad_norm": 1.6980520670634527, "learning_rate": 0.0009999862069694312, "loss": 4.2265, "step": 288 }, { "epoch": 0.03, "grad_norm": 1.2798750609827148, "learning_rate": 0.0009999847931909645, "loss": 4.2363, "step": 289 }, { "epoch": 0.03, "grad_norm": 3.5566968015684757, "learning_rate": 0.000999983310449126, "loss": 4.0132, "step": 290 }, { "epoch": 0.03, "grad_norm": 19.18546091614537, "learning_rate": 0.0009999817587441203, "loss": 4.061, "step": 291 }, { "epoch": 0.03, "grad_norm": 1.8829242069222285, "learning_rate": 0.0009999801380761615, "loss": 4.1262, "step": 292 }, { "epoch": 0.03, "grad_norm": 2.6842149025296167, "learning_rate": 0.0009999784484454734, "loss": 4.4524, "step": 293 }, { "epoch": 0.03, "grad_norm": 1.859868024080571, "learning_rate": 0.0009999766898522884, "loss": 4.1414, "step": 294 }, { "epoch": 0.03, "grad_norm": 1.8045651847204816, "learning_rate": 
0.0009999748622968496, "loss": 4.1495, "step": 295 }, { "epoch": 0.03, "grad_norm": 2.124044628417353, "learning_rate": 0.000999972965779409, "loss": 4.1097, "step": 296 }, { "epoch": 0.03, "grad_norm": 1.5011576239743278, "learning_rate": 0.000999971000300228, "loss": 4.1823, "step": 297 }, { "epoch": 0.03, "grad_norm": 2.01347098774198, "learning_rate": 0.000999968965859578, "loss": 4.1435, "step": 298 }, { "epoch": 0.03, "grad_norm": 1.5913183739162184, "learning_rate": 0.0009999668624577395, "loss": 4.3186, "step": 299 }, { "epoch": 0.03, "grad_norm": 1.47856588583951, "learning_rate": 0.0009999646900950023, "loss": 4.3772, "step": 300 }, { "epoch": 0.03, "grad_norm": 4.908248562541376, "learning_rate": 0.0009999624487716666, "loss": 4.193, "step": 301 }, { "epoch": 0.03, "grad_norm": 1.9294532257161214, "learning_rate": 0.000999960138488041, "loss": 4.2368, "step": 302 }, { "epoch": 0.03, "grad_norm": 1.7995978756885962, "learning_rate": 0.0009999577592444443, "loss": 4.1151, "step": 303 }, { "epoch": 0.03, "grad_norm": 3.0729673487442666, "learning_rate": 0.000999955311041205, "loss": 4.0308, "step": 304 }, { "epoch": 0.03, "grad_norm": 1.4546240183668138, "learning_rate": 0.0009999527938786606, "loss": 4.3905, "step": 305 }, { "epoch": 0.04, "grad_norm": 1.7263021428362864, "learning_rate": 0.0009999502077571581, "loss": 4.0221, "step": 306 }, { "epoch": 0.04, "grad_norm": 2.164973600653351, "learning_rate": 0.0009999475526770545, "loss": 4.3111, "step": 307 }, { "epoch": 0.04, "grad_norm": 2.0563555618686613, "learning_rate": 0.0009999448286387158, "loss": 4.0792, "step": 308 }, { "epoch": 0.04, "grad_norm": 1.2742839474490744, "learning_rate": 0.0009999420356425178, "loss": 4.4126, "step": 309 }, { "epoch": 0.04, "grad_norm": 1.6548903017718362, "learning_rate": 0.0009999391736888457, "loss": 4.1291, "step": 310 }, { "epoch": 0.04, "grad_norm": 1.4706621032242517, "learning_rate": 0.0009999362427780942, "loss": 4.0174, "step": 311 }, { "epoch": 0.04, "grad_norm": 1.2522898936448108, "learning_rate": 0.0009999332429106679, "loss": 4.0531, "step": 312 }, { "epoch": 0.04, "grad_norm": 1.758527838442011, "learning_rate": 0.00099993017408698, "loss": 4.0504, "step": 313 }, { "epoch": 0.04, "grad_norm": 1.926369759156153, "learning_rate": 0.0009999270363074547, "loss": 4.1235, "step": 314 }, { "epoch": 0.04, "grad_norm": 1.4216792867629067, "learning_rate": 0.0009999238295725237, "loss": 4.1285, "step": 315 }, { "epoch": 0.04, "grad_norm": 2.1110654819036574, "learning_rate": 0.00099992055388263, "loss": 4.2889, "step": 316 }, { "epoch": 0.04, "grad_norm": 1.699020600438759, "learning_rate": 0.0009999172092382252, "loss": 4.2314, "step": 317 }, { "epoch": 0.04, "grad_norm": 1.779461678736526, "learning_rate": 0.0009999137956397707, "loss": 4.0981, "step": 318 }, { "epoch": 0.04, "grad_norm": 1.1840521271759301, "learning_rate": 0.0009999103130877373, "loss": 4.2072, "step": 319 }, { "epoch": 0.04, "grad_norm": 4.6987405064690515, "learning_rate": 0.0009999067615826054, "loss": 4.1763, "step": 320 }, { "epoch": 0.04, "grad_norm": 1.4758379470902467, "learning_rate": 0.000999903141124865, "loss": 3.9832, "step": 321 }, { "epoch": 0.04, "grad_norm": 40.39742878916515, "learning_rate": 0.000999899451715015, "loss": 4.0352, "step": 322 }, { "epoch": 0.04, "grad_norm": 1.4712620565356995, "learning_rate": 0.0009998956933535649, "loss": 4.2948, "step": 323 }, { "epoch": 0.04, "grad_norm": 2.0661716179642924, "learning_rate": 0.0009998918660410324, "loss": 4.1999, "step": 324 }, { "epoch": 
0.04, "grad_norm": 3.6555152768866663, "learning_rate": 0.000999887969777946, "loss": 4.4547, "step": 325 }, { "epoch": 0.04, "grad_norm": 14.09646828820387, "learning_rate": 0.000999884004564843, "loss": 4.6558, "step": 326 }, { "epoch": 0.04, "grad_norm": 5.657304255504427, "learning_rate": 0.00099987997040227, "loss": 4.2387, "step": 327 }, { "epoch": 0.04, "grad_norm": 2.3774895165908796, "learning_rate": 0.0009998758672907838, "loss": 4.3581, "step": 328 }, { "epoch": 0.04, "grad_norm": 2.1797131810201, "learning_rate": 0.0009998716952309501, "loss": 4.4357, "step": 329 }, { "epoch": 0.04, "grad_norm": 5.725734880635553, "learning_rate": 0.0009998674542233445, "loss": 4.1463, "step": 330 }, { "epoch": 0.04, "grad_norm": 2.3915923885287005, "learning_rate": 0.000999863144268552, "loss": 4.4046, "step": 331 }, { "epoch": 0.04, "grad_norm": 23.666879056714148, "learning_rate": 0.000999858765367167, "loss": 4.213, "step": 332 }, { "epoch": 0.04, "grad_norm": 1.58069587940314, "learning_rate": 0.0009998543175197936, "loss": 4.1942, "step": 333 }, { "epoch": 0.04, "grad_norm": 1.6186294395438663, "learning_rate": 0.000999849800727045, "loss": 4.3216, "step": 334 }, { "epoch": 0.04, "grad_norm": 5.074442349441321, "learning_rate": 0.0009998452149895445, "loss": 4.1695, "step": 335 }, { "epoch": 0.04, "grad_norm": 2.361950031079178, "learning_rate": 0.0009998405603079243, "loss": 4.3722, "step": 336 }, { "epoch": 0.04, "grad_norm": 1.778983463763034, "learning_rate": 0.0009998358366828269, "loss": 4.3397, "step": 337 }, { "epoch": 0.04, "grad_norm": 2.250987163401557, "learning_rate": 0.0009998310441149034, "loss": 4.3531, "step": 338 }, { "epoch": 0.04, "grad_norm": 1.3896684163928483, "learning_rate": 0.000999826182604815, "loss": 4.3145, "step": 339 }, { "epoch": 0.04, "grad_norm": 1.2348250676899448, "learning_rate": 0.0009998212521532325, "loss": 4.2225, "step": 340 }, { "epoch": 0.04, "grad_norm": 2.0328905315690773, "learning_rate": 0.0009998162527608354, "loss": 4.2112, "step": 341 }, { "epoch": 0.04, "grad_norm": 49.319649477815474, "learning_rate": 0.0009998111844283137, "loss": 4.4247, "step": 342 }, { "epoch": 0.04, "grad_norm": 1.8499403591502945, "learning_rate": 0.0009998060471563665, "loss": 4.4534, "step": 343 }, { "epoch": 0.04, "grad_norm": 1.9487614905504818, "learning_rate": 0.0009998008409457023, "loss": 4.4077, "step": 344 }, { "epoch": 0.04, "grad_norm": 2.010154781846001, "learning_rate": 0.000999795565797039, "loss": 4.1284, "step": 345 }, { "epoch": 0.04, "grad_norm": 2.087743461932623, "learning_rate": 0.0009997902217111045, "loss": 4.1874, "step": 346 }, { "epoch": 0.04, "grad_norm": 2.2264590678729124, "learning_rate": 0.0009997848086886357, "loss": 4.4414, "step": 347 }, { "epoch": 0.04, "grad_norm": 1.6353900773523025, "learning_rate": 0.0009997793267303792, "loss": 4.3114, "step": 348 }, { "epoch": 0.04, "grad_norm": 3.388842554980238, "learning_rate": 0.0009997737758370914, "loss": 4.5055, "step": 349 }, { "epoch": 0.04, "grad_norm": 6.371677050558135, "learning_rate": 0.0009997681560095378, "loss": 4.3519, "step": 350 }, { "epoch": 0.04, "grad_norm": 1.808691177664785, "learning_rate": 0.0009997624672484933, "loss": 4.2633, "step": 351 }, { "epoch": 0.04, "grad_norm": 1.5466615306023854, "learning_rate": 0.0009997567095547432, "loss": 4.335, "step": 352 }, { "epoch": 0.04, "grad_norm": 7.315933330068135, "learning_rate": 0.000999750882929081, "loss": 4.4478, "step": 353 }, { "epoch": 0.04, "grad_norm": 4.944573607809598, "learning_rate": 
0.0009997449873723105, "loss": 4.3793, "step": 354 }, { "epoch": 0.04, "grad_norm": 6.500319948924547, "learning_rate": 0.000999739022885245, "loss": 4.4322, "step": 355 }, { "epoch": 0.04, "grad_norm": 1.1455197638185235, "learning_rate": 0.0009997329894687072, "loss": 4.278, "step": 356 }, { "epoch": 0.04, "grad_norm": 9.105573634207436, "learning_rate": 0.0009997268871235296, "loss": 4.2941, "step": 357 }, { "epoch": 0.04, "grad_norm": 5.130138375210888, "learning_rate": 0.0009997207158505533, "loss": 4.321, "step": 358 }, { "epoch": 0.04, "grad_norm": 2.0490440876383507, "learning_rate": 0.0009997144756506298, "loss": 4.3293, "step": 359 }, { "epoch": 0.04, "grad_norm": 1.5517935437634631, "learning_rate": 0.00099970816652462, "loss": 4.1769, "step": 360 }, { "epoch": 0.04, "grad_norm": 1.8913769648293703, "learning_rate": 0.0009997017884733938, "loss": 4.1776, "step": 361 }, { "epoch": 0.04, "grad_norm": 1.15488805085044, "learning_rate": 0.000999695341497831, "loss": 3.973, "step": 362 }, { "epoch": 0.04, "grad_norm": 1.4561573957234442, "learning_rate": 0.0009996888255988207, "loss": 4.1945, "step": 363 }, { "epoch": 0.04, "grad_norm": 1.21447529449004, "learning_rate": 0.0009996822407772623, "loss": 4.1921, "step": 364 }, { "epoch": 0.04, "grad_norm": 3.312013471518438, "learning_rate": 0.0009996755870340633, "loss": 4.1811, "step": 365 }, { "epoch": 0.04, "grad_norm": 1.1717353129345247, "learning_rate": 0.0009996688643701419, "loss": 4.3077, "step": 366 }, { "epoch": 0.04, "grad_norm": 1.5318520759135914, "learning_rate": 0.0009996620727864252, "loss": 4.2601, "step": 367 }, { "epoch": 0.04, "grad_norm": 1.1480331730448254, "learning_rate": 0.00099965521228385, "loss": 4.0155, "step": 368 }, { "epoch": 0.04, "grad_norm": 1.5368186700592597, "learning_rate": 0.0009996482828633624, "loss": 4.4481, "step": 369 }, { "epoch": 0.04, "grad_norm": 1.3222058672091872, "learning_rate": 0.0009996412845259183, "loss": 4.2202, "step": 370 }, { "epoch": 0.04, "grad_norm": 1.6202185673482237, "learning_rate": 0.0009996342172724833, "loss": 4.2343, "step": 371 }, { "epoch": 0.04, "grad_norm": 1.2894933539204734, "learning_rate": 0.0009996270811040318, "loss": 3.9833, "step": 372 }, { "epoch": 0.04, "grad_norm": 1.6855540418044326, "learning_rate": 0.0009996198760215483, "loss": 3.9795, "step": 373 }, { "epoch": 0.04, "grad_norm": 1.1924223767278768, "learning_rate": 0.0009996126020260262, "loss": 4.2868, "step": 374 }, { "epoch": 0.04, "grad_norm": 0.9729971906461921, "learning_rate": 0.0009996052591184695, "loss": 4.2176, "step": 375 }, { "epoch": 0.04, "grad_norm": 2.8480767564801504, "learning_rate": 0.0009995978472998905, "loss": 4.2889, "step": 376 }, { "epoch": 0.04, "grad_norm": 1.0565771798226582, "learning_rate": 0.0009995903665713118, "loss": 4.1568, "step": 377 }, { "epoch": 0.04, "grad_norm": 1.360456416142257, "learning_rate": 0.000999582816933765, "loss": 4.0579, "step": 378 }, { "epoch": 0.04, "grad_norm": 1.3195693821900203, "learning_rate": 0.0009995751983882914, "loss": 4.1984, "step": 379 }, { "epoch": 0.04, "grad_norm": 3.08569068216271, "learning_rate": 0.000999567510935942, "loss": 4.3036, "step": 380 }, { "epoch": 0.04, "grad_norm": 1.3444103142098056, "learning_rate": 0.0009995597545777771, "loss": 4.2186, "step": 381 }, { "epoch": 0.04, "grad_norm": 1.5426138852906555, "learning_rate": 0.0009995519293148666, "loss": 4.1794, "step": 382 }, { "epoch": 0.04, "grad_norm": 1.496592540452236, "learning_rate": 0.0009995440351482897, "loss": 4.2067, "step": 383 }, { "epoch": 
0.04, "grad_norm": 1.2735700045022074, "learning_rate": 0.0009995360720791353, "loss": 4.2875, "step": 384 }, { "epoch": 0.04, "grad_norm": 2.565558198597502, "learning_rate": 0.000999528040108502, "loss": 4.1659, "step": 385 }, { "epoch": 0.04, "grad_norm": 2.5383763017486842, "learning_rate": 0.0009995199392374972, "loss": 4.3205, "step": 386 }, { "epoch": 0.04, "grad_norm": 1.0953126095411645, "learning_rate": 0.0009995117694672386, "loss": 4.2132, "step": 387 }, { "epoch": 0.04, "grad_norm": 1.9375458858753798, "learning_rate": 0.000999503530798853, "loss": 4.191, "step": 388 }, { "epoch": 0.04, "grad_norm": 3.2772614397000868, "learning_rate": 0.0009994952232334766, "loss": 4.271, "step": 389 }, { "epoch": 0.04, "grad_norm": 1.7061787526984669, "learning_rate": 0.0009994868467722556, "loss": 4.0973, "step": 390 }, { "epoch": 0.04, "grad_norm": 1.5312397789414427, "learning_rate": 0.0009994784014163449, "loss": 3.992, "step": 391 }, { "epoch": 0.04, "grad_norm": 1.0706364696174504, "learning_rate": 0.0009994698871669098, "loss": 4.2831, "step": 392 }, { "epoch": 0.05, "grad_norm": 1.865631611652127, "learning_rate": 0.0009994613040251246, "loss": 4.2814, "step": 393 }, { "epoch": 0.05, "grad_norm": 1.6606265690727582, "learning_rate": 0.000999452651992173, "loss": 4.3857, "step": 394 }, { "epoch": 0.05, "grad_norm": 1.2448059068201522, "learning_rate": 0.0009994439310692486, "loss": 3.8996, "step": 395 }, { "epoch": 0.05, "grad_norm": 1.2984123516923842, "learning_rate": 0.0009994351412575542, "loss": 4.0641, "step": 396 }, { "epoch": 0.05, "grad_norm": 3.335713094373029, "learning_rate": 0.000999426282558302, "loss": 4.2992, "step": 397 }, { "epoch": 0.05, "grad_norm": 2.05883423286947, "learning_rate": 0.000999417354972714, "loss": 4.0104, "step": 398 }, { "epoch": 0.05, "grad_norm": 1.5888049575668968, "learning_rate": 0.000999408358502022, "loss": 4.1403, "step": 399 }, { "epoch": 0.05, "grad_norm": 1.4650361222185284, "learning_rate": 0.0009993992931474661, "loss": 4.1965, "step": 400 }, { "epoch": 0.05, "grad_norm": 1.1101048971432796, "learning_rate": 0.0009993901589102974, "loss": 4.2827, "step": 401 }, { "epoch": 0.05, "grad_norm": 1.9167226703746987, "learning_rate": 0.0009993809557917754, "loss": 4.1344, "step": 402 }, { "epoch": 0.05, "grad_norm": 1.141850988985642, "learning_rate": 0.0009993716837931696, "loss": 4.2622, "step": 403 }, { "epoch": 0.05, "grad_norm": 1.2495698459200755, "learning_rate": 0.000999362342915759, "loss": 4.2708, "step": 404 }, { "epoch": 0.05, "grad_norm": 1.1061939151991775, "learning_rate": 0.0009993529331608318, "loss": 4.179, "step": 405 }, { "epoch": 0.05, "grad_norm": 1.0820094796277944, "learning_rate": 0.0009993434545296862, "loss": 3.9815, "step": 406 }, { "epoch": 0.05, "grad_norm": 1.2330433334635982, "learning_rate": 0.0009993339070236292, "loss": 4.053, "step": 407 }, { "epoch": 0.05, "grad_norm": 1.2112113674769647, "learning_rate": 0.000999324290643978, "loss": 3.8885, "step": 408 }, { "epoch": 0.05, "grad_norm": 2.2601834908232417, "learning_rate": 0.0009993146053920588, "loss": 3.8749, "step": 409 }, { "epoch": 0.05, "grad_norm": 1.0771956521575363, "learning_rate": 0.0009993048512692078, "loss": 3.8832, "step": 410 }, { "epoch": 0.05, "grad_norm": 3.1380153674100617, "learning_rate": 0.00099929502827677, "loss": 4.1943, "step": 411 }, { "epoch": 0.05, "grad_norm": 1.311143180266052, "learning_rate": 0.0009992851364161006, "loss": 4.1327, "step": 412 }, { "epoch": 0.05, "grad_norm": 1.2367140965361143, "learning_rate": 
0.0009992751756885637, "loss": 4.1799, "step": 413 }, { "epoch": 0.05, "grad_norm": 1.6106443097675247, "learning_rate": 0.0009992651460955335, "loss": 4.2873, "step": 414 }, { "epoch": 0.05, "grad_norm": 2.0876301309960565, "learning_rate": 0.0009992550476383931, "loss": 4.0479, "step": 415 }, { "epoch": 0.05, "grad_norm": 1.2051395887921463, "learning_rate": 0.0009992448803185356, "loss": 4.1903, "step": 416 }, { "epoch": 0.05, "grad_norm": 1.5932434720450677, "learning_rate": 0.0009992346441373633, "loss": 3.919, "step": 417 }, { "epoch": 0.05, "grad_norm": 1.4427534603119214, "learning_rate": 0.0009992243390962883, "loss": 4.1708, "step": 418 }, { "epoch": 0.05, "grad_norm": 2.385736890036496, "learning_rate": 0.0009992139651967319, "loss": 4.144, "step": 419 }, { "epoch": 0.05, "grad_norm": 2.079967188258638, "learning_rate": 0.0009992035224401245, "loss": 4.1111, "step": 420 }, { "epoch": 0.05, "grad_norm": 2.2856489197098147, "learning_rate": 0.0009991930108279074, "loss": 4.1762, "step": 421 }, { "epoch": 0.05, "grad_norm": 1.3584985734107513, "learning_rate": 0.0009991824303615293, "loss": 4.1227, "step": 422 }, { "epoch": 0.05, "grad_norm": 1.1962832701569566, "learning_rate": 0.0009991717810424506, "loss": 4.1031, "step": 423 }, { "epoch": 0.05, "grad_norm": 1.2879293150575224, "learning_rate": 0.0009991610628721397, "loss": 4.3398, "step": 424 }, { "epoch": 0.05, "grad_norm": 0.9798353155821227, "learning_rate": 0.000999150275852075, "loss": 4.0816, "step": 425 }, { "epoch": 0.05, "grad_norm": 1.1679414045514513, "learning_rate": 0.0009991394199837444, "loss": 4.1019, "step": 426 }, { "epoch": 0.05, "grad_norm": 1.1310909720079843, "learning_rate": 0.0009991284952686455, "loss": 3.9163, "step": 427 }, { "epoch": 0.05, "grad_norm": 1.2169637793504569, "learning_rate": 0.0009991175017082848, "loss": 4.0028, "step": 428 }, { "epoch": 0.05, "grad_norm": 1.1690667052571782, "learning_rate": 0.0009991064393041786, "loss": 4.1311, "step": 429 }, { "epoch": 0.05, "grad_norm": 1.0864768016068007, "learning_rate": 0.0009990953080578533, "loss": 4.1679, "step": 430 }, { "epoch": 0.05, "grad_norm": 0.9532566683053302, "learning_rate": 0.0009990841079708435, "loss": 4.0866, "step": 431 }, { "epoch": 0.05, "grad_norm": 1.0329163094124467, "learning_rate": 0.0009990728390446946, "loss": 3.8579, "step": 432 }, { "epoch": 0.05, "grad_norm": 1.2544594261947024, "learning_rate": 0.0009990615012809608, "loss": 3.9562, "step": 433 }, { "epoch": 0.05, "grad_norm": 1.1763266600931532, "learning_rate": 0.0009990500946812058, "loss": 4.3207, "step": 434 }, { "epoch": 0.05, "grad_norm": 1.068722478985381, "learning_rate": 0.000999038619247003, "loss": 4.0708, "step": 435 }, { "epoch": 0.05, "grad_norm": 9.278565266188739, "learning_rate": 0.0009990270749799352, "loss": 4.1883, "step": 436 }, { "epoch": 0.05, "grad_norm": 1.0817615488588326, "learning_rate": 0.0009990154618815948, "loss": 3.9218, "step": 437 }, { "epoch": 0.05, "grad_norm": 1.2426238347080267, "learning_rate": 0.0009990037799535833, "loss": 4.4039, "step": 438 }, { "epoch": 0.05, "grad_norm": 1.0993328383491558, "learning_rate": 0.0009989920291975124, "loss": 4.1526, "step": 439 }, { "epoch": 0.05, "grad_norm": 2.2563924336520977, "learning_rate": 0.0009989802096150029, "loss": 4.2703, "step": 440 }, { "epoch": 0.05, "grad_norm": 1.8318851823902311, "learning_rate": 0.0009989683212076848, "loss": 3.9792, "step": 441 }, { "epoch": 0.05, "grad_norm": 1.027318592959613, "learning_rate": 0.0009989563639771978, "loss": 4.1156, "step": 442 }, 
{ "epoch": 0.05, "grad_norm": 1.551374002379629, "learning_rate": 0.0009989443379251916, "loss": 4.2062, "step": 443 }, { "epoch": 0.05, "grad_norm": 1.663527277241027, "learning_rate": 0.0009989322430533245, "loss": 4.2456, "step": 444 }, { "epoch": 0.05, "grad_norm": 1.1294039872013137, "learning_rate": 0.0009989200793632652, "loss": 4.0084, "step": 445 }, { "epoch": 0.05, "grad_norm": 1.991541396545525, "learning_rate": 0.0009989078468566912, "loss": 4.0946, "step": 446 }, { "epoch": 0.05, "grad_norm": 1.5884350569850685, "learning_rate": 0.0009988955455352898, "loss": 4.227, "step": 447 }, { "epoch": 0.05, "grad_norm": 1.0780095775459435, "learning_rate": 0.0009988831754007576, "loss": 4.112, "step": 448 }, { "epoch": 0.05, "grad_norm": 1.0436200832180618, "learning_rate": 0.000998870736454801, "loss": 3.9062, "step": 449 }, { "epoch": 0.05, "grad_norm": 1.2323208628469333, "learning_rate": 0.0009988582286991356, "loss": 4.1227, "step": 450 }, { "epoch": 0.05, "grad_norm": 1.092529117596904, "learning_rate": 0.0009988456521354868, "loss": 4.0009, "step": 451 }, { "epoch": 0.05, "grad_norm": 2.778822307425622, "learning_rate": 0.000998833006765589, "loss": 3.9905, "step": 452 }, { "epoch": 0.05, "grad_norm": 1.0428166764829823, "learning_rate": 0.0009988202925911864, "loss": 4.155, "step": 453 }, { "epoch": 0.05, "grad_norm": 2.0777766526303507, "learning_rate": 0.000998807509614033, "loss": 4.1294, "step": 454 }, { "epoch": 0.05, "grad_norm": 1.3560606439742944, "learning_rate": 0.0009987946578358918, "loss": 4.192, "step": 455 }, { "epoch": 0.05, "grad_norm": 1.1686812519653973, "learning_rate": 0.0009987817372585355, "loss": 3.9685, "step": 456 }, { "epoch": 0.05, "grad_norm": 1.199886267327284, "learning_rate": 0.000998768747883746, "loss": 3.9751, "step": 457 }, { "epoch": 0.05, "grad_norm": 1.28051981801247, "learning_rate": 0.0009987556897133151, "loss": 4.1387, "step": 458 }, { "epoch": 0.05, "grad_norm": 1.398752201967532, "learning_rate": 0.0009987425627490441, "loss": 4.3152, "step": 459 }, { "epoch": 0.05, "grad_norm": 1.0358707654903616, "learning_rate": 0.0009987293669927436, "loss": 4.0209, "step": 460 }, { "epoch": 0.05, "grad_norm": 1.5618766801048318, "learning_rate": 0.0009987161024462333, "loss": 4.2922, "step": 461 }, { "epoch": 0.05, "grad_norm": 1.1293077120946913, "learning_rate": 0.0009987027691113432, "loss": 4.1711, "step": 462 }, { "epoch": 0.05, "grad_norm": 1.1834193535955804, "learning_rate": 0.0009986893669899123, "loss": 3.9076, "step": 463 }, { "epoch": 0.05, "grad_norm": 1.0382649056081357, "learning_rate": 0.0009986758960837889, "loss": 4.1549, "step": 464 }, { "epoch": 0.05, "grad_norm": 1.2187286862291253, "learning_rate": 0.0009986623563948314, "loss": 4.1721, "step": 465 }, { "epoch": 0.05, "grad_norm": 1.8609658883974771, "learning_rate": 0.000998648747924907, "loss": 4.1522, "step": 466 }, { "epoch": 0.05, "grad_norm": 1.1155638497574973, "learning_rate": 0.0009986350706758934, "loss": 4.1651, "step": 467 }, { "epoch": 0.05, "grad_norm": 1.3738184830032705, "learning_rate": 0.0009986213246496762, "loss": 4.2313, "step": 468 }, { "epoch": 0.05, "grad_norm": 1.0951433298423616, "learning_rate": 0.000998607509848152, "loss": 4.0457, "step": 469 }, { "epoch": 0.05, "grad_norm": 1.066652661314097, "learning_rate": 0.0009985936262732263, "loss": 4.1577, "step": 470 }, { "epoch": 0.05, "grad_norm": 1.1376056134457013, "learning_rate": 0.0009985796739268138, "loss": 4.1039, "step": 471 }, { "epoch": 0.05, "grad_norm": 1.3271850520417687, 
"learning_rate": 0.000998565652810839, "loss": 3.7839, "step": 472 }, { "epoch": 0.05, "grad_norm": 2.2475621503457206, "learning_rate": 0.000998551562927236, "loss": 4.1462, "step": 473 }, { "epoch": 0.05, "grad_norm": 1.6045387137904503, "learning_rate": 0.000998537404277948, "loss": 3.9835, "step": 474 }, { "epoch": 0.05, "grad_norm": 2.7006939747549183, "learning_rate": 0.0009985231768649284, "loss": 4.1179, "step": 475 }, { "epoch": 0.05, "grad_norm": 1.6887332154261994, "learning_rate": 0.000998508880690139, "loss": 4.1517, "step": 476 }, { "epoch": 0.05, "grad_norm": 1.1778855649437638, "learning_rate": 0.000998494515755552, "loss": 4.0779, "step": 477 }, { "epoch": 0.05, "grad_norm": 1.446096024452147, "learning_rate": 0.0009984800820631488, "loss": 4.1715, "step": 478 }, { "epoch": 0.05, "grad_norm": 1.4455122321620089, "learning_rate": 0.0009984655796149201, "loss": 4.1734, "step": 479 }, { "epoch": 0.06, "grad_norm": 1.0806047789948092, "learning_rate": 0.0009984510084128661, "loss": 4.0459, "step": 480 }, { "epoch": 0.06, "grad_norm": 1.3034820783296346, "learning_rate": 0.0009984363684589972, "loss": 4.0169, "step": 481 }, { "epoch": 0.06, "grad_norm": 1.343110208424966, "learning_rate": 0.0009984216597553322, "loss": 4.0863, "step": 482 }, { "epoch": 0.06, "grad_norm": 1.0345473584785687, "learning_rate": 0.0009984068823039, "loss": 4.037, "step": 483 }, { "epoch": 0.06, "grad_norm": 1.2596976228501233, "learning_rate": 0.0009983920361067388, "loss": 4.0488, "step": 484 }, { "epoch": 0.06, "grad_norm": 3.6754874814706047, "learning_rate": 0.0009983771211658965, "loss": 4.0807, "step": 485 }, { "epoch": 0.06, "grad_norm": 2.1297953873784135, "learning_rate": 0.0009983621374834303, "loss": 4.0427, "step": 486 }, { "epoch": 0.06, "grad_norm": 1.2101768363629144, "learning_rate": 0.0009983470850614068, "loss": 3.9335, "step": 487 }, { "epoch": 0.06, "grad_norm": 2.468270571069635, "learning_rate": 0.0009983319639019024, "loss": 3.9995, "step": 488 }, { "epoch": 0.06, "grad_norm": 1.1587324164941648, "learning_rate": 0.0009983167740070025, "loss": 4.1912, "step": 489 }, { "epoch": 0.06, "grad_norm": 1.7808245295106495, "learning_rate": 0.0009983015153788026, "loss": 3.9911, "step": 490 }, { "epoch": 0.06, "grad_norm": 1.192055817645341, "learning_rate": 0.000998286188019407, "loss": 4.0281, "step": 491 }, { "epoch": 0.06, "grad_norm": 1.2574076509866403, "learning_rate": 0.00099827079193093, "loss": 4.0027, "step": 492 }, { "epoch": 0.06, "grad_norm": 1.4192671064936926, "learning_rate": 0.0009982553271154953, "loss": 3.9539, "step": 493 }, { "epoch": 0.06, "grad_norm": 1.0134641904966863, "learning_rate": 0.0009982397935752356, "loss": 3.9714, "step": 494 }, { "epoch": 0.06, "grad_norm": 11.683648604597867, "learning_rate": 0.0009982241913122937, "loss": 3.9447, "step": 495 }, { "epoch": 0.06, "grad_norm": 1.783985761581361, "learning_rate": 0.000998208520328822, "loss": 4.3434, "step": 496 }, { "epoch": 0.06, "grad_norm": 1.2507224995234405, "learning_rate": 0.0009981927806269812, "loss": 3.9493, "step": 497 }, { "epoch": 0.06, "grad_norm": 1.7577153994572337, "learning_rate": 0.0009981769722089428, "loss": 4.2464, "step": 498 }, { "epoch": 0.06, "grad_norm": 1.4431069203273459, "learning_rate": 0.0009981610950768873, "loss": 3.9504, "step": 499 }, { "epoch": 0.06, "grad_norm": 1.0900601757684483, "learning_rate": 0.0009981451492330046, "loss": 4.1113, "step": 500 }, { "epoch": 0.06, "grad_norm": 9.492716403446362, "learning_rate": 0.000998129134679494, "loss": 4.1127, "step": 
501 }, { "epoch": 0.06, "grad_norm": 1.0761124032807439, "learning_rate": 0.0009981130514185646, "loss": 4.0216, "step": 502 }, { "epoch": 0.06, "grad_norm": 1.2934283885560995, "learning_rate": 0.0009980968994524344, "loss": 4.0085, "step": 503 }, { "epoch": 0.06, "grad_norm": 1.2722402185340702, "learning_rate": 0.0009980806787833316, "loss": 4.2799, "step": 504 }, { "epoch": 0.06, "grad_norm": 1.7145797987384368, "learning_rate": 0.0009980643894134935, "loss": 4.1183, "step": 505 }, { "epoch": 0.06, "grad_norm": 1.39746703327607, "learning_rate": 0.000998048031345167, "loss": 3.9937, "step": 506 }, { "epoch": 0.06, "grad_norm": 3.879708885508142, "learning_rate": 0.0009980316045806082, "loss": 3.9038, "step": 507 }, { "epoch": 0.06, "grad_norm": 1.171093034680459, "learning_rate": 0.0009980151091220826, "loss": 4.0122, "step": 508 }, { "epoch": 0.06, "grad_norm": 3.563034483084031, "learning_rate": 0.000997998544971866, "loss": 4.302, "step": 509 }, { "epoch": 0.06, "grad_norm": 1.2457009152199578, "learning_rate": 0.0009979819121322426, "loss": 3.9838, "step": 510 }, { "epoch": 0.06, "grad_norm": 1.3956653274257755, "learning_rate": 0.000997965210605507, "loss": 4.1505, "step": 511 }, { "epoch": 0.06, "grad_norm": 1.4552619496380232, "learning_rate": 0.0009979484403939626, "loss": 3.8988, "step": 512 }, { "epoch": 0.06, "grad_norm": 1.3036964736196672, "learning_rate": 0.0009979316014999226, "loss": 4.0173, "step": 513 }, { "epoch": 0.06, "grad_norm": 1.5853377677123257, "learning_rate": 0.0009979146939257098, "loss": 4.041, "step": 514 }, { "epoch": 0.06, "grad_norm": 1.1099636757797788, "learning_rate": 0.000997897717673656, "loss": 4.0047, "step": 515 }, { "epoch": 0.06, "grad_norm": 1.1249837924978534, "learning_rate": 0.0009978806727461028, "loss": 3.9929, "step": 516 }, { "epoch": 0.06, "grad_norm": 2.1190538814048168, "learning_rate": 0.000997863559145401, "loss": 4.154, "step": 517 }, { "epoch": 0.06, "grad_norm": 1.2202539139607076, "learning_rate": 0.0009978463768739118, "loss": 4.1019, "step": 518 }, { "epoch": 0.06, "grad_norm": 1.1941719420893548, "learning_rate": 0.0009978291259340045, "loss": 3.8154, "step": 519 }, { "epoch": 0.06, "grad_norm": 1.1001779694858749, "learning_rate": 0.0009978118063280587, "loss": 4.1668, "step": 520 }, { "epoch": 0.06, "grad_norm": 1.0526018043421055, "learning_rate": 0.0009977944180584637, "loss": 3.841, "step": 521 }, { "epoch": 0.06, "grad_norm": 1.1309280667773838, "learning_rate": 0.0009977769611276173, "loss": 3.9242, "step": 522 }, { "epoch": 0.06, "grad_norm": 1.209338166494763, "learning_rate": 0.0009977594355379275, "loss": 3.8905, "step": 523 }, { "epoch": 0.06, "grad_norm": 1.317492702298252, "learning_rate": 0.000997741841291812, "loss": 4.0329, "step": 524 }, { "epoch": 0.06, "grad_norm": 1.1830053375198593, "learning_rate": 0.000997724178391697, "loss": 4.2554, "step": 525 }, { "epoch": 0.06, "grad_norm": 1.116456297721812, "learning_rate": 0.0009977064468400193, "loss": 4.0597, "step": 526 }, { "epoch": 0.06, "grad_norm": 1.167414354613278, "learning_rate": 0.0009976886466392244, "loss": 4.0593, "step": 527 }, { "epoch": 0.06, "grad_norm": 0.9948125581199422, "learning_rate": 0.0009976707777917676, "loss": 3.9882, "step": 528 }, { "epoch": 0.06, "grad_norm": 1.2243898779337137, "learning_rate": 0.0009976528403001133, "loss": 3.8375, "step": 529 }, { "epoch": 0.06, "grad_norm": 1.1084328206525864, "learning_rate": 0.0009976348341667358, "loss": 4.1369, "step": 530 }, { "epoch": 0.06, "grad_norm": 1.0970530849024587, 
"learning_rate": 0.0009976167593941188, "loss": 4.1603, "step": 531 }, { "epoch": 0.06, "grad_norm": 1.0875265358749018, "learning_rate": 0.000997598615984755, "loss": 4.1142, "step": 532 }, { "epoch": 0.06, "grad_norm": 1.1963104200399926, "learning_rate": 0.0009975804039411475, "loss": 4.1495, "step": 533 }, { "epoch": 0.06, "grad_norm": 0.9534352857631794, "learning_rate": 0.0009975621232658082, "loss": 4.092, "step": 534 }, { "epoch": 0.06, "grad_norm": 0.9980043728935059, "learning_rate": 0.000997543773961258, "loss": 4.0697, "step": 535 }, { "epoch": 0.06, "grad_norm": 1.3228467679356763, "learning_rate": 0.0009975253560300283, "loss": 3.8682, "step": 536 }, { "epoch": 0.06, "grad_norm": 1.2793617773650656, "learning_rate": 0.0009975068694746596, "loss": 4.1459, "step": 537 }, { "epoch": 0.06, "grad_norm": 1.2773022144338195, "learning_rate": 0.0009974883142977015, "loss": 4.3009, "step": 538 }, { "epoch": 0.06, "grad_norm": 3.2144853722947357, "learning_rate": 0.0009974696905017135, "loss": 3.9673, "step": 539 }, { "epoch": 0.06, "grad_norm": 1.0462135449876753, "learning_rate": 0.0009974509980892642, "loss": 3.9808, "step": 540 }, { "epoch": 0.06, "grad_norm": 1.1931487705312933, "learning_rate": 0.0009974322370629321, "loss": 4.1655, "step": 541 }, { "epoch": 0.06, "grad_norm": 1.047704443564962, "learning_rate": 0.000997413407425305, "loss": 3.9892, "step": 542 }, { "epoch": 0.06, "grad_norm": 5.431836044959927, "learning_rate": 0.0009973945091789796, "loss": 4.0737, "step": 543 }, { "epoch": 0.06, "grad_norm": 1.3574136419934706, "learning_rate": 0.000997375542326563, "loss": 4.0258, "step": 544 }, { "epoch": 0.06, "grad_norm": 1.2484324174567876, "learning_rate": 0.0009973565068706711, "loss": 3.8645, "step": 545 }, { "epoch": 0.06, "grad_norm": 1.0670779726939208, "learning_rate": 0.0009973374028139296, "loss": 4.0809, "step": 546 }, { "epoch": 0.06, "grad_norm": 1.00789163371799, "learning_rate": 0.0009973182301589736, "loss": 4.071, "step": 547 }, { "epoch": 0.06, "grad_norm": 1.0338300245477874, "learning_rate": 0.0009972989889084473, "loss": 3.9504, "step": 548 }, { "epoch": 0.06, "grad_norm": 1.6691050999369597, "learning_rate": 0.000997279679065005, "loss": 4.0306, "step": 549 }, { "epoch": 0.06, "grad_norm": 1.0251625987564845, "learning_rate": 0.0009972603006313098, "loss": 4.2332, "step": 550 }, { "epoch": 0.06, "grad_norm": 1.2845964351464307, "learning_rate": 0.000997240853610035, "loss": 4.0558, "step": 551 }, { "epoch": 0.06, "grad_norm": 1.4575420998231525, "learning_rate": 0.0009972213380038627, "loss": 3.9813, "step": 552 }, { "epoch": 0.06, "grad_norm": 1.0498509990573848, "learning_rate": 0.0009972017538154845, "loss": 4.1315, "step": 553 }, { "epoch": 0.06, "grad_norm": 1.2435502290262965, "learning_rate": 0.000997182101047602, "loss": 3.9056, "step": 554 }, { "epoch": 0.06, "grad_norm": 1.1316660489182702, "learning_rate": 0.0009971623797029258, "loss": 3.9719, "step": 555 }, { "epoch": 0.06, "grad_norm": 1.2339845970692411, "learning_rate": 0.0009971425897841765, "loss": 3.8693, "step": 556 }, { "epoch": 0.06, "grad_norm": 1.1917792142387893, "learning_rate": 0.0009971227312940826, "loss": 4.1151, "step": 557 }, { "epoch": 0.06, "grad_norm": 1.0801720642026404, "learning_rate": 0.0009971028042353844, "loss": 3.9647, "step": 558 }, { "epoch": 0.06, "grad_norm": 1.1047783844652286, "learning_rate": 0.00099708280861083, "loss": 4.0665, "step": 559 }, { "epoch": 0.06, "grad_norm": 1.0425391662554808, "learning_rate": 0.0009970627444231776, "loss": 4.0256, 
"step": 560 }, { "epoch": 0.06, "grad_norm": 1.5294558586923575, "learning_rate": 0.000997042611675194, "loss": 3.7925, "step": 561 }, { "epoch": 0.06, "grad_norm": 1.0730985523561904, "learning_rate": 0.0009970224103696568, "loss": 4.0216, "step": 562 }, { "epoch": 0.06, "grad_norm": 1.0970009523396473, "learning_rate": 0.0009970021405093523, "loss": 4.1078, "step": 563 }, { "epoch": 0.06, "grad_norm": 0.9911459993513541, "learning_rate": 0.0009969818020970761, "loss": 3.9093, "step": 564 }, { "epoch": 0.06, "grad_norm": 1.2011683868949352, "learning_rate": 0.0009969613951356338, "loss": 4.0084, "step": 565 }, { "epoch": 0.06, "grad_norm": 0.9013135092696802, "learning_rate": 0.0009969409196278398, "loss": 3.9724, "step": 566 }, { "epoch": 0.07, "grad_norm": 13.208014656328935, "learning_rate": 0.0009969203755765186, "loss": 4.1441, "step": 567 }, { "epoch": 0.07, "grad_norm": 1.0969621868469979, "learning_rate": 0.0009968997629845038, "loss": 4.076, "step": 568 }, { "epoch": 0.07, "grad_norm": 1.437276737778198, "learning_rate": 0.0009968790818546383, "loss": 3.9697, "step": 569 }, { "epoch": 0.07, "grad_norm": 1.2006767674664782, "learning_rate": 0.000996858332189775, "loss": 3.9736, "step": 570 }, { "epoch": 0.07, "grad_norm": 4.83730972230385, "learning_rate": 0.0009968375139927756, "loss": 3.9491, "step": 571 }, { "epoch": 0.07, "grad_norm": 3.4139022174024958, "learning_rate": 0.000996816627266512, "loss": 4.2448, "step": 572 }, { "epoch": 0.07, "grad_norm": 1.0852405502649718, "learning_rate": 0.0009967956720138647, "loss": 3.9133, "step": 573 }, { "epoch": 0.07, "grad_norm": 1.4677284806849238, "learning_rate": 0.0009967746482377243, "loss": 4.0837, "step": 574 }, { "epoch": 0.07, "grad_norm": 1.5182628271121645, "learning_rate": 0.0009967535559409905, "loss": 4.0463, "step": 575 }, { "epoch": 0.07, "grad_norm": 1.3119732062847278, "learning_rate": 0.0009967323951265725, "loss": 3.9247, "step": 576 }, { "epoch": 0.07, "grad_norm": 1.5829729238076788, "learning_rate": 0.0009967111657973892, "loss": 4.0435, "step": 577 }, { "epoch": 0.07, "grad_norm": 0.9757103539690971, "learning_rate": 0.000996689867956369, "loss": 4.0654, "step": 578 }, { "epoch": 0.07, "grad_norm": 1.0122109034985007, "learning_rate": 0.0009966685016064491, "loss": 4.2211, "step": 579 }, { "epoch": 0.07, "grad_norm": 1.0693864205815913, "learning_rate": 0.0009966470667505767, "loss": 4.1024, "step": 580 }, { "epoch": 0.07, "grad_norm": 1.0789930424642413, "learning_rate": 0.0009966255633917086, "loss": 4.0756, "step": 581 }, { "epoch": 0.07, "grad_norm": 1.1594346922702778, "learning_rate": 0.0009966039915328105, "loss": 4.1535, "step": 582 }, { "epoch": 0.07, "grad_norm": 1.1167767671618678, "learning_rate": 0.0009965823511768578, "loss": 4.0344, "step": 583 }, { "epoch": 0.07, "grad_norm": 1.220196100134562, "learning_rate": 0.0009965606423268355, "loss": 4.1537, "step": 584 }, { "epoch": 0.07, "grad_norm": 1.0584694460874193, "learning_rate": 0.000996538864985738, "loss": 4.1343, "step": 585 }, { "epoch": 0.07, "grad_norm": 1.0159307461891351, "learning_rate": 0.0009965170191565688, "loss": 4.0014, "step": 586 }, { "epoch": 0.07, "grad_norm": 0.979051403666123, "learning_rate": 0.0009964951048423414, "loss": 4.0529, "step": 587 }, { "epoch": 0.07, "grad_norm": 1.0737042238713586, "learning_rate": 0.0009964731220460784, "loss": 4.1109, "step": 588 }, { "epoch": 0.07, "grad_norm": 1.0976324714400292, "learning_rate": 0.000996451070770812, "loss": 4.1789, "step": 589 }, { "epoch": 0.07, "grad_norm": 
1.177141319134397, "learning_rate": 0.0009964289510195831, "loss": 4.1089, "step": 590 }, { "epoch": 0.07, "grad_norm": 5.845499650279221, "learning_rate": 0.0009964067627954436, "loss": 4.0542, "step": 591 }, { "epoch": 0.07, "grad_norm": 1.0327410606400718, "learning_rate": 0.0009963845061014534, "loss": 4.1311, "step": 592 }, { "epoch": 0.07, "grad_norm": 1.8872697504861227, "learning_rate": 0.0009963621809406826, "loss": 4.2138, "step": 593 }, { "epoch": 0.07, "grad_norm": 0.9715735278119713, "learning_rate": 0.0009963397873162107, "loss": 3.8796, "step": 594 }, { "epoch": 0.07, "grad_norm": 1.396484083580813, "learning_rate": 0.0009963173252311257, "loss": 4.3613, "step": 595 }, { "epoch": 0.07, "grad_norm": 1.087102400979372, "learning_rate": 0.0009962947946885268, "loss": 4.0034, "step": 596 }, { "epoch": 0.07, "grad_norm": 1.243137341425614, "learning_rate": 0.000996272195691521, "loss": 4.0134, "step": 597 }, { "epoch": 0.07, "grad_norm": 1.164944204791553, "learning_rate": 0.0009962495282432255, "loss": 4.0279, "step": 598 }, { "epoch": 0.07, "grad_norm": 1.1922008678038478, "learning_rate": 0.0009962267923467672, "loss": 4.0981, "step": 599 }, { "epoch": 0.07, "grad_norm": 1.2708796676811687, "learning_rate": 0.0009962039880052817, "loss": 4.2194, "step": 600 }, { "epoch": 0.07, "grad_norm": 1.0841555708928168, "learning_rate": 0.0009961811152219148, "loss": 4.1596, "step": 601 }, { "epoch": 0.07, "grad_norm": 1.2385443973044648, "learning_rate": 0.0009961581739998209, "loss": 4.0312, "step": 602 }, { "epoch": 0.07, "grad_norm": 1.2028767241875324, "learning_rate": 0.0009961351643421646, "loss": 4.1067, "step": 603 }, { "epoch": 0.07, "grad_norm": 1.2310479343752814, "learning_rate": 0.0009961120862521195, "loss": 4.15, "step": 604 }, { "epoch": 0.07, "grad_norm": 1.5314113521550154, "learning_rate": 0.000996088939732869, "loss": 3.9311, "step": 605 }, { "epoch": 0.07, "grad_norm": 1.1454211356184008, "learning_rate": 0.0009960657247876056, "loss": 4.2223, "step": 606 }, { "epoch": 0.07, "grad_norm": 1.1993726476533393, "learning_rate": 0.000996042441419531, "loss": 4.0683, "step": 607 }, { "epoch": 0.07, "grad_norm": 1.0608622751507033, "learning_rate": 0.0009960190896318572, "loss": 3.9369, "step": 608 }, { "epoch": 0.07, "grad_norm": 1.0521679654159657, "learning_rate": 0.0009959956694278052, "loss": 3.9274, "step": 609 }, { "epoch": 0.07, "grad_norm": 1.1003977104744176, "learning_rate": 0.000995972180810605, "loss": 3.7949, "step": 610 }, { "epoch": 0.07, "grad_norm": 1.4170906972367623, "learning_rate": 0.0009959486237834964, "loss": 3.7741, "step": 611 }, { "epoch": 0.07, "grad_norm": 1.0458262622318437, "learning_rate": 0.0009959249983497289, "loss": 4.153, "step": 612 }, { "epoch": 0.07, "grad_norm": 1.2039826291625173, "learning_rate": 0.0009959013045125612, "loss": 3.9622, "step": 613 }, { "epoch": 0.07, "grad_norm": 1.1281695800105496, "learning_rate": 0.000995877542275261, "loss": 3.9142, "step": 614 }, { "epoch": 0.07, "grad_norm": 1.2592342179278442, "learning_rate": 0.0009958537116411064, "loss": 4.007, "step": 615 }, { "epoch": 0.07, "grad_norm": 1.2370153859047168, "learning_rate": 0.000995829812613384, "loss": 4.1878, "step": 616 }, { "epoch": 0.07, "grad_norm": 1.1557899550570732, "learning_rate": 0.0009958058451953902, "loss": 4.0741, "step": 617 }, { "epoch": 0.07, "grad_norm": 0.9950810165134163, "learning_rate": 0.0009957818093904313, "loss": 4.1322, "step": 618 }, { "epoch": 0.07, "grad_norm": 1.0622000307870776, "learning_rate": 0.000995757705201822, 
"loss": 4.0737, "step": 619 }, { "epoch": 0.07, "grad_norm": 0.9522213326536202, "learning_rate": 0.0009957335326328874, "loss": 4.1483, "step": 620 }, { "epoch": 0.07, "grad_norm": 0.872545612406632, "learning_rate": 0.0009957092916869613, "loss": 4.0376, "step": 621 }, { "epoch": 0.07, "grad_norm": 1.3212288739460372, "learning_rate": 0.0009956849823673877, "loss": 3.8471, "step": 622 }, { "epoch": 0.07, "grad_norm": 0.8741241742111597, "learning_rate": 0.0009956606046775192, "loss": 4.0802, "step": 623 }, { "epoch": 0.07, "grad_norm": 0.9697166520873234, "learning_rate": 0.0009956361586207186, "loss": 4.0258, "step": 624 }, { "epoch": 0.07, "grad_norm": 1.5366076789215608, "learning_rate": 0.0009956116442003575, "loss": 4.0789, "step": 625 }, { "epoch": 0.07, "grad_norm": 1.1351325188396468, "learning_rate": 0.0009955870614198174, "loss": 3.8349, "step": 626 }, { "epoch": 0.07, "grad_norm": 0.9904423078435894, "learning_rate": 0.000995562410282489, "loss": 4.1877, "step": 627 }, { "epoch": 0.07, "grad_norm": 1.2214548363493867, "learning_rate": 0.0009955376907917722, "loss": 3.9226, "step": 628 }, { "epoch": 0.07, "grad_norm": 1.4872237285653587, "learning_rate": 0.0009955129029510768, "loss": 4.1086, "step": 629 }, { "epoch": 0.07, "grad_norm": 1.0586558181679757, "learning_rate": 0.0009954880467638219, "loss": 4.1311, "step": 630 }, { "epoch": 0.07, "grad_norm": 1.1166873306787115, "learning_rate": 0.0009954631222334356, "loss": 4.1628, "step": 631 }, { "epoch": 0.07, "grad_norm": 1.1020912519355728, "learning_rate": 0.0009954381293633561, "loss": 4.1827, "step": 632 }, { "epoch": 0.07, "grad_norm": 2.541200983213323, "learning_rate": 0.0009954130681570305, "loss": 4.0049, "step": 633 }, { "epoch": 0.07, "grad_norm": 1.4224241887794318, "learning_rate": 0.0009953879386179157, "loss": 4.2396, "step": 634 }, { "epoch": 0.07, "grad_norm": 0.9968253551830029, "learning_rate": 0.0009953627407494777, "loss": 4.1226, "step": 635 }, { "epoch": 0.07, "grad_norm": 1.4185501347974874, "learning_rate": 0.000995337474555192, "loss": 3.8936, "step": 636 }, { "epoch": 0.07, "grad_norm": 1.0898506044349183, "learning_rate": 0.0009953121400385438, "loss": 3.8433, "step": 637 }, { "epoch": 0.07, "grad_norm": 1.5626121980531213, "learning_rate": 0.0009952867372030273, "loss": 4.0309, "step": 638 }, { "epoch": 0.07, "grad_norm": 0.9483500985414226, "learning_rate": 0.0009952612660521466, "loss": 4.0238, "step": 639 }, { "epoch": 0.07, "grad_norm": 1.0794690646472656, "learning_rate": 0.0009952357265894146, "loss": 3.9044, "step": 640 }, { "epoch": 0.07, "grad_norm": 0.9944819636930577, "learning_rate": 0.000995210118818354, "loss": 4.1289, "step": 641 }, { "epoch": 0.07, "grad_norm": 0.9706289386802778, "learning_rate": 0.0009951844427424973, "loss": 4.0482, "step": 642 }, { "epoch": 0.07, "grad_norm": 0.9929649298053834, "learning_rate": 0.0009951586983653858, "loss": 3.9995, "step": 643 }, { "epoch": 0.07, "grad_norm": 0.9302516483805908, "learning_rate": 0.0009951328856905703, "loss": 3.9215, "step": 644 }, { "epoch": 0.07, "grad_norm": 1.062409809140414, "learning_rate": 0.0009951070047216116, "loss": 4.0465, "step": 645 }, { "epoch": 0.07, "grad_norm": 1.0190850036087398, "learning_rate": 0.000995081055462079, "loss": 4.0241, "step": 646 }, { "epoch": 0.07, "grad_norm": 2.2025402598501818, "learning_rate": 0.0009950550379155519, "loss": 3.843, "step": 647 }, { "epoch": 0.07, "grad_norm": 1.0838895111910496, "learning_rate": 0.000995028952085619, "loss": 3.9391, "step": 648 }, { "epoch": 0.07, 
"grad_norm": 0.9156855323769093, "learning_rate": 0.0009950027979758781, "loss": 3.8727, "step": 649 }, { "epoch": 0.07, "grad_norm": 0.9831840876750831, "learning_rate": 0.0009949765755899369, "loss": 4.0466, "step": 650 }, { "epoch": 0.07, "grad_norm": 1.0144260482607197, "learning_rate": 0.0009949502849314123, "loss": 4.0569, "step": 651 }, { "epoch": 0.07, "grad_norm": 1.087532418616258, "learning_rate": 0.0009949239260039304, "loss": 4.0124, "step": 652 }, { "epoch": 0.07, "grad_norm": 1.1115011584707322, "learning_rate": 0.0009948974988111272, "loss": 3.9083, "step": 653 }, { "epoch": 0.07, "grad_norm": 1.1754413854281331, "learning_rate": 0.0009948710033566475, "loss": 3.7598, "step": 654 }, { "epoch": 0.08, "grad_norm": 0.9073616922253441, "learning_rate": 0.000994844439644146, "loss": 3.8556, "step": 655 }, { "epoch": 0.08, "grad_norm": 0.9770191942150841, "learning_rate": 0.0009948178076772867, "loss": 3.9657, "step": 656 }, { "epoch": 0.08, "grad_norm": 1.2098902115510097, "learning_rate": 0.0009947911074597428, "loss": 4.0797, "step": 657 }, { "epoch": 0.08, "grad_norm": 1.1240122584040244, "learning_rate": 0.0009947643389951973, "loss": 4.1461, "step": 658 }, { "epoch": 0.08, "grad_norm": 1.2067032998807046, "learning_rate": 0.0009947375022873422, "loss": 4.0355, "step": 659 }, { "epoch": 0.08, "grad_norm": 0.9114366762460284, "learning_rate": 0.0009947105973398794, "loss": 3.8396, "step": 660 }, { "epoch": 0.08, "grad_norm": 1.1253322947399755, "learning_rate": 0.0009946836241565195, "loss": 4.3915, "step": 661 }, { "epoch": 0.08, "grad_norm": 1.021944561425325, "learning_rate": 0.0009946565827409833, "loss": 3.902, "step": 662 }, { "epoch": 0.08, "grad_norm": 0.9158393543874208, "learning_rate": 0.0009946294730970005, "loss": 4.0877, "step": 663 }, { "epoch": 0.08, "grad_norm": 1.2119808024846095, "learning_rate": 0.0009946022952283106, "loss": 3.8803, "step": 664 }, { "epoch": 0.08, "grad_norm": 0.8761944842006684, "learning_rate": 0.0009945750491386616, "loss": 4.056, "step": 665 }, { "epoch": 0.08, "grad_norm": 19.48656722070216, "learning_rate": 0.0009945477348318123, "loss": 4.2876, "step": 666 }, { "epoch": 0.08, "grad_norm": 1.0325658117466887, "learning_rate": 0.00099452035231153, "loss": 4.1135, "step": 667 }, { "epoch": 0.08, "grad_norm": 1.9440027707655119, "learning_rate": 0.0009944929015815913, "loss": 4.0217, "step": 668 }, { "epoch": 0.08, "grad_norm": 1.1902740653392259, "learning_rate": 0.0009944653826457828, "loss": 3.8958, "step": 669 }, { "epoch": 0.08, "grad_norm": 0.9534347488300721, "learning_rate": 0.0009944377955079004, "loss": 3.937, "step": 670 }, { "epoch": 0.08, "grad_norm": 1.2427934301429755, "learning_rate": 0.0009944101401717486, "loss": 3.9359, "step": 671 }, { "epoch": 0.08, "grad_norm": 1.1803128820308535, "learning_rate": 0.0009943824166411424, "loss": 3.9248, "step": 672 }, { "epoch": 0.08, "grad_norm": 1.3226917163657204, "learning_rate": 0.0009943546249199056, "loss": 4.0167, "step": 673 }, { "epoch": 0.08, "grad_norm": 1.2392777021034116, "learning_rate": 0.0009943267650118716, "loss": 3.7435, "step": 674 }, { "epoch": 0.08, "grad_norm": 1.6423405371266244, "learning_rate": 0.0009942988369208829, "loss": 4.0427, "step": 675 }, { "epoch": 0.08, "grad_norm": 1.012721279386241, "learning_rate": 0.000994270840650792, "loss": 3.8385, "step": 676 }, { "epoch": 0.08, "grad_norm": 1.1603970960331422, "learning_rate": 0.0009942427762054604, "loss": 3.9409, "step": 677 }, { "epoch": 0.08, "grad_norm": 1.3868886939442144, "learning_rate": 
0.0009942146435887589, "loss": 3.9335, "step": 678 }, { "epoch": 0.08, "grad_norm": 1.107754114523732, "learning_rate": 0.0009941864428045677, "loss": 4.2012, "step": 679 }, { "epoch": 0.08, "grad_norm": 1.1108239019460056, "learning_rate": 0.0009941581738567768, "loss": 4.1193, "step": 680 }, { "epoch": 0.08, "grad_norm": 1.4730349915863195, "learning_rate": 0.0009941298367492854, "loss": 4.1845, "step": 681 }, { "epoch": 0.08, "grad_norm": 1.2935560769252694, "learning_rate": 0.0009941014314860021, "loss": 4.1548, "step": 682 }, { "epoch": 0.08, "grad_norm": 1.1690456014057595, "learning_rate": 0.0009940729580708448, "loss": 4.022, "step": 683 }, { "epoch": 0.08, "grad_norm": 1.1350933927702227, "learning_rate": 0.0009940444165077408, "loss": 4.0981, "step": 684 }, { "epoch": 0.08, "grad_norm": 1.0663624502353592, "learning_rate": 0.0009940158068006267, "loss": 4.159, "step": 685 }, { "epoch": 0.08, "grad_norm": 1.1967570939312862, "learning_rate": 0.0009939871289534488, "loss": 4.0572, "step": 686 }, { "epoch": 0.08, "grad_norm": 1.4308361240039056, "learning_rate": 0.0009939583829701628, "loss": 3.9628, "step": 687 }, { "epoch": 0.08, "grad_norm": 1.0749087936397916, "learning_rate": 0.0009939295688547337, "loss": 4.0917, "step": 688 }, { "epoch": 0.08, "grad_norm": 0.9961854543075069, "learning_rate": 0.0009939006866111356, "loss": 4.3358, "step": 689 }, { "epoch": 0.08, "grad_norm": 1.0662450286553637, "learning_rate": 0.0009938717362433524, "loss": 4.0191, "step": 690 }, { "epoch": 0.08, "grad_norm": 1.203978675004886, "learning_rate": 0.0009938427177553773, "loss": 4.1932, "step": 691 }, { "epoch": 0.08, "grad_norm": 1.0547630034650501, "learning_rate": 0.0009938136311512127, "loss": 4.0764, "step": 692 }, { "epoch": 0.08, "grad_norm": 1.0074051321598327, "learning_rate": 0.0009937844764348707, "loss": 3.8708, "step": 693 }, { "epoch": 0.08, "grad_norm": 0.998779577972688, "learning_rate": 0.0009937552536103727, "loss": 3.9777, "step": 694 }, { "epoch": 0.08, "grad_norm": 0.9559575919471303, "learning_rate": 0.000993725962681749, "loss": 4.0575, "step": 695 }, { "epoch": 0.08, "grad_norm": 1.5793672294254124, "learning_rate": 0.0009936966036530402, "loss": 3.7767, "step": 696 }, { "epoch": 0.08, "grad_norm": 1.0000291986542595, "learning_rate": 0.0009936671765282956, "loss": 4.2494, "step": 697 }, { "epoch": 0.08, "grad_norm": 0.9365793776478671, "learning_rate": 0.0009936376813115741, "loss": 3.6897, "step": 698 }, { "epoch": 0.08, "grad_norm": 0.9531200207053346, "learning_rate": 0.000993608118006944, "loss": 4.0084, "step": 699 }, { "epoch": 0.08, "grad_norm": 0.9648629400206552, "learning_rate": 0.0009935784866184833, "loss": 3.9867, "step": 700 }, { "epoch": 0.08, "grad_norm": 1.1217945452813942, "learning_rate": 0.0009935487871502787, "loss": 4.0233, "step": 701 }, { "epoch": 0.08, "grad_norm": 5.468742717439602, "learning_rate": 0.0009935190196064267, "loss": 3.8694, "step": 702 }, { "epoch": 0.08, "grad_norm": 1.0970464348668187, "learning_rate": 0.0009934891839910333, "loss": 4.1408, "step": 703 }, { "epoch": 0.08, "grad_norm": 1.0527563665166895, "learning_rate": 0.0009934592803082138, "loss": 3.9224, "step": 704 }, { "epoch": 0.08, "grad_norm": 1.1788564709369937, "learning_rate": 0.0009934293085620929, "loss": 4.25, "step": 705 }, { "epoch": 0.08, "grad_norm": 1.1177080766521004, "learning_rate": 0.0009933992687568044, "loss": 3.7747, "step": 706 }, { "epoch": 0.08, "grad_norm": 1.331533023082532, "learning_rate": 0.0009933691608964917, "loss": 4.0996, "step": 707 }, { 
"epoch": 0.08, "grad_norm": 0.9932438632422246, "learning_rate": 0.0009933389849853078, "loss": 3.931, "step": 708 }, { "epoch": 0.08, "grad_norm": 0.9473595200284471, "learning_rate": 0.0009933087410274148, "loss": 3.9864, "step": 709 }, { "epoch": 0.08, "grad_norm": 1.1266013054058719, "learning_rate": 0.0009932784290269843, "loss": 4.1737, "step": 710 }, { "epoch": 0.08, "grad_norm": 0.9884842226835872, "learning_rate": 0.0009932480489881974, "loss": 4.0466, "step": 711 }, { "epoch": 0.08, "grad_norm": 0.9530817273057067, "learning_rate": 0.0009932176009152442, "loss": 4.1339, "step": 712 }, { "epoch": 0.08, "grad_norm": 1.1501946911028074, "learning_rate": 0.0009931870848123245, "loss": 4.1296, "step": 713 }, { "epoch": 0.08, "grad_norm": 1.4350472895321407, "learning_rate": 0.0009931565006836476, "loss": 3.9715, "step": 714 }, { "epoch": 0.08, "grad_norm": 1.18946767117426, "learning_rate": 0.0009931258485334315, "loss": 4.0962, "step": 715 }, { "epoch": 0.08, "grad_norm": 1.1231827461701815, "learning_rate": 0.0009930951283659048, "loss": 3.7519, "step": 716 }, { "epoch": 0.08, "grad_norm": 0.9354466592494857, "learning_rate": 0.0009930643401853043, "loss": 3.8916, "step": 717 }, { "epoch": 0.08, "grad_norm": 0.9972892020163304, "learning_rate": 0.0009930334839958765, "loss": 3.9692, "step": 718 }, { "epoch": 0.08, "grad_norm": 0.9218413712056626, "learning_rate": 0.000993002559801878, "loss": 3.898, "step": 719 }, { "epoch": 0.08, "grad_norm": 1.0490325173709558, "learning_rate": 0.0009929715676075736, "loss": 3.8237, "step": 720 }, { "epoch": 0.08, "grad_norm": 1.049671714073017, "learning_rate": 0.0009929405074172383, "loss": 4.3424, "step": 721 }, { "epoch": 0.08, "grad_norm": 1.081740112093551, "learning_rate": 0.0009929093792351567, "loss": 3.8379, "step": 722 }, { "epoch": 0.08, "grad_norm": 0.9795891942566914, "learning_rate": 0.0009928781830656215, "loss": 4.1246, "step": 723 }, { "epoch": 0.08, "grad_norm": 1.3036431583133856, "learning_rate": 0.0009928469189129363, "loss": 4.11, "step": 724 }, { "epoch": 0.08, "grad_norm": 1.4459492283264697, "learning_rate": 0.0009928155867814131, "loss": 4.0217, "step": 725 }, { "epoch": 0.08, "grad_norm": 2.1151386796548914, "learning_rate": 0.0009927841866753735, "loss": 4.0514, "step": 726 }, { "epoch": 0.08, "grad_norm": 1.0899364326406684, "learning_rate": 0.000992752718599149, "loss": 4.2129, "step": 727 }, { "epoch": 0.08, "grad_norm": 1.065228216471037, "learning_rate": 0.0009927211825570793, "loss": 3.9278, "step": 728 }, { "epoch": 0.08, "grad_norm": 1.0817986510380675, "learning_rate": 0.000992689578553515, "loss": 3.9406, "step": 729 }, { "epoch": 0.08, "grad_norm": 1.0862220484169132, "learning_rate": 0.0009926579065928144, "loss": 3.846, "step": 730 }, { "epoch": 0.08, "grad_norm": 1.0620328173436737, "learning_rate": 0.000992626166679347, "loss": 4.0841, "step": 731 }, { "epoch": 0.08, "grad_norm": 1.0124476830654119, "learning_rate": 0.0009925943588174897, "loss": 3.8932, "step": 732 }, { "epoch": 0.08, "grad_norm": 0.890905917969885, "learning_rate": 0.0009925624830116305, "loss": 4.2086, "step": 733 }, { "epoch": 0.08, "grad_norm": 1.1394942261731105, "learning_rate": 0.000992530539266166, "loss": 4.0573, "step": 734 }, { "epoch": 0.08, "grad_norm": 17.58134690738853, "learning_rate": 0.0009924985275855018, "loss": 4.1003, "step": 735 }, { "epoch": 0.08, "grad_norm": 0.9102741055274967, "learning_rate": 0.000992466447974054, "loss": 3.7821, "step": 736 }, { "epoch": 0.08, "grad_norm": 0.9754834362355617, 
"learning_rate": 0.0009924343004362466, "loss": 3.8204, "step": 737 }, { "epoch": 0.08, "grad_norm": 1.187679547843071, "learning_rate": 0.0009924020849765142, "loss": 3.8235, "step": 738 }, { "epoch": 0.08, "grad_norm": 1.2989011173430143, "learning_rate": 0.0009923698015993003, "loss": 3.6622, "step": 739 }, { "epoch": 0.08, "grad_norm": 1.0182550696581316, "learning_rate": 0.0009923374503090577, "loss": 4.1227, "step": 740 }, { "epoch": 0.08, "grad_norm": 4.398123225700433, "learning_rate": 0.0009923050311102487, "loss": 3.9437, "step": 741 }, { "epoch": 0.09, "grad_norm": 1.9306596644099066, "learning_rate": 0.0009922725440073446, "loss": 4.097, "step": 742 }, { "epoch": 0.09, "grad_norm": 0.9930199840085645, "learning_rate": 0.0009922399890048268, "loss": 3.7801, "step": 743 }, { "epoch": 0.09, "grad_norm": 1.0933343536839801, "learning_rate": 0.0009922073661071855, "loss": 3.9452, "step": 744 }, { "epoch": 0.09, "grad_norm": 1.0066584553571114, "learning_rate": 0.0009921746753189203, "loss": 4.0686, "step": 745 }, { "epoch": 0.09, "grad_norm": 1.1074190292369288, "learning_rate": 0.0009921419166445404, "loss": 3.9655, "step": 746 }, { "epoch": 0.09, "grad_norm": 0.9432259455621148, "learning_rate": 0.0009921090900885641, "loss": 4.0372, "step": 747 }, { "epoch": 0.09, "grad_norm": 3.017674450039497, "learning_rate": 0.0009920761956555193, "loss": 3.9217, "step": 748 }, { "epoch": 0.09, "grad_norm": 1.0280470835173998, "learning_rate": 0.0009920432333499433, "loss": 4.0152, "step": 749 }, { "epoch": 0.09, "grad_norm": 0.9823285755893133, "learning_rate": 0.0009920102031763822, "loss": 4.0291, "step": 750 }, { "epoch": 0.09, "grad_norm": 0.9669013726727463, "learning_rate": 0.0009919771051393922, "loss": 3.8503, "step": 751 }, { "epoch": 0.09, "grad_norm": 1.537456166035882, "learning_rate": 0.0009919439392435385, "loss": 3.769, "step": 752 }, { "epoch": 0.09, "grad_norm": 0.8548767732033391, "learning_rate": 0.0009919107054933956, "loss": 4.0237, "step": 753 }, { "epoch": 0.09, "grad_norm": 0.8811001801027458, "learning_rate": 0.0009918774038935477, "loss": 4.1467, "step": 754 }, { "epoch": 0.09, "grad_norm": 1.2427744326603332, "learning_rate": 0.000991844034448588, "loss": 3.8224, "step": 755 }, { "epoch": 0.09, "grad_norm": 0.9975893449290646, "learning_rate": 0.000991810597163119, "loss": 3.8876, "step": 756 }, { "epoch": 0.09, "grad_norm": 1.1122379622875842, "learning_rate": 0.000991777092041753, "loss": 3.7849, "step": 757 }, { "epoch": 0.09, "grad_norm": 0.8907202185325103, "learning_rate": 0.0009917435190891111, "loss": 3.9553, "step": 758 }, { "epoch": 0.09, "grad_norm": 1.6546105708760155, "learning_rate": 0.0009917098783098243, "loss": 3.9838, "step": 759 }, { "epoch": 0.09, "grad_norm": 1.0196671000600643, "learning_rate": 0.0009916761697085327, "loss": 4.1361, "step": 760 }, { "epoch": 0.09, "grad_norm": 1.3762598045752321, "learning_rate": 0.0009916423932898857, "loss": 3.9431, "step": 761 }, { "epoch": 0.09, "grad_norm": 1.2241709652745174, "learning_rate": 0.0009916085490585423, "loss": 4.0906, "step": 762 }, { "epoch": 0.09, "grad_norm": 2.5344877136963833, "learning_rate": 0.0009915746370191701, "loss": 3.9918, "step": 763 }, { "epoch": 0.09, "grad_norm": 0.8689955149528441, "learning_rate": 0.0009915406571764471, "loss": 4.0184, "step": 764 }, { "epoch": 0.09, "grad_norm": 1.2959543445298283, "learning_rate": 0.0009915066095350603, "loss": 4.1736, "step": 765 }, { "epoch": 0.09, "grad_norm": 1.1204533833763546, "learning_rate": 0.0009914724940997053, "loss": 
4.0835, "step": 766 }, { "epoch": 0.09, "grad_norm": 0.9640682617579192, "learning_rate": 0.0009914383108750883, "loss": 3.8902, "step": 767 }, { "epoch": 0.09, "grad_norm": 1.166510744809395, "learning_rate": 0.000991404059865924, "loss": 4.0781, "step": 768 }, { "epoch": 0.09, "grad_norm": 1.1577045215436794, "learning_rate": 0.0009913697410769366, "loss": 3.883, "step": 769 }, { "epoch": 0.09, "grad_norm": 1.122449767602365, "learning_rate": 0.0009913353545128597, "loss": 3.9835, "step": 770 }, { "epoch": 0.09, "grad_norm": 1.1571686982537643, "learning_rate": 0.0009913009001784364, "loss": 4.1395, "step": 771 }, { "epoch": 0.09, "grad_norm": 1.040446710303204, "learning_rate": 0.0009912663780784188, "loss": 4.0916, "step": 772 }, { "epoch": 0.09, "grad_norm": 1.7034025429253001, "learning_rate": 0.000991231788217569, "loss": 3.8143, "step": 773 }, { "epoch": 0.09, "grad_norm": 1.9180330986454783, "learning_rate": 0.0009911971306006575, "loss": 4.0155, "step": 774 }, { "epoch": 0.09, "grad_norm": 6.111673262997343, "learning_rate": 0.000991162405232465, "loss": 3.9434, "step": 775 }, { "epoch": 0.09, "grad_norm": 1.374875056672268, "learning_rate": 0.0009911276121177812, "loss": 4.2326, "step": 776 }, { "epoch": 0.09, "grad_norm": 1.0396137237423868, "learning_rate": 0.0009910927512614051, "loss": 4.0597, "step": 777 }, { "epoch": 0.09, "grad_norm": 1.0137671887451662, "learning_rate": 0.000991057822668145, "loss": 3.9569, "step": 778 }, { "epoch": 0.09, "grad_norm": 1.1563368676764412, "learning_rate": 0.0009910228263428186, "loss": 3.9306, "step": 779 }, { "epoch": 0.09, "grad_norm": 1.0598592427689284, "learning_rate": 0.000990987762290253, "loss": 3.8386, "step": 780 }, { "epoch": 0.09, "grad_norm": 3.577903073121024, "learning_rate": 0.0009909526305152848, "loss": 4.1691, "step": 781 }, { "epoch": 0.09, "grad_norm": 1.9241809607397247, "learning_rate": 0.0009909174310227596, "loss": 3.8345, "step": 782 }, { "epoch": 0.09, "grad_norm": 0.9315987122865419, "learning_rate": 0.0009908821638175325, "loss": 3.9253, "step": 783 }, { "epoch": 0.09, "grad_norm": 1.0251002931386355, "learning_rate": 0.000990846828904468, "loss": 4.1566, "step": 784 }, { "epoch": 0.09, "grad_norm": 1.0199818235234839, "learning_rate": 0.0009908114262884397, "loss": 4.1043, "step": 785 }, { "epoch": 0.09, "grad_norm": 1.1156558138385453, "learning_rate": 0.0009907759559743311, "loss": 3.9011, "step": 786 }, { "epoch": 0.09, "grad_norm": 1.3564312470349347, "learning_rate": 0.0009907404179670342, "loss": 3.9485, "step": 787 }, { "epoch": 0.09, "grad_norm": 0.9091196002896215, "learning_rate": 0.000990704812271451, "loss": 4.1194, "step": 788 }, { "epoch": 0.09, "grad_norm": 0.9502055692126391, "learning_rate": 0.0009906691388924928, "loss": 4.2732, "step": 789 }, { "epoch": 0.09, "grad_norm": 1.035813467791336, "learning_rate": 0.0009906333978350799, "loss": 3.8516, "step": 790 }, { "epoch": 0.09, "grad_norm": 0.9775579581560876, "learning_rate": 0.000990597589104142, "loss": 4.0115, "step": 791 }, { "epoch": 0.09, "grad_norm": 0.9738646568445928, "learning_rate": 0.0009905617127046182, "loss": 3.8153, "step": 792 }, { "epoch": 0.09, "grad_norm": 0.9013381831949332, "learning_rate": 0.0009905257686414573, "loss": 3.9992, "step": 793 }, { "epoch": 0.09, "grad_norm": 1.015169838689599, "learning_rate": 0.0009904897569196168, "loss": 4.1797, "step": 794 }, { "epoch": 0.09, "grad_norm": 0.9274410229356569, "learning_rate": 0.0009904536775440641, "loss": 3.8443, "step": 795 }, { "epoch": 0.09, "grad_norm": 
0.944539838615447, "learning_rate": 0.0009904175305197752, "loss": 3.9231, "step": 796 }, { "epoch": 0.09, "grad_norm": 0.8035238524617749, "learning_rate": 0.0009903813158517363, "loss": 3.8491, "step": 797 }, { "epoch": 0.09, "grad_norm": 1.05305129000883, "learning_rate": 0.0009903450335449423, "loss": 4.0726, "step": 798 }, { "epoch": 0.09, "grad_norm": 1.1211758829609977, "learning_rate": 0.0009903086836043978, "loss": 4.1236, "step": 799 }, { "epoch": 0.09, "grad_norm": 1.8295000288987961, "learning_rate": 0.0009902722660351166, "loss": 3.949, "step": 800 }, { "epoch": 0.09, "grad_norm": 0.9436444990787903, "learning_rate": 0.0009902357808421218, "loss": 3.79, "step": 801 }, { "epoch": 0.09, "grad_norm": 1.032163990231982, "learning_rate": 0.0009901992280304456, "loss": 4.0251, "step": 802 }, { "epoch": 0.09, "grad_norm": 0.9752542368413061, "learning_rate": 0.00099016260760513, "loss": 3.9477, "step": 803 }, { "epoch": 0.09, "grad_norm": 1.0107518548941548, "learning_rate": 0.000990125919571226, "loss": 3.9448, "step": 804 }, { "epoch": 0.09, "grad_norm": 1.1096800317514424, "learning_rate": 0.000990089163933794, "loss": 3.8131, "step": 805 }, { "epoch": 0.09, "grad_norm": 1.6176905104762076, "learning_rate": 0.000990052340697904, "loss": 4.0832, "step": 806 }, { "epoch": 0.09, "grad_norm": 1.134213600726209, "learning_rate": 0.0009900154498686349, "loss": 4.2693, "step": 807 }, { "epoch": 0.09, "grad_norm": 0.850471581902836, "learning_rate": 0.0009899784914510748, "loss": 3.5897, "step": 808 }, { "epoch": 0.09, "grad_norm": 1.285536995743178, "learning_rate": 0.0009899414654503216, "loss": 4.0772, "step": 809 }, { "epoch": 0.09, "grad_norm": 0.9542309977895963, "learning_rate": 0.0009899043718714826, "loss": 3.7273, "step": 810 }, { "epoch": 0.09, "grad_norm": 1.0242560575682236, "learning_rate": 0.0009898672107196739, "loss": 4.1553, "step": 811 }, { "epoch": 0.09, "grad_norm": 2.0283671739658398, "learning_rate": 0.000989829982000021, "loss": 4.0134, "step": 812 }, { "epoch": 0.09, "grad_norm": 0.9105800453033387, "learning_rate": 0.000989792685717659, "loss": 3.6721, "step": 813 }, { "epoch": 0.09, "grad_norm": 1.0205731976565942, "learning_rate": 0.0009897553218777327, "loss": 4.1639, "step": 814 }, { "epoch": 0.09, "grad_norm": 0.9840457866716322, "learning_rate": 0.000989717890485395, "loss": 3.9441, "step": 815 }, { "epoch": 0.09, "grad_norm": 0.9845232997449745, "learning_rate": 0.0009896803915458094, "loss": 4.0387, "step": 816 }, { "epoch": 0.09, "grad_norm": 1.0526905024878224, "learning_rate": 0.0009896428250641479, "loss": 3.8775, "step": 817 }, { "epoch": 0.09, "grad_norm": 0.9491552905833132, "learning_rate": 0.000989605191045592, "loss": 3.9813, "step": 818 }, { "epoch": 0.09, "grad_norm": 0.9222606919245979, "learning_rate": 0.0009895674894953327, "loss": 3.8994, "step": 819 }, { "epoch": 0.09, "grad_norm": 0.8609930429913497, "learning_rate": 0.0009895297204185706, "loss": 3.946, "step": 820 }, { "epoch": 0.09, "grad_norm": 0.9271486263768111, "learning_rate": 0.0009894918838205145, "loss": 4.0584, "step": 821 }, { "epoch": 0.09, "grad_norm": 1.1179652988962563, "learning_rate": 0.0009894539797063837, "loss": 3.9282, "step": 822 }, { "epoch": 0.09, "grad_norm": 1.0673829988589105, "learning_rate": 0.0009894160080814061, "loss": 4.0913, "step": 823 }, { "epoch": 0.09, "grad_norm": 1.1792253913818123, "learning_rate": 0.0009893779689508194, "loss": 4.0481, "step": 824 }, { "epoch": 0.09, "grad_norm": 1.557280297610356, "learning_rate": 0.0009893398623198703, 
"loss": 3.9037, "step": 825 }, { "epoch": 0.09, "grad_norm": 1.0303765353183911, "learning_rate": 0.0009893016881938148, "loss": 4.2995, "step": 826 }, { "epoch": 0.09, "grad_norm": 1.0662828684673717, "learning_rate": 0.0009892634465779185, "loss": 3.8875, "step": 827 }, { "epoch": 0.09, "grad_norm": 0.9013754159793762, "learning_rate": 0.000989225137477456, "loss": 3.9274, "step": 828 }, { "epoch": 0.1, "grad_norm": 1.0670664254850324, "learning_rate": 0.000989186760897711, "loss": 3.9905, "step": 829 }, { "epoch": 0.1, "grad_norm": 2.4531558653745784, "learning_rate": 0.0009891483168439773, "loss": 4.0812, "step": 830 }, { "epoch": 0.1, "grad_norm": 1.0121252031629329, "learning_rate": 0.000989109805321557, "loss": 4.0183, "step": 831 }, { "epoch": 0.1, "grad_norm": 1.0401444028810551, "learning_rate": 0.0009890712263357626, "loss": 4.0018, "step": 832 }, { "epoch": 0.1, "grad_norm": 0.9562074112219895, "learning_rate": 0.000989032579891915, "loss": 4.0212, "step": 833 }, { "epoch": 0.1, "grad_norm": 1.0790867286609265, "learning_rate": 0.000988993865995345, "loss": 4.0656, "step": 834 }, { "epoch": 0.1, "grad_norm": 0.9600375726341359, "learning_rate": 0.000988955084651392, "loss": 4.0144, "step": 835 }, { "epoch": 0.1, "grad_norm": 1.0000204253716836, "learning_rate": 0.0009889162358654056, "loss": 3.9309, "step": 836 }, { "epoch": 0.1, "grad_norm": 0.861165516547268, "learning_rate": 0.000988877319642744, "loss": 4.0203, "step": 837 }, { "epoch": 0.1, "grad_norm": 1.016795584224561, "learning_rate": 0.000988838335988775, "loss": 3.9911, "step": 838 }, { "epoch": 0.1, "grad_norm": 1.3210403599451914, "learning_rate": 0.0009887992849088754, "loss": 4.1468, "step": 839 }, { "epoch": 0.1, "grad_norm": 0.9810663948603184, "learning_rate": 0.000988760166408432, "loss": 4.037, "step": 840 }, { "epoch": 0.1, "grad_norm": 0.9140102356291326, "learning_rate": 0.0009887209804928404, "loss": 3.9914, "step": 841 }, { "epoch": 0.1, "grad_norm": 2.5225827808760473, "learning_rate": 0.0009886817271675052, "loss": 3.8276, "step": 842 }, { "epoch": 0.1, "grad_norm": 0.86655829226022, "learning_rate": 0.000988642406437841, "loss": 4.0931, "step": 843 }, { "epoch": 0.1, "grad_norm": 0.8915003777704951, "learning_rate": 0.0009886030183092712, "loss": 4.063, "step": 844 }, { "epoch": 0.1, "grad_norm": 1.001193182588885, "learning_rate": 0.0009885635627872285, "loss": 4.1955, "step": 845 }, { "epoch": 0.1, "grad_norm": 0.8749579509123556, "learning_rate": 0.0009885240398771554, "loss": 3.8727, "step": 846 }, { "epoch": 0.1, "grad_norm": 0.9174336282191572, "learning_rate": 0.0009884844495845029, "loss": 3.7108, "step": 847 }, { "epoch": 0.1, "grad_norm": 0.9767350147353064, "learning_rate": 0.000988444791914732, "loss": 3.8084, "step": 848 }, { "epoch": 0.1, "grad_norm": 0.9490028396447484, "learning_rate": 0.0009884050668733126, "loss": 3.7333, "step": 849 }, { "epoch": 0.1, "grad_norm": 0.9128354970329658, "learning_rate": 0.0009883652744657244, "loss": 3.8161, "step": 850 }, { "epoch": 0.1, "grad_norm": 1.4251674169878141, "learning_rate": 0.0009883254146974554, "loss": 3.869, "step": 851 }, { "epoch": 0.1, "grad_norm": 1.2676229440569307, "learning_rate": 0.0009882854875740037, "loss": 3.8149, "step": 852 }, { "epoch": 0.1, "grad_norm": 1.0518737930407933, "learning_rate": 0.0009882454931008768, "loss": 3.945, "step": 853 }, { "epoch": 0.1, "grad_norm": 0.8419933816407473, "learning_rate": 0.0009882054312835907, "loss": 3.9068, "step": 854 }, { "epoch": 0.1, "grad_norm": 1.0270560328038532, 
"learning_rate": 0.0009881653021276715, "loss": 4.0392, "step": 855 }, { "epoch": 0.1, "grad_norm": 0.9956358241626961, "learning_rate": 0.0009881251056386541, "loss": 3.9998, "step": 856 }, { "epoch": 0.1, "grad_norm": 1.0648682651479946, "learning_rate": 0.000988084841822083, "loss": 3.9471, "step": 857 }, { "epoch": 0.1, "grad_norm": 1.212187276856189, "learning_rate": 0.0009880445106835117, "loss": 3.9788, "step": 858 }, { "epoch": 0.1, "grad_norm": 0.8813828334403914, "learning_rate": 0.000988004112228503, "loss": 3.8079, "step": 859 }, { "epoch": 0.1, "grad_norm": 0.9946388133677568, "learning_rate": 0.0009879636464626294, "loss": 4.0274, "step": 860 }, { "epoch": 0.1, "grad_norm": 0.9672115346545923, "learning_rate": 0.0009879231133914721, "loss": 3.8767, "step": 861 }, { "epoch": 0.1, "grad_norm": 1.0569634420713294, "learning_rate": 0.000987882513020622, "loss": 4.0523, "step": 862 }, { "epoch": 0.1, "grad_norm": 1.211133236457914, "learning_rate": 0.000987841845355679, "loss": 3.9364, "step": 863 }, { "epoch": 0.1, "grad_norm": 0.934838587017001, "learning_rate": 0.0009878011104022526, "loss": 4.1468, "step": 864 }, { "epoch": 0.1, "grad_norm": 4.132954560078843, "learning_rate": 0.0009877603081659614, "loss": 4.1508, "step": 865 }, { "epoch": 0.1, "grad_norm": 1.140751159603247, "learning_rate": 0.0009877194386524334, "loss": 3.9636, "step": 866 }, { "epoch": 0.1, "grad_norm": 1.0031415043052765, "learning_rate": 0.0009876785018673054, "loss": 3.7413, "step": 867 }, { "epoch": 0.1, "grad_norm": 1.3087147606184937, "learning_rate": 0.0009876374978162242, "loss": 3.9689, "step": 868 }, { "epoch": 0.1, "grad_norm": 0.969628456875773, "learning_rate": 0.0009875964265048452, "loss": 4.0432, "step": 869 }, { "epoch": 0.1, "grad_norm": 1.0141809866142528, "learning_rate": 0.0009875552879388336, "loss": 3.78, "step": 870 }, { "epoch": 0.1, "grad_norm": 0.853587675727436, "learning_rate": 0.000987514082123864, "loss": 4.0411, "step": 871 }, { "epoch": 0.1, "grad_norm": 0.9371715538706589, "learning_rate": 0.0009874728090656193, "loss": 3.8285, "step": 872 }, { "epoch": 0.1, "grad_norm": 0.983714183927287, "learning_rate": 0.0009874314687697927, "loss": 4.0886, "step": 873 }, { "epoch": 0.1, "grad_norm": 0.8491315684346135, "learning_rate": 0.0009873900612420866, "loss": 3.7661, "step": 874 }, { "epoch": 0.1, "grad_norm": 1.303370392847567, "learning_rate": 0.0009873485864882116, "loss": 4.0109, "step": 875 }, { "epoch": 0.1, "grad_norm": 0.9843773132718185, "learning_rate": 0.000987307044513889, "loss": 3.9235, "step": 876 }, { "epoch": 0.1, "grad_norm": 1.2089768001883556, "learning_rate": 0.0009872654353248486, "loss": 3.9614, "step": 877 }, { "epoch": 0.1, "grad_norm": 0.9459274126677216, "learning_rate": 0.0009872237589268295, "loss": 3.9068, "step": 878 }, { "epoch": 0.1, "grad_norm": 0.9471254680257389, "learning_rate": 0.00098718201532558, "loss": 3.9324, "step": 879 }, { "epoch": 0.1, "grad_norm": 1.11106568823311, "learning_rate": 0.0009871402045268582, "loss": 3.9746, "step": 880 }, { "epoch": 0.1, "grad_norm": 1.1287428425278907, "learning_rate": 0.000987098326536431, "loss": 3.9029, "step": 881 }, { "epoch": 0.1, "grad_norm": 0.9783137258621587, "learning_rate": 0.0009870563813600744, "loss": 3.8805, "step": 882 }, { "epoch": 0.1, "grad_norm": 0.9780148690051447, "learning_rate": 0.0009870143690035743, "loss": 4.0786, "step": 883 }, { "epoch": 0.1, "grad_norm": 1.068655865522098, "learning_rate": 0.0009869722894727251, "loss": 3.8926, "step": 884 }, { "epoch": 0.1, 
"grad_norm": 1.048614788764977, "learning_rate": 0.0009869301427733314, "loss": 3.87, "step": 885 }, { "epoch": 0.1, "grad_norm": 1.4846155192388608, "learning_rate": 0.000986887928911206, "loss": 4.071, "step": 886 }, { "epoch": 0.1, "grad_norm": 1.3897928309650462, "learning_rate": 0.0009868456478921719, "loss": 3.9234, "step": 887 }, { "epoch": 0.1, "grad_norm": 1.5798116263426913, "learning_rate": 0.0009868032997220608, "loss": 4.013, "step": 888 }, { "epoch": 0.1, "grad_norm": 0.9463328064283754, "learning_rate": 0.0009867608844067136, "loss": 3.9455, "step": 889 }, { "epoch": 0.1, "grad_norm": 1.5295475704882031, "learning_rate": 0.000986718401951981, "loss": 3.954, "step": 890 }, { "epoch": 0.1, "grad_norm": 0.9136178718754765, "learning_rate": 0.0009866758523637228, "loss": 3.9869, "step": 891 }, { "epoch": 0.1, "grad_norm": 1.0605046744467053, "learning_rate": 0.0009866332356478075, "loss": 3.8271, "step": 892 }, { "epoch": 0.1, "grad_norm": 1.1290090070732615, "learning_rate": 0.000986590551810113, "loss": 3.8631, "step": 893 }, { "epoch": 0.1, "grad_norm": 1.1679820681090167, "learning_rate": 0.0009865478008565275, "loss": 4.0724, "step": 894 }, { "epoch": 0.1, "grad_norm": 1.0264440385505964, "learning_rate": 0.0009865049827929475, "loss": 3.9268, "step": 895 }, { "epoch": 0.1, "grad_norm": 1.1053938338315987, "learning_rate": 0.0009864620976252785, "loss": 4.199, "step": 896 }, { "epoch": 0.1, "grad_norm": 0.9976409799636035, "learning_rate": 0.000986419145359436, "loss": 4.0217, "step": 897 }, { "epoch": 0.1, "grad_norm": 1.0350905792400589, "learning_rate": 0.0009863761260013443, "loss": 4.0266, "step": 898 }, { "epoch": 0.1, "grad_norm": 0.9475569607656157, "learning_rate": 0.0009863330395569374, "loss": 3.9545, "step": 899 }, { "epoch": 0.1, "grad_norm": 0.9279763736236232, "learning_rate": 0.000986289886032158, "loss": 3.9865, "step": 900 }, { "epoch": 0.1, "grad_norm": 0.9382359932198319, "learning_rate": 0.0009862466654329582, "loss": 4.0322, "step": 901 }, { "epoch": 0.1, "grad_norm": 1.009630499341495, "learning_rate": 0.0009862033777652997, "loss": 4.0391, "step": 902 }, { "epoch": 0.1, "grad_norm": 1.1326568399400498, "learning_rate": 0.000986160023035153, "loss": 3.9343, "step": 903 }, { "epoch": 0.1, "grad_norm": 1.0673938861130499, "learning_rate": 0.0009861166012484982, "loss": 3.8234, "step": 904 }, { "epoch": 0.1, "grad_norm": 0.9578065327885132, "learning_rate": 0.0009860731124113247, "loss": 3.7129, "step": 905 }, { "epoch": 0.1, "grad_norm": 1.446209034150686, "learning_rate": 0.0009860295565296306, "loss": 4.0171, "step": 906 }, { "epoch": 0.1, "grad_norm": 0.9279686496506361, "learning_rate": 0.000985985933609424, "loss": 3.7854, "step": 907 }, { "epoch": 0.1, "grad_norm": 1.0134078190994902, "learning_rate": 0.0009859422436567212, "loss": 3.9741, "step": 908 }, { "epoch": 0.1, "grad_norm": 1.0887634824285362, "learning_rate": 0.000985898486677549, "loss": 3.9212, "step": 909 }, { "epoch": 0.1, "grad_norm": 1.0777712494328513, "learning_rate": 0.0009858546626779425, "loss": 3.9345, "step": 910 }, { "epoch": 0.1, "grad_norm": 11.825139921093331, "learning_rate": 0.0009858107716639464, "loss": 4.0774, "step": 911 }, { "epoch": 0.1, "grad_norm": 0.9989047995884904, "learning_rate": 0.000985766813641615, "loss": 3.9242, "step": 912 }, { "epoch": 0.1, "grad_norm": 1.049722250373986, "learning_rate": 0.0009857227886170112, "loss": 3.8436, "step": 913 }, { "epoch": 0.1, "grad_norm": 1.5195700925841094, "learning_rate": 0.0009856786965962074, "loss": 3.83, 
"step": 914 }, { "epoch": 0.1, "grad_norm": 1.126797217352382, "learning_rate": 0.0009856345375852853, "loss": 4.0606, "step": 915 }, { "epoch": 0.11, "grad_norm": 1.0172499410184732, "learning_rate": 0.0009855903115903357, "loss": 4.0294, "step": 916 }, { "epoch": 0.11, "grad_norm": 0.9889958979682758, "learning_rate": 0.0009855460186174588, "loss": 4.099, "step": 917 }, { "epoch": 0.11, "grad_norm": 1.138362331823372, "learning_rate": 0.000985501658672764, "loss": 3.8066, "step": 918 }, { "epoch": 0.11, "grad_norm": 0.9452156896068032, "learning_rate": 0.0009854572317623698, "loss": 3.8519, "step": 919 }, { "epoch": 0.11, "grad_norm": 0.9880588692204164, "learning_rate": 0.0009854127378924043, "loss": 4.004, "step": 920 }, { "epoch": 0.11, "grad_norm": 1.4311088715173952, "learning_rate": 0.0009853681770690043, "loss": 3.9666, "step": 921 }, { "epoch": 0.11, "grad_norm": 1.1277321117183687, "learning_rate": 0.0009853235492983164, "loss": 3.8508, "step": 922 }, { "epoch": 0.11, "grad_norm": 1.2975300754835295, "learning_rate": 0.000985278854586496, "loss": 3.7989, "step": 923 }, { "epoch": 0.11, "grad_norm": 1.173906149678658, "learning_rate": 0.0009852340929397076, "loss": 3.8196, "step": 924 }, { "epoch": 0.11, "grad_norm": 0.9721975277088422, "learning_rate": 0.0009851892643641257, "loss": 3.8182, "step": 925 }, { "epoch": 0.11, "grad_norm": 1.088509826807722, "learning_rate": 0.000985144368865933, "loss": 4.011, "step": 926 }, { "epoch": 0.11, "grad_norm": 0.9749600605983302, "learning_rate": 0.0009850994064513226, "loss": 4.0598, "step": 927 }, { "epoch": 0.11, "grad_norm": 0.9711442662062042, "learning_rate": 0.000985054377126496, "loss": 3.8963, "step": 928 }, { "epoch": 0.11, "grad_norm": 0.9947141307376325, "learning_rate": 0.0009850092808976639, "loss": 3.9325, "step": 929 }, { "epoch": 0.11, "grad_norm": 2.927100842140584, "learning_rate": 0.0009849641177710467, "loss": 4.2408, "step": 930 }, { "epoch": 0.11, "grad_norm": 1.0760633188767201, "learning_rate": 0.0009849188877528736, "loss": 4.2261, "step": 931 }, { "epoch": 0.11, "grad_norm": 1.0243260241102548, "learning_rate": 0.0009848735908493834, "loss": 4.01, "step": 932 }, { "epoch": 0.11, "grad_norm": 1.2803498807092963, "learning_rate": 0.0009848282270668238, "loss": 4.0896, "step": 933 }, { "epoch": 0.11, "grad_norm": 0.9634594293415681, "learning_rate": 0.000984782796411452, "loss": 3.7973, "step": 934 }, { "epoch": 0.11, "grad_norm": 1.0590276488895651, "learning_rate": 0.0009847372988895343, "loss": 3.8941, "step": 935 }, { "epoch": 0.11, "grad_norm": 0.9590614112339397, "learning_rate": 0.000984691734507346, "loss": 3.9082, "step": 936 }, { "epoch": 0.11, "grad_norm": 0.9621083171471031, "learning_rate": 0.0009846461032711723, "loss": 3.6889, "step": 937 }, { "epoch": 0.11, "grad_norm": 1.1535105086169881, "learning_rate": 0.0009846004051873066, "loss": 3.9091, "step": 938 }, { "epoch": 0.11, "grad_norm": 0.9910555244318018, "learning_rate": 0.0009845546402620523, "loss": 3.9293, "step": 939 }, { "epoch": 0.11, "grad_norm": 1.1437731125629356, "learning_rate": 0.0009845088085017218, "loss": 4.0819, "step": 940 }, { "epoch": 0.11, "grad_norm": 1.0169184615801394, "learning_rate": 0.000984462909912637, "loss": 4.0344, "step": 941 }, { "epoch": 0.11, "grad_norm": 3.2667335475860932, "learning_rate": 0.0009844169445011282, "loss": 4.0599, "step": 942 }, { "epoch": 0.11, "grad_norm": 0.96515354446607, "learning_rate": 0.0009843709122735358, "loss": 3.95, "step": 943 }, { "epoch": 0.11, "grad_norm": 1.0180271021484424, 
"learning_rate": 0.000984324813236209, "loss": 3.8148, "step": 944 }, { "epoch": 0.11, "grad_norm": 1.124646891645496, "learning_rate": 0.0009842786473955062, "loss": 3.9023, "step": 945 }, { "epoch": 0.11, "grad_norm": 1.1022123103648478, "learning_rate": 0.0009842324147577954, "loss": 3.7699, "step": 946 }, { "epoch": 0.11, "grad_norm": 0.8641990303714225, "learning_rate": 0.0009841861153294534, "loss": 4.0165, "step": 947 }, { "epoch": 0.11, "grad_norm": 1.1137558023121101, "learning_rate": 0.000984139749116866, "loss": 3.8662, "step": 948 }, { "epoch": 0.11, "grad_norm": 1.0550813438030957, "learning_rate": 0.0009840933161264288, "loss": 4.0374, "step": 949 }, { "epoch": 0.11, "grad_norm": 1.2801259017187434, "learning_rate": 0.0009840468163645462, "loss": 3.968, "step": 950 }, { "epoch": 0.11, "grad_norm": 1.0581138918386634, "learning_rate": 0.0009840002498376322, "loss": 4.2772, "step": 951 }, { "epoch": 0.11, "grad_norm": 1.468404368530255, "learning_rate": 0.0009839536165521094, "loss": 3.8075, "step": 952 }, { "epoch": 0.11, "grad_norm": 1.0579489195975014, "learning_rate": 0.0009839069165144103, "loss": 4.0129, "step": 953 }, { "epoch": 0.11, "grad_norm": 1.1827342868024504, "learning_rate": 0.0009838601497309763, "loss": 3.8264, "step": 954 }, { "epoch": 0.11, "grad_norm": 0.9053441946748603, "learning_rate": 0.0009838133162082578, "loss": 3.8842, "step": 955 }, { "epoch": 0.11, "grad_norm": 0.9006873716267609, "learning_rate": 0.0009837664159527146, "loss": 3.9033, "step": 956 }, { "epoch": 0.11, "grad_norm": 1.0432290926220478, "learning_rate": 0.0009837194489708157, "loss": 4.0429, "step": 957 }, { "epoch": 0.11, "grad_norm": 2.594864037385885, "learning_rate": 0.0009836724152690395, "loss": 3.9817, "step": 958 }, { "epoch": 0.11, "grad_norm": 1.0057062435935376, "learning_rate": 0.0009836253148538731, "loss": 3.9606, "step": 959 }, { "epoch": 0.11, "grad_norm": 1.0305832027166706, "learning_rate": 0.0009835781477318133, "loss": 4.0169, "step": 960 }, { "epoch": 0.11, "grad_norm": 1.0705518913802718, "learning_rate": 0.000983530913909366, "loss": 4.1068, "step": 961 }, { "epoch": 0.11, "grad_norm": 0.9899523926793627, "learning_rate": 0.0009834836133930458, "loss": 4.0017, "step": 962 }, { "epoch": 0.11, "grad_norm": 1.1864832209769205, "learning_rate": 0.0009834362461893773, "loss": 3.9282, "step": 963 }, { "epoch": 0.11, "grad_norm": 1.1234925733797614, "learning_rate": 0.0009833888123048937, "loss": 3.9176, "step": 964 }, { "epoch": 0.11, "grad_norm": 1.685584816776114, "learning_rate": 0.0009833413117461378, "loss": 3.8626, "step": 965 }, { "epoch": 0.11, "grad_norm": 1.0112552710642482, "learning_rate": 0.0009832937445196613, "loss": 3.7683, "step": 966 }, { "epoch": 0.11, "grad_norm": 0.9565318131956579, "learning_rate": 0.000983246110632025, "loss": 4.139, "step": 967 }, { "epoch": 0.11, "grad_norm": 1.3075776421652565, "learning_rate": 0.0009831984100897994, "loss": 3.9563, "step": 968 }, { "epoch": 0.11, "grad_norm": 0.8845577896104776, "learning_rate": 0.0009831506428995636, "loss": 4.0439, "step": 969 }, { "epoch": 0.11, "grad_norm": 0.894457684118974, "learning_rate": 0.0009831028090679064, "loss": 3.8352, "step": 970 }, { "epoch": 0.11, "grad_norm": 1.1072904092835272, "learning_rate": 0.0009830549086014254, "loss": 3.9061, "step": 971 }, { "epoch": 0.11, "grad_norm": 1.0069951502626817, "learning_rate": 0.0009830069415067276, "loss": 3.7084, "step": 972 }, { "epoch": 0.11, "grad_norm": 0.9255986894818868, "learning_rate": 0.0009829589077904293, "loss": 3.8614, 
"step": 973 }, { "epoch": 0.11, "grad_norm": 1.6985012498781358, "learning_rate": 0.0009829108074591556, "loss": 3.7893, "step": 974 }, { "epoch": 0.11, "grad_norm": 1.2817526819866831, "learning_rate": 0.0009828626405195412, "loss": 3.571, "step": 975 }, { "epoch": 0.11, "grad_norm": 0.89370450580645, "learning_rate": 0.0009828144069782296, "loss": 4.0162, "step": 976 }, { "epoch": 0.11, "grad_norm": 0.8702613444039932, "learning_rate": 0.0009827661068418738, "loss": 3.9715, "step": 977 }, { "epoch": 0.11, "grad_norm": 0.8421932244146796, "learning_rate": 0.0009827177401171361, "loss": 3.9282, "step": 978 }, { "epoch": 0.11, "grad_norm": 1.2138194095477044, "learning_rate": 0.0009826693068106876, "loss": 3.8928, "step": 979 }, { "epoch": 0.11, "grad_norm": 1.5299901798749944, "learning_rate": 0.0009826208069292086, "loss": 3.907, "step": 980 }, { "epoch": 0.11, "grad_norm": 0.871226334841303, "learning_rate": 0.000982572240479389, "loss": 3.9077, "step": 981 }, { "epoch": 0.11, "grad_norm": 0.922406486616021, "learning_rate": 0.0009825236074679274, "loss": 3.9062, "step": 982 }, { "epoch": 0.11, "grad_norm": 1.0249492350825813, "learning_rate": 0.0009824749079015318, "loss": 4.019, "step": 983 }, { "epoch": 0.11, "grad_norm": 1.0086605391722083, "learning_rate": 0.0009824261417869197, "loss": 3.7347, "step": 984 }, { "epoch": 0.11, "grad_norm": 0.9512075031329892, "learning_rate": 0.000982377309130817, "loss": 3.9904, "step": 985 }, { "epoch": 0.11, "grad_norm": 0.8919191046963494, "learning_rate": 0.0009823284099399596, "loss": 3.9638, "step": 986 }, { "epoch": 0.11, "grad_norm": 0.9256174428816256, "learning_rate": 0.000982279444221092, "loss": 4.0487, "step": 987 }, { "epoch": 0.11, "grad_norm": 0.9344695530765355, "learning_rate": 0.0009822304119809682, "loss": 3.8153, "step": 988 }, { "epoch": 0.11, "grad_norm": 1.1618597474833134, "learning_rate": 0.0009821813132263513, "loss": 3.7621, "step": 989 }, { "epoch": 0.11, "grad_norm": 1.1694020693267522, "learning_rate": 0.0009821321479640134, "loss": 4.0705, "step": 990 }, { "epoch": 0.11, "grad_norm": 1.1910474258879573, "learning_rate": 0.0009820829162007357, "loss": 3.8143, "step": 991 }, { "epoch": 0.11, "grad_norm": 1.0254334914806764, "learning_rate": 0.0009820336179433091, "loss": 4.0099, "step": 992 }, { "epoch": 0.11, "grad_norm": 0.9579173060420042, "learning_rate": 0.0009819842531985337, "loss": 3.977, "step": 993 }, { "epoch": 0.11, "grad_norm": 0.9358655906582398, "learning_rate": 0.0009819348219732176, "loss": 3.9295, "step": 994 }, { "epoch": 0.11, "grad_norm": 1.444047997788228, "learning_rate": 0.0009818853242741796, "loss": 3.9272, "step": 995 }, { "epoch": 0.11, "grad_norm": 1.0017835733805611, "learning_rate": 0.0009818357601082467, "loss": 3.715, "step": 996 }, { "epoch": 0.11, "grad_norm": 0.9954102067885197, "learning_rate": 0.0009817861294822551, "loss": 3.9233, "step": 997 }, { "epoch": 0.11, "grad_norm": 1.153248118935177, "learning_rate": 0.0009817364324030506, "loss": 3.8396, "step": 998 }, { "epoch": 0.11, "grad_norm": 0.8938412800416489, "learning_rate": 0.0009816866688774882, "loss": 4.0166, "step": 999 }, { "epoch": 0.11, "grad_norm": 1.0703556677998551, "learning_rate": 0.0009816368389124314, "loss": 4.1067, "step": 1000 }, { "epoch": 0.11, "grad_norm": 0.8779486440666361, "learning_rate": 0.0009815869425147537, "loss": 3.8926, "step": 1001 }, { "epoch": 0.11, "grad_norm": 0.8402222763064704, "learning_rate": 0.0009815369796913373, "loss": 4.0144, "step": 1002 }, { "epoch": 0.12, "grad_norm": 
0.8720916938147475, "learning_rate": 0.0009814869504490731, "loss": 3.9013, "step": 1003 }, { "epoch": 0.12, "grad_norm": 1.0503146728479664, "learning_rate": 0.0009814368547948623, "loss": 3.917, "step": 1004 }, { "epoch": 0.12, "grad_norm": 1.2664811563250036, "learning_rate": 0.0009813866927356142, "loss": 4.0945, "step": 1005 }, { "epoch": 0.12, "grad_norm": 0.850409310378976, "learning_rate": 0.000981336464278248, "loss": 3.9544, "step": 1006 }, { "epoch": 0.12, "grad_norm": 3.1562403798935703, "learning_rate": 0.0009812861694296917, "loss": 4.1595, "step": 1007 }, { "epoch": 0.12, "grad_norm": 0.8611576242078582, "learning_rate": 0.0009812358081968825, "loss": 3.816, "step": 1008 }, { "epoch": 0.12, "grad_norm": 0.9601032419503073, "learning_rate": 0.0009811853805867668, "loss": 3.8064, "step": 1009 }, { "epoch": 0.12, "grad_norm": 1.002170866931476, "learning_rate": 0.0009811348866063, "loss": 3.9235, "step": 1010 }, { "epoch": 0.12, "grad_norm": 1.0656947634857703, "learning_rate": 0.0009810843262624467, "loss": 4.181, "step": 1011 }, { "epoch": 0.12, "grad_norm": 1.0753461893624978, "learning_rate": 0.000981033699562181, "loss": 4.2012, "step": 1012 }, { "epoch": 0.12, "grad_norm": 1.1098913276962474, "learning_rate": 0.0009809830065124858, "loss": 4.0372, "step": 1013 }, { "epoch": 0.12, "grad_norm": 0.8169105624953324, "learning_rate": 0.0009809322471203534, "loss": 3.8732, "step": 1014 }, { "epoch": 0.12, "grad_norm": 0.4528576114590877, "learning_rate": 0.0009808814213927847, "loss": 3.9142, "step": 1015 }, { "epoch": 0.12, "grad_norm": 0.9777098750022875, "learning_rate": 0.0009808305293367904, "loss": 4.0211, "step": 1016 }, { "epoch": 0.12, "grad_norm": 0.9782073638625153, "learning_rate": 0.00098077957095939, "loss": 3.9494, "step": 1017 }, { "epoch": 0.12, "grad_norm": 8.065703197536335, "learning_rate": 0.0009807285462676122, "loss": 3.9032, "step": 1018 }, { "epoch": 0.12, "grad_norm": 0.9595601135376, "learning_rate": 0.0009806774552684953, "loss": 3.9296, "step": 1019 }, { "epoch": 0.12, "grad_norm": 1.0802493092494738, "learning_rate": 0.0009806262979690857, "loss": 3.7423, "step": 1020 }, { "epoch": 0.12, "grad_norm": 0.9486785369449944, "learning_rate": 0.00098057507437644, "loss": 3.7127, "step": 1021 }, { "epoch": 0.12, "grad_norm": 1.0014242351339855, "learning_rate": 0.0009805237844976234, "loss": 3.8458, "step": 1022 }, { "epoch": 0.12, "grad_norm": 1.0327791793212355, "learning_rate": 0.00098047242833971, "loss": 3.922, "step": 1023 }, { "epoch": 0.12, "grad_norm": 1.2100402984317076, "learning_rate": 0.0009804210059097841, "loss": 3.9106, "step": 1024 }, { "epoch": 0.12, "grad_norm": 0.8817661392560533, "learning_rate": 0.0009803695172149382, "loss": 3.8888, "step": 1025 }, { "epoch": 0.12, "grad_norm": 0.8793720280109008, "learning_rate": 0.0009803179622622738, "loss": 3.8335, "step": 1026 }, { "epoch": 0.12, "grad_norm": 0.9229476187394094, "learning_rate": 0.0009802663410589023, "loss": 4.0084, "step": 1027 }, { "epoch": 0.12, "grad_norm": 0.90576892869129, "learning_rate": 0.0009802146536119437, "loss": 3.8512, "step": 1028 }, { "epoch": 0.12, "grad_norm": 0.8997648987761988, "learning_rate": 0.0009801628999285274, "loss": 3.8298, "step": 1029 }, { "epoch": 0.12, "grad_norm": 0.8960727440752199, "learning_rate": 0.000980111080015792, "loss": 4.0569, "step": 1030 }, { "epoch": 0.12, "grad_norm": 1.1329530184939047, "learning_rate": 0.0009800591938808846, "loss": 3.6638, "step": 1031 }, { "epoch": 0.12, "grad_norm": 1.0287127995449656, "learning_rate": 
0.0009800072415309623, "loss": 4.0796, "step": 1032 }, { "epoch": 0.12, "grad_norm": 0.9757130951595525, "learning_rate": 0.0009799552229731907, "loss": 3.6638, "step": 1033 }, { "epoch": 0.12, "grad_norm": 1.1210693178755677, "learning_rate": 0.0009799031382147448, "loss": 4.0427, "step": 1034 }, { "epoch": 0.12, "grad_norm": 1.0815215580001027, "learning_rate": 0.000979850987262809, "loss": 3.8554, "step": 1035 }, { "epoch": 0.12, "grad_norm": 0.9233147763416208, "learning_rate": 0.0009797987701245761, "loss": 3.7957, "step": 1036 }, { "epoch": 0.12, "grad_norm": 0.849507997381606, "learning_rate": 0.0009797464868072487, "loss": 3.9477, "step": 1037 }, { "epoch": 0.12, "grad_norm": 1.053247892875251, "learning_rate": 0.0009796941373180384, "loss": 4.1077, "step": 1038 }, { "epoch": 0.12, "grad_norm": 1.0125863073977779, "learning_rate": 0.0009796417216641653, "loss": 4.0688, "step": 1039 }, { "epoch": 0.12, "grad_norm": 0.9355725066955353, "learning_rate": 0.00097958923985286, "loss": 3.8537, "step": 1040 }, { "epoch": 0.12, "grad_norm": 1.0102238144367948, "learning_rate": 0.0009795366918913604, "loss": 3.9841, "step": 1041 }, { "epoch": 0.12, "grad_norm": 1.286312791698429, "learning_rate": 0.0009794840777869152, "loss": 3.9493, "step": 1042 }, { "epoch": 0.12, "grad_norm": 1.0009944917807516, "learning_rate": 0.0009794313975467813, "loss": 4.1359, "step": 1043 }, { "epoch": 0.12, "grad_norm": 1.7019179369903037, "learning_rate": 0.0009793786511782248, "loss": 3.8608, "step": 1044 }, { "epoch": 0.12, "grad_norm": 1.1226239568174194, "learning_rate": 0.000979325838688521, "loss": 3.9338, "step": 1045 }, { "epoch": 0.12, "grad_norm": 1.04998013546436, "learning_rate": 0.000979272960084955, "loss": 4.0046, "step": 1046 }, { "epoch": 0.12, "grad_norm": 23.127785065075507, "learning_rate": 0.0009792200153748195, "loss": 3.8931, "step": 1047 }, { "epoch": 0.12, "grad_norm": 1.0233491385564062, "learning_rate": 0.0009791670045654177, "loss": 3.9848, "step": 1048 }, { "epoch": 0.12, "grad_norm": 0.9708739678339066, "learning_rate": 0.0009791139276640614, "loss": 3.7089, "step": 1049 }, { "epoch": 0.12, "grad_norm": 1.3131747514785732, "learning_rate": 0.0009790607846780718, "loss": 3.9716, "step": 1050 }, { "epoch": 0.12, "grad_norm": 0.9130761288766097, "learning_rate": 0.0009790075756147783, "loss": 3.9495, "step": 1051 }, { "epoch": 0.12, "grad_norm": 1.002724071290709, "learning_rate": 0.0009789543004815207, "loss": 3.888, "step": 1052 }, { "epoch": 0.12, "grad_norm": 1.141595559541945, "learning_rate": 0.000978900959285647, "loss": 4.1737, "step": 1053 }, { "epoch": 0.12, "grad_norm": 5.397410353361243, "learning_rate": 0.0009788475520345146, "loss": 4.1982, "step": 1054 }, { "epoch": 0.12, "grad_norm": 1.0184457212853797, "learning_rate": 0.0009787940787354902, "loss": 3.9478, "step": 1055 }, { "epoch": 0.12, "grad_norm": 1.0252821127272502, "learning_rate": 0.000978740539395949, "loss": 4.2953, "step": 1056 }, { "epoch": 0.12, "grad_norm": 1.1329847435647367, "learning_rate": 0.0009786869340232761, "loss": 4.1711, "step": 1057 }, { "epoch": 0.12, "grad_norm": 1.4394819911694596, "learning_rate": 0.0009786332626248655, "loss": 3.8764, "step": 1058 }, { "epoch": 0.12, "grad_norm": 1.0609581740236735, "learning_rate": 0.0009785795252081199, "loss": 4.0235, "step": 1059 }, { "epoch": 0.12, "grad_norm": 1.1760910160225522, "learning_rate": 0.000978525721780451, "loss": 4.0556, "step": 1060 }, { "epoch": 0.12, "grad_norm": 1.0769567091621046, "learning_rate": 0.0009784718523492804, "loss": 
3.8768, "step": 1061 }, { "epoch": 0.12, "grad_norm": 0.967710877572883, "learning_rate": 0.0009784179169220384, "loss": 4.1386, "step": 1062 }, { "epoch": 0.12, "grad_norm": 1.4635240714726403, "learning_rate": 0.0009783639155061643, "loss": 3.7415, "step": 1063 }, { "epoch": 0.12, "grad_norm": 1.3595409786574135, "learning_rate": 0.0009783098481091063, "loss": 4.0034, "step": 1064 }, { "epoch": 0.12, "grad_norm": 3.019660819496443, "learning_rate": 0.0009782557147383225, "loss": 3.8662, "step": 1065 }, { "epoch": 0.12, "grad_norm": 0.9728337803487399, "learning_rate": 0.0009782015154012789, "loss": 3.9876, "step": 1066 }, { "epoch": 0.12, "grad_norm": 1.9812589155217257, "learning_rate": 0.0009781472501054517, "loss": 3.894, "step": 1067 }, { "epoch": 0.12, "grad_norm": 0.9911645329615667, "learning_rate": 0.0009780929188583256, "loss": 3.9516, "step": 1068 }, { "epoch": 0.12, "grad_norm": 0.9923902928553388, "learning_rate": 0.000978038521667395, "loss": 3.9346, "step": 1069 }, { "epoch": 0.12, "grad_norm": 1.3905833246413395, "learning_rate": 0.000977984058540162, "loss": 3.9289, "step": 1070 }, { "epoch": 0.12, "grad_norm": 0.9675031408698997, "learning_rate": 0.0009779295294841397, "loss": 3.8922, "step": 1071 }, { "epoch": 0.12, "grad_norm": 0.9389966918489403, "learning_rate": 0.0009778749345068487, "loss": 3.9755, "step": 1072 }, { "epoch": 0.12, "grad_norm": 1.405637937238413, "learning_rate": 0.00097782027361582, "loss": 3.9464, "step": 1073 }, { "epoch": 0.12, "grad_norm": 1.073691417837058, "learning_rate": 0.0009777655468185924, "loss": 3.8502, "step": 1074 }, { "epoch": 0.12, "grad_norm": 3.0378858820685037, "learning_rate": 0.0009777107541227147, "loss": 3.9764, "step": 1075 }, { "epoch": 0.12, "grad_norm": 0.9890756635492401, "learning_rate": 0.0009776558955357443, "loss": 3.9754, "step": 1076 }, { "epoch": 0.12, "grad_norm": 0.8296281014599762, "learning_rate": 0.0009776009710652483, "loss": 3.8537, "step": 1077 }, { "epoch": 0.12, "grad_norm": 1.7290123304984844, "learning_rate": 0.0009775459807188022, "loss": 3.8663, "step": 1078 }, { "epoch": 0.12, "grad_norm": 0.9817978258209319, "learning_rate": 0.0009774909245039909, "loss": 3.7691, "step": 1079 }, { "epoch": 0.12, "grad_norm": 0.8339342488787881, "learning_rate": 0.0009774358024284082, "loss": 3.9371, "step": 1080 }, { "epoch": 0.12, "grad_norm": 17.442307799141258, "learning_rate": 0.0009773806144996575, "loss": 3.8432, "step": 1081 }, { "epoch": 0.12, "grad_norm": 0.9967067697136237, "learning_rate": 0.0009773253607253507, "loss": 3.9011, "step": 1082 }, { "epoch": 0.12, "grad_norm": 1.0017458744696415, "learning_rate": 0.000977270041113109, "loss": 3.7896, "step": 1083 }, { "epoch": 0.12, "grad_norm": 0.9002236822287353, "learning_rate": 0.0009772146556705629, "loss": 3.9173, "step": 1084 }, { "epoch": 0.12, "grad_norm": 0.9977173807841384, "learning_rate": 0.0009771592044053512, "loss": 3.9252, "step": 1085 }, { "epoch": 0.12, "grad_norm": 1.0660577587375237, "learning_rate": 0.000977103687325123, "loss": 4.1303, "step": 1086 }, { "epoch": 0.12, "grad_norm": 1.0083702956163125, "learning_rate": 0.0009770481044375356, "loss": 3.7057, "step": 1087 }, { "epoch": 0.12, "grad_norm": 1.212960703024689, "learning_rate": 0.0009769924557502553, "loss": 4.0127, "step": 1088 }, { "epoch": 0.12, "grad_norm": 1.1826640503187567, "learning_rate": 0.0009769367412709585, "loss": 4.0358, "step": 1089 }, { "epoch": 0.12, "grad_norm": 1.1891628075347187, "learning_rate": 0.0009768809610073291, "loss": 3.8099, "step": 1090 }, { 
"epoch": 0.13, "grad_norm": 1.1849385218204642, "learning_rate": 0.0009768251149670614, "loss": 3.9771, "step": 1091 }, { "epoch": 0.13, "grad_norm": 1.0960542877740704, "learning_rate": 0.000976769203157858, "loss": 4.009, "step": 1092 }, { "epoch": 0.13, "grad_norm": 1.1200359925239947, "learning_rate": 0.0009767132255874315, "loss": 4.0382, "step": 1093 }, { "epoch": 0.13, "grad_norm": 1.739728366117808, "learning_rate": 0.0009766571822635022, "loss": 3.9109, "step": 1094 }, { "epoch": 0.13, "grad_norm": 1.1876864859921121, "learning_rate": 0.0009766010731938007, "loss": 4.0763, "step": 1095 }, { "epoch": 0.13, "grad_norm": 0.9969784528544836, "learning_rate": 0.0009765448983860658, "loss": 3.9442, "step": 1096 }, { "epoch": 0.13, "grad_norm": 0.9787198916271599, "learning_rate": 0.0009764886578480461, "loss": 3.8233, "step": 1097 }, { "epoch": 0.13, "grad_norm": 1.0066562113136273, "learning_rate": 0.0009764323515874986, "loss": 4.0883, "step": 1098 }, { "epoch": 0.13, "grad_norm": 0.9424351528688372, "learning_rate": 0.00097637597961219, "loss": 4.1467, "step": 1099 }, { "epoch": 0.13, "grad_norm": 0.9308179235296616, "learning_rate": 0.0009763195419298955, "loss": 3.905, "step": 1100 }, { "epoch": 0.13, "grad_norm": 0.8350404954052347, "learning_rate": 0.0009762630385483997, "loss": 3.9141, "step": 1101 }, { "epoch": 0.13, "grad_norm": 0.9793223355773603, "learning_rate": 0.000976206469475496, "loss": 3.7877, "step": 1102 }, { "epoch": 0.13, "grad_norm": 0.9135308758085584, "learning_rate": 0.0009761498347189872, "loss": 4.0504, "step": 1103 }, { "epoch": 0.13, "grad_norm": 1.079484610301383, "learning_rate": 0.000976093134286685, "loss": 4.0459, "step": 1104 }, { "epoch": 0.13, "grad_norm": 0.978483905972476, "learning_rate": 0.0009760363681864102, "loss": 3.9595, "step": 1105 }, { "epoch": 0.13, "grad_norm": 0.8603675204627352, "learning_rate": 0.0009759795364259923, "loss": 3.8813, "step": 1106 }, { "epoch": 0.13, "grad_norm": 3.318098850626356, "learning_rate": 0.0009759226390132704, "loss": 3.911, "step": 1107 }, { "epoch": 0.13, "grad_norm": 0.7861355971294172, "learning_rate": 0.0009758656759560923, "loss": 3.796, "step": 1108 }, { "epoch": 0.13, "grad_norm": 0.819559094728256, "learning_rate": 0.0009758086472623151, "loss": 3.8339, "step": 1109 }, { "epoch": 0.13, "grad_norm": 0.9790363418652527, "learning_rate": 0.0009757515529398047, "loss": 3.7797, "step": 1110 }, { "epoch": 0.13, "grad_norm": 0.8292137228396096, "learning_rate": 0.0009756943929964363, "loss": 3.8246, "step": 1111 }, { "epoch": 0.13, "grad_norm": 1.4099819499835757, "learning_rate": 0.0009756371674400939, "loss": 3.8785, "step": 1112 }, { "epoch": 0.13, "grad_norm": 1.196210265983327, "learning_rate": 0.0009755798762786707, "loss": 3.9509, "step": 1113 }, { "epoch": 0.13, "grad_norm": 1.5932713830996734, "learning_rate": 0.0009755225195200689, "loss": 3.7561, "step": 1114 }, { "epoch": 0.13, "grad_norm": 0.8865499541842148, "learning_rate": 0.0009754650971722, "loss": 4.0031, "step": 1115 }, { "epoch": 0.13, "grad_norm": 1.050241254972174, "learning_rate": 0.000975407609242984, "loss": 3.8365, "step": 1116 }, { "epoch": 0.13, "grad_norm": 0.979100013549394, "learning_rate": 0.0009753500557403504, "loss": 3.924, "step": 1117 }, { "epoch": 0.13, "grad_norm": 0.9226737305817291, "learning_rate": 0.0009752924366722376, "loss": 3.9107, "step": 1118 }, { "epoch": 0.13, "grad_norm": 1.2910312605123768, "learning_rate": 0.0009752347520465931, "loss": 4.0942, "step": 1119 }, { "epoch": 0.13, "grad_norm": 
1.0363270903051176, "learning_rate": 0.0009751770018713734, "loss": 4.0032, "step": 1120 }, { "epoch": 0.13, "grad_norm": 1.0710260221815096, "learning_rate": 0.0009751191861545439, "loss": 3.9134, "step": 1121 }, { "epoch": 0.13, "grad_norm": 0.9590511023952412, "learning_rate": 0.0009750613049040792, "loss": 4.1202, "step": 1122 }, { "epoch": 0.13, "grad_norm": 1.0740443034445635, "learning_rate": 0.0009750033581279632, "loss": 3.8447, "step": 1123 }, { "epoch": 0.13, "grad_norm": 1.2479679222789966, "learning_rate": 0.0009749453458341882, "loss": 4.0468, "step": 1124 }, { "epoch": 0.13, "grad_norm": 1.0708737222341365, "learning_rate": 0.000974887268030756, "loss": 4.0188, "step": 1125 }, { "epoch": 0.13, "grad_norm": 0.9324155150206787, "learning_rate": 0.0009748291247256774, "loss": 3.9363, "step": 1126 }, { "epoch": 0.13, "grad_norm": 1.0026845219515736, "learning_rate": 0.000974770915926972, "loss": 4.0482, "step": 1127 }, { "epoch": 0.13, "grad_norm": 1.2178892594430921, "learning_rate": 0.0009747126416426688, "loss": 3.9245, "step": 1128 }, { "epoch": 0.13, "grad_norm": 0.9349787746654444, "learning_rate": 0.0009746543018808057, "loss": 3.8602, "step": 1129 }, { "epoch": 0.13, "grad_norm": 1.3786632824107816, "learning_rate": 0.000974595896649429, "loss": 3.952, "step": 1130 }, { "epoch": 0.13, "grad_norm": 0.9194498173676063, "learning_rate": 0.0009745374259565953, "loss": 3.855, "step": 1131 }, { "epoch": 0.13, "grad_norm": 1.0000839234551935, "learning_rate": 0.0009744788898103691, "loss": 3.7253, "step": 1132 }, { "epoch": 0.13, "grad_norm": 0.8219108142358327, "learning_rate": 0.0009744202882188245, "loss": 3.8803, "step": 1133 }, { "epoch": 0.13, "grad_norm": 1.6275263292561466, "learning_rate": 0.0009743616211900443, "loss": 4.3225, "step": 1134 }, { "epoch": 0.13, "grad_norm": 0.8770319765623361, "learning_rate": 0.0009743028887321206, "loss": 4.0887, "step": 1135 }, { "epoch": 0.13, "grad_norm": 0.9949940276746041, "learning_rate": 0.0009742440908531545, "loss": 3.8521, "step": 1136 }, { "epoch": 0.13, "grad_norm": 1.1078975560683888, "learning_rate": 0.0009741852275612559, "loss": 3.926, "step": 1137 }, { "epoch": 0.13, "grad_norm": 0.9353909686477906, "learning_rate": 0.0009741262988645441, "loss": 3.5989, "step": 1138 }, { "epoch": 0.13, "grad_norm": 1.0736536842874682, "learning_rate": 0.000974067304771147, "loss": 4.1232, "step": 1139 }, { "epoch": 0.13, "grad_norm": 0.9971858344764342, "learning_rate": 0.0009740082452892017, "loss": 3.8146, "step": 1140 }, { "epoch": 0.13, "grad_norm": 0.8552780525329229, "learning_rate": 0.0009739491204268545, "loss": 3.7645, "step": 1141 }, { "epoch": 0.13, "grad_norm": 1.3475018234506315, "learning_rate": 0.0009738899301922602, "loss": 3.8519, "step": 1142 }, { "epoch": 0.13, "grad_norm": 0.9430418954860429, "learning_rate": 0.0009738306745935833, "loss": 3.6373, "step": 1143 }, { "epoch": 0.13, "grad_norm": 1.0411807247329004, "learning_rate": 0.0009737713536389969, "loss": 4.0995, "step": 1144 }, { "epoch": 0.13, "grad_norm": 1.7056517923553942, "learning_rate": 0.0009737119673366832, "loss": 3.9293, "step": 1145 }, { "epoch": 0.13, "grad_norm": 1.4654860798799498, "learning_rate": 0.0009736525156948333, "loss": 4.0413, "step": 1146 }, { "epoch": 0.13, "grad_norm": 1.1876264805564414, "learning_rate": 0.0009735929987216476, "loss": 3.7652, "step": 1147 }, { "epoch": 0.13, "grad_norm": 0.9931410616179552, "learning_rate": 0.0009735334164253351, "loss": 3.9004, "step": 1148 }, { "epoch": 0.13, "grad_norm": 1.3799693589094666, 
"learning_rate": 0.0009734737688141142, "loss": 3.9107, "step": 1149 }, { "epoch": 0.13, "grad_norm": 1.0810662208730164, "learning_rate": 0.0009734140558962123, "loss": 3.8137, "step": 1150 }, { "epoch": 0.13, "grad_norm": 1.0751209991460242, "learning_rate": 0.0009733542776798653, "loss": 3.9201, "step": 1151 }, { "epoch": 0.13, "grad_norm": 0.9391578210044854, "learning_rate": 0.0009732944341733188, "loss": 3.8752, "step": 1152 }, { "epoch": 0.13, "grad_norm": 0.9103164985905934, "learning_rate": 0.0009732345253848267, "loss": 3.9437, "step": 1153 }, { "epoch": 0.13, "grad_norm": 0.8805930826421503, "learning_rate": 0.0009731745513226526, "loss": 3.8616, "step": 1154 }, { "epoch": 0.13, "grad_norm": 0.9862488819729535, "learning_rate": 0.0009731145119950686, "loss": 3.8947, "step": 1155 }, { "epoch": 0.13, "grad_norm": 0.8701133860437691, "learning_rate": 0.0009730544074103562, "loss": 3.9924, "step": 1156 }, { "epoch": 0.13, "grad_norm": 0.8806260045882947, "learning_rate": 0.0009729942375768055, "loss": 4.0764, "step": 1157 }, { "epoch": 0.13, "grad_norm": 1.0750024293515343, "learning_rate": 0.0009729340025027158, "loss": 4.0736, "step": 1158 }, { "epoch": 0.13, "grad_norm": 0.9539820319101168, "learning_rate": 0.0009728737021963954, "loss": 4.1145, "step": 1159 }, { "epoch": 0.13, "grad_norm": 0.8400434721530677, "learning_rate": 0.0009728133366661615, "loss": 3.7788, "step": 1160 }, { "epoch": 0.13, "grad_norm": 1.0448476305721772, "learning_rate": 0.0009727529059203406, "loss": 3.8515, "step": 1161 }, { "epoch": 0.13, "grad_norm": 0.8076328172221366, "learning_rate": 0.0009726924099672676, "loss": 3.7931, "step": 1162 }, { "epoch": 0.13, "grad_norm": 0.8910235966377034, "learning_rate": 0.0009726318488152872, "loss": 3.8467, "step": 1163 }, { "epoch": 0.13, "grad_norm": 0.8571276406922334, "learning_rate": 0.0009725712224727523, "loss": 3.9025, "step": 1164 }, { "epoch": 0.13, "grad_norm": 0.9590413218400093, "learning_rate": 0.0009725105309480253, "loss": 3.8987, "step": 1165 }, { "epoch": 0.13, "grad_norm": 1.0992033490170205, "learning_rate": 0.0009724497742494776, "loss": 3.9042, "step": 1166 }, { "epoch": 0.13, "grad_norm": 0.9293464628060762, "learning_rate": 0.000972388952385489, "loss": 3.9285, "step": 1167 }, { "epoch": 0.13, "grad_norm": 0.8336888188418624, "learning_rate": 0.000972328065364449, "loss": 3.7819, "step": 1168 }, { "epoch": 0.13, "grad_norm": 0.862776407006224, "learning_rate": 0.0009722671131947559, "loss": 3.73, "step": 1169 }, { "epoch": 0.13, "grad_norm": 0.9177773761208528, "learning_rate": 0.0009722060958848168, "loss": 3.7145, "step": 1170 }, { "epoch": 0.13, "grad_norm": 0.9323202380596227, "learning_rate": 0.0009721450134430478, "loss": 3.6659, "step": 1171 }, { "epoch": 0.13, "grad_norm": 1.7276673438564107, "learning_rate": 0.000972083865877874, "loss": 4.0897, "step": 1172 }, { "epoch": 0.13, "grad_norm": 1.101331670613349, "learning_rate": 0.0009720226531977296, "loss": 4.0631, "step": 1173 }, { "epoch": 0.13, "grad_norm": 1.585525437709138, "learning_rate": 0.0009719613754110578, "loss": 3.9867, "step": 1174 }, { "epoch": 0.13, "grad_norm": 0.8583964109428411, "learning_rate": 0.0009719000325263109, "loss": 3.7867, "step": 1175 }, { "epoch": 0.13, "grad_norm": 0.9082085823031396, "learning_rate": 0.0009718386245519495, "loss": 4.0505, "step": 1176 }, { "epoch": 0.13, "grad_norm": 0.948182118660867, "learning_rate": 0.0009717771514964439, "loss": 3.8383, "step": 1177 }, { "epoch": 0.14, "grad_norm": 0.8985462306153706, "learning_rate": 
0.0009717156133682734, "loss": 3.884, "step": 1178 }, { "epoch": 0.14, "grad_norm": 0.9231041336272474, "learning_rate": 0.0009716540101759255, "loss": 3.9573, "step": 1179 }, { "epoch": 0.14, "grad_norm": 2.2811277116731867, "learning_rate": 0.0009715923419278976, "loss": 4.1143, "step": 1180 }, { "epoch": 0.14, "grad_norm": 1.3713842833463334, "learning_rate": 0.0009715306086326954, "loss": 3.9203, "step": 1181 }, { "epoch": 0.14, "grad_norm": 1.0078768577373445, "learning_rate": 0.0009714688102988339, "loss": 3.746, "step": 1182 }, { "epoch": 0.14, "grad_norm": 0.9918324555669916, "learning_rate": 0.000971406946934837, "loss": 3.9872, "step": 1183 }, { "epoch": 0.14, "grad_norm": 0.8379789885252091, "learning_rate": 0.0009713450185492378, "loss": 3.8235, "step": 1184 }, { "epoch": 0.14, "grad_norm": 0.9684206558888561, "learning_rate": 0.0009712830251505778, "loss": 4.0387, "step": 1185 }, { "epoch": 0.14, "grad_norm": 1.0559634744904582, "learning_rate": 0.0009712209667474079, "loss": 3.8792, "step": 1186 }, { "epoch": 0.14, "grad_norm": 0.9237859787622809, "learning_rate": 0.0009711588433482881, "loss": 4.0836, "step": 1187 }, { "epoch": 0.14, "grad_norm": 1.0723381152955602, "learning_rate": 0.0009710966549617868, "loss": 3.9188, "step": 1188 }, { "epoch": 0.14, "grad_norm": 1.0918463392391256, "learning_rate": 0.0009710344015964819, "loss": 3.9639, "step": 1189 }, { "epoch": 0.14, "grad_norm": 0.9265418349746243, "learning_rate": 0.00097097208326096, "loss": 4.0069, "step": 1190 }, { "epoch": 0.14, "grad_norm": 1.5178891156216168, "learning_rate": 0.000970909699963817, "loss": 3.7814, "step": 1191 }, { "epoch": 0.14, "grad_norm": 2.6779770924143507, "learning_rate": 0.0009708472517136569, "loss": 4.1803, "step": 1192 }, { "epoch": 0.14, "grad_norm": 1.2165605602695155, "learning_rate": 0.0009707847385190938, "loss": 3.9068, "step": 1193 }, { "epoch": 0.14, "grad_norm": 1.0190319722154504, "learning_rate": 0.00097072216038875, "loss": 3.7683, "step": 1194 }, { "epoch": 0.14, "grad_norm": 5.1837821332477105, "learning_rate": 0.000970659517331257, "loss": 3.7351, "step": 1195 }, { "epoch": 0.14, "grad_norm": 0.9766148641113799, "learning_rate": 0.000970596809355255, "loss": 3.9429, "step": 1196 }, { "epoch": 0.14, "grad_norm": 0.8260206247312476, "learning_rate": 0.0009705340364693935, "loss": 3.7627, "step": 1197 }, { "epoch": 0.14, "grad_norm": 0.8411051019628619, "learning_rate": 0.0009704711986823311, "loss": 3.7795, "step": 1198 }, { "epoch": 0.14, "grad_norm": 0.9496432667931038, "learning_rate": 0.0009704082960027348, "loss": 3.7983, "step": 1199 }, { "epoch": 0.14, "grad_norm": 0.9915788351529774, "learning_rate": 0.0009703453284392807, "loss": 4.319, "step": 1200 }, { "epoch": 0.14, "grad_norm": 0.9717986654886581, "learning_rate": 0.0009702822960006544, "loss": 4.0243, "step": 1201 }, { "epoch": 0.14, "grad_norm": 0.8185385468634446, "learning_rate": 0.0009702191986955494, "loss": 3.8957, "step": 1202 }, { "epoch": 0.14, "grad_norm": 1.0586923077649357, "learning_rate": 0.0009701560365326694, "loss": 3.8052, "step": 1203 }, { "epoch": 0.14, "grad_norm": 1.1035780592267228, "learning_rate": 0.0009700928095207259, "loss": 3.97, "step": 1204 }, { "epoch": 0.14, "grad_norm": 1.024785938354129, "learning_rate": 0.0009700295176684404, "loss": 3.8009, "step": 1205 }, { "epoch": 0.14, "grad_norm": 0.8752527924479723, "learning_rate": 0.0009699661609845425, "loss": 3.9964, "step": 1206 }, { "epoch": 0.14, "grad_norm": 0.9247902479294354, "learning_rate": 0.0009699027394777708, "loss": 
3.8331, "step": 1207 }, { "epoch": 0.14, "grad_norm": 1.8210148630626366, "learning_rate": 0.0009698392531568736, "loss": 3.9239, "step": 1208 }, { "epoch": 0.14, "grad_norm": 1.1764690578291919, "learning_rate": 0.0009697757020306072, "loss": 3.9713, "step": 1209 }, { "epoch": 0.14, "grad_norm": 0.9953746671649932, "learning_rate": 0.0009697120861077375, "loss": 3.9377, "step": 1210 }, { "epoch": 0.14, "grad_norm": 0.8694677857898438, "learning_rate": 0.0009696484053970391, "loss": 3.7729, "step": 1211 }, { "epoch": 0.14, "grad_norm": 0.903146042590022, "learning_rate": 0.0009695846599072955, "loss": 3.9141, "step": 1212 }, { "epoch": 0.14, "grad_norm": 0.8669656628328067, "learning_rate": 0.0009695208496472991, "loss": 3.8157, "step": 1213 }, { "epoch": 0.14, "grad_norm": 2.1188933199264146, "learning_rate": 0.0009694569746258514, "loss": 3.9868, "step": 1214 }, { "epoch": 0.14, "grad_norm": 1.3689524870133447, "learning_rate": 0.0009693930348517628, "loss": 3.8533, "step": 1215 }, { "epoch": 0.14, "grad_norm": 0.8243949113960238, "learning_rate": 0.0009693290303338524, "loss": 3.9983, "step": 1216 }, { "epoch": 0.14, "grad_norm": 1.0370525853140824, "learning_rate": 0.0009692649610809485, "loss": 3.7288, "step": 1217 }, { "epoch": 0.14, "grad_norm": 0.9213611034514192, "learning_rate": 0.0009692008271018883, "loss": 3.8335, "step": 1218 }, { "epoch": 0.14, "grad_norm": 1.599298364329189, "learning_rate": 0.0009691366284055176, "loss": 3.941, "step": 1219 }, { "epoch": 0.14, "grad_norm": 1.6653181362401195, "learning_rate": 0.0009690723650006917, "loss": 3.9675, "step": 1220 }, { "epoch": 0.14, "grad_norm": 0.9312186132161909, "learning_rate": 0.0009690080368962744, "loss": 3.774, "step": 1221 }, { "epoch": 0.14, "grad_norm": 0.9692639306437891, "learning_rate": 0.0009689436441011384, "loss": 3.8665, "step": 1222 }, { "epoch": 0.14, "grad_norm": 0.8273415757242735, "learning_rate": 0.0009688791866241657, "loss": 3.9077, "step": 1223 }, { "epoch": 0.14, "grad_norm": 1.0663875762438544, "learning_rate": 0.0009688146644742468, "loss": 3.8998, "step": 1224 }, { "epoch": 0.14, "grad_norm": 0.9616301414674919, "learning_rate": 0.0009687500776602813, "loss": 3.8716, "step": 1225 }, { "epoch": 0.14, "grad_norm": 1.0740893948761774, "learning_rate": 0.0009686854261911779, "loss": 3.9514, "step": 1226 }, { "epoch": 0.14, "grad_norm": 1.1757782467496745, "learning_rate": 0.0009686207100758538, "loss": 3.9485, "step": 1227 }, { "epoch": 0.14, "grad_norm": 0.8496873265597074, "learning_rate": 0.0009685559293232355, "loss": 3.7576, "step": 1228 }, { "epoch": 0.14, "grad_norm": 1.9374119557694822, "learning_rate": 0.0009684910839422582, "loss": 3.9243, "step": 1229 }, { "epoch": 0.14, "grad_norm": 0.9619536208307495, "learning_rate": 0.0009684261739418663, "loss": 4.0992, "step": 1230 }, { "epoch": 0.14, "grad_norm": 1.1671513566718728, "learning_rate": 0.0009683611993310127, "loss": 3.9332, "step": 1231 }, { "epoch": 0.14, "grad_norm": 0.9778001991457772, "learning_rate": 0.0009682961601186593, "loss": 3.5797, "step": 1232 }, { "epoch": 0.14, "grad_norm": 1.0265196995344394, "learning_rate": 0.000968231056313777, "loss": 3.9148, "step": 1233 }, { "epoch": 0.14, "grad_norm": 0.9059116270277968, "learning_rate": 0.0009681658879253461, "loss": 3.9402, "step": 1234 }, { "epoch": 0.14, "grad_norm": 0.9069721940576991, "learning_rate": 0.0009681006549623548, "loss": 4.0391, "step": 1235 }, { "epoch": 0.14, "grad_norm": 2.9526955685459364, "learning_rate": 0.000968035357433801, "loss": 3.8473, "step": 1236 
}, { "epoch": 0.14, "grad_norm": 1.5774996971858504, "learning_rate": 0.0009679699953486913, "loss": 3.9154, "step": 1237 }, { "epoch": 0.14, "grad_norm": 0.873144291779001, "learning_rate": 0.0009679045687160411, "loss": 3.9218, "step": 1238 }, { "epoch": 0.14, "grad_norm": 0.9817585569336963, "learning_rate": 0.0009678390775448745, "loss": 3.9995, "step": 1239 }, { "epoch": 0.14, "grad_norm": 0.8812904909526799, "learning_rate": 0.0009677735218442252, "loss": 3.8041, "step": 1240 }, { "epoch": 0.14, "grad_norm": 0.9133073847331136, "learning_rate": 0.0009677079016231349, "loss": 3.6559, "step": 1241 }, { "epoch": 0.14, "grad_norm": 0.8747530026434839, "learning_rate": 0.000967642216890655, "loss": 3.8392, "step": 1242 }, { "epoch": 0.14, "grad_norm": 0.7411111083745054, "learning_rate": 0.0009675764676558454, "loss": 3.9429, "step": 1243 }, { "epoch": 0.14, "grad_norm": 0.7965217929417112, "learning_rate": 0.000967510653927775, "loss": 3.6893, "step": 1244 }, { "epoch": 0.14, "grad_norm": 0.8904969095504146, "learning_rate": 0.0009674447757155213, "loss": 4.0174, "step": 1245 }, { "epoch": 0.14, "grad_norm": 1.0354428267514901, "learning_rate": 0.0009673788330281709, "loss": 4.0429, "step": 1246 }, { "epoch": 0.14, "grad_norm": 1.0165140078196517, "learning_rate": 0.0009673128258748199, "loss": 4.0743, "step": 1247 }, { "epoch": 0.14, "grad_norm": 0.9627060747892144, "learning_rate": 0.0009672467542645722, "loss": 3.6733, "step": 1248 }, { "epoch": 0.14, "grad_norm": 0.7952932516583989, "learning_rate": 0.0009671806182065414, "loss": 3.7791, "step": 1249 }, { "epoch": 0.14, "grad_norm": 0.9093254163507255, "learning_rate": 0.0009671144177098494, "loss": 3.8856, "step": 1250 }, { "epoch": 0.14, "grad_norm": 0.877167926426142, "learning_rate": 0.0009670481527836276, "loss": 4.0906, "step": 1251 }, { "epoch": 0.14, "grad_norm": 0.9528383678188778, "learning_rate": 0.000966981823437016, "loss": 3.855, "step": 1252 }, { "epoch": 0.14, "grad_norm": 0.9803766757838255, "learning_rate": 0.0009669154296791632, "loss": 3.867, "step": 1253 }, { "epoch": 0.14, "grad_norm": 0.9565626384520503, "learning_rate": 0.000966848971519227, "loss": 4.0007, "step": 1254 }, { "epoch": 0.14, "grad_norm": 0.9049454020266207, "learning_rate": 0.0009667824489663743, "loss": 3.8138, "step": 1255 }, { "epoch": 0.14, "grad_norm": 0.9131884616341465, "learning_rate": 0.0009667158620297803, "loss": 4.0401, "step": 1256 }, { "epoch": 0.14, "grad_norm": 0.8536423883013678, "learning_rate": 0.0009666492107186296, "loss": 3.8983, "step": 1257 }, { "epoch": 0.14, "grad_norm": 0.832657604313244, "learning_rate": 0.0009665824950421155, "loss": 3.7254, "step": 1258 }, { "epoch": 0.14, "grad_norm": 0.9824151155200311, "learning_rate": 0.00096651571500944, "loss": 3.9107, "step": 1259 }, { "epoch": 0.14, "grad_norm": 0.8276177811256961, "learning_rate": 0.0009664488706298142, "loss": 3.831, "step": 1260 }, { "epoch": 0.14, "grad_norm": 0.7468232282004362, "learning_rate": 0.0009663819619124581, "loss": 3.7489, "step": 1261 }, { "epoch": 0.14, "grad_norm": 0.9434729693255196, "learning_rate": 0.0009663149888666003, "loss": 3.842, "step": 1262 }, { "epoch": 0.14, "grad_norm": 1.1658334038404314, "learning_rate": 0.0009662479515014786, "loss": 3.9475, "step": 1263 }, { "epoch": 0.14, "grad_norm": 0.7075301083317621, "learning_rate": 0.0009661808498263396, "loss": 3.7535, "step": 1264 }, { "epoch": 0.15, "grad_norm": 0.8225387168517201, "learning_rate": 0.0009661136838504385, "loss": 4.0481, "step": 1265 }, { "epoch": 0.15, 
"grad_norm": 0.928875318025512, "learning_rate": 0.0009660464535830395, "loss": 3.8676, "step": 1266 }, { "epoch": 0.15, "grad_norm": 0.8002942613710375, "learning_rate": 0.0009659791590334162, "loss": 4.0203, "step": 1267 }, { "epoch": 0.15, "grad_norm": 0.8346160161789712, "learning_rate": 0.00096591180021085, "loss": 3.9455, "step": 1268 }, { "epoch": 0.15, "grad_norm": 0.8933388746841424, "learning_rate": 0.0009658443771246322, "loss": 3.9002, "step": 1269 }, { "epoch": 0.15, "grad_norm": 0.9007159497871604, "learning_rate": 0.0009657768897840623, "loss": 3.713, "step": 1270 }, { "epoch": 0.15, "grad_norm": 2.97180683670039, "learning_rate": 0.000965709338198449, "loss": 4.2397, "step": 1271 }, { "epoch": 0.15, "grad_norm": 0.8870994127514898, "learning_rate": 0.0009656417223771097, "loss": 3.9728, "step": 1272 }, { "epoch": 0.15, "grad_norm": 0.8368788807463914, "learning_rate": 0.0009655740423293708, "loss": 3.8196, "step": 1273 }, { "epoch": 0.15, "grad_norm": 1.014448825042988, "learning_rate": 0.0009655062980645673, "loss": 4.0431, "step": 1274 }, { "epoch": 0.15, "grad_norm": 0.9436348863244459, "learning_rate": 0.0009654384895920434, "loss": 3.8817, "step": 1275 }, { "epoch": 0.15, "grad_norm": 0.8354934620718959, "learning_rate": 0.0009653706169211519, "loss": 3.6851, "step": 1276 }, { "epoch": 0.15, "grad_norm": 0.8973061654584843, "learning_rate": 0.0009653026800612545, "loss": 3.8962, "step": 1277 }, { "epoch": 0.15, "grad_norm": 0.8312593493267536, "learning_rate": 0.0009652346790217221, "loss": 3.776, "step": 1278 }, { "epoch": 0.15, "grad_norm": 0.8589715391081593, "learning_rate": 0.0009651666138119337, "loss": 3.7952, "step": 1279 }, { "epoch": 0.15, "grad_norm": 0.8005301083109356, "learning_rate": 0.000965098484441278, "loss": 3.743, "step": 1280 }, { "epoch": 0.15, "grad_norm": 0.755995690059652, "learning_rate": 0.0009650302909191517, "loss": 3.6772, "step": 1281 }, { "epoch": 0.15, "grad_norm": 1.1373651802508393, "learning_rate": 0.0009649620332549613, "loss": 3.8801, "step": 1282 }, { "epoch": 0.15, "grad_norm": 0.9040321464447719, "learning_rate": 0.0009648937114581212, "loss": 3.7437, "step": 1283 }, { "epoch": 0.15, "grad_norm": 0.8630069007659117, "learning_rate": 0.0009648253255380554, "loss": 3.8694, "step": 1284 }, { "epoch": 0.15, "grad_norm": 0.9710698534622276, "learning_rate": 0.0009647568755041963, "loss": 3.7386, "step": 1285 }, { "epoch": 0.15, "grad_norm": 0.8432617763973004, "learning_rate": 0.0009646883613659851, "loss": 3.7495, "step": 1286 }, { "epoch": 0.15, "grad_norm": 0.8465614012657681, "learning_rate": 0.0009646197831328725, "loss": 4.0602, "step": 1287 }, { "epoch": 0.15, "grad_norm": 0.9064616818639412, "learning_rate": 0.0009645511408143171, "loss": 3.9505, "step": 1288 }, { "epoch": 0.15, "grad_norm": 0.8084622098369357, "learning_rate": 0.0009644824344197872, "loss": 3.7778, "step": 1289 }, { "epoch": 0.15, "grad_norm": 0.8689209397891293, "learning_rate": 0.0009644136639587591, "loss": 4.0088, "step": 1290 }, { "epoch": 0.15, "grad_norm": 0.9916119943830137, "learning_rate": 0.0009643448294407186, "loss": 3.8514, "step": 1291 }, { "epoch": 0.15, "grad_norm": 0.8307468634746445, "learning_rate": 0.0009642759308751601, "loss": 3.788, "step": 1292 }, { "epoch": 0.15, "grad_norm": 1.1219192852661597, "learning_rate": 0.0009642069682715868, "loss": 4.0463, "step": 1293 }, { "epoch": 0.15, "grad_norm": 0.8824334248037283, "learning_rate": 0.0009641379416395109, "loss": 3.9621, "step": 1294 }, { "epoch": 0.15, "grad_norm": 
0.9422907916618047, "learning_rate": 0.0009640688509884532, "loss": 3.9566, "step": 1295 }, { "epoch": 0.15, "grad_norm": 0.892132150044263, "learning_rate": 0.0009639996963279435, "loss": 4.1511, "step": 1296 }, { "epoch": 0.15, "grad_norm": 1.047357009921113, "learning_rate": 0.0009639304776675204, "loss": 3.78, "step": 1297 }, { "epoch": 0.15, "grad_norm": 0.9773680715175178, "learning_rate": 0.0009638611950167311, "loss": 3.9136, "step": 1298 }, { "epoch": 0.15, "grad_norm": 1.1464280036974428, "learning_rate": 0.000963791848385132, "loss": 3.9273, "step": 1299 }, { "epoch": 0.15, "grad_norm": 2.3674130327612986, "learning_rate": 0.000963722437782288, "loss": 3.9309, "step": 1300 }, { "epoch": 0.15, "grad_norm": 0.8721458001584989, "learning_rate": 0.0009636529632177732, "loss": 3.9968, "step": 1301 }, { "epoch": 0.15, "grad_norm": 0.895984417247757, "learning_rate": 0.0009635834247011701, "loss": 3.8964, "step": 1302 }, { "epoch": 0.15, "grad_norm": 0.9600763332124445, "learning_rate": 0.0009635138222420703, "loss": 4.0515, "step": 1303 }, { "epoch": 0.15, "grad_norm": 0.8154250062168833, "learning_rate": 0.000963444155850074, "loss": 3.74, "step": 1304 }, { "epoch": 0.15, "grad_norm": 1.1605719827918148, "learning_rate": 0.0009633744255347905, "loss": 3.8061, "step": 1305 }, { "epoch": 0.15, "grad_norm": 0.7590006555552549, "learning_rate": 0.0009633046313058378, "loss": 3.7027, "step": 1306 }, { "epoch": 0.15, "grad_norm": 0.7169831921807566, "learning_rate": 0.0009632347731728424, "loss": 3.7103, "step": 1307 }, { "epoch": 0.15, "grad_norm": 0.8770826866495424, "learning_rate": 0.0009631648511454402, "loss": 3.9115, "step": 1308 }, { "epoch": 0.15, "grad_norm": 1.0442792768535483, "learning_rate": 0.0009630948652332756, "loss": 3.9245, "step": 1309 }, { "epoch": 0.15, "grad_norm": 0.9231483862403553, "learning_rate": 0.0009630248154460017, "loss": 3.8329, "step": 1310 }, { "epoch": 0.15, "grad_norm": 1.0125368751432804, "learning_rate": 0.0009629547017932805, "loss": 3.8609, "step": 1311 }, { "epoch": 0.15, "grad_norm": 0.9052641190427552, "learning_rate": 0.0009628845242847829, "loss": 3.6307, "step": 1312 }, { "epoch": 0.15, "grad_norm": 0.9328776451401802, "learning_rate": 0.0009628142829301886, "loss": 3.8112, "step": 1313 }, { "epoch": 0.15, "grad_norm": 1.1176516795359284, "learning_rate": 0.000962743977739186, "loss": 3.848, "step": 1314 }, { "epoch": 0.15, "grad_norm": 0.8663653568120429, "learning_rate": 0.0009626736087214724, "loss": 3.9584, "step": 1315 }, { "epoch": 0.15, "grad_norm": 0.8757822750567124, "learning_rate": 0.0009626031758867538, "loss": 3.7025, "step": 1316 }, { "epoch": 0.15, "grad_norm": 0.9151154299429095, "learning_rate": 0.0009625326792447451, "loss": 4.0714, "step": 1317 }, { "epoch": 0.15, "grad_norm": 0.8472711838486595, "learning_rate": 0.00096246211880517, "loss": 3.7862, "step": 1318 }, { "epoch": 0.15, "grad_norm": 0.8389505833941808, "learning_rate": 0.000962391494577761, "loss": 3.6895, "step": 1319 }, { "epoch": 0.15, "grad_norm": 0.8896074400237217, "learning_rate": 0.0009623208065722592, "loss": 3.9731, "step": 1320 }, { "epoch": 0.15, "grad_norm": 0.9099393134619518, "learning_rate": 0.0009622500547984147, "loss": 3.911, "step": 1321 }, { "epoch": 0.15, "grad_norm": 1.027851962307429, "learning_rate": 0.0009621792392659867, "loss": 3.8772, "step": 1322 }, { "epoch": 0.15, "grad_norm": 0.8588708996292934, "learning_rate": 0.0009621083599847424, "loss": 3.8149, "step": 1323 }, { "epoch": 0.15, "grad_norm": 0.7471017004927911, 
"learning_rate": 0.0009620374169644583, "loss": 3.9552, "step": 1324 }, { "epoch": 0.15, "grad_norm": 0.9548901110839529, "learning_rate": 0.0009619664102149201, "loss": 3.9511, "step": 1325 }, { "epoch": 0.15, "grad_norm": 0.9729491403642043, "learning_rate": 0.0009618953397459211, "loss": 3.7585, "step": 1326 }, { "epoch": 0.15, "grad_norm": 1.142541884602134, "learning_rate": 0.0009618242055672648, "loss": 3.8907, "step": 1327 }, { "epoch": 0.15, "grad_norm": 0.837472054918629, "learning_rate": 0.0009617530076887624, "loss": 3.9658, "step": 1328 }, { "epoch": 0.15, "grad_norm": 0.8418913839647469, "learning_rate": 0.0009616817461202345, "loss": 3.8776, "step": 1329 }, { "epoch": 0.15, "grad_norm": 0.9682140941402299, "learning_rate": 0.0009616104208715101, "loss": 3.7543, "step": 1330 }, { "epoch": 0.15, "grad_norm": 0.8351318604275011, "learning_rate": 0.0009615390319524272, "loss": 3.8855, "step": 1331 }, { "epoch": 0.15, "grad_norm": 0.7895729419228837, "learning_rate": 0.0009614675793728327, "loss": 3.9336, "step": 1332 }, { "epoch": 0.15, "grad_norm": 1.0467883841600463, "learning_rate": 0.0009613960631425818, "loss": 3.7066, "step": 1333 }, { "epoch": 0.15, "grad_norm": 1.1515093810252361, "learning_rate": 0.000961324483271539, "loss": 3.8395, "step": 1334 }, { "epoch": 0.15, "grad_norm": 0.9157140218290246, "learning_rate": 0.0009612528397695777, "loss": 3.834, "step": 1335 }, { "epoch": 0.15, "grad_norm": 0.7889100256223686, "learning_rate": 0.0009611811326465791, "loss": 3.8557, "step": 1336 }, { "epoch": 0.15, "grad_norm": 0.9009750925686082, "learning_rate": 0.0009611093619124344, "loss": 3.8886, "step": 1337 }, { "epoch": 0.15, "grad_norm": 0.926422369415665, "learning_rate": 0.0009610375275770427, "loss": 3.9171, "step": 1338 }, { "epoch": 0.15, "grad_norm": 0.8994071324066222, "learning_rate": 0.0009609656296503121, "loss": 3.7646, "step": 1339 }, { "epoch": 0.15, "grad_norm": 0.8856720474014015, "learning_rate": 0.0009608936681421599, "loss": 3.8905, "step": 1340 }, { "epoch": 0.15, "grad_norm": 0.9304596087192142, "learning_rate": 0.0009608216430625114, "loss": 4.1114, "step": 1341 }, { "epoch": 0.15, "grad_norm": 0.8869970667136994, "learning_rate": 0.0009607495544213014, "loss": 3.8797, "step": 1342 }, { "epoch": 0.15, "grad_norm": 0.932280576761245, "learning_rate": 0.000960677402228473, "loss": 3.8959, "step": 1343 }, { "epoch": 0.15, "grad_norm": 0.8621201819116923, "learning_rate": 0.0009606051864939785, "loss": 3.6971, "step": 1344 }, { "epoch": 0.15, "grad_norm": 0.8049991112830159, "learning_rate": 0.0009605329072277782, "loss": 3.7375, "step": 1345 }, { "epoch": 0.15, "grad_norm": 1.0974918642394877, "learning_rate": 0.000960460564439842, "loss": 3.8616, "step": 1346 }, { "epoch": 0.15, "grad_norm": 1.6641828320860899, "learning_rate": 0.0009603881581401482, "loss": 3.8839, "step": 1347 }, { "epoch": 0.15, "grad_norm": 1.0861884983737076, "learning_rate": 0.0009603156883386836, "loss": 3.9676, "step": 1348 }, { "epoch": 0.15, "grad_norm": 1.0244211583302405, "learning_rate": 0.0009602431550454442, "loss": 3.8066, "step": 1349 }, { "epoch": 0.15, "grad_norm": 0.8563337985715797, "learning_rate": 0.0009601705582704348, "loss": 3.869, "step": 1350 }, { "epoch": 0.15, "grad_norm": 1.198118512910373, "learning_rate": 0.0009600978980236683, "loss": 3.9266, "step": 1351 }, { "epoch": 0.16, "grad_norm": 0.848668418666416, "learning_rate": 0.0009600251743151672, "loss": 3.9361, "step": 1352 }, { "epoch": 0.16, "grad_norm": 0.7900897168160965, "learning_rate": 
0.0009599523871549621, "loss": 3.8051, "step": 1353 }, { "epoch": 0.16, "grad_norm": 0.8324642499920368, "learning_rate": 0.0009598795365530928, "loss": 3.7465, "step": 1354 }, { "epoch": 0.16, "grad_norm": 0.8412680673522067, "learning_rate": 0.0009598066225196074, "loss": 3.9093, "step": 1355 }, { "epoch": 0.16, "grad_norm": 0.9728173637673643, "learning_rate": 0.0009597336450645633, "loss": 3.7059, "step": 1356 }, { "epoch": 0.16, "grad_norm": 0.876193662132438, "learning_rate": 0.000959660604198026, "loss": 3.9082, "step": 1357 }, { "epoch": 0.16, "grad_norm": 0.7655315804375767, "learning_rate": 0.0009595874999300703, "loss": 3.9181, "step": 1358 }, { "epoch": 0.16, "grad_norm": 0.853681014310914, "learning_rate": 0.0009595143322707795, "loss": 3.9391, "step": 1359 }, { "epoch": 0.16, "grad_norm": 0.7964059368770235, "learning_rate": 0.0009594411012302459, "loss": 3.5117, "step": 1360 }, { "epoch": 0.16, "grad_norm": 0.8580626302316371, "learning_rate": 0.0009593678068185701, "loss": 3.6968, "step": 1361 }, { "epoch": 0.16, "grad_norm": 1.087579312729565, "learning_rate": 0.0009592944490458614, "loss": 3.8156, "step": 1362 }, { "epoch": 0.16, "grad_norm": 1.1299281768825828, "learning_rate": 0.0009592210279222386, "loss": 4.2495, "step": 1363 }, { "epoch": 0.16, "grad_norm": 0.9640799539214135, "learning_rate": 0.0009591475434578286, "loss": 4.0634, "step": 1364 }, { "epoch": 0.16, "grad_norm": 0.8668561059440153, "learning_rate": 0.0009590739956627671, "loss": 3.753, "step": 1365 }, { "epoch": 0.16, "grad_norm": 0.7885281899151589, "learning_rate": 0.0009590003845471987, "loss": 3.7043, "step": 1366 }, { "epoch": 0.16, "grad_norm": 0.9261394854150734, "learning_rate": 0.0009589267101212764, "loss": 3.6236, "step": 1367 }, { "epoch": 0.16, "grad_norm": 0.9429537580988815, "learning_rate": 0.0009588529723951625, "loss": 3.86, "step": 1368 }, { "epoch": 0.16, "grad_norm": 0.8598241321518687, "learning_rate": 0.0009587791713790276, "loss": 3.9132, "step": 1369 }, { "epoch": 0.16, "grad_norm": 0.860429607143447, "learning_rate": 0.0009587053070830512, "loss": 3.8315, "step": 1370 }, { "epoch": 0.16, "grad_norm": 0.9738170039946603, "learning_rate": 0.0009586313795174213, "loss": 3.8584, "step": 1371 }, { "epoch": 0.16, "grad_norm": 0.9401436272043237, "learning_rate": 0.0009585573886923349, "loss": 4.0028, "step": 1372 }, { "epoch": 0.16, "grad_norm": 0.8186962896664751, "learning_rate": 0.0009584833346179977, "loss": 3.9204, "step": 1373 }, { "epoch": 0.16, "grad_norm": 1.2556251541233945, "learning_rate": 0.000958409217304624, "loss": 3.8679, "step": 1374 }, { "epoch": 0.16, "grad_norm": 0.7697209464479603, "learning_rate": 0.0009583350367624366, "loss": 3.7894, "step": 1375 }, { "epoch": 0.16, "grad_norm": 9.507549994913516, "learning_rate": 0.0009582607930016678, "loss": 3.9594, "step": 1376 }, { "epoch": 0.16, "grad_norm": 0.9532645638863354, "learning_rate": 0.0009581864860325577, "loss": 3.865, "step": 1377 }, { "epoch": 0.16, "grad_norm": 0.9623082091869931, "learning_rate": 0.0009581121158653558, "loss": 4.1883, "step": 1378 }, { "epoch": 0.16, "grad_norm": 0.9040667625633498, "learning_rate": 0.0009580376825103199, "loss": 3.776, "step": 1379 }, { "epoch": 0.16, "grad_norm": 2.0746365056026415, "learning_rate": 0.0009579631859777167, "loss": 4.1083, "step": 1380 }, { "epoch": 0.16, "grad_norm": 0.9437937263199113, "learning_rate": 0.0009578886262778214, "loss": 3.7122, "step": 1381 }, { "epoch": 0.16, "grad_norm": 1.116488444653678, "learning_rate": 0.0009578140034209185, 
"loss": 3.7763, "step": 1382 }, { "epoch": 0.16, "grad_norm": 0.9142975430367627, "learning_rate": 0.0009577393174173004, "loss": 3.8347, "step": 1383 }, { "epoch": 0.16, "grad_norm": 0.9263977272223054, "learning_rate": 0.0009576645682772689, "loss": 3.8793, "step": 1384 }, { "epoch": 0.16, "grad_norm": 0.8145280008171305, "learning_rate": 0.0009575897560111339, "loss": 3.7852, "step": 1385 }, { "epoch": 0.16, "grad_norm": 0.8546542710397201, "learning_rate": 0.0009575148806292146, "loss": 3.7826, "step": 1386 }, { "epoch": 0.16, "grad_norm": 0.7778925084507263, "learning_rate": 0.0009574399421418388, "loss": 3.8225, "step": 1387 }, { "epoch": 0.16, "grad_norm": 0.8519461698431795, "learning_rate": 0.0009573649405593422, "loss": 3.8641, "step": 1388 }, { "epoch": 0.16, "grad_norm": 0.9856506255186301, "learning_rate": 0.0009572898758920704, "loss": 3.6021, "step": 1389 }, { "epoch": 0.16, "grad_norm": 0.8880775624844572, "learning_rate": 0.000957214748150377, "loss": 3.901, "step": 1390 }, { "epoch": 0.16, "grad_norm": 0.88782666091247, "learning_rate": 0.0009571395573446242, "loss": 3.8895, "step": 1391 }, { "epoch": 0.16, "grad_norm": 0.8207024024884569, "learning_rate": 0.0009570643034851835, "loss": 3.7878, "step": 1392 }, { "epoch": 0.16, "grad_norm": 0.864460529218865, "learning_rate": 0.0009569889865824345, "loss": 4.1187, "step": 1393 }, { "epoch": 0.16, "grad_norm": 0.895396743529384, "learning_rate": 0.0009569136066467659, "loss": 3.904, "step": 1394 }, { "epoch": 0.16, "grad_norm": 0.8227959276072406, "learning_rate": 0.0009568381636885747, "loss": 3.8694, "step": 1395 }, { "epoch": 0.16, "grad_norm": 0.9679815361079878, "learning_rate": 0.0009567626577182671, "loss": 3.8404, "step": 1396 }, { "epoch": 0.16, "grad_norm": 0.8648004126631317, "learning_rate": 0.0009566870887462573, "loss": 3.9228, "step": 1397 }, { "epoch": 0.16, "grad_norm": 0.899389022610693, "learning_rate": 0.0009566114567829691, "loss": 3.6404, "step": 1398 }, { "epoch": 0.16, "grad_norm": 0.8959860816301616, "learning_rate": 0.0009565357618388342, "loss": 3.7719, "step": 1399 }, { "epoch": 0.16, "grad_norm": 1.094924359055759, "learning_rate": 0.0009564600039242932, "loss": 3.7904, "step": 1400 }, { "epoch": 0.16, "grad_norm": 0.8446670399193721, "learning_rate": 0.0009563841830497957, "loss": 3.9743, "step": 1401 }, { "epoch": 0.16, "grad_norm": 0.8958998969059287, "learning_rate": 0.0009563082992257996, "loss": 3.9753, "step": 1402 }, { "epoch": 0.16, "grad_norm": 0.9084923371191541, "learning_rate": 0.0009562323524627716, "loss": 3.8617, "step": 1403 }, { "epoch": 0.16, "grad_norm": 0.9136328753715062, "learning_rate": 0.0009561563427711872, "loss": 3.9676, "step": 1404 }, { "epoch": 0.16, "grad_norm": 4.891810412391087, "learning_rate": 0.0009560802701615304, "loss": 3.8785, "step": 1405 }, { "epoch": 0.16, "grad_norm": 0.7557373806901146, "learning_rate": 0.0009560041346442941, "loss": 3.7352, "step": 1406 }, { "epoch": 0.16, "grad_norm": 0.8049526231273595, "learning_rate": 0.0009559279362299796, "loss": 3.7288, "step": 1407 }, { "epoch": 0.16, "grad_norm": 0.7348808034774211, "learning_rate": 0.0009558516749290971, "loss": 3.8559, "step": 1408 }, { "epoch": 0.16, "grad_norm": 0.8832127608214954, "learning_rate": 0.0009557753507521653, "loss": 3.6959, "step": 1409 }, { "epoch": 0.16, "grad_norm": 1.1720270642285266, "learning_rate": 0.0009556989637097118, "loss": 3.7822, "step": 1410 }, { "epoch": 0.16, "grad_norm": 0.8337517554933296, "learning_rate": 0.0009556225138122727, "loss": 3.9447, "step": 
1411 }, { "epoch": 0.16, "grad_norm": 0.9227469288336163, "learning_rate": 0.0009555460010703927, "loss": 3.828, "step": 1412 }, { "epoch": 0.16, "grad_norm": 0.8816760042677255, "learning_rate": 0.0009554694254946252, "loss": 3.743, "step": 1413 }, { "epoch": 0.16, "grad_norm": 0.8362884977986006, "learning_rate": 0.0009553927870955327, "loss": 3.6962, "step": 1414 }, { "epoch": 0.16, "grad_norm": 0.8967808253010532, "learning_rate": 0.0009553160858836858, "loss": 3.7658, "step": 1415 }, { "epoch": 0.16, "grad_norm": 0.9110474357833716, "learning_rate": 0.000955239321869664, "loss": 3.8064, "step": 1416 }, { "epoch": 0.16, "grad_norm": 0.8845506805266313, "learning_rate": 0.0009551624950640552, "loss": 3.6623, "step": 1417 }, { "epoch": 0.16, "grad_norm": 0.8192694046625639, "learning_rate": 0.0009550856054774566, "loss": 3.8059, "step": 1418 }, { "epoch": 0.16, "grad_norm": 1.1541944222905989, "learning_rate": 0.0009550086531204733, "loss": 3.9599, "step": 1419 }, { "epoch": 0.16, "grad_norm": 1.0772825961084247, "learning_rate": 0.0009549316380037196, "loss": 3.7892, "step": 1420 }, { "epoch": 0.16, "grad_norm": 0.9651778558174329, "learning_rate": 0.0009548545601378183, "loss": 4.0461, "step": 1421 }, { "epoch": 0.16, "grad_norm": 1.149649039566938, "learning_rate": 0.0009547774195334007, "loss": 3.9114, "step": 1422 }, { "epoch": 0.16, "grad_norm": 0.8375106345677006, "learning_rate": 0.0009547002162011069, "loss": 3.9692, "step": 1423 }, { "epoch": 0.16, "grad_norm": 0.8539130174313645, "learning_rate": 0.0009546229501515856, "loss": 3.7928, "step": 1424 }, { "epoch": 0.16, "grad_norm": 0.8292832259062058, "learning_rate": 0.0009545456213954944, "loss": 3.8811, "step": 1425 }, { "epoch": 0.16, "grad_norm": 0.9122768600395526, "learning_rate": 0.000954468229943499, "loss": 3.9504, "step": 1426 }, { "epoch": 0.16, "grad_norm": 0.8427279401605012, "learning_rate": 0.0009543907758062742, "loss": 3.9653, "step": 1427 }, { "epoch": 0.16, "grad_norm": 0.9718384380069693, "learning_rate": 0.0009543132589945034, "loss": 3.8199, "step": 1428 }, { "epoch": 0.16, "grad_norm": 1.388479385024393, "learning_rate": 0.0009542356795188786, "loss": 3.6234, "step": 1429 }, { "epoch": 0.16, "grad_norm": 1.1135206294121325, "learning_rate": 0.0009541580373901002, "loss": 3.6418, "step": 1430 }, { "epoch": 0.16, "grad_norm": 1.0600352743282997, "learning_rate": 0.0009540803326188777, "loss": 3.6644, "step": 1431 }, { "epoch": 0.16, "grad_norm": 0.8970042100748042, "learning_rate": 0.0009540025652159288, "loss": 3.8966, "step": 1432 }, { "epoch": 0.16, "grad_norm": 1.076806394421253, "learning_rate": 0.0009539247351919802, "loss": 4.1181, "step": 1433 }, { "epoch": 0.16, "grad_norm": 0.9505175053075758, "learning_rate": 0.0009538468425577669, "loss": 3.9308, "step": 1434 }, { "epoch": 0.16, "grad_norm": 0.8641169394011997, "learning_rate": 0.0009537688873240327, "loss": 4.007, "step": 1435 }, { "epoch": 0.16, "grad_norm": 0.9268405539347068, "learning_rate": 0.0009536908695015303, "loss": 3.9137, "step": 1436 }, { "epoch": 0.16, "grad_norm": 0.8592229175334658, "learning_rate": 0.0009536127891010205, "loss": 3.9452, "step": 1437 }, { "epoch": 0.16, "grad_norm": 100.03254640056228, "learning_rate": 0.000953534646133273, "loss": 3.7717, "step": 1438 }, { "epoch": 0.17, "grad_norm": 1.097820931106454, "learning_rate": 0.0009534564406090664, "loss": 3.696, "step": 1439 }, { "epoch": 0.17, "grad_norm": 0.9316812179864764, "learning_rate": 0.0009533781725391872, "loss": 3.914, "step": 1440 }, { "epoch": 0.17, 
"grad_norm": 1.8547042079933125, "learning_rate": 0.0009532998419344316, "loss": 3.985, "step": 1441 }, { "epoch": 0.17, "grad_norm": 2.524241479048602, "learning_rate": 0.0009532214488056032, "loss": 3.9608, "step": 1442 }, { "epoch": 0.17, "grad_norm": 1.3782703551829998, "learning_rate": 0.0009531429931635154, "loss": 3.9929, "step": 1443 }, { "epoch": 0.17, "grad_norm": 1.139360157938609, "learning_rate": 0.0009530644750189892, "loss": 4.2135, "step": 1444 }, { "epoch": 0.17, "grad_norm": 2.165247443753805, "learning_rate": 0.0009529858943828548, "loss": 3.8277, "step": 1445 }, { "epoch": 0.17, "grad_norm": 1.2065276610782492, "learning_rate": 0.0009529072512659512, "loss": 3.8855, "step": 1446 }, { "epoch": 0.17, "grad_norm": 1.461429428752609, "learning_rate": 0.0009528285456791253, "loss": 4.1549, "step": 1447 }, { "epoch": 0.17, "grad_norm": 1.2448386635813538, "learning_rate": 0.0009527497776332334, "loss": 4.1072, "step": 1448 }, { "epoch": 0.17, "grad_norm": 1.6769539626506422, "learning_rate": 0.0009526709471391397, "loss": 4.1297, "step": 1449 }, { "epoch": 0.17, "grad_norm": 1.2687245786765955, "learning_rate": 0.0009525920542077176, "loss": 4.1963, "step": 1450 }, { "epoch": 0.17, "grad_norm": 2.4414397569336557, "learning_rate": 0.0009525130988498489, "loss": 3.8882, "step": 1451 }, { "epoch": 0.17, "grad_norm": 1.5153911338165331, "learning_rate": 0.0009524340810764237, "loss": 4.0018, "step": 1452 }, { "epoch": 0.17, "grad_norm": 4.436535378393621, "learning_rate": 0.0009523550008983413, "loss": 4.0123, "step": 1453 }, { "epoch": 0.17, "grad_norm": 2.318419462297184, "learning_rate": 0.0009522758583265092, "loss": 3.9551, "step": 1454 }, { "epoch": 0.17, "grad_norm": 1.221249356173666, "learning_rate": 0.0009521966533718436, "loss": 4.0276, "step": 1455 }, { "epoch": 0.17, "grad_norm": 5.687136138333517, "learning_rate": 0.0009521173860452695, "loss": 3.8629, "step": 1456 }, { "epoch": 0.17, "grad_norm": 1.346346554399234, "learning_rate": 0.0009520380563577198, "loss": 3.9149, "step": 1457 }, { "epoch": 0.17, "grad_norm": 4.086625604690991, "learning_rate": 0.000951958664320137, "loss": 4.0661, "step": 1458 }, { "epoch": 0.17, "grad_norm": 1.4721430727683573, "learning_rate": 0.0009518792099434717, "loss": 4.1447, "step": 1459 }, { "epoch": 0.17, "grad_norm": 1.2976886361092492, "learning_rate": 0.0009517996932386827, "loss": 4.0543, "step": 1460 }, { "epoch": 0.17, "grad_norm": 1.6697103624943264, "learning_rate": 0.0009517201142167385, "loss": 3.989, "step": 1461 }, { "epoch": 0.17, "grad_norm": 4.0215592848270525, "learning_rate": 0.0009516404728886148, "loss": 4.16, "step": 1462 }, { "epoch": 0.17, "grad_norm": 1.4697379135773903, "learning_rate": 0.000951560769265297, "loss": 3.8362, "step": 1463 }, { "epoch": 0.17, "grad_norm": 1.1524336185141049, "learning_rate": 0.0009514810033577786, "loss": 3.7538, "step": 1464 }, { "epoch": 0.17, "grad_norm": 1.0493847440526527, "learning_rate": 0.0009514011751770618, "loss": 3.9116, "step": 1465 }, { "epoch": 0.17, "grad_norm": 1.23190628404012, "learning_rate": 0.0009513212847341573, "loss": 4.0073, "step": 1466 }, { "epoch": 0.17, "grad_norm": 2.6248181854318813, "learning_rate": 0.0009512413320400847, "loss": 3.8487, "step": 1467 }, { "epoch": 0.17, "grad_norm": 1.2971342296501958, "learning_rate": 0.0009511613171058717, "loss": 3.9764, "step": 1468 }, { "epoch": 0.17, "grad_norm": 1.445263290007701, "learning_rate": 0.000951081239942555, "loss": 3.9668, "step": 1469 }, { "epoch": 0.17, "grad_norm": 1.4571797461658378, 
"learning_rate": 0.0009510011005611796, "loss": 3.9339, "step": 1470 }, { "epoch": 0.17, "grad_norm": 1.2450298431296896, "learning_rate": 0.0009509208989727992, "loss": 4.057, "step": 1471 }, { "epoch": 0.17, "grad_norm": 1.237546372047046, "learning_rate": 0.000950840635188476, "loss": 4.0864, "step": 1472 }, { "epoch": 0.17, "grad_norm": 1.4929855017389642, "learning_rate": 0.0009507603092192812, "loss": 3.9271, "step": 1473 }, { "epoch": 0.17, "grad_norm": 1.3189996569272737, "learning_rate": 0.000950679921076294, "loss": 4.1884, "step": 1474 }, { "epoch": 0.17, "grad_norm": 0.99328213896486, "learning_rate": 0.0009505994707706023, "loss": 3.9981, "step": 1475 }, { "epoch": 0.17, "grad_norm": 2.013478153612559, "learning_rate": 0.000950518958313303, "loss": 4.1527, "step": 1476 }, { "epoch": 0.17, "grad_norm": 0.9889837202721596, "learning_rate": 0.0009504383837155008, "loss": 3.9588, "step": 1477 }, { "epoch": 0.17, "grad_norm": 1.190983343959375, "learning_rate": 0.0009503577469883098, "loss": 3.8343, "step": 1478 }, { "epoch": 0.17, "grad_norm": 1.1477902413587535, "learning_rate": 0.0009502770481428525, "loss": 3.9696, "step": 1479 }, { "epoch": 0.17, "grad_norm": 1.1100575455524337, "learning_rate": 0.000950196287190259, "loss": 3.9837, "step": 1480 }, { "epoch": 0.17, "grad_norm": 1.3399869539582643, "learning_rate": 0.0009501154641416695, "loss": 4.1498, "step": 1481 }, { "epoch": 0.17, "grad_norm": 1.0409958644089108, "learning_rate": 0.0009500345790082317, "loss": 3.9849, "step": 1482 }, { "epoch": 0.17, "grad_norm": 1.2192002708950527, "learning_rate": 0.0009499536318011019, "loss": 3.8925, "step": 1483 }, { "epoch": 0.17, "grad_norm": 1.1164133077716576, "learning_rate": 0.0009498726225314458, "loss": 4.1419, "step": 1484 }, { "epoch": 0.17, "grad_norm": 0.9516042613942988, "learning_rate": 0.0009497915512104367, "loss": 4.1061, "step": 1485 }, { "epoch": 0.17, "grad_norm": 1.0761513596116465, "learning_rate": 0.0009497104178492568, "loss": 3.9046, "step": 1486 }, { "epoch": 0.17, "grad_norm": 1.1571756675128302, "learning_rate": 0.0009496292224590973, "loss": 3.9149, "step": 1487 }, { "epoch": 0.17, "grad_norm": 1.2972725958310365, "learning_rate": 0.000949547965051157, "loss": 4.124, "step": 1488 }, { "epoch": 0.17, "grad_norm": 0.872410383831955, "learning_rate": 0.0009494666456366441, "loss": 3.762, "step": 1489 }, { "epoch": 0.17, "grad_norm": 2.8477867956326395, "learning_rate": 0.0009493852642267751, "loss": 4.1682, "step": 1490 }, { "epoch": 0.17, "grad_norm": 1.0949012112852705, "learning_rate": 0.0009493038208327749, "loss": 3.8855, "step": 1491 }, { "epoch": 0.17, "grad_norm": 1.4820274398500546, "learning_rate": 0.0009492223154658773, "loss": 4.0535, "step": 1492 }, { "epoch": 0.17, "grad_norm": 1.0338407045086, "learning_rate": 0.0009491407481373241, "loss": 3.9, "step": 1493 }, { "epoch": 0.17, "grad_norm": 1.2365497846020905, "learning_rate": 0.0009490591188583661, "loss": 3.9303, "step": 1494 }, { "epoch": 0.17, "grad_norm": 1.016761854506582, "learning_rate": 0.0009489774276402625, "loss": 3.6776, "step": 1495 }, { "epoch": 0.17, "grad_norm": 1.2383822971701957, "learning_rate": 0.0009488956744942811, "loss": 3.9083, "step": 1496 }, { "epoch": 0.17, "grad_norm": 1.3734595514182755, "learning_rate": 0.0009488138594316982, "loss": 3.8552, "step": 1497 }, { "epoch": 0.17, "grad_norm": 1.1644271060663924, "learning_rate": 0.0009487319824637983, "loss": 3.9592, "step": 1498 }, { "epoch": 0.17, "grad_norm": 1.0977849033455462, "learning_rate": 
0.0009486500436018752, "loss": 4.2405, "step": 1499 }, { "epoch": 0.17, "grad_norm": 1.2055573402943967, "learning_rate": 0.0009485680428572308, "loss": 3.9396, "step": 1500 }, { "epoch": 0.17, "grad_norm": 1.1374716930789182, "learning_rate": 0.0009484859802411751, "loss": 3.8888, "step": 1501 }, { "epoch": 0.17, "grad_norm": 1.296678328768621, "learning_rate": 0.0009484038557650274, "loss": 3.9994, "step": 1502 }, { "epoch": 0.17, "grad_norm": 1.055013626944651, "learning_rate": 0.0009483216694401152, "loss": 3.8271, "step": 1503 }, { "epoch": 0.17, "grad_norm": 1.0186429480108605, "learning_rate": 0.0009482394212777745, "loss": 3.9758, "step": 1504 }, { "epoch": 0.17, "grad_norm": 1.06816243495957, "learning_rate": 0.0009481571112893498, "loss": 4.2682, "step": 1505 }, { "epoch": 0.17, "grad_norm": 1.2746938476499259, "learning_rate": 0.0009480747394861944, "loss": 3.8056, "step": 1506 }, { "epoch": 0.17, "grad_norm": 3.3143444691467647, "learning_rate": 0.0009479923058796695, "loss": 3.9706, "step": 1507 }, { "epoch": 0.17, "grad_norm": 9.560587365045166, "learning_rate": 0.0009479098104811459, "loss": 4.1207, "step": 1508 }, { "epoch": 0.17, "grad_norm": 2.9078763675290515, "learning_rate": 0.0009478272533020016, "loss": 3.8487, "step": 1509 }, { "epoch": 0.17, "grad_norm": 1.4529298475723176, "learning_rate": 0.0009477446343536241, "loss": 4.0019, "step": 1510 }, { "epoch": 0.17, "grad_norm": 1.0733672459692003, "learning_rate": 0.0009476619536474091, "loss": 3.9246, "step": 1511 }, { "epoch": 0.17, "grad_norm": 0.9352742201273678, "learning_rate": 0.0009475792111947607, "loss": 4.078, "step": 1512 }, { "epoch": 0.17, "grad_norm": 1.265932280169156, "learning_rate": 0.0009474964070070919, "loss": 4.1204, "step": 1513 }, { "epoch": 0.17, "grad_norm": 1.023218237649011, "learning_rate": 0.0009474135410958239, "loss": 3.9499, "step": 1514 }, { "epoch": 0.17, "grad_norm": 1.3532599547223871, "learning_rate": 0.0009473306134723862, "loss": 3.9292, "step": 1515 }, { "epoch": 0.17, "grad_norm": 1.2315722301854204, "learning_rate": 0.0009472476241482173, "loss": 3.9577, "step": 1516 }, { "epoch": 0.17, "grad_norm": 24.04739000407346, "learning_rate": 0.000947164573134764, "loss": 4.0181, "step": 1517 }, { "epoch": 0.17, "grad_norm": 0.9936336517979658, "learning_rate": 0.0009470814604434816, "loss": 3.9791, "step": 1518 }, { "epoch": 0.17, "grad_norm": 1.217222741629316, "learning_rate": 0.000946998286085834, "loss": 3.852, "step": 1519 }, { "epoch": 0.17, "grad_norm": 1.51117778184176, "learning_rate": 0.0009469150500732932, "loss": 4.096, "step": 1520 }, { "epoch": 0.17, "grad_norm": 1.5809599260014615, "learning_rate": 0.0009468317524173402, "loss": 4.4122, "step": 1521 }, { "epoch": 0.17, "grad_norm": 1.5046749860915247, "learning_rate": 0.0009467483931294644, "loss": 4.1132, "step": 1522 }, { "epoch": 0.17, "grad_norm": 1.3334451624224986, "learning_rate": 0.0009466649722211635, "loss": 4.2068, "step": 1523 }, { "epoch": 0.17, "grad_norm": 1.2237324082068999, "learning_rate": 0.000946581489703944, "loss": 4.1176, "step": 1524 }, { "epoch": 0.17, "grad_norm": 1.219661009942738, "learning_rate": 0.0009464979455893205, "loss": 4.1237, "step": 1525 }, { "epoch": 0.17, "grad_norm": 1.146067514462624, "learning_rate": 0.0009464143398888166, "loss": 4.1596, "step": 1526 }, { "epoch": 0.18, "grad_norm": 0.9523317630879572, "learning_rate": 0.0009463306726139638, "loss": 3.9609, "step": 1527 }, { "epoch": 0.18, "grad_norm": 0.7650553678302636, "learning_rate": 0.0009462469437763026, "loss": 
3.9253, "step": 1528 }, { "epoch": 0.18, "grad_norm": 1.2136665087837992, "learning_rate": 0.0009461631533873818, "loss": 4.1455, "step": 1529 }, { "epoch": 0.18, "grad_norm": 0.9704508737243392, "learning_rate": 0.0009460793014587585, "loss": 4.0206, "step": 1530 }, { "epoch": 0.18, "grad_norm": 0.9117749646748721, "learning_rate": 0.0009459953880019987, "loss": 4.2635, "step": 1531 }, { "epoch": 0.18, "grad_norm": 0.9277265592837262, "learning_rate": 0.0009459114130286766, "loss": 4.0095, "step": 1532 }, { "epoch": 0.18, "grad_norm": 0.9304485127432531, "learning_rate": 0.0009458273765503749, "loss": 3.9747, "step": 1533 }, { "epoch": 0.18, "grad_norm": 0.8501982683726252, "learning_rate": 0.0009457432785786848, "loss": 4.0774, "step": 1534 }, { "epoch": 0.18, "grad_norm": 0.9991426301048467, "learning_rate": 0.0009456591191252061, "loss": 3.8427, "step": 1535 }, { "epoch": 0.18, "grad_norm": 0.9804655560531712, "learning_rate": 0.0009455748982015468, "loss": 4.2069, "step": 1536 }, { "epoch": 0.18, "grad_norm": 1.5784437476664313, "learning_rate": 0.0009454906158193239, "loss": 4.0708, "step": 1537 }, { "epoch": 0.18, "grad_norm": 1.193697252983465, "learning_rate": 0.0009454062719901624, "loss": 3.789, "step": 1538 }, { "epoch": 0.18, "grad_norm": 0.8229813174629523, "learning_rate": 0.0009453218667256958, "loss": 4.0608, "step": 1539 }, { "epoch": 0.18, "grad_norm": 9.674128229081756, "learning_rate": 0.0009452374000375664, "loss": 3.8153, "step": 1540 }, { "epoch": 0.18, "grad_norm": 1.1564343287849521, "learning_rate": 0.0009451528719374245, "loss": 4.0524, "step": 1541 }, { "epoch": 0.18, "grad_norm": 1.9500107507142022, "learning_rate": 0.0009450682824369294, "loss": 4.3137, "step": 1542 }, { "epoch": 0.18, "grad_norm": 1.3586169269801127, "learning_rate": 0.0009449836315477485, "loss": 4.1483, "step": 1543 }, { "epoch": 0.18, "grad_norm": 1.193557500630441, "learning_rate": 0.0009448989192815578, "loss": 3.9929, "step": 1544 }, { "epoch": 0.18, "grad_norm": 0.9576633356336297, "learning_rate": 0.0009448141456500416, "loss": 3.7209, "step": 1545 }, { "epoch": 0.18, "grad_norm": 0.9564482486986808, "learning_rate": 0.0009447293106648931, "loss": 3.919, "step": 1546 }, { "epoch": 0.18, "grad_norm": 0.8284744779787477, "learning_rate": 0.0009446444143378134, "loss": 3.8823, "step": 1547 }, { "epoch": 0.18, "grad_norm": 1.0445801764427596, "learning_rate": 0.0009445594566805126, "loss": 4.0477, "step": 1548 }, { "epoch": 0.18, "grad_norm": 0.8123353106551443, "learning_rate": 0.0009444744377047088, "loss": 3.8422, "step": 1549 }, { "epoch": 0.18, "grad_norm": 1.0680378005644848, "learning_rate": 0.0009443893574221286, "loss": 3.9865, "step": 1550 }, { "epoch": 0.18, "grad_norm": 0.810638326995603, "learning_rate": 0.0009443042158445074, "loss": 3.9771, "step": 1551 }, { "epoch": 0.18, "grad_norm": 2.022334654841791, "learning_rate": 0.000944219012983589, "loss": 3.9238, "step": 1552 }, { "epoch": 0.18, "grad_norm": 0.8558674804976343, "learning_rate": 0.0009441337488511252, "loss": 4.0984, "step": 1553 }, { "epoch": 0.18, "grad_norm": 1.1785105958799997, "learning_rate": 0.0009440484234588766, "loss": 4.0524, "step": 1554 }, { "epoch": 0.18, "grad_norm": 0.7951909862614366, "learning_rate": 0.0009439630368186125, "loss": 3.8537, "step": 1555 }, { "epoch": 0.18, "grad_norm": 0.879575418996428, "learning_rate": 0.0009438775889421102, "loss": 4.021, "step": 1556 }, { "epoch": 0.18, "grad_norm": 0.9481200475103162, "learning_rate": 0.0009437920798411554, "loss": 4.2121, "step": 1557 }, { 
"epoch": 0.18, "grad_norm": 0.7959816321982579, "learning_rate": 0.0009437065095275429, "loss": 3.8938, "step": 1558 }, { "epoch": 0.18, "grad_norm": 0.8096393262236952, "learning_rate": 0.0009436208780130751, "loss": 3.9656, "step": 1559 }, { "epoch": 0.18, "grad_norm": 5.167593975881696, "learning_rate": 0.0009435351853095633, "loss": 4.2262, "step": 1560 }, { "epoch": 0.18, "grad_norm": 0.972610552078697, "learning_rate": 0.0009434494314288273, "loss": 3.8205, "step": 1561 }, { "epoch": 0.18, "grad_norm": 1.1002194924816127, "learning_rate": 0.0009433636163826951, "loss": 3.8968, "step": 1562 }, { "epoch": 0.18, "grad_norm": 1.0283139853241532, "learning_rate": 0.0009432777401830033, "loss": 4.0238, "step": 1563 }, { "epoch": 0.18, "grad_norm": 4.313783840843981, "learning_rate": 0.0009431918028415969, "loss": 4.0874, "step": 1564 }, { "epoch": 0.18, "grad_norm": 0.7397377527852067, "learning_rate": 0.0009431058043703293, "loss": 3.674, "step": 1565 }, { "epoch": 0.18, "grad_norm": 1.1820265790251154, "learning_rate": 0.0009430197447810625, "loss": 4.056, "step": 1566 }, { "epoch": 0.18, "grad_norm": 0.8199724368057919, "learning_rate": 0.0009429336240856662, "loss": 4.1426, "step": 1567 }, { "epoch": 0.18, "grad_norm": 0.7419548672209354, "learning_rate": 0.00094284744229602, "loss": 3.9271, "step": 1568 }, { "epoch": 0.18, "grad_norm": 0.8815043237422977, "learning_rate": 0.0009427611994240104, "loss": 4.0304, "step": 1569 }, { "epoch": 0.18, "grad_norm": 0.7904006837456864, "learning_rate": 0.0009426748954815332, "loss": 4.0589, "step": 1570 }, { "epoch": 0.18, "grad_norm": 0.8735374751290628, "learning_rate": 0.0009425885304804922, "loss": 3.7514, "step": 1571 }, { "epoch": 0.18, "grad_norm": 0.9342411924816054, "learning_rate": 0.0009425021044328, "loss": 3.9183, "step": 1572 }, { "epoch": 0.18, "grad_norm": 0.8715894308471137, "learning_rate": 0.0009424156173503772, "loss": 4.1754, "step": 1573 }, { "epoch": 0.18, "grad_norm": 0.7973900423560782, "learning_rate": 0.0009423290692451534, "loss": 4.226, "step": 1574 }, { "epoch": 0.18, "grad_norm": 0.76011519970482, "learning_rate": 0.000942242460129066, "loss": 4.0764, "step": 1575 }, { "epoch": 0.18, "grad_norm": 0.9107290131367709, "learning_rate": 0.0009421557900140612, "loss": 4.1168, "step": 1576 }, { "epoch": 0.18, "grad_norm": 0.849593226471349, "learning_rate": 0.0009420690589120932, "loss": 3.9129, "step": 1577 }, { "epoch": 0.18, "grad_norm": 1.0000657201654148, "learning_rate": 0.0009419822668351255, "loss": 3.5998, "step": 1578 }, { "epoch": 0.18, "grad_norm": 0.7279339698657166, "learning_rate": 0.0009418954137951288, "loss": 3.7973, "step": 1579 }, { "epoch": 0.18, "grad_norm": 0.8413751506774984, "learning_rate": 0.000941808499804083, "loss": 4.0201, "step": 1580 }, { "epoch": 0.18, "grad_norm": 1.8181680616477367, "learning_rate": 0.0009417215248739764, "loss": 4.0575, "step": 1581 }, { "epoch": 0.18, "grad_norm": 2.1747989231598868, "learning_rate": 0.0009416344890168054, "loss": 3.9116, "step": 1582 }, { "epoch": 0.18, "grad_norm": 2.421598318262801, "learning_rate": 0.000941547392244575, "loss": 4.0196, "step": 1583 }, { "epoch": 0.18, "grad_norm": 0.7547175061390377, "learning_rate": 0.0009414602345692984, "loss": 3.7012, "step": 1584 }, { "epoch": 0.18, "grad_norm": 2.5826797581345966, "learning_rate": 0.0009413730160029974, "loss": 3.8621, "step": 1585 }, { "epoch": 0.18, "grad_norm": 1.1070260331650765, "learning_rate": 0.0009412857365577023, "loss": 4.1316, "step": 1586 }, { "epoch": 0.18, "grad_norm": 
1.3883932107405876, "learning_rate": 0.0009411983962454515, "loss": 3.9567, "step": 1587 }, { "epoch": 0.18, "grad_norm": 0.936034317123037, "learning_rate": 0.0009411109950782919, "loss": 3.6744, "step": 1588 }, { "epoch": 0.18, "grad_norm": 0.9210368930577055, "learning_rate": 0.0009410235330682788, "loss": 3.7847, "step": 1589 }, { "epoch": 0.18, "grad_norm": 0.7362103916198269, "learning_rate": 0.0009409360102274761, "loss": 3.7751, "step": 1590 }, { "epoch": 0.18, "grad_norm": 0.8055415788013954, "learning_rate": 0.0009408484265679558, "loss": 3.8055, "step": 1591 }, { "epoch": 0.18, "grad_norm": 0.8571256899522368, "learning_rate": 0.0009407607821017983, "loss": 3.8728, "step": 1592 }, { "epoch": 0.18, "grad_norm": 0.8064263379105193, "learning_rate": 0.0009406730768410927, "loss": 3.8357, "step": 1593 }, { "epoch": 0.18, "grad_norm": 0.8946668171330434, "learning_rate": 0.0009405853107979361, "loss": 3.8633, "step": 1594 }, { "epoch": 0.18, "grad_norm": 0.9536201270201781, "learning_rate": 0.0009404974839844341, "loss": 3.9501, "step": 1595 }, { "epoch": 0.18, "grad_norm": 0.8312478199505631, "learning_rate": 0.0009404095964127008, "loss": 3.8984, "step": 1596 }, { "epoch": 0.18, "grad_norm": 1.078526366735139, "learning_rate": 0.0009403216480948589, "loss": 4.0352, "step": 1597 }, { "epoch": 0.18, "grad_norm": 1.3225514410492496, "learning_rate": 0.0009402336390430388, "loss": 3.8265, "step": 1598 }, { "epoch": 0.18, "grad_norm": 0.8197474651758992, "learning_rate": 0.0009401455692693798, "loss": 3.9119, "step": 1599 }, { "epoch": 0.18, "grad_norm": 0.92057023464428, "learning_rate": 0.0009400574387860294, "loss": 4.0379, "step": 1600 }, { "epoch": 0.18, "grad_norm": 0.8132215355699012, "learning_rate": 0.0009399692476051436, "loss": 3.7861, "step": 1601 }, { "epoch": 0.18, "grad_norm": 0.833548101742673, "learning_rate": 0.0009398809957388868, "loss": 3.8447, "step": 1602 }, { "epoch": 0.18, "grad_norm": 3.8332069686852757, "learning_rate": 0.0009397926831994314, "loss": 4.0532, "step": 1603 }, { "epoch": 0.18, "grad_norm": 0.9185271456748192, "learning_rate": 0.0009397043099989587, "loss": 3.8971, "step": 1604 }, { "epoch": 0.18, "grad_norm": 0.9425320342615834, "learning_rate": 0.0009396158761496577, "loss": 3.9502, "step": 1605 }, { "epoch": 0.18, "grad_norm": 1.404888821930948, "learning_rate": 0.0009395273816637267, "loss": 4.0818, "step": 1606 }, { "epoch": 0.18, "grad_norm": 0.8844218704775557, "learning_rate": 0.0009394388265533713, "loss": 4.0767, "step": 1607 }, { "epoch": 0.18, "grad_norm": 0.8528465410415034, "learning_rate": 0.0009393502108308064, "loss": 3.9177, "step": 1608 }, { "epoch": 0.18, "grad_norm": 0.7899627795532184, "learning_rate": 0.0009392615345082547, "loss": 3.9613, "step": 1609 }, { "epoch": 0.18, "grad_norm": 0.9213105646111837, "learning_rate": 0.0009391727975979474, "loss": 4.0821, "step": 1610 }, { "epoch": 0.18, "grad_norm": 0.8460076098727417, "learning_rate": 0.0009390840001121239, "loss": 3.8884, "step": 1611 }, { "epoch": 0.18, "grad_norm": 0.9467796437566335, "learning_rate": 0.0009389951420630325, "loss": 3.9064, "step": 1612 }, { "epoch": 0.18, "grad_norm": 0.8273534642521473, "learning_rate": 0.0009389062234629292, "loss": 3.8386, "step": 1613 }, { "epoch": 0.19, "grad_norm": 0.8297470549250323, "learning_rate": 0.0009388172443240788, "loss": 3.7848, "step": 1614 }, { "epoch": 0.19, "grad_norm": 1.7988434522509076, "learning_rate": 0.0009387282046587539, "loss": 3.7489, "step": 1615 }, { "epoch": 0.19, "grad_norm": 0.7861922341286257, 
"learning_rate": 0.0009386391044792363, "loss": 3.9704, "step": 1616 }, { "epoch": 0.19, "grad_norm": 0.6713201587247575, "learning_rate": 0.0009385499437978153, "loss": 3.8542, "step": 1617 }, { "epoch": 0.19, "grad_norm": 0.7783170485905523, "learning_rate": 0.0009384607226267891, "loss": 4.139, "step": 1618 }, { "epoch": 0.19, "grad_norm": 0.7969057870169202, "learning_rate": 0.0009383714409784643, "loss": 3.6903, "step": 1619 }, { "epoch": 0.19, "grad_norm": 0.8276175065589113, "learning_rate": 0.000938282098865155, "loss": 3.9059, "step": 1620 }, { "epoch": 0.19, "grad_norm": 0.8181316850078265, "learning_rate": 0.0009381926962991847, "loss": 3.9939, "step": 1621 }, { "epoch": 0.19, "grad_norm": 0.7002785078678915, "learning_rate": 0.0009381032332928847, "loss": 3.7722, "step": 1622 }, { "epoch": 0.19, "grad_norm": 0.9223497190758715, "learning_rate": 0.0009380137098585946, "loss": 4.1211, "step": 1623 }, { "epoch": 0.19, "grad_norm": 0.7529287177545222, "learning_rate": 0.0009379241260086626, "loss": 3.9051, "step": 1624 }, { "epoch": 0.19, "grad_norm": 0.8076963284551374, "learning_rate": 0.0009378344817554449, "loss": 3.8456, "step": 1625 }, { "epoch": 0.19, "grad_norm": 0.7686064022008927, "learning_rate": 0.0009377447771113065, "loss": 3.8874, "step": 1626 }, { "epoch": 0.19, "grad_norm": 0.7493614990883468, "learning_rate": 0.0009376550120886203, "loss": 3.9097, "step": 1627 }, { "epoch": 0.19, "grad_norm": 0.8736190721445515, "learning_rate": 0.0009375651866997674, "loss": 3.7398, "step": 1628 }, { "epoch": 0.19, "grad_norm": 0.8490897050280988, "learning_rate": 0.0009374753009571379, "loss": 3.8893, "step": 1629 }, { "epoch": 0.19, "grad_norm": 0.7491797176064796, "learning_rate": 0.0009373853548731297, "loss": 3.8386, "step": 1630 }, { "epoch": 0.19, "grad_norm": 1.1272079851857126, "learning_rate": 0.000937295348460149, "loss": 4.0181, "step": 1631 }, { "epoch": 0.19, "grad_norm": 0.7333622769505543, "learning_rate": 0.0009372052817306106, "loss": 3.8798, "step": 1632 }, { "epoch": 0.19, "grad_norm": 0.9629933054130179, "learning_rate": 0.0009371151546969376, "loss": 3.8187, "step": 1633 }, { "epoch": 0.19, "grad_norm": 0.753777036520204, "learning_rate": 0.0009370249673715611, "loss": 3.9808, "step": 1634 }, { "epoch": 0.19, "grad_norm": 0.9179589990776899, "learning_rate": 0.0009369347197669207, "loss": 3.9414, "step": 1635 }, { "epoch": 0.19, "grad_norm": 0.8206318918357529, "learning_rate": 0.0009368444118954646, "loss": 3.9135, "step": 1636 }, { "epoch": 0.19, "grad_norm": 0.9581877896806545, "learning_rate": 0.0009367540437696489, "loss": 4.0041, "step": 1637 }, { "epoch": 0.19, "grad_norm": 1.0497348716078656, "learning_rate": 0.0009366636154019381, "loss": 4.0523, "step": 1638 }, { "epoch": 0.19, "grad_norm": 0.8960927057628982, "learning_rate": 0.0009365731268048052, "loss": 4.068, "step": 1639 }, { "epoch": 0.19, "grad_norm": 1.952542194081992, "learning_rate": 0.0009364825779907311, "loss": 3.9961, "step": 1640 }, { "epoch": 0.19, "grad_norm": 0.9146892852264598, "learning_rate": 0.0009363919689722056, "loss": 3.8575, "step": 1641 }, { "epoch": 0.19, "grad_norm": 0.9114499760732222, "learning_rate": 0.0009363012997617264, "loss": 3.8914, "step": 1642 }, { "epoch": 0.19, "grad_norm": 0.829714448292901, "learning_rate": 0.0009362105703717994, "loss": 3.9217, "step": 1643 }, { "epoch": 0.19, "grad_norm": 0.8082979094520754, "learning_rate": 0.0009361197808149393, "loss": 3.9194, "step": 1644 }, { "epoch": 0.19, "grad_norm": 2.0337575180932346, "learning_rate": 
0.0009360289311036688, "loss": 3.8969, "step": 1645 }, { "epoch": 0.19, "grad_norm": 1.4726775104047347, "learning_rate": 0.0009359380212505184, "loss": 3.8682, "step": 1646 }, { "epoch": 0.19, "grad_norm": 0.9743521122422942, "learning_rate": 0.0009358470512680278, "loss": 3.8105, "step": 1647 }, { "epoch": 0.19, "grad_norm": 2.090128331060611, "learning_rate": 0.0009357560211687445, "loss": 3.9929, "step": 1648 }, { "epoch": 0.19, "grad_norm": 0.9535019585618488, "learning_rate": 0.0009356649309652243, "loss": 4.0109, "step": 1649 }, { "epoch": 0.19, "grad_norm": 6.797417280233116, "learning_rate": 0.0009355737806700315, "loss": 3.8693, "step": 1650 }, { "epoch": 0.19, "grad_norm": 0.8577025814415281, "learning_rate": 0.0009354825702957383, "loss": 3.9917, "step": 1651 }, { "epoch": 0.19, "grad_norm": 0.8604061744666929, "learning_rate": 0.0009353912998549259, "loss": 3.7423, "step": 1652 }, { "epoch": 0.19, "grad_norm": 0.762141008956544, "learning_rate": 0.0009352999693601827, "loss": 3.7144, "step": 1653 }, { "epoch": 0.19, "grad_norm": 0.7781953832937091, "learning_rate": 0.0009352085788241064, "loss": 3.9071, "step": 1654 }, { "epoch": 0.19, "grad_norm": 0.9058494095421323, "learning_rate": 0.0009351171282593026, "loss": 3.9115, "step": 1655 }, { "epoch": 0.19, "grad_norm": 0.7667131026365983, "learning_rate": 0.0009350256176783847, "loss": 3.7106, "step": 1656 }, { "epoch": 0.19, "grad_norm": 0.780151354407763, "learning_rate": 0.0009349340470939753, "loss": 3.9903, "step": 1657 }, { "epoch": 0.19, "grad_norm": 0.8538957671923801, "learning_rate": 0.0009348424165187049, "loss": 3.9689, "step": 1658 }, { "epoch": 0.19, "grad_norm": 1.0701679972706033, "learning_rate": 0.0009347507259652119, "loss": 3.9973, "step": 1659 }, { "epoch": 0.19, "grad_norm": 0.8249055808082802, "learning_rate": 0.0009346589754461433, "loss": 4.0428, "step": 1660 }, { "epoch": 0.19, "grad_norm": 0.9436817844502606, "learning_rate": 0.0009345671649741545, "loss": 3.9746, "step": 1661 }, { "epoch": 0.19, "grad_norm": 1.0399213961447786, "learning_rate": 0.0009344752945619089, "loss": 3.6444, "step": 1662 }, { "epoch": 0.19, "grad_norm": 0.8929596272267862, "learning_rate": 0.0009343833642220781, "loss": 3.9582, "step": 1663 }, { "epoch": 0.19, "grad_norm": 0.8093334227261171, "learning_rate": 0.0009342913739673424, "loss": 4.0904, "step": 1664 }, { "epoch": 0.19, "grad_norm": 0.9219805722206545, "learning_rate": 0.00093419932381039, "loss": 4.1267, "step": 1665 }, { "epoch": 0.19, "grad_norm": 0.7532023837495229, "learning_rate": 0.0009341072137639175, "loss": 3.8564, "step": 1666 }, { "epoch": 0.19, "grad_norm": 0.7779195152595106, "learning_rate": 0.0009340150438406296, "loss": 3.9052, "step": 1667 }, { "epoch": 0.19, "grad_norm": 0.7457270737522609, "learning_rate": 0.0009339228140532396, "loss": 3.815, "step": 1668 }, { "epoch": 0.19, "grad_norm": 0.7647594144786466, "learning_rate": 0.0009338305244144687, "loss": 4.0877, "step": 1669 }, { "epoch": 0.19, "grad_norm": 0.7021595546337704, "learning_rate": 0.0009337381749370463, "loss": 4.1293, "step": 1670 }, { "epoch": 0.19, "grad_norm": 0.906568690032215, "learning_rate": 0.0009336457656337108, "loss": 4.0644, "step": 1671 }, { "epoch": 0.19, "grad_norm": 0.7012265097192134, "learning_rate": 0.0009335532965172079, "loss": 4.1072, "step": 1672 }, { "epoch": 0.19, "grad_norm": 0.7732403071715452, "learning_rate": 0.0009334607676002919, "loss": 3.9331, "step": 1673 }, { "epoch": 0.19, "grad_norm": 0.7923006863552206, "learning_rate": 0.0009333681788957256, 
"loss": 3.6242, "step": 1674 }, { "epoch": 0.19, "grad_norm": 0.8032684546739858, "learning_rate": 0.0009332755304162798, "loss": 3.7891, "step": 1675 }, { "epoch": 0.19, "grad_norm": 0.7222815984055483, "learning_rate": 0.0009331828221747335, "loss": 3.7836, "step": 1676 }, { "epoch": 0.19, "grad_norm": 0.9917444213289307, "learning_rate": 0.0009330900541838741, "loss": 4.0377, "step": 1677 }, { "epoch": 0.19, "grad_norm": 0.8171278402653075, "learning_rate": 0.0009329972264564972, "loss": 4.1606, "step": 1678 }, { "epoch": 0.19, "grad_norm": 0.7672392564253656, "learning_rate": 0.0009329043390054066, "loss": 3.9218, "step": 1679 }, { "epoch": 0.19, "grad_norm": 0.7584724642661171, "learning_rate": 0.0009328113918434142, "loss": 4.0897, "step": 1680 }, { "epoch": 0.19, "grad_norm": 0.7724129285953324, "learning_rate": 0.0009327183849833406, "loss": 4.0097, "step": 1681 }, { "epoch": 0.19, "grad_norm": 0.7507744463502709, "learning_rate": 0.0009326253184380141, "loss": 3.821, "step": 1682 }, { "epoch": 0.19, "grad_norm": 1.4669691118757309, "learning_rate": 0.0009325321922202716, "loss": 3.9893, "step": 1683 }, { "epoch": 0.19, "grad_norm": 0.7938763496320073, "learning_rate": 0.0009324390063429578, "loss": 3.9173, "step": 1684 }, { "epoch": 0.19, "grad_norm": 0.7643137269748055, "learning_rate": 0.0009323457608189263, "loss": 3.8917, "step": 1685 }, { "epoch": 0.19, "grad_norm": 0.8344994935118849, "learning_rate": 0.0009322524556610384, "loss": 3.8858, "step": 1686 }, { "epoch": 0.19, "grad_norm": 0.7922495896139837, "learning_rate": 0.0009321590908821635, "loss": 3.855, "step": 1687 }, { "epoch": 0.19, "grad_norm": 0.7318636723197567, "learning_rate": 0.0009320656664951797, "loss": 3.7846, "step": 1688 }, { "epoch": 0.19, "grad_norm": 0.8048885033748879, "learning_rate": 0.0009319721825129734, "loss": 3.9843, "step": 1689 }, { "epoch": 0.19, "grad_norm": 1.6788426578606397, "learning_rate": 0.0009318786389484383, "loss": 3.8998, "step": 1690 }, { "epoch": 0.19, "grad_norm": 1.1904636374596727, "learning_rate": 0.0009317850358144778, "loss": 3.844, "step": 1691 }, { "epoch": 0.19, "grad_norm": 0.8615193138836781, "learning_rate": 0.0009316913731240018, "loss": 3.9595, "step": 1692 }, { "epoch": 0.19, "grad_norm": 0.8742236947980859, "learning_rate": 0.0009315976508899298, "loss": 4.0222, "step": 1693 }, { "epoch": 0.19, "grad_norm": 0.8087282051723653, "learning_rate": 0.0009315038691251887, "loss": 4.2426, "step": 1694 }, { "epoch": 0.19, "grad_norm": 0.7926203782658983, "learning_rate": 0.0009314100278427143, "loss": 4.0534, "step": 1695 }, { "epoch": 0.19, "grad_norm": 0.8359905977572799, "learning_rate": 0.0009313161270554498, "loss": 3.8151, "step": 1696 }, { "epoch": 0.19, "grad_norm": 11.604295267328535, "learning_rate": 0.0009312221667763472, "loss": 3.8582, "step": 1697 }, { "epoch": 0.19, "grad_norm": 0.8537436856084206, "learning_rate": 0.0009311281470183667, "loss": 3.9817, "step": 1698 }, { "epoch": 0.19, "grad_norm": 0.8062037709247555, "learning_rate": 0.0009310340677944762, "loss": 3.629, "step": 1699 }, { "epoch": 0.19, "grad_norm": 0.8393436748602189, "learning_rate": 0.0009309399291176524, "loss": 3.7647, "step": 1700 }, { "epoch": 0.2, "grad_norm": 0.8116359873529164, "learning_rate": 0.0009308457310008798, "loss": 3.7836, "step": 1701 }, { "epoch": 0.2, "grad_norm": 0.7517490744134788, "learning_rate": 0.0009307514734571514, "loss": 3.6888, "step": 1702 }, { "epoch": 0.2, "grad_norm": 0.8253786834923303, "learning_rate": 0.0009306571564994679, "loss": 3.8984, "step": 
1703 }, { "epoch": 0.2, "grad_norm": 0.8015326217350007, "learning_rate": 0.000930562780140839, "loss": 4.1322, "step": 1704 }, { "epoch": 0.2, "grad_norm": 0.7858679107150502, "learning_rate": 0.0009304683443942816, "loss": 3.8469, "step": 1705 }, { "epoch": 0.2, "grad_norm": 0.8218888911618002, "learning_rate": 0.0009303738492728216, "loss": 4.2129, "step": 1706 }, { "epoch": 0.2, "grad_norm": 0.843649693780184, "learning_rate": 0.000930279294789493, "loss": 4.0119, "step": 1707 }, { "epoch": 0.2, "grad_norm": 0.7257506556096036, "learning_rate": 0.0009301846809573373, "loss": 3.9074, "step": 1708 }, { "epoch": 0.2, "grad_norm": 0.9542468769680683, "learning_rate": 0.000930090007789405, "loss": 3.986, "step": 1709 }, { "epoch": 0.2, "grad_norm": 0.7997410615586994, "learning_rate": 0.0009299952752987544, "loss": 4.0993, "step": 1710 }, { "epoch": 0.2, "grad_norm": 0.8987492700026134, "learning_rate": 0.0009299004834984519, "loss": 3.948, "step": 1711 }, { "epoch": 0.2, "grad_norm": 0.8415929862194048, "learning_rate": 0.0009298056324015724, "loss": 4.0483, "step": 1712 }, { "epoch": 0.2, "grad_norm": 0.8261529669197057, "learning_rate": 0.0009297107220211988, "loss": 3.7812, "step": 1713 }, { "epoch": 0.2, "grad_norm": 0.9515629041107165, "learning_rate": 0.0009296157523704223, "loss": 3.9395, "step": 1714 }, { "epoch": 0.2, "grad_norm": 0.7455512850288687, "learning_rate": 0.0009295207234623418, "loss": 3.802, "step": 1715 }, { "epoch": 0.2, "grad_norm": 0.7662096731200948, "learning_rate": 0.000929425635310065, "loss": 3.8961, "step": 1716 }, { "epoch": 0.2, "grad_norm": 0.855663163393645, "learning_rate": 0.0009293304879267073, "loss": 4.1121, "step": 1717 }, { "epoch": 0.2, "grad_norm": 0.8799383891984581, "learning_rate": 0.0009292352813253926, "loss": 3.7898, "step": 1718 }, { "epoch": 0.2, "grad_norm": 0.7783464561415505, "learning_rate": 0.0009291400155192528, "loss": 3.9871, "step": 1719 }, { "epoch": 0.2, "grad_norm": 0.9576272483913795, "learning_rate": 0.0009290446905214281, "loss": 3.9688, "step": 1720 }, { "epoch": 0.2, "grad_norm": 0.7261910084645548, "learning_rate": 0.0009289493063450666, "loss": 3.7884, "step": 1721 }, { "epoch": 0.2, "grad_norm": 0.7791356841865663, "learning_rate": 0.0009288538630033247, "loss": 3.8566, "step": 1722 }, { "epoch": 0.2, "grad_norm": 0.8966443865678572, "learning_rate": 0.0009287583605093674, "loss": 3.8291, "step": 1723 }, { "epoch": 0.2, "grad_norm": 0.7594350827599745, "learning_rate": 0.000928662798876367, "loss": 3.878, "step": 1724 }, { "epoch": 0.2, "grad_norm": 0.6998314455870827, "learning_rate": 0.0009285671781175045, "loss": 3.7937, "step": 1725 }, { "epoch": 0.2, "grad_norm": 0.7538798918095796, "learning_rate": 0.000928471498245969, "loss": 3.9407, "step": 1726 }, { "epoch": 0.2, "grad_norm": 0.8949359104559965, "learning_rate": 0.0009283757592749577, "loss": 3.6857, "step": 1727 }, { "epoch": 0.2, "grad_norm": 0.997048592744118, "learning_rate": 0.0009282799612176762, "loss": 3.8139, "step": 1728 }, { "epoch": 0.2, "grad_norm": 0.7096870755998815, "learning_rate": 0.0009281841040873376, "loss": 3.9418, "step": 1729 }, { "epoch": 0.2, "grad_norm": 1.4256473461416777, "learning_rate": 0.0009280881878971637, "loss": 4.0744, "step": 1730 }, { "epoch": 0.2, "grad_norm": 0.7891220120543624, "learning_rate": 0.0009279922126603846, "loss": 3.7633, "step": 1731 }, { "epoch": 0.2, "grad_norm": 1.8120797148426995, "learning_rate": 0.000927896178390238, "loss": 3.8918, "step": 1732 }, { "epoch": 0.2, "grad_norm": 2.8351171858637474, 
"learning_rate": 0.0009278000850999699, "loss": 3.7812, "step": 1733 }, { "epoch": 0.2, "grad_norm": 0.8344408713837536, "learning_rate": 0.0009277039328028347, "loss": 3.7804, "step": 1734 }, { "epoch": 0.2, "grad_norm": 0.931313187750291, "learning_rate": 0.0009276077215120949, "loss": 3.9961, "step": 1735 }, { "epoch": 0.2, "grad_norm": 1.1476245919951218, "learning_rate": 0.0009275114512410208, "loss": 4.0585, "step": 1736 }, { "epoch": 0.2, "grad_norm": 0.7535830476151243, "learning_rate": 0.000927415122002891, "loss": 3.8156, "step": 1737 }, { "epoch": 0.2, "grad_norm": 0.9351259409107812, "learning_rate": 0.0009273187338109925, "loss": 3.8141, "step": 1738 }, { "epoch": 0.2, "grad_norm": 0.9441794320389927, "learning_rate": 0.0009272222866786201, "loss": 3.8083, "step": 1739 }, { "epoch": 0.2, "grad_norm": 1.1663459174316086, "learning_rate": 0.0009271257806190769, "loss": 3.766, "step": 1740 }, { "epoch": 0.2, "grad_norm": 0.7962866347647475, "learning_rate": 0.0009270292156456738, "loss": 3.7229, "step": 1741 }, { "epoch": 0.2, "grad_norm": 11.616583877491955, "learning_rate": 0.0009269325917717306, "loss": 3.9633, "step": 1742 }, { "epoch": 0.2, "grad_norm": 1.0458903472976837, "learning_rate": 0.0009268359090105743, "loss": 3.9312, "step": 1743 }, { "epoch": 0.2, "grad_norm": 0.7778326497556163, "learning_rate": 0.0009267391673755405, "loss": 3.8247, "step": 1744 }, { "epoch": 0.2, "grad_norm": 0.9819921840205279, "learning_rate": 0.0009266423668799731, "loss": 3.8534, "step": 1745 }, { "epoch": 0.2, "grad_norm": 0.8671874484727672, "learning_rate": 0.0009265455075372237, "loss": 3.8799, "step": 1746 }, { "epoch": 0.2, "grad_norm": 0.8489935037793109, "learning_rate": 0.0009264485893606523, "loss": 4.106, "step": 1747 }, { "epoch": 0.2, "grad_norm": 0.971229627090724, "learning_rate": 0.0009263516123636267, "loss": 3.9358, "step": 1748 }, { "epoch": 0.2, "grad_norm": 0.8045513352178814, "learning_rate": 0.0009262545765595232, "loss": 4.1579, "step": 1749 }, { "epoch": 0.2, "grad_norm": 1.065910225156871, "learning_rate": 0.000926157481961726, "loss": 3.8547, "step": 1750 }, { "epoch": 0.2, "grad_norm": 0.9454332248180906, "learning_rate": 0.0009260603285836276, "loss": 3.8257, "step": 1751 }, { "epoch": 0.2, "grad_norm": 0.9348987501065008, "learning_rate": 0.0009259631164386282, "loss": 3.9301, "step": 1752 }, { "epoch": 0.2, "grad_norm": 0.8073237945709126, "learning_rate": 0.0009258658455401365, "loss": 4.0541, "step": 1753 }, { "epoch": 0.2, "grad_norm": 0.846591835454308, "learning_rate": 0.0009257685159015692, "loss": 3.927, "step": 1754 }, { "epoch": 0.2, "grad_norm": 0.8831930649897933, "learning_rate": 0.0009256711275363509, "loss": 4.0913, "step": 1755 }, { "epoch": 0.2, "grad_norm": 0.8146969547146948, "learning_rate": 0.0009255736804579147, "loss": 4.2306, "step": 1756 }, { "epoch": 0.2, "grad_norm": 0.859883479624131, "learning_rate": 0.0009254761746797013, "loss": 3.9352, "step": 1757 }, { "epoch": 0.2, "grad_norm": 0.791658449959038, "learning_rate": 0.0009253786102151602, "loss": 3.9185, "step": 1758 }, { "epoch": 0.2, "grad_norm": 0.7992737629348875, "learning_rate": 0.0009252809870777481, "loss": 3.9275, "step": 1759 }, { "epoch": 0.2, "grad_norm": 0.823155707534069, "learning_rate": 0.0009251833052809304, "loss": 3.857, "step": 1760 }, { "epoch": 0.2, "grad_norm": 0.7476570237676693, "learning_rate": 0.0009250855648381805, "loss": 4.0202, "step": 1761 }, { "epoch": 0.2, "grad_norm": 0.8925801737834225, "learning_rate": 0.0009249877657629799, "loss": 3.9271, 
"step": 1762 }, { "epoch": 0.2, "grad_norm": 0.8735460533612577, "learning_rate": 0.0009248899080688178, "loss": 3.9404, "step": 1763 }, { "epoch": 0.2, "grad_norm": 0.8394179098760687, "learning_rate": 0.0009247919917691923, "loss": 3.8906, "step": 1764 }, { "epoch": 0.2, "grad_norm": 0.6953944212479741, "learning_rate": 0.0009246940168776086, "loss": 3.8831, "step": 1765 }, { "epoch": 0.2, "grad_norm": 0.7875327213678065, "learning_rate": 0.0009245959834075807, "loss": 3.9056, "step": 1766 }, { "epoch": 0.2, "grad_norm": 0.7431720907168309, "learning_rate": 0.0009244978913726304, "loss": 3.8732, "step": 1767 }, { "epoch": 0.2, "grad_norm": 0.7919507758347588, "learning_rate": 0.0009243997407862878, "loss": 3.7194, "step": 1768 }, { "epoch": 0.2, "grad_norm": 0.7550298797389087, "learning_rate": 0.0009243015316620906, "loss": 3.9597, "step": 1769 }, { "epoch": 0.2, "grad_norm": 0.7688099175732305, "learning_rate": 0.0009242032640135852, "loss": 3.839, "step": 1770 }, { "epoch": 0.2, "grad_norm": 0.7923743287095547, "learning_rate": 0.0009241049378543254, "loss": 4.0319, "step": 1771 }, { "epoch": 0.2, "grad_norm": 0.8016773094046883, "learning_rate": 0.0009240065531978736, "loss": 3.9313, "step": 1772 }, { "epoch": 0.2, "grad_norm": 0.7245499745674537, "learning_rate": 0.0009239081100578002, "loss": 4.0616, "step": 1773 }, { "epoch": 0.2, "grad_norm": 0.7349379964966979, "learning_rate": 0.0009238096084476832, "loss": 3.8669, "step": 1774 }, { "epoch": 0.2, "grad_norm": 0.8883865150224716, "learning_rate": 0.0009237110483811096, "loss": 3.927, "step": 1775 }, { "epoch": 0.2, "grad_norm": 0.6844550126018027, "learning_rate": 0.0009236124298716734, "loss": 3.8708, "step": 1776 }, { "epoch": 0.2, "grad_norm": 0.8989022935128861, "learning_rate": 0.0009235137529329772, "loss": 4.0256, "step": 1777 }, { "epoch": 0.2, "grad_norm": 0.7477214716622635, "learning_rate": 0.0009234150175786318, "loss": 4.0495, "step": 1778 }, { "epoch": 0.2, "grad_norm": 0.8517445964292621, "learning_rate": 0.0009233162238222556, "loss": 3.8835, "step": 1779 }, { "epoch": 0.2, "grad_norm": 0.7899104988648538, "learning_rate": 0.0009232173716774757, "loss": 3.8527, "step": 1780 }, { "epoch": 0.2, "grad_norm": 0.7808154590149765, "learning_rate": 0.0009231184611579265, "loss": 4.074, "step": 1781 }, { "epoch": 0.2, "grad_norm": 0.753827120218923, "learning_rate": 0.000923019492277251, "loss": 3.9111, "step": 1782 }, { "epoch": 0.2, "grad_norm": 0.7511817397495992, "learning_rate": 0.0009229204650491001, "loss": 3.7422, "step": 1783 }, { "epoch": 0.2, "grad_norm": 0.7311636094782424, "learning_rate": 0.0009228213794871325, "loss": 3.9425, "step": 1784 }, { "epoch": 0.2, "grad_norm": 0.8130288451187192, "learning_rate": 0.0009227222356050154, "loss": 3.9246, "step": 1785 }, { "epoch": 0.2, "grad_norm": 0.6859527839557056, "learning_rate": 0.0009226230334164236, "loss": 3.9071, "step": 1786 }, { "epoch": 0.2, "grad_norm": 0.6554642927013951, "learning_rate": 0.0009225237729350403, "loss": 3.8776, "step": 1787 }, { "epoch": 0.21, "grad_norm": 0.6799951737970095, "learning_rate": 0.0009224244541745566, "loss": 3.8753, "step": 1788 }, { "epoch": 0.21, "grad_norm": 1.6646520844313295, "learning_rate": 0.0009223250771486717, "loss": 4.1116, "step": 1789 }, { "epoch": 0.21, "grad_norm": 0.7719803771986256, "learning_rate": 0.0009222256418710923, "loss": 3.8908, "step": 1790 }, { "epoch": 0.21, "grad_norm": 0.6406067536316352, "learning_rate": 0.0009221261483555343, "loss": 3.7226, "step": 1791 }, { "epoch": 0.21, "grad_norm": 
0.6580578428348078, "learning_rate": 0.0009220265966157205, "loss": 3.9598, "step": 1792 }, { "epoch": 0.21, "grad_norm": 0.8482858376155865, "learning_rate": 0.0009219269866653823, "loss": 4.0739, "step": 1793 }, { "epoch": 0.21, "grad_norm": 0.6123555971166764, "learning_rate": 0.0009218273185182588, "loss": 3.7107, "step": 1794 }, { "epoch": 0.21, "grad_norm": 0.7444375156775016, "learning_rate": 0.0009217275921880976, "loss": 3.9803, "step": 1795 }, { "epoch": 0.21, "grad_norm": 0.7530992421856789, "learning_rate": 0.000921627807688654, "loss": 4.1292, "step": 1796 }, { "epoch": 0.21, "grad_norm": 0.8191054366305114, "learning_rate": 0.0009215279650336911, "loss": 4.0176, "step": 1797 }, { "epoch": 0.21, "grad_norm": 0.8438937131357908, "learning_rate": 0.0009214280642369806, "loss": 3.9014, "step": 1798 }, { "epoch": 0.21, "grad_norm": 0.7271331352492184, "learning_rate": 0.0009213281053123018, "loss": 3.7445, "step": 1799 }, { "epoch": 0.21, "grad_norm": 0.8140841300279414, "learning_rate": 0.000921228088273442, "loss": 3.8131, "step": 1800 }, { "epoch": 0.21, "grad_norm": 0.8031924890203358, "learning_rate": 0.0009211280131341968, "loss": 3.8962, "step": 1801 }, { "epoch": 0.21, "grad_norm": 0.8874699153381481, "learning_rate": 0.0009210278799083695, "loss": 4.0392, "step": 1802 }, { "epoch": 0.21, "grad_norm": 0.7355808677346425, "learning_rate": 0.000920927688609772, "loss": 3.9477, "step": 1803 }, { "epoch": 0.21, "grad_norm": 0.8743984453870507, "learning_rate": 0.0009208274392522231, "loss": 3.736, "step": 1804 }, { "epoch": 0.21, "grad_norm": 0.8879938107849425, "learning_rate": 0.0009207271318495509, "loss": 3.8183, "step": 1805 }, { "epoch": 0.21, "grad_norm": 0.7651803721305752, "learning_rate": 0.0009206267664155906, "loss": 3.8609, "step": 1806 }, { "epoch": 0.21, "grad_norm": 1.0002119888770291, "learning_rate": 0.0009205263429641857, "loss": 3.9593, "step": 1807 }, { "epoch": 0.21, "grad_norm": 0.755914410267963, "learning_rate": 0.0009204258615091879, "loss": 4.0576, "step": 1808 }, { "epoch": 0.21, "grad_norm": 0.707192874180788, "learning_rate": 0.0009203253220644564, "loss": 3.9897, "step": 1809 }, { "epoch": 0.21, "grad_norm": 0.902495213379042, "learning_rate": 0.0009202247246438589, "loss": 3.7554, "step": 1810 }, { "epoch": 0.21, "grad_norm": 0.8281844281527484, "learning_rate": 0.0009201240692612708, "loss": 4.0296, "step": 1811 }, { "epoch": 0.21, "grad_norm": 0.7687804425058603, "learning_rate": 0.0009200233559305758, "loss": 3.6559, "step": 1812 }, { "epoch": 0.21, "grad_norm": 0.8466753724426532, "learning_rate": 0.0009199225846656649, "loss": 3.9031, "step": 1813 }, { "epoch": 0.21, "grad_norm": 0.7624971298739351, "learning_rate": 0.0009198217554804382, "loss": 3.8011, "step": 1814 }, { "epoch": 0.21, "grad_norm": 1.014555041063583, "learning_rate": 0.0009197208683888028, "loss": 3.7543, "step": 1815 }, { "epoch": 0.21, "grad_norm": 0.9639278815623971, "learning_rate": 0.0009196199234046741, "loss": 3.9144, "step": 1816 }, { "epoch": 0.21, "grad_norm": 0.8192233948572364, "learning_rate": 0.0009195189205419757, "loss": 3.635, "step": 1817 }, { "epoch": 0.21, "grad_norm": 0.672990505689812, "learning_rate": 0.000919417859814639, "loss": 4.1342, "step": 1818 }, { "epoch": 0.21, "grad_norm": 1.1506955777088501, "learning_rate": 0.0009193167412366034, "loss": 4.0161, "step": 1819 }, { "epoch": 0.21, "grad_norm": 0.8265074984790751, "learning_rate": 0.0009192155648218162, "loss": 3.742, "step": 1820 }, { "epoch": 0.21, "grad_norm": 0.7634441509013512, 
"learning_rate": 0.0009191143305842329, "loss": 3.7862, "step": 1821 }, { "epoch": 0.21, "grad_norm": 0.8291031773683417, "learning_rate": 0.0009190130385378166, "loss": 3.816, "step": 1822 }, { "epoch": 0.21, "grad_norm": 0.8230051077879045, "learning_rate": 0.0009189116886965388, "loss": 3.7654, "step": 1823 }, { "epoch": 0.21, "grad_norm": 0.7790386666004395, "learning_rate": 0.0009188102810743788, "loss": 3.9439, "step": 1824 }, { "epoch": 0.21, "grad_norm": 0.9363530618420868, "learning_rate": 0.0009187088156853236, "loss": 3.7313, "step": 1825 }, { "epoch": 0.21, "grad_norm": 0.7081668138081271, "learning_rate": 0.0009186072925433689, "loss": 3.955, "step": 1826 }, { "epoch": 0.21, "grad_norm": 0.7781707783032774, "learning_rate": 0.0009185057116625172, "loss": 3.8381, "step": 1827 }, { "epoch": 0.21, "grad_norm": 1.0191013808768887, "learning_rate": 0.0009184040730567803, "loss": 3.9728, "step": 1828 }, { "epoch": 0.21, "grad_norm": 0.7789452398353326, "learning_rate": 0.0009183023767401769, "loss": 3.7614, "step": 1829 }, { "epoch": 0.21, "grad_norm": 0.737485223435407, "learning_rate": 0.0009182006227267343, "loss": 3.7757, "step": 1830 }, { "epoch": 0.21, "grad_norm": 1.2679289133418514, "learning_rate": 0.0009180988110304873, "loss": 3.8905, "step": 1831 }, { "epoch": 0.21, "grad_norm": 0.8429420251021547, "learning_rate": 0.000917996941665479, "loss": 3.8348, "step": 1832 }, { "epoch": 0.21, "grad_norm": 0.8267997301456189, "learning_rate": 0.0009178950146457606, "loss": 3.9278, "step": 1833 }, { "epoch": 0.21, "grad_norm": 0.7778305424837477, "learning_rate": 0.0009177930299853903, "loss": 3.7844, "step": 1834 }, { "epoch": 0.21, "grad_norm": 0.8438975589576946, "learning_rate": 0.0009176909876984356, "loss": 3.8029, "step": 1835 }, { "epoch": 0.21, "grad_norm": 0.7887038280353068, "learning_rate": 0.0009175888877989712, "loss": 3.9373, "step": 1836 }, { "epoch": 0.21, "grad_norm": 0.7917287022730356, "learning_rate": 0.0009174867303010795, "loss": 3.8645, "step": 1837 }, { "epoch": 0.21, "grad_norm": 0.7858372896969633, "learning_rate": 0.0009173845152188516, "loss": 3.7449, "step": 1838 }, { "epoch": 0.21, "grad_norm": 0.919925507440622, "learning_rate": 0.0009172822425663855, "loss": 3.9729, "step": 1839 }, { "epoch": 0.21, "grad_norm": 0.7658943378631716, "learning_rate": 0.0009171799123577886, "loss": 3.9106, "step": 1840 }, { "epoch": 0.21, "grad_norm": 0.9375314599172345, "learning_rate": 0.0009170775246071747, "loss": 3.7064, "step": 1841 }, { "epoch": 0.21, "grad_norm": 0.7705771052998489, "learning_rate": 0.0009169750793286667, "loss": 4.1107, "step": 1842 }, { "epoch": 0.21, "grad_norm": 0.8236536063075164, "learning_rate": 0.0009168725765363946, "loss": 3.8327, "step": 1843 }, { "epoch": 0.21, "grad_norm": 0.7604607176306432, "learning_rate": 0.0009167700162444969, "loss": 4.1545, "step": 1844 }, { "epoch": 0.21, "grad_norm": 0.8230602815712921, "learning_rate": 0.0009166673984671198, "loss": 4.0707, "step": 1845 }, { "epoch": 0.21, "grad_norm": 0.8425585918564236, "learning_rate": 0.0009165647232184176, "loss": 3.6911, "step": 1846 }, { "epoch": 0.21, "grad_norm": 0.9760367661105567, "learning_rate": 0.0009164619905125522, "loss": 4.1045, "step": 1847 }, { "epoch": 0.21, "grad_norm": 1.3801627427716208, "learning_rate": 0.0009163592003636936, "loss": 4.0525, "step": 1848 }, { "epoch": 0.21, "grad_norm": 0.7512200452746538, "learning_rate": 0.00091625635278602, "loss": 3.7542, "step": 1849 }, { "epoch": 0.21, "grad_norm": 0.7317553373794693, "learning_rate": 
0.000916153447793717, "loss": 3.9897, "step": 1850 }, { "epoch": 0.21, "grad_norm": 0.6494220629829268, "learning_rate": 0.0009160504854009786, "loss": 3.8179, "step": 1851 }, { "epoch": 0.21, "grad_norm": 0.8998951049503917, "learning_rate": 0.0009159474656220063, "loss": 3.7069, "step": 1852 }, { "epoch": 0.21, "grad_norm": 0.6875317935421839, "learning_rate": 0.0009158443884710097, "loss": 3.8136, "step": 1853 }, { "epoch": 0.21, "grad_norm": 0.7862866529427058, "learning_rate": 0.0009157412539622065, "loss": 3.9661, "step": 1854 }, { "epoch": 0.21, "grad_norm": 0.7279264963532441, "learning_rate": 0.0009156380621098221, "loss": 3.823, "step": 1855 }, { "epoch": 0.21, "grad_norm": 0.8142735591548157, "learning_rate": 0.0009155348129280898, "loss": 3.9211, "step": 1856 }, { "epoch": 0.21, "grad_norm": 1.5653489373235878, "learning_rate": 0.000915431506431251, "loss": 3.9058, "step": 1857 }, { "epoch": 0.21, "grad_norm": 0.7764229853632041, "learning_rate": 0.0009153281426335547, "loss": 3.8099, "step": 1858 }, { "epoch": 0.21, "grad_norm": 0.8359349117264595, "learning_rate": 0.0009152247215492577, "loss": 3.635, "step": 1859 }, { "epoch": 0.21, "grad_norm": 0.8042832959697725, "learning_rate": 0.0009151212431926256, "loss": 3.7794, "step": 1860 }, { "epoch": 0.21, "grad_norm": 0.782330829803909, "learning_rate": 0.0009150177075779308, "loss": 3.875, "step": 1861 }, { "epoch": 0.21, "grad_norm": 0.7089863621982626, "learning_rate": 0.0009149141147194542, "loss": 3.7641, "step": 1862 }, { "epoch": 0.21, "grad_norm": 0.8236834522705743, "learning_rate": 0.0009148104646314844, "loss": 4.1159, "step": 1863 }, { "epoch": 0.21, "grad_norm": 0.8510699317920725, "learning_rate": 0.000914706757328318, "loss": 4.0558, "step": 1864 }, { "epoch": 0.21, "grad_norm": 0.7765692534307267, "learning_rate": 0.0009146029928242596, "loss": 3.7697, "step": 1865 }, { "epoch": 0.21, "grad_norm": 0.8373970432684154, "learning_rate": 0.0009144991711336214, "loss": 4.0329, "step": 1866 }, { "epoch": 0.21, "grad_norm": 0.7889179335701646, "learning_rate": 0.0009143952922707235, "loss": 4.0346, "step": 1867 }, { "epoch": 0.21, "grad_norm": 0.745680691083741, "learning_rate": 0.0009142913562498942, "loss": 3.9779, "step": 1868 }, { "epoch": 0.21, "grad_norm": 0.715853609255922, "learning_rate": 0.0009141873630854694, "loss": 3.6749, "step": 1869 }, { "epoch": 0.21, "grad_norm": 0.8374924245709989, "learning_rate": 0.0009140833127917929, "loss": 3.9144, "step": 1870 }, { "epoch": 0.21, "grad_norm": 0.8150094207887062, "learning_rate": 0.0009139792053832166, "loss": 3.7356, "step": 1871 }, { "epoch": 0.21, "grad_norm": 0.8486350829809888, "learning_rate": 0.0009138750408741001, "loss": 3.776, "step": 1872 }, { "epoch": 0.21, "grad_norm": 0.7168042142083179, "learning_rate": 0.000913770819278811, "loss": 4.0069, "step": 1873 }, { "epoch": 0.21, "grad_norm": 0.737257814021183, "learning_rate": 0.0009136665406117244, "loss": 3.9646, "step": 1874 }, { "epoch": 0.21, "grad_norm": 0.8023341444635669, "learning_rate": 0.0009135622048872238, "loss": 4.1729, "step": 1875 }, { "epoch": 0.22, "grad_norm": 0.787586192629909, "learning_rate": 0.0009134578121197002, "loss": 3.9172, "step": 1876 }, { "epoch": 0.22, "grad_norm": 0.8086846678599074, "learning_rate": 0.0009133533623235526, "loss": 3.8854, "step": 1877 }, { "epoch": 0.22, "grad_norm": 0.7654224177481054, "learning_rate": 0.000913248855513188, "loss": 3.8118, "step": 1878 }, { "epoch": 0.22, "grad_norm": 0.6966105734241713, "learning_rate": 0.0009131442917030211, "loss": 
3.8454, "step": 1879 }, { "epoch": 0.22, "grad_norm": 0.7511299460136852, "learning_rate": 0.0009130396709074741, "loss": 3.7579, "step": 1880 }, { "epoch": 0.22, "grad_norm": 0.7658076535013472, "learning_rate": 0.0009129349931409781, "loss": 3.8432, "step": 1881 }, { "epoch": 0.22, "grad_norm": 0.845521630657328, "learning_rate": 0.0009128302584179708, "loss": 3.9815, "step": 1882 }, { "epoch": 0.22, "grad_norm": 0.8057288971208282, "learning_rate": 0.0009127254667528988, "loss": 3.7148, "step": 1883 }, { "epoch": 0.22, "grad_norm": 0.8479880670693027, "learning_rate": 0.0009126206181602158, "loss": 3.637, "step": 1884 }, { "epoch": 0.22, "grad_norm": 0.8928818442290861, "learning_rate": 0.0009125157126543838, "loss": 4.0144, "step": 1885 }, { "epoch": 0.22, "grad_norm": 0.8250737958305291, "learning_rate": 0.0009124107502498725, "loss": 3.8882, "step": 1886 }, { "epoch": 0.22, "grad_norm": 0.9283298052785008, "learning_rate": 0.0009123057309611595, "loss": 3.8937, "step": 1887 }, { "epoch": 0.22, "grad_norm": 0.9361109070855631, "learning_rate": 0.0009122006548027302, "loss": 3.9103, "step": 1888 }, { "epoch": 0.22, "grad_norm": 0.7157477299426674, "learning_rate": 0.0009120955217890778, "loss": 3.8233, "step": 1889 }, { "epoch": 0.22, "grad_norm": 0.7367734720067374, "learning_rate": 0.0009119903319347034, "loss": 3.8003, "step": 1890 }, { "epoch": 0.22, "grad_norm": 0.803302277594201, "learning_rate": 0.000911885085254116, "loss": 4.0575, "step": 1891 }, { "epoch": 0.22, "grad_norm": 0.8384415192309669, "learning_rate": 0.0009117797817618323, "loss": 3.6837, "step": 1892 }, { "epoch": 0.22, "grad_norm": 0.7765752248559238, "learning_rate": 0.000911674421472377, "loss": 3.7868, "step": 1893 }, { "epoch": 0.22, "grad_norm": 0.783679942864075, "learning_rate": 0.0009115690044002824, "loss": 4.0815, "step": 1894 }, { "epoch": 0.22, "grad_norm": 0.9174847503966219, "learning_rate": 0.0009114635305600889, "loss": 3.7492, "step": 1895 }, { "epoch": 0.22, "grad_norm": 0.8343904465590879, "learning_rate": 0.0009113579999663447, "loss": 3.8396, "step": 1896 }, { "epoch": 0.22, "grad_norm": 0.6606211945574318, "learning_rate": 0.0009112524126336054, "loss": 3.7131, "step": 1897 }, { "epoch": 0.22, "grad_norm": 0.7892498117175678, "learning_rate": 0.0009111467685764351, "loss": 3.6507, "step": 1898 }, { "epoch": 0.22, "grad_norm": 0.6738211710550357, "learning_rate": 0.0009110410678094051, "loss": 3.7384, "step": 1899 }, { "epoch": 0.22, "grad_norm": 0.734039570032989, "learning_rate": 0.0009109353103470951, "loss": 3.9633, "step": 1900 }, { "epoch": 0.22, "grad_norm": 0.7367034375505778, "learning_rate": 0.0009108294962040921, "loss": 3.9019, "step": 1901 }, { "epoch": 0.22, "grad_norm": 0.8155576192936432, "learning_rate": 0.0009107236253949912, "loss": 3.7207, "step": 1902 }, { "epoch": 0.22, "grad_norm": 0.8191568115814969, "learning_rate": 0.0009106176979343955, "loss": 3.7114, "step": 1903 }, { "epoch": 0.22, "grad_norm": 0.7659376802400928, "learning_rate": 0.0009105117138369151, "loss": 3.9048, "step": 1904 }, { "epoch": 0.22, "grad_norm": 0.8645358968946236, "learning_rate": 0.0009104056731171691, "loss": 3.764, "step": 1905 }, { "epoch": 0.22, "grad_norm": 0.743512245489565, "learning_rate": 0.0009102995757897834, "loss": 4.0159, "step": 1906 }, { "epoch": 0.22, "grad_norm": 0.9874288868372059, "learning_rate": 0.0009101934218693923, "loss": 3.8555, "step": 1907 }, { "epoch": 0.22, "grad_norm": 0.7417169356225424, "learning_rate": 0.0009100872113706375, "loss": 3.757, "step": 1908 }, { 
"epoch": 0.22, "grad_norm": 0.8688799585931287, "learning_rate": 0.0009099809443081691, "loss": 3.9607, "step": 1909 }, { "epoch": 0.22, "grad_norm": 0.7855815392661997, "learning_rate": 0.0009098746206966443, "loss": 3.8604, "step": 1910 }, { "epoch": 0.22, "grad_norm": 0.779294703214083, "learning_rate": 0.0009097682405507285, "loss": 3.836, "step": 1911 }, { "epoch": 0.22, "grad_norm": 0.802981965605641, "learning_rate": 0.0009096618038850948, "loss": 3.7794, "step": 1912 }, { "epoch": 0.22, "grad_norm": 0.7755808893316486, "learning_rate": 0.0009095553107144241, "loss": 3.9996, "step": 1913 }, { "epoch": 0.22, "grad_norm": 0.7509275435967226, "learning_rate": 0.0009094487610534052, "loss": 3.8933, "step": 1914 }, { "epoch": 0.22, "grad_norm": 0.9782908684647244, "learning_rate": 0.0009093421549167343, "loss": 3.9325, "step": 1915 }, { "epoch": 0.22, "grad_norm": 0.785092811355111, "learning_rate": 0.0009092354923191161, "loss": 3.9434, "step": 1916 }, { "epoch": 0.22, "grad_norm": 0.7332001658406608, "learning_rate": 0.0009091287732752624, "loss": 3.7755, "step": 1917 }, { "epoch": 0.22, "grad_norm": 0.8036973770978606, "learning_rate": 0.0009090219977998933, "loss": 3.9152, "step": 1918 }, { "epoch": 0.22, "grad_norm": 0.6927973935071611, "learning_rate": 0.000908915165907736, "loss": 3.831, "step": 1919 }, { "epoch": 0.22, "grad_norm": 0.8270807484327787, "learning_rate": 0.0009088082776135263, "loss": 3.9398, "step": 1920 }, { "epoch": 0.22, "grad_norm": 0.7404107686238394, "learning_rate": 0.0009087013329320073, "loss": 3.9327, "step": 1921 }, { "epoch": 0.22, "grad_norm": 0.7606837600028491, "learning_rate": 0.0009085943318779301, "loss": 3.929, "step": 1922 }, { "epoch": 0.22, "grad_norm": 0.8069176759296681, "learning_rate": 0.0009084872744660532, "loss": 3.9459, "step": 1923 }, { "epoch": 0.22, "grad_norm": 1.1093902626127157, "learning_rate": 0.0009083801607111433, "loss": 3.5617, "step": 1924 }, { "epoch": 0.22, "grad_norm": 0.791581672317756, "learning_rate": 0.0009082729906279746, "loss": 3.9925, "step": 1925 }, { "epoch": 0.22, "grad_norm": 0.7517541397884347, "learning_rate": 0.0009081657642313292, "loss": 3.7571, "step": 1926 }, { "epoch": 0.22, "grad_norm": 0.7825258256250016, "learning_rate": 0.0009080584815359972, "loss": 3.9193, "step": 1927 }, { "epoch": 0.22, "grad_norm": 1.1595234216895824, "learning_rate": 0.0009079511425567759, "loss": 3.892, "step": 1928 }, { "epoch": 0.22, "grad_norm": 0.7670731979033678, "learning_rate": 0.0009078437473084706, "loss": 4.027, "step": 1929 }, { "epoch": 0.22, "grad_norm": 0.8607211981699647, "learning_rate": 0.0009077362958058946, "loss": 3.7733, "step": 1930 }, { "epoch": 0.22, "grad_norm": 0.8049263088180011, "learning_rate": 0.0009076287880638689, "loss": 3.6674, "step": 1931 }, { "epoch": 0.22, "grad_norm": 1.691509536228359, "learning_rate": 0.0009075212240972218, "loss": 3.8831, "step": 1932 }, { "epoch": 0.22, "grad_norm": 0.8216622557736523, "learning_rate": 0.00090741360392079, "loss": 4.0133, "step": 1933 }, { "epoch": 0.22, "grad_norm": 0.8288696875682553, "learning_rate": 0.0009073059275494176, "loss": 3.9177, "step": 1934 }, { "epoch": 0.22, "grad_norm": 0.9203377439830267, "learning_rate": 0.0009071981949979564, "loss": 3.8384, "step": 1935 }, { "epoch": 0.22, "grad_norm": 0.7781585509007852, "learning_rate": 0.000907090406281266, "loss": 3.9732, "step": 1936 }, { "epoch": 0.22, "grad_norm": 0.8280677722696165, "learning_rate": 0.000906982561414214, "loss": 3.634, "step": 1937 }, { "epoch": 0.22, "grad_norm": 
0.7518900611477576, "learning_rate": 0.0009068746604116755, "loss": 3.8711, "step": 1938 }, { "epoch": 0.22, "grad_norm": 0.8106404644925548, "learning_rate": 0.0009067667032885334, "loss": 3.8588, "step": 1939 }, { "epoch": 0.22, "grad_norm": 0.6361541551754019, "learning_rate": 0.0009066586900596781, "loss": 3.8098, "step": 1940 }, { "epoch": 0.22, "grad_norm": 0.7432764438217282, "learning_rate": 0.000906550620740008, "loss": 3.8003, "step": 1941 }, { "epoch": 0.22, "grad_norm": 0.732384818892767, "learning_rate": 0.0009064424953444296, "loss": 3.7279, "step": 1942 }, { "epoch": 0.22, "grad_norm": 0.9980519538446884, "learning_rate": 0.0009063343138878563, "loss": 4.0684, "step": 1943 }, { "epoch": 0.22, "grad_norm": 1.632460493196114, "learning_rate": 0.0009062260763852099, "loss": 3.6478, "step": 1944 }, { "epoch": 0.22, "grad_norm": 0.7642968508702586, "learning_rate": 0.0009061177828514198, "loss": 3.7716, "step": 1945 }, { "epoch": 0.22, "grad_norm": 0.7378431925689061, "learning_rate": 0.0009060094333014226, "loss": 3.8119, "step": 1946 }, { "epoch": 0.22, "grad_norm": 0.8674244382184695, "learning_rate": 0.0009059010277501634, "loss": 3.8387, "step": 1947 }, { "epoch": 0.22, "grad_norm": 0.7395142721521389, "learning_rate": 0.0009057925662125946, "loss": 3.7808, "step": 1948 }, { "epoch": 0.22, "grad_norm": 0.6009771416139514, "learning_rate": 0.0009056840487036764, "loss": 3.7449, "step": 1949 }, { "epoch": 0.22, "grad_norm": 2.203273373696835, "learning_rate": 0.0009055754752383768, "loss": 3.8459, "step": 1950 }, { "epoch": 0.22, "grad_norm": 0.8843068352665313, "learning_rate": 0.0009054668458316713, "loss": 3.7464, "step": 1951 }, { "epoch": 0.22, "grad_norm": 0.800926332755142, "learning_rate": 0.0009053581604985433, "loss": 3.918, "step": 1952 }, { "epoch": 0.22, "grad_norm": 0.7685167081390272, "learning_rate": 0.000905249419253984, "loss": 3.7961, "step": 1953 }, { "epoch": 0.22, "grad_norm": 0.9226753810285627, "learning_rate": 0.0009051406221129919, "loss": 3.777, "step": 1954 }, { "epoch": 0.22, "grad_norm": 0.8547811644391046, "learning_rate": 0.0009050317690905737, "loss": 3.7778, "step": 1955 }, { "epoch": 0.22, "grad_norm": 1.2227429562391292, "learning_rate": 0.0009049228602017437, "loss": 3.6934, "step": 1956 }, { "epoch": 0.22, "grad_norm": 0.7837950862346422, "learning_rate": 0.0009048138954615235, "loss": 3.8584, "step": 1957 }, { "epoch": 0.22, "grad_norm": 0.7528380751923922, "learning_rate": 0.0009047048748849429, "loss": 4.0075, "step": 1958 }, { "epoch": 0.22, "grad_norm": 0.8334144698450073, "learning_rate": 0.0009045957984870393, "loss": 3.8519, "step": 1959 }, { "epoch": 0.22, "grad_norm": 1.0811523992115364, "learning_rate": 0.0009044866662828575, "loss": 3.6694, "step": 1960 }, { "epoch": 0.22, "grad_norm": 0.8495816965140921, "learning_rate": 0.0009043774782874503, "loss": 3.9996, "step": 1961 }, { "epoch": 0.22, "grad_norm": 0.7749261423145792, "learning_rate": 0.0009042682345158781, "loss": 3.9862, "step": 1962 }, { "epoch": 0.23, "grad_norm": 0.9227475897478702, "learning_rate": 0.0009041589349832091, "loss": 3.9152, "step": 1963 }, { "epoch": 0.23, "grad_norm": 0.8475081027530467, "learning_rate": 0.000904049579704519, "loss": 3.7206, "step": 1964 }, { "epoch": 0.23, "grad_norm": 0.9566101619415467, "learning_rate": 0.0009039401686948912, "loss": 4.044, "step": 1965 }, { "epoch": 0.23, "grad_norm": 0.8892129339208293, "learning_rate": 0.0009038307019694169, "loss": 3.7331, "step": 1966 }, { "epoch": 0.23, "grad_norm": 0.7641767318056124, 
"learning_rate": 0.000903721179543195, "loss": 3.7638, "step": 1967 }, { "epoch": 0.23, "grad_norm": 1.2407977318541443, "learning_rate": 0.0009036116014313321, "loss": 3.8328, "step": 1968 }, { "epoch": 0.23, "grad_norm": 0.9155852515990093, "learning_rate": 0.0009035019676489422, "loss": 3.7025, "step": 1969 }, { "epoch": 0.23, "grad_norm": 0.7659430900868343, "learning_rate": 0.0009033922782111473, "loss": 3.7129, "step": 1970 }, { "epoch": 0.23, "grad_norm": 0.9472145739973923, "learning_rate": 0.000903282533133077, "loss": 3.9758, "step": 1971 }, { "epoch": 0.23, "grad_norm": 0.7617990564909911, "learning_rate": 0.0009031727324298686, "loss": 3.6597, "step": 1972 }, { "epoch": 0.23, "grad_norm": 0.7335432871610481, "learning_rate": 0.0009030628761166668, "loss": 3.845, "step": 1973 }, { "epoch": 0.23, "grad_norm": 0.8064227453612388, "learning_rate": 0.0009029529642086245, "loss": 3.9377, "step": 1974 }, { "epoch": 0.23, "grad_norm": 0.7066322481379579, "learning_rate": 0.0009028429967209015, "loss": 3.9424, "step": 1975 }, { "epoch": 0.23, "grad_norm": 0.9572870170320034, "learning_rate": 0.0009027329736686663, "loss": 3.8628, "step": 1976 }, { "epoch": 0.23, "grad_norm": 0.8082987502478605, "learning_rate": 0.000902622895067094, "loss": 3.7577, "step": 1977 }, { "epoch": 0.23, "grad_norm": 0.7520252941106546, "learning_rate": 0.000902512760931368, "loss": 3.8899, "step": 1978 }, { "epoch": 0.23, "grad_norm": 0.8268251566722512, "learning_rate": 0.0009024025712766792, "loss": 3.9555, "step": 1979 }, { "epoch": 0.23, "grad_norm": 1.2371682447529315, "learning_rate": 0.0009022923261182264, "loss": 3.9017, "step": 1980 }, { "epoch": 0.23, "grad_norm": 1.525743532097485, "learning_rate": 0.0009021820254712153, "loss": 3.9388, "step": 1981 }, { "epoch": 0.23, "grad_norm": 0.8420085655139568, "learning_rate": 0.0009020716693508602, "loss": 3.889, "step": 1982 }, { "epoch": 0.23, "grad_norm": 0.7732562003767723, "learning_rate": 0.0009019612577723826, "loss": 3.643, "step": 1983 }, { "epoch": 0.23, "grad_norm": 2.3540444843049353, "learning_rate": 0.0009018507907510114, "loss": 3.7167, "step": 1984 }, { "epoch": 0.23, "grad_norm": 0.8681951724928355, "learning_rate": 0.0009017402683019838, "loss": 4.0191, "step": 1985 }, { "epoch": 0.23, "grad_norm": 0.7473612536305064, "learning_rate": 0.0009016296904405439, "loss": 3.6434, "step": 1986 }, { "epoch": 0.23, "grad_norm": 2.8371563551217536, "learning_rate": 0.0009015190571819438, "loss": 3.8937, "step": 1987 }, { "epoch": 0.23, "grad_norm": 0.8281837540136497, "learning_rate": 0.0009014083685414437, "loss": 3.9874, "step": 1988 }, { "epoch": 0.23, "grad_norm": 0.7216865191352599, "learning_rate": 0.0009012976245343106, "loss": 3.8798, "step": 1989 }, { "epoch": 0.23, "grad_norm": 0.7310562616815615, "learning_rate": 0.0009011868251758195, "loss": 4.0126, "step": 1990 }, { "epoch": 0.23, "grad_norm": 0.7863301358877134, "learning_rate": 0.0009010759704812533, "loss": 3.994, "step": 1991 }, { "epoch": 0.23, "grad_norm": 0.9997000058176198, "learning_rate": 0.0009009650604659023, "loss": 3.7062, "step": 1992 }, { "epoch": 0.23, "grad_norm": 1.5939330143181982, "learning_rate": 0.0009008540951450641, "loss": 3.8502, "step": 1993 }, { "epoch": 0.23, "grad_norm": 0.9004997480496979, "learning_rate": 0.0009007430745340446, "loss": 3.938, "step": 1994 }, { "epoch": 0.23, "grad_norm": 0.7469871916243367, "learning_rate": 0.0009006319986481567, "loss": 4.0888, "step": 1995 }, { "epoch": 0.23, "grad_norm": 0.7956750844256011, "learning_rate": 
0.0009005208675027215, "loss": 3.7604, "step": 1996 }, { "epoch": 0.23, "grad_norm": 1.9616605125875768, "learning_rate": 0.000900409681113067, "loss": 3.9209, "step": 1997 }, { "epoch": 0.23, "grad_norm": 0.9479515174477374, "learning_rate": 0.0009002984394945298, "loss": 3.9356, "step": 1998 }, { "epoch": 0.23, "grad_norm": 0.7796120932780577, "learning_rate": 0.0009001871426624528, "loss": 3.9046, "step": 1999 }, { "epoch": 0.23, "grad_norm": 0.7378291584580816, "learning_rate": 0.0009000757906321882, "loss": 3.6397, "step": 2000 } ], "logging_steps": 1.0, "max_steps": 8721, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 1000, "total_flos": 6542275092480.0, "train_batch_size": 32, "trial_name": null, "trial_params": null }
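The record above appears to be a trainer checkpoint state (per-step log_history entries plus run metadata such as max_steps, save_steps, and train_batch_size). A minimal sketch for inspecting it is given below, assuming the JSON is saved as trainer_state.json; the file path and the output image name are illustrative, not part of the original data.

# Minimal inspection sketch (assumptions: the JSON above is saved as
# "trainer_state.json"; matplotlib is available; file names are hypothetical).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

logs = state["log_history"]
steps = [entry["step"] for entry in logs]
loss = [entry["loss"] for entry in logs]
lr = [entry["learning_rate"] for entry in logs]

# Report how far the run has progressed relative to the scheduled max_steps.
print(f"logged up to step {steps[-1]} of {state['max_steps']} "
      f"({steps[-1] / state['max_steps']:.1%})")

# Plot loss and learning rate against training step.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, loss)
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lr)
ax_lr.set_ylabel("learning_rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
fig.savefig("training_curves.png")  # hypothetical output file

Loading the full log this way also makes it easy to smooth the loss (e.g. a running mean) or to flag the occasional large grad_norm spikes visible in the entries above, without modifying the checkpoint file itself.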