{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.08187912594033059,
"eval_steps": 500,
"global_step": 400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00020469781485082646,
"grad_norm": 1.694807571622714,
"learning_rate": 1.3605442176870747e-08,
"loss": 0.6346,
"step": 1
},
{
"epoch": 0.0004093956297016529,
"grad_norm": 1.7333604659657242,
"learning_rate": 2.7210884353741493e-08,
"loss": 0.5956,
"step": 2
},
{
"epoch": 0.0006140934445524794,
"grad_norm": 1.675170156463719,
"learning_rate": 4.081632653061224e-08,
"loss": 0.6494,
"step": 3
},
{
"epoch": 0.0008187912594033058,
"grad_norm": 1.4535412459526658,
"learning_rate": 5.442176870748299e-08,
"loss": 0.6097,
"step": 4
},
{
"epoch": 0.0010234890742541324,
"grad_norm": 1.6204240919715567,
"learning_rate": 6.802721088435375e-08,
"loss": 0.6259,
"step": 5
},
{
"epoch": 0.0012281868891049587,
"grad_norm": 1.6382909584778356,
"learning_rate": 8.163265306122448e-08,
"loss": 0.6049,
"step": 6
},
{
"epoch": 0.0014328847039557853,
"grad_norm": 1.6591863389965569,
"learning_rate": 9.523809523809523e-08,
"loss": 0.6093,
"step": 7
},
{
"epoch": 0.0016375825188066117,
"grad_norm": 1.529188807208944,
"learning_rate": 1.0884353741496597e-07,
"loss": 0.625,
"step": 8
},
{
"epoch": 0.0018422803336574382,
"grad_norm": 1.7414059653199376,
"learning_rate": 1.2244897959183673e-07,
"loss": 0.6148,
"step": 9
},
{
"epoch": 0.002046978148508265,
"grad_norm": 1.6622320550472127,
"learning_rate": 1.360544217687075e-07,
"loss": 0.5797,
"step": 10
},
{
"epoch": 0.002251675963359091,
"grad_norm": 1.6508189144245708,
"learning_rate": 1.4965986394557823e-07,
"loss": 0.6484,
"step": 11
},
{
"epoch": 0.0024563737782099175,
"grad_norm": 1.7202133207821506,
"learning_rate": 1.6326530612244896e-07,
"loss": 0.6216,
"step": 12
},
{
"epoch": 0.0026610715930607443,
"grad_norm": 1.5235060143030161,
"learning_rate": 1.7687074829931972e-07,
"loss": 0.6452,
"step": 13
},
{
"epoch": 0.0028657694079115706,
"grad_norm": 1.5363560852946705,
"learning_rate": 1.9047619047619045e-07,
"loss": 0.6405,
"step": 14
},
{
"epoch": 0.003070467222762397,
"grad_norm": 1.6730999257251689,
"learning_rate": 2.0408163265306121e-07,
"loss": 0.6497,
"step": 15
},
{
"epoch": 0.0032751650376132233,
"grad_norm": 1.5758327138243107,
"learning_rate": 2.1768707482993195e-07,
"loss": 0.6336,
"step": 16
},
{
"epoch": 0.00347986285246405,
"grad_norm": 1.5492535238923828,
"learning_rate": 2.312925170068027e-07,
"loss": 0.6037,
"step": 17
},
{
"epoch": 0.0036845606673148765,
"grad_norm": 1.6696926699572276,
"learning_rate": 2.4489795918367347e-07,
"loss": 0.6139,
"step": 18
},
{
"epoch": 0.003889258482165703,
"grad_norm": 1.6544769292475,
"learning_rate": 2.5850340136054423e-07,
"loss": 0.6315,
"step": 19
},
{
"epoch": 0.00409395629701653,
"grad_norm": 1.6860896587110352,
"learning_rate": 2.72108843537415e-07,
"loss": 0.6324,
"step": 20
},
{
"epoch": 0.0042986541118673555,
"grad_norm": 1.5451565683271684,
"learning_rate": 2.857142857142857e-07,
"loss": 0.6227,
"step": 21
},
{
"epoch": 0.004503351926718182,
"grad_norm": 1.5658617650258626,
"learning_rate": 2.9931972789115645e-07,
"loss": 0.5873,
"step": 22
},
{
"epoch": 0.004708049741569009,
"grad_norm": 1.6014268573351107,
"learning_rate": 3.129251700680272e-07,
"loss": 0.6066,
"step": 23
},
{
"epoch": 0.004912747556419835,
"grad_norm": 1.560124972985785,
"learning_rate": 3.265306122448979e-07,
"loss": 0.6062,
"step": 24
},
{
"epoch": 0.005117445371270662,
"grad_norm": 1.6662191020723245,
"learning_rate": 3.401360544217687e-07,
"loss": 0.5968,
"step": 25
},
{
"epoch": 0.0053221431861214885,
"grad_norm": 1.7137595321931511,
"learning_rate": 3.5374149659863944e-07,
"loss": 0.6325,
"step": 26
},
{
"epoch": 0.0055268410009723145,
"grad_norm": 1.570188696390546,
"learning_rate": 3.673469387755102e-07,
"loss": 0.6375,
"step": 27
},
{
"epoch": 0.005731538815823141,
"grad_norm": 1.5585726347237283,
"learning_rate": 3.809523809523809e-07,
"loss": 0.6216,
"step": 28
},
{
"epoch": 0.005936236630673967,
"grad_norm": 1.4585947364133294,
"learning_rate": 3.9455782312925167e-07,
"loss": 0.5975,
"step": 29
},
{
"epoch": 0.006140934445524794,
"grad_norm": 1.5313204391085877,
"learning_rate": 4.0816326530612243e-07,
"loss": 0.6461,
"step": 30
},
{
"epoch": 0.006345632260375621,
"grad_norm": 1.4690318982818216,
"learning_rate": 4.217687074829932e-07,
"loss": 0.6054,
"step": 31
},
{
"epoch": 0.006550330075226447,
"grad_norm": 1.5256726957060316,
"learning_rate": 4.353741496598639e-07,
"loss": 0.6507,
"step": 32
},
{
"epoch": 0.006755027890077273,
"grad_norm": 1.541131533646238,
"learning_rate": 4.4897959183673465e-07,
"loss": 0.6185,
"step": 33
},
{
"epoch": 0.0069597257049281,
"grad_norm": 1.5233070330699345,
"learning_rate": 4.625850340136054e-07,
"loss": 0.6541,
"step": 34
},
{
"epoch": 0.007164423519778926,
"grad_norm": 1.4300240195672376,
"learning_rate": 4.761904761904761e-07,
"loss": 0.6156,
"step": 35
},
{
"epoch": 0.007369121334629753,
"grad_norm": 1.3386118655838508,
"learning_rate": 4.897959183673469e-07,
"loss": 0.5943,
"step": 36
},
{
"epoch": 0.00757381914948058,
"grad_norm": 1.3270928639031936,
"learning_rate": 5.034013605442177e-07,
"loss": 0.6139,
"step": 37
},
{
"epoch": 0.007778516964331406,
"grad_norm": 1.3116129547815811,
"learning_rate": 5.170068027210885e-07,
"loss": 0.6119,
"step": 38
},
{
"epoch": 0.007983214779182232,
"grad_norm": 1.2451803048665653,
"learning_rate": 5.306122448979592e-07,
"loss": 0.5463,
"step": 39
},
{
"epoch": 0.00818791259403306,
"grad_norm": 1.2351915311334578,
"learning_rate": 5.4421768707483e-07,
"loss": 0.5762,
"step": 40
},
{
"epoch": 0.008392610408883885,
"grad_norm": 1.3425104949855924,
"learning_rate": 5.578231292517006e-07,
"loss": 0.5866,
"step": 41
},
{
"epoch": 0.008597308223734711,
"grad_norm": 1.3464358053560985,
"learning_rate": 5.714285714285714e-07,
"loss": 0.6134,
"step": 42
},
{
"epoch": 0.008802006038585539,
"grad_norm": 1.3225968492677225,
"learning_rate": 5.850340136054421e-07,
"loss": 0.6034,
"step": 43
},
{
"epoch": 0.009006703853436365,
"grad_norm": 1.2483346937333237,
"learning_rate": 5.986394557823129e-07,
"loss": 0.5495,
"step": 44
},
{
"epoch": 0.00921140166828719,
"grad_norm": 1.1648688787665145,
"learning_rate": 6.122448979591837e-07,
"loss": 0.616,
"step": 45
},
{
"epoch": 0.009416099483138018,
"grad_norm": 1.2616996144445687,
"learning_rate": 6.258503401360544e-07,
"loss": 0.57,
"step": 46
},
{
"epoch": 0.009620797297988844,
"grad_norm": 1.3108653064941627,
"learning_rate": 6.394557823129252e-07,
"loss": 0.5814,
"step": 47
},
{
"epoch": 0.00982549511283967,
"grad_norm": 1.1754918916726766,
"learning_rate": 6.530612244897958e-07,
"loss": 0.5754,
"step": 48
},
{
"epoch": 0.010030192927690498,
"grad_norm": 1.272022559229399,
"learning_rate": 6.666666666666666e-07,
"loss": 0.5944,
"step": 49
},
{
"epoch": 0.010234890742541324,
"grad_norm": 1.13107848406085,
"learning_rate": 6.802721088435374e-07,
"loss": 0.5945,
"step": 50
},
{
"epoch": 0.01043958855739215,
"grad_norm": 1.1273813534766033,
"learning_rate": 6.938775510204081e-07,
"loss": 0.5538,
"step": 51
},
{
"epoch": 0.010644286372242977,
"grad_norm": 1.1293664677810216,
"learning_rate": 7.074829931972789e-07,
"loss": 0.5854,
"step": 52
},
{
"epoch": 0.010848984187093803,
"grad_norm": 0.9728651370750258,
"learning_rate": 7.210884353741496e-07,
"loss": 0.5108,
"step": 53
},
{
"epoch": 0.011053682001944629,
"grad_norm": 1.0432420839745669,
"learning_rate": 7.346938775510204e-07,
"loss": 0.5346,
"step": 54
},
{
"epoch": 0.011258379816795457,
"grad_norm": 1.0023551080535893,
"learning_rate": 7.482993197278912e-07,
"loss": 0.5799,
"step": 55
},
{
"epoch": 0.011463077631646282,
"grad_norm": 0.9638908320867696,
"learning_rate": 7.619047619047618e-07,
"loss": 0.555,
"step": 56
},
{
"epoch": 0.011667775446497108,
"grad_norm": 1.0398584356633989,
"learning_rate": 7.755102040816326e-07,
"loss": 0.5147,
"step": 57
},
{
"epoch": 0.011872473261347934,
"grad_norm": 0.9629896909635629,
"learning_rate": 7.891156462585033e-07,
"loss": 0.5413,
"step": 58
},
{
"epoch": 0.012077171076198762,
"grad_norm": 0.9770292637339174,
"learning_rate": 8.027210884353741e-07,
"loss": 0.5205,
"step": 59
},
{
"epoch": 0.012281868891049588,
"grad_norm": 0.971945782703798,
"learning_rate": 8.163265306122449e-07,
"loss": 0.5422,
"step": 60
},
{
"epoch": 0.012486566705900414,
"grad_norm": 0.950398975311517,
"learning_rate": 8.299319727891156e-07,
"loss": 0.5071,
"step": 61
},
{
"epoch": 0.012691264520751241,
"grad_norm": 0.9049285150490526,
"learning_rate": 8.435374149659864e-07,
"loss": 0.4964,
"step": 62
},
{
"epoch": 0.012895962335602067,
"grad_norm": 0.8793095995125478,
"learning_rate": 8.57142857142857e-07,
"loss": 0.5331,
"step": 63
},
{
"epoch": 0.013100660150452893,
"grad_norm": 0.8515461613654705,
"learning_rate": 8.707482993197278e-07,
"loss": 0.5283,
"step": 64
},
{
"epoch": 0.013305357965303721,
"grad_norm": 0.867859420385022,
"learning_rate": 8.843537414965985e-07,
"loss": 0.5164,
"step": 65
},
{
"epoch": 0.013510055780154547,
"grad_norm": 0.8786706131313361,
"learning_rate": 8.979591836734693e-07,
"loss": 0.5645,
"step": 66
},
{
"epoch": 0.013714753595005373,
"grad_norm": 0.8579092596142676,
"learning_rate": 9.115646258503401e-07,
"loss": 0.5399,
"step": 67
},
{
"epoch": 0.0139194514098562,
"grad_norm": 0.8773908463960428,
"learning_rate": 9.251700680272108e-07,
"loss": 0.5229,
"step": 68
},
{
"epoch": 0.014124149224707026,
"grad_norm": 0.8528366708567172,
"learning_rate": 9.387755102040816e-07,
"loss": 0.5349,
"step": 69
},
{
"epoch": 0.014328847039557852,
"grad_norm": 0.9184139371914097,
"learning_rate": 9.523809523809522e-07,
"loss": 0.5331,
"step": 70
},
{
"epoch": 0.01453354485440868,
"grad_norm": 0.8507461371837629,
"learning_rate": 9.65986394557823e-07,
"loss": 0.4801,
"step": 71
},
{
"epoch": 0.014738242669259506,
"grad_norm": 0.8374936253263676,
"learning_rate": 9.795918367346939e-07,
"loss": 0.4931,
"step": 72
},
{
"epoch": 0.014942940484110332,
"grad_norm": 0.8174848059151317,
"learning_rate": 9.931972789115645e-07,
"loss": 0.5248,
"step": 73
},
{
"epoch": 0.01514763829896116,
"grad_norm": 0.8174077531772923,
"learning_rate": 1.0068027210884354e-06,
"loss": 0.5036,
"step": 74
},
{
"epoch": 0.015352336113811985,
"grad_norm": 0.7262562022534738,
"learning_rate": 1.020408163265306e-06,
"loss": 0.5232,
"step": 75
},
{
"epoch": 0.015557033928662811,
"grad_norm": 0.7855250505927771,
"learning_rate": 1.034013605442177e-06,
"loss": 0.5098,
"step": 76
},
{
"epoch": 0.015761731743513637,
"grad_norm": 0.8278680336215173,
"learning_rate": 1.0476190476190476e-06,
"loss": 0.4829,
"step": 77
},
{
"epoch": 0.015966429558364463,
"grad_norm": 0.797196328457245,
"learning_rate": 1.0612244897959184e-06,
"loss": 0.5037,
"step": 78
},
{
"epoch": 0.016171127373215292,
"grad_norm": 0.7507210642711485,
"learning_rate": 1.074829931972789e-06,
"loss": 0.4944,
"step": 79
},
{
"epoch": 0.01637582518806612,
"grad_norm": 0.826047544790976,
"learning_rate": 1.08843537414966e-06,
"loss": 0.5179,
"step": 80
},
{
"epoch": 0.016580523002916944,
"grad_norm": 0.7746315656318813,
"learning_rate": 1.1020408163265304e-06,
"loss": 0.5223,
"step": 81
},
{
"epoch": 0.01678522081776777,
"grad_norm": 0.778762710130468,
"learning_rate": 1.1156462585034013e-06,
"loss": 0.4845,
"step": 82
},
{
"epoch": 0.016989918632618596,
"grad_norm": 0.749908717861716,
"learning_rate": 1.129251700680272e-06,
"loss": 0.5175,
"step": 83
},
{
"epoch": 0.017194616447469422,
"grad_norm": 0.7582554704845739,
"learning_rate": 1.1428571428571428e-06,
"loss": 0.4978,
"step": 84
},
{
"epoch": 0.01739931426232025,
"grad_norm": 0.7595367961287336,
"learning_rate": 1.1564625850340134e-06,
"loss": 0.4966,
"step": 85
},
{
"epoch": 0.017604012077171077,
"grad_norm": 0.7488555001974914,
"learning_rate": 1.1700680272108843e-06,
"loss": 0.5025,
"step": 86
},
{
"epoch": 0.017808709892021903,
"grad_norm": 0.8307772703305798,
"learning_rate": 1.183673469387755e-06,
"loss": 0.5144,
"step": 87
},
{
"epoch": 0.01801340770687273,
"grad_norm": 0.7317615547098743,
"learning_rate": 1.1972789115646258e-06,
"loss": 0.4817,
"step": 88
},
{
"epoch": 0.018218105521723555,
"grad_norm": 0.8210594860542216,
"learning_rate": 1.2108843537414965e-06,
"loss": 0.5058,
"step": 89
},
{
"epoch": 0.01842280333657438,
"grad_norm": 0.7250535412206353,
"learning_rate": 1.2244897959183673e-06,
"loss": 0.4796,
"step": 90
},
{
"epoch": 0.018627501151425207,
"grad_norm": 0.7476633557284366,
"learning_rate": 1.238095238095238e-06,
"loss": 0.4732,
"step": 91
},
{
"epoch": 0.018832198966276036,
"grad_norm": 0.7245302420505394,
"learning_rate": 1.2517006802721089e-06,
"loss": 0.5085,
"step": 92
},
{
"epoch": 0.019036896781126862,
"grad_norm": 0.7287781044325405,
"learning_rate": 1.2653061224489795e-06,
"loss": 0.4837,
"step": 93
},
{
"epoch": 0.019241594595977688,
"grad_norm": 0.7461257075758424,
"learning_rate": 1.2789115646258504e-06,
"loss": 0.4847,
"step": 94
},
{
"epoch": 0.019446292410828514,
"grad_norm": 0.7500567577642135,
"learning_rate": 1.292517006802721e-06,
"loss": 0.5023,
"step": 95
},
{
"epoch": 0.01965099022567934,
"grad_norm": 0.7516926737451503,
"learning_rate": 1.3061224489795917e-06,
"loss": 0.4944,
"step": 96
},
{
"epoch": 0.019855688040530166,
"grad_norm": 0.8160475040600308,
"learning_rate": 1.3197278911564623e-06,
"loss": 0.4707,
"step": 97
},
{
"epoch": 0.020060385855380995,
"grad_norm": 0.7313987935291313,
"learning_rate": 1.3333333333333332e-06,
"loss": 0.4631,
"step": 98
},
{
"epoch": 0.02026508367023182,
"grad_norm": 0.7272827048713341,
"learning_rate": 1.3469387755102039e-06,
"loss": 0.4912,
"step": 99
},
{
"epoch": 0.020469781485082647,
"grad_norm": 0.7148392974765637,
"learning_rate": 1.3605442176870747e-06,
"loss": 0.4686,
"step": 100
},
{
"epoch": 0.020674479299933473,
"grad_norm": 0.8073254642999934,
"learning_rate": 1.3741496598639456e-06,
"loss": 0.4889,
"step": 101
},
{
"epoch": 0.0208791771147843,
"grad_norm": 0.7585784341693678,
"learning_rate": 1.3877551020408162e-06,
"loss": 0.4661,
"step": 102
},
{
"epoch": 0.021083874929635125,
"grad_norm": 0.750059071249337,
"learning_rate": 1.4013605442176871e-06,
"loss": 0.4856,
"step": 103
},
{
"epoch": 0.021288572744485954,
"grad_norm": 0.7391246566572075,
"learning_rate": 1.4149659863945578e-06,
"loss": 0.4835,
"step": 104
},
{
"epoch": 0.02149327055933678,
"grad_norm": 0.7961401475792825,
"learning_rate": 1.4285714285714286e-06,
"loss": 0.5078,
"step": 105
},
{
"epoch": 0.021697968374187606,
"grad_norm": 0.736443177126423,
"learning_rate": 1.4421768707482993e-06,
"loss": 0.4754,
"step": 106
},
{
"epoch": 0.021902666189038432,
"grad_norm": 0.7433000385873849,
"learning_rate": 1.4557823129251701e-06,
"loss": 0.5051,
"step": 107
},
{
"epoch": 0.022107364003889258,
"grad_norm": 0.742852894387874,
"learning_rate": 1.4693877551020408e-06,
"loss": 0.4815,
"step": 108
},
{
"epoch": 0.022312061818740084,
"grad_norm": 0.7321778169129644,
"learning_rate": 1.4829931972789117e-06,
"loss": 0.4883,
"step": 109
},
{
"epoch": 0.022516759633590913,
"grad_norm": 0.7374200652655346,
"learning_rate": 1.4965986394557823e-06,
"loss": 0.5235,
"step": 110
},
{
"epoch": 0.02272145744844174,
"grad_norm": 0.772981855244519,
"learning_rate": 1.510204081632653e-06,
"loss": 0.5008,
"step": 111
},
{
"epoch": 0.022926155263292565,
"grad_norm": 0.7342674795579016,
"learning_rate": 1.5238095238095236e-06,
"loss": 0.4671,
"step": 112
},
{
"epoch": 0.02313085307814339,
"grad_norm": 0.7795902315585469,
"learning_rate": 1.5374149659863945e-06,
"loss": 0.507,
"step": 113
},
{
"epoch": 0.023335550892994217,
"grad_norm": 0.7765099211131105,
"learning_rate": 1.5510204081632651e-06,
"loss": 0.5251,
"step": 114
},
{
"epoch": 0.023540248707845043,
"grad_norm": 0.7386929957340117,
"learning_rate": 1.564625850340136e-06,
"loss": 0.4578,
"step": 115
},
{
"epoch": 0.02374494652269587,
"grad_norm": 0.7248512159636582,
"learning_rate": 1.5782312925170067e-06,
"loss": 0.48,
"step": 116
},
{
"epoch": 0.023949644337546698,
"grad_norm": 0.7545806311647761,
"learning_rate": 1.5918367346938775e-06,
"loss": 0.4884,
"step": 117
},
{
"epoch": 0.024154342152397524,
"grad_norm": 0.7683287783699582,
"learning_rate": 1.6054421768707482e-06,
"loss": 0.4834,
"step": 118
},
{
"epoch": 0.02435903996724835,
"grad_norm": 0.775426549385026,
"learning_rate": 1.619047619047619e-06,
"loss": 0.4822,
"step": 119
},
{
"epoch": 0.024563737782099176,
"grad_norm": 0.7149469826873975,
"learning_rate": 1.6326530612244897e-06,
"loss": 0.4721,
"step": 120
},
{
"epoch": 0.02476843559695,
"grad_norm": 0.7985393152422335,
"learning_rate": 1.6462585034013606e-06,
"loss": 0.5166,
"step": 121
},
{
"epoch": 0.024973133411800828,
"grad_norm": 0.7885248764092557,
"learning_rate": 1.6598639455782312e-06,
"loss": 0.5091,
"step": 122
},
{
"epoch": 0.025177831226651657,
"grad_norm": 0.7531097232781883,
"learning_rate": 1.673469387755102e-06,
"loss": 0.5133,
"step": 123
},
{
"epoch": 0.025382529041502483,
"grad_norm": 0.7097213698861701,
"learning_rate": 1.6870748299319727e-06,
"loss": 0.5001,
"step": 124
},
{
"epoch": 0.02558722685635331,
"grad_norm": 0.6936318152279768,
"learning_rate": 1.7006802721088434e-06,
"loss": 0.4611,
"step": 125
},
{
"epoch": 0.025791924671204135,
"grad_norm": 0.7442480820206602,
"learning_rate": 1.714285714285714e-06,
"loss": 0.5107,
"step": 126
},
{
"epoch": 0.02599662248605496,
"grad_norm": 0.7310368101162509,
"learning_rate": 1.727891156462585e-06,
"loss": 0.4568,
"step": 127
},
{
"epoch": 0.026201320300905787,
"grad_norm": 0.7723563494615043,
"learning_rate": 1.7414965986394556e-06,
"loss": 0.4976,
"step": 128
},
{
"epoch": 0.026406018115756616,
"grad_norm": 0.7688284872373655,
"learning_rate": 1.7551020408163264e-06,
"loss": 0.4876,
"step": 129
},
{
"epoch": 0.026610715930607442,
"grad_norm": 0.7663908612309938,
"learning_rate": 1.768707482993197e-06,
"loss": 0.5089,
"step": 130
},
{
"epoch": 0.026815413745458268,
"grad_norm": 0.6966352320510637,
"learning_rate": 1.782312925170068e-06,
"loss": 0.4537,
"step": 131
},
{
"epoch": 0.027020111560309094,
"grad_norm": 0.6933747179682217,
"learning_rate": 1.7959183673469386e-06,
"loss": 0.4431,
"step": 132
},
{
"epoch": 0.02722480937515992,
"grad_norm": 0.7620187715357651,
"learning_rate": 1.8095238095238095e-06,
"loss": 0.5002,
"step": 133
},
{
"epoch": 0.027429507190010746,
"grad_norm": 0.694229773433825,
"learning_rate": 1.8231292517006801e-06,
"loss": 0.4602,
"step": 134
},
{
"epoch": 0.02763420500486157,
"grad_norm": 0.7152627743695282,
"learning_rate": 1.836734693877551e-06,
"loss": 0.46,
"step": 135
},
{
"epoch": 0.0278389028197124,
"grad_norm": 0.7175467946942147,
"learning_rate": 1.8503401360544217e-06,
"loss": 0.4687,
"step": 136
},
{
"epoch": 0.028043600634563227,
"grad_norm": 0.7852808070086453,
"learning_rate": 1.8639455782312925e-06,
"loss": 0.5045,
"step": 137
},
{
"epoch": 0.028248298449414053,
"grad_norm": 0.7212069697520485,
"learning_rate": 1.8775510204081632e-06,
"loss": 0.458,
"step": 138
},
{
"epoch": 0.02845299626426488,
"grad_norm": 0.6901869666091209,
"learning_rate": 1.891156462585034e-06,
"loss": 0.4873,
"step": 139
},
{
"epoch": 0.028657694079115704,
"grad_norm": 0.7038286804084832,
"learning_rate": 1.9047619047619045e-06,
"loss": 0.4575,
"step": 140
},
{
"epoch": 0.02886239189396653,
"grad_norm": 0.7736514303776025,
"learning_rate": 1.918367346938775e-06,
"loss": 0.4989,
"step": 141
},
{
"epoch": 0.02906708970881736,
"grad_norm": 0.7546506061753928,
"learning_rate": 1.931972789115646e-06,
"loss": 0.6157,
"step": 142
},
{
"epoch": 0.029271787523668186,
"grad_norm": 0.7502287441885653,
"learning_rate": 1.945578231292517e-06,
"loss": 0.4744,
"step": 143
},
{
"epoch": 0.02947648533851901,
"grad_norm": 0.7544747394474504,
"learning_rate": 1.9591836734693877e-06,
"loss": 0.4652,
"step": 144
},
{
"epoch": 0.029681183153369838,
"grad_norm": 0.7400203790224271,
"learning_rate": 1.972789115646258e-06,
"loss": 0.4749,
"step": 145
},
{
"epoch": 0.029885880968220663,
"grad_norm": 0.7756604473816919,
"learning_rate": 1.986394557823129e-06,
"loss": 0.4879,
"step": 146
},
{
"epoch": 0.03009057878307149,
"grad_norm": 0.7364241267157726,
"learning_rate": 2e-06,
"loss": 0.4641,
"step": 147
},
{
"epoch": 0.03029527659792232,
"grad_norm": 0.7509999341558731,
"learning_rate": 1.9999997801737146e-06,
"loss": 0.4716,
"step": 148
},
{
"epoch": 0.030499974412773145,
"grad_norm": 0.7817167258395246,
"learning_rate": 1.9999991206949555e-06,
"loss": 0.478,
"step": 149
},
{
"epoch": 0.03070467222762397,
"grad_norm": 0.6975681554994494,
"learning_rate": 1.9999980215640124e-06,
"loss": 0.4698,
"step": 150
},
{
"epoch": 0.030909370042474796,
"grad_norm": 0.6671508819481775,
"learning_rate": 1.9999964827813685e-06,
"loss": 0.4502,
"step": 151
},
{
"epoch": 0.031114067857325622,
"grad_norm": 0.7588040820967348,
"learning_rate": 1.9999945043477006e-06,
"loss": 0.4932,
"step": 152
},
{
"epoch": 0.03131876567217645,
"grad_norm": 0.7918237654214221,
"learning_rate": 1.9999920862638785e-06,
"loss": 0.4676,
"step": 153
},
{
"epoch": 0.031523463487027274,
"grad_norm": 0.7313481708497578,
"learning_rate": 1.999989228530965e-06,
"loss": 0.458,
"step": 154
},
{
"epoch": 0.031728161301878104,
"grad_norm": 0.7363675329922608,
"learning_rate": 1.9999859311502164e-06,
"loss": 0.4794,
"step": 155
},
{
"epoch": 0.031932859116728926,
"grad_norm": 0.7511639384926047,
"learning_rate": 1.999982194123083e-06,
"loss": 0.4811,
"step": 156
},
{
"epoch": 0.032137556931579755,
"grad_norm": 0.6977601768137399,
"learning_rate": 1.9999780174512074e-06,
"loss": 0.5046,
"step": 157
},
{
"epoch": 0.032342254746430585,
"grad_norm": 0.6871160044462953,
"learning_rate": 1.999973401136426e-06,
"loss": 0.4473,
"step": 158
},
{
"epoch": 0.03254695256128141,
"grad_norm": 0.735276225575691,
"learning_rate": 1.999968345180768e-06,
"loss": 0.4769,
"step": 159
},
{
"epoch": 0.03275165037613224,
"grad_norm": 0.6707772270842888,
"learning_rate": 1.999962849586457e-06,
"loss": 0.4395,
"step": 160
},
{
"epoch": 0.03295634819098306,
"grad_norm": 0.7714337406838349,
"learning_rate": 1.9999569143559085e-06,
"loss": 0.4658,
"step": 161
},
{
"epoch": 0.03316104600583389,
"grad_norm": 0.7896606525524605,
"learning_rate": 1.999950539491732e-06,
"loss": 0.4645,
"step": 162
},
{
"epoch": 0.03336574382068471,
"grad_norm": 0.7359544675011239,
"learning_rate": 1.999943724996731e-06,
"loss": 0.4671,
"step": 163
},
{
"epoch": 0.03357044163553554,
"grad_norm": 0.6942155729771998,
"learning_rate": 1.9999364708739005e-06,
"loss": 0.4567,
"step": 164
},
{
"epoch": 0.03377513945038637,
"grad_norm": 0.7156881239994389,
"learning_rate": 1.9999287771264305e-06,
"loss": 0.4871,
"step": 165
},
{
"epoch": 0.03397983726523719,
"grad_norm": 0.7027330885247588,
"learning_rate": 1.999920643757703e-06,
"loss": 0.4371,
"step": 166
},
{
"epoch": 0.03418453508008802,
"grad_norm": 0.7022489082551948,
"learning_rate": 1.9999120707712943e-06,
"loss": 0.45,
"step": 167
},
{
"epoch": 0.034389232894938844,
"grad_norm": 0.7600832151805308,
"learning_rate": 1.9999030581709736e-06,
"loss": 0.4812,
"step": 168
},
{
"epoch": 0.03459393070978967,
"grad_norm": 0.7757365362216246,
"learning_rate": 1.9998936059607028e-06,
"loss": 0.4951,
"step": 169
},
{
"epoch": 0.0347986285246405,
"grad_norm": 0.7180624522326351,
"learning_rate": 1.9998837141446378e-06,
"loss": 0.4733,
"step": 170
},
{
"epoch": 0.035003326339491325,
"grad_norm": 0.8012677566963108,
"learning_rate": 1.9998733827271277e-06,
"loss": 0.4854,
"step": 171
},
{
"epoch": 0.035208024154342155,
"grad_norm": 0.715314370525801,
"learning_rate": 1.999862611712715e-06,
"loss": 0.4777,
"step": 172
},
{
"epoch": 0.03541272196919298,
"grad_norm": 0.7191382757417352,
"learning_rate": 1.9998514011061344e-06,
"loss": 0.4637,
"step": 173
},
{
"epoch": 0.035617419784043806,
"grad_norm": 0.7116753984408628,
"learning_rate": 1.9998397509123154e-06,
"loss": 0.4536,
"step": 174
},
{
"epoch": 0.03582211759889463,
"grad_norm": 0.7598078550909712,
"learning_rate": 1.9998276611363797e-06,
"loss": 0.4908,
"step": 175
},
{
"epoch": 0.03602681541374546,
"grad_norm": 0.6563039003765047,
"learning_rate": 1.999815131783643e-06,
"loss": 0.449,
"step": 176
},
{
"epoch": 0.03623151322859629,
"grad_norm": 0.719561949641505,
"learning_rate": 1.999802162859613e-06,
"loss": 0.4741,
"step": 177
},
{
"epoch": 0.03643621104344711,
"grad_norm": 0.7109902438469043,
"learning_rate": 1.999788754369993e-06,
"loss": 0.4701,
"step": 178
},
{
"epoch": 0.03664090885829794,
"grad_norm": 0.7065392449298251,
"learning_rate": 1.9997749063206762e-06,
"loss": 0.4714,
"step": 179
},
{
"epoch": 0.03684560667314876,
"grad_norm": 0.7696360740535267,
"learning_rate": 1.9997606187177524e-06,
"loss": 0.4875,
"step": 180
},
{
"epoch": 0.03705030448799959,
"grad_norm": 0.7305783700088637,
"learning_rate": 1.999745891567502e-06,
"loss": 0.4606,
"step": 181
},
{
"epoch": 0.037255002302850414,
"grad_norm": 0.7270975727384246,
"learning_rate": 1.9997307248764014e-06,
"loss": 0.4198,
"step": 182
},
{
"epoch": 0.03745970011770124,
"grad_norm": 0.7569285986642791,
"learning_rate": 1.9997151186511173e-06,
"loss": 0.4354,
"step": 183
},
{
"epoch": 0.03766439793255207,
"grad_norm": 0.7219339206651326,
"learning_rate": 1.9996990728985115e-06,
"loss": 0.4378,
"step": 184
},
{
"epoch": 0.037869095747402895,
"grad_norm": 0.7690405011750759,
"learning_rate": 1.9996825876256386e-06,
"loss": 0.4791,
"step": 185
},
{
"epoch": 0.038073793562253724,
"grad_norm": 0.7552362068529521,
"learning_rate": 1.9996656628397466e-06,
"loss": 0.4672,
"step": 186
},
{
"epoch": 0.03827849137710455,
"grad_norm": 0.7341580278198813,
"learning_rate": 1.999648298548276e-06,
"loss": 0.4677,
"step": 187
},
{
"epoch": 0.038483189191955376,
"grad_norm": 0.7067121453226938,
"learning_rate": 1.9996304947588612e-06,
"loss": 0.4727,
"step": 188
},
{
"epoch": 0.038687887006806206,
"grad_norm": 0.7237165727925357,
"learning_rate": 1.99961225147933e-06,
"loss": 0.4446,
"step": 189
},
{
"epoch": 0.03889258482165703,
"grad_norm": 0.7069486805133093,
"learning_rate": 1.999593568717703e-06,
"loss": 0.4599,
"step": 190
},
{
"epoch": 0.03909728263650786,
"grad_norm": 0.897481774030034,
"learning_rate": 1.9995744464821936e-06,
"loss": 0.5129,
"step": 191
},
{
"epoch": 0.03930198045135868,
"grad_norm": 0.7488636278687589,
"learning_rate": 1.9995548847812097e-06,
"loss": 0.5184,
"step": 192
},
{
"epoch": 0.03950667826620951,
"grad_norm": 0.7194583225483666,
"learning_rate": 1.9995348836233515e-06,
"loss": 0.4915,
"step": 193
},
{
"epoch": 0.03971137608106033,
"grad_norm": 0.7384378767131218,
"learning_rate": 1.999514443017412e-06,
"loss": 0.4487,
"step": 194
},
{
"epoch": 0.03991607389591116,
"grad_norm": 0.7577332351147034,
"learning_rate": 1.9994935629723784e-06,
"loss": 0.4842,
"step": 195
},
{
"epoch": 0.04012077171076199,
"grad_norm": 0.7207002083905842,
"learning_rate": 1.999472243497431e-06,
"loss": 0.4698,
"step": 196
},
{
"epoch": 0.04032546952561281,
"grad_norm": 0.8010492120535461,
"learning_rate": 1.9994504846019423e-06,
"loss": 0.4561,
"step": 197
},
{
"epoch": 0.04053016734046364,
"grad_norm": 0.7453701461541147,
"learning_rate": 1.9994282862954787e-06,
"loss": 0.4806,
"step": 198
},
{
"epoch": 0.040734865155314465,
"grad_norm": 0.7255193966716207,
"learning_rate": 1.9994056485878002e-06,
"loss": 0.4511,
"step": 199
},
{
"epoch": 0.040939562970165294,
"grad_norm": 0.7957588909816856,
"learning_rate": 1.9993825714888594e-06,
"loss": 0.4775,
"step": 200
},
{
"epoch": 0.04114426078501612,
"grad_norm": 0.7304580504624026,
"learning_rate": 1.999359055008802e-06,
"loss": 0.4476,
"step": 201
},
{
"epoch": 0.041348958599866946,
"grad_norm": 0.8052138479705295,
"learning_rate": 1.999335099157967e-06,
"loss": 0.4621,
"step": 202
},
{
"epoch": 0.041553656414717775,
"grad_norm": 0.7344879094324241,
"learning_rate": 1.999310703946887e-06,
"loss": 0.448,
"step": 203
},
{
"epoch": 0.0417583542295686,
"grad_norm": 0.804269507197302,
"learning_rate": 1.999285869386287e-06,
"loss": 0.471,
"step": 204
},
{
"epoch": 0.04196305204441943,
"grad_norm": 0.7284627322104599,
"learning_rate": 1.9992605954870867e-06,
"loss": 0.4418,
"step": 205
},
{
"epoch": 0.04216774985927025,
"grad_norm": 0.7243013667651625,
"learning_rate": 1.999234882260396e-06,
"loss": 0.4669,
"step": 206
},
{
"epoch": 0.04237244767412108,
"grad_norm": 0.677583318692503,
"learning_rate": 1.9992087297175213e-06,
"loss": 0.4447,
"step": 207
},
{
"epoch": 0.04257714548897191,
"grad_norm": 0.7334595699121094,
"learning_rate": 1.9991821378699598e-06,
"loss": 0.4719,
"step": 208
},
{
"epoch": 0.04278184330382273,
"grad_norm": 0.7351912069847943,
"learning_rate": 1.999155106729403e-06,
"loss": 0.4758,
"step": 209
},
{
"epoch": 0.04298654111867356,
"grad_norm": 0.7262994043092325,
"learning_rate": 1.9991276363077355e-06,
"loss": 0.4636,
"step": 210
},
{
"epoch": 0.04319123893352438,
"grad_norm": 0.7170624975773432,
"learning_rate": 1.999099726617034e-06,
"loss": 0.4432,
"step": 211
},
{
"epoch": 0.04339593674837521,
"grad_norm": 0.7756861925710989,
"learning_rate": 1.9990713776695697e-06,
"loss": 0.4277,
"step": 212
},
{
"epoch": 0.043600634563226034,
"grad_norm": 0.7766290291464314,
"learning_rate": 1.999042589477806e-06,
"loss": 0.4521,
"step": 213
},
{
"epoch": 0.043805332378076864,
"grad_norm": 0.7675003445260637,
"learning_rate": 1.9990133620543992e-06,
"loss": 0.4728,
"step": 214
},
{
"epoch": 0.04401003019292769,
"grad_norm": 0.7502537566865808,
"learning_rate": 1.9989836954122006e-06,
"loss": 0.4919,
"step": 215
},
{
"epoch": 0.044214728007778516,
"grad_norm": 0.7256221777073304,
"learning_rate": 1.998953589564252e-06,
"loss": 0.4427,
"step": 216
},
{
"epoch": 0.044419425822629345,
"grad_norm": 0.7209802907423725,
"learning_rate": 1.9989230445237905e-06,
"loss": 0.4482,
"step": 217
},
{
"epoch": 0.04462412363748017,
"grad_norm": 0.761848969478383,
"learning_rate": 1.9988920603042437e-06,
"loss": 0.4623,
"step": 218
},
{
"epoch": 0.044828821452331,
"grad_norm": 0.7511377700619639,
"learning_rate": 1.9988606369192357e-06,
"loss": 0.4695,
"step": 219
},
{
"epoch": 0.045033519267181826,
"grad_norm": 0.684910692983434,
"learning_rate": 1.998828774382581e-06,
"loss": 0.4546,
"step": 220
},
{
"epoch": 0.04523821708203265,
"grad_norm": 0.8229255435418116,
"learning_rate": 1.998796472708288e-06,
"loss": 0.4736,
"step": 221
},
{
"epoch": 0.04544291489688348,
"grad_norm": 0.7208349457907924,
"learning_rate": 1.998763731910558e-06,
"loss": 0.4464,
"step": 222
},
{
"epoch": 0.0456476127117343,
"grad_norm": 0.8196660504458043,
"learning_rate": 1.998730552003786e-06,
"loss": 0.5129,
"step": 223
},
{
"epoch": 0.04585231052658513,
"grad_norm": 0.7410783352083353,
"learning_rate": 1.99869693300256e-06,
"loss": 0.4716,
"step": 224
},
{
"epoch": 0.04605700834143595,
"grad_norm": 0.6829979843011463,
"learning_rate": 1.9986628749216598e-06,
"loss": 0.4543,
"step": 225
},
{
"epoch": 0.04626170615628678,
"grad_norm": 0.6883777439066587,
"learning_rate": 1.9986283777760598e-06,
"loss": 0.4441,
"step": 226
},
{
"epoch": 0.04646640397113761,
"grad_norm": 0.7411624814334564,
"learning_rate": 1.9985934415809266e-06,
"loss": 0.4902,
"step": 227
},
{
"epoch": 0.046671101785988434,
"grad_norm": 0.7207442928316582,
"learning_rate": 1.99855806635162e-06,
"loss": 0.4502,
"step": 228
},
{
"epoch": 0.04687579960083926,
"grad_norm": 0.7535800049970842,
"learning_rate": 1.9985222521036923e-06,
"loss": 0.4588,
"step": 229
},
{
"epoch": 0.047080497415690085,
"grad_norm": 0.755131046087142,
"learning_rate": 1.9984859988528896e-06,
"loss": 0.478,
"step": 230
},
{
"epoch": 0.047285195230540915,
"grad_norm": 0.7363267944788255,
"learning_rate": 1.9984493066151515e-06,
"loss": 0.4722,
"step": 231
},
{
"epoch": 0.04748989304539174,
"grad_norm": 0.7175156333070826,
"learning_rate": 1.9984121754066084e-06,
"loss": 0.4284,
"step": 232
},
{
"epoch": 0.04769459086024257,
"grad_norm": 0.7021868758602576,
"learning_rate": 1.9983746052435867e-06,
"loss": 0.4549,
"step": 233
},
{
"epoch": 0.047899288675093396,
"grad_norm": 0.7661651319293146,
"learning_rate": 1.998336596142603e-06,
"loss": 0.4626,
"step": 234
},
{
"epoch": 0.04810398648994422,
"grad_norm": 0.7200117942966474,
"learning_rate": 1.9982981481203685e-06,
"loss": 0.4602,
"step": 235
},
{
"epoch": 0.04830868430479505,
"grad_norm": 0.7402636115111145,
"learning_rate": 1.9982592611937875e-06,
"loss": 0.462,
"step": 236
},
{
"epoch": 0.04851338211964587,
"grad_norm": 0.7289521015317652,
"learning_rate": 1.998219935379956e-06,
"loss": 0.4247,
"step": 237
},
{
"epoch": 0.0487180799344967,
"grad_norm": 0.8254564955090967,
"learning_rate": 1.9981801706961637e-06,
"loss": 0.445,
"step": 238
},
{
"epoch": 0.04892277774934753,
"grad_norm": 0.711987818470011,
"learning_rate": 1.9981399671598938e-06,
"loss": 0.4373,
"step": 239
},
{
"epoch": 0.04912747556419835,
"grad_norm": 0.7578069994316992,
"learning_rate": 1.9980993247888215e-06,
"loss": 0.4397,
"step": 240
},
{
"epoch": 0.04933217337904918,
"grad_norm": 0.7111156315088227,
"learning_rate": 1.9980582436008155e-06,
"loss": 0.444,
"step": 241
},
{
"epoch": 0.0495368711939,
"grad_norm": 0.6825451116028749,
"learning_rate": 1.998016723613937e-06,
"loss": 0.4216,
"step": 242
},
{
"epoch": 0.04974156900875083,
"grad_norm": 0.717357311135891,
"learning_rate": 1.9979747648464406e-06,
"loss": 0.4393,
"step": 243
},
{
"epoch": 0.049946266823601655,
"grad_norm": 0.7431878666336732,
"learning_rate": 1.9979323673167735e-06,
"loss": 0.4588,
"step": 244
},
{
"epoch": 0.050150964638452485,
"grad_norm": 0.7393037207206594,
"learning_rate": 1.997889531043576e-06,
"loss": 0.4547,
"step": 245
},
{
"epoch": 0.050355662453303314,
"grad_norm": 0.751624588819876,
"learning_rate": 1.997846256045681e-06,
"loss": 0.4423,
"step": 246
},
{
"epoch": 0.050560360268154136,
"grad_norm": 0.7291703435937729,
"learning_rate": 1.9978025423421143e-06,
"loss": 0.4577,
"step": 247
},
{
"epoch": 0.050765058083004966,
"grad_norm": 0.7730058435275767,
"learning_rate": 1.9977583899520954e-06,
"loss": 0.4725,
"step": 248
},
{
"epoch": 0.05096975589785579,
"grad_norm": 0.7388265631923211,
"learning_rate": 1.9977137988950354e-06,
"loss": 0.4915,
"step": 249
},
{
"epoch": 0.05117445371270662,
"grad_norm": 0.7253056655402751,
"learning_rate": 1.9976687691905393e-06,
"loss": 0.4463,
"step": 250
},
{
"epoch": 0.05137915152755744,
"grad_norm": 0.7279513578282504,
"learning_rate": 1.997623300858404e-06,
"loss": 0.4692,
"step": 251
},
{
"epoch": 0.05158384934240827,
"grad_norm": 0.7758619445867678,
"learning_rate": 1.99757739391862e-06,
"loss": 0.4359,
"step": 252
},
{
"epoch": 0.0517885471572591,
"grad_norm": 0.6934936677414176,
"learning_rate": 1.9975310483913706e-06,
"loss": 0.4342,
"step": 253
},
{
"epoch": 0.05199324497210992,
"grad_norm": 0.785492944076531,
"learning_rate": 1.9974842642970316e-06,
"loss": 0.4762,
"step": 254
},
{
"epoch": 0.05219794278696075,
"grad_norm": 0.693346587621168,
"learning_rate": 1.9974370416561716e-06,
"loss": 0.4077,
"step": 255
},
{
"epoch": 0.05240264060181157,
"grad_norm": 0.7036807587452536,
"learning_rate": 1.9973893804895526e-06,
"loss": 0.4559,
"step": 256
},
{
"epoch": 0.0526073384166624,
"grad_norm": 0.6487321250079171,
"learning_rate": 1.997341280818128e-06,
"loss": 0.4445,
"step": 257
},
{
"epoch": 0.05281203623151323,
"grad_norm": 0.7581793864091325,
"learning_rate": 1.9972927426630464e-06,
"loss": 0.4189,
"step": 258
},
{
"epoch": 0.053016734046364054,
"grad_norm": 0.7654003299344445,
"learning_rate": 1.9972437660456465e-06,
"loss": 0.4772,
"step": 259
},
{
"epoch": 0.053221431861214884,
"grad_norm": 0.7482689765950153,
"learning_rate": 1.9971943509874614e-06,
"loss": 0.4577,
"step": 260
},
{
"epoch": 0.053426129676065706,
"grad_norm": 0.7328225919609754,
"learning_rate": 1.997144497510217e-06,
"loss": 0.4301,
"step": 261
},
{
"epoch": 0.053630827490916536,
"grad_norm": 0.734577771662883,
"learning_rate": 1.9970942056358307e-06,
"loss": 0.4721,
"step": 262
},
{
"epoch": 0.05383552530576736,
"grad_norm": 0.7974471697046129,
"learning_rate": 1.997043475386414e-06,
"loss": 0.4759,
"step": 263
},
{
"epoch": 0.05404022312061819,
"grad_norm": 0.7204102805022299,
"learning_rate": 1.99699230678427e-06,
"loss": 0.4159,
"step": 264
},
{
"epoch": 0.05424492093546902,
"grad_norm": 0.8012057502786673,
"learning_rate": 1.996940699851896e-06,
"loss": 0.4784,
"step": 265
},
{
"epoch": 0.05444961875031984,
"grad_norm": 0.7456181199531785,
"learning_rate": 1.9968886546119805e-06,
"loss": 0.4716,
"step": 266
},
{
"epoch": 0.05465431656517067,
"grad_norm": 0.7582815001255205,
"learning_rate": 1.996836171087405e-06,
"loss": 0.4561,
"step": 267
},
{
"epoch": 0.05485901438002149,
"grad_norm": 0.7334168776176787,
"learning_rate": 1.996783249301245e-06,
"loss": 0.4344,
"step": 268
},
{
"epoch": 0.05506371219487232,
"grad_norm": 0.6872926048341936,
"learning_rate": 1.9967298892767674e-06,
"loss": 0.4293,
"step": 269
},
{
"epoch": 0.05526841000972314,
"grad_norm": 0.7532210670290133,
"learning_rate": 1.9966760910374313e-06,
"loss": 0.4644,
"step": 270
},
{
"epoch": 0.05547310782457397,
"grad_norm": 0.7306067456669837,
"learning_rate": 1.99662185460689e-06,
"loss": 0.4618,
"step": 271
},
{
"epoch": 0.0556778056394248,
"grad_norm": 0.7456313505165526,
"learning_rate": 1.9965671800089887e-06,
"loss": 0.4389,
"step": 272
},
{
"epoch": 0.055882503454275624,
"grad_norm": 0.6963357390921938,
"learning_rate": 1.9965120672677646e-06,
"loss": 0.4596,
"step": 273
},
{
"epoch": 0.056087201269126453,
"grad_norm": 0.6963762346496246,
"learning_rate": 1.9964565164074488e-06,
"loss": 0.4452,
"step": 274
},
{
"epoch": 0.056291899083977276,
"grad_norm": 0.7538437049515703,
"learning_rate": 1.996400527452464e-06,
"loss": 0.442,
"step": 275
},
{
"epoch": 0.056496596898828105,
"grad_norm": 0.7129437285411915,
"learning_rate": 1.9963441004274265e-06,
"loss": 0.4575,
"step": 276
},
{
"epoch": 0.056701294713678935,
"grad_norm": 0.7500374688918953,
"learning_rate": 1.9962872353571436e-06,
"loss": 0.4739,
"step": 277
},
{
"epoch": 0.05690599252852976,
"grad_norm": 0.7695931372733311,
"learning_rate": 1.996229932266617e-06,
"loss": 0.4523,
"step": 278
},
{
"epoch": 0.05711069034338059,
"grad_norm": 0.8047384042314083,
"learning_rate": 1.99617219118104e-06,
"loss": 0.4541,
"step": 279
},
{
"epoch": 0.05731538815823141,
"grad_norm": 0.7337412326319969,
"learning_rate": 1.9961140121257978e-06,
"loss": 0.5429,
"step": 280
},
{
"epoch": 0.05752008597308224,
"grad_norm": 0.7647644479794993,
"learning_rate": 1.99605539512647e-06,
"loss": 0.4479,
"step": 281
},
{
"epoch": 0.05772478378793306,
"grad_norm": 0.7676226046817726,
"learning_rate": 1.9959963402088274e-06,
"loss": 0.4641,
"step": 282
},
{
"epoch": 0.05792948160278389,
"grad_norm": 0.7235143819897621,
"learning_rate": 1.9959368473988333e-06,
"loss": 0.4545,
"step": 283
},
{
"epoch": 0.05813417941763472,
"grad_norm": 0.7247778707795571,
"learning_rate": 1.9958769167226444e-06,
"loss": 0.4527,
"step": 284
},
{
"epoch": 0.05833887723248554,
"grad_norm": 0.7539665158584474,
"learning_rate": 1.995816548206609e-06,
"loss": 0.4458,
"step": 285
},
{
"epoch": 0.05854357504733637,
"grad_norm": 0.7532087725317325,
"learning_rate": 1.995755741877269e-06,
"loss": 0.4461,
"step": 286
},
{
"epoch": 0.058748272862187194,
"grad_norm": 0.7797916526020292,
"learning_rate": 1.995694497761357e-06,
"loss": 0.4665,
"step": 287
},
{
"epoch": 0.05895297067703802,
"grad_norm": 0.7517569918058703,
"learning_rate": 1.9956328158857992e-06,
"loss": 0.4728,
"step": 288
},
{
"epoch": 0.059157668491888846,
"grad_norm": 0.7570745421432602,
"learning_rate": 1.995570696277715e-06,
"loss": 0.4563,
"step": 289
},
{
"epoch": 0.059362366306739675,
"grad_norm": 0.740586734451177,
"learning_rate": 1.995508138964415e-06,
"loss": 0.4709,
"step": 290
},
{
"epoch": 0.059567064121590504,
"grad_norm": 0.7681915361666245,
"learning_rate": 1.995445143973403e-06,
"loss": 0.4503,
"step": 291
},
{
"epoch": 0.05977176193644133,
"grad_norm": 0.7455448750612553,
"learning_rate": 1.9953817113323743e-06,
"loss": 0.4529,
"step": 292
},
{
"epoch": 0.059976459751292156,
"grad_norm": 0.7745938996976398,
"learning_rate": 1.9953178410692174e-06,
"loss": 0.4256,
"step": 293
},
{
"epoch": 0.06018115756614298,
"grad_norm": 0.7431821602019313,
"learning_rate": 1.9952535332120137e-06,
"loss": 0.4453,
"step": 294
},
{
"epoch": 0.06038585538099381,
"grad_norm": 0.6903595364669262,
"learning_rate": 1.9951887877890354e-06,
"loss": 0.4339,
"step": 295
},
{
"epoch": 0.06059055319584464,
"grad_norm": 0.773597292773469,
"learning_rate": 1.9951236048287483e-06,
"loss": 0.4817,
"step": 296
},
{
"epoch": 0.06079525101069546,
"grad_norm": 0.7263693618591641,
"learning_rate": 1.9950579843598105e-06,
"loss": 0.4572,
"step": 297
},
{
"epoch": 0.06099994882554629,
"grad_norm": 0.7297961413486055,
"learning_rate": 1.994991926411072e-06,
"loss": 0.4359,
"step": 298
},
{
"epoch": 0.06120464664039711,
"grad_norm": 0.7855052727291876,
"learning_rate": 1.9949254310115753e-06,
"loss": 0.4568,
"step": 299
},
{
"epoch": 0.06140934445524794,
"grad_norm": 0.7300504368627593,
"learning_rate": 1.994858498190556e-06,
"loss": 0.4501,
"step": 300
},
{
"epoch": 0.061614042270098764,
"grad_norm": 0.7096161782700376,
"learning_rate": 1.99479112797744e-06,
"loss": 0.4663,
"step": 301
},
{
"epoch": 0.06181874008494959,
"grad_norm": 0.7018494412530502,
"learning_rate": 1.9947233204018477e-06,
"loss": 0.4401,
"step": 302
},
{
"epoch": 0.06202343789980042,
"grad_norm": 0.7907086687325865,
"learning_rate": 1.9946550754935906e-06,
"loss": 0.4394,
"step": 303
},
{
"epoch": 0.062228135714651245,
"grad_norm": 0.7637305795975494,
"learning_rate": 1.9945863932826727e-06,
"loss": 0.4368,
"step": 304
},
{
"epoch": 0.062432833529502074,
"grad_norm": 0.7745057196668337,
"learning_rate": 1.9945172737992904e-06,
"loss": 0.4926,
"step": 305
},
{
"epoch": 0.0626375313443529,
"grad_norm": 0.7367608831304401,
"learning_rate": 1.994447717073832e-06,
"loss": 0.4688,
"step": 306
},
{
"epoch": 0.06284222915920372,
"grad_norm": 0.7695856962055981,
"learning_rate": 1.9943777231368794e-06,
"loss": 0.4484,
"step": 307
},
{
"epoch": 0.06304692697405455,
"grad_norm": 0.7187776209986876,
"learning_rate": 1.994307292019204e-06,
"loss": 0.4444,
"step": 308
},
{
"epoch": 0.06325162478890538,
"grad_norm": 0.7347949665208309,
"learning_rate": 1.994236423751772e-06,
"loss": 0.4485,
"step": 309
},
{
"epoch": 0.06345632260375621,
"grad_norm": 0.7105615411584904,
"learning_rate": 1.99416511836574e-06,
"loss": 0.4115,
"step": 310
},
{
"epoch": 0.06366102041860704,
"grad_norm": 0.7092446318381356,
"learning_rate": 1.994093375892459e-06,
"loss": 0.4398,
"step": 311
},
{
"epoch": 0.06386571823345785,
"grad_norm": 0.770807237418002,
"learning_rate": 1.9940211963634696e-06,
"loss": 0.4413,
"step": 312
},
{
"epoch": 0.06407041604830868,
"grad_norm": 0.7396584102001305,
"learning_rate": 1.9939485798105057e-06,
"loss": 0.4851,
"step": 313
},
{
"epoch": 0.06427511386315951,
"grad_norm": 0.7444518429400907,
"learning_rate": 1.9938755262654945e-06,
"loss": 0.4337,
"step": 314
},
{
"epoch": 0.06447981167801034,
"grad_norm": 0.722720227886292,
"learning_rate": 1.9938020357605527e-06,
"loss": 0.4965,
"step": 315
},
{
"epoch": 0.06468450949286117,
"grad_norm": 0.7666649202795676,
"learning_rate": 1.993728108327992e-06,
"loss": 0.4532,
"step": 316
},
{
"epoch": 0.06488920730771199,
"grad_norm": 0.7549308832804502,
"learning_rate": 1.9936537440003134e-06,
"loss": 0.4582,
"step": 317
},
{
"epoch": 0.06509390512256281,
"grad_norm": 0.6872546619483418,
"learning_rate": 1.993578942810212e-06,
"loss": 0.4695,
"step": 318
},
{
"epoch": 0.06529860293741364,
"grad_norm": 0.7550879312423509,
"learning_rate": 1.9935037047905748e-06,
"loss": 0.4833,
"step": 319
},
{
"epoch": 0.06550330075226447,
"grad_norm": 0.7064631296777315,
"learning_rate": 1.99342802997448e-06,
"loss": 0.4724,
"step": 320
},
{
"epoch": 0.0657079985671153,
"grad_norm": 0.7041398494235039,
"learning_rate": 1.9933519183951977e-06,
"loss": 0.4441,
"step": 321
},
{
"epoch": 0.06591269638196612,
"grad_norm": 0.7624056534749137,
"learning_rate": 1.9932753700861914e-06,
"loss": 0.465,
"step": 322
},
{
"epoch": 0.06611739419681695,
"grad_norm": 0.7553645719854849,
"learning_rate": 1.9931983850811155e-06,
"loss": 0.4241,
"step": 323
},
{
"epoch": 0.06632209201166778,
"grad_norm": 0.7617302308064162,
"learning_rate": 1.9931209634138158e-06,
"loss": 0.4475,
"step": 324
},
{
"epoch": 0.0665267898265186,
"grad_norm": 0.7484410349937703,
"learning_rate": 1.9930431051183324e-06,
"loss": 0.432,
"step": 325
},
{
"epoch": 0.06673148764136942,
"grad_norm": 0.7283423040518047,
"learning_rate": 1.9929648102288953e-06,
"loss": 0.4388,
"step": 326
},
{
"epoch": 0.06693618545622025,
"grad_norm": 0.7852619772614368,
"learning_rate": 1.9928860787799265e-06,
"loss": 0.468,
"step": 327
},
{
"epoch": 0.06714088327107108,
"grad_norm": 0.7769054079891817,
"learning_rate": 1.992806910806041e-06,
"loss": 0.4579,
"step": 328
},
{
"epoch": 0.06734558108592191,
"grad_norm": 0.7619008198687942,
"learning_rate": 1.992727306342045e-06,
"loss": 0.4789,
"step": 329
},
{
"epoch": 0.06755027890077274,
"grad_norm": 0.7607061858221339,
"learning_rate": 1.9926472654229376e-06,
"loss": 0.4241,
"step": 330
},
{
"epoch": 0.06775497671562355,
"grad_norm": 0.7236097758888326,
"learning_rate": 1.992566788083908e-06,
"loss": 0.4181,
"step": 331
},
{
"epoch": 0.06795967453047438,
"grad_norm": 0.8114241422692142,
"learning_rate": 1.992485874360338e-06,
"loss": 0.4383,
"step": 332
},
{
"epoch": 0.06816437234532521,
"grad_norm": 0.7429107582072085,
"learning_rate": 1.992404524287803e-06,
"loss": 0.4418,
"step": 333
},
{
"epoch": 0.06836907016017604,
"grad_norm": 0.7664592844718724,
"learning_rate": 1.9923227379020674e-06,
"loss": 0.4424,
"step": 334
},
{
"epoch": 0.06857376797502687,
"grad_norm": 0.7525421136101852,
"learning_rate": 1.9922405152390893e-06,
"loss": 0.4601,
"step": 335
},
{
"epoch": 0.06877846578987769,
"grad_norm": 0.7924977816871237,
"learning_rate": 1.9921578563350182e-06,
"loss": 0.4481,
"step": 336
},
{
"epoch": 0.06898316360472852,
"grad_norm": 0.7205455607720451,
"learning_rate": 1.9920747612261953e-06,
"loss": 0.472,
"step": 337
},
{
"epoch": 0.06918786141957935,
"grad_norm": 0.8099842191878124,
"learning_rate": 1.9919912299491534e-06,
"loss": 0.4522,
"step": 338
},
{
"epoch": 0.06939255923443018,
"grad_norm": 0.7601204829884295,
"learning_rate": 1.991907262540617e-06,
"loss": 0.4361,
"step": 339
},
{
"epoch": 0.069597257049281,
"grad_norm": 0.7498165219729875,
"learning_rate": 1.9918228590375034e-06,
"loss": 0.4363,
"step": 340
},
{
"epoch": 0.06980195486413182,
"grad_norm": 0.7130352034738562,
"learning_rate": 1.9917380194769197e-06,
"loss": 0.4355,
"step": 341
},
{
"epoch": 0.07000665267898265,
"grad_norm": 0.7653650161746682,
"learning_rate": 1.991652743896167e-06,
"loss": 0.4062,
"step": 342
},
{
"epoch": 0.07021135049383348,
"grad_norm": 0.8196206368298663,
"learning_rate": 1.991567032332736e-06,
"loss": 0.4614,
"step": 343
},
{
"epoch": 0.07041604830868431,
"grad_norm": 0.7812062890731587,
"learning_rate": 1.991480884824311e-06,
"loss": 0.4975,
"step": 344
},
{
"epoch": 0.07062074612353512,
"grad_norm": 0.7206276352235353,
"learning_rate": 1.9913943014087655e-06,
"loss": 0.4157,
"step": 345
},
{
"epoch": 0.07082544393838595,
"grad_norm": 0.78698897087266,
"learning_rate": 1.9913072821241672e-06,
"loss": 0.4581,
"step": 346
},
{
"epoch": 0.07103014175323678,
"grad_norm": 0.7764210083677198,
"learning_rate": 1.991219827008775e-06,
"loss": 0.4408,
"step": 347
},
{
"epoch": 0.07123483956808761,
"grad_norm": 0.7595433195005857,
"learning_rate": 1.9911319361010367e-06,
"loss": 0.4492,
"step": 348
},
{
"epoch": 0.07143953738293844,
"grad_norm": 0.722334537399672,
"learning_rate": 1.991043609439596e-06,
"loss": 0.4098,
"step": 349
},
{
"epoch": 0.07164423519778926,
"grad_norm": 0.7424362807959037,
"learning_rate": 1.9909548470632842e-06,
"loss": 0.4169,
"step": 350
},
{
"epoch": 0.07184893301264009,
"grad_norm": 0.7665542829825039,
"learning_rate": 1.9908656490111267e-06,
"loss": 0.4635,
"step": 351
},
{
"epoch": 0.07205363082749092,
"grad_norm": 0.7670493050777013,
"learning_rate": 1.9907760153223396e-06,
"loss": 0.4245,
"step": 352
},
{
"epoch": 0.07225832864234175,
"grad_norm": 0.7082306081348982,
"learning_rate": 1.9906859460363304e-06,
"loss": 0.3885,
"step": 353
},
{
"epoch": 0.07246302645719258,
"grad_norm": 0.7179046673862461,
"learning_rate": 1.990595441192699e-06,
"loss": 0.4047,
"step": 354
},
{
"epoch": 0.07266772427204339,
"grad_norm": 0.7785673633417279,
"learning_rate": 1.990504500831235e-06,
"loss": 0.484,
"step": 355
},
{
"epoch": 0.07287242208689422,
"grad_norm": 0.7657292926077239,
"learning_rate": 1.9904131249919215e-06,
"loss": 0.4444,
"step": 356
},
{
"epoch": 0.07307711990174505,
"grad_norm": 0.7569753573050043,
"learning_rate": 1.9903213137149313e-06,
"loss": 0.4701,
"step": 357
},
{
"epoch": 0.07328181771659588,
"grad_norm": 0.8072244283837906,
"learning_rate": 1.99022906704063e-06,
"loss": 0.4409,
"step": 358
},
{
"epoch": 0.07348651553144671,
"grad_norm": 0.7629374834771911,
"learning_rate": 1.990136385009574e-06,
"loss": 0.4927,
"step": 359
},
{
"epoch": 0.07369121334629752,
"grad_norm": 0.7385249798311992,
"learning_rate": 1.990043267662511e-06,
"loss": 0.4338,
"step": 360
},
{
"epoch": 0.07389591116114835,
"grad_norm": 0.775227359922002,
"learning_rate": 1.989949715040381e-06,
"loss": 0.4789,
"step": 361
},
{
"epoch": 0.07410060897599918,
"grad_norm": 0.7517526442766227,
"learning_rate": 1.9898557271843133e-06,
"loss": 0.4504,
"step": 362
},
{
"epoch": 0.07430530679085001,
"grad_norm": 0.7375927174293256,
"learning_rate": 1.9897613041356314e-06,
"loss": 0.4108,
"step": 363
},
{
"epoch": 0.07451000460570083,
"grad_norm": 0.72174871971371,
"learning_rate": 1.9896664459358472e-06,
"loss": 0.4475,
"step": 364
},
{
"epoch": 0.07471470242055166,
"grad_norm": 0.7329776456768429,
"learning_rate": 1.9895711526266667e-06,
"loss": 0.424,
"step": 365
},
{
"epoch": 0.07491940023540249,
"grad_norm": 0.7185696661034995,
"learning_rate": 1.9894754242499852e-06,
"loss": 0.4543,
"step": 366
},
{
"epoch": 0.07512409805025332,
"grad_norm": 0.7049184644044292,
"learning_rate": 1.98937926084789e-06,
"loss": 0.42,
"step": 367
},
{
"epoch": 0.07532879586510415,
"grad_norm": 0.7293724081932391,
"learning_rate": 1.989282662462659e-06,
"loss": 0.43,
"step": 368
},
{
"epoch": 0.07553349367995496,
"grad_norm": 0.7059632003848231,
"learning_rate": 1.9891856291367626e-06,
"loss": 0.4275,
"step": 369
},
{
"epoch": 0.07573819149480579,
"grad_norm": 0.7683789138077434,
"learning_rate": 1.9890881609128618e-06,
"loss": 0.4408,
"step": 370
},
{
"epoch": 0.07594288930965662,
"grad_norm": 0.7872951646579199,
"learning_rate": 1.9889902578338087e-06,
"loss": 0.4292,
"step": 371
},
{
"epoch": 0.07614758712450745,
"grad_norm": 0.7924075912782186,
"learning_rate": 1.988891919942646e-06,
"loss": 0.4408,
"step": 372
},
{
"epoch": 0.07635228493935828,
"grad_norm": 0.7607074410846072,
"learning_rate": 1.9887931472826093e-06,
"loss": 0.4485,
"step": 373
},
{
"epoch": 0.0765569827542091,
"grad_norm": 0.7592064137840062,
"learning_rate": 1.9886939398971238e-06,
"loss": 0.4404,
"step": 374
},
{
"epoch": 0.07676168056905992,
"grad_norm": 0.8183614522835663,
"learning_rate": 1.9885942978298054e-06,
"loss": 0.4677,
"step": 375
},
{
"epoch": 0.07696637838391075,
"grad_norm": 0.7400985748589023,
"learning_rate": 1.9884942211244637e-06,
"loss": 0.4867,
"step": 376
},
{
"epoch": 0.07717107619876158,
"grad_norm": 0.7657568826393069,
"learning_rate": 1.988393709825096e-06,
"loss": 0.4592,
"step": 377
},
{
"epoch": 0.07737577401361241,
"grad_norm": 0.7242932934680155,
"learning_rate": 1.988292763975893e-06,
"loss": 0.4037,
"step": 378
},
{
"epoch": 0.07758047182846323,
"grad_norm": 0.695616397865644,
"learning_rate": 1.9881913836212365e-06,
"loss": 0.4534,
"step": 379
},
{
"epoch": 0.07778516964331406,
"grad_norm": 0.7286084983103666,
"learning_rate": 1.9880895688056977e-06,
"loss": 0.4267,
"step": 380
},
{
"epoch": 0.07798986745816489,
"grad_norm": 0.7434704183939207,
"learning_rate": 1.98798731957404e-06,
"loss": 0.4308,
"step": 381
},
{
"epoch": 0.07819456527301571,
"grad_norm": 0.7803766550265465,
"learning_rate": 1.9878846359712176e-06,
"loss": 0.4455,
"step": 382
},
{
"epoch": 0.07839926308786653,
"grad_norm": 0.777748381274515,
"learning_rate": 1.9877815180423757e-06,
"loss": 0.4639,
"step": 383
},
{
"epoch": 0.07860396090271736,
"grad_norm": 0.724760659067026,
"learning_rate": 1.9876779658328503e-06,
"loss": 0.4666,
"step": 384
},
{
"epoch": 0.07880865871756819,
"grad_norm": 0.6620651646162441,
"learning_rate": 1.9875739793881685e-06,
"loss": 0.4195,
"step": 385
},
{
"epoch": 0.07901335653241902,
"grad_norm": 0.7524415209772658,
"learning_rate": 1.9874695587540477e-06,
"loss": 0.4431,
"step": 386
},
{
"epoch": 0.07921805434726985,
"grad_norm": 0.6797630439484897,
"learning_rate": 1.9873647039763975e-06,
"loss": 0.4453,
"step": 387
},
{
"epoch": 0.07942275216212066,
"grad_norm": 0.7685812672797145,
"learning_rate": 1.987259415101317e-06,
"loss": 0.4623,
"step": 388
},
{
"epoch": 0.07962744997697149,
"grad_norm": 0.7581895406953137,
"learning_rate": 1.9871536921750965e-06,
"loss": 0.4423,
"step": 389
},
{
"epoch": 0.07983214779182232,
"grad_norm": 0.7100776210964536,
"learning_rate": 1.987047535244218e-06,
"loss": 0.4171,
"step": 390
},
{
"epoch": 0.08003684560667315,
"grad_norm": 0.7299757652818287,
"learning_rate": 1.9869409443553535e-06,
"loss": 0.454,
"step": 391
},
{
"epoch": 0.08024154342152398,
"grad_norm": 0.7439290629641574,
"learning_rate": 1.9868339195553657e-06,
"loss": 0.4426,
"step": 392
},
{
"epoch": 0.0804462412363748,
"grad_norm": 0.75716235792549,
"learning_rate": 1.9867264608913084e-06,
"loss": 0.4479,
"step": 393
},
{
"epoch": 0.08065093905122563,
"grad_norm": 0.6968938261452492,
"learning_rate": 1.9866185684104266e-06,
"loss": 0.4335,
"step": 394
},
{
"epoch": 0.08085563686607646,
"grad_norm": 0.7170546940871543,
"learning_rate": 1.9865102421601545e-06,
"loss": 0.4286,
"step": 395
},
{
"epoch": 0.08106033468092728,
"grad_norm": 0.7478223773320385,
"learning_rate": 1.986401482188119e-06,
"loss": 0.4202,
"step": 396
},
{
"epoch": 0.08126503249577811,
"grad_norm": 0.7076328746009948,
"learning_rate": 1.986292288542136e-06,
"loss": 0.4558,
"step": 397
},
{
"epoch": 0.08146973031062893,
"grad_norm": 0.7591423650217918,
"learning_rate": 1.986182661270213e-06,
"loss": 0.4674,
"step": 398
},
{
"epoch": 0.08167442812547976,
"grad_norm": 0.7416281412961139,
"learning_rate": 1.9860726004205485e-06,
"loss": 0.4397,
"step": 399
},
{
"epoch": 0.08187912594033059,
"grad_norm": 0.7416913042041706,
"learning_rate": 1.98596210604153e-06,
"loss": 0.4405,
"step": 400
}
],
"logging_steps": 1,
"max_steps": 4885,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 200,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 53820235284480.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}