{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9733882030178327,
"eval_steps": 500,
"global_step": 887,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0010973936899862826,
"grad_norm": 0.27108055353164673,
"learning_rate": 2.0000000000000002e-07,
"loss": 1.9861,
"step": 1
},
{
"epoch": 0.0021947873799725653,
"grad_norm": 0.27781054377555847,
"learning_rate": 4.0000000000000003e-07,
"loss": 2.0261,
"step": 2
},
{
"epoch": 0.0032921810699588477,
"grad_norm": 0.2786555290222168,
"learning_rate": 6.000000000000001e-07,
"loss": 2.0242,
"step": 3
},
{
"epoch": 0.0043895747599451305,
"grad_norm": 0.3020860552787781,
"learning_rate": 8.000000000000001e-07,
"loss": 2.0951,
"step": 4
},
{
"epoch": 0.0054869684499314125,
"grad_norm": 0.2764291763305664,
"learning_rate": 1.0000000000000002e-06,
"loss": 1.9623,
"step": 5
},
{
"epoch": 0.006584362139917695,
"grad_norm": 0.3049551248550415,
"learning_rate": 1.2000000000000002e-06,
"loss": 2.0732,
"step": 6
},
{
"epoch": 0.007681755829903978,
"grad_norm": 0.2732066512107849,
"learning_rate": 1.4000000000000001e-06,
"loss": 2.015,
"step": 7
},
{
"epoch": 0.008779149519890261,
"grad_norm": 0.3337247371673584,
"learning_rate": 1.6000000000000001e-06,
"loss": 2.1519,
"step": 8
},
{
"epoch": 0.009876543209876543,
"grad_norm": 0.3307493329048157,
"learning_rate": 1.8000000000000001e-06,
"loss": 2.0968,
"step": 9
},
{
"epoch": 0.010973936899862825,
"grad_norm": 0.29627102613449097,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.0329,
"step": 10
},
{
"epoch": 0.012071330589849109,
"grad_norm": 0.2832229733467102,
"learning_rate": 2.2e-06,
"loss": 2.0597,
"step": 11
},
{
"epoch": 0.01316872427983539,
"grad_norm": 0.2993783950805664,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.9716,
"step": 12
},
{
"epoch": 0.014266117969821674,
"grad_norm": 0.3148089051246643,
"learning_rate": 2.6e-06,
"loss": 2.1158,
"step": 13
},
{
"epoch": 0.015363511659807956,
"grad_norm": 0.3562306761741638,
"learning_rate": 2.8000000000000003e-06,
"loss": 2.1904,
"step": 14
},
{
"epoch": 0.01646090534979424,
"grad_norm": 0.2843739688396454,
"learning_rate": 3e-06,
"loss": 1.9782,
"step": 15
},
{
"epoch": 0.017558299039780522,
"grad_norm": 0.2809521555900574,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.9743,
"step": 16
},
{
"epoch": 0.018655692729766804,
"grad_norm": 0.30157962441444397,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.9993,
"step": 17
},
{
"epoch": 0.019753086419753086,
"grad_norm": 0.3049308657646179,
"learning_rate": 3.6000000000000003e-06,
"loss": 2.0293,
"step": 18
},
{
"epoch": 0.020850480109739368,
"grad_norm": 0.31477564573287964,
"learning_rate": 3.8000000000000005e-06,
"loss": 2.0614,
"step": 19
},
{
"epoch": 0.02194787379972565,
"grad_norm": 0.34430813789367676,
"learning_rate": 4.000000000000001e-06,
"loss": 2.082,
"step": 20
},
{
"epoch": 0.023045267489711935,
"grad_norm": 0.29869017004966736,
"learning_rate": 4.2000000000000004e-06,
"loss": 1.9798,
"step": 21
},
{
"epoch": 0.024142661179698217,
"grad_norm": 0.28752100467681885,
"learning_rate": 4.4e-06,
"loss": 1.9871,
"step": 22
},
{
"epoch": 0.0252400548696845,
"grad_norm": 0.33573031425476074,
"learning_rate": 4.600000000000001e-06,
"loss": 2.0437,
"step": 23
},
{
"epoch": 0.02633744855967078,
"grad_norm": 0.29683223366737366,
"learning_rate": 4.800000000000001e-06,
"loss": 2.037,
"step": 24
},
{
"epoch": 0.027434842249657063,
"grad_norm": 0.319722056388855,
"learning_rate": 5e-06,
"loss": 2.0975,
"step": 25
},
{
"epoch": 0.02853223593964335,
"grad_norm": 0.3013472855091095,
"learning_rate": 5.2e-06,
"loss": 2.1016,
"step": 26
},
{
"epoch": 0.02962962962962963,
"grad_norm": 0.28419122099876404,
"learning_rate": 5.400000000000001e-06,
"loss": 1.9477,
"step": 27
},
{
"epoch": 0.030727023319615913,
"grad_norm": 0.2823992669582367,
"learning_rate": 5.600000000000001e-06,
"loss": 1.9544,
"step": 28
},
{
"epoch": 0.031824417009602195,
"grad_norm": 0.2990231215953827,
"learning_rate": 5.8e-06,
"loss": 1.9982,
"step": 29
},
{
"epoch": 0.03292181069958848,
"grad_norm": 0.28071701526641846,
"learning_rate": 6e-06,
"loss": 1.9735,
"step": 30
},
{
"epoch": 0.03401920438957476,
"grad_norm": 0.28065022826194763,
"learning_rate": 6.200000000000001e-06,
"loss": 2.0191,
"step": 31
},
{
"epoch": 0.035116598079561044,
"grad_norm": 0.2757718563079834,
"learning_rate": 6.4000000000000006e-06,
"loss": 1.9187,
"step": 32
},
{
"epoch": 0.03621399176954732,
"grad_norm": 0.2993408739566803,
"learning_rate": 6.600000000000001e-06,
"loss": 1.9989,
"step": 33
},
{
"epoch": 0.03731138545953361,
"grad_norm": 0.28326326608657837,
"learning_rate": 6.800000000000001e-06,
"loss": 2.0218,
"step": 34
},
{
"epoch": 0.038408779149519894,
"grad_norm": 0.2843347489833832,
"learning_rate": 7e-06,
"loss": 1.9994,
"step": 35
},
{
"epoch": 0.03950617283950617,
"grad_norm": 0.28945305943489075,
"learning_rate": 7.2000000000000005e-06,
"loss": 2.0353,
"step": 36
},
{
"epoch": 0.04060356652949246,
"grad_norm": 0.26997682452201843,
"learning_rate": 7.4e-06,
"loss": 2.0038,
"step": 37
},
{
"epoch": 0.041700960219478736,
"grad_norm": 0.3185621201992035,
"learning_rate": 7.600000000000001e-06,
"loss": 1.9736,
"step": 38
},
{
"epoch": 0.04279835390946502,
"grad_norm": 0.32522234320640564,
"learning_rate": 7.800000000000002e-06,
"loss": 2.1093,
"step": 39
},
{
"epoch": 0.0438957475994513,
"grad_norm": 0.3031747043132782,
"learning_rate": 8.000000000000001e-06,
"loss": 2.0228,
"step": 40
},
{
"epoch": 0.044993141289437585,
"grad_norm": 0.31074920296669006,
"learning_rate": 8.2e-06,
"loss": 2.0363,
"step": 41
},
{
"epoch": 0.04609053497942387,
"grad_norm": 0.3501761257648468,
"learning_rate": 8.400000000000001e-06,
"loss": 2.0894,
"step": 42
},
{
"epoch": 0.04718792866941015,
"grad_norm": 0.25342920422554016,
"learning_rate": 8.6e-06,
"loss": 1.919,
"step": 43
},
{
"epoch": 0.048285322359396435,
"grad_norm": 0.2695287764072418,
"learning_rate": 8.8e-06,
"loss": 1.9496,
"step": 44
},
{
"epoch": 0.04938271604938271,
"grad_norm": 0.26249709725379944,
"learning_rate": 9e-06,
"loss": 1.9096,
"step": 45
},
{
"epoch": 0.050480109739369,
"grad_norm": 0.2559400498867035,
"learning_rate": 9.200000000000002e-06,
"loss": 1.9609,
"step": 46
},
{
"epoch": 0.051577503429355284,
"grad_norm": 0.26113536953926086,
"learning_rate": 9.4e-06,
"loss": 1.9484,
"step": 47
},
{
"epoch": 0.05267489711934156,
"grad_norm": 0.2483452707529068,
"learning_rate": 9.600000000000001e-06,
"loss": 1.9271,
"step": 48
},
{
"epoch": 0.05377229080932785,
"grad_norm": 0.24584588408470154,
"learning_rate": 9.800000000000001e-06,
"loss": 1.9134,
"step": 49
},
{
"epoch": 0.05486968449931413,
"grad_norm": 0.23456168174743652,
"learning_rate": 1e-05,
"loss": 1.8595,
"step": 50
},
{
"epoch": 0.05596707818930041,
"grad_norm": 0.24329470098018646,
"learning_rate": 1.02e-05,
"loss": 1.8626,
"step": 51
},
{
"epoch": 0.0570644718792867,
"grad_norm": 0.22366423904895782,
"learning_rate": 1.04e-05,
"loss": 1.9075,
"step": 52
},
{
"epoch": 0.058161865569272976,
"grad_norm": 0.2408476620912552,
"learning_rate": 1.0600000000000002e-05,
"loss": 1.9073,
"step": 53
},
{
"epoch": 0.05925925925925926,
"grad_norm": 0.26551100611686707,
"learning_rate": 1.0800000000000002e-05,
"loss": 1.9767,
"step": 54
},
{
"epoch": 0.06035665294924554,
"grad_norm": 0.2730308771133423,
"learning_rate": 1.1000000000000001e-05,
"loss": 1.9087,
"step": 55
},
{
"epoch": 0.061454046639231825,
"grad_norm": 0.2597392797470093,
"learning_rate": 1.1200000000000001e-05,
"loss": 1.975,
"step": 56
},
{
"epoch": 0.06255144032921811,
"grad_norm": 0.24098357558250427,
"learning_rate": 1.14e-05,
"loss": 1.8624,
"step": 57
},
{
"epoch": 0.06364883401920439,
"grad_norm": 0.23562097549438477,
"learning_rate": 1.16e-05,
"loss": 1.9068,
"step": 58
},
{
"epoch": 0.06474622770919067,
"grad_norm": 0.24404938519001007,
"learning_rate": 1.18e-05,
"loss": 1.9516,
"step": 59
},
{
"epoch": 0.06584362139917696,
"grad_norm": 0.245197594165802,
"learning_rate": 1.2e-05,
"loss": 1.9105,
"step": 60
},
{
"epoch": 0.06694101508916324,
"grad_norm": 0.22279669344425201,
"learning_rate": 1.22e-05,
"loss": 1.9019,
"step": 61
},
{
"epoch": 0.06803840877914952,
"grad_norm": 0.2473766803741455,
"learning_rate": 1.2400000000000002e-05,
"loss": 1.8418,
"step": 62
},
{
"epoch": 0.0691358024691358,
"grad_norm": 0.2862042486667633,
"learning_rate": 1.2600000000000001e-05,
"loss": 1.9353,
"step": 63
},
{
"epoch": 0.07023319615912209,
"grad_norm": 0.2588101625442505,
"learning_rate": 1.2800000000000001e-05,
"loss": 1.9448,
"step": 64
},
{
"epoch": 0.07133058984910837,
"grad_norm": 0.2884417772293091,
"learning_rate": 1.3000000000000001e-05,
"loss": 1.8889,
"step": 65
},
{
"epoch": 0.07242798353909465,
"grad_norm": 0.23664578795433044,
"learning_rate": 1.3200000000000002e-05,
"loss": 1.8244,
"step": 66
},
{
"epoch": 0.07352537722908094,
"grad_norm": 0.25291287899017334,
"learning_rate": 1.3400000000000002e-05,
"loss": 1.8071,
"step": 67
},
{
"epoch": 0.07462277091906722,
"grad_norm": 0.2404923290014267,
"learning_rate": 1.3600000000000002e-05,
"loss": 1.8677,
"step": 68
},
{
"epoch": 0.0757201646090535,
"grad_norm": 0.24635937809944153,
"learning_rate": 1.38e-05,
"loss": 1.8015,
"step": 69
},
{
"epoch": 0.07681755829903979,
"grad_norm": 0.24646110832691193,
"learning_rate": 1.4e-05,
"loss": 1.7901,
"step": 70
},
{
"epoch": 0.07791495198902607,
"grad_norm": 0.243991419672966,
"learning_rate": 1.4200000000000001e-05,
"loss": 1.7483,
"step": 71
},
{
"epoch": 0.07901234567901234,
"grad_norm": 0.26060330867767334,
"learning_rate": 1.4400000000000001e-05,
"loss": 1.7065,
"step": 72
},
{
"epoch": 0.08010973936899862,
"grad_norm": 0.2618904709815979,
"learning_rate": 1.46e-05,
"loss": 1.8377,
"step": 73
},
{
"epoch": 0.08120713305898491,
"grad_norm": 0.26802942156791687,
"learning_rate": 1.48e-05,
"loss": 1.7947,
"step": 74
},
{
"epoch": 0.0823045267489712,
"grad_norm": 0.28539785742759705,
"learning_rate": 1.5000000000000002e-05,
"loss": 1.7596,
"step": 75
},
{
"epoch": 0.08340192043895747,
"grad_norm": 0.25785282254219055,
"learning_rate": 1.5200000000000002e-05,
"loss": 1.6622,
"step": 76
},
{
"epoch": 0.08449931412894376,
"grad_norm": 0.2746061682701111,
"learning_rate": 1.54e-05,
"loss": 1.6833,
"step": 77
},
{
"epoch": 0.08559670781893004,
"grad_norm": 0.2993699610233307,
"learning_rate": 1.5600000000000003e-05,
"loss": 1.728,
"step": 78
},
{
"epoch": 0.08669410150891632,
"grad_norm": 0.31998202204704285,
"learning_rate": 1.58e-05,
"loss": 1.7735,
"step": 79
},
{
"epoch": 0.0877914951989026,
"grad_norm": 0.2985467314720154,
"learning_rate": 1.6000000000000003e-05,
"loss": 1.6394,
"step": 80
},
{
"epoch": 0.08888888888888889,
"grad_norm": 0.31099262833595276,
"learning_rate": 1.62e-05,
"loss": 1.7466,
"step": 81
},
{
"epoch": 0.08998628257887517,
"grad_norm": 0.2854881286621094,
"learning_rate": 1.64e-05,
"loss": 1.7291,
"step": 82
},
{
"epoch": 0.09108367626886145,
"grad_norm": 0.3132914900779724,
"learning_rate": 1.66e-05,
"loss": 1.7431,
"step": 83
},
{
"epoch": 0.09218106995884774,
"grad_norm": 0.3053690791130066,
"learning_rate": 1.6800000000000002e-05,
"loss": 1.6816,
"step": 84
},
{
"epoch": 0.09327846364883402,
"grad_norm": 0.2843291759490967,
"learning_rate": 1.7e-05,
"loss": 1.7334,
"step": 85
},
{
"epoch": 0.0943758573388203,
"grad_norm": 0.31299757957458496,
"learning_rate": 1.72e-05,
"loss": 1.6034,
"step": 86
},
{
"epoch": 0.09547325102880659,
"grad_norm": 0.29502251744270325,
"learning_rate": 1.7400000000000003e-05,
"loss": 1.6195,
"step": 87
},
{
"epoch": 0.09657064471879287,
"grad_norm": 0.32556185126304626,
"learning_rate": 1.76e-05,
"loss": 1.6456,
"step": 88
},
{
"epoch": 0.09766803840877915,
"grad_norm": 0.3797527253627777,
"learning_rate": 1.7800000000000002e-05,
"loss": 1.5675,
"step": 89
},
{
"epoch": 0.09876543209876543,
"grad_norm": 0.4491126239299774,
"learning_rate": 1.8e-05,
"loss": 1.6092,
"step": 90
},
{
"epoch": 0.09986282578875172,
"grad_norm": 0.3437753915786743,
"learning_rate": 1.8200000000000002e-05,
"loss": 1.5709,
"step": 91
},
{
"epoch": 0.100960219478738,
"grad_norm": 0.33262452483177185,
"learning_rate": 1.8400000000000003e-05,
"loss": 1.5559,
"step": 92
},
{
"epoch": 0.10205761316872428,
"grad_norm": 0.4088902771472931,
"learning_rate": 1.86e-05,
"loss": 1.5929,
"step": 93
},
{
"epoch": 0.10315500685871057,
"grad_norm": 0.42855969071388245,
"learning_rate": 1.88e-05,
"loss": 1.481,
"step": 94
},
{
"epoch": 0.10425240054869685,
"grad_norm": 0.38623425364494324,
"learning_rate": 1.9e-05,
"loss": 1.4687,
"step": 95
},
{
"epoch": 0.10534979423868313,
"grad_norm": 0.3554774522781372,
"learning_rate": 1.9200000000000003e-05,
"loss": 1.4739,
"step": 96
},
{
"epoch": 0.1064471879286694,
"grad_norm": 0.4467147886753082,
"learning_rate": 1.94e-05,
"loss": 1.4996,
"step": 97
},
{
"epoch": 0.1075445816186557,
"grad_norm": 0.4558533728122711,
"learning_rate": 1.9600000000000002e-05,
"loss": 1.463,
"step": 98
},
{
"epoch": 0.10864197530864197,
"grad_norm": 0.3612457513809204,
"learning_rate": 1.98e-05,
"loss": 1.4667,
"step": 99
},
{
"epoch": 0.10973936899862825,
"grad_norm": 0.3397855758666992,
"learning_rate": 2e-05,
"loss": 1.41,
"step": 100
},
{
"epoch": 0.11083676268861455,
"grad_norm": 0.42120879888534546,
"learning_rate": 1.997533908754624e-05,
"loss": 1.3884,
"step": 101
},
{
"epoch": 0.11193415637860082,
"grad_norm": 0.34555450081825256,
"learning_rate": 1.995067817509248e-05,
"loss": 1.4158,
"step": 102
},
{
"epoch": 0.1130315500685871,
"grad_norm": 0.30095019936561584,
"learning_rate": 1.992601726263872e-05,
"loss": 1.4731,
"step": 103
},
{
"epoch": 0.1141289437585734,
"grad_norm": 0.3604261577129364,
"learning_rate": 1.990135635018496e-05,
"loss": 1.4059,
"step": 104
},
{
"epoch": 0.11522633744855967,
"grad_norm": 0.3028746545314789,
"learning_rate": 1.9876695437731196e-05,
"loss": 1.4234,
"step": 105
},
{
"epoch": 0.11632373113854595,
"grad_norm": 0.343404084444046,
"learning_rate": 1.985203452527744e-05,
"loss": 1.319,
"step": 106
},
{
"epoch": 0.11742112482853223,
"grad_norm": 0.26763787865638733,
"learning_rate": 1.9827373612823677e-05,
"loss": 1.3013,
"step": 107
},
{
"epoch": 0.11851851851851852,
"grad_norm": 0.2645047903060913,
"learning_rate": 1.9802712700369916e-05,
"loss": 1.3119,
"step": 108
},
{
"epoch": 0.1196159122085048,
"grad_norm": 0.24199865758419037,
"learning_rate": 1.9778051787916155e-05,
"loss": 1.3058,
"step": 109
},
{
"epoch": 0.12071330589849108,
"grad_norm": 0.2630648910999298,
"learning_rate": 1.9753390875462394e-05,
"loss": 1.3024,
"step": 110
},
{
"epoch": 0.12181069958847737,
"grad_norm": 0.29971155524253845,
"learning_rate": 1.9728729963008633e-05,
"loss": 1.2853,
"step": 111
},
{
"epoch": 0.12290809327846365,
"grad_norm": 0.23841412365436554,
"learning_rate": 1.9704069050554872e-05,
"loss": 1.3528,
"step": 112
},
{
"epoch": 0.12400548696844993,
"grad_norm": 0.18174125254154205,
"learning_rate": 1.967940813810111e-05,
"loss": 1.3534,
"step": 113
},
{
"epoch": 0.12510288065843622,
"grad_norm": 0.23740167915821075,
"learning_rate": 1.965474722564735e-05,
"loss": 1.2486,
"step": 114
},
{
"epoch": 0.1262002743484225,
"grad_norm": 0.317221999168396,
"learning_rate": 1.9630086313193592e-05,
"loss": 1.2452,
"step": 115
},
{
"epoch": 0.12729766803840878,
"grad_norm": 0.17076945304870605,
"learning_rate": 1.9605425400739828e-05,
"loss": 1.282,
"step": 116
},
{
"epoch": 0.12839506172839507,
"grad_norm": 0.19480882585048676,
"learning_rate": 1.9580764488286066e-05,
"loss": 1.3408,
"step": 117
},
{
"epoch": 0.12949245541838134,
"grad_norm": 0.21473167836666107,
"learning_rate": 1.955610357583231e-05,
"loss": 1.2389,
"step": 118
},
{
"epoch": 0.13058984910836763,
"grad_norm": 0.21171647310256958,
"learning_rate": 1.9531442663378544e-05,
"loss": 1.2866,
"step": 119
},
{
"epoch": 0.13168724279835392,
"grad_norm": 0.16918618977069855,
"learning_rate": 1.9506781750924787e-05,
"loss": 1.3065,
"step": 120
},
{
"epoch": 0.13278463648834019,
"grad_norm": 0.17470093071460724,
"learning_rate": 1.9482120838471025e-05,
"loss": 1.3088,
"step": 121
},
{
"epoch": 0.13388203017832648,
"grad_norm": 0.16515418887138367,
"learning_rate": 1.9457459926017264e-05,
"loss": 1.2865,
"step": 122
},
{
"epoch": 0.13497942386831277,
"grad_norm": 0.17647692561149597,
"learning_rate": 1.9432799013563503e-05,
"loss": 1.2298,
"step": 123
},
{
"epoch": 0.13607681755829903,
"grad_norm": 0.1819489300251007,
"learning_rate": 1.9408138101109742e-05,
"loss": 1.2641,
"step": 124
},
{
"epoch": 0.13717421124828533,
"grad_norm": 0.16584061086177826,
"learning_rate": 1.938347718865598e-05,
"loss": 1.2872,
"step": 125
},
{
"epoch": 0.1382716049382716,
"grad_norm": 0.15061640739440918,
"learning_rate": 1.935881627620222e-05,
"loss": 1.1875,
"step": 126
},
{
"epoch": 0.13936899862825788,
"grad_norm": 0.15820416808128357,
"learning_rate": 1.9334155363748462e-05,
"loss": 1.2242,
"step": 127
},
{
"epoch": 0.14046639231824418,
"grad_norm": 0.16064974665641785,
"learning_rate": 1.9309494451294698e-05,
"loss": 1.2635,
"step": 128
},
{
"epoch": 0.14156378600823044,
"grad_norm": 0.1459810435771942,
"learning_rate": 1.928483353884094e-05,
"loss": 1.2475,
"step": 129
},
{
"epoch": 0.14266117969821673,
"grad_norm": 0.18043562769889832,
"learning_rate": 1.9260172626387176e-05,
"loss": 1.3077,
"step": 130
},
{
"epoch": 0.14375857338820303,
"grad_norm": 0.16961152851581573,
"learning_rate": 1.9235511713933418e-05,
"loss": 1.2155,
"step": 131
},
{
"epoch": 0.1448559670781893,
"grad_norm": 0.16236276924610138,
"learning_rate": 1.9210850801479657e-05,
"loss": 1.1733,
"step": 132
},
{
"epoch": 0.14595336076817558,
"grad_norm": 0.15118443965911865,
"learning_rate": 1.9186189889025896e-05,
"loss": 1.2129,
"step": 133
},
{
"epoch": 0.14705075445816188,
"grad_norm": 0.15442046523094177,
"learning_rate": 1.9161528976572135e-05,
"loss": 1.2811,
"step": 134
},
{
"epoch": 0.14814814814814814,
"grad_norm": 0.15228775143623352,
"learning_rate": 1.9136868064118374e-05,
"loss": 1.2877,
"step": 135
},
{
"epoch": 0.14924554183813443,
"grad_norm": 0.20188400149345398,
"learning_rate": 1.9112207151664612e-05,
"loss": 1.2504,
"step": 136
},
{
"epoch": 0.15034293552812072,
"grad_norm": 0.15376144647598267,
"learning_rate": 1.908754623921085e-05,
"loss": 1.2608,
"step": 137
},
{
"epoch": 0.151440329218107,
"grad_norm": 0.18212908506393433,
"learning_rate": 1.9062885326757094e-05,
"loss": 1.2443,
"step": 138
},
{
"epoch": 0.15253772290809328,
"grad_norm": 0.18315152823925018,
"learning_rate": 1.903822441430333e-05,
"loss": 1.2282,
"step": 139
},
{
"epoch": 0.15363511659807957,
"grad_norm": 0.18566685914993286,
"learning_rate": 1.901356350184957e-05,
"loss": 1.2061,
"step": 140
},
{
"epoch": 0.15473251028806584,
"grad_norm": 0.17911821603775024,
"learning_rate": 1.898890258939581e-05,
"loss": 1.2598,
"step": 141
},
{
"epoch": 0.15582990397805213,
"grad_norm": 0.15996401011943817,
"learning_rate": 1.896424167694205e-05,
"loss": 1.3087,
"step": 142
},
{
"epoch": 0.1569272976680384,
"grad_norm": 0.15476301312446594,
"learning_rate": 1.8939580764488288e-05,
"loss": 1.2545,
"step": 143
},
{
"epoch": 0.1580246913580247,
"grad_norm": 0.22089847922325134,
"learning_rate": 1.8914919852034527e-05,
"loss": 1.1303,
"step": 144
},
{
"epoch": 0.15912208504801098,
"grad_norm": 0.19699229300022125,
"learning_rate": 1.8890258939580766e-05,
"loss": 1.1878,
"step": 145
},
{
"epoch": 0.16021947873799725,
"grad_norm": 0.1997714638710022,
"learning_rate": 1.8865598027127005e-05,
"loss": 1.2437,
"step": 146
},
{
"epoch": 0.16131687242798354,
"grad_norm": 0.19294144213199615,
"learning_rate": 1.8840937114673244e-05,
"loss": 1.178,
"step": 147
},
{
"epoch": 0.16241426611796983,
"grad_norm": 0.18544507026672363,
"learning_rate": 1.8816276202219483e-05,
"loss": 1.1617,
"step": 148
},
{
"epoch": 0.1635116598079561,
"grad_norm": 0.1713118851184845,
"learning_rate": 1.879161528976572e-05,
"loss": 1.2141,
"step": 149
},
{
"epoch": 0.1646090534979424,
"grad_norm": 0.1757712960243225,
"learning_rate": 1.876695437731196e-05,
"loss": 1.2505,
"step": 150
},
{
"epoch": 0.16570644718792868,
"grad_norm": 0.25870320200920105,
"learning_rate": 1.87422934648582e-05,
"loss": 1.1955,
"step": 151
},
{
"epoch": 0.16680384087791494,
"grad_norm": 0.197688490152359,
"learning_rate": 1.8717632552404442e-05,
"loss": 1.1598,
"step": 152
},
{
"epoch": 0.16790123456790124,
"grad_norm": 0.21808716654777527,
"learning_rate": 1.8692971639950677e-05,
"loss": 1.1682,
"step": 153
},
{
"epoch": 0.16899862825788753,
"grad_norm": 0.26828381419181824,
"learning_rate": 1.866831072749692e-05,
"loss": 1.2372,
"step": 154
},
{
"epoch": 0.1700960219478738,
"grad_norm": 0.20964942872524261,
"learning_rate": 1.864364981504316e-05,
"loss": 1.1816,
"step": 155
},
{
"epoch": 0.17119341563786009,
"grad_norm": 0.2113889753818512,
"learning_rate": 1.8618988902589397e-05,
"loss": 1.1749,
"step": 156
},
{
"epoch": 0.17229080932784638,
"grad_norm": 0.2011781483888626,
"learning_rate": 1.8594327990135636e-05,
"loss": 1.2413,
"step": 157
},
{
"epoch": 0.17338820301783264,
"grad_norm": 0.2024763971567154,
"learning_rate": 1.8569667077681875e-05,
"loss": 1.2371,
"step": 158
},
{
"epoch": 0.17448559670781894,
"grad_norm": 0.21682558953762054,
"learning_rate": 1.8545006165228114e-05,
"loss": 1.2055,
"step": 159
},
{
"epoch": 0.1755829903978052,
"grad_norm": 0.24796062707901,
"learning_rate": 1.8520345252774353e-05,
"loss": 1.2046,
"step": 160
},
{
"epoch": 0.1766803840877915,
"grad_norm": 0.25501304864883423,
"learning_rate": 1.8495684340320595e-05,
"loss": 1.1452,
"step": 161
},
{
"epoch": 0.17777777777777778,
"grad_norm": 0.24295510351657867,
"learning_rate": 1.847102342786683e-05,
"loss": 1.3494,
"step": 162
},
{
"epoch": 0.17887517146776405,
"grad_norm": 0.23228970170021057,
"learning_rate": 1.8446362515413073e-05,
"loss": 1.2089,
"step": 163
},
{
"epoch": 0.17997256515775034,
"grad_norm": 0.22333182394504547,
"learning_rate": 1.8421701602959312e-05,
"loss": 1.1953,
"step": 164
},
{
"epoch": 0.18106995884773663,
"grad_norm": 0.28809961676597595,
"learning_rate": 1.839704069050555e-05,
"loss": 1.2015,
"step": 165
},
{
"epoch": 0.1821673525377229,
"grad_norm": 0.24514758586883545,
"learning_rate": 1.837237977805179e-05,
"loss": 1.1897,
"step": 166
},
{
"epoch": 0.1832647462277092,
"grad_norm": 0.23966249823570251,
"learning_rate": 1.834771886559803e-05,
"loss": 1.2381,
"step": 167
},
{
"epoch": 0.18436213991769548,
"grad_norm": 0.2911086082458496,
"learning_rate": 1.8323057953144268e-05,
"loss": 1.2303,
"step": 168
},
{
"epoch": 0.18545953360768175,
"grad_norm": 0.2177533060312271,
"learning_rate": 1.8298397040690507e-05,
"loss": 1.2319,
"step": 169
},
{
"epoch": 0.18655692729766804,
"grad_norm": 0.2377919703722,
"learning_rate": 1.8273736128236746e-05,
"loss": 1.2014,
"step": 170
},
{
"epoch": 0.18765432098765433,
"grad_norm": 0.21792137622833252,
"learning_rate": 1.8249075215782984e-05,
"loss": 1.1645,
"step": 171
},
{
"epoch": 0.1887517146776406,
"grad_norm": 0.2608526051044464,
"learning_rate": 1.8224414303329227e-05,
"loss": 1.1807,
"step": 172
},
{
"epoch": 0.1898491083676269,
"grad_norm": 0.14767280220985413,
"learning_rate": 1.8199753390875462e-05,
"loss": 1.2637,
"step": 173
},
{
"epoch": 0.19094650205761318,
"grad_norm": 0.15476517379283905,
"learning_rate": 1.8175092478421705e-05,
"loss": 1.229,
"step": 174
},
{
"epoch": 0.19204389574759945,
"grad_norm": 0.1727033257484436,
"learning_rate": 1.8150431565967943e-05,
"loss": 1.2646,
"step": 175
},
{
"epoch": 0.19314128943758574,
"grad_norm": 0.17207948863506317,
"learning_rate": 1.8125770653514182e-05,
"loss": 1.1604,
"step": 176
},
{
"epoch": 0.194238683127572,
"grad_norm": 0.19457168877124786,
"learning_rate": 1.810110974106042e-05,
"loss": 1.3146,
"step": 177
},
{
"epoch": 0.1953360768175583,
"grad_norm": 0.2222977876663208,
"learning_rate": 1.807644882860666e-05,
"loss": 1.2462,
"step": 178
},
{
"epoch": 0.1964334705075446,
"grad_norm": 0.21270394325256348,
"learning_rate": 1.80517879161529e-05,
"loss": 1.1648,
"step": 179
},
{
"epoch": 0.19753086419753085,
"grad_norm": 0.22708119451999664,
"learning_rate": 1.8027127003699138e-05,
"loss": 1.1397,
"step": 180
},
{
"epoch": 0.19862825788751715,
"grad_norm": 0.21450617909431458,
"learning_rate": 1.8002466091245377e-05,
"loss": 1.1892,
"step": 181
},
{
"epoch": 0.19972565157750344,
"grad_norm": 0.16464580595493317,
"learning_rate": 1.7977805178791616e-05,
"loss": 1.0918,
"step": 182
},
{
"epoch": 0.2008230452674897,
"grad_norm": 0.22942472994327545,
"learning_rate": 1.7953144266337855e-05,
"loss": 1.2338,
"step": 183
},
{
"epoch": 0.201920438957476,
"grad_norm": 0.16418524086475372,
"learning_rate": 1.7928483353884094e-05,
"loss": 1.241,
"step": 184
},
{
"epoch": 0.2030178326474623,
"grad_norm": 0.18531592190265656,
"learning_rate": 1.7903822441430333e-05,
"loss": 1.1884,
"step": 185
},
{
"epoch": 0.20411522633744855,
"grad_norm": 0.16026315093040466,
"learning_rate": 1.7879161528976575e-05,
"loss": 1.1948,
"step": 186
},
{
"epoch": 0.20521262002743484,
"grad_norm": 0.16926898062229156,
"learning_rate": 1.785450061652281e-05,
"loss": 1.1487,
"step": 187
},
{
"epoch": 0.20631001371742114,
"grad_norm": 0.21820904314517975,
"learning_rate": 1.7829839704069053e-05,
"loss": 1.1851,
"step": 188
},
{
"epoch": 0.2074074074074074,
"grad_norm": 0.16973170638084412,
"learning_rate": 1.780517879161529e-05,
"loss": 1.2141,
"step": 189
},
{
"epoch": 0.2085048010973937,
"grad_norm": 0.15451133251190186,
"learning_rate": 1.778051787916153e-05,
"loss": 1.2001,
"step": 190
},
{
"epoch": 0.20960219478738,
"grad_norm": 0.20399673283100128,
"learning_rate": 1.775585696670777e-05,
"loss": 1.1314,
"step": 191
},
{
"epoch": 0.21069958847736625,
"grad_norm": 0.16214239597320557,
"learning_rate": 1.7731196054254008e-05,
"loss": 1.1438,
"step": 192
},
{
"epoch": 0.21179698216735254,
"grad_norm": 0.1611064076423645,
"learning_rate": 1.7706535141800247e-05,
"loss": 1.1743,
"step": 193
},
{
"epoch": 0.2128943758573388,
"grad_norm": 0.12813259661197662,
"learning_rate": 1.7681874229346486e-05,
"loss": 1.1954,
"step": 194
},
{
"epoch": 0.2139917695473251,
"grad_norm": 0.1601381003856659,
"learning_rate": 1.765721331689273e-05,
"loss": 1.1613,
"step": 195
},
{
"epoch": 0.2150891632373114,
"grad_norm": 0.13577936589717865,
"learning_rate": 1.7632552404438964e-05,
"loss": 1.1982,
"step": 196
},
{
"epoch": 0.21618655692729766,
"grad_norm": 0.13865117728710175,
"learning_rate": 1.7607891491985206e-05,
"loss": 1.1725,
"step": 197
},
{
"epoch": 0.21728395061728395,
"grad_norm": 0.13816504180431366,
"learning_rate": 1.7583230579531445e-05,
"loss": 1.205,
"step": 198
},
{
"epoch": 0.21838134430727024,
"grad_norm": 0.155447855591774,
"learning_rate": 1.7558569667077684e-05,
"loss": 1.1324,
"step": 199
},
{
"epoch": 0.2194787379972565,
"grad_norm": 0.10742881894111633,
"learning_rate": 1.7533908754623923e-05,
"loss": 1.2384,
"step": 200
},
{
"epoch": 0.2205761316872428,
"grad_norm": 0.12242433428764343,
"learning_rate": 1.7509247842170162e-05,
"loss": 1.1908,
"step": 201
},
{
"epoch": 0.2216735253772291,
"grad_norm": 0.1424039602279663,
"learning_rate": 1.74845869297164e-05,
"loss": 1.1699,
"step": 202
},
{
"epoch": 0.22277091906721536,
"grad_norm": 0.16237859427928925,
"learning_rate": 1.745992601726264e-05,
"loss": 1.0864,
"step": 203
},
{
"epoch": 0.22386831275720165,
"grad_norm": 0.22748763859272003,
"learning_rate": 1.743526510480888e-05,
"loss": 1.2065,
"step": 204
},
{
"epoch": 0.22496570644718794,
"grad_norm": 0.16209690272808075,
"learning_rate": 1.7410604192355117e-05,
"loss": 1.2071,
"step": 205
},
{
"epoch": 0.2260631001371742,
"grad_norm": 0.12627777457237244,
"learning_rate": 1.738594327990136e-05,
"loss": 1.2082,
"step": 206
},
{
"epoch": 0.2271604938271605,
"grad_norm": 0.12761177122592926,
"learning_rate": 1.7361282367447595e-05,
"loss": 1.1662,
"step": 207
},
{
"epoch": 0.2282578875171468,
"grad_norm": 0.16722379624843597,
"learning_rate": 1.7336621454993838e-05,
"loss": 1.3239,
"step": 208
},
{
"epoch": 0.22935528120713305,
"grad_norm": 0.22056573629379272,
"learning_rate": 1.7311960542540076e-05,
"loss": 1.24,
"step": 209
},
{
"epoch": 0.23045267489711935,
"grad_norm": 0.15545333921909332,
"learning_rate": 1.7287299630086315e-05,
"loss": 1.1569,
"step": 210
},
{
"epoch": 0.2315500685871056,
"grad_norm": 0.15357747673988342,
"learning_rate": 1.7262638717632554e-05,
"loss": 1.1662,
"step": 211
},
{
"epoch": 0.2326474622770919,
"grad_norm": 0.2370174080133438,
"learning_rate": 1.7237977805178793e-05,
"loss": 1.163,
"step": 212
},
{
"epoch": 0.2337448559670782,
"grad_norm": 0.18010015785694122,
"learning_rate": 1.7213316892725032e-05,
"loss": 1.2003,
"step": 213
},
{
"epoch": 0.23484224965706446,
"grad_norm": 0.12438640743494034,
"learning_rate": 1.718865598027127e-05,
"loss": 1.2254,
"step": 214
},
{
"epoch": 0.23593964334705075,
"grad_norm": 0.11115527898073196,
"learning_rate": 1.716399506781751e-05,
"loss": 1.1531,
"step": 215
},
{
"epoch": 0.23703703703703705,
"grad_norm": 0.19279608130455017,
"learning_rate": 1.713933415536375e-05,
"loss": 1.1805,
"step": 216
},
{
"epoch": 0.2381344307270233,
"grad_norm": 0.18822820484638214,
"learning_rate": 1.7114673242909988e-05,
"loss": 1.2364,
"step": 217
},
{
"epoch": 0.2392318244170096,
"grad_norm": 0.11501055210828781,
"learning_rate": 1.709001233045623e-05,
"loss": 1.2192,
"step": 218
},
{
"epoch": 0.2403292181069959,
"grad_norm": 0.15815867483615875,
"learning_rate": 1.7065351418002466e-05,
"loss": 1.1756,
"step": 219
},
{
"epoch": 0.24142661179698216,
"grad_norm": 0.11231168359518051,
"learning_rate": 1.7040690505548708e-05,
"loss": 1.1561,
"step": 220
},
{
"epoch": 0.24252400548696845,
"grad_norm": 0.17530983686447144,
"learning_rate": 1.7016029593094943e-05,
"loss": 1.2097,
"step": 221
},
{
"epoch": 0.24362139917695474,
"grad_norm": 0.22233904898166656,
"learning_rate": 1.6991368680641186e-05,
"loss": 1.0435,
"step": 222
},
{
"epoch": 0.244718792866941,
"grad_norm": 0.16978253424167633,
"learning_rate": 1.6966707768187425e-05,
"loss": 1.1275,
"step": 223
},
{
"epoch": 0.2458161865569273,
"grad_norm": 0.10136917978525162,
"learning_rate": 1.6942046855733664e-05,
"loss": 1.2177,
"step": 224
},
{
"epoch": 0.24691358024691357,
"grad_norm": 0.14352397620677948,
"learning_rate": 1.6917385943279902e-05,
"loss": 1.178,
"step": 225
},
{
"epoch": 0.24801097393689986,
"grad_norm": 0.16630198061466217,
"learning_rate": 1.689272503082614e-05,
"loss": 1.1465,
"step": 226
},
{
"epoch": 0.24910836762688615,
"grad_norm": 0.1705196648836136,
"learning_rate": 1.686806411837238e-05,
"loss": 1.2136,
"step": 227
},
{
"epoch": 0.25020576131687244,
"grad_norm": 0.14427916705608368,
"learning_rate": 1.684340320591862e-05,
"loss": 1.1775,
"step": 228
},
{
"epoch": 0.25130315500685874,
"grad_norm": 0.14155656099319458,
"learning_rate": 1.681874229346486e-05,
"loss": 1.1594,
"step": 229
},
{
"epoch": 0.252400548696845,
"grad_norm": 0.13098452985286713,
"learning_rate": 1.6794081381011097e-05,
"loss": 1.202,
"step": 230
},
{
"epoch": 0.25349794238683127,
"grad_norm": 0.20060771703720093,
"learning_rate": 1.676942046855734e-05,
"loss": 1.1472,
"step": 231
},
{
"epoch": 0.25459533607681756,
"grad_norm": 0.22307084500789642,
"learning_rate": 1.6744759556103578e-05,
"loss": 1.155,
"step": 232
},
{
"epoch": 0.25569272976680385,
"grad_norm": 0.14008598029613495,
"learning_rate": 1.6720098643649817e-05,
"loss": 1.244,
"step": 233
},
{
"epoch": 0.25679012345679014,
"grad_norm": 0.12508675456047058,
"learning_rate": 1.6695437731196056e-05,
"loss": 1.1144,
"step": 234
},
{
"epoch": 0.2578875171467764,
"grad_norm": 0.1596231907606125,
"learning_rate": 1.6670776818742295e-05,
"loss": 1.0987,
"step": 235
},
{
"epoch": 0.25898491083676267,
"grad_norm": 0.14945028722286224,
"learning_rate": 1.6646115906288534e-05,
"loss": 1.2141,
"step": 236
},
{
"epoch": 0.26008230452674896,
"grad_norm": 0.15245778858661652,
"learning_rate": 1.6621454993834773e-05,
"loss": 1.1507,
"step": 237
},
{
"epoch": 0.26117969821673526,
"grad_norm": 0.13351744413375854,
"learning_rate": 1.6596794081381015e-05,
"loss": 1.1998,
"step": 238
},
{
"epoch": 0.26227709190672155,
"grad_norm": 0.10289657860994339,
"learning_rate": 1.657213316892725e-05,
"loss": 1.1589,
"step": 239
},
{
"epoch": 0.26337448559670784,
"grad_norm": 0.11290785670280457,
"learning_rate": 1.6547472256473493e-05,
"loss": 1.1987,
"step": 240
},
{
"epoch": 0.2644718792866941,
"grad_norm": 0.1390170007944107,
"learning_rate": 1.652281134401973e-05,
"loss": 1.1691,
"step": 241
},
{
"epoch": 0.26556927297668037,
"grad_norm": 0.12496920675039291,
"learning_rate": 1.649815043156597e-05,
"loss": 1.1337,
"step": 242
},
{
"epoch": 0.26666666666666666,
"grad_norm": 0.11996202915906906,
"learning_rate": 1.647348951911221e-05,
"loss": 1.118,
"step": 243
},
{
"epoch": 0.26776406035665296,
"grad_norm": 0.14449340105056763,
"learning_rate": 1.644882860665845e-05,
"loss": 1.1664,
"step": 244
},
{
"epoch": 0.26886145404663925,
"grad_norm": 0.11750940978527069,
"learning_rate": 1.6424167694204687e-05,
"loss": 1.1768,
"step": 245
},
{
"epoch": 0.26995884773662554,
"grad_norm": 0.11455260217189789,
"learning_rate": 1.6399506781750926e-05,
"loss": 1.1669,
"step": 246
},
{
"epoch": 0.2710562414266118,
"grad_norm": 0.1612052172422409,
"learning_rate": 1.6374845869297165e-05,
"loss": 1.0813,
"step": 247
},
{
"epoch": 0.27215363511659807,
"grad_norm": 0.1127680242061615,
"learning_rate": 1.6350184956843404e-05,
"loss": 1.1253,
"step": 248
},
{
"epoch": 0.27325102880658436,
"grad_norm": 0.12000089883804321,
"learning_rate": 1.6325524044389643e-05,
"loss": 1.1451,
"step": 249
},
{
"epoch": 0.27434842249657065,
"grad_norm": 0.10796555131673813,
"learning_rate": 1.6300863131935882e-05,
"loss": 1.2189,
"step": 250
},
{
"epoch": 0.27544581618655695,
"grad_norm": 0.11138719320297241,
"learning_rate": 1.627620221948212e-05,
"loss": 1.1434,
"step": 251
},
{
"epoch": 0.2765432098765432,
"grad_norm": 0.11853396147489548,
"learning_rate": 1.6251541307028363e-05,
"loss": 1.1962,
"step": 252
},
{
"epoch": 0.2776406035665295,
"grad_norm": 0.11319135874509811,
"learning_rate": 1.62268803945746e-05,
"loss": 1.1238,
"step": 253
},
{
"epoch": 0.27873799725651577,
"grad_norm": 0.11749457567930222,
"learning_rate": 1.620221948212084e-05,
"loss": 1.1505,
"step": 254
},
{
"epoch": 0.27983539094650206,
"grad_norm": 0.11317736655473709,
"learning_rate": 1.617755856966708e-05,
"loss": 1.2262,
"step": 255
},
{
"epoch": 0.28093278463648835,
"grad_norm": 0.12298385798931122,
"learning_rate": 1.615289765721332e-05,
"loss": 1.1338,
"step": 256
},
{
"epoch": 0.28203017832647465,
"grad_norm": 0.10966061800718307,
"learning_rate": 1.6128236744759558e-05,
"loss": 1.1722,
"step": 257
},
{
"epoch": 0.2831275720164609,
"grad_norm": 0.11691927909851074,
"learning_rate": 1.6103575832305797e-05,
"loss": 1.212,
"step": 258
},
{
"epoch": 0.2842249657064472,
"grad_norm": 0.13233579695224762,
"learning_rate": 1.6078914919852035e-05,
"loss": 1.1113,
"step": 259
},
{
"epoch": 0.28532235939643347,
"grad_norm": 0.10713832825422287,
"learning_rate": 1.6054254007398274e-05,
"loss": 1.1638,
"step": 260
},
{
"epoch": 0.28641975308641976,
"grad_norm": 0.10320425778627396,
"learning_rate": 1.6029593094944513e-05,
"loss": 1.1553,
"step": 261
},
{
"epoch": 0.28751714677640605,
"grad_norm": 0.14395684003829956,
"learning_rate": 1.6004932182490752e-05,
"loss": 1.1875,
"step": 262
},
{
"epoch": 0.28861454046639234,
"grad_norm": 0.11742989718914032,
"learning_rate": 1.5980271270036994e-05,
"loss": 1.1434,
"step": 263
},
{
"epoch": 0.2897119341563786,
"grad_norm": 0.12088185548782349,
"learning_rate": 1.595561035758323e-05,
"loss": 1.1365,
"step": 264
},
{
"epoch": 0.2908093278463649,
"grad_norm": 0.12207765877246857,
"learning_rate": 1.5930949445129472e-05,
"loss": 1.0191,
"step": 265
},
{
"epoch": 0.29190672153635117,
"grad_norm": 0.12891145050525665,
"learning_rate": 1.590628853267571e-05,
"loss": 1.1808,
"step": 266
},
{
"epoch": 0.29300411522633746,
"grad_norm": 0.12283164262771606,
"learning_rate": 1.588162762022195e-05,
"loss": 1.1244,
"step": 267
},
{
"epoch": 0.29410150891632375,
"grad_norm": 0.11997072398662567,
"learning_rate": 1.585696670776819e-05,
"loss": 1.1481,
"step": 268
},
{
"epoch": 0.29519890260631,
"grad_norm": 0.13282917439937592,
"learning_rate": 1.5832305795314428e-05,
"loss": 1.1222,
"step": 269
},
{
"epoch": 0.2962962962962963,
"grad_norm": 0.11063191294670105,
"learning_rate": 1.5807644882860667e-05,
"loss": 1.2377,
"step": 270
},
{
"epoch": 0.29739368998628257,
"grad_norm": 0.16821421682834625,
"learning_rate": 1.5782983970406906e-05,
"loss": 1.111,
"step": 271
},
{
"epoch": 0.29849108367626886,
"grad_norm": 0.1252758502960205,
"learning_rate": 1.5758323057953148e-05,
"loss": 1.1257,
"step": 272
},
{
"epoch": 0.29958847736625516,
"grad_norm": 0.11989396065473557,
"learning_rate": 1.5733662145499384e-05,
"loss": 1.2035,
"step": 273
},
{
"epoch": 0.30068587105624145,
"grad_norm": 0.122134268283844,
"learning_rate": 1.5709001233045626e-05,
"loss": 1.1897,
"step": 274
},
{
"epoch": 0.3017832647462277,
"grad_norm": 0.1424403041601181,
"learning_rate": 1.5684340320591865e-05,
"loss": 1.136,
"step": 275
},
{
"epoch": 0.302880658436214,
"grad_norm": 0.11523528397083282,
"learning_rate": 1.5659679408138104e-05,
"loss": 1.1721,
"step": 276
},
{
"epoch": 0.30397805212620027,
"grad_norm": 0.1188586950302124,
"learning_rate": 1.5635018495684343e-05,
"loss": 1.196,
"step": 277
},
{
"epoch": 0.30507544581618656,
"grad_norm": 0.09858889132738113,
"learning_rate": 1.561035758323058e-05,
"loss": 1.1256,
"step": 278
},
{
"epoch": 0.30617283950617286,
"grad_norm": 0.13993050158023834,
"learning_rate": 1.558569667077682e-05,
"loss": 1.142,
"step": 279
},
{
"epoch": 0.30727023319615915,
"grad_norm": 0.11930122971534729,
"learning_rate": 1.556103575832306e-05,
"loss": 1.1358,
"step": 280
},
{
"epoch": 0.3083676268861454,
"grad_norm": 0.12060839682817459,
"learning_rate": 1.5536374845869298e-05,
"loss": 1.2019,
"step": 281
},
{
"epoch": 0.3094650205761317,
"grad_norm": 0.11242841929197311,
"learning_rate": 1.5511713933415537e-05,
"loss": 1.0959,
"step": 282
},
{
"epoch": 0.31056241426611797,
"grad_norm": 0.12150558829307556,
"learning_rate": 1.5487053020961776e-05,
"loss": 1.1794,
"step": 283
},
{
"epoch": 0.31165980795610426,
"grad_norm": 0.13793016970157623,
"learning_rate": 1.5462392108508015e-05,
"loss": 1.0896,
"step": 284
},
{
"epoch": 0.31275720164609055,
"grad_norm": 0.1277860403060913,
"learning_rate": 1.5437731196054254e-05,
"loss": 1.1495,
"step": 285
},
{
"epoch": 0.3138545953360768,
"grad_norm": 0.12210772931575775,
"learning_rate": 1.5413070283600496e-05,
"loss": 1.108,
"step": 286
},
{
"epoch": 0.3149519890260631,
"grad_norm": 0.11492680013179779,
"learning_rate": 1.538840937114673e-05,
"loss": 1.1468,
"step": 287
},
{
"epoch": 0.3160493827160494,
"grad_norm": 0.10993566364049911,
"learning_rate": 1.5363748458692974e-05,
"loss": 1.0821,
"step": 288
},
{
"epoch": 0.31714677640603567,
"grad_norm": 0.11138095706701279,
"learning_rate": 1.5339087546239213e-05,
"loss": 1.1024,
"step": 289
},
{
"epoch": 0.31824417009602196,
"grad_norm": 0.12479337304830551,
"learning_rate": 1.5314426633785452e-05,
"loss": 1.1282,
"step": 290
},
{
"epoch": 0.31934156378600825,
"grad_norm": 0.1219845563173294,
"learning_rate": 1.528976572133169e-05,
"loss": 1.1404,
"step": 291
},
{
"epoch": 0.3204389574759945,
"grad_norm": 0.1182159036397934,
"learning_rate": 1.526510480887793e-05,
"loss": 1.173,
"step": 292
},
{
"epoch": 0.3215363511659808,
"grad_norm": 0.11335504055023193,
"learning_rate": 1.5240443896424168e-05,
"loss": 1.1762,
"step": 293
},
{
"epoch": 0.3226337448559671,
"grad_norm": 0.11732617020606995,
"learning_rate": 1.5215782983970409e-05,
"loss": 1.1762,
"step": 294
},
{
"epoch": 0.32373113854595337,
"grad_norm": 0.1295761615037918,
"learning_rate": 1.5191122071516646e-05,
"loss": 1.0506,
"step": 295
},
{
"epoch": 0.32482853223593966,
"grad_norm": 0.11054473370313644,
"learning_rate": 1.5166461159062887e-05,
"loss": 1.0934,
"step": 296
},
{
"epoch": 0.32592592592592595,
"grad_norm": 0.11660335958003998,
"learning_rate": 1.5141800246609126e-05,
"loss": 1.0956,
"step": 297
},
{
"epoch": 0.3270233196159122,
"grad_norm": 0.10848717391490936,
"learning_rate": 1.5117139334155365e-05,
"loss": 1.1147,
"step": 298
},
{
"epoch": 0.3281207133058985,
"grad_norm": 0.11791523545980453,
"learning_rate": 1.5092478421701604e-05,
"loss": 1.1918,
"step": 299
},
{
"epoch": 0.3292181069958848,
"grad_norm": 0.11942354589700699,
"learning_rate": 1.5067817509247844e-05,
"loss": 1.2093,
"step": 300
},
{
"epoch": 0.33031550068587107,
"grad_norm": 0.11650928854942322,
"learning_rate": 1.5043156596794081e-05,
"loss": 1.1171,
"step": 301
},
{
"epoch": 0.33141289437585736,
"grad_norm": 0.1390320360660553,
"learning_rate": 1.5018495684340322e-05,
"loss": 1.0934,
"step": 302
},
{
"epoch": 0.3325102880658436,
"grad_norm": 0.1456214338541031,
"learning_rate": 1.4993834771886561e-05,
"loss": 1.0721,
"step": 303
},
{
"epoch": 0.3336076817558299,
"grad_norm": 0.11829102784395218,
"learning_rate": 1.49691738594328e-05,
"loss": 1.1352,
"step": 304
},
{
"epoch": 0.3347050754458162,
"grad_norm": 0.11520489305257797,
"learning_rate": 1.4944512946979039e-05,
"loss": 1.1205,
"step": 305
},
{
"epoch": 0.3358024691358025,
"grad_norm": 0.1192188486456871,
"learning_rate": 1.491985203452528e-05,
"loss": 1.0658,
"step": 306
},
{
"epoch": 0.33689986282578877,
"grad_norm": 0.12938235700130463,
"learning_rate": 1.4895191122071517e-05,
"loss": 1.2173,
"step": 307
},
{
"epoch": 0.33799725651577506,
"grad_norm": 0.11106622219085693,
"learning_rate": 1.4870530209617757e-05,
"loss": 1.0982,
"step": 308
},
{
"epoch": 0.3390946502057613,
"grad_norm": 0.11311810463666916,
"learning_rate": 1.4845869297163998e-05,
"loss": 1.0987,
"step": 309
},
{
"epoch": 0.3401920438957476,
"grad_norm": 0.14787402749061584,
"learning_rate": 1.4821208384710235e-05,
"loss": 1.2011,
"step": 310
},
{
"epoch": 0.3412894375857339,
"grad_norm": 0.12094119936227798,
"learning_rate": 1.4796547472256476e-05,
"loss": 1.0972,
"step": 311
},
{
"epoch": 0.34238683127572017,
"grad_norm": 0.12174614518880844,
"learning_rate": 1.4771886559802713e-05,
"loss": 1.2196,
"step": 312
},
{
"epoch": 0.34348422496570646,
"grad_norm": 0.12063402682542801,
"learning_rate": 1.4747225647348953e-05,
"loss": 1.0724,
"step": 313
},
{
"epoch": 0.34458161865569276,
"grad_norm": 0.1335890144109726,
"learning_rate": 1.4722564734895192e-05,
"loss": 1.2439,
"step": 314
},
{
"epoch": 0.345679012345679,
"grad_norm": 0.120571069419384,
"learning_rate": 1.4697903822441431e-05,
"loss": 1.1374,
"step": 315
},
{
"epoch": 0.3467764060356653,
"grad_norm": 0.1246613934636116,
"learning_rate": 1.467324290998767e-05,
"loss": 1.1456,
"step": 316
},
{
"epoch": 0.3478737997256516,
"grad_norm": 0.1251581907272339,
"learning_rate": 1.464858199753391e-05,
"loss": 1.1829,
"step": 317
},
{
"epoch": 0.34897119341563787,
"grad_norm": 0.12001658231019974,
"learning_rate": 1.4623921085080148e-05,
"loss": 1.2,
"step": 318
},
{
"epoch": 0.35006858710562416,
"grad_norm": 0.11571818590164185,
"learning_rate": 1.4599260172626389e-05,
"loss": 1.0829,
"step": 319
},
{
"epoch": 0.3511659807956104,
"grad_norm": 0.11667678505182266,
"learning_rate": 1.4574599260172627e-05,
"loss": 1.1365,
"step": 320
},
{
"epoch": 0.3522633744855967,
"grad_norm": 0.13005587458610535,
"learning_rate": 1.4549938347718866e-05,
"loss": 1.0685,
"step": 321
},
{
"epoch": 0.353360768175583,
"grad_norm": 0.13064813613891602,
"learning_rate": 1.4525277435265105e-05,
"loss": 1.1665,
"step": 322
},
{
"epoch": 0.3544581618655693,
"grad_norm": 0.11439865827560425,
"learning_rate": 1.4500616522811346e-05,
"loss": 1.0968,
"step": 323
},
{
"epoch": 0.35555555555555557,
"grad_norm": 0.11633537709712982,
"learning_rate": 1.4475955610357583e-05,
"loss": 1.2706,
"step": 324
},
{
"epoch": 0.35665294924554186,
"grad_norm": 0.11462391167879105,
"learning_rate": 1.4451294697903824e-05,
"loss": 1.2981,
"step": 325
},
{
"epoch": 0.3577503429355281,
"grad_norm": 0.11040814965963364,
"learning_rate": 1.4426633785450064e-05,
"loss": 1.0935,
"step": 326
},
{
"epoch": 0.3588477366255144,
"grad_norm": 0.14056402444839478,
"learning_rate": 1.4401972872996302e-05,
"loss": 1.1784,
"step": 327
},
{
"epoch": 0.3599451303155007,
"grad_norm": 0.12376075237989426,
"learning_rate": 1.4377311960542542e-05,
"loss": 1.1781,
"step": 328
},
{
"epoch": 0.361042524005487,
"grad_norm": 0.13468991219997406,
"learning_rate": 1.4352651048088781e-05,
"loss": 1.151,
"step": 329
},
{
"epoch": 0.36213991769547327,
"grad_norm": 0.11974407732486725,
"learning_rate": 1.432799013563502e-05,
"loss": 1.0996,
"step": 330
},
{
"epoch": 0.36323731138545956,
"grad_norm": 0.13505803048610687,
"learning_rate": 1.4303329223181259e-05,
"loss": 1.0323,
"step": 331
},
{
"epoch": 0.3643347050754458,
"grad_norm": 0.13089002668857574,
"learning_rate": 1.4278668310727498e-05,
"loss": 1.0352,
"step": 332
},
{
"epoch": 0.3654320987654321,
"grad_norm": 0.13586758077144623,
"learning_rate": 1.4254007398273737e-05,
"loss": 1.1415,
"step": 333
},
{
"epoch": 0.3665294924554184,
"grad_norm": 0.13658414781093597,
"learning_rate": 1.4229346485819977e-05,
"loss": 1.2563,
"step": 334
},
{
"epoch": 0.3676268861454047,
"grad_norm": 0.13994932174682617,
"learning_rate": 1.4204685573366214e-05,
"loss": 1.1559,
"step": 335
},
{
"epoch": 0.36872427983539097,
"grad_norm": 0.11985601484775543,
"learning_rate": 1.4180024660912455e-05,
"loss": 1.1639,
"step": 336
},
{
"epoch": 0.3698216735253772,
"grad_norm": 0.15504884719848633,
"learning_rate": 1.4155363748458694e-05,
"loss": 1.0932,
"step": 337
},
{
"epoch": 0.3709190672153635,
"grad_norm": 0.11062606424093246,
"learning_rate": 1.4130702836004933e-05,
"loss": 1.1327,
"step": 338
},
{
"epoch": 0.3720164609053498,
"grad_norm": 0.13901466131210327,
"learning_rate": 1.4106041923551172e-05,
"loss": 1.1924,
"step": 339
},
{
"epoch": 0.3731138545953361,
"grad_norm": 0.1262989044189453,
"learning_rate": 1.4081381011097412e-05,
"loss": 1.1411,
"step": 340
},
{
"epoch": 0.3742112482853224,
"grad_norm": 0.1387164443731308,
"learning_rate": 1.405672009864365e-05,
"loss": 1.116,
"step": 341
},
{
"epoch": 0.37530864197530867,
"grad_norm": 0.13789041340351105,
"learning_rate": 1.403205918618989e-05,
"loss": 1.1003,
"step": 342
},
{
"epoch": 0.3764060356652949,
"grad_norm": 0.12197256088256836,
"learning_rate": 1.400739827373613e-05,
"loss": 1.1363,
"step": 343
},
{
"epoch": 0.3775034293552812,
"grad_norm": 0.11622656881809235,
"learning_rate": 1.3982737361282368e-05,
"loss": 1.2004,
"step": 344
},
{
"epoch": 0.3786008230452675,
"grad_norm": 0.13322965800762177,
"learning_rate": 1.3958076448828609e-05,
"loss": 1.0868,
"step": 345
},
{
"epoch": 0.3796982167352538,
"grad_norm": 0.13613948225975037,
"learning_rate": 1.3933415536374848e-05,
"loss": 1.0893,
"step": 346
},
{
"epoch": 0.38079561042524007,
"grad_norm": 0.12749332189559937,
"learning_rate": 1.3908754623921086e-05,
"loss": 1.1242,
"step": 347
},
{
"epoch": 0.38189300411522636,
"grad_norm": 0.13489745557308197,
"learning_rate": 1.3884093711467325e-05,
"loss": 1.0463,
"step": 348
},
{
"epoch": 0.3829903978052126,
"grad_norm": 0.13293969631195068,
"learning_rate": 1.3859432799013564e-05,
"loss": 1.0682,
"step": 349
},
{
"epoch": 0.3840877914951989,
"grad_norm": 0.13386596739292145,
"learning_rate": 1.3834771886559803e-05,
"loss": 1.1216,
"step": 350
},
{
"epoch": 0.3851851851851852,
"grad_norm": 0.1273600161075592,
"learning_rate": 1.3810110974106044e-05,
"loss": 1.1638,
"step": 351
},
{
"epoch": 0.3862825788751715,
"grad_norm": 0.113369882106781,
"learning_rate": 1.3785450061652281e-05,
"loss": 1.0776,
"step": 352
},
{
"epoch": 0.38737997256515777,
"grad_norm": 0.13265594840049744,
"learning_rate": 1.3760789149198522e-05,
"loss": 1.1413,
"step": 353
},
{
"epoch": 0.388477366255144,
"grad_norm": 0.10907372832298279,
"learning_rate": 1.373612823674476e-05,
"loss": 1.1685,
"step": 354
},
{
"epoch": 0.3895747599451303,
"grad_norm": 0.14246472716331482,
"learning_rate": 1.3711467324291e-05,
"loss": 1.1737,
"step": 355
},
{
"epoch": 0.3906721536351166,
"grad_norm": 0.11236773431301117,
"learning_rate": 1.3686806411837238e-05,
"loss": 1.2439,
"step": 356
},
{
"epoch": 0.3917695473251029,
"grad_norm": 0.14173471927642822,
"learning_rate": 1.3662145499383479e-05,
"loss": 1.0831,
"step": 357
},
{
"epoch": 0.3928669410150892,
"grad_norm": 0.13366690278053284,
"learning_rate": 1.3637484586929716e-05,
"loss": 1.1333,
"step": 358
},
{
"epoch": 0.39396433470507547,
"grad_norm": 0.14148059487342834,
"learning_rate": 1.3612823674475957e-05,
"loss": 1.1519,
"step": 359
},
{
"epoch": 0.3950617283950617,
"grad_norm": 0.11044170707464218,
"learning_rate": 1.3588162762022197e-05,
"loss": 1.255,
"step": 360
},
{
"epoch": 0.396159122085048,
"grad_norm": 0.11801745742559433,
"learning_rate": 1.3563501849568435e-05,
"loss": 1.0922,
"step": 361
},
{
"epoch": 0.3972565157750343,
"grad_norm": 0.11835184693336487,
"learning_rate": 1.3538840937114675e-05,
"loss": 1.1125,
"step": 362
},
{
"epoch": 0.3983539094650206,
"grad_norm": 0.1362667679786682,
"learning_rate": 1.3514180024660914e-05,
"loss": 1.1118,
"step": 363
},
{
"epoch": 0.3994513031550069,
"grad_norm": 0.1164252832531929,
"learning_rate": 1.3489519112207153e-05,
"loss": 1.0671,
"step": 364
},
{
"epoch": 0.40054869684499317,
"grad_norm": 0.12106901407241821,
"learning_rate": 1.3464858199753392e-05,
"loss": 1.1901,
"step": 365
},
{
"epoch": 0.4016460905349794,
"grad_norm": 0.1594018191099167,
"learning_rate": 1.3440197287299632e-05,
"loss": 1.0512,
"step": 366
},
{
"epoch": 0.4027434842249657,
"grad_norm": 0.11015798151493073,
"learning_rate": 1.341553637484587e-05,
"loss": 1.1859,
"step": 367
},
{
"epoch": 0.403840877914952,
"grad_norm": 0.1140451431274414,
"learning_rate": 1.339087546239211e-05,
"loss": 1.093,
"step": 368
},
{
"epoch": 0.4049382716049383,
"grad_norm": 0.1487288475036621,
"learning_rate": 1.3366214549938348e-05,
"loss": 1.2052,
"step": 369
},
{
"epoch": 0.4060356652949246,
"grad_norm": 0.12370628118515015,
"learning_rate": 1.3341553637484588e-05,
"loss": 1.0985,
"step": 370
},
{
"epoch": 0.4071330589849108,
"grad_norm": 0.11730250716209412,
"learning_rate": 1.3316892725030827e-05,
"loss": 1.082,
"step": 371
},
{
"epoch": 0.4082304526748971,
"grad_norm": 0.1203538104891777,
"learning_rate": 1.3292231812577066e-05,
"loss": 1.0728,
"step": 372
},
{
"epoch": 0.4093278463648834,
"grad_norm": 0.11578691750764847,
"learning_rate": 1.3267570900123305e-05,
"loss": 1.1351,
"step": 373
},
{
"epoch": 0.4104252400548697,
"grad_norm": 0.1101141944527626,
"learning_rate": 1.3242909987669545e-05,
"loss": 1.1311,
"step": 374
},
{
"epoch": 0.411522633744856,
"grad_norm": 0.11373890191316605,
"learning_rate": 1.3218249075215783e-05,
"loss": 1.0773,
"step": 375
},
{
"epoch": 0.4126200274348423,
"grad_norm": 0.14648625254631042,
"learning_rate": 1.3193588162762023e-05,
"loss": 1.2263,
"step": 376
},
{
"epoch": 0.4137174211248285,
"grad_norm": 0.11010008305311203,
"learning_rate": 1.3168927250308264e-05,
"loss": 1.1445,
"step": 377
},
{
"epoch": 0.4148148148148148,
"grad_norm": 0.12386184185743332,
"learning_rate": 1.3144266337854501e-05,
"loss": 1.0454,
"step": 378
},
{
"epoch": 0.4159122085048011,
"grad_norm": 0.11810237169265747,
"learning_rate": 1.3119605425400742e-05,
"loss": 1.1877,
"step": 379
},
{
"epoch": 0.4170096021947874,
"grad_norm": 0.1291743367910385,
"learning_rate": 1.309494451294698e-05,
"loss": 1.2161,
"step": 380
},
{
"epoch": 0.4181069958847737,
"grad_norm": 0.115330770611763,
"learning_rate": 1.307028360049322e-05,
"loss": 1.1117,
"step": 381
},
{
"epoch": 0.41920438957476,
"grad_norm": 0.15565675497055054,
"learning_rate": 1.3045622688039458e-05,
"loss": 1.1049,
"step": 382
},
{
"epoch": 0.4203017832647462,
"grad_norm": 0.17110465466976166,
"learning_rate": 1.3020961775585699e-05,
"loss": 1.1047,
"step": 383
},
{
"epoch": 0.4213991769547325,
"grad_norm": 0.12324660271406174,
"learning_rate": 1.2996300863131936e-05,
"loss": 1.091,
"step": 384
},
{
"epoch": 0.4224965706447188,
"grad_norm": 0.14864134788513184,
"learning_rate": 1.2971639950678177e-05,
"loss": 1.1008,
"step": 385
},
{
"epoch": 0.4235939643347051,
"grad_norm": 0.15296033024787903,
"learning_rate": 1.2946979038224414e-05,
"loss": 1.1242,
"step": 386
},
{
"epoch": 0.4246913580246914,
"grad_norm": 0.16041865944862366,
"learning_rate": 1.2922318125770655e-05,
"loss": 1.1821,
"step": 387
},
{
"epoch": 0.4257887517146776,
"grad_norm": 0.13168294727802277,
"learning_rate": 1.2897657213316894e-05,
"loss": 1.1585,
"step": 388
},
{
"epoch": 0.4268861454046639,
"grad_norm": 0.11569740623235703,
"learning_rate": 1.2872996300863132e-05,
"loss": 1.1275,
"step": 389
},
{
"epoch": 0.4279835390946502,
"grad_norm": 0.14467458426952362,
"learning_rate": 1.2848335388409371e-05,
"loss": 1.1436,
"step": 390
},
{
"epoch": 0.4290809327846365,
"grad_norm": 0.11647368967533112,
"learning_rate": 1.2823674475955612e-05,
"loss": 1.1061,
"step": 391
},
{
"epoch": 0.4301783264746228,
"grad_norm": 0.11563649028539658,
"learning_rate": 1.279901356350185e-05,
"loss": 1.1721,
"step": 392
},
{
"epoch": 0.4312757201646091,
"grad_norm": 0.13032524287700653,
"learning_rate": 1.277435265104809e-05,
"loss": 1.1382,
"step": 393
},
{
"epoch": 0.4323731138545953,
"grad_norm": 0.12809215486049652,
"learning_rate": 1.274969173859433e-05,
"loss": 1.1197,
"step": 394
},
{
"epoch": 0.4334705075445816,
"grad_norm": 0.11773429811000824,
"learning_rate": 1.2725030826140568e-05,
"loss": 1.1025,
"step": 395
},
{
"epoch": 0.4345679012345679,
"grad_norm": 0.12838852405548096,
"learning_rate": 1.2700369913686808e-05,
"loss": 1.1378,
"step": 396
},
{
"epoch": 0.4356652949245542,
"grad_norm": 0.14228248596191406,
"learning_rate": 1.2675709001233047e-05,
"loss": 1.2118,
"step": 397
},
{
"epoch": 0.4367626886145405,
"grad_norm": 0.14777442812919617,
"learning_rate": 1.2651048088779286e-05,
"loss": 1.129,
"step": 398
},
{
"epoch": 0.4378600823045268,
"grad_norm": 0.11499614268541336,
"learning_rate": 1.2626387176325525e-05,
"loss": 1.1193,
"step": 399
},
{
"epoch": 0.438957475994513,
"grad_norm": 0.1274246871471405,
"learning_rate": 1.2601726263871766e-05,
"loss": 1.1436,
"step": 400
},
{
"epoch": 0.4400548696844993,
"grad_norm": 0.13293783366680145,
"learning_rate": 1.2577065351418003e-05,
"loss": 1.0427,
"step": 401
},
{
"epoch": 0.4411522633744856,
"grad_norm": 0.12298054248094559,
"learning_rate": 1.2552404438964243e-05,
"loss": 1.1429,
"step": 402
},
{
"epoch": 0.4422496570644719,
"grad_norm": 0.10508795082569122,
"learning_rate": 1.2527743526510482e-05,
"loss": 1.2339,
"step": 403
},
{
"epoch": 0.4433470507544582,
"grad_norm": 0.14140458405017853,
"learning_rate": 1.2503082614056721e-05,
"loss": 1.0924,
"step": 404
},
{
"epoch": 0.4444444444444444,
"grad_norm": 0.16051869094371796,
"learning_rate": 1.247842170160296e-05,
"loss": 1.055,
"step": 405
},
{
"epoch": 0.4455418381344307,
"grad_norm": 0.12968482077121735,
"learning_rate": 1.2453760789149199e-05,
"loss": 1.1362,
"step": 406
},
{
"epoch": 0.446639231824417,
"grad_norm": 0.12716621160507202,
"learning_rate": 1.2429099876695438e-05,
"loss": 1.0987,
"step": 407
},
{
"epoch": 0.4477366255144033,
"grad_norm": 0.15174546837806702,
"learning_rate": 1.2404438964241678e-05,
"loss": 1.1899,
"step": 408
},
{
"epoch": 0.4488340192043896,
"grad_norm": 0.1363244652748108,
"learning_rate": 1.2379778051787916e-05,
"loss": 1.1274,
"step": 409
},
{
"epoch": 0.4499314128943759,
"grad_norm": 0.11740902811288834,
"learning_rate": 1.2355117139334156e-05,
"loss": 1.0766,
"step": 410
},
{
"epoch": 0.4510288065843621,
"grad_norm": 0.1075834259390831,
"learning_rate": 1.2330456226880397e-05,
"loss": 1.1318,
"step": 411
},
{
"epoch": 0.4521262002743484,
"grad_norm": 0.11810291558504105,
"learning_rate": 1.2305795314426634e-05,
"loss": 1.1657,
"step": 412
},
{
"epoch": 0.4532235939643347,
"grad_norm": 0.1261415034532547,
"learning_rate": 1.2281134401972875e-05,
"loss": 1.2063,
"step": 413
},
{
"epoch": 0.454320987654321,
"grad_norm": 0.14952872693538666,
"learning_rate": 1.2256473489519114e-05,
"loss": 1.0857,
"step": 414
},
{
"epoch": 0.4554183813443073,
"grad_norm": 0.1363765001296997,
"learning_rate": 1.2231812577065353e-05,
"loss": 1.1038,
"step": 415
},
{
"epoch": 0.4565157750342936,
"grad_norm": 0.1384081095457077,
"learning_rate": 1.2207151664611591e-05,
"loss": 1.1109,
"step": 416
},
{
"epoch": 0.4576131687242798,
"grad_norm": 0.1204955130815506,
"learning_rate": 1.2182490752157832e-05,
"loss": 1.0567,
"step": 417
},
{
"epoch": 0.4587105624142661,
"grad_norm": 0.14003603160381317,
"learning_rate": 1.215782983970407e-05,
"loss": 1.1006,
"step": 418
},
{
"epoch": 0.4598079561042524,
"grad_norm": 0.18735840916633606,
"learning_rate": 1.213316892725031e-05,
"loss": 1.1884,
"step": 419
},
{
"epoch": 0.4609053497942387,
"grad_norm": 0.16455943882465363,
"learning_rate": 1.2108508014796549e-05,
"loss": 1.0925,
"step": 420
},
{
"epoch": 0.462002743484225,
"grad_norm": 0.14214913547039032,
"learning_rate": 1.2083847102342788e-05,
"loss": 1.0512,
"step": 421
},
{
"epoch": 0.4631001371742112,
"grad_norm": 0.12036455422639847,
"learning_rate": 1.2059186189889027e-05,
"loss": 1.2031,
"step": 422
},
{
"epoch": 0.4641975308641975,
"grad_norm": 0.13500386476516724,
"learning_rate": 1.2034525277435265e-05,
"loss": 1.1086,
"step": 423
},
{
"epoch": 0.4652949245541838,
"grad_norm": 0.14389222860336304,
"learning_rate": 1.2009864364981504e-05,
"loss": 1.1124,
"step": 424
},
{
"epoch": 0.4663923182441701,
"grad_norm": 0.14557717740535736,
"learning_rate": 1.1985203452527745e-05,
"loss": 1.1043,
"step": 425
},
{
"epoch": 0.4674897119341564,
"grad_norm": 0.16723041236400604,
"learning_rate": 1.1960542540073982e-05,
"loss": 1.1054,
"step": 426
},
{
"epoch": 0.4685871056241427,
"grad_norm": 0.11840852349996567,
"learning_rate": 1.1935881627620223e-05,
"loss": 1.2366,
"step": 427
},
{
"epoch": 0.4696844993141289,
"grad_norm": 0.13276275992393494,
"learning_rate": 1.1911220715166463e-05,
"loss": 1.1297,
"step": 428
},
{
"epoch": 0.4707818930041152,
"grad_norm": 0.12433144450187683,
"learning_rate": 1.18865598027127e-05,
"loss": 1.1457,
"step": 429
},
{
"epoch": 0.4718792866941015,
"grad_norm": 0.1469450742006302,
"learning_rate": 1.1861898890258941e-05,
"loss": 1.1337,
"step": 430
},
{
"epoch": 0.4729766803840878,
"grad_norm": 0.16274379193782806,
"learning_rate": 1.183723797780518e-05,
"loss": 1.1523,
"step": 431
},
{
"epoch": 0.4740740740740741,
"grad_norm": 0.13927359879016876,
"learning_rate": 1.1812577065351419e-05,
"loss": 1.1721,
"step": 432
},
{
"epoch": 0.47517146776406033,
"grad_norm": 0.11743171513080597,
"learning_rate": 1.1787916152897658e-05,
"loss": 1.0745,
"step": 433
},
{
"epoch": 0.4762688614540466,
"grad_norm": 0.11775142699480057,
"learning_rate": 1.1763255240443899e-05,
"loss": 1.152,
"step": 434
},
{
"epoch": 0.4773662551440329,
"grad_norm": 0.11214631050825119,
"learning_rate": 1.1738594327990136e-05,
"loss": 1.1544,
"step": 435
},
{
"epoch": 0.4784636488340192,
"grad_norm": 0.15197938680648804,
"learning_rate": 1.1713933415536376e-05,
"loss": 1.0955,
"step": 436
},
{
"epoch": 0.4795610425240055,
"grad_norm": 0.1254250705242157,
"learning_rate": 1.1689272503082615e-05,
"loss": 1.1479,
"step": 437
},
{
"epoch": 0.4806584362139918,
"grad_norm": 0.15701737999916077,
"learning_rate": 1.1664611590628854e-05,
"loss": 1.0073,
"step": 438
},
{
"epoch": 0.48175582990397803,
"grad_norm": 0.10863196849822998,
"learning_rate": 1.1639950678175093e-05,
"loss": 1.1565,
"step": 439
},
{
"epoch": 0.4828532235939643,
"grad_norm": 0.11690645664930344,
"learning_rate": 1.1615289765721334e-05,
"loss": 1.1264,
"step": 440
},
{
"epoch": 0.4839506172839506,
"grad_norm": 0.13278046250343323,
"learning_rate": 1.1590628853267571e-05,
"loss": 1.1141,
"step": 441
},
{
"epoch": 0.4850480109739369,
"grad_norm": 0.13872136175632477,
"learning_rate": 1.1565967940813812e-05,
"loss": 1.1011,
"step": 442
},
{
"epoch": 0.4861454046639232,
"grad_norm": 0.12824486196041107,
"learning_rate": 1.1541307028360049e-05,
"loss": 1.1151,
"step": 443
},
{
"epoch": 0.4872427983539095,
"grad_norm": 0.12683530151844025,
"learning_rate": 1.151664611590629e-05,
"loss": 1.0178,
"step": 444
},
{
"epoch": 0.4883401920438957,
"grad_norm": 0.12646201252937317,
"learning_rate": 1.149198520345253e-05,
"loss": 1.1747,
"step": 445
},
{
"epoch": 0.489437585733882,
"grad_norm": 0.12779991328716278,
"learning_rate": 1.1467324290998767e-05,
"loss": 1.1784,
"step": 446
},
{
"epoch": 0.4905349794238683,
"grad_norm": 0.13384082913398743,
"learning_rate": 1.1442663378545008e-05,
"loss": 1.076,
"step": 447
},
{
"epoch": 0.4916323731138546,
"grad_norm": 0.13101617991924286,
"learning_rate": 1.1418002466091247e-05,
"loss": 1.0109,
"step": 448
},
{
"epoch": 0.4927297668038409,
"grad_norm": 0.13373570144176483,
"learning_rate": 1.1393341553637486e-05,
"loss": 1.1277,
"step": 449
},
{
"epoch": 0.49382716049382713,
"grad_norm": 0.13308392465114594,
"learning_rate": 1.1368680641183724e-05,
"loss": 1.0997,
"step": 450
},
{
"epoch": 0.4949245541838134,
"grad_norm": 0.11568623036146164,
"learning_rate": 1.1344019728729965e-05,
"loss": 1.2082,
"step": 451
},
{
"epoch": 0.4960219478737997,
"grad_norm": 0.12799036502838135,
"learning_rate": 1.1319358816276202e-05,
"loss": 1.1844,
"step": 452
},
{
"epoch": 0.497119341563786,
"grad_norm": 0.12493016570806503,
"learning_rate": 1.1294697903822443e-05,
"loss": 1.2041,
"step": 453
},
{
"epoch": 0.4982167352537723,
"grad_norm": 0.12631264328956604,
"learning_rate": 1.1270036991368682e-05,
"loss": 1.0397,
"step": 454
},
{
"epoch": 0.4993141289437586,
"grad_norm": 0.12955130636692047,
"learning_rate": 1.124537607891492e-05,
"loss": 1.0504,
"step": 455
},
{
"epoch": 0.5004115226337449,
"grad_norm": 0.12372354418039322,
"learning_rate": 1.122071516646116e-05,
"loss": 1.1982,
"step": 456
},
{
"epoch": 0.5015089163237312,
"grad_norm": 0.13814988732337952,
"learning_rate": 1.11960542540074e-05,
"loss": 1.201,
"step": 457
},
{
"epoch": 0.5026063100137175,
"grad_norm": 0.11566805094480515,
"learning_rate": 1.1171393341553637e-05,
"loss": 1.1334,
"step": 458
},
{
"epoch": 0.5037037037037037,
"grad_norm": 0.11871378123760223,
"learning_rate": 1.1146732429099878e-05,
"loss": 1.1315,
"step": 459
},
{
"epoch": 0.50480109739369,
"grad_norm": 0.12469706684350967,
"learning_rate": 1.1122071516646115e-05,
"loss": 1.1309,
"step": 460
},
{
"epoch": 0.5058984910836762,
"grad_norm": 0.12486052513122559,
"learning_rate": 1.1097410604192356e-05,
"loss": 1.0966,
"step": 461
},
{
"epoch": 0.5069958847736625,
"grad_norm": 0.12366752326488495,
"learning_rate": 1.1072749691738596e-05,
"loss": 1.1861,
"step": 462
},
{
"epoch": 0.5080932784636488,
"grad_norm": 0.1204606145620346,
"learning_rate": 1.1048088779284834e-05,
"loss": 1.1,
"step": 463
},
{
"epoch": 0.5091906721536351,
"grad_norm": 0.15034319460391998,
"learning_rate": 1.1023427866831074e-05,
"loss": 1.1683,
"step": 464
},
{
"epoch": 0.5102880658436214,
"grad_norm": 0.1372024267911911,
"learning_rate": 1.0998766954377313e-05,
"loss": 1.1134,
"step": 465
},
{
"epoch": 0.5113854595336077,
"grad_norm": 0.13857926428318024,
"learning_rate": 1.097410604192355e-05,
"loss": 1.1922,
"step": 466
},
{
"epoch": 0.512482853223594,
"grad_norm": 0.1584538072347641,
"learning_rate": 1.0949445129469791e-05,
"loss": 1.139,
"step": 467
},
{
"epoch": 0.5135802469135803,
"grad_norm": 0.14659465849399567,
"learning_rate": 1.0924784217016032e-05,
"loss": 1.1054,
"step": 468
},
{
"epoch": 0.5146776406035666,
"grad_norm": 0.1330493539571762,
"learning_rate": 1.0900123304562269e-05,
"loss": 1.1533,
"step": 469
},
{
"epoch": 0.5157750342935528,
"grad_norm": 0.12379042059183121,
"learning_rate": 1.087546239210851e-05,
"loss": 1.0663,
"step": 470
},
{
"epoch": 0.516872427983539,
"grad_norm": 0.12589439749717712,
"learning_rate": 1.0850801479654748e-05,
"loss": 1.1414,
"step": 471
},
{
"epoch": 0.5179698216735253,
"grad_norm": 0.12561152875423431,
"learning_rate": 1.0826140567200987e-05,
"loss": 1.1005,
"step": 472
},
{
"epoch": 0.5190672153635116,
"grad_norm": 0.12228364497423172,
"learning_rate": 1.0801479654747226e-05,
"loss": 1.179,
"step": 473
},
{
"epoch": 0.5201646090534979,
"grad_norm": 0.12040815502405167,
"learning_rate": 1.0776818742293467e-05,
"loss": 1.1178,
"step": 474
},
{
"epoch": 0.5212620027434842,
"grad_norm": 0.15086573362350464,
"learning_rate": 1.0752157829839704e-05,
"loss": 1.0438,
"step": 475
},
{
"epoch": 0.5223593964334705,
"grad_norm": 0.14049848914146423,
"learning_rate": 1.0727496917385945e-05,
"loss": 1.1483,
"step": 476
},
{
"epoch": 0.5234567901234568,
"grad_norm": 0.12689921259880066,
"learning_rate": 1.0702836004932185e-05,
"loss": 1.1626,
"step": 477
},
{
"epoch": 0.5245541838134431,
"grad_norm": 0.13062772154808044,
"learning_rate": 1.0678175092478422e-05,
"loss": 1.1126,
"step": 478
},
{
"epoch": 0.5256515775034294,
"grad_norm": 0.13485489785671234,
"learning_rate": 1.0653514180024663e-05,
"loss": 1.1382,
"step": 479
},
{
"epoch": 0.5267489711934157,
"grad_norm": 0.14438626170158386,
"learning_rate": 1.06288532675709e-05,
"loss": 1.0479,
"step": 480
},
{
"epoch": 0.527846364883402,
"grad_norm": 0.1365397721529007,
"learning_rate": 1.060419235511714e-05,
"loss": 1.0412,
"step": 481
},
{
"epoch": 0.5289437585733882,
"grad_norm": 0.1348356157541275,
"learning_rate": 1.057953144266338e-05,
"loss": 1.1173,
"step": 482
},
{
"epoch": 0.5300411522633744,
"grad_norm": 0.13282406330108643,
"learning_rate": 1.0554870530209617e-05,
"loss": 1.128,
"step": 483
},
{
"epoch": 0.5311385459533607,
"grad_norm": 0.1166507676243782,
"learning_rate": 1.0530209617755858e-05,
"loss": 1.0998,
"step": 484
},
{
"epoch": 0.532235939643347,
"grad_norm": 0.11780740320682526,
"learning_rate": 1.0505548705302098e-05,
"loss": 1.1824,
"step": 485
},
{
"epoch": 0.5333333333333333,
"grad_norm": 0.12084660679101944,
"learning_rate": 1.0480887792848335e-05,
"loss": 1.1065,
"step": 486
},
{
"epoch": 0.5344307270233196,
"grad_norm": 0.13042369484901428,
"learning_rate": 1.0456226880394576e-05,
"loss": 1.092,
"step": 487
},
{
"epoch": 0.5355281207133059,
"grad_norm": 0.12373581528663635,
"learning_rate": 1.0431565967940815e-05,
"loss": 1.1842,
"step": 488
},
{
"epoch": 0.5366255144032922,
"grad_norm": 0.13902585208415985,
"learning_rate": 1.0406905055487054e-05,
"loss": 1.0733,
"step": 489
},
{
"epoch": 0.5377229080932785,
"grad_norm": 0.12822625041007996,
"learning_rate": 1.0382244143033293e-05,
"loss": 1.1401,
"step": 490
},
{
"epoch": 0.5388203017832648,
"grad_norm": 0.14749017357826233,
"learning_rate": 1.0357583230579533e-05,
"loss": 1.0773,
"step": 491
},
{
"epoch": 0.5399176954732511,
"grad_norm": 0.11797695606946945,
"learning_rate": 1.033292231812577e-05,
"loss": 1.1464,
"step": 492
},
{
"epoch": 0.5410150891632373,
"grad_norm": 0.1362835019826889,
"learning_rate": 1.0308261405672011e-05,
"loss": 1.1871,
"step": 493
},
{
"epoch": 0.5421124828532236,
"grad_norm": 0.13655728101730347,
"learning_rate": 1.0283600493218252e-05,
"loss": 1.1013,
"step": 494
},
{
"epoch": 0.5432098765432098,
"grad_norm": 0.12978941202163696,
"learning_rate": 1.0258939580764489e-05,
"loss": 1.1468,
"step": 495
},
{
"epoch": 0.5443072702331961,
"grad_norm": 0.12897971272468567,
"learning_rate": 1.023427866831073e-05,
"loss": 1.1408,
"step": 496
},
{
"epoch": 0.5454046639231824,
"grad_norm": 0.12697400152683258,
"learning_rate": 1.0209617755856967e-05,
"loss": 1.0682,
"step": 497
},
{
"epoch": 0.5465020576131687,
"grad_norm": 0.12820445001125336,
"learning_rate": 1.0184956843403206e-05,
"loss": 1.2311,
"step": 498
},
{
"epoch": 0.547599451303155,
"grad_norm": 0.21345391869544983,
"learning_rate": 1.0160295930949446e-05,
"loss": 0.9323,
"step": 499
},
{
"epoch": 0.5486968449931413,
"grad_norm": 0.12978078424930573,
"learning_rate": 1.0135635018495683e-05,
"loss": 1.0179,
"step": 500
},
{
"epoch": 0.5497942386831276,
"grad_norm": 0.13460998237133026,
"learning_rate": 1.0110974106041924e-05,
"loss": 1.141,
"step": 501
},
{
"epoch": 0.5508916323731139,
"grad_norm": 0.12281425297260284,
"learning_rate": 1.0086313193588165e-05,
"loss": 1.1465,
"step": 502
},
{
"epoch": 0.5519890260631002,
"grad_norm": 0.11920657008886337,
"learning_rate": 1.0061652281134402e-05,
"loss": 1.1632,
"step": 503
},
{
"epoch": 0.5530864197530864,
"grad_norm": 0.13477723300457,
"learning_rate": 1.0036991368680642e-05,
"loss": 1.1192,
"step": 504
},
{
"epoch": 0.5541838134430727,
"grad_norm": 0.12021885067224503,
"learning_rate": 1.0012330456226881e-05,
"loss": 1.1749,
"step": 505
},
{
"epoch": 0.555281207133059,
"grad_norm": 0.1291181594133377,
"learning_rate": 9.98766954377312e-06,
"loss": 1.0651,
"step": 506
},
{
"epoch": 0.5563786008230452,
"grad_norm": 0.12819477915763855,
"learning_rate": 9.96300863131936e-06,
"loss": 1.116,
"step": 507
},
{
"epoch": 0.5574759945130315,
"grad_norm": 0.12644226849079132,
"learning_rate": 9.938347718865598e-06,
"loss": 1.147,
"step": 508
},
{
"epoch": 0.5585733882030178,
"grad_norm": 0.12499803304672241,
"learning_rate": 9.913686806411839e-06,
"loss": 1.1251,
"step": 509
},
{
"epoch": 0.5596707818930041,
"grad_norm": 0.12837661802768707,
"learning_rate": 9.889025893958078e-06,
"loss": 1.104,
"step": 510
},
{
"epoch": 0.5607681755829904,
"grad_norm": 0.13800503313541412,
"learning_rate": 9.864364981504317e-06,
"loss": 1.2091,
"step": 511
},
{
"epoch": 0.5618655692729767,
"grad_norm": 0.12412415444850922,
"learning_rate": 9.839704069050555e-06,
"loss": 1.1144,
"step": 512
},
{
"epoch": 0.562962962962963,
"grad_norm": 0.142201229929924,
"learning_rate": 9.815043156596796e-06,
"loss": 1.0442,
"step": 513
},
{
"epoch": 0.5640603566529493,
"grad_norm": 0.1310838907957077,
"learning_rate": 9.790382244143033e-06,
"loss": 1.1073,
"step": 514
},
{
"epoch": 0.5651577503429356,
"grad_norm": 0.13173076510429382,
"learning_rate": 9.765721331689272e-06,
"loss": 1.0572,
"step": 515
},
{
"epoch": 0.5662551440329218,
"grad_norm": 0.14995068311691284,
"learning_rate": 9.741060419235513e-06,
"loss": 1.141,
"step": 516
},
{
"epoch": 0.5673525377229081,
"grad_norm": 0.1287776529788971,
"learning_rate": 9.716399506781752e-06,
"loss": 1.0868,
"step": 517
},
{
"epoch": 0.5684499314128943,
"grad_norm": 0.13318051397800446,
"learning_rate": 9.69173859432799e-06,
"loss": 1.0967,
"step": 518
},
{
"epoch": 0.5695473251028806,
"grad_norm": 0.13498608767986298,
"learning_rate": 9.667077681874231e-06,
"loss": 1.1334,
"step": 519
},
{
"epoch": 0.5706447187928669,
"grad_norm": 0.11963332444429398,
"learning_rate": 9.64241676942047e-06,
"loss": 1.1811,
"step": 520
},
{
"epoch": 0.5717421124828532,
"grad_norm": 0.13906215131282806,
"learning_rate": 9.617755856966709e-06,
"loss": 1.1026,
"step": 521
},
{
"epoch": 0.5728395061728395,
"grad_norm": 0.12360992282629013,
"learning_rate": 9.593094944512948e-06,
"loss": 1.096,
"step": 522
},
{
"epoch": 0.5739368998628258,
"grad_norm": 0.16927878558635712,
"learning_rate": 9.568434032059187e-06,
"loss": 1.0426,
"step": 523
},
{
"epoch": 0.5750342935528121,
"grad_norm": 0.1344671994447708,
"learning_rate": 9.543773119605426e-06,
"loss": 1.1305,
"step": 524
},
{
"epoch": 0.5761316872427984,
"grad_norm": 0.12095975130796432,
"learning_rate": 9.519112207151665e-06,
"loss": 1.1912,
"step": 525
},
{
"epoch": 0.5772290809327847,
"grad_norm": 0.1255401372909546,
"learning_rate": 9.494451294697905e-06,
"loss": 1.148,
"step": 526
},
{
"epoch": 0.5783264746227709,
"grad_norm": 0.13548162579536438,
"learning_rate": 9.469790382244144e-06,
"loss": 1.1192,
"step": 527
},
{
"epoch": 0.5794238683127572,
"grad_norm": 0.1412874460220337,
"learning_rate": 9.445129469790383e-06,
"loss": 1.1496,
"step": 528
},
{
"epoch": 0.5805212620027435,
"grad_norm": 0.1487760990858078,
"learning_rate": 9.420468557336622e-06,
"loss": 1.0904,
"step": 529
},
{
"epoch": 0.5816186556927297,
"grad_norm": 0.1419951468706131,
"learning_rate": 9.39580764488286e-06,
"loss": 1.0875,
"step": 530
},
{
"epoch": 0.582716049382716,
"grad_norm": 0.12120068818330765,
"learning_rate": 9.3711467324291e-06,
"loss": 1.1354,
"step": 531
},
{
"epoch": 0.5838134430727023,
"grad_norm": 0.13798055052757263,
"learning_rate": 9.346485819975339e-06,
"loss": 1.0849,
"step": 532
},
{
"epoch": 0.5849108367626886,
"grad_norm": 0.1383359581232071,
"learning_rate": 9.32182490752158e-06,
"loss": 1.1356,
"step": 533
},
{
"epoch": 0.5860082304526749,
"grad_norm": 0.1449320912361145,
"learning_rate": 9.297163995067818e-06,
"loss": 1.0923,
"step": 534
},
{
"epoch": 0.5871056241426612,
"grad_norm": 0.1634596735239029,
"learning_rate": 9.272503082614057e-06,
"loss": 1.1235,
"step": 535
},
{
"epoch": 0.5882030178326475,
"grad_norm": 0.13807539641857147,
"learning_rate": 9.247842170160298e-06,
"loss": 1.0803,
"step": 536
},
{
"epoch": 0.5893004115226338,
"grad_norm": 0.13776534795761108,
"learning_rate": 9.223181257706537e-06,
"loss": 1.0765,
"step": 537
},
{
"epoch": 0.59039780521262,
"grad_norm": 0.13710565865039825,
"learning_rate": 9.198520345252775e-06,
"loss": 0.9956,
"step": 538
},
{
"epoch": 0.5914951989026063,
"grad_norm": 0.14347536861896515,
"learning_rate": 9.173859432799014e-06,
"loss": 1.0868,
"step": 539
},
{
"epoch": 0.5925925925925926,
"grad_norm": 0.14716362953186035,
"learning_rate": 9.149198520345253e-06,
"loss": 1.1414,
"step": 540
},
{
"epoch": 0.5936899862825789,
"grad_norm": 0.1471024453639984,
"learning_rate": 9.124537607891492e-06,
"loss": 1.1012,
"step": 541
},
{
"epoch": 0.5947873799725651,
"grad_norm": 0.15511175990104675,
"learning_rate": 9.099876695437731e-06,
"loss": 1.1101,
"step": 542
},
{
"epoch": 0.5958847736625514,
"grad_norm": 0.12379336357116699,
"learning_rate": 9.075215782983972e-06,
"loss": 1.1474,
"step": 543
},
{
"epoch": 0.5969821673525377,
"grad_norm": 0.13909649848937988,
"learning_rate": 9.05055487053021e-06,
"loss": 1.0773,
"step": 544
},
{
"epoch": 0.598079561042524,
"grad_norm": 0.11885585635900497,
"learning_rate": 9.02589395807645e-06,
"loss": 1.092,
"step": 545
},
{
"epoch": 0.5991769547325103,
"grad_norm": 0.14395396411418915,
"learning_rate": 9.001233045622688e-06,
"loss": 1.0922,
"step": 546
},
{
"epoch": 0.6002743484224966,
"grad_norm": 0.13211029767990112,
"learning_rate": 8.976572133168927e-06,
"loss": 1.1803,
"step": 547
},
{
"epoch": 0.6013717421124829,
"grad_norm": 0.14772741496562958,
"learning_rate": 8.951911220715166e-06,
"loss": 1.0751,
"step": 548
},
{
"epoch": 0.6024691358024692,
"grad_norm": 0.11977895349264145,
"learning_rate": 8.927250308261405e-06,
"loss": 1.1872,
"step": 549
},
{
"epoch": 0.6035665294924554,
"grad_norm": 0.12050166726112366,
"learning_rate": 8.902589395807646e-06,
"loss": 1.1168,
"step": 550
},
{
"epoch": 0.6046639231824417,
"grad_norm": 0.14867153763771057,
"learning_rate": 8.877928483353885e-06,
"loss": 1.0557,
"step": 551
},
{
"epoch": 0.605761316872428,
"grad_norm": 0.13625556230545044,
"learning_rate": 8.853267570900124e-06,
"loss": 1.0931,
"step": 552
},
{
"epoch": 0.6068587105624142,
"grad_norm": 0.13109120726585388,
"learning_rate": 8.828606658446364e-06,
"loss": 1.229,
"step": 553
},
{
"epoch": 0.6079561042524005,
"grad_norm": 0.125279039144516,
"learning_rate": 8.803945745992603e-06,
"loss": 1.1492,
"step": 554
},
{
"epoch": 0.6090534979423868,
"grad_norm": 0.1311226338148117,
"learning_rate": 8.779284833538842e-06,
"loss": 1.1132,
"step": 555
},
{
"epoch": 0.6101508916323731,
"grad_norm": 0.12563110888004303,
"learning_rate": 8.754623921085081e-06,
"loss": 1.1419,
"step": 556
},
{
"epoch": 0.6112482853223594,
"grad_norm": 0.15926848351955414,
"learning_rate": 8.72996300863132e-06,
"loss": 1.1256,
"step": 557
},
{
"epoch": 0.6123456790123457,
"grad_norm": 0.13532838225364685,
"learning_rate": 8.705302096177559e-06,
"loss": 1.1244,
"step": 558
},
{
"epoch": 0.613443072702332,
"grad_norm": 0.13677728176116943,
"learning_rate": 8.680641183723798e-06,
"loss": 1.1055,
"step": 559
},
{
"epoch": 0.6145404663923183,
"grad_norm": 0.12396983802318573,
"learning_rate": 8.655980271270038e-06,
"loss": 1.1146,
"step": 560
},
{
"epoch": 0.6156378600823045,
"grad_norm": 0.12140499800443649,
"learning_rate": 8.631319358816277e-06,
"loss": 1.1088,
"step": 561
},
{
"epoch": 0.6167352537722908,
"grad_norm": 0.144679993391037,
"learning_rate": 8.606658446362516e-06,
"loss": 1.1813,
"step": 562
},
{
"epoch": 0.6178326474622771,
"grad_norm": 0.13666993379592896,
"learning_rate": 8.581997533908755e-06,
"loss": 1.0682,
"step": 563
},
{
"epoch": 0.6189300411522634,
"grad_norm": 0.1422543227672577,
"learning_rate": 8.557336621454994e-06,
"loss": 1.1206,
"step": 564
},
{
"epoch": 0.6200274348422496,
"grad_norm": 0.13064906001091003,
"learning_rate": 8.532675709001233e-06,
"loss": 1.1021,
"step": 565
},
{
"epoch": 0.6211248285322359,
"grad_norm": 0.13712053000926971,
"learning_rate": 8.508014796547472e-06,
"loss": 1.0606,
"step": 566
},
{
"epoch": 0.6222222222222222,
"grad_norm": 0.1299920678138733,
"learning_rate": 8.483353884093712e-06,
"loss": 1.1722,
"step": 567
},
{
"epoch": 0.6233196159122085,
"grad_norm": 0.1433524787425995,
"learning_rate": 8.458692971639951e-06,
"loss": 1.1478,
"step": 568
},
{
"epoch": 0.6244170096021948,
"grad_norm": 0.13581444323062897,
"learning_rate": 8.43403205918619e-06,
"loss": 1.14,
"step": 569
},
{
"epoch": 0.6255144032921811,
"grad_norm": 0.1414836049079895,
"learning_rate": 8.40937114673243e-06,
"loss": 1.1146,
"step": 570
},
{
"epoch": 0.6266117969821674,
"grad_norm": 0.13587547838687897,
"learning_rate": 8.38471023427867e-06,
"loss": 1.2179,
"step": 571
},
{
"epoch": 0.6277091906721536,
"grad_norm": 0.1454116404056549,
"learning_rate": 8.360049321824909e-06,
"loss": 1.0244,
"step": 572
},
{
"epoch": 0.6288065843621399,
"grad_norm": 0.11638734489679337,
"learning_rate": 8.335388409371147e-06,
"loss": 1.1298,
"step": 573
},
{
"epoch": 0.6299039780521262,
"grad_norm": 0.13515996932983398,
"learning_rate": 8.310727496917386e-06,
"loss": 1.1097,
"step": 574
},
{
"epoch": 0.6310013717421125,
"grad_norm": 0.12577927112579346,
"learning_rate": 8.286066584463625e-06,
"loss": 1.0974,
"step": 575
},
{
"epoch": 0.6320987654320988,
"grad_norm": 0.1303602010011673,
"learning_rate": 8.261405672009864e-06,
"loss": 1.1112,
"step": 576
},
{
"epoch": 0.633196159122085,
"grad_norm": 0.147017240524292,
"learning_rate": 8.236744759556105e-06,
"loss": 1.1228,
"step": 577
},
{
"epoch": 0.6342935528120713,
"grad_norm": 0.13731957972049713,
"learning_rate": 8.212083847102344e-06,
"loss": 1.0454,
"step": 578
},
{
"epoch": 0.6353909465020576,
"grad_norm": 0.13873283565044403,
"learning_rate": 8.187422934648583e-06,
"loss": 1.1814,
"step": 579
},
{
"epoch": 0.6364883401920439,
"grad_norm": 0.1308884173631668,
"learning_rate": 8.162762022194821e-06,
"loss": 1.0395,
"step": 580
},
{
"epoch": 0.6375857338820302,
"grad_norm": 0.120221808552742,
"learning_rate": 8.13810110974106e-06,
"loss": 1.0833,
"step": 581
},
{
"epoch": 0.6386831275720165,
"grad_norm": 0.12536829710006714,
"learning_rate": 8.1134401972873e-06,
"loss": 1.1286,
"step": 582
},
{
"epoch": 0.6397805212620027,
"grad_norm": 0.12190794199705124,
"learning_rate": 8.08877928483354e-06,
"loss": 1.1711,
"step": 583
},
{
"epoch": 0.640877914951989,
"grad_norm": 0.13477134704589844,
"learning_rate": 8.064118372379779e-06,
"loss": 1.1483,
"step": 584
},
{
"epoch": 0.6419753086419753,
"grad_norm": 0.13288861513137817,
"learning_rate": 8.039457459926018e-06,
"loss": 1.1008,
"step": 585
},
{
"epoch": 0.6430727023319616,
"grad_norm": 0.14225690066814423,
"learning_rate": 8.014796547472257e-06,
"loss": 1.1924,
"step": 586
},
{
"epoch": 0.6441700960219479,
"grad_norm": 0.1309647411108017,
"learning_rate": 7.990135635018497e-06,
"loss": 1.1586,
"step": 587
},
{
"epoch": 0.6452674897119342,
"grad_norm": 0.14278441667556763,
"learning_rate": 7.965474722564736e-06,
"loss": 1.1341,
"step": 588
},
{
"epoch": 0.6463648834019204,
"grad_norm": 0.1286327838897705,
"learning_rate": 7.940813810110975e-06,
"loss": 1.122,
"step": 589
},
{
"epoch": 0.6474622770919067,
"grad_norm": 0.1449936330318451,
"learning_rate": 7.916152897657214e-06,
"loss": 1.1826,
"step": 590
},
{
"epoch": 0.648559670781893,
"grad_norm": 0.1307440549135208,
"learning_rate": 7.891491985203453e-06,
"loss": 1.1417,
"step": 591
},
{
"epoch": 0.6496570644718793,
"grad_norm": 0.14206618070602417,
"learning_rate": 7.866831072749692e-06,
"loss": 1.0413,
"step": 592
},
{
"epoch": 0.6507544581618656,
"grad_norm": 0.13166101276874542,
"learning_rate": 7.842170160295932e-06,
"loss": 1.0842,
"step": 593
},
{
"epoch": 0.6518518518518519,
"grad_norm": 0.13434740900993347,
"learning_rate": 7.817509247842171e-06,
"loss": 1.1033,
"step": 594
},
{
"epoch": 0.6529492455418381,
"grad_norm": 0.1200101375579834,
"learning_rate": 7.79284833538841e-06,
"loss": 1.08,
"step": 595
},
{
"epoch": 0.6540466392318244,
"grad_norm": 0.13683106005191803,
"learning_rate": 7.768187422934649e-06,
"loss": 1.128,
"step": 596
},
{
"epoch": 0.6551440329218107,
"grad_norm": 0.13702082633972168,
"learning_rate": 7.743526510480888e-06,
"loss": 1.1325,
"step": 597
},
{
"epoch": 0.656241426611797,
"grad_norm": 0.13655568659305573,
"learning_rate": 7.718865598027127e-06,
"loss": 1.0282,
"step": 598
},
{
"epoch": 0.6573388203017833,
"grad_norm": 0.11861226707696915,
"learning_rate": 7.694204685573366e-06,
"loss": 1.1373,
"step": 599
},
{
"epoch": 0.6584362139917695,
"grad_norm": 0.13324734568595886,
"learning_rate": 7.669543773119606e-06,
"loss": 1.1823,
"step": 600
},
{
"epoch": 0.6595336076817558,
"grad_norm": 0.13969723880290985,
"learning_rate": 7.644882860665845e-06,
"loss": 1.1193,
"step": 601
},
{
"epoch": 0.6606310013717421,
"grad_norm": 0.138763889670372,
"learning_rate": 7.620221948212084e-06,
"loss": 1.1123,
"step": 602
},
{
"epoch": 0.6617283950617284,
"grad_norm": 0.13153599202632904,
"learning_rate": 7.595561035758323e-06,
"loss": 1.1058,
"step": 603
},
{
"epoch": 0.6628257887517147,
"grad_norm": 0.1288379430770874,
"learning_rate": 7.570900123304563e-06,
"loss": 1.0909,
"step": 604
},
{
"epoch": 0.663923182441701,
"grad_norm": 0.1367582231760025,
"learning_rate": 7.546239210850802e-06,
"loss": 1.0334,
"step": 605
},
{
"epoch": 0.6650205761316872,
"grad_norm": 0.12939676642417908,
"learning_rate": 7.521578298397041e-06,
"loss": 1.0775,
"step": 606
},
{
"epoch": 0.6661179698216735,
"grad_norm": 0.13814114034175873,
"learning_rate": 7.4969173859432805e-06,
"loss": 1.1047,
"step": 607
},
{
"epoch": 0.6672153635116598,
"grad_norm": 0.1455143690109253,
"learning_rate": 7.472256473489519e-06,
"loss": 1.163,
"step": 608
},
{
"epoch": 0.6683127572016461,
"grad_norm": 0.15828116238117218,
"learning_rate": 7.447595561035758e-06,
"loss": 1.0329,
"step": 609
},
{
"epoch": 0.6694101508916324,
"grad_norm": 0.14122609794139862,
"learning_rate": 7.422934648581999e-06,
"loss": 1.1115,
"step": 610
},
{
"epoch": 0.6705075445816187,
"grad_norm": 0.12997443974018097,
"learning_rate": 7.398273736128238e-06,
"loss": 1.1093,
"step": 611
},
{
"epoch": 0.671604938271605,
"grad_norm": 0.13643480837345123,
"learning_rate": 7.373612823674477e-06,
"loss": 1.13,
"step": 612
},
{
"epoch": 0.6727023319615912,
"grad_norm": 0.12696883082389832,
"learning_rate": 7.348951911220716e-06,
"loss": 1.1004,
"step": 613
},
{
"epoch": 0.6737997256515775,
"grad_norm": 0.14005936682224274,
"learning_rate": 7.324290998766955e-06,
"loss": 1.0574,
"step": 614
},
{
"epoch": 0.6748971193415638,
"grad_norm": 0.14040903747081757,
"learning_rate": 7.299630086313194e-06,
"loss": 1.1028,
"step": 615
},
{
"epoch": 0.6759945130315501,
"grad_norm": 0.12855766713619232,
"learning_rate": 7.274969173859433e-06,
"loss": 1.1411,
"step": 616
},
{
"epoch": 0.6770919067215363,
"grad_norm": 0.14175771176815033,
"learning_rate": 7.250308261405673e-06,
"loss": 1.077,
"step": 617
},
{
"epoch": 0.6781893004115226,
"grad_norm": 0.12823879718780518,
"learning_rate": 7.225647348951912e-06,
"loss": 1.0089,
"step": 618
},
{
"epoch": 0.6792866941015089,
"grad_norm": 0.13076744973659515,
"learning_rate": 7.200986436498151e-06,
"loss": 1.1641,
"step": 619
},
{
"epoch": 0.6803840877914952,
"grad_norm": 0.1256016492843628,
"learning_rate": 7.1763255240443905e-06,
"loss": 1.1092,
"step": 620
},
{
"epoch": 0.6814814814814815,
"grad_norm": 0.14268584549427032,
"learning_rate": 7.1516646115906294e-06,
"loss": 1.0106,
"step": 621
},
{
"epoch": 0.6825788751714678,
"grad_norm": 0.13120578229427338,
"learning_rate": 7.127003699136868e-06,
"loss": 1.0598,
"step": 622
},
{
"epoch": 0.683676268861454,
"grad_norm": 0.13504907488822937,
"learning_rate": 7.102342786683107e-06,
"loss": 1.0966,
"step": 623
},
{
"epoch": 0.6847736625514403,
"grad_norm": 0.12563414871692657,
"learning_rate": 7.077681874229347e-06,
"loss": 1.0724,
"step": 624
},
{
"epoch": 0.6858710562414266,
"grad_norm": 0.13522499799728394,
"learning_rate": 7.053020961775586e-06,
"loss": 1.1182,
"step": 625
},
{
"epoch": 0.6869684499314129,
"grad_norm": 0.12960287928581238,
"learning_rate": 7.028360049321825e-06,
"loss": 1.1252,
"step": 626
},
{
"epoch": 0.6880658436213992,
"grad_norm": 0.13346299529075623,
"learning_rate": 7.003699136868065e-06,
"loss": 1.1317,
"step": 627
},
{
"epoch": 0.6891632373113855,
"grad_norm": 0.1333625316619873,
"learning_rate": 6.979038224414304e-06,
"loss": 1.1505,
"step": 628
},
{
"epoch": 0.6902606310013717,
"grad_norm": 0.1392945796251297,
"learning_rate": 6.954377311960543e-06,
"loss": 1.1966,
"step": 629
},
{
"epoch": 0.691358024691358,
"grad_norm": 0.12204419821500778,
"learning_rate": 6.929716399506782e-06,
"loss": 1.1243,
"step": 630
},
{
"epoch": 0.6924554183813443,
"grad_norm": 0.1395426094532013,
"learning_rate": 6.905055487053022e-06,
"loss": 1.0475,
"step": 631
},
{
"epoch": 0.6935528120713306,
"grad_norm": 0.13325053453445435,
"learning_rate": 6.880394574599261e-06,
"loss": 1.0344,
"step": 632
},
{
"epoch": 0.6946502057613169,
"grad_norm": 0.14765462279319763,
"learning_rate": 6.8557336621455e-06,
"loss": 1.0965,
"step": 633
},
{
"epoch": 0.6957475994513032,
"grad_norm": 0.12556719779968262,
"learning_rate": 6.8310727496917395e-06,
"loss": 1.199,
"step": 634
},
{
"epoch": 0.6968449931412894,
"grad_norm": 0.12908804416656494,
"learning_rate": 6.806411837237978e-06,
"loss": 1.1594,
"step": 635
},
{
"epoch": 0.6979423868312757,
"grad_norm": 0.1703738272190094,
"learning_rate": 6.781750924784217e-06,
"loss": 1.0171,
"step": 636
},
{
"epoch": 0.699039780521262,
"grad_norm": 0.12791863083839417,
"learning_rate": 6.757090012330457e-06,
"loss": 1.2105,
"step": 637
},
{
"epoch": 0.7001371742112483,
"grad_norm": 0.17011161148548126,
"learning_rate": 6.732429099876696e-06,
"loss": 1.0192,
"step": 638
},
{
"epoch": 0.7012345679012346,
"grad_norm": 0.14074620604515076,
"learning_rate": 6.707768187422935e-06,
"loss": 1.1763,
"step": 639
},
{
"epoch": 0.7023319615912208,
"grad_norm": 0.13788381218910217,
"learning_rate": 6.683107274969174e-06,
"loss": 1.0638,
"step": 640
},
{
"epoch": 0.7034293552812071,
"grad_norm": 0.13305304944515228,
"learning_rate": 6.6584463625154135e-06,
"loss": 1.1449,
"step": 641
},
{
"epoch": 0.7045267489711934,
"grad_norm": 0.1297188103199005,
"learning_rate": 6.633785450061652e-06,
"loss": 1.1244,
"step": 642
},
{
"epoch": 0.7056241426611797,
"grad_norm": 0.12216539680957794,
"learning_rate": 6.609124537607891e-06,
"loss": 1.099,
"step": 643
},
{
"epoch": 0.706721536351166,
"grad_norm": 0.12714643776416779,
"learning_rate": 6.584463625154132e-06,
"loss": 1.1373,
"step": 644
},
{
"epoch": 0.7078189300411523,
"grad_norm": 0.12196072936058044,
"learning_rate": 6.559802712700371e-06,
"loss": 1.1225,
"step": 645
},
{
"epoch": 0.7089163237311386,
"grad_norm": 0.1701362133026123,
"learning_rate": 6.53514180024661e-06,
"loss": 0.991,
"step": 646
},
{
"epoch": 0.7100137174211248,
"grad_norm": 0.1309044361114502,
"learning_rate": 6.5104808877928495e-06,
"loss": 1.1614,
"step": 647
},
{
"epoch": 0.7111111111111111,
"grad_norm": 0.1310199499130249,
"learning_rate": 6.485819975339088e-06,
"loss": 1.1724,
"step": 648
},
{
"epoch": 0.7122085048010974,
"grad_norm": 0.15935364365577698,
"learning_rate": 6.461159062885327e-06,
"loss": 1.0417,
"step": 649
},
{
"epoch": 0.7133058984910837,
"grad_norm": 0.13248024880886078,
"learning_rate": 6.436498150431566e-06,
"loss": 1.2158,
"step": 650
},
{
"epoch": 0.7144032921810699,
"grad_norm": 0.14017465710639954,
"learning_rate": 6.411837237977806e-06,
"loss": 1.1212,
"step": 651
},
{
"epoch": 0.7155006858710562,
"grad_norm": 0.13974924385547638,
"learning_rate": 6.387176325524045e-06,
"loss": 1.0866,
"step": 652
},
{
"epoch": 0.7165980795610425,
"grad_norm": 0.13914860785007477,
"learning_rate": 6.362515413070284e-06,
"loss": 1.046,
"step": 653
},
{
"epoch": 0.7176954732510288,
"grad_norm": 0.1510930210351944,
"learning_rate": 6.3378545006165236e-06,
"loss": 0.9835,
"step": 654
},
{
"epoch": 0.7187928669410151,
"grad_norm": 0.13082289695739746,
"learning_rate": 6.3131935881627625e-06,
"loss": 1.1769,
"step": 655
},
{
"epoch": 0.7198902606310014,
"grad_norm": 0.14069297909736633,
"learning_rate": 6.288532675709001e-06,
"loss": 1.0869,
"step": 656
},
{
"epoch": 0.7209876543209877,
"grad_norm": 0.1553945541381836,
"learning_rate": 6.263871763255241e-06,
"loss": 1.0641,
"step": 657
},
{
"epoch": 0.722085048010974,
"grad_norm": 0.14064814150333405,
"learning_rate": 6.23921085080148e-06,
"loss": 1.1924,
"step": 658
},
{
"epoch": 0.7231824417009602,
"grad_norm": 0.1389569491147995,
"learning_rate": 6.214549938347719e-06,
"loss": 1.0729,
"step": 659
},
{
"epoch": 0.7242798353909465,
"grad_norm": 0.14110144972801208,
"learning_rate": 6.189889025893958e-06,
"loss": 1.1349,
"step": 660
},
{
"epoch": 0.7253772290809328,
"grad_norm": 0.13982906937599182,
"learning_rate": 6.1652281134401985e-06,
"loss": 1.0304,
"step": 661
},
{
"epoch": 0.7264746227709191,
"grad_norm": 0.12203299254179001,
"learning_rate": 6.140567200986437e-06,
"loss": 1.2023,
"step": 662
},
{
"epoch": 0.7275720164609053,
"grad_norm": 0.1401350200176239,
"learning_rate": 6.115906288532676e-06,
"loss": 1.0947,
"step": 663
},
{
"epoch": 0.7286694101508916,
"grad_norm": 0.14056162536144257,
"learning_rate": 6.091245376078916e-06,
"loss": 1.073,
"step": 664
},
{
"epoch": 0.7297668038408779,
"grad_norm": 0.13901904225349426,
"learning_rate": 6.066584463625155e-06,
"loss": 1.134,
"step": 665
},
{
"epoch": 0.7308641975308642,
"grad_norm": 0.1339583396911621,
"learning_rate": 6.041923551171394e-06,
"loss": 1.2012,
"step": 666
},
{
"epoch": 0.7319615912208505,
"grad_norm": 0.14305201172828674,
"learning_rate": 6.017262638717633e-06,
"loss": 1.2075,
"step": 667
},
{
"epoch": 0.7330589849108368,
"grad_norm": 0.1388700008392334,
"learning_rate": 5.9926017262638725e-06,
"loss": 1.1049,
"step": 668
},
{
"epoch": 0.7341563786008231,
"grad_norm": 0.13110363483428955,
"learning_rate": 5.967940813810111e-06,
"loss": 1.1915,
"step": 669
},
{
"epoch": 0.7352537722908093,
"grad_norm": 0.1336205154657364,
"learning_rate": 5.94327990135635e-06,
"loss": 1.1189,
"step": 670
},
{
"epoch": 0.7363511659807956,
"grad_norm": 0.15483205020427704,
"learning_rate": 5.91861898890259e-06,
"loss": 1.0508,
"step": 671
},
{
"epoch": 0.7374485596707819,
"grad_norm": 0.1405985802412033,
"learning_rate": 5.893958076448829e-06,
"loss": 1.1348,
"step": 672
},
{
"epoch": 0.7385459533607682,
"grad_norm": 0.13037075102329254,
"learning_rate": 5.869297163995068e-06,
"loss": 1.1437,
"step": 673
},
{
"epoch": 0.7396433470507544,
"grad_norm": 0.12945199012756348,
"learning_rate": 5.844636251541308e-06,
"loss": 1.1265,
"step": 674
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.1295364648103714,
"learning_rate": 5.8199753390875466e-06,
"loss": 1.1266,
"step": 675
},
{
"epoch": 0.741838134430727,
"grad_norm": 0.12387209385633469,
"learning_rate": 5.7953144266337855e-06,
"loss": 1.184,
"step": 676
},
{
"epoch": 0.7429355281207133,
"grad_norm": 0.12598906457424164,
"learning_rate": 5.770653514180024e-06,
"loss": 1.1155,
"step": 677
},
{
"epoch": 0.7440329218106996,
"grad_norm": 0.13896718621253967,
"learning_rate": 5.745992601726265e-06,
"loss": 1.1824,
"step": 678
},
{
"epoch": 0.7451303155006859,
"grad_norm": 0.13778887689113617,
"learning_rate": 5.721331689272504e-06,
"loss": 1.1126,
"step": 679
},
{
"epoch": 0.7462277091906722,
"grad_norm": 0.12722033262252808,
"learning_rate": 5.696670776818743e-06,
"loss": 1.1672,
"step": 680
},
{
"epoch": 0.7473251028806585,
"grad_norm": 0.13544504344463348,
"learning_rate": 5.6720098643649825e-06,
"loss": 1.1423,
"step": 681
},
{
"epoch": 0.7484224965706447,
"grad_norm": 0.14108151197433472,
"learning_rate": 5.6473489519112214e-06,
"loss": 1.1087,
"step": 682
},
{
"epoch": 0.749519890260631,
"grad_norm": 0.15130096673965454,
"learning_rate": 5.62268803945746e-06,
"loss": 1.0687,
"step": 683
},
{
"epoch": 0.7506172839506173,
"grad_norm": 0.15002749860286713,
"learning_rate": 5.5980271270037e-06,
"loss": 1.0799,
"step": 684
},
{
"epoch": 0.7517146776406035,
"grad_norm": 0.12720707058906555,
"learning_rate": 5.573366214549939e-06,
"loss": 1.1763,
"step": 685
},
{
"epoch": 0.7528120713305898,
"grad_norm": 0.12860235571861267,
"learning_rate": 5.548705302096178e-06,
"loss": 1.0789,
"step": 686
},
{
"epoch": 0.7539094650205761,
"grad_norm": 0.13125815987586975,
"learning_rate": 5.524044389642417e-06,
"loss": 1.1059,
"step": 687
},
{
"epoch": 0.7550068587105624,
"grad_norm": 0.14888082444667816,
"learning_rate": 5.499383477188657e-06,
"loss": 1.1347,
"step": 688
},
{
"epoch": 0.7561042524005487,
"grad_norm": 0.1384962499141693,
"learning_rate": 5.4747225647348955e-06,
"loss": 1.0306,
"step": 689
},
{
"epoch": 0.757201646090535,
"grad_norm": 0.14311553537845612,
"learning_rate": 5.450061652281134e-06,
"loss": 1.0885,
"step": 690
},
{
"epoch": 0.7582990397805213,
"grad_norm": 0.15725594758987427,
"learning_rate": 5.425400739827374e-06,
"loss": 1.2072,
"step": 691
},
{
"epoch": 0.7593964334705076,
"grad_norm": 0.13400770723819733,
"learning_rate": 5.400739827373613e-06,
"loss": 1.1365,
"step": 692
},
{
"epoch": 0.7604938271604939,
"grad_norm": 0.1280183643102646,
"learning_rate": 5.376078914919852e-06,
"loss": 1.0595,
"step": 693
},
{
"epoch": 0.7615912208504801,
"grad_norm": 0.1436558961868286,
"learning_rate": 5.351418002466093e-06,
"loss": 1.0537,
"step": 694
},
{
"epoch": 0.7626886145404664,
"grad_norm": 0.119937002658844,
"learning_rate": 5.3267570900123315e-06,
"loss": 1.16,
"step": 695
},
{
"epoch": 0.7637860082304527,
"grad_norm": 0.139418363571167,
"learning_rate": 5.30209617755857e-06,
"loss": 1.1494,
"step": 696
},
{
"epoch": 0.7648834019204389,
"grad_norm": 0.1451789140701294,
"learning_rate": 5.2774352651048085e-06,
"loss": 1.1444,
"step": 697
},
{
"epoch": 0.7659807956104252,
"grad_norm": 0.13994170725345612,
"learning_rate": 5.252774352651049e-06,
"loss": 1.1218,
"step": 698
},
{
"epoch": 0.7670781893004115,
"grad_norm": 0.14148814976215363,
"learning_rate": 5.228113440197288e-06,
"loss": 1.0957,
"step": 699
},
{
"epoch": 0.7681755829903978,
"grad_norm": 0.16258449852466583,
"learning_rate": 5.203452527743527e-06,
"loss": 1.1237,
"step": 700
},
{
"epoch": 0.7692729766803841,
"grad_norm": 0.15161359310150146,
"learning_rate": 5.178791615289767e-06,
"loss": 0.9999,
"step": 701
},
{
"epoch": 0.7703703703703704,
"grad_norm": 0.14900504052639008,
"learning_rate": 5.1541307028360055e-06,
"loss": 1.0474,
"step": 702
},
{
"epoch": 0.7714677640603567,
"grad_norm": 0.14473195374011993,
"learning_rate": 5.1294697903822444e-06,
"loss": 1.1994,
"step": 703
},
{
"epoch": 0.772565157750343,
"grad_norm": 0.14690522849559784,
"learning_rate": 5.104808877928483e-06,
"loss": 0.9891,
"step": 704
},
{
"epoch": 0.7736625514403292,
"grad_norm": 0.12408190965652466,
"learning_rate": 5.080147965474723e-06,
"loss": 1.1341,
"step": 705
},
{
"epoch": 0.7747599451303155,
"grad_norm": 0.14178062975406647,
"learning_rate": 5.055487053020962e-06,
"loss": 1.1432,
"step": 706
},
{
"epoch": 0.7758573388203018,
"grad_norm": 0.13498662412166595,
"learning_rate": 5.030826140567201e-06,
"loss": 1.1644,
"step": 707
},
{
"epoch": 0.776954732510288,
"grad_norm": 0.1294807493686676,
"learning_rate": 5.006165228113441e-06,
"loss": 1.2114,
"step": 708
},
{
"epoch": 0.7780521262002743,
"grad_norm": 0.14092494547367096,
"learning_rate": 4.98150431565968e-06,
"loss": 1.045,
"step": 709
},
{
"epoch": 0.7791495198902606,
"grad_norm": 0.16273631155490875,
"learning_rate": 4.956843403205919e-06,
"loss": 1.0973,
"step": 710
},
{
"epoch": 0.7802469135802469,
"grad_norm": 0.1321549266576767,
"learning_rate": 4.932182490752158e-06,
"loss": 1.2112,
"step": 711
},
{
"epoch": 0.7813443072702332,
"grad_norm": 0.14917899668216705,
"learning_rate": 4.907521578298398e-06,
"loss": 1.1379,
"step": 712
},
{
"epoch": 0.7824417009602195,
"grad_norm": 0.14083033800125122,
"learning_rate": 4.882860665844636e-06,
"loss": 1.1145,
"step": 713
},
{
"epoch": 0.7835390946502058,
"grad_norm": 0.1294207125902176,
"learning_rate": 4.858199753390876e-06,
"loss": 1.1685,
"step": 714
},
{
"epoch": 0.7846364883401921,
"grad_norm": 0.1395813524723053,
"learning_rate": 4.833538840937116e-06,
"loss": 1.096,
"step": 715
},
{
"epoch": 0.7857338820301784,
"grad_norm": 0.12633414566516876,
"learning_rate": 4.8088779284833545e-06,
"loss": 1.1502,
"step": 716
},
{
"epoch": 0.7868312757201646,
"grad_norm": 0.13412798941135406,
"learning_rate": 4.784217016029593e-06,
"loss": 1.1933,
"step": 717
},
{
"epoch": 0.7879286694101509,
"grad_norm": 0.1472862958908081,
"learning_rate": 4.759556103575832e-06,
"loss": 1.0965,
"step": 718
},
{
"epoch": 0.7890260631001371,
"grad_norm": 0.1501110941171646,
"learning_rate": 4.734895191122072e-06,
"loss": 1.0412,
"step": 719
},
{
"epoch": 0.7901234567901234,
"grad_norm": 0.13417938351631165,
"learning_rate": 4.710234278668311e-06,
"loss": 1.1097,
"step": 720
},
{
"epoch": 0.7912208504801097,
"grad_norm": 0.14140449464321136,
"learning_rate": 4.68557336621455e-06,
"loss": 1.0982,
"step": 721
},
{
"epoch": 0.792318244170096,
"grad_norm": 0.15051521360874176,
"learning_rate": 4.66091245376079e-06,
"loss": 1.1403,
"step": 722
},
{
"epoch": 0.7934156378600823,
"grad_norm": 0.12827663123607635,
"learning_rate": 4.6362515413070285e-06,
"loss": 1.1603,
"step": 723
},
{
"epoch": 0.7945130315500686,
"grad_norm": 0.1369502693414688,
"learning_rate": 4.611590628853268e-06,
"loss": 1.1927,
"step": 724
},
{
"epoch": 0.7956104252400549,
"grad_norm": 0.1420857161283493,
"learning_rate": 4.586929716399507e-06,
"loss": 1.078,
"step": 725
},
{
"epoch": 0.7967078189300412,
"grad_norm": 0.12905989587306976,
"learning_rate": 4.562268803945746e-06,
"loss": 1.1609,
"step": 726
},
{
"epoch": 0.7978052126200275,
"grad_norm": 0.16541007161140442,
"learning_rate": 4.537607891491986e-06,
"loss": 1.0611,
"step": 727
},
{
"epoch": 0.7989026063100138,
"grad_norm": 0.14104437828063965,
"learning_rate": 4.512946979038225e-06,
"loss": 1.0642,
"step": 728
},
{
"epoch": 0.8,
"grad_norm": 0.14089111983776093,
"learning_rate": 4.488286066584464e-06,
"loss": 1.1089,
"step": 729
},
{
"epoch": 0.8010973936899863,
"grad_norm": 0.1437205672264099,
"learning_rate": 4.463625154130703e-06,
"loss": 1.1064,
"step": 730
},
{
"epoch": 0.8021947873799725,
"grad_norm": 0.1396748125553131,
"learning_rate": 4.438964241676942e-06,
"loss": 1.083,
"step": 731
},
{
"epoch": 0.8032921810699588,
"grad_norm": 0.13815851509571075,
"learning_rate": 4.414303329223182e-06,
"loss": 1.2415,
"step": 732
},
{
"epoch": 0.8043895747599451,
"grad_norm": 0.1404045671224594,
"learning_rate": 4.389642416769421e-06,
"loss": 1.1589,
"step": 733
},
{
"epoch": 0.8054869684499314,
"grad_norm": 0.13807553052902222,
"learning_rate": 4.36498150431566e-06,
"loss": 1.1516,
"step": 734
},
{
"epoch": 0.8065843621399177,
"grad_norm": 0.138895183801651,
"learning_rate": 4.340320591861899e-06,
"loss": 1.1195,
"step": 735
},
{
"epoch": 0.807681755829904,
"grad_norm": 0.12265230715274811,
"learning_rate": 4.315659679408139e-06,
"loss": 1.1197,
"step": 736
},
{
"epoch": 0.8087791495198903,
"grad_norm": 0.14143763482570648,
"learning_rate": 4.2909987669543775e-06,
"loss": 1.1023,
"step": 737
},
{
"epoch": 0.8098765432098766,
"grad_norm": 0.16049422323703766,
"learning_rate": 4.266337854500616e-06,
"loss": 1.0921,
"step": 738
},
{
"epoch": 0.8109739368998629,
"grad_norm": 0.133567214012146,
"learning_rate": 4.241676942046856e-06,
"loss": 1.1309,
"step": 739
},
{
"epoch": 0.8120713305898491,
"grad_norm": 0.1394726186990738,
"learning_rate": 4.217016029593095e-06,
"loss": 1.0782,
"step": 740
},
{
"epoch": 0.8131687242798354,
"grad_norm": 0.146071657538414,
"learning_rate": 4.192355117139335e-06,
"loss": 1.1323,
"step": 741
},
{
"epoch": 0.8142661179698216,
"grad_norm": 0.1433979719877243,
"learning_rate": 4.167694204685574e-06,
"loss": 1.0563,
"step": 742
},
{
"epoch": 0.8153635116598079,
"grad_norm": 0.13134688138961792,
"learning_rate": 4.143033292231813e-06,
"loss": 1.1631,
"step": 743
},
{
"epoch": 0.8164609053497942,
"grad_norm": 0.1523819863796234,
"learning_rate": 4.118372379778052e-06,
"loss": 1.1061,
"step": 744
},
{
"epoch": 0.8175582990397805,
"grad_norm": 0.1449640989303589,
"learning_rate": 4.093711467324291e-06,
"loss": 1.0674,
"step": 745
},
{
"epoch": 0.8186556927297668,
"grad_norm": 0.14067208766937256,
"learning_rate": 4.06905055487053e-06,
"loss": 1.0599,
"step": 746
},
{
"epoch": 0.8197530864197531,
"grad_norm": 0.14667151868343353,
"learning_rate": 4.04438964241677e-06,
"loss": 1.0941,
"step": 747
},
{
"epoch": 0.8208504801097394,
"grad_norm": 0.1466807872056961,
"learning_rate": 4.019728729963009e-06,
"loss": 1.0162,
"step": 748
},
{
"epoch": 0.8219478737997257,
"grad_norm": 0.130960613489151,
"learning_rate": 3.995067817509249e-06,
"loss": 1.2093,
"step": 749
},
{
"epoch": 0.823045267489712,
"grad_norm": 0.13577674329280853,
"learning_rate": 3.9704069050554875e-06,
"loss": 1.0382,
"step": 750
},
{
"epoch": 0.8241426611796983,
"grad_norm": 0.12382911890745163,
"learning_rate": 3.9457459926017264e-06,
"loss": 1.2137,
"step": 751
},
{
"epoch": 0.8252400548696845,
"grad_norm": 0.14159835875034332,
"learning_rate": 3.921085080147966e-06,
"loss": 1.1786,
"step": 752
},
{
"epoch": 0.8263374485596707,
"grad_norm": 0.13204412162303925,
"learning_rate": 3.896424167694205e-06,
"loss": 1.0827,
"step": 753
},
{
"epoch": 0.827434842249657,
"grad_norm": 0.14544348418712616,
"learning_rate": 3.871763255240444e-06,
"loss": 1.2168,
"step": 754
},
{
"epoch": 0.8285322359396433,
"grad_norm": 0.13837821781635284,
"learning_rate": 3.847102342786683e-06,
"loss": 1.1306,
"step": 755
},
{
"epoch": 0.8296296296296296,
"grad_norm": 0.13542281091213226,
"learning_rate": 3.822441430332923e-06,
"loss": 1.065,
"step": 756
},
{
"epoch": 0.8307270233196159,
"grad_norm": 0.1563270390033722,
"learning_rate": 3.7977805178791616e-06,
"loss": 1.0928,
"step": 757
},
{
"epoch": 0.8318244170096022,
"grad_norm": 0.1355254054069519,
"learning_rate": 3.773119605425401e-06,
"loss": 1.1382,
"step": 758
},
{
"epoch": 0.8329218106995885,
"grad_norm": 0.14081105589866638,
"learning_rate": 3.7484586929716402e-06,
"loss": 1.0408,
"step": 759
},
{
"epoch": 0.8340192043895748,
"grad_norm": 0.14367350935935974,
"learning_rate": 3.723797780517879e-06,
"loss": 1.12,
"step": 760
},
{
"epoch": 0.8351165980795611,
"grad_norm": 0.13955897092819214,
"learning_rate": 3.699136868064119e-06,
"loss": 1.1122,
"step": 761
},
{
"epoch": 0.8362139917695474,
"grad_norm": 0.13528084754943848,
"learning_rate": 3.674475955610358e-06,
"loss": 1.1463,
"step": 762
},
{
"epoch": 0.8373113854595337,
"grad_norm": 0.13060660660266876,
"learning_rate": 3.649815043156597e-06,
"loss": 1.088,
"step": 763
},
{
"epoch": 0.83840877914952,
"grad_norm": 0.14304772019386292,
"learning_rate": 3.6251541307028365e-06,
"loss": 1.13,
"step": 764
},
{
"epoch": 0.8395061728395061,
"grad_norm": 0.129106804728508,
"learning_rate": 3.6004932182490754e-06,
"loss": 1.0758,
"step": 765
},
{
"epoch": 0.8406035665294924,
"grad_norm": 0.14966481924057007,
"learning_rate": 3.5758323057953147e-06,
"loss": 1.051,
"step": 766
},
{
"epoch": 0.8417009602194787,
"grad_norm": 0.13731549680233002,
"learning_rate": 3.5511713933415536e-06,
"loss": 1.1467,
"step": 767
},
{
"epoch": 0.842798353909465,
"grad_norm": 0.16249963641166687,
"learning_rate": 3.526510480887793e-06,
"loss": 1.1939,
"step": 768
},
{
"epoch": 0.8438957475994513,
"grad_norm": 0.1546361893415451,
"learning_rate": 3.5018495684340327e-06,
"loss": 1.1762,
"step": 769
},
{
"epoch": 0.8449931412894376,
"grad_norm": 0.1352168768644333,
"learning_rate": 3.4771886559802716e-06,
"loss": 1.1351,
"step": 770
},
{
"epoch": 0.8460905349794239,
"grad_norm": 0.13795001804828644,
"learning_rate": 3.452527743526511e-06,
"loss": 1.0621,
"step": 771
},
{
"epoch": 0.8471879286694102,
"grad_norm": 0.13399291038513184,
"learning_rate": 3.42786683107275e-06,
"loss": 1.1674,
"step": 772
},
{
"epoch": 0.8482853223593965,
"grad_norm": 0.1293582171201706,
"learning_rate": 3.403205918618989e-06,
"loss": 1.1216,
"step": 773
},
{
"epoch": 0.8493827160493828,
"grad_norm": 0.13657528162002563,
"learning_rate": 3.3785450061652285e-06,
"loss": 1.1037,
"step": 774
},
{
"epoch": 0.850480109739369,
"grad_norm": 0.14344428479671478,
"learning_rate": 3.3538840937114674e-06,
"loss": 1.1299,
"step": 775
},
{
"epoch": 0.8515775034293552,
"grad_norm": 0.12276988476514816,
"learning_rate": 3.3292231812577068e-06,
"loss": 1.1005,
"step": 776
},
{
"epoch": 0.8526748971193415,
"grad_norm": 0.14884981513023376,
"learning_rate": 3.3045622688039457e-06,
"loss": 1.0838,
"step": 777
},
{
"epoch": 0.8537722908093278,
"grad_norm": 0.1271783709526062,
"learning_rate": 3.2799013563501854e-06,
"loss": 1.0585,
"step": 778
},
{
"epoch": 0.8548696844993141,
"grad_norm": 0.13373488187789917,
"learning_rate": 3.2552404438964248e-06,
"loss": 1.1283,
"step": 779
},
{
"epoch": 0.8559670781893004,
"grad_norm": 0.15838205814361572,
"learning_rate": 3.2305795314426637e-06,
"loss": 1.0838,
"step": 780
},
{
"epoch": 0.8570644718792867,
"grad_norm": 0.13347558677196503,
"learning_rate": 3.205918618988903e-06,
"loss": 1.1981,
"step": 781
},
{
"epoch": 0.858161865569273,
"grad_norm": 0.1298656165599823,
"learning_rate": 3.181257706535142e-06,
"loss": 1.1628,
"step": 782
},
{
"epoch": 0.8592592592592593,
"grad_norm": 0.14071859419345856,
"learning_rate": 3.1565967940813812e-06,
"loss": 1.0938,
"step": 783
},
{
"epoch": 0.8603566529492456,
"grad_norm": 0.12630558013916016,
"learning_rate": 3.1319358816276206e-06,
"loss": 1.1672,
"step": 784
},
{
"epoch": 0.8614540466392319,
"grad_norm": 0.14046333730220795,
"learning_rate": 3.1072749691738595e-06,
"loss": 1.1078,
"step": 785
},
{
"epoch": 0.8625514403292182,
"grad_norm": 0.13367818295955658,
"learning_rate": 3.0826140567200992e-06,
"loss": 1.1097,
"step": 786
},
{
"epoch": 0.8636488340192043,
"grad_norm": 0.14946097135543823,
"learning_rate": 3.057953144266338e-06,
"loss": 1.0131,
"step": 787
},
{
"epoch": 0.8647462277091906,
"grad_norm": 0.1397130787372589,
"learning_rate": 3.0332922318125775e-06,
"loss": 1.1225,
"step": 788
},
{
"epoch": 0.8658436213991769,
"grad_norm": 0.13838127255439758,
"learning_rate": 3.0086313193588164e-06,
"loss": 1.0598,
"step": 789
},
{
"epoch": 0.8669410150891632,
"grad_norm": 0.15007399022579193,
"learning_rate": 2.9839704069050557e-06,
"loss": 1.0897,
"step": 790
},
{
"epoch": 0.8680384087791495,
"grad_norm": 0.1351744681596756,
"learning_rate": 2.959309494451295e-06,
"loss": 1.1067,
"step": 791
},
{
"epoch": 0.8691358024691358,
"grad_norm": 0.1379036158323288,
"learning_rate": 2.934648581997534e-06,
"loss": 1.1377,
"step": 792
},
{
"epoch": 0.8702331961591221,
"grad_norm": 0.13790594041347504,
"learning_rate": 2.9099876695437733e-06,
"loss": 1.1171,
"step": 793
},
{
"epoch": 0.8713305898491084,
"grad_norm": 0.13032451272010803,
"learning_rate": 2.885326757090012e-06,
"loss": 1.1251,
"step": 794
},
{
"epoch": 0.8724279835390947,
"grad_norm": 0.13590891659259796,
"learning_rate": 2.860665844636252e-06,
"loss": 1.162,
"step": 795
},
{
"epoch": 0.873525377229081,
"grad_norm": 0.12618786096572876,
"learning_rate": 2.8360049321824913e-06,
"loss": 1.084,
"step": 796
},
{
"epoch": 0.8746227709190673,
"grad_norm": 0.13335590064525604,
"learning_rate": 2.81134401972873e-06,
"loss": 1.1065,
"step": 797
},
{
"epoch": 0.8757201646090536,
"grad_norm": 0.13176120817661285,
"learning_rate": 2.7866831072749695e-06,
"loss": 1.1029,
"step": 798
},
{
"epoch": 0.8768175582990397,
"grad_norm": 0.1394006758928299,
"learning_rate": 2.7620221948212084e-06,
"loss": 1.0872,
"step": 799
},
{
"epoch": 0.877914951989026,
"grad_norm": 0.16219663619995117,
"learning_rate": 2.7373612823674478e-06,
"loss": 1.1006,
"step": 800
},
{
"epoch": 0.8790123456790123,
"grad_norm": 0.13275958597660065,
"learning_rate": 2.712700369913687e-06,
"loss": 1.1087,
"step": 801
},
{
"epoch": 0.8801097393689986,
"grad_norm": 0.15880566835403442,
"learning_rate": 2.688039457459926e-06,
"loss": 1.0905,
"step": 802
},
{
"epoch": 0.8812071330589849,
"grad_norm": 0.14361722767353058,
"learning_rate": 2.6633785450061657e-06,
"loss": 1.0701,
"step": 803
},
{
"epoch": 0.8823045267489712,
"grad_norm": 0.13469044864177704,
"learning_rate": 2.6387176325524042e-06,
"loss": 1.1495,
"step": 804
},
{
"epoch": 0.8834019204389575,
"grad_norm": 0.16092169284820557,
"learning_rate": 2.614056720098644e-06,
"loss": 1.074,
"step": 805
},
{
"epoch": 0.8844993141289438,
"grad_norm": 0.13376876711845398,
"learning_rate": 2.5893958076448833e-06,
"loss": 1.1405,
"step": 806
},
{
"epoch": 0.8855967078189301,
"grad_norm": 0.1367831826210022,
"learning_rate": 2.5647348951911222e-06,
"loss": 1.0685,
"step": 807
},
{
"epoch": 0.8866941015089164,
"grad_norm": 0.13057412207126617,
"learning_rate": 2.5400739827373616e-06,
"loss": 1.133,
"step": 808
},
{
"epoch": 0.8877914951989027,
"grad_norm": 0.1330074518918991,
"learning_rate": 2.5154130702836005e-06,
"loss": 1.2354,
"step": 809
},
{
"epoch": 0.8888888888888888,
"grad_norm": 0.15305842459201813,
"learning_rate": 2.49075215782984e-06,
"loss": 1.0574,
"step": 810
},
{
"epoch": 0.8899862825788751,
"grad_norm": 0.13910268247127533,
"learning_rate": 2.466091245376079e-06,
"loss": 1.0733,
"step": 811
},
{
"epoch": 0.8910836762688614,
"grad_norm": 0.13843494653701782,
"learning_rate": 2.441430332922318e-06,
"loss": 1.0353,
"step": 812
},
{
"epoch": 0.8921810699588477,
"grad_norm": 0.14887547492980957,
"learning_rate": 2.416769420468558e-06,
"loss": 0.9972,
"step": 813
},
{
"epoch": 0.893278463648834,
"grad_norm": 0.13981156051158905,
"learning_rate": 2.3921085080147967e-06,
"loss": 1.2125,
"step": 814
},
{
"epoch": 0.8943758573388203,
"grad_norm": 0.13580431044101715,
"learning_rate": 2.367447595561036e-06,
"loss": 1.2606,
"step": 815
},
{
"epoch": 0.8954732510288066,
"grad_norm": 0.14896319806575775,
"learning_rate": 2.342786683107275e-06,
"loss": 1.1067,
"step": 816
},
{
"epoch": 0.8965706447187929,
"grad_norm": 0.1296452283859253,
"learning_rate": 2.3181257706535143e-06,
"loss": 1.0699,
"step": 817
},
{
"epoch": 0.8976680384087792,
"grad_norm": 0.145808607339859,
"learning_rate": 2.2934648581997536e-06,
"loss": 1.1611,
"step": 818
},
{
"epoch": 0.8987654320987655,
"grad_norm": 0.13980808854103088,
"learning_rate": 2.268803945745993e-06,
"loss": 1.0291,
"step": 819
},
{
"epoch": 0.8998628257887518,
"grad_norm": 0.13322117924690247,
"learning_rate": 2.244143033292232e-06,
"loss": 1.0608,
"step": 820
},
{
"epoch": 0.900960219478738,
"grad_norm": 0.1385853886604309,
"learning_rate": 2.219482120838471e-06,
"loss": 1.0595,
"step": 821
},
{
"epoch": 0.9020576131687242,
"grad_norm": 0.16439485549926758,
"learning_rate": 2.1948212083847105e-06,
"loss": 1.031,
"step": 822
},
{
"epoch": 0.9031550068587105,
"grad_norm": 0.12988966703414917,
"learning_rate": 2.1701602959309494e-06,
"loss": 1.0905,
"step": 823
},
{
"epoch": 0.9042524005486968,
"grad_norm": 0.13069093227386475,
"learning_rate": 2.1454993834771887e-06,
"loss": 1.1536,
"step": 824
},
{
"epoch": 0.9053497942386831,
"grad_norm": 0.13863211870193481,
"learning_rate": 2.120838471023428e-06,
"loss": 1.1898,
"step": 825
},
{
"epoch": 0.9064471879286694,
"grad_norm": 0.14132994413375854,
"learning_rate": 2.0961775585696674e-06,
"loss": 1.1759,
"step": 826
},
{
"epoch": 0.9075445816186557,
"grad_norm": 0.14824488759040833,
"learning_rate": 2.0715166461159063e-06,
"loss": 1.0744,
"step": 827
},
{
"epoch": 0.908641975308642,
"grad_norm": 0.1388639658689499,
"learning_rate": 2.0468557336621456e-06,
"loss": 1.0687,
"step": 828
},
{
"epoch": 0.9097393689986283,
"grad_norm": 0.14056843519210815,
"learning_rate": 2.022194821208385e-06,
"loss": 1.1299,
"step": 829
},
{
"epoch": 0.9108367626886146,
"grad_norm": 0.1364564299583435,
"learning_rate": 1.9975339087546243e-06,
"loss": 1.1216,
"step": 830
},
{
"epoch": 0.9119341563786009,
"grad_norm": 0.14670343697071075,
"learning_rate": 1.9728729963008632e-06,
"loss": 1.0785,
"step": 831
},
{
"epoch": 0.9130315500685872,
"grad_norm": 0.13665646314620972,
"learning_rate": 1.9482120838471025e-06,
"loss": 1.0355,
"step": 832
},
{
"epoch": 0.9141289437585733,
"grad_norm": 0.1377921998500824,
"learning_rate": 1.9235511713933415e-06,
"loss": 1.1445,
"step": 833
},
{
"epoch": 0.9152263374485596,
"grad_norm": 0.12789370119571686,
"learning_rate": 1.8988902589395808e-06,
"loss": 1.1242,
"step": 834
},
{
"epoch": 0.9163237311385459,
"grad_norm": 0.12622785568237305,
"learning_rate": 1.8742293464858201e-06,
"loss": 1.1358,
"step": 835
},
{
"epoch": 0.9174211248285322,
"grad_norm": 0.14954856038093567,
"learning_rate": 1.8495684340320595e-06,
"loss": 1.0822,
"step": 836
},
{
"epoch": 0.9185185185185185,
"grad_norm": 0.12256734073162079,
"learning_rate": 1.8249075215782986e-06,
"loss": 1.0968,
"step": 837
},
{
"epoch": 0.9196159122085048,
"grad_norm": 0.14016473293304443,
"learning_rate": 1.8002466091245377e-06,
"loss": 1.0905,
"step": 838
},
{
"epoch": 0.9207133058984911,
"grad_norm": 0.14381630718708038,
"learning_rate": 1.7755856966707768e-06,
"loss": 1.1512,
"step": 839
},
{
"epoch": 0.9218106995884774,
"grad_norm": 0.15085048973560333,
"learning_rate": 1.7509247842170164e-06,
"loss": 1.109,
"step": 840
},
{
"epoch": 0.9229080932784637,
"grad_norm": 0.1343962699174881,
"learning_rate": 1.7262638717632555e-06,
"loss": 1.1442,
"step": 841
},
{
"epoch": 0.92400548696845,
"grad_norm": 0.12811584770679474,
"learning_rate": 1.7016029593094946e-06,
"loss": 1.2034,
"step": 842
},
{
"epoch": 0.9251028806584363,
"grad_norm": 0.15649715065956116,
"learning_rate": 1.6769420468557337e-06,
"loss": 1.0986,
"step": 843
},
{
"epoch": 0.9262002743484224,
"grad_norm": 0.13506309688091278,
"learning_rate": 1.6522811344019728e-06,
"loss": 1.0733,
"step": 844
},
{
"epoch": 0.9272976680384087,
"grad_norm": 0.13824434578418732,
"learning_rate": 1.6276202219482124e-06,
"loss": 1.0783,
"step": 845
},
{
"epoch": 0.928395061728395,
"grad_norm": 0.14913596212863922,
"learning_rate": 1.6029593094944515e-06,
"loss": 1.1579,
"step": 846
},
{
"epoch": 0.9294924554183813,
"grad_norm": 0.14216169714927673,
"learning_rate": 1.5782983970406906e-06,
"loss": 1.2532,
"step": 847
},
{
"epoch": 0.9305898491083676,
"grad_norm": 0.12944941222667694,
"learning_rate": 1.5536374845869297e-06,
"loss": 1.1187,
"step": 848
},
{
"epoch": 0.9316872427983539,
"grad_norm": 0.15228183567523956,
"learning_rate": 1.528976572133169e-06,
"loss": 1.0625,
"step": 849
},
{
"epoch": 0.9327846364883402,
"grad_norm": 0.1437804400920868,
"learning_rate": 1.5043156596794082e-06,
"loss": 1.0785,
"step": 850
},
{
"epoch": 0.9338820301783265,
"grad_norm": 0.13705220818519592,
"learning_rate": 1.4796547472256475e-06,
"loss": 1.1105,
"step": 851
},
{
"epoch": 0.9349794238683128,
"grad_norm": 0.13792921602725983,
"learning_rate": 1.4549938347718866e-06,
"loss": 1.0987,
"step": 852
},
{
"epoch": 0.9360768175582991,
"grad_norm": 0.12909933924674988,
"learning_rate": 1.430332922318126e-06,
"loss": 1.0932,
"step": 853
},
{
"epoch": 0.9371742112482854,
"grad_norm": 0.13667872548103333,
"learning_rate": 1.405672009864365e-06,
"loss": 1.1542,
"step": 854
},
{
"epoch": 0.9382716049382716,
"grad_norm": 0.13207869231700897,
"learning_rate": 1.3810110974106042e-06,
"loss": 1.1891,
"step": 855
},
{
"epoch": 0.9393689986282578,
"grad_norm": 0.12875591218471527,
"learning_rate": 1.3563501849568435e-06,
"loss": 1.0372,
"step": 856
},
{
"epoch": 0.9404663923182441,
"grad_norm": 0.12986066937446594,
"learning_rate": 1.3316892725030829e-06,
"loss": 1.1259,
"step": 857
},
{
"epoch": 0.9415637860082304,
"grad_norm": 0.13095685839653015,
"learning_rate": 1.307028360049322e-06,
"loss": 1.0452,
"step": 858
},
{
"epoch": 0.9426611796982167,
"grad_norm": 0.13577769696712494,
"learning_rate": 1.2823674475955611e-06,
"loss": 1.0942,
"step": 859
},
{
"epoch": 0.943758573388203,
"grad_norm": 0.136579230427742,
"learning_rate": 1.2577065351418002e-06,
"loss": 1.0996,
"step": 860
},
{
"epoch": 0.9448559670781893,
"grad_norm": 0.14068828523159027,
"learning_rate": 1.2330456226880396e-06,
"loss": 1.0527,
"step": 861
},
{
"epoch": 0.9459533607681756,
"grad_norm": 0.14738546311855316,
"learning_rate": 1.208384710234279e-06,
"loss": 1.126,
"step": 862
},
{
"epoch": 0.9470507544581619,
"grad_norm": 0.13700617849826813,
"learning_rate": 1.183723797780518e-06,
"loss": 1.1714,
"step": 863
},
{
"epoch": 0.9481481481481482,
"grad_norm": 0.13408678770065308,
"learning_rate": 1.1590628853267571e-06,
"loss": 1.1135,
"step": 864
},
{
"epoch": 0.9492455418381345,
"grad_norm": 0.1358562856912613,
"learning_rate": 1.1344019728729965e-06,
"loss": 1.0761,
"step": 865
},
{
"epoch": 0.9503429355281207,
"grad_norm": 0.15928462147712708,
"learning_rate": 1.1097410604192356e-06,
"loss": 1.0841,
"step": 866
},
{
"epoch": 0.951440329218107,
"grad_norm": 0.14678318798542023,
"learning_rate": 1.0850801479654747e-06,
"loss": 1.0772,
"step": 867
},
{
"epoch": 0.9525377229080932,
"grad_norm": 0.12827463448047638,
"learning_rate": 1.060419235511714e-06,
"loss": 1.1324,
"step": 868
},
{
"epoch": 0.9536351165980795,
"grad_norm": 0.14437751471996307,
"learning_rate": 1.0357583230579532e-06,
"loss": 1.2083,
"step": 869
},
{
"epoch": 0.9547325102880658,
"grad_norm": 0.14573253691196442,
"learning_rate": 1.0110974106041925e-06,
"loss": 1.0288,
"step": 870
},
{
"epoch": 0.9558299039780521,
"grad_norm": 0.148457869887352,
"learning_rate": 9.864364981504316e-07,
"loss": 1.1433,
"step": 871
},
{
"epoch": 0.9569272976680384,
"grad_norm": 0.1432981789112091,
"learning_rate": 9.617755856966707e-07,
"loss": 1.1222,
"step": 872
},
{
"epoch": 0.9580246913580247,
"grad_norm": 0.14053548872470856,
"learning_rate": 9.371146732429101e-07,
"loss": 1.1193,
"step": 873
},
{
"epoch": 0.959122085048011,
"grad_norm": 0.13648538291454315,
"learning_rate": 9.124537607891493e-07,
"loss": 1.1411,
"step": 874
},
{
"epoch": 0.9602194787379973,
"grad_norm": 0.15301236510276794,
"learning_rate": 8.877928483353884e-07,
"loss": 1.1395,
"step": 875
},
{
"epoch": 0.9613168724279836,
"grad_norm": 0.1504426896572113,
"learning_rate": 8.631319358816277e-07,
"loss": 1.1447,
"step": 876
},
{
"epoch": 0.9624142661179699,
"grad_norm": 0.15618982911109924,
"learning_rate": 8.384710234278669e-07,
"loss": 1.1235,
"step": 877
},
{
"epoch": 0.9635116598079561,
"grad_norm": 0.1426519900560379,
"learning_rate": 8.138101109741062e-07,
"loss": 1.1242,
"step": 878
},
{
"epoch": 0.9646090534979423,
"grad_norm": 0.1583835482597351,
"learning_rate": 7.891491985203453e-07,
"loss": 1.0598,
"step": 879
},
{
"epoch": 0.9657064471879286,
"grad_norm": 0.14297567307949066,
"learning_rate": 7.644882860665845e-07,
"loss": 1.0291,
"step": 880
},
{
"epoch": 0.9668038408779149,
"grad_norm": 0.14067769050598145,
"learning_rate": 7.398273736128238e-07,
"loss": 1.1746,
"step": 881
},
{
"epoch": 0.9679012345679012,
"grad_norm": 0.1404830813407898,
"learning_rate": 7.15166461159063e-07,
"loss": 1.1294,
"step": 882
},
{
"epoch": 0.9689986282578875,
"grad_norm": 0.1324174553155899,
"learning_rate": 6.905055487053021e-07,
"loss": 1.1089,
"step": 883
},
{
"epoch": 0.9700960219478738,
"grad_norm": 0.13488321006298065,
"learning_rate": 6.658446362515414e-07,
"loss": 1.1228,
"step": 884
},
{
"epoch": 0.9711934156378601,
"grad_norm": 0.13472045958042145,
"learning_rate": 6.411837237977806e-07,
"loss": 1.0933,
"step": 885
},
{
"epoch": 0.9722908093278464,
"grad_norm": 0.14468832314014435,
"learning_rate": 6.165228113440198e-07,
"loss": 1.0767,
"step": 886
},
{
"epoch": 0.9733882030178327,
"grad_norm": 0.13993267714977264,
"learning_rate": 5.91861898890259e-07,
"loss": 1.1123,
"step": 887
}
],
"logging_steps": 1,
"max_steps": 911,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 9.202552775203185e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}