{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 875,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 8.812040662498683,
"learning_rate": 3.7037037037037036e-07,
"loss": 0.6324,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 8.231021188121307,
"learning_rate": 7.407407407407407e-07,
"loss": 0.5088,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 10.084697585340875,
"learning_rate": 1.111111111111111e-06,
"loss": 0.5952,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 8.504686522060355,
"learning_rate": 1.4814814814814815e-06,
"loss": 0.5506,
"step": 4
},
{
"epoch": 0.01,
"grad_norm": 7.78998673730679,
"learning_rate": 1.8518518518518519e-06,
"loss": 0.5389,
"step": 5
},
{
"epoch": 0.01,
"grad_norm": 7.564344020278276,
"learning_rate": 2.222222222222222e-06,
"loss": 0.4419,
"step": 6
},
{
"epoch": 0.01,
"grad_norm": 6.335657208121732,
"learning_rate": 2.5925925925925925e-06,
"loss": 0.4503,
"step": 7
},
{
"epoch": 0.01,
"grad_norm": 5.736176328651262,
"learning_rate": 2.962962962962963e-06,
"loss": 0.4152,
"step": 8
},
{
"epoch": 0.01,
"grad_norm": 6.646697141146249,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.4766,
"step": 9
},
{
"epoch": 0.01,
"grad_norm": 7.271376765925272,
"learning_rate": 3.7037037037037037e-06,
"loss": 0.3847,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 11.816959033855433,
"learning_rate": 4.074074074074074e-06,
"loss": 0.3234,
"step": 11
},
{
"epoch": 0.01,
"grad_norm": 17.376716600598993,
"learning_rate": 4.444444444444444e-06,
"loss": 0.2575,
"step": 12
},
{
"epoch": 0.01,
"grad_norm": 13.166527782235265,
"learning_rate": 4.814814814814815e-06,
"loss": 0.3037,
"step": 13
},
{
"epoch": 0.02,
"grad_norm": 8.131001484797347,
"learning_rate": 5.185185185185185e-06,
"loss": 0.1747,
"step": 14
},
{
"epoch": 0.02,
"grad_norm": 9.174937982191583,
"learning_rate": 5.555555555555557e-06,
"loss": 0.2399,
"step": 15
},
{
"epoch": 0.02,
"grad_norm": 8.592965454381307,
"learning_rate": 5.925925925925926e-06,
"loss": 0.1949,
"step": 16
},
{
"epoch": 0.02,
"grad_norm": 6.872465363012901,
"learning_rate": 6.296296296296297e-06,
"loss": 0.1805,
"step": 17
},
{
"epoch": 0.02,
"grad_norm": 5.792592320141116,
"learning_rate": 6.666666666666667e-06,
"loss": 0.1909,
"step": 18
},
{
"epoch": 0.02,
"grad_norm": 2.288697729735019,
"learning_rate": 7.0370370370370375e-06,
"loss": 0.1502,
"step": 19
},
{
"epoch": 0.02,
"grad_norm": 1.6947246687987911,
"learning_rate": 7.4074074074074075e-06,
"loss": 0.1174,
"step": 20
},
{
"epoch": 0.02,
"grad_norm": 5.72465565098873,
"learning_rate": 7.77777777777778e-06,
"loss": 0.1739,
"step": 21
},
{
"epoch": 0.03,
"grad_norm": 5.765453075575737,
"learning_rate": 8.148148148148148e-06,
"loss": 0.1894,
"step": 22
},
{
"epoch": 0.03,
"grad_norm": 3.2240061796287334,
"learning_rate": 8.518518518518519e-06,
"loss": 0.1376,
"step": 23
},
{
"epoch": 0.03,
"grad_norm": 5.983671039035109,
"learning_rate": 8.888888888888888e-06,
"loss": 0.1445,
"step": 24
},
{
"epoch": 0.03,
"grad_norm": 6.056090509557002,
"learning_rate": 9.25925925925926e-06,
"loss": 0.1628,
"step": 25
},
{
"epoch": 0.03,
"grad_norm": 3.667505927161921,
"learning_rate": 9.62962962962963e-06,
"loss": 0.1411,
"step": 26
},
{
"epoch": 0.03,
"grad_norm": 9.96893801537625,
"learning_rate": 1e-05,
"loss": 0.1942,
"step": 27
},
{
"epoch": 0.03,
"grad_norm": 3.081996759418418,
"learning_rate": 9.999965687879942e-06,
"loss": 0.1401,
"step": 28
},
{
"epoch": 0.03,
"grad_norm": 3.992334648293423,
"learning_rate": 9.999862751990697e-06,
"loss": 0.1495,
"step": 29
},
{
"epoch": 0.03,
"grad_norm": 3.446786827543344,
"learning_rate": 9.999691193745045e-06,
"loss": 0.1356,
"step": 30
},
{
"epoch": 0.04,
"grad_norm": 2.452927122228086,
"learning_rate": 9.999451015497595e-06,
"loss": 0.1576,
"step": 31
},
{
"epoch": 0.04,
"grad_norm": 4.8167704565956715,
"learning_rate": 9.999142220544759e-06,
"loss": 0.1237,
"step": 32
},
{
"epoch": 0.04,
"grad_norm": 4.128302969077741,
"learning_rate": 9.9987648131247e-06,
"loss": 0.124,
"step": 33
},
{
"epoch": 0.04,
"grad_norm": 3.7257757203533584,
"learning_rate": 9.998318798417276e-06,
"loss": 0.1622,
"step": 34
},
{
"epoch": 0.04,
"grad_norm": 4.811332863461092,
"learning_rate": 9.997804182543973e-06,
"loss": 0.1373,
"step": 35
},
{
"epoch": 0.04,
"grad_norm": 6.006945777469291,
"learning_rate": 9.997220972567815e-06,
"loss": 0.1617,
"step": 36
},
{
"epoch": 0.04,
"grad_norm": 5.2290233962608275,
"learning_rate": 9.996569176493269e-06,
"loss": 0.1317,
"step": 37
},
{
"epoch": 0.04,
"grad_norm": 4.887606275239368,
"learning_rate": 9.995848803266141e-06,
"loss": 0.1559,
"step": 38
},
{
"epoch": 0.04,
"grad_norm": 2.649617040738647,
"learning_rate": 9.99505986277344e-06,
"loss": 0.1359,
"step": 39
},
{
"epoch": 0.05,
"grad_norm": 3.6342992270813492,
"learning_rate": 9.994202365843256e-06,
"loss": 0.1444,
"step": 40
},
{
"epoch": 0.05,
"grad_norm": 2.977502599851725,
"learning_rate": 9.993276324244606e-06,
"loss": 0.1367,
"step": 41
},
{
"epoch": 0.05,
"grad_norm": 3.007957356864356,
"learning_rate": 9.992281750687265e-06,
"loss": 0.1297,
"step": 42
},
{
"epoch": 0.05,
"grad_norm": 1.8406129718892825,
"learning_rate": 9.991218658821609e-06,
"loss": 0.129,
"step": 43
},
{
"epoch": 0.05,
"grad_norm": 2.0044983411925275,
"learning_rate": 9.990087063238408e-06,
"loss": 0.107,
"step": 44
},
{
"epoch": 0.05,
"grad_norm": 1.8795658720176402,
"learning_rate": 9.988886979468644e-06,
"loss": 0.1121,
"step": 45
},
{
"epoch": 0.05,
"grad_norm": 2.356084791458523,
"learning_rate": 9.98761842398328e-06,
"loss": 0.0981,
"step": 46
},
{
"epoch": 0.05,
"grad_norm": 4.534924570867148,
"learning_rate": 9.98628141419305e-06,
"loss": 0.1794,
"step": 47
},
{
"epoch": 0.05,
"grad_norm": 2.3579321421491866,
"learning_rate": 9.984875968448212e-06,
"loss": 0.108,
"step": 48
},
{
"epoch": 0.06,
"grad_norm": 2.2510325224717254,
"learning_rate": 9.983402106038292e-06,
"loss": 0.1117,
"step": 49
},
{
"epoch": 0.06,
"grad_norm": 2.4788953336363653,
"learning_rate": 9.981859847191827e-06,
"loss": 0.1282,
"step": 50
},
{
"epoch": 0.06,
"grad_norm": 1.8906655279037572,
"learning_rate": 9.980249213076085e-06,
"loss": 0.1208,
"step": 51
},
{
"epoch": 0.06,
"grad_norm": 3.1073556178878188,
"learning_rate": 9.978570225796777e-06,
"loss": 0.1644,
"step": 52
},
{
"epoch": 0.06,
"grad_norm": 1.9452742167713701,
"learning_rate": 9.97682290839775e-06,
"loss": 0.1285,
"step": 53
},
{
"epoch": 0.06,
"grad_norm": 2.670121793189728,
"learning_rate": 9.975007284860664e-06,
"loss": 0.142,
"step": 54
},
{
"epoch": 0.06,
"grad_norm": 1.8244790066486611,
"learning_rate": 9.97312338010468e-06,
"loss": 0.1182,
"step": 55
},
{
"epoch": 0.06,
"grad_norm": 1.1114450183499889,
"learning_rate": 9.971171219986104e-06,
"loss": 0.1013,
"step": 56
},
{
"epoch": 0.07,
"grad_norm": 2.8735563274335463,
"learning_rate": 9.969150831298037e-06,
"loss": 0.1351,
"step": 57
},
{
"epoch": 0.07,
"grad_norm": 2.222351118905723,
"learning_rate": 9.967062241770008e-06,
"loss": 0.1122,
"step": 58
},
{
"epoch": 0.07,
"grad_norm": 1.7829363422208375,
"learning_rate": 9.964905480067585e-06,
"loss": 0.1299,
"step": 59
},
{
"epoch": 0.07,
"grad_norm": 2.187968659410326,
"learning_rate": 9.962680575792e-06,
"loss": 0.0957,
"step": 60
},
{
"epoch": 0.07,
"grad_norm": 2.846988563675764,
"learning_rate": 9.960387559479727e-06,
"loss": 0.1187,
"step": 61
},
{
"epoch": 0.07,
"grad_norm": 1.643896260993509,
"learning_rate": 9.958026462602062e-06,
"loss": 0.1086,
"step": 62
},
{
"epoch": 0.07,
"grad_norm": 2.3501454089832894,
"learning_rate": 9.955597317564705e-06,
"loss": 0.1181,
"step": 63
},
{
"epoch": 0.07,
"grad_norm": 2.3392801570363164,
"learning_rate": 9.953100157707299e-06,
"loss": 0.1283,
"step": 64
},
{
"epoch": 0.07,
"grad_norm": 2.215077177644609,
"learning_rate": 9.950535017302984e-06,
"loss": 0.1194,
"step": 65
},
{
"epoch": 0.08,
"grad_norm": 2.298364445576207,
"learning_rate": 9.947901931557924e-06,
"loss": 0.1317,
"step": 66
},
{
"epoch": 0.08,
"grad_norm": 2.8138148663788862,
"learning_rate": 9.945200936610821e-06,
"loss": 0.1241,
"step": 67
},
{
"epoch": 0.08,
"grad_norm": 2.5107542165471934,
"learning_rate": 9.942432069532417e-06,
"loss": 0.1021,
"step": 68
},
{
"epoch": 0.08,
"grad_norm": 2.5847605764691766,
"learning_rate": 9.939595368324996e-06,
"loss": 0.1432,
"step": 69
},
{
"epoch": 0.08,
"grad_norm": 2.534356782579585,
"learning_rate": 9.936690871921849e-06,
"loss": 0.1407,
"step": 70
},
{
"epoch": 0.08,
"grad_norm": 2.2605138628608623,
"learning_rate": 9.933718620186745e-06,
"loss": 0.1323,
"step": 71
},
{
"epoch": 0.08,
"grad_norm": 4.239825098653746,
"learning_rate": 9.930678653913392e-06,
"loss": 0.1329,
"step": 72
},
{
"epoch": 0.08,
"grad_norm": 3.4950058767667134,
"learning_rate": 9.927571014824862e-06,
"loss": 0.1259,
"step": 73
},
{
"epoch": 0.08,
"grad_norm": 2.538742560460438,
"learning_rate": 9.924395745573029e-06,
"loss": 0.1276,
"step": 74
},
{
"epoch": 0.09,
"grad_norm": 2.8351129913937987,
"learning_rate": 9.921152889737985e-06,
"loss": 0.1333,
"step": 75
},
{
"epoch": 0.09,
"grad_norm": 3.6729191949488227,
"learning_rate": 9.91784249182743e-06,
"loss": 0.1209,
"step": 76
},
{
"epoch": 0.09,
"grad_norm": 3.5760153753569086,
"learning_rate": 9.914464597276072e-06,
"loss": 0.1443,
"step": 77
},
{
"epoch": 0.09,
"grad_norm": 2.057116589062098,
"learning_rate": 9.911019252445e-06,
"loss": 0.1082,
"step": 78
},
{
"epoch": 0.09,
"grad_norm": 2.6837432634831067,
"learning_rate": 9.907506504621052e-06,
"loss": 0.1133,
"step": 79
},
{
"epoch": 0.09,
"grad_norm": 2.118680599811244,
"learning_rate": 9.903926402016153e-06,
"loss": 0.1193,
"step": 80
},
{
"epoch": 0.09,
"grad_norm": 3.1410450154804663,
"learning_rate": 9.90027899376667e-06,
"loss": 0.1212,
"step": 81
},
{
"epoch": 0.09,
"grad_norm": 5.110344600127584,
"learning_rate": 9.896564329932727e-06,
"loss": 0.1318,
"step": 82
},
{
"epoch": 0.09,
"grad_norm": 2.33027322583564,
"learning_rate": 9.892782461497521e-06,
"loss": 0.1054,
"step": 83
},
{
"epoch": 0.1,
"grad_norm": 1.9832010503747017,
"learning_rate": 9.88893344036662e-06,
"loss": 0.1113,
"step": 84
},
{
"epoch": 0.1,
"grad_norm": 2.6705177259808988,
"learning_rate": 9.885017319367253e-06,
"loss": 0.1181,
"step": 85
},
{
"epoch": 0.1,
"grad_norm": 1.6879356709331421,
"learning_rate": 9.88103415224759e-06,
"loss": 0.0843,
"step": 86
},
{
"epoch": 0.1,
"grad_norm": 1.3641501934361187,
"learning_rate": 9.87698399367599e-06,
"loss": 0.0985,
"step": 87
},
{
"epoch": 0.1,
"grad_norm": 2.0451464275509053,
"learning_rate": 9.872866899240265e-06,
"loss": 0.1057,
"step": 88
},
{
"epoch": 0.1,
"grad_norm": 2.2974724008225484,
"learning_rate": 9.86868292544691e-06,
"loss": 0.1166,
"step": 89
},
{
"epoch": 0.1,
"grad_norm": 2.1974043469355458,
"learning_rate": 9.864432129720332e-06,
"loss": 0.1007,
"step": 90
},
{
"epoch": 0.1,
"grad_norm": 1.5880779234771585,
"learning_rate": 9.860114570402055e-06,
"loss": 0.1059,
"step": 91
},
{
"epoch": 0.11,
"grad_norm": 1.6210019246715581,
"learning_rate": 9.855730306749924e-06,
"loss": 0.0955,
"step": 92
},
{
"epoch": 0.11,
"grad_norm": 2.053943766699023,
"learning_rate": 9.85127939893729e-06,
"loss": 0.1223,
"step": 93
},
{
"epoch": 0.11,
"grad_norm": 1.5230542823178097,
"learning_rate": 9.846761908052188e-06,
"loss": 0.0969,
"step": 94
},
{
"epoch": 0.11,
"grad_norm": 1.9898055550048976,
"learning_rate": 9.842177896096495e-06,
"loss": 0.1359,
"step": 95
},
{
"epoch": 0.11,
"grad_norm": 1.5219298537997954,
"learning_rate": 9.837527425985074e-06,
"loss": 0.1049,
"step": 96
},
{
"epoch": 0.11,
"grad_norm": 1.5694384883915047,
"learning_rate": 9.832810561544925e-06,
"loss": 0.0885,
"step": 97
},
{
"epoch": 0.11,
"grad_norm": 1.8382074625592275,
"learning_rate": 9.828027367514296e-06,
"loss": 0.0789,
"step": 98
},
{
"epoch": 0.11,
"grad_norm": 3.094156783497333,
"learning_rate": 9.823177909541795e-06,
"loss": 0.125,
"step": 99
},
{
"epoch": 0.11,
"grad_norm": 2.1177801350294474,
"learning_rate": 9.818262254185495e-06,
"loss": 0.1286,
"step": 100
},
{
"epoch": 0.12,
"grad_norm": 2.051759324600107,
"learning_rate": 9.813280468912024e-06,
"loss": 0.0867,
"step": 101
},
{
"epoch": 0.12,
"grad_norm": 2.309052314502647,
"learning_rate": 9.808232622095621e-06,
"loss": 0.0836,
"step": 102
},
{
"epoch": 0.12,
"grad_norm": 1.9691513403830945,
"learning_rate": 9.803118783017221e-06,
"loss": 0.1088,
"step": 103
},
{
"epoch": 0.12,
"grad_norm": 1.8816504929896276,
"learning_rate": 9.797939021863487e-06,
"loss": 0.1263,
"step": 104
},
{
"epoch": 0.12,
"grad_norm": 3.5814461055271196,
"learning_rate": 9.792693409725853e-06,
"loss": 0.1266,
"step": 105
},
{
"epoch": 0.12,
"grad_norm": 2.804380821700463,
"learning_rate": 9.78738201859955e-06,
"loss": 0.1548,
"step": 106
},
{
"epoch": 0.12,
"grad_norm": 2.0086077809415332,
"learning_rate": 9.782004921382612e-06,
"loss": 0.1312,
"step": 107
},
{
"epoch": 0.12,
"grad_norm": 1.7232244244365362,
"learning_rate": 9.776562191874884e-06,
"loss": 0.1197,
"step": 108
},
{
"epoch": 0.12,
"grad_norm": 1.3610013356293815,
"learning_rate": 9.771053904776998e-06,
"loss": 0.1139,
"step": 109
},
{
"epoch": 0.13,
"grad_norm": 3.0085151960644643,
"learning_rate": 9.765480135689357e-06,
"loss": 0.1049,
"step": 110
},
{
"epoch": 0.13,
"grad_norm": 2.94972805872663,
"learning_rate": 9.759840961111098e-06,
"loss": 0.0992,
"step": 111
},
{
"epoch": 0.13,
"grad_norm": 1.2797796597040898,
"learning_rate": 9.754136458439033e-06,
"loss": 0.0779,
"step": 112
},
{
"epoch": 0.13,
"grad_norm": 2.080098377998089,
"learning_rate": 9.748366705966595e-06,
"loss": 0.1192,
"step": 113
},
{
"epoch": 0.13,
"grad_norm": 3.026465124623598,
"learning_rate": 9.742531782882758e-06,
"loss": 0.1248,
"step": 114
},
{
"epoch": 0.13,
"grad_norm": 3.419083049509824,
"learning_rate": 9.736631769270958e-06,
"loss": 0.1284,
"step": 115
},
{
"epoch": 0.13,
"grad_norm": 1.9998224613269435,
"learning_rate": 9.730666746107982e-06,
"loss": 0.1106,
"step": 116
},
{
"epoch": 0.13,
"grad_norm": 1.7572924951035829,
"learning_rate": 9.724636795262868e-06,
"loss": 0.0874,
"step": 117
},
{
"epoch": 0.13,
"grad_norm": 2.448977492181066,
"learning_rate": 9.718541999495773e-06,
"loss": 0.0823,
"step": 118
},
{
"epoch": 0.14,
"grad_norm": 4.110748776288409,
"learning_rate": 9.712382442456845e-06,
"loss": 0.1604,
"step": 119
},
{
"epoch": 0.14,
"grad_norm": 2.09981079064145,
"learning_rate": 9.706158208685066e-06,
"loss": 0.0911,
"step": 120
},
{
"epoch": 0.14,
"grad_norm": 1.9370008400745906,
"learning_rate": 9.6998693836071e-06,
"loss": 0.1122,
"step": 121
},
{
"epoch": 0.14,
"grad_norm": 1.9316495646897083,
"learning_rate": 9.693516053536118e-06,
"loss": 0.0807,
"step": 122
},
{
"epoch": 0.14,
"grad_norm": 3.16334665341287,
"learning_rate": 9.687098305670606e-06,
"loss": 0.1276,
"step": 123
},
{
"epoch": 0.14,
"grad_norm": 2.341234757750837,
"learning_rate": 9.680616228093178e-06,
"loss": 0.1187,
"step": 124
},
{
"epoch": 0.14,
"grad_norm": 2.223298454459867,
"learning_rate": 9.674069909769365e-06,
"loss": 0.1045,
"step": 125
},
{
"epoch": 0.14,
"grad_norm": 3.4945449518796585,
"learning_rate": 9.66745944054639e-06,
"loss": 0.12,
"step": 126
},
{
"epoch": 0.15,
"grad_norm": 4.143984048508036,
"learning_rate": 9.66078491115194e-06,
"loss": 0.1013,
"step": 127
},
{
"epoch": 0.15,
"grad_norm": 2.7705806851123493,
"learning_rate": 9.654046413192916e-06,
"loss": 0.1123,
"step": 128
},
{
"epoch": 0.15,
"grad_norm": 1.2313407498327578,
"learning_rate": 9.647244039154178e-06,
"loss": 0.0846,
"step": 129
},
{
"epoch": 0.15,
"grad_norm": 1.7722835766792302,
"learning_rate": 9.640377882397276e-06,
"loss": 0.0965,
"step": 130
},
{
"epoch": 0.15,
"grad_norm": 3.4978175367401065,
"learning_rate": 9.633448037159167e-06,
"loss": 0.1218,
"step": 131
},
{
"epoch": 0.15,
"grad_norm": 3.9772864819418463,
"learning_rate": 9.626454598550927e-06,
"loss": 0.0919,
"step": 132
},
{
"epoch": 0.15,
"grad_norm": 1.9729154551618289,
"learning_rate": 9.619397662556434e-06,
"loss": 0.1104,
"step": 133
},
{
"epoch": 0.15,
"grad_norm": 1.9749670072044205,
"learning_rate": 9.612277326031065e-06,
"loss": 0.0879,
"step": 134
},
{
"epoch": 0.15,
"grad_norm": 2.46650550465165,
"learning_rate": 9.605093686700356e-06,
"loss": 0.1151,
"step": 135
},
{
"epoch": 0.16,
"grad_norm": 2.1700672021295397,
"learning_rate": 9.597846843158663e-06,
"loss": 0.1192,
"step": 136
},
{
"epoch": 0.16,
"grad_norm": 1.5484117589367528,
"learning_rate": 9.590536894867814e-06,
"loss": 0.1093,
"step": 137
},
{
"epoch": 0.16,
"grad_norm": 3.159315025435744,
"learning_rate": 9.58316394215574e-06,
"loss": 0.1246,
"step": 138
},
{
"epoch": 0.16,
"grad_norm": 2.3062290760264044,
"learning_rate": 9.575728086215093e-06,
"loss": 0.1044,
"step": 139
},
{
"epoch": 0.16,
"grad_norm": 2.1924424597130647,
"learning_rate": 9.568229429101867e-06,
"loss": 0.1018,
"step": 140
},
{
"epoch": 0.16,
"grad_norm": 1.4292889067167893,
"learning_rate": 9.560668073733993e-06,
"loss": 0.1102,
"step": 141
},
{
"epoch": 0.16,
"grad_norm": 1.8452890182403634,
"learning_rate": 9.553044123889922e-06,
"loss": 0.0851,
"step": 142
},
{
"epoch": 0.16,
"grad_norm": 1.686312012130693,
"learning_rate": 9.54535768420721e-06,
"loss": 0.0877,
"step": 143
},
{
"epoch": 0.16,
"grad_norm": 1.9544040435460412,
"learning_rate": 9.537608860181069e-06,
"loss": 0.1401,
"step": 144
},
{
"epoch": 0.17,
"grad_norm": 2.5638709576265777,
"learning_rate": 9.529797758162935e-06,
"loss": 0.1092,
"step": 145
},
{
"epoch": 0.17,
"grad_norm": 3.282994763913454,
"learning_rate": 9.521924485358993e-06,
"loss": 0.1153,
"step": 146
},
{
"epoch": 0.17,
"grad_norm": 2.5304046735735803,
"learning_rate": 9.513989149828718e-06,
"loss": 0.1249,
"step": 147
},
{
"epoch": 0.17,
"grad_norm": 2.084105660790946,
"learning_rate": 9.505991860483384e-06,
"loss": 0.1127,
"step": 148
},
{
"epoch": 0.17,
"grad_norm": 2.0807266175742334,
"learning_rate": 9.497932727084571e-06,
"loss": 0.0923,
"step": 149
},
{
"epoch": 0.17,
"grad_norm": 2.535491149619429,
"learning_rate": 9.489811860242658e-06,
"loss": 0.1171,
"step": 150
},
{
"epoch": 0.17,
"grad_norm": 2.8577061917206694,
"learning_rate": 9.481629371415315e-06,
"loss": 0.1027,
"step": 151
},
{
"epoch": 0.17,
"grad_norm": 2.366848987135643,
"learning_rate": 9.47338537290595e-06,
"loss": 0.1461,
"step": 152
},
{
"epoch": 0.17,
"grad_norm": 1.370601085821976,
"learning_rate": 9.465079977862193e-06,
"loss": 0.1176,
"step": 153
},
{
"epoch": 0.18,
"grad_norm": 2.418308466667954,
"learning_rate": 9.45671330027433e-06,
"loss": 0.14,
"step": 154
},
{
"epoch": 0.18,
"grad_norm": 2.3789531359503155,
"learning_rate": 9.448285454973739e-06,
"loss": 0.1272,
"step": 155
},
{
"epoch": 0.18,
"grad_norm": 1.837535532086006,
"learning_rate": 9.439796557631311e-06,
"loss": 0.1018,
"step": 156
},
{
"epoch": 0.18,
"grad_norm": 2.26111781111186,
"learning_rate": 9.431246724755879e-06,
"loss": 0.1089,
"step": 157
},
{
"epoch": 0.18,
"grad_norm": 2.149991918092674,
"learning_rate": 9.422636073692595e-06,
"loss": 0.1224,
"step": 158
},
{
"epoch": 0.18,
"grad_norm": 1.7257130483628491,
"learning_rate": 9.413964722621339e-06,
"loss": 0.0829,
"step": 159
},
{
"epoch": 0.18,
"grad_norm": 1.8121675919179852,
"learning_rate": 9.405232790555083e-06,
"loss": 0.1082,
"step": 160
},
{
"epoch": 0.18,
"grad_norm": 1.1269467977377563,
"learning_rate": 9.396440397338273e-06,
"loss": 0.0735,
"step": 161
},
{
"epoch": 0.19,
"grad_norm": 2.66841905851172,
"learning_rate": 9.387587663645163e-06,
"loss": 0.1171,
"step": 162
},
{
"epoch": 0.19,
"grad_norm": 1.9979957509268915,
"learning_rate": 9.378674710978185e-06,
"loss": 0.0867,
"step": 163
},
{
"epoch": 0.19,
"grad_norm": 1.3467231541181564,
"learning_rate": 9.369701661666255e-06,
"loss": 0.084,
"step": 164
},
{
"epoch": 0.19,
"grad_norm": 1.5661741867693126,
"learning_rate": 9.36066863886311e-06,
"loss": 0.0879,
"step": 165
},
{
"epoch": 0.19,
"grad_norm": 1.4781065347920526,
"learning_rate": 9.35157576654562e-06,
"loss": 0.081,
"step": 166
},
{
"epoch": 0.19,
"grad_norm": 1.747486649844575,
"learning_rate": 9.342423169512072e-06,
"loss": 0.0998,
"step": 167
},
{
"epoch": 0.19,
"grad_norm": 2.183425315531194,
"learning_rate": 9.333210973380469e-06,
"loss": 0.113,
"step": 168
},
{
"epoch": 0.19,
"grad_norm": 2.6187200801222126,
"learning_rate": 9.323939304586806e-06,
"loss": 0.1211,
"step": 169
},
{
"epoch": 0.19,
"grad_norm": 2.298579486613351,
"learning_rate": 9.314608290383324e-06,
"loss": 0.098,
"step": 170
},
{
"epoch": 0.2,
"grad_norm": 2.3760059355851286,
"learning_rate": 9.305218058836778e-06,
"loss": 0.1072,
"step": 171
},
{
"epoch": 0.2,
"grad_norm": 2.7212461615360892,
"learning_rate": 9.295768738826668e-06,
"loss": 0.1128,
"step": 172
},
{
"epoch": 0.2,
"grad_norm": 1.7379020422811915,
"learning_rate": 9.286260460043475e-06,
"loss": 0.125,
"step": 173
},
{
"epoch": 0.2,
"grad_norm": 3.1792312593675724,
"learning_rate": 9.27669335298688e-06,
"loss": 0.124,
"step": 174
},
{
"epoch": 0.2,
"grad_norm": 2.0277111530618916,
"learning_rate": 9.267067548963975e-06,
"loss": 0.0942,
"step": 175
},
{
"epoch": 0.2,
"grad_norm": 1.4469039936621195,
"learning_rate": 9.257383180087454e-06,
"loss": 0.0909,
"step": 176
},
{
"epoch": 0.2,
"grad_norm": 2.051541742743329,
"learning_rate": 9.24764037927381e-06,
"loss": 0.1199,
"step": 177
},
{
"epoch": 0.2,
"grad_norm": 1.4918383860547757,
"learning_rate": 9.237839280241504e-06,
"loss": 0.0948,
"step": 178
},
{
"epoch": 0.2,
"grad_norm": 1.64559721928272,
"learning_rate": 9.22798001750913e-06,
"loss": 0.0926,
"step": 179
},
{
"epoch": 0.21,
"grad_norm": 2.7118844975893306,
"learning_rate": 9.218062726393572e-06,
"loss": 0.1006,
"step": 180
},
{
"epoch": 0.21,
"grad_norm": 1.9102131867075969,
"learning_rate": 9.208087543008142e-06,
"loss": 0.0971,
"step": 181
},
{
"epoch": 0.21,
"grad_norm": 1.1482279190562423,
"learning_rate": 9.198054604260714e-06,
"loss": 0.0856,
"step": 182
},
{
"epoch": 0.21,
"grad_norm": 0.8541863535744467,
"learning_rate": 9.187964047851851e-06,
"loss": 0.0744,
"step": 183
},
{
"epoch": 0.21,
"grad_norm": 1.6754271851342437,
"learning_rate": 9.177816012272904e-06,
"loss": 0.1079,
"step": 184
},
{
"epoch": 0.21,
"grad_norm": 7.473478229947605,
"learning_rate": 9.16761063680412e-06,
"loss": 0.0957,
"step": 185
},
{
"epoch": 0.21,
"grad_norm": 3.0632873310708706,
"learning_rate": 9.157348061512728e-06,
"loss": 0.1259,
"step": 186
},
{
"epoch": 0.21,
"grad_norm": 2.173245370297517,
"learning_rate": 9.14702842725101e-06,
"loss": 0.1064,
"step": 187
},
{
"epoch": 0.21,
"grad_norm": 2.5577392058893906,
"learning_rate": 9.136651875654381e-06,
"loss": 0.1018,
"step": 188
},
{
"epoch": 0.22,
"grad_norm": 2.4914753007974073,
"learning_rate": 9.126218549139434e-06,
"loss": 0.0766,
"step": 189
},
{
"epoch": 0.22,
"grad_norm": 2.607548007532628,
"learning_rate": 9.115728590901988e-06,
"loss": 0.1145,
"step": 190
},
{
"epoch": 0.22,
"grad_norm": 2.3652955179863704,
"learning_rate": 9.10518214491513e-06,
"loss": 0.1075,
"step": 191
},
{
"epoch": 0.22,
"grad_norm": 1.944852075113964,
"learning_rate": 9.094579355927224e-06,
"loss": 0.1237,
"step": 192
},
{
"epoch": 0.22,
"grad_norm": 2.531422123532763,
"learning_rate": 9.083920369459941e-06,
"loss": 0.0996,
"step": 193
},
{
"epoch": 0.22,
"grad_norm": 1.6840091229079706,
"learning_rate": 9.073205331806248e-06,
"loss": 0.0978,
"step": 194
},
{
"epoch": 0.22,
"grad_norm": 1.1180165313908608,
"learning_rate": 9.062434390028407e-06,
"loss": 0.1071,
"step": 195
},
{
"epoch": 0.22,
"grad_norm": 1.3064591316530667,
"learning_rate": 9.05160769195596e-06,
"loss": 0.0989,
"step": 196
},
{
"epoch": 0.23,
"grad_norm": 2.467504354977469,
"learning_rate": 9.040725386183691e-06,
"loss": 0.1042,
"step": 197
},
{
"epoch": 0.23,
"grad_norm": 2.050946499964558,
"learning_rate": 9.029787622069594e-06,
"loss": 0.0975,
"step": 198
},
{
"epoch": 0.23,
"grad_norm": 1.4499865119155375,
"learning_rate": 9.018794549732819e-06,
"loss": 0.085,
"step": 199
},
{
"epoch": 0.23,
"grad_norm": 1.3533643803726048,
"learning_rate": 9.007746320051612e-06,
"loss": 0.1035,
"step": 200
},
{
"epoch": 0.23,
"grad_norm": 1.6026335880678133,
"learning_rate": 8.996643084661245e-06,
"loss": 0.1027,
"step": 201
},
{
"epoch": 0.23,
"grad_norm": 1.9397686652721016,
"learning_rate": 8.985484995951941e-06,
"loss": 0.1186,
"step": 202
},
{
"epoch": 0.23,
"grad_norm": 1.985748151579965,
"learning_rate": 8.974272207066767e-06,
"loss": 0.1106,
"step": 203
},
{
"epoch": 0.23,
"grad_norm": 3.0494865642540487,
"learning_rate": 8.96300487189955e-06,
"loss": 0.1064,
"step": 204
},
{
"epoch": 0.23,
"grad_norm": 1.5961078519634195,
"learning_rate": 8.951683145092749e-06,
"loss": 0.107,
"step": 205
},
{
"epoch": 0.24,
"grad_norm": 1.1110307819462288,
"learning_rate": 8.940307182035348e-06,
"loss": 0.0572,
"step": 206
},
{
"epoch": 0.24,
"grad_norm": 1.5666032654284987,
"learning_rate": 8.928877138860708e-06,
"loss": 0.0855,
"step": 207
},
{
"epoch": 0.24,
"grad_norm": 2.5628141099754145,
"learning_rate": 8.917393172444435e-06,
"loss": 0.1166,
"step": 208
},
{
"epoch": 0.24,
"grad_norm": 2.1688262602344186,
"learning_rate": 8.905855440402225e-06,
"loss": 0.1282,
"step": 209
},
{
"epoch": 0.24,
"grad_norm": 1.9861109578632914,
"learning_rate": 8.894264101087692e-06,
"loss": 0.114,
"step": 210
},
{
"epoch": 0.24,
"grad_norm": 2.501921488076984,
"learning_rate": 8.882619313590212e-06,
"loss": 0.1097,
"step": 211
},
{
"epoch": 0.24,
"grad_norm": 2.268674705211975,
"learning_rate": 8.870921237732722e-06,
"loss": 0.1071,
"step": 212
},
{
"epoch": 0.24,
"grad_norm": 2.067986488761189,
"learning_rate": 8.859170034069533e-06,
"loss": 0.078,
"step": 213
},
{
"epoch": 0.24,
"grad_norm": 1.848889166285347,
"learning_rate": 8.847365863884131e-06,
"loss": 0.0939,
"step": 214
},
{
"epoch": 0.25,
"grad_norm": 1.633820214871211,
"learning_rate": 8.835508889186957e-06,
"loss": 0.0668,
"step": 215
},
{
"epoch": 0.25,
"grad_norm": 1.762882965258614,
"learning_rate": 8.823599272713188e-06,
"loss": 0.1218,
"step": 216
},
{
"epoch": 0.25,
"grad_norm": 1.320523619911371,
"learning_rate": 8.8116371779205e-06,
"loss": 0.0862,
"step": 217
},
{
"epoch": 0.25,
"grad_norm": 2.2237720260978944,
"learning_rate": 8.799622768986826e-06,
"loss": 0.0818,
"step": 218
},
{
"epoch": 0.25,
"grad_norm": 2.4364710210798948,
"learning_rate": 8.787556210808101e-06,
"loss": 0.1199,
"step": 219
},
{
"epoch": 0.25,
"grad_norm": 1.6849135689019348,
"learning_rate": 8.775437668996004e-06,
"loss": 0.1075,
"step": 220
},
{
"epoch": 0.25,
"grad_norm": 2.220372344984879,
"learning_rate": 8.76326730987568e-06,
"loss": 0.0939,
"step": 221
},
{
"epoch": 0.25,
"grad_norm": 1.5555946268423595,
"learning_rate": 8.751045300483457e-06,
"loss": 0.0936,
"step": 222
},
{
"epoch": 0.25,
"grad_norm": 2.1402335410022744,
"learning_rate": 8.738771808564555e-06,
"loss": 0.0934,
"step": 223
},
{
"epoch": 0.26,
"grad_norm": 1.0488112883529606,
"learning_rate": 8.726447002570791e-06,
"loss": 0.0984,
"step": 224
},
{
"epoch": 0.26,
"grad_norm": 1.6002022312116384,
"learning_rate": 8.714071051658247e-06,
"loss": 0.1182,
"step": 225
},
{
"epoch": 0.26,
"grad_norm": 1.1565321002590416,
"learning_rate": 8.701644125684972e-06,
"loss": 0.0936,
"step": 226
},
{
"epoch": 0.26,
"grad_norm": 2.185324721324382,
"learning_rate": 8.689166395208638e-06,
"loss": 0.083,
"step": 227
},
{
"epoch": 0.26,
"grad_norm": 2.030700715820133,
"learning_rate": 8.676638031484196e-06,
"loss": 0.117,
"step": 228
},
{
"epoch": 0.26,
"grad_norm": 1.984880186535839,
"learning_rate": 8.664059206461537e-06,
"loss": 0.1018,
"step": 229
},
{
"epoch": 0.26,
"grad_norm": 1.5970035045867854,
"learning_rate": 8.65143009278312e-06,
"loss": 0.0949,
"step": 230
},
{
"epoch": 0.26,
"grad_norm": 1.1816220314739803,
"learning_rate": 8.638750863781614e-06,
"loss": 0.0813,
"step": 231
},
{
"epoch": 0.27,
"grad_norm": 1.1265530666409853,
"learning_rate": 8.626021693477506e-06,
"loss": 0.0807,
"step": 232
},
{
"epoch": 0.27,
"grad_norm": 1.4798060764319763,
"learning_rate": 8.613242756576729e-06,
"loss": 0.1005,
"step": 233
},
{
"epoch": 0.27,
"grad_norm": 2.2654194933943224,
"learning_rate": 8.600414228468245e-06,
"loss": 0.1188,
"step": 234
},
{
"epoch": 0.27,
"grad_norm": 2.069029364915602,
"learning_rate": 8.587536285221656e-06,
"loss": 0.1174,
"step": 235
},
{
"epoch": 0.27,
"grad_norm": 1.575346902419333,
"learning_rate": 8.574609103584776e-06,
"loss": 0.1085,
"step": 236
},
{
"epoch": 0.27,
"grad_norm": 1.4098282704868057,
"learning_rate": 8.561632860981205e-06,
"loss": 0.0866,
"step": 237
},
{
"epoch": 0.27,
"grad_norm": 1.322192828067451,
"learning_rate": 8.548607735507903e-06,
"loss": 0.0989,
"step": 238
},
{
"epoch": 0.27,
"grad_norm": 2.503973593981987,
"learning_rate": 8.535533905932739e-06,
"loss": 0.0959,
"step": 239
},
{
"epoch": 0.27,
"grad_norm": 1.9979798111827445,
"learning_rate": 8.522411551692034e-06,
"loss": 0.1173,
"step": 240
},
{
"epoch": 0.28,
"grad_norm": 2.331120139565699,
"learning_rate": 8.509240852888106e-06,
"loss": 0.1207,
"step": 241
},
{
"epoch": 0.28,
"grad_norm": 2.6853298265250545,
"learning_rate": 8.496021990286799e-06,
"loss": 0.1062,
"step": 242
},
{
"epoch": 0.28,
"grad_norm": 1.9768348877886996,
"learning_rate": 8.482755145314987e-06,
"loss": 0.1072,
"step": 243
},
{
"epoch": 0.28,
"grad_norm": 2.6831710659010493,
"learning_rate": 8.469440500058104e-06,
"loss": 0.1415,
"step": 244
},
{
"epoch": 0.28,
"grad_norm": 1.3075146767802872,
"learning_rate": 8.456078237257632e-06,
"loss": 0.0927,
"step": 245
},
{
"epoch": 0.28,
"grad_norm": 1.6603378900652728,
"learning_rate": 8.442668540308597e-06,
"loss": 0.1199,
"step": 246
},
{
"epoch": 0.28,
"grad_norm": 1.732311993009954,
"learning_rate": 8.429211593257054e-06,
"loss": 0.1162,
"step": 247
},
{
"epoch": 0.28,
"grad_norm": 1.0678226132557302,
"learning_rate": 8.415707580797552e-06,
"loss": 0.0811,
"step": 248
},
{
"epoch": 0.28,
"grad_norm": 1.637868487774632,
"learning_rate": 8.402156688270613e-06,
"loss": 0.1134,
"step": 249
},
{
"epoch": 0.29,
"grad_norm": 2.133244302298436,
"learning_rate": 8.388559101660177e-06,
"loss": 0.0926,
"step": 250
},
{
"epoch": 0.29,
"grad_norm": 2.0183612078901554,
"learning_rate": 8.374915007591053e-06,
"loss": 0.1111,
"step": 251
},
{
"epoch": 0.29,
"grad_norm": 1.9309545129494814,
"learning_rate": 8.361224593326359e-06,
"loss": 0.0935,
"step": 252
},
{
"epoch": 0.29,
"grad_norm": 1.9872264464717466,
"learning_rate": 8.347488046764948e-06,
"loss": 0.0943,
"step": 253
},
{
"epoch": 0.29,
"grad_norm": 3.421297893280941,
"learning_rate": 8.33370555643884e-06,
"loss": 0.1065,
"step": 254
},
{
"epoch": 0.29,
"grad_norm": 2.897377603677577,
"learning_rate": 8.319877311510614e-06,
"loss": 0.1002,
"step": 255
},
{
"epoch": 0.29,
"grad_norm": 2.3046015963660937,
"learning_rate": 8.30600350177083e-06,
"loss": 0.0855,
"step": 256
},
{
"epoch": 0.29,
"grad_norm": 2.124569836934495,
"learning_rate": 8.29208431763542e-06,
"loss": 0.1393,
"step": 257
},
{
"epoch": 0.29,
"grad_norm": 1.1578644869843313,
"learning_rate": 8.278119950143074e-06,
"loss": 0.0727,
"step": 258
},
{
"epoch": 0.3,
"grad_norm": 1.854683458939737,
"learning_rate": 8.264110590952609e-06,
"loss": 0.0943,
"step": 259
},
{
"epoch": 0.3,
"grad_norm": 1.4832089308281162,
"learning_rate": 8.250056432340352e-06,
"loss": 0.0874,
"step": 260
},
{
"epoch": 0.3,
"grad_norm": 1.258489452559218,
"learning_rate": 8.235957667197494e-06,
"loss": 0.0813,
"step": 261
},
{
"epoch": 0.3,
"grad_norm": 1.8439098458745395,
"learning_rate": 8.221814489027447e-06,
"loss": 0.1109,
"step": 262
},
{
"epoch": 0.3,
"grad_norm": 2.044799826590439,
"learning_rate": 8.207627091943178e-06,
"loss": 0.1031,
"step": 263
},
{
"epoch": 0.3,
"grad_norm": 1.0827682370362863,
"learning_rate": 8.193395670664555e-06,
"loss": 0.1009,
"step": 264
},
{
"epoch": 0.3,
"grad_norm": 2.323956787231295,
"learning_rate": 8.179120420515675e-06,
"loss": 0.124,
"step": 265
},
{
"epoch": 0.3,
"grad_norm": 10.023030336667428,
"learning_rate": 8.164801537422179e-06,
"loss": 0.0942,
"step": 266
},
{
"epoch": 0.31,
"grad_norm": 1.7106775924129292,
"learning_rate": 8.150439217908557e-06,
"loss": 0.1294,
"step": 267
},
{
"epoch": 0.31,
"grad_norm": 2.496815910955381,
"learning_rate": 8.136033659095462e-06,
"loss": 0.1046,
"step": 268
},
{
"epoch": 0.31,
"grad_norm": 1.646786991911152,
"learning_rate": 8.121585058697e-06,
"loss": 0.0861,
"step": 269
},
{
"epoch": 0.31,
"grad_norm": 1.8266200161202253,
"learning_rate": 8.107093615018017e-06,
"loss": 0.0949,
"step": 270
},
{
"epoch": 0.31,
"grad_norm": 2.5916169021461823,
"learning_rate": 8.092559526951374e-06,
"loss": 0.1169,
"step": 271
},
{
"epoch": 0.31,
"grad_norm": 1.3232237535990277,
"learning_rate": 8.077982993975222e-06,
"loss": 0.0972,
"step": 272
},
{
"epoch": 0.31,
"grad_norm": 1.5105056997202508,
"learning_rate": 8.063364216150258e-06,
"loss": 0.0838,
"step": 273
},
{
"epoch": 0.31,
"grad_norm": 1.422000688554186,
"learning_rate": 8.04870339411699e-06,
"loss": 0.0906,
"step": 274
},
{
"epoch": 0.31,
"grad_norm": 1.6597229035037346,
"learning_rate": 8.034000729092967e-06,
"loss": 0.1005,
"step": 275
},
{
"epoch": 0.32,
"grad_norm": 2.0117580401941204,
"learning_rate": 8.019256422870036e-06,
"loss": 0.1114,
"step": 276
},
{
"epoch": 0.32,
"grad_norm": 1.1092429217293147,
"learning_rate": 8.00447067781156e-06,
"loss": 0.0879,
"step": 277
},
{
"epoch": 0.32,
"grad_norm": 0.7979548894678264,
"learning_rate": 7.98964369684964e-06,
"loss": 0.0802,
"step": 278
},
{
"epoch": 0.32,
"grad_norm": 4.002883666778827,
"learning_rate": 7.974775683482337e-06,
"loss": 0.1201,
"step": 279
},
{
"epoch": 0.32,
"grad_norm": 2.9158794261385665,
"learning_rate": 7.959866841770875e-06,
"loss": 0.0928,
"step": 280
},
{
"epoch": 0.32,
"grad_norm": 1.8564270859008971,
"learning_rate": 7.94491737633684e-06,
"loss": 0.1002,
"step": 281
},
{
"epoch": 0.32,
"grad_norm": 0.9683845320254458,
"learning_rate": 7.929927492359377e-06,
"loss": 0.076,
"step": 282
},
{
"epoch": 0.32,
"grad_norm": 1.4081959120743777,
"learning_rate": 7.914897395572362e-06,
"loss": 0.1062,
"step": 283
},
{
"epoch": 0.32,
"grad_norm": 2.5215551080759475,
"learning_rate": 7.899827292261589e-06,
"loss": 0.1165,
"step": 284
},
{
"epoch": 0.33,
"grad_norm": 1.2748703118190907,
"learning_rate": 7.884717389261935e-06,
"loss": 0.0815,
"step": 285
},
{
"epoch": 0.33,
"grad_norm": 1.7476333672006885,
"learning_rate": 7.869567893954523e-06,
"loss": 0.1036,
"step": 286
},
{
"epoch": 0.33,
"grad_norm": 2.0867624034561008,
"learning_rate": 7.854379014263877e-06,
"loss": 0.0975,
"step": 287
},
{
"epoch": 0.33,
"grad_norm": 1.505523226821333,
"learning_rate": 7.839150958655058e-06,
"loss": 0.0909,
"step": 288
},
{
"epoch": 0.33,
"grad_norm": 1.1729694098449017,
"learning_rate": 7.823883936130817e-06,
"loss": 0.0973,
"step": 289
},
{
"epoch": 0.33,
"grad_norm": 2.8384149448507516,
"learning_rate": 7.808578156228718e-06,
"loss": 0.0897,
"step": 290
},
{
"epoch": 0.33,
"grad_norm": 1.62588050433828,
"learning_rate": 7.793233829018263e-06,
"loss": 0.0743,
"step": 291
},
{
"epoch": 0.33,
"grad_norm": 1.3669908142605618,
"learning_rate": 7.777851165098012e-06,
"loss": 0.0905,
"step": 292
},
{
"epoch": 0.33,
"grad_norm": 2.808747017133476,
"learning_rate": 7.762430375592689e-06,
"loss": 0.1093,
"step": 293
},
{
"epoch": 0.34,
"grad_norm": 1.6645286286403973,
"learning_rate": 7.746971672150286e-06,
"loss": 0.0904,
"step": 294
},
{
"epoch": 0.34,
"grad_norm": 1.5994325863369407,
"learning_rate": 7.731475266939159e-06,
"loss": 0.1028,
"step": 295
},
{
"epoch": 0.34,
"grad_norm": 1.5901303888596037,
"learning_rate": 7.715941372645115e-06,
"loss": 0.0956,
"step": 296
},
{
"epoch": 0.34,
"grad_norm": 2.328737208260307,
"learning_rate": 7.70037020246849e-06,
"loss": 0.1105,
"step": 297
},
{
"epoch": 0.34,
"grad_norm": 1.9563743002247425,
"learning_rate": 7.68476197012123e-06,
"loss": 0.1004,
"step": 298
},
{
"epoch": 0.34,
"grad_norm": 2.0781455779725273,
"learning_rate": 7.669116889823955e-06,
"loss": 0.0996,
"step": 299
},
{
"epoch": 0.34,
"grad_norm": 0.9436403770848232,
"learning_rate": 7.653435176303007e-06,
"loss": 0.0761,
"step": 300
},
{
"epoch": 0.34,
"grad_norm": 1.6406510415551603,
"learning_rate": 7.637717044787527e-06,
"loss": 0.1147,
"step": 301
},
{
"epoch": 0.35,
"grad_norm": 1.7565088827574487,
"learning_rate": 7.621962711006479e-06,
"loss": 0.1064,
"step": 302
},
{
"epoch": 0.35,
"grad_norm": 1.744343584962407,
"learning_rate": 7.6061723911857e-06,
"loss": 0.1237,
"step": 303
},
{
"epoch": 0.35,
"grad_norm": 1.8449164911800016,
"learning_rate": 7.59034630204493e-06,
"loss": 0.0866,
"step": 304
},
{
"epoch": 0.35,
"grad_norm": 1.950623541879017,
"learning_rate": 7.574484660794836e-06,
"loss": 0.0933,
"step": 305
},
{
"epoch": 0.35,
"grad_norm": 2.1686948556536465,
"learning_rate": 7.558587685134035e-06,
"loss": 0.1025,
"step": 306
},
{
"epoch": 0.35,
"grad_norm": 1.7581227603244038,
"learning_rate": 7.542655593246103e-06,
"loss": 0.0875,
"step": 307
},
{
"epoch": 0.35,
"grad_norm": 2.067970224647301,
"learning_rate": 7.526688603796578e-06,
"loss": 0.0984,
"step": 308
},
{
"epoch": 0.35,
"grad_norm": 1.589283294901552,
"learning_rate": 7.510686935929963e-06,
"loss": 0.086,
"step": 309
},
{
"epoch": 0.35,
"grad_norm": 2.0302397726328767,
"learning_rate": 7.494650809266719e-06,
"loss": 0.099,
"step": 310
},
{
"epoch": 0.36,
"grad_norm": 1.5989723290365598,
"learning_rate": 7.478580443900247e-06,
"loss": 0.0939,
"step": 311
},
{
"epoch": 0.36,
"grad_norm": 1.7617760156145226,
"learning_rate": 7.4624760603938704e-06,
"loss": 0.1073,
"step": 312
},
{
"epoch": 0.36,
"grad_norm": 2.399035341306831,
"learning_rate": 7.446337879777803e-06,
"loss": 0.0875,
"step": 313
},
{
"epoch": 0.36,
"grad_norm": 1.7044340741166755,
"learning_rate": 7.430166123546122e-06,
"loss": 0.1059,
"step": 314
},
{
"epoch": 0.36,
"grad_norm": 2.0742510793830444,
"learning_rate": 7.413961013653725e-06,
"loss": 0.121,
"step": 315
},
{
"epoch": 0.36,
"grad_norm": 1.3710310648899238,
"learning_rate": 7.397722772513282e-06,
"loss": 0.099,
"step": 316
},
{
"epoch": 0.36,
"grad_norm": 1.4093925149964166,
"learning_rate": 7.381451622992184e-06,
"loss": 0.0809,
"step": 317
},
{
"epoch": 0.36,
"grad_norm": 1.0871938974349964,
"learning_rate": 7.3651477884094855e-06,
"loss": 0.0873,
"step": 318
},
{
"epoch": 0.36,
"grad_norm": 2.6023982808593678,
"learning_rate": 7.34881149253284e-06,
"loss": 0.0827,
"step": 319
},
{
"epoch": 0.37,
"grad_norm": 2.467883768091289,
"learning_rate": 7.332442959575424e-06,
"loss": 0.1005,
"step": 320
},
{
"epoch": 0.37,
"grad_norm": 1.1212388969162672,
"learning_rate": 7.316042414192866e-06,
"loss": 0.0914,
"step": 321
},
{
"epoch": 0.37,
"grad_norm": 1.8358020056483744,
"learning_rate": 7.299610081480155e-06,
"loss": 0.1146,
"step": 322
},
{
"epoch": 0.37,
"grad_norm": 1.3636541396641189,
"learning_rate": 7.283146186968566e-06,
"loss": 0.1085,
"step": 323
},
{
"epoch": 0.37,
"grad_norm": 1.07569146070376,
"learning_rate": 7.266650956622546e-06,
"loss": 0.089,
"step": 324
},
{
"epoch": 0.37,
"grad_norm": 1.8768958532173836,
"learning_rate": 7.2501246168366235e-06,
"loss": 0.096,
"step": 325
},
{
"epoch": 0.37,
"grad_norm": 1.8347653173611815,
"learning_rate": 7.233567394432302e-06,
"loss": 0.1067,
"step": 326
},
{
"epoch": 0.37,
"grad_norm": 1.202389845113578,
"learning_rate": 7.216979516654944e-06,
"loss": 0.0754,
"step": 327
},
{
"epoch": 0.37,
"grad_norm": 1.0247024117157115,
"learning_rate": 7.200361211170648e-06,
"loss": 0.0958,
"step": 328
},
{
"epoch": 0.38,
"grad_norm": 0.9758438803344642,
"learning_rate": 7.183712706063133e-06,
"loss": 0.073,
"step": 329
},
{
"epoch": 0.38,
"grad_norm": 1.0927982452541785,
"learning_rate": 7.1670342298306e-06,
"loss": 0.0891,
"step": 330
},
{
"epoch": 0.38,
"grad_norm": 1.3702951110134494,
"learning_rate": 7.1503260113826035e-06,
"loss": 0.102,
"step": 331
},
{
"epoch": 0.38,
"grad_norm": 1.6194293721517825,
"learning_rate": 7.1335882800369e-06,
"loss": 0.0877,
"step": 332
},
{
"epoch": 0.38,
"grad_norm": 1.3330024442939468,
"learning_rate": 7.116821265516307e-06,
"loss": 0.1038,
"step": 333
},
{
"epoch": 0.38,
"grad_norm": 1.5082089283588793,
"learning_rate": 7.100025197945555e-06,
"loss": 0.0903,
"step": 334
},
{
"epoch": 0.38,
"grad_norm": 0.918479171265335,
"learning_rate": 7.083200307848116e-06,
"loss": 0.0782,
"step": 335
},
{
"epoch": 0.38,
"grad_norm": 1.270870459177953,
"learning_rate": 7.066346826143049e-06,
"loss": 0.078,
"step": 336
},
{
"epoch": 0.39,
"grad_norm": 2.808167569407084,
"learning_rate": 7.049464984141831e-06,
"loss": 0.1034,
"step": 337
},
{
"epoch": 0.39,
"grad_norm": 1.4850123445239072,
"learning_rate": 7.032555013545176e-06,
"loss": 0.0987,
"step": 338
},
{
"epoch": 0.39,
"grad_norm": 1.0857390354428968,
"learning_rate": 7.015617146439863e-06,
"loss": 0.0846,
"step": 339
},
{
"epoch": 0.39,
"grad_norm": 1.4019142617304745,
"learning_rate": 6.998651615295541e-06,
"loss": 0.0946,
"step": 340
},
{
"epoch": 0.39,
"grad_norm": 1.4419350588759805,
"learning_rate": 6.9816586529615475e-06,
"loss": 0.0925,
"step": 341
},
{
"epoch": 0.39,
"grad_norm": 2.122469313937723,
"learning_rate": 6.9646384926637076e-06,
"loss": 0.101,
"step": 342
},
{
"epoch": 0.39,
"grad_norm": 2.023493438132179,
"learning_rate": 6.947591368001138e-06,
"loss": 0.1049,
"step": 343
},
{
"epoch": 0.39,
"grad_norm": 1.8085392355883325,
"learning_rate": 6.930517512943029e-06,
"loss": 0.0906,
"step": 344
},
{
"epoch": 0.39,
"grad_norm": 1.8476500146983619,
"learning_rate": 6.913417161825449e-06,
"loss": 0.075,
"step": 345
},
{
"epoch": 0.4,
"grad_norm": 1.3169871494739973,
"learning_rate": 6.896290549348119e-06,
"loss": 0.0999,
"step": 346
},
{
"epoch": 0.4,
"grad_norm": 1.5793887545549115,
"learning_rate": 6.879137910571191e-06,
"loss": 0.0951,
"step": 347
},
{
"epoch": 0.4,
"grad_norm": 1.8170746345841167,
"learning_rate": 6.861959480912027e-06,
"loss": 0.0872,
"step": 348
},
{
"epoch": 0.4,
"grad_norm": 1.9489463937624985,
"learning_rate": 6.8447554961419615e-06,
"loss": 0.1034,
"step": 349
},
{
"epoch": 0.4,
"grad_norm": 2.1832874089959704,
"learning_rate": 6.82752619238307e-06,
"loss": 0.1158,
"step": 350
},
{
"epoch": 0.4,
"grad_norm": 1.3585943935815048,
"learning_rate": 6.810271806104931e-06,
"loss": 0.0801,
"step": 351
},
{
"epoch": 0.4,
"grad_norm": 4.249666680648082,
"learning_rate": 6.792992574121374e-06,
"loss": 0.097,
"step": 352
},
{
"epoch": 0.4,
"grad_norm": 2.597130012884428,
"learning_rate": 6.7756887335872275e-06,
"loss": 0.1166,
"step": 353
},
{
"epoch": 0.4,
"grad_norm": 1.4936397575141562,
"learning_rate": 6.758360521995079e-06,
"loss": 0.1212,
"step": 354
},
{
"epoch": 0.41,
"grad_norm": 1.171091229180981,
"learning_rate": 6.741008177171995e-06,
"loss": 0.0908,
"step": 355
},
{
"epoch": 0.41,
"grad_norm": 0.9253783941058572,
"learning_rate": 6.723631937276271e-06,
"loss": 0.0715,
"step": 356
},
{
"epoch": 0.41,
"grad_norm": 1.1914008404922645,
"learning_rate": 6.706232040794162e-06,
"loss": 0.0753,
"step": 357
},
{
"epoch": 0.41,
"grad_norm": 1.6494093340509537,
"learning_rate": 6.6888087265365995e-06,
"loss": 0.1066,
"step": 358
},
{
"epoch": 0.41,
"grad_norm": 1.1812819049319463,
"learning_rate": 6.671362233635926e-06,
"loss": 0.0927,
"step": 359
},
{
"epoch": 0.41,
"grad_norm": 2.4559536958240527,
"learning_rate": 6.653892801542604e-06,
"loss": 0.0928,
"step": 360
},
{
"epoch": 0.41,
"grad_norm": 1.25389273102752,
"learning_rate": 6.636400670021934e-06,
"loss": 0.0999,
"step": 361
},
{
"epoch": 0.41,
"grad_norm": 1.7179424428138383,
"learning_rate": 6.618886079150764e-06,
"loss": 0.0921,
"step": 362
},
{
"epoch": 0.41,
"grad_norm": 1.135878355877357,
"learning_rate": 6.601349269314188e-06,
"loss": 0.082,
"step": 363
},
{
"epoch": 0.42,
"grad_norm": 1.362482025104223,
"learning_rate": 6.583790481202261e-06,
"loss": 0.0999,
"step": 364
},
{
"epoch": 0.42,
"grad_norm": 1.9432112152221168,
"learning_rate": 6.566209955806679e-06,
"loss": 0.1061,
"step": 365
},
{
"epoch": 0.42,
"grad_norm": 1.6175027458716444,
"learning_rate": 6.54860793441748e-06,
"loss": 0.1199,
"step": 366
},
{
"epoch": 0.42,
"grad_norm": 1.7771666465783587,
"learning_rate": 6.530984658619735e-06,
"loss": 0.1045,
"step": 367
},
{
"epoch": 0.42,
"grad_norm": 1.4987987433398124,
"learning_rate": 6.5133403702902245e-06,
"loss": 0.1011,
"step": 368
},
{
"epoch": 0.42,
"grad_norm": 2.024080574349673,
"learning_rate": 6.495675311594123e-06,
"loss": 0.0962,
"step": 369
},
{
"epoch": 0.42,
"grad_norm": 1.233698410707572,
"learning_rate": 6.477989724981679e-06,
"loss": 0.102,
"step": 370
},
{
"epoch": 0.42,
"grad_norm": 1.860410897978535,
"learning_rate": 6.46028385318488e-06,
"loss": 0.0882,
"step": 371
},
{
"epoch": 0.43,
"grad_norm": 1.056573638183693,
"learning_rate": 6.442557939214125e-06,
"loss": 0.0768,
"step": 372
},
{
"epoch": 0.43,
"grad_norm": 1.8634932101277937,
"learning_rate": 6.42481222635489e-06,
"loss": 0.1102,
"step": 373
},
{
"epoch": 0.43,
"grad_norm": 1.624917843958734,
"learning_rate": 6.407046958164387e-06,
"loss": 0.1014,
"step": 374
},
{
"epoch": 0.43,
"grad_norm": 1.3310298175257878,
"learning_rate": 6.389262378468219e-06,
"loss": 0.0828,
"step": 375
},
{
"epoch": 0.43,
"grad_norm": 1.0614121621946018,
"learning_rate": 6.371458731357047e-06,
"loss": 0.0987,
"step": 376
},
{
"epoch": 0.43,
"grad_norm": 2.7534907444752217,
"learning_rate": 6.353636261183214e-06,
"loss": 0.0884,
"step": 377
},
{
"epoch": 0.43,
"grad_norm": 1.3293066218185532,
"learning_rate": 6.335795212557416e-06,
"loss": 0.1089,
"step": 378
},
{
"epoch": 0.43,
"grad_norm": 1.8227195971889414,
"learning_rate": 6.3179358303453386e-06,
"loss": 0.1146,
"step": 379
},
{
"epoch": 0.43,
"grad_norm": 1.8879414641081136,
"learning_rate": 6.300058359664285e-06,
"loss": 0.0771,
"step": 380
},
{
"epoch": 0.44,
"grad_norm": 2.0221109980415646,
"learning_rate": 6.2821630458798236e-06,
"loss": 0.113,
"step": 381
},
{
"epoch": 0.44,
"grad_norm": 1.9690496853348498,
"learning_rate": 6.264250134602415e-06,
"loss": 0.1076,
"step": 382
},
{
"epoch": 0.44,
"grad_norm": 1.3353353790171576,
"learning_rate": 6.246319871684048e-06,
"loss": 0.0983,
"step": 383
},
{
"epoch": 0.44,
"grad_norm": 0.8840122811422151,
"learning_rate": 6.228372503214853e-06,
"loss": 0.0784,
"step": 384
},
{
"epoch": 0.44,
"grad_norm": 1.9223687815447972,
"learning_rate": 6.210408275519735e-06,
"loss": 0.1164,
"step": 385
},
{
"epoch": 0.44,
"grad_norm": 1.3646046422183316,
"learning_rate": 6.192427435154991e-06,
"loss": 0.1062,
"step": 386
},
{
"epoch": 0.44,
"grad_norm": 1.3010904867560287,
"learning_rate": 6.17443022890492e-06,
"loss": 0.0733,
"step": 387
},
{
"epoch": 0.44,
"grad_norm": 0.9588966018297974,
"learning_rate": 6.156416903778443e-06,
"loss": 0.0937,
"step": 388
},
{
"epoch": 0.44,
"grad_norm": 0.8176889337157417,
"learning_rate": 6.138387707005711e-06,
"loss": 0.0785,
"step": 389
},
{
"epoch": 0.45,
"grad_norm": 1.3798947208822552,
"learning_rate": 6.1203428860347085e-06,
"loss": 0.1029,
"step": 390
},
{
"epoch": 0.45,
"grad_norm": 1.2627266387294376,
"learning_rate": 6.10228268852786e-06,
"loss": 0.0853,
"step": 391
},
{
"epoch": 0.45,
"grad_norm": 1.3636634430274013,
"learning_rate": 6.084207362358635e-06,
"loss": 0.1015,
"step": 392
},
{
"epoch": 0.45,
"grad_norm": 1.7109591987525221,
"learning_rate": 6.066117155608136e-06,
"loss": 0.1118,
"step": 393
},
{
"epoch": 0.45,
"grad_norm": 1.517961417989287,
"learning_rate": 6.048012316561699e-06,
"loss": 0.0852,
"step": 394
},
{
"epoch": 0.45,
"grad_norm": 2.342629048761873,
"learning_rate": 6.029893093705492e-06,
"loss": 0.1187,
"step": 395
},
{
"epoch": 0.45,
"grad_norm": 0.9294142148345402,
"learning_rate": 6.011759735723096e-06,
"loss": 0.0864,
"step": 396
},
{
"epoch": 0.45,
"grad_norm": 1.1950075847793311,
"learning_rate": 5.993612491492088e-06,
"loss": 0.092,
"step": 397
},
{
"epoch": 0.45,
"grad_norm": 1.3551545691072298,
"learning_rate": 5.975451610080643e-06,
"loss": 0.0693,
"step": 398
},
{
"epoch": 0.46,
"grad_norm": 1.216161129705157,
"learning_rate": 5.957277340744094e-06,
"loss": 0.0895,
"step": 399
},
{
"epoch": 0.46,
"grad_norm": 1.4076300298899016,
"learning_rate": 5.939089932921527e-06,
"loss": 0.085,
"step": 400
},
{
"epoch": 0.46,
"grad_norm": 1.465518309640694,
"learning_rate": 5.920889636232352e-06,
"loss": 0.0957,
"step": 401
},
{
"epoch": 0.46,
"grad_norm": 1.3444282299556731,
"learning_rate": 5.902676700472874e-06,
"loss": 0.1023,
"step": 402
},
{
"epoch": 0.46,
"grad_norm": 2.172246680213261,
"learning_rate": 5.884451375612865e-06,
"loss": 0.0859,
"step": 403
},
{
"epoch": 0.46,
"grad_norm": 1.0096049350842804,
"learning_rate": 5.8662139117921435e-06,
"loss": 0.0952,
"step": 404
},
{
"epoch": 0.46,
"grad_norm": 1.3924332642607742,
"learning_rate": 5.847964559317129e-06,
"loss": 0.075,
"step": 405
},
{
"epoch": 0.46,
"grad_norm": 1.555222595194896,
"learning_rate": 5.829703568657406e-06,
"loss": 0.1194,
"step": 406
},
{
"epoch": 0.47,
"grad_norm": 1.814069025220444,
"learning_rate": 5.8114311904423e-06,
"loss": 0.1114,
"step": 407
},
{
"epoch": 0.47,
"grad_norm": 2.63589934143905,
"learning_rate": 5.793147675457425e-06,
"loss": 0.0997,
"step": 408
},
{
"epoch": 0.47,
"grad_norm": 1.672874605967912,
"learning_rate": 5.774853274641243e-06,
"loss": 0.0936,
"step": 409
},
{
"epoch": 0.47,
"grad_norm": 1.293398379507954,
"learning_rate": 5.756548239081626e-06,
"loss": 0.0936,
"step": 410
},
{
"epoch": 0.47,
"grad_norm": 1.3509632172781578,
"learning_rate": 5.738232820012407e-06,
"loss": 0.1014,
"step": 411
},
{
"epoch": 0.47,
"grad_norm": 1.4424133826280734,
"learning_rate": 5.719907268809927e-06,
"loss": 0.0942,
"step": 412
},
{
"epoch": 0.47,
"grad_norm": 1.6011921811511678,
"learning_rate": 5.701571836989591e-06,
"loss": 0.0839,
"step": 413
},
{
"epoch": 0.47,
"grad_norm": 2.1072468499647248,
"learning_rate": 5.683226776202416e-06,
"loss": 0.1096,
"step": 414
},
{
"epoch": 0.47,
"grad_norm": 1.3764568994495563,
"learning_rate": 5.664872338231572e-06,
"loss": 0.1001,
"step": 415
},
{
"epoch": 0.48,
"grad_norm": 2.53101947882973,
"learning_rate": 5.64650877498893e-06,
"loss": 0.0885,
"step": 416
},
{
"epoch": 0.48,
"grad_norm": 1.9958749325869098,
"learning_rate": 5.628136338511607e-06,
"loss": 0.1067,
"step": 417
},
{
"epoch": 0.48,
"grad_norm": 1.1068155120562846,
"learning_rate": 5.6097552809584996e-06,
"loss": 0.0862,
"step": 418
},
{
"epoch": 0.48,
"grad_norm": 1.625671593043311,
"learning_rate": 5.591365854606829e-06,
"loss": 0.0954,
"step": 419
},
{
"epoch": 0.48,
"grad_norm": 0.8652769478201479,
"learning_rate": 5.5729683118486785e-06,
"loss": 0.0848,
"step": 420
},
{
"epoch": 0.48,
"grad_norm": 2.0731466858910217,
"learning_rate": 5.554562905187527e-06,
"loss": 0.089,
"step": 421
},
{
"epoch": 0.48,
"grad_norm": 1.6660858543441386,
"learning_rate": 5.536149887234781e-06,
"loss": 0.0982,
"step": 422
},
{
"epoch": 0.48,
"grad_norm": 1.552222550169945,
"learning_rate": 5.517729510706316e-06,
"loss": 0.1154,
"step": 423
},
{
"epoch": 0.48,
"grad_norm": 1.3368802229091306,
"learning_rate": 5.499302028418998e-06,
"loss": 0.0976,
"step": 424
},
{
"epoch": 0.49,
"grad_norm": 2.181126697333057,
"learning_rate": 5.480867693287224e-06,
"loss": 0.1062,
"step": 425
},
{
"epoch": 0.49,
"grad_norm": 1.9205645782423137,
"learning_rate": 5.462426758319439e-06,
"loss": 0.0911,
"step": 426
},
{
"epoch": 0.49,
"grad_norm": 1.9632409923514775,
"learning_rate": 5.443979476614674e-06,
"loss": 0.1095,
"step": 427
},
{
"epoch": 0.49,
"grad_norm": 1.1362611723204445,
"learning_rate": 5.425526101359068e-06,
"loss": 0.0777,
"step": 428
},
{
"epoch": 0.49,
"grad_norm": 1.3238503828238188,
"learning_rate": 5.407066885822391e-06,
"loss": 0.0824,
"step": 429
},
{
"epoch": 0.49,
"grad_norm": 1.5813044674729215,
"learning_rate": 5.388602083354572e-06,
"loss": 0.1091,
"step": 430
},
{
"epoch": 0.49,
"grad_norm": 1.2656866672687868,
"learning_rate": 5.370131947382215e-06,
"loss": 0.1061,
"step": 431
},
{
"epoch": 0.49,
"grad_norm": 1.3312297948060596,
"learning_rate": 5.351656731405132e-06,
"loss": 0.0823,
"step": 432
},
{
"epoch": 0.49,
"grad_norm": 1.4157388997754832,
"learning_rate": 5.333176688992856e-06,
"loss": 0.0792,
"step": 433
},
{
"epoch": 0.5,
"grad_norm": 1.4094464298882508,
"learning_rate": 5.314692073781157e-06,
"loss": 0.101,
"step": 434
},
{
"epoch": 0.5,
"grad_norm": 1.4099498232807148,
"learning_rate": 5.296203139468572e-06,
"loss": 0.0865,
"step": 435
},
{
"epoch": 0.5,
"grad_norm": 1.8502414411111636,
"learning_rate": 5.2777101398129135e-06,
"loss": 0.0889,
"step": 436
},
{
"epoch": 0.5,
"grad_norm": 1.7560344587273293,
"learning_rate": 5.2592133286277925e-06,
"loss": 0.0916,
"step": 437
},
{
"epoch": 0.5,
"grad_norm": 1.6710267518378974,
"learning_rate": 5.2407129597791295e-06,
"loss": 0.1047,
"step": 438
},
{
"epoch": 0.5,
"grad_norm": 1.2201280272734387,
"learning_rate": 5.222209287181677e-06,
"loss": 0.0891,
"step": 439
},
{
"epoch": 0.5,
"grad_norm": 1.4358152308072225,
"learning_rate": 5.203702564795528e-06,
"loss": 0.0802,
"step": 440
},
{
"epoch": 0.5,
"grad_norm": 0.8914507091018575,
"learning_rate": 5.185193046622636e-06,
"loss": 0.0782,
"step": 441
},
{
"epoch": 0.51,
"grad_norm": 1.6165198764693776,
"learning_rate": 5.166680986703323e-06,
"loss": 0.1246,
"step": 442
},
{
"epoch": 0.51,
"grad_norm": 1.4298748482799117,
"learning_rate": 5.148166639112799e-06,
"loss": 0.105,
"step": 443
},
{
"epoch": 0.51,
"grad_norm": 1.535559818853895,
"learning_rate": 5.129650257957671e-06,
"loss": 0.1119,
"step": 444
},
{
"epoch": 0.51,
"grad_norm": 1.9197842071730942,
"learning_rate": 5.111132097372459e-06,
"loss": 0.1033,
"step": 445
},
{
"epoch": 0.51,
"grad_norm": 1.1599006332138146,
"learning_rate": 5.0926124115160976e-06,
"loss": 0.0896,
"step": 446
},
{
"epoch": 0.51,
"grad_norm": 1.3974819169152706,
"learning_rate": 5.074091454568464e-06,
"loss": 0.0969,
"step": 447
},
{
"epoch": 0.51,
"grad_norm": 0.9408016749032659,
"learning_rate": 5.055569480726876e-06,
"loss": 0.0895,
"step": 448
},
{
"epoch": 0.51,
"grad_norm": 1.9952244551309546,
"learning_rate": 5.0370467442026115e-06,
"loss": 0.0871,
"step": 449
},
{
"epoch": 0.51,
"grad_norm": 1.36586105745886,
"learning_rate": 5.0185234992174114e-06,
"loss": 0.0844,
"step": 450
},
{
"epoch": 0.52,
"grad_norm": 1.7479502067142012,
"learning_rate": 5e-06,
"loss": 0.1044,
"step": 451
},
{
"epoch": 0.52,
"grad_norm": 1.3573048964331627,
"learning_rate": 4.981476500782591e-06,
"loss": 0.0948,
"step": 452
},
{
"epoch": 0.52,
"grad_norm": 1.473812452772602,
"learning_rate": 4.96295325579739e-06,
"loss": 0.0797,
"step": 453
},
{
"epoch": 0.52,
"grad_norm": 2.0296959916896,
"learning_rate": 4.944430519273126e-06,
"loss": 0.0895,
"step": 454
},
{
"epoch": 0.52,
"grad_norm": 1.2103951372488675,
"learning_rate": 4.925908545431537e-06,
"loss": 0.0915,
"step": 455
},
{
"epoch": 0.52,
"grad_norm": 1.434571480202637,
"learning_rate": 4.907387588483903e-06,
"loss": 0.0962,
"step": 456
},
{
"epoch": 0.52,
"grad_norm": 1.2049199119312812,
"learning_rate": 4.8888679026275436e-06,
"loss": 0.0909,
"step": 457
},
{
"epoch": 0.52,
"grad_norm": 1.7639545815530557,
"learning_rate": 4.870349742042329e-06,
"loss": 0.1117,
"step": 458
},
{
"epoch": 0.52,
"grad_norm": 1.1494030416261993,
"learning_rate": 4.8518333608872015e-06,
"loss": 0.0752,
"step": 459
},
{
"epoch": 0.53,
"grad_norm": 1.6445773991618207,
"learning_rate": 4.8333190132966794e-06,
"loss": 0.0861,
"step": 460
},
{
"epoch": 0.53,
"grad_norm": 1.6414731186074902,
"learning_rate": 4.814806953377366e-06,
"loss": 0.0829,
"step": 461
},
{
"epoch": 0.53,
"grad_norm": 1.8968387997428107,
"learning_rate": 4.796297435204473e-06,
"loss": 0.0887,
"step": 462
},
{
"epoch": 0.53,
"grad_norm": 2.25439526816161,
"learning_rate": 4.777790712818324e-06,
"loss": 0.075,
"step": 463
},
{
"epoch": 0.53,
"grad_norm": 1.1014332021036057,
"learning_rate": 4.759287040220872e-06,
"loss": 0.0719,
"step": 464
},
{
"epoch": 0.53,
"grad_norm": 1.2580923566775155,
"learning_rate": 4.740786671372209e-06,
"loss": 0.0955,
"step": 465
},
{
"epoch": 0.53,
"grad_norm": 1.7123882846797445,
"learning_rate": 4.7222898601870864e-06,
"loss": 0.0935,
"step": 466
},
{
"epoch": 0.53,
"grad_norm": 1.178774620445043,
"learning_rate": 4.703796860531429e-06,
"loss": 0.0987,
"step": 467
},
{
"epoch": 0.53,
"grad_norm": 1.6539278144919904,
"learning_rate": 4.6853079262188445e-06,
"loss": 0.1124,
"step": 468
},
{
"epoch": 0.54,
"grad_norm": 1.0935968959871103,
"learning_rate": 4.666823311007145e-06,
"loss": 0.0871,
"step": 469
},
{
"epoch": 0.54,
"grad_norm": 1.3166304048687854,
"learning_rate": 4.648343268594869e-06,
"loss": 0.0749,
"step": 470
},
{
"epoch": 0.54,
"grad_norm": 1.8561652305018204,
"learning_rate": 4.629868052617786e-06,
"loss": 0.0847,
"step": 471
},
{
"epoch": 0.54,
"grad_norm": 1.414637359104844,
"learning_rate": 4.61139791664543e-06,
"loss": 0.0887,
"step": 472
},
{
"epoch": 0.54,
"grad_norm": 1.2510852736375488,
"learning_rate": 4.59293311417761e-06,
"loss": 0.0713,
"step": 473
},
{
"epoch": 0.54,
"grad_norm": 1.404573593610514,
"learning_rate": 4.574473898640933e-06,
"loss": 0.0863,
"step": 474
},
{
"epoch": 0.54,
"grad_norm": 1.1678998858209273,
"learning_rate": 4.556020523385326e-06,
"loss": 0.1042,
"step": 475
},
{
"epoch": 0.54,
"grad_norm": 1.4292796447931189,
"learning_rate": 4.537573241680563e-06,
"loss": 0.0898,
"step": 476
},
{
"epoch": 0.55,
"grad_norm": 1.8305004593870045,
"learning_rate": 4.519132306712778e-06,
"loss": 0.0883,
"step": 477
},
{
"epoch": 0.55,
"grad_norm": 1.706985080173823,
"learning_rate": 4.500697971581003e-06,
"loss": 0.0628,
"step": 478
},
{
"epoch": 0.55,
"grad_norm": 1.5783162958105084,
"learning_rate": 4.482270489293685e-06,
"loss": 0.0899,
"step": 479
},
{
"epoch": 0.55,
"grad_norm": 1.2211595747222304,
"learning_rate": 4.463850112765221e-06,
"loss": 0.0754,
"step": 480
},
{
"epoch": 0.55,
"grad_norm": 1.995983190967539,
"learning_rate": 4.445437094812476e-06,
"loss": 0.0887,
"step": 481
},
{
"epoch": 0.55,
"grad_norm": 1.428898296054049,
"learning_rate": 4.4270316881513215e-06,
"loss": 0.1133,
"step": 482
},
{
"epoch": 0.55,
"grad_norm": 1.4225799563091075,
"learning_rate": 4.408634145393172e-06,
"loss": 0.1184,
"step": 483
},
{
"epoch": 0.55,
"grad_norm": 1.3918275141803165,
"learning_rate": 4.390244719041502e-06,
"loss": 0.0648,
"step": 484
},
{
"epoch": 0.55,
"grad_norm": 1.367999116075585,
"learning_rate": 4.371863661488394e-06,
"loss": 0.0853,
"step": 485
},
{
"epoch": 0.56,
"grad_norm": 1.7079182185930346,
"learning_rate": 4.353491225011071e-06,
"loss": 0.0949,
"step": 486
},
{
"epoch": 0.56,
"grad_norm": 1.5010142904615251,
"learning_rate": 4.335127661768429e-06,
"loss": 0.0641,
"step": 487
},
{
"epoch": 0.56,
"grad_norm": 1.7284173282169324,
"learning_rate": 4.316773223797585e-06,
"loss": 0.0703,
"step": 488
},
{
"epoch": 0.56,
"grad_norm": 1.5621635512490386,
"learning_rate": 4.298428163010411e-06,
"loss": 0.0869,
"step": 489
},
{
"epoch": 0.56,
"grad_norm": 1.422341370982355,
"learning_rate": 4.280092731190075e-06,
"loss": 0.0799,
"step": 490
},
{
"epoch": 0.56,
"grad_norm": 1.7501446138446721,
"learning_rate": 4.261767179987595e-06,
"loss": 0.0897,
"step": 491
},
{
"epoch": 0.56,
"grad_norm": 1.2772834345032038,
"learning_rate": 4.243451760918376e-06,
"loss": 0.1065,
"step": 492
},
{
"epoch": 0.56,
"grad_norm": 0.7832782732498063,
"learning_rate": 4.225146725358759e-06,
"loss": 0.0806,
"step": 493
},
{
"epoch": 0.56,
"grad_norm": 0.9590851368962775,
"learning_rate": 4.206852324542578e-06,
"loss": 0.0834,
"step": 494
},
{
"epoch": 0.57,
"grad_norm": 1.5291845240945432,
"learning_rate": 4.188568809557701e-06,
"loss": 0.0935,
"step": 495
},
{
"epoch": 0.57,
"grad_norm": 1.774265127813176,
"learning_rate": 4.170296431342595e-06,
"loss": 0.0776,
"step": 496
},
{
"epoch": 0.57,
"grad_norm": 1.6751404930422793,
"learning_rate": 4.152035440682874e-06,
"loss": 0.1102,
"step": 497
},
{
"epoch": 0.57,
"grad_norm": 1.1017368315986533,
"learning_rate": 4.1337860882078564e-06,
"loss": 0.0842,
"step": 498
},
{
"epoch": 0.57,
"grad_norm": 1.0095647614965828,
"learning_rate": 4.115548624387136e-06,
"loss": 0.1033,
"step": 499
},
{
"epoch": 0.57,
"grad_norm": 1.1850618393493437,
"learning_rate": 4.097323299527129e-06,
"loss": 0.0935,
"step": 500
},
{
"epoch": 0.57,
"grad_norm": 1.6016802129348722,
"learning_rate": 4.079110363767649e-06,
"loss": 0.0957,
"step": 501
},
{
"epoch": 0.57,
"grad_norm": 1.7614328270624333,
"learning_rate": 4.060910067078475e-06,
"loss": 0.0932,
"step": 502
},
{
"epoch": 0.57,
"grad_norm": 1.1784018017105278,
"learning_rate": 4.042722659255907e-06,
"loss": 0.0947,
"step": 503
},
{
"epoch": 0.58,
"grad_norm": 0.9536882155687711,
"learning_rate": 4.02454838991936e-06,
"loss": 0.0864,
"step": 504
},
{
"epoch": 0.58,
"grad_norm": 1.3412379988333587,
"learning_rate": 4.006387508507914e-06,
"loss": 0.0815,
"step": 505
},
{
"epoch": 0.58,
"grad_norm": 0.9937123395901131,
"learning_rate": 3.988240264276906e-06,
"loss": 0.0837,
"step": 506
},
{
"epoch": 0.58,
"grad_norm": 1.1118836347253336,
"learning_rate": 3.970106906294509e-06,
"loss": 0.1172,
"step": 507
},
{
"epoch": 0.58,
"grad_norm": 1.2375186671145577,
"learning_rate": 3.951987683438301e-06,
"loss": 0.0799,
"step": 508
},
{
"epoch": 0.58,
"grad_norm": 1.9232301509219243,
"learning_rate": 3.933882844391866e-06,
"loss": 0.1145,
"step": 509
},
{
"epoch": 0.58,
"grad_norm": 1.5256990396302608,
"learning_rate": 3.9157926376413676e-06,
"loss": 0.1008,
"step": 510
},
{
"epoch": 0.58,
"grad_norm": 1.737700461488523,
"learning_rate": 3.897717311472141e-06,
"loss": 0.1239,
"step": 511
},
{
"epoch": 0.59,
"grad_norm": 1.36273898878739,
"learning_rate": 3.879657113965294e-06,
"loss": 0.0945,
"step": 512
},
{
"epoch": 0.59,
"grad_norm": 1.0718267007797795,
"learning_rate": 3.861612292994293e-06,
"loss": 0.0899,
"step": 513
},
{
"epoch": 0.59,
"grad_norm": 1.6888992492780912,
"learning_rate": 3.843583096221559e-06,
"loss": 0.0994,
"step": 514
},
{
"epoch": 0.59,
"grad_norm": 1.0582879934749914,
"learning_rate": 3.825569771095082e-06,
"loss": 0.1048,
"step": 515
},
{
"epoch": 0.59,
"grad_norm": 0.9288548490683142,
"learning_rate": 3.80757256484501e-06,
"loss": 0.0913,
"step": 516
},
{
"epoch": 0.59,
"grad_norm": 1.488206708121423,
"learning_rate": 3.789591724480266e-06,
"loss": 0.092,
"step": 517
},
{
"epoch": 0.59,
"grad_norm": 1.396811130524263,
"learning_rate": 3.7716274967851485e-06,
"loss": 0.0923,
"step": 518
},
{
"epoch": 0.59,
"grad_norm": 1.1865029532635987,
"learning_rate": 3.7536801283159523e-06,
"loss": 0.1021,
"step": 519
},
{
"epoch": 0.59,
"grad_norm": 1.1926573230493913,
"learning_rate": 3.7357498653975854e-06,
"loss": 0.1041,
"step": 520
},
{
"epoch": 0.6,
"grad_norm": 2.7369114629342763,
"learning_rate": 3.7178369541201785e-06,
"loss": 0.098,
"step": 521
},
{
"epoch": 0.6,
"grad_norm": 1.3911362868676322,
"learning_rate": 3.6999416403357158e-06,
"loss": 0.0876,
"step": 522
},
{
"epoch": 0.6,
"grad_norm": 1.3865651156084833,
"learning_rate": 3.682064169654663e-06,
"loss": 0.0948,
"step": 523
},
{
"epoch": 0.6,
"grad_norm": 1.5188940499590486,
"learning_rate": 3.6642047874425833e-06,
"loss": 0.1049,
"step": 524
},
{
"epoch": 0.6,
"grad_norm": 1.8578901647350845,
"learning_rate": 3.646363738816788e-06,
"loss": 0.1042,
"step": 525
},
{
"epoch": 0.6,
"grad_norm": 0.9682155137321418,
"learning_rate": 3.6285412686429567e-06,
"loss": 0.0926,
"step": 526
},
{
"epoch": 0.6,
"grad_norm": 1.5329919136198829,
"learning_rate": 3.6107376215317813e-06,
"loss": 0.0918,
"step": 527
},
{
"epoch": 0.6,
"grad_norm": 1.3283757375600345,
"learning_rate": 3.592953041835615e-06,
"loss": 0.0979,
"step": 528
},
{
"epoch": 0.6,
"grad_norm": 1.7426984409997923,
"learning_rate": 3.5751877736451123e-06,
"loss": 0.0857,
"step": 529
},
{
"epoch": 0.61,
"grad_norm": 1.8343173094126104,
"learning_rate": 3.557442060785876e-06,
"loss": 0.1074,
"step": 530
},
{
"epoch": 0.61,
"grad_norm": 1.3960147690089932,
"learning_rate": 3.539716146815122e-06,
"loss": 0.1034,
"step": 531
},
{
"epoch": 0.61,
"grad_norm": 1.952266082891725,
"learning_rate": 3.5220102750183218e-06,
"loss": 0.0934,
"step": 532
},
{
"epoch": 0.61,
"grad_norm": 1.1119714305902637,
"learning_rate": 3.504324688405878e-06,
"loss": 0.0911,
"step": 533
},
{
"epoch": 0.61,
"grad_norm": 1.273976446680227,
"learning_rate": 3.4866596297097776e-06,
"loss": 0.0886,
"step": 534
},
{
"epoch": 0.61,
"grad_norm": 1.0318179315692617,
"learning_rate": 3.469015341380266e-06,
"loss": 0.0809,
"step": 535
},
{
"epoch": 0.61,
"grad_norm": 1.0847335063342978,
"learning_rate": 3.451392065582521e-06,
"loss": 0.0707,
"step": 536
},
{
"epoch": 0.61,
"grad_norm": 1.319209798209738,
"learning_rate": 3.433790044193323e-06,
"loss": 0.0874,
"step": 537
},
{
"epoch": 0.61,
"grad_norm": 1.2541175082857459,
"learning_rate": 3.4162095187977395e-06,
"loss": 0.0764,
"step": 538
},
{
"epoch": 0.62,
"grad_norm": 1.0523487335558295,
"learning_rate": 3.398650730685813e-06,
"loss": 0.0864,
"step": 539
},
{
"epoch": 0.62,
"grad_norm": 1.7334821654184849,
"learning_rate": 3.3811139208492384e-06,
"loss": 0.0868,
"step": 540
},
{
"epoch": 0.62,
"grad_norm": 1.5858715871111615,
"learning_rate": 3.3635993299780667e-06,
"loss": 0.0993,
"step": 541
},
{
"epoch": 0.62,
"grad_norm": 2.295933466624895,
"learning_rate": 3.3461071984573983e-06,
"loss": 0.1178,
"step": 542
},
{
"epoch": 0.62,
"grad_norm": 1.5080133595230978,
"learning_rate": 3.3286377663640753e-06,
"loss": 0.1269,
"step": 543
},
{
"epoch": 0.62,
"grad_norm": 1.7043750113015401,
"learning_rate": 3.311191273463401e-06,
"loss": 0.0982,
"step": 544
},
{
"epoch": 0.62,
"grad_norm": 1.3460953104218514,
"learning_rate": 3.29376795920584e-06,
"loss": 0.0918,
"step": 545
},
{
"epoch": 0.62,
"grad_norm": 1.424318713178504,
"learning_rate": 3.276368062723729e-06,
"loss": 0.0935,
"step": 546
},
{
"epoch": 0.63,
"grad_norm": 1.6851360859318019,
"learning_rate": 3.258991822828007e-06,
"loss": 0.0879,
"step": 547
},
{
"epoch": 0.63,
"grad_norm": 1.036620519893431,
"learning_rate": 3.241639478004922e-06,
"loss": 0.0961,
"step": 548
},
{
"epoch": 0.63,
"grad_norm": 1.320247516635966,
"learning_rate": 3.224311266412773e-06,
"loss": 0.0901,
"step": 549
},
{
"epoch": 0.63,
"grad_norm": 1.3883416513760776,
"learning_rate": 3.2070074258786287e-06,
"loss": 0.0819,
"step": 550
},
{
"epoch": 0.63,
"grad_norm": 1.1731384518668249,
"learning_rate": 3.1897281938950693e-06,
"loss": 0.0688,
"step": 551
},
{
"epoch": 0.63,
"grad_norm": 1.4524331811565157,
"learning_rate": 3.1724738076169314e-06,
"loss": 0.102,
"step": 552
},
{
"epoch": 0.63,
"grad_norm": 1.3664712580781437,
"learning_rate": 3.1552445038580415e-06,
"loss": 0.1001,
"step": 553
},
{
"epoch": 0.63,
"grad_norm": 1.9253928104639182,
"learning_rate": 3.138040519087975e-06,
"loss": 0.0782,
"step": 554
},
{
"epoch": 0.63,
"grad_norm": 1.7784718748270076,
"learning_rate": 3.1208620894288105e-06,
"loss": 0.1103,
"step": 555
},
{
"epoch": 0.64,
"grad_norm": 1.9332032343565215,
"learning_rate": 3.1037094506518806e-06,
"loss": 0.1249,
"step": 556
},
{
"epoch": 0.64,
"grad_norm": 1.2481471142608356,
"learning_rate": 3.0865828381745515e-06,
"loss": 0.0929,
"step": 557
},
{
"epoch": 0.64,
"grad_norm": 1.7804865693694558,
"learning_rate": 3.069482487056974e-06,
"loss": 0.1004,
"step": 558
},
{
"epoch": 0.64,
"grad_norm": 0.6441870832230248,
"learning_rate": 3.0524086319988635e-06,
"loss": 0.0635,
"step": 559
},
{
"epoch": 0.64,
"grad_norm": 1.4112352863599653,
"learning_rate": 3.0353615073362937e-06,
"loss": 0.0852,
"step": 560
},
{
"epoch": 0.64,
"grad_norm": 2.679818456248062,
"learning_rate": 3.0183413470384537e-06,
"loss": 0.1063,
"step": 561
},
{
"epoch": 0.64,
"grad_norm": 1.0605912128021044,
"learning_rate": 3.001348384704461e-06,
"loss": 0.0903,
"step": 562
},
{
"epoch": 0.64,
"grad_norm": 1.6370195416483042,
"learning_rate": 2.98438285356014e-06,
"loss": 0.0886,
"step": 563
},
{
"epoch": 0.64,
"grad_norm": 1.2563461066733568,
"learning_rate": 2.967444986454825e-06,
"loss": 0.0855,
"step": 564
},
{
"epoch": 0.65,
"grad_norm": 1.3138148878284275,
"learning_rate": 2.95053501585817e-06,
"loss": 0.0954,
"step": 565
},
{
"epoch": 0.65,
"grad_norm": 1.102694770789112,
"learning_rate": 2.933653173856953e-06,
"loss": 0.0772,
"step": 566
},
{
"epoch": 0.65,
"grad_norm": 1.0199400816615287,
"learning_rate": 2.9167996921518848e-06,
"loss": 0.0925,
"step": 567
},
{
"epoch": 0.65,
"grad_norm": 0.8547500043718359,
"learning_rate": 2.899974802054446e-06,
"loss": 0.0944,
"step": 568
},
{
"epoch": 0.65,
"grad_norm": 1.3931067354085072,
"learning_rate": 2.8831787344836926e-06,
"loss": 0.0993,
"step": 569
},
{
"epoch": 0.65,
"grad_norm": 2.2303435090125667,
"learning_rate": 2.8664117199631023e-06,
"loss": 0.1219,
"step": 570
},
{
"epoch": 0.65,
"grad_norm": 0.9215955759772718,
"learning_rate": 2.8496739886173994e-06,
"loss": 0.0861,
"step": 571
},
{
"epoch": 0.65,
"grad_norm": 0.6413872734862562,
"learning_rate": 2.8329657701694015e-06,
"loss": 0.0893,
"step": 572
},
{
"epoch": 0.65,
"grad_norm": 1.193727034493147,
"learning_rate": 2.816287293936868e-06,
"loss": 0.0795,
"step": 573
},
{
"epoch": 0.66,
"grad_norm": 1.2289155652249402,
"learning_rate": 2.799638788829354e-06,
"loss": 0.0737,
"step": 574
},
{
"epoch": 0.66,
"grad_norm": 0.830345925001274,
"learning_rate": 2.7830204833450577e-06,
"loss": 0.0831,
"step": 575
},
{
"epoch": 0.66,
"grad_norm": 1.068546819825466,
"learning_rate": 2.766432605567699e-06,
"loss": 0.0668,
"step": 576
},
{
"epoch": 0.66,
"grad_norm": 0.9453172607728504,
"learning_rate": 2.7498753831633773e-06,
"loss": 0.0901,
"step": 577
},
{
"epoch": 0.66,
"grad_norm": 2.586371621963873,
"learning_rate": 2.733349043377457e-06,
"loss": 0.1053,
"step": 578
},
{
"epoch": 0.66,
"grad_norm": 1.5289078110540708,
"learning_rate": 2.716853813031435e-06,
"loss": 0.1089,
"step": 579
},
{
"epoch": 0.66,
"grad_norm": 1.633488144896007,
"learning_rate": 2.7003899185198445e-06,
"loss": 0.1071,
"step": 580
},
{
"epoch": 0.66,
"grad_norm": 1.4665200970556465,
"learning_rate": 2.6839575858071367e-06,
"loss": 0.0867,
"step": 581
},
{
"epoch": 0.67,
"grad_norm": 1.141767875960879,
"learning_rate": 2.6675570404245783e-06,
"loss": 0.0814,
"step": 582
},
{
"epoch": 0.67,
"grad_norm": 1.5201605416934159,
"learning_rate": 2.651188507467161e-06,
"loss": 0.0977,
"step": 583
},
{
"epoch": 0.67,
"grad_norm": 1.4871770110745552,
"learning_rate": 2.634852211590516e-06,
"loss": 0.0781,
"step": 584
},
{
"epoch": 0.67,
"grad_norm": 1.3369457391490027,
"learning_rate": 2.6185483770078173e-06,
"loss": 0.0733,
"step": 585
},
{
"epoch": 0.67,
"grad_norm": 1.3086109100448613,
"learning_rate": 2.602277227486719e-06,
"loss": 0.1085,
"step": 586
},
{
"epoch": 0.67,
"grad_norm": 1.3624853079150907,
"learning_rate": 2.5860389863462765e-06,
"loss": 0.0865,
"step": 587
},
{
"epoch": 0.67,
"grad_norm": 0.887651265201604,
"learning_rate": 2.569833876453879e-06,
"loss": 0.075,
"step": 588
},
{
"epoch": 0.67,
"grad_norm": 1.6592764745581379,
"learning_rate": 2.553662120222199e-06,
"loss": 0.0772,
"step": 589
},
{
"epoch": 0.67,
"grad_norm": 1.145073711872565,
"learning_rate": 2.537523939606133e-06,
"loss": 0.0831,
"step": 590
},
{
"epoch": 0.68,
"grad_norm": 2.0550075999705046,
"learning_rate": 2.5214195560997546e-06,
"loss": 0.089,
"step": 591
},
{
"epoch": 0.68,
"grad_norm": 1.316393420334233,
"learning_rate": 2.505349190733282e-06,
"loss": 0.0991,
"step": 592
},
{
"epoch": 0.68,
"grad_norm": 1.0938355449466903,
"learning_rate": 2.489313064070037e-06,
"loss": 0.0888,
"step": 593
},
{
"epoch": 0.68,
"grad_norm": 0.9546003971132232,
"learning_rate": 2.4733113962034234e-06,
"loss": 0.0863,
"step": 594
},
{
"epoch": 0.68,
"grad_norm": 1.274927581713751,
"learning_rate": 2.457344406753899e-06,
"loss": 0.0866,
"step": 595
},
{
"epoch": 0.68,
"grad_norm": 1.5245485802819234,
"learning_rate": 2.441412314865965e-06,
"loss": 0.0941,
"step": 596
},
{
"epoch": 0.68,
"grad_norm": 1.3490782992266146,
"learning_rate": 2.4255153392051655e-06,
"loss": 0.0825,
"step": 597
},
{
"epoch": 0.68,
"grad_norm": 1.973182972399078,
"learning_rate": 2.4096536979550716e-06,
"loss": 0.0931,
"step": 598
},
{
"epoch": 0.68,
"grad_norm": 1.2896193150614899,
"learning_rate": 2.3938276088143003e-06,
"loss": 0.0848,
"step": 599
},
{
"epoch": 0.69,
"grad_norm": 1.0831644395119333,
"learning_rate": 2.3780372889935223e-06,
"loss": 0.0768,
"step": 600
},
{
"epoch": 0.69,
"grad_norm": 0.965364721243873,
"learning_rate": 2.362282955212473e-06,
"loss": 0.0818,
"step": 601
},
{
"epoch": 0.69,
"grad_norm": 1.0958775941489052,
"learning_rate": 2.346564823696994e-06,
"loss": 0.0853,
"step": 602
},
{
"epoch": 0.69,
"grad_norm": 1.2297992304308838,
"learning_rate": 2.330883110176049e-06,
"loss": 0.0843,
"step": 603
},
{
"epoch": 0.69,
"grad_norm": 1.2258534468894116,
"learning_rate": 2.31523802987877e-06,
"loss": 0.0833,
"step": 604
},
{
"epoch": 0.69,
"grad_norm": 1.2676625578470901,
"learning_rate": 2.29962979753151e-06,
"loss": 0.0885,
"step": 605
},
{
"epoch": 0.69,
"grad_norm": 1.8759537272989633,
"learning_rate": 2.284058627354887e-06,
"loss": 0.1015,
"step": 606
},
{
"epoch": 0.69,
"grad_norm": 1.5621051081143618,
"learning_rate": 2.2685247330608417e-06,
"loss": 0.0877,
"step": 607
},
{
"epoch": 0.69,
"grad_norm": 2.070142424838425,
"learning_rate": 2.253028327849716e-06,
"loss": 0.0975,
"step": 608
},
{
"epoch": 0.7,
"grad_norm": 1.2968662084013451,
"learning_rate": 2.2375696244073126e-06,
"loss": 0.1027,
"step": 609
},
{
"epoch": 0.7,
"grad_norm": 1.1236440141208273,
"learning_rate": 2.2221488349019903e-06,
"loss": 0.084,
"step": 610
},
{
"epoch": 0.7,
"grad_norm": 1.2823682175649511,
"learning_rate": 2.2067661709817384e-06,
"loss": 0.0632,
"step": 611
},
{
"epoch": 0.7,
"grad_norm": 1.2915537305568103,
"learning_rate": 2.1914218437712824e-06,
"loss": 0.098,
"step": 612
},
{
"epoch": 0.7,
"grad_norm": 0.7053759254724352,
"learning_rate": 2.1761160638691843e-06,
"loss": 0.0902,
"step": 613
},
{
"epoch": 0.7,
"grad_norm": 1.1559649931446625,
"learning_rate": 2.1608490413449428e-06,
"loss": 0.0765,
"step": 614
},
{
"epoch": 0.7,
"grad_norm": 1.9421156314939874,
"learning_rate": 2.145620985736125e-06,
"loss": 0.0868,
"step": 615
},
{
"epoch": 0.7,
"grad_norm": 1.0296488915584479,
"learning_rate": 2.130432106045477e-06,
"loss": 0.0916,
"step": 616
},
{
"epoch": 0.71,
"grad_norm": 1.1090554359149103,
"learning_rate": 2.1152826107380652e-06,
"loss": 0.0916,
"step": 617
},
{
"epoch": 0.71,
"grad_norm": 1.0976260165303318,
"learning_rate": 2.1001727077384133e-06,
"loss": 0.1014,
"step": 618
},
{
"epoch": 0.71,
"grad_norm": 1.1628149430896515,
"learning_rate": 2.0851026044276405e-06,
"loss": 0.0848,
"step": 619
},
{
"epoch": 0.71,
"grad_norm": 1.2655603395728712,
"learning_rate": 2.0700725076406235e-06,
"loss": 0.0943,
"step": 620
},
{
"epoch": 0.71,
"grad_norm": 0.6340092128585122,
"learning_rate": 2.05508262366316e-06,
"loss": 0.0789,
"step": 621
},
{
"epoch": 0.71,
"grad_norm": 1.3978109379704229,
"learning_rate": 2.0401331582291258e-06,
"loss": 0.095,
"step": 622
},
{
"epoch": 0.71,
"grad_norm": 0.9953586497854784,
"learning_rate": 2.025224316517663e-06,
"loss": 0.0683,
"step": 623
},
{
"epoch": 0.71,
"grad_norm": 1.1514532520993712,
"learning_rate": 2.0103563031503613e-06,
"loss": 0.0813,
"step": 624
},
{
"epoch": 0.71,
"grad_norm": 1.0382979576594575,
"learning_rate": 1.9955293221884403e-06,
"loss": 0.0881,
"step": 625
},
{
"epoch": 0.72,
"grad_norm": 1.1852249996471935,
"learning_rate": 1.9807435771299644e-06,
"loss": 0.0782,
"step": 626
},
{
"epoch": 0.72,
"grad_norm": 2.788558970046365,
"learning_rate": 1.9659992709070346e-06,
"loss": 0.0869,
"step": 627
},
{
"epoch": 0.72,
"grad_norm": 1.3959129722582788,
"learning_rate": 1.9512966058830125e-06,
"loss": 0.1102,
"step": 628
},
{
"epoch": 0.72,
"grad_norm": 1.8621826836279993,
"learning_rate": 1.9366357838497423e-06,
"loss": 0.1095,
"step": 629
},
{
"epoch": 0.72,
"grad_norm": 1.437311145687304,
"learning_rate": 1.9220170060247783e-06,
"loss": 0.0796,
"step": 630
},
{
"epoch": 0.72,
"grad_norm": 1.4068547957795265,
"learning_rate": 1.9074404730486264e-06,
"loss": 0.0857,
"step": 631
},
{
"epoch": 0.72,
"grad_norm": 1.0094769073015202,
"learning_rate": 1.8929063849819846e-06,
"loss": 0.1138,
"step": 632
},
{
"epoch": 0.72,
"grad_norm": 1.3064844865279543,
"learning_rate": 1.8784149413030006e-06,
"loss": 0.0844,
"step": 633
},
{
"epoch": 0.72,
"grad_norm": 1.2958603015342525,
"learning_rate": 1.8639663409045405e-06,
"loss": 0.081,
"step": 634
},
{
"epoch": 0.73,
"grad_norm": 1.1833983355282296,
"learning_rate": 1.8495607820914451e-06,
"loss": 0.1007,
"step": 635
},
{
"epoch": 0.73,
"grad_norm": 1.142884197140577,
"learning_rate": 1.835198462577822e-06,
"loss": 0.0764,
"step": 636
},
{
"epoch": 0.73,
"grad_norm": 1.4586827243798255,
"learning_rate": 1.820879579484325e-06,
"loss": 0.0958,
"step": 637
},
{
"epoch": 0.73,
"grad_norm": 2.5290567861521294,
"learning_rate": 1.8066043293354447e-06,
"loss": 0.1029,
"step": 638
},
{
"epoch": 0.73,
"grad_norm": 2.534517648492461,
"learning_rate": 1.7923729080568242e-06,
"loss": 0.1011,
"step": 639
},
{
"epoch": 0.73,
"grad_norm": 1.513440785236101,
"learning_rate": 1.778185510972556e-06,
"loss": 0.0819,
"step": 640
},
{
"epoch": 0.73,
"grad_norm": 1.4095512013043103,
"learning_rate": 1.7640423328025063e-06,
"loss": 0.0867,
"step": 641
},
{
"epoch": 0.73,
"grad_norm": 1.0547200042580467,
"learning_rate": 1.7499435676596482e-06,
"loss": 0.0692,
"step": 642
},
{
"epoch": 0.73,
"grad_norm": 1.2896781158690789,
"learning_rate": 1.7358894090473928e-06,
"loss": 0.0826,
"step": 643
},
{
"epoch": 0.74,
"grad_norm": 2.0540774389675582,
"learning_rate": 1.721880049856927e-06,
"loss": 0.1067,
"step": 644
},
{
"epoch": 0.74,
"grad_norm": 1.249361727592556,
"learning_rate": 1.7079156823645805e-06,
"loss": 0.0964,
"step": 645
},
{
"epoch": 0.74,
"grad_norm": 1.3933011021656423,
"learning_rate": 1.6939964982291713e-06,
"loss": 0.0828,
"step": 646
},
{
"epoch": 0.74,
"grad_norm": 1.1240586077405685,
"learning_rate": 1.6801226884893895e-06,
"loss": 0.0992,
"step": 647
},
{
"epoch": 0.74,
"grad_norm": 0.8382368133177154,
"learning_rate": 1.6662944435611622e-06,
"loss": 0.0695,
"step": 648
},
{
"epoch": 0.74,
"grad_norm": 1.7300003312511045,
"learning_rate": 1.652511953235051e-06,
"loss": 0.1086,
"step": 649
},
{
"epoch": 0.74,
"grad_norm": 1.2259941514107384,
"learning_rate": 1.6387754066736422e-06,
"loss": 0.0795,
"step": 650
},
{
"epoch": 0.74,
"grad_norm": 1.3255066128333746,
"learning_rate": 1.6250849924089485e-06,
"loss": 0.0893,
"step": 651
},
{
"epoch": 0.75,
"grad_norm": 0.8199741391521307,
"learning_rate": 1.611440898339824e-06,
"loss": 0.0606,
"step": 652
},
{
"epoch": 0.75,
"grad_norm": 0.9774400053985267,
"learning_rate": 1.5978433117293884e-06,
"loss": 0.0878,
"step": 653
},
{
"epoch": 0.75,
"grad_norm": 1.5814733538185348,
"learning_rate": 1.5842924192024489e-06,
"loss": 0.0922,
"step": 654
},
{
"epoch": 0.75,
"grad_norm": 1.0642541089350566,
"learning_rate": 1.5707884067429474e-06,
"loss": 0.0868,
"step": 655
},
{
"epoch": 0.75,
"grad_norm": 1.4337445102024018,
"learning_rate": 1.5573314596914046e-06,
"loss": 0.0849,
"step": 656
},
{
"epoch": 0.75,
"grad_norm": 0.882421180731485,
"learning_rate": 1.5439217627423696e-06,
"loss": 0.0663,
"step": 657
},
{
"epoch": 0.75,
"grad_norm": 1.5004750860759757,
"learning_rate": 1.5305594999418989e-06,
"loss": 0.0865,
"step": 658
},
{
"epoch": 0.75,
"grad_norm": 1.412572230637191,
"learning_rate": 1.5172448546850166e-06,
"loss": 0.0879,
"step": 659
},
{
"epoch": 0.75,
"grad_norm": 1.727267767004979,
"learning_rate": 1.5039780097132046e-06,
"loss": 0.087,
"step": 660
},
{
"epoch": 0.76,
"grad_norm": 1.6199501511796164,
"learning_rate": 1.4907591471118942e-06,
"loss": 0.0633,
"step": 661
},
{
"epoch": 0.76,
"grad_norm": 1.196467314248605,
"learning_rate": 1.4775884483079666e-06,
"loss": 0.0905,
"step": 662
},
{
"epoch": 0.76,
"grad_norm": 1.6865314637877673,
"learning_rate": 1.4644660940672628e-06,
"loss": 0.095,
"step": 663
},
{
"epoch": 0.76,
"grad_norm": 0.9948746939122228,
"learning_rate": 1.4513922644920985e-06,
"loss": 0.0672,
"step": 664
},
{
"epoch": 0.76,
"grad_norm": 1.3057924965780205,
"learning_rate": 1.4383671390187965e-06,
"loss": 0.0918,
"step": 665
},
{
"epoch": 0.76,
"grad_norm": 1.3739151217740653,
"learning_rate": 1.4253908964152252e-06,
"loss": 0.0784,
"step": 666
},
{
"epoch": 0.76,
"grad_norm": 1.3048795820201535,
"learning_rate": 1.4124637147783431e-06,
"loss": 0.099,
"step": 667
},
{
"epoch": 0.76,
"grad_norm": 0.8009570850923019,
"learning_rate": 1.399585771531755e-06,
"loss": 0.0735,
"step": 668
},
{
"epoch": 0.76,
"grad_norm": 0.9489677584418744,
"learning_rate": 1.386757243423273e-06,
"loss": 0.0867,
"step": 669
},
{
"epoch": 0.77,
"grad_norm": 1.2187793313789825,
"learning_rate": 1.3739783065224944e-06,
"loss": 0.0753,
"step": 670
},
{
"epoch": 0.77,
"grad_norm": 1.2771944302953684,
"learning_rate": 1.3612491362183887e-06,
"loss": 0.108,
"step": 671
},
{
"epoch": 0.77,
"grad_norm": 1.114716915199913,
"learning_rate": 1.3485699072168812e-06,
"loss": 0.0946,
"step": 672
},
{
"epoch": 0.77,
"grad_norm": 2.104197818175097,
"learning_rate": 1.3359407935384644e-06,
"loss": 0.0828,
"step": 673
},
{
"epoch": 0.77,
"grad_norm": 1.4388633932295818,
"learning_rate": 1.3233619685158056e-06,
"loss": 0.0751,
"step": 674
},
{
"epoch": 0.77,
"grad_norm": 1.1502743745660746,
"learning_rate": 1.3108336047913633e-06,
"loss": 0.0823,
"step": 675
},
{
"epoch": 0.77,
"grad_norm": 1.302715374352943,
"learning_rate": 1.2983558743150287e-06,
"loss": 0.0747,
"step": 676
},
{
"epoch": 0.77,
"grad_norm": 1.2518301338351348,
"learning_rate": 1.2859289483417558e-06,
"loss": 0.0764,
"step": 677
},
{
"epoch": 0.77,
"grad_norm": 1.250777735934068,
"learning_rate": 1.2735529974292122e-06,
"loss": 0.1011,
"step": 678
},
{
"epoch": 0.78,
"grad_norm": 1.1136692815348772,
"learning_rate": 1.2612281914354452e-06,
"loss": 0.0955,
"step": 679
},
{
"epoch": 0.78,
"grad_norm": 1.538562671017371,
"learning_rate": 1.2489546995165452e-06,
"loss": 0.097,
"step": 680
},
{
"epoch": 0.78,
"grad_norm": 1.1759591419244437,
"learning_rate": 1.2367326901243215e-06,
"loss": 0.0805,
"step": 681
},
{
"epoch": 0.78,
"grad_norm": 1.313266929061223,
"learning_rate": 1.2245623310039973e-06,
"loss": 0.0992,
"step": 682
},
{
"epoch": 0.78,
"grad_norm": 0.9182273134449173,
"learning_rate": 1.2124437891918995e-06,
"loss": 0.0708,
"step": 683
},
{
"epoch": 0.78,
"grad_norm": 0.906205039600781,
"learning_rate": 1.200377231013176e-06,
"loss": 0.0784,
"step": 684
},
{
"epoch": 0.78,
"grad_norm": 1.1516839769243379,
"learning_rate": 1.1883628220795008e-06,
"loss": 0.0928,
"step": 685
},
{
"epoch": 0.78,
"grad_norm": 1.2813185479825682,
"learning_rate": 1.1764007272868116e-06,
"loss": 0.0726,
"step": 686
},
{
"epoch": 0.79,
"grad_norm": 0.9213558082977095,
"learning_rate": 1.1644911108130436e-06,
"loss": 0.097,
"step": 687
},
{
"epoch": 0.79,
"grad_norm": 1.0500113768635098,
"learning_rate": 1.152634136115871e-06,
"loss": 0.0733,
"step": 688
},
{
"epoch": 0.79,
"grad_norm": 1.4068990440454445,
"learning_rate": 1.1408299659304684e-06,
"loss": 0.1059,
"step": 689
},
{
"epoch": 0.79,
"grad_norm": 1.3695680323420723,
"learning_rate": 1.1290787622672806e-06,
"loss": 0.08,
"step": 690
},
{
"epoch": 0.79,
"grad_norm": 1.2133479596639074,
"learning_rate": 1.1173806864097885e-06,
"loss": 0.1077,
"step": 691
},
{
"epoch": 0.79,
"grad_norm": 0.98289755771727,
"learning_rate": 1.1057358989123075e-06,
"loss": 0.0828,
"step": 692
},
{
"epoch": 0.79,
"grad_norm": 0.9289925027488103,
"learning_rate": 1.0941445595977768e-06,
"loss": 0.0715,
"step": 693
},
{
"epoch": 0.79,
"grad_norm": 1.2183961669443226,
"learning_rate": 1.0826068275555652e-06,
"loss": 0.0759,
"step": 694
},
{
"epoch": 0.79,
"grad_norm": 0.7469515955726049,
"learning_rate": 1.0711228611392937e-06,
"loss": 0.0601,
"step": 695
},
{
"epoch": 0.8,
"grad_norm": 0.8905767402220537,
"learning_rate": 1.0596928179646544e-06,
"loss": 0.0673,
"step": 696
},
{
"epoch": 0.8,
"grad_norm": 1.377778311548707,
"learning_rate": 1.048316854907252e-06,
"loss": 0.0906,
"step": 697
},
{
"epoch": 0.8,
"grad_norm": 0.8788045612493512,
"learning_rate": 1.0369951281004514e-06,
"loss": 0.0703,
"step": 698
},
{
"epoch": 0.8,
"grad_norm": 1.33875040003829,
"learning_rate": 1.0257277929332332e-06,
"loss": 0.0969,
"step": 699
},
{
"epoch": 0.8,
"grad_norm": 1.081307813191564,
"learning_rate": 1.0145150040480606e-06,
"loss": 0.11,
"step": 700
},
{
"epoch": 0.8,
"grad_norm": 0.8860649979755456,
"learning_rate": 1.0033569153387563e-06,
"loss": 0.0852,
"step": 701
},
{
"epoch": 0.8,
"grad_norm": 1.0736324612290742,
"learning_rate": 9.9225367994839e-07,
"loss": 0.0934,
"step": 702
},
{
"epoch": 0.8,
"grad_norm": 0.9872829827731274,
"learning_rate": 9.812054502671835e-07,
"loss": 0.0799,
"step": 703
},
{
"epoch": 0.8,
"grad_norm": 0.9282388618215119,
"learning_rate": 9.702123779304074e-07,
"loss": 0.1006,
"step": 704
},
{
"epoch": 0.81,
"grad_norm": 0.9435099327920515,
"learning_rate": 9.592746138163096e-07,
"loss": 0.082,
"step": 705
},
{
"epoch": 0.81,
"grad_norm": 0.8088817691415676,
"learning_rate": 9.48392308044041e-07,
"loss": 0.0873,
"step": 706
},
{
"epoch": 0.81,
"grad_norm": 1.5352041214674343,
"learning_rate": 9.375656099715935e-07,
"loss": 0.0996,
"step": 707
},
{
"epoch": 0.81,
"grad_norm": 0.9852893987107155,
"learning_rate": 9.267946681937545e-07,
"loss": 0.0818,
"step": 708
},
{
"epoch": 0.81,
"grad_norm": 1.5229064672434325,
"learning_rate": 9.16079630540061e-07,
"loss": 0.1036,
"step": 709
},
{
"epoch": 0.81,
"grad_norm": 1.2208544307050118,
"learning_rate": 9.054206440727764e-07,
"loss": 0.1008,
"step": 710
},
{
"epoch": 0.81,
"grad_norm": 1.0719293706407658,
"learning_rate": 8.948178550848702e-07,
"loss": 0.1068,
"step": 711
},
{
"epoch": 0.81,
"grad_norm": 1.6953289616059486,
"learning_rate": 8.84271409098012e-07,
"loss": 0.0933,
"step": 712
},
{
"epoch": 0.81,
"grad_norm": 0.7642497146531543,
"learning_rate": 8.737814508605674e-07,
"loss": 0.0801,
"step": 713
},
{
"epoch": 0.82,
"grad_norm": 1.4547072421888465,
"learning_rate": 8.63348124345621e-07,
"loss": 0.1064,
"step": 714
},
{
"epoch": 0.82,
"grad_norm": 0.9751721208506822,
"learning_rate": 8.529715727489912e-07,
"loss": 0.095,
"step": 715
},
{
"epoch": 0.82,
"grad_norm": 1.0346592348396086,
"learning_rate": 8.426519384872733e-07,
"loss": 0.052,
"step": 716
},
{
"epoch": 0.82,
"grad_norm": 1.149521366909892,
"learning_rate": 8.323893631958807e-07,
"loss": 0.0937,
"step": 717
},
{
"epoch": 0.82,
"grad_norm": 0.9997697767501313,
"learning_rate": 8.221839877270966e-07,
"loss": 0.0712,
"step": 718
},
{
"epoch": 0.82,
"grad_norm": 1.3762790723922584,
"learning_rate": 8.120359521481502e-07,
"loss": 0.0939,
"step": 719
},
{
"epoch": 0.82,
"grad_norm": 1.1717426287446866,
"learning_rate": 8.019453957392865e-07,
"loss": 0.0991,
"step": 720
},
{
"epoch": 0.82,
"grad_norm": 1.5011600992405558,
"learning_rate": 7.919124569918602e-07,
"loss": 0.0705,
"step": 721
},
{
"epoch": 0.83,
"grad_norm": 1.044248767738908,
"learning_rate": 7.81937273606429e-07,
"loss": 0.0829,
"step": 722
},
{
"epoch": 0.83,
"grad_norm": 1.193338514115385,
"learning_rate": 7.720199824908692e-07,
"loss": 0.085,
"step": 723
},
{
"epoch": 0.83,
"grad_norm": 1.932214944981213,
"learning_rate": 7.621607197584963e-07,
"loss": 0.104,
"step": 724
},
{
"epoch": 0.83,
"grad_norm": 1.1568238510064872,
"learning_rate": 7.523596207261907e-07,
"loss": 0.0808,
"step": 725
},
{
"epoch": 0.83,
"grad_norm": 1.0930033851467154,
"learning_rate": 7.426168199125466e-07,
"loss": 0.0903,
"step": 726
},
{
"epoch": 0.83,
"grad_norm": 0.9226197490862279,
"learning_rate": 7.329324510360269e-07,
"loss": 0.0862,
"step": 727
},
{
"epoch": 0.83,
"grad_norm": 0.9031890089209195,
"learning_rate": 7.233066470131205e-07,
"loss": 0.0736,
"step": 728
},
{
"epoch": 0.83,
"grad_norm": 0.9118942141479596,
"learning_rate": 7.13739539956525e-07,
"loss": 0.0766,
"step": 729
},
{
"epoch": 0.83,
"grad_norm": 1.538007969459405,
"learning_rate": 7.042312611733327e-07,
"loss": 0.079,
"step": 730
},
{
"epoch": 0.84,
"grad_norm": 0.8504785007825396,
"learning_rate": 6.947819411632223e-07,
"loss": 0.0712,
"step": 731
},
{
"epoch": 0.84,
"grad_norm": 1.3292424584047728,
"learning_rate": 6.853917096166768e-07,
"loss": 0.1028,
"step": 732
},
{
"epoch": 0.84,
"grad_norm": 1.8593939850427263,
"learning_rate": 6.760606954131966e-07,
"loss": 0.1032,
"step": 733
},
{
"epoch": 0.84,
"grad_norm": 0.8862139675569493,
"learning_rate": 6.667890266195321e-07,
"loss": 0.0605,
"step": 734
},
{
"epoch": 0.84,
"grad_norm": 2.0966949364511906,
"learning_rate": 6.575768304879293e-07,
"loss": 0.1037,
"step": 735
},
{
"epoch": 0.84,
"grad_norm": 1.705445088627103,
"learning_rate": 6.484242334543806e-07,
"loss": 0.0803,
"step": 736
},
{
"epoch": 0.84,
"grad_norm": 0.6749697226888411,
"learning_rate": 6.393313611368901e-07,
"loss": 0.065,
"step": 737
},
{
"epoch": 0.84,
"grad_norm": 0.9923237133863795,
"learning_rate": 6.302983383337474e-07,
"loss": 0.0816,
"step": 738
},
{
"epoch": 0.84,
"grad_norm": 1.4666632314163908,
"learning_rate": 6.213252890218163e-07,
"loss": 0.0904,
"step": 739
},
{
"epoch": 0.85,
"grad_norm": 1.9654914667201568,
"learning_rate": 6.124123363548374e-07,
"loss": 0.1037,
"step": 740
},
{
"epoch": 0.85,
"grad_norm": 0.9823530441556243,
"learning_rate": 6.035596026617291e-07,
"loss": 0.0821,
"step": 741
},
{
"epoch": 0.85,
"grad_norm": 1.2947434558738595,
"learning_rate": 5.947672094449169e-07,
"loss": 0.0734,
"step": 742
},
{
"epoch": 0.85,
"grad_norm": 1.3295655580056323,
"learning_rate": 5.860352773786632e-07,
"loss": 0.0956,
"step": 743
},
{
"epoch": 0.85,
"grad_norm": 1.0700520145965227,
"learning_rate": 5.77363926307406e-07,
"loss": 0.0883,
"step": 744
},
{
"epoch": 0.85,
"grad_norm": 1.2221120631868163,
"learning_rate": 5.687532752441232e-07,
"loss": 0.0916,
"step": 745
},
{
"epoch": 0.85,
"grad_norm": 1.2132238049137432,
"learning_rate": 5.602034423686903e-07,
"loss": 0.0914,
"step": 746
},
{
"epoch": 0.85,
"grad_norm": 0.966731842559717,
"learning_rate": 5.517145450262639e-07,
"loss": 0.0757,
"step": 747
},
{
"epoch": 0.85,
"grad_norm": 1.2637487646994567,
"learning_rate": 5.432866997256708e-07,
"loss": 0.082,
"step": 748
},
{
"epoch": 0.86,
"grad_norm": 1.1793858464928524,
"learning_rate": 5.349200221378076e-07,
"loss": 0.0766,
"step": 749
},
{
"epoch": 0.86,
"grad_norm": 0.6875498904508052,
"learning_rate": 5.266146270940509e-07,
"loss": 0.0628,
"step": 750
},
{
"epoch": 0.86,
"grad_norm": 0.9079148678981082,
"learning_rate": 5.183706285846873e-07,
"loss": 0.0967,
"step": 751
},
{
"epoch": 0.86,
"grad_norm": 1.0095386679421636,
"learning_rate": 5.101881397573417e-07,
"loss": 0.0818,
"step": 752
},
{
"epoch": 0.86,
"grad_norm": 1.2870739096748065,
"learning_rate": 5.020672729154308e-07,
"loss": 0.0951,
"step": 753
},
{
"epoch": 0.86,
"grad_norm": 0.9850619846545391,
"learning_rate": 4.940081395166174e-07,
"loss": 0.0499,
"step": 754
},
{
"epoch": 0.86,
"grad_norm": 1.0382004505125342,
"learning_rate": 4.860108501712824e-07,
"loss": 0.0831,
"step": 755
},
{
"epoch": 0.86,
"grad_norm": 0.8522871618719096,
"learning_rate": 4.780755146410077e-07,
"loss": 0.0884,
"step": 756
},
{
"epoch": 0.87,
"grad_norm": 1.6984622083006453,
"learning_rate": 4.7020224183706717e-07,
"loss": 0.0826,
"step": 757
},
{
"epoch": 0.87,
"grad_norm": 1.2324234124736468,
"learning_rate": 4.623911398189318e-07,
"loss": 0.0867,
"step": 758
},
{
"epoch": 0.87,
"grad_norm": 0.9551492511223306,
"learning_rate": 4.5464231579279206e-07,
"loss": 0.0668,
"step": 759
},
{
"epoch": 0.87,
"grad_norm": 0.973090902672149,
"learning_rate": 4.4695587611007786e-07,
"loss": 0.0741,
"step": 760
},
{
"epoch": 0.87,
"grad_norm": 1.221155278221863,
"learning_rate": 4.393319262660073e-07,
"loss": 0.0867,
"step": 761
},
{
"epoch": 0.87,
"grad_norm": 0.927123352010104,
"learning_rate": 4.317705708981329e-07,
"loss": 0.0936,
"step": 762
},
{
"epoch": 0.87,
"grad_norm": 1.164545511434888,
"learning_rate": 4.242719137849077e-07,
"loss": 0.0849,
"step": 763
},
{
"epoch": 0.87,
"grad_norm": 0.9999441318021076,
"learning_rate": 4.168360578442615e-07,
"loss": 0.0834,
"step": 764
},
{
"epoch": 0.87,
"grad_norm": 1.4159092264260025,
"learning_rate": 4.0946310513218733e-07,
"loss": 0.0862,
"step": 765
},
{
"epoch": 0.88,
"grad_norm": 1.126512488995191,
"learning_rate": 4.021531568413384e-07,
"loss": 0.0854,
"step": 766
},
{
"epoch": 0.88,
"grad_norm": 1.1346899479313985,
"learning_rate": 3.949063132996456e-07,
"loss": 0.0851,
"step": 767
},
{
"epoch": 0.88,
"grad_norm": 1.2301055402134775,
"learning_rate": 3.877226739689355e-07,
"loss": 0.1078,
"step": 768
},
{
"epoch": 0.88,
"grad_norm": 0.8267592190940007,
"learning_rate": 3.8060233744356634e-07,
"loss": 0.0835,
"step": 769
},
{
"epoch": 0.88,
"grad_norm": 0.6989450862265247,
"learning_rate": 3.735454014490747e-07,
"loss": 0.0621,
"step": 770
},
{
"epoch": 0.88,
"grad_norm": 1.091037054539574,
"learning_rate": 3.665519628408332e-07,
"loss": 0.0876,
"step": 771
},
{
"epoch": 0.88,
"grad_norm": 1.1571797362725371,
"learning_rate": 3.596221176027248e-07,
"loss": 0.0828,
"step": 772
},
{
"epoch": 0.88,
"grad_norm": 0.7724986008317762,
"learning_rate": 3.5275596084582254e-07,
"loss": 0.0681,
"step": 773
},
{
"epoch": 0.88,
"grad_norm": 1.1797550383983542,
"learning_rate": 3.459535868070851e-07,
"loss": 0.0956,
"step": 774
},
{
"epoch": 0.89,
"grad_norm": 1.5194629663196104,
"learning_rate": 3.392150888480611e-07,
"loss": 0.0766,
"step": 775
},
{
"epoch": 0.89,
"grad_norm": 1.1717270963830988,
"learning_rate": 3.3254055945361076e-07,
"loss": 0.0827,
"step": 776
},
{
"epoch": 0.89,
"grad_norm": 2.1502924320409957,
"learning_rate": 3.259300902306367e-07,
"loss": 0.0782,
"step": 777
},
{
"epoch": 0.89,
"grad_norm": 0.9421484362361487,
"learning_rate": 3.193837719068232e-07,
"loss": 0.0612,
"step": 778
},
{
"epoch": 0.89,
"grad_norm": 1.6220845244438815,
"learning_rate": 3.1290169432939556e-07,
"loss": 0.0851,
"step": 779
},
{
"epoch": 0.89,
"grad_norm": 1.2624256131518634,
"learning_rate": 3.0648394646388304e-07,
"loss": 0.0744,
"step": 780
},
{
"epoch": 0.89,
"grad_norm": 1.1192439687232085,
"learning_rate": 3.0013061639289855e-07,
"loss": 0.1133,
"step": 781
},
{
"epoch": 0.89,
"grad_norm": 1.2185909194304914,
"learning_rate": 2.938417913149344e-07,
"loss": 0.0892,
"step": 782
},
{
"epoch": 0.89,
"grad_norm": 1.4523300793441352,
"learning_rate": 2.8761755754315666e-07,
"loss": 0.0769,
"step": 783
},
{
"epoch": 0.9,
"grad_norm": 1.295435971693865,
"learning_rate": 2.814580005042283e-07,
"loss": 0.0659,
"step": 784
},
{
"epoch": 0.9,
"grad_norm": 0.8957520877828618,
"learning_rate": 2.753632047371335e-07,
"loss": 0.0858,
"step": 785
},
{
"epoch": 0.9,
"grad_norm": 0.6932007112743931,
"learning_rate": 2.693332538920196e-07,
"loss": 0.0798,
"step": 786
},
{
"epoch": 0.9,
"grad_norm": 1.4343179975918245,
"learning_rate": 2.6336823072904305e-07,
"loss": 0.0787,
"step": 787
},
{
"epoch": 0.9,
"grad_norm": 0.7646585417594924,
"learning_rate": 2.5746821711724215e-07,
"loss": 0.0842,
"step": 788
},
{
"epoch": 0.9,
"grad_norm": 1.2296867976725898,
"learning_rate": 2.51633294033406e-07,
"loss": 0.0893,
"step": 789
},
{
"epoch": 0.9,
"grad_norm": 0.8344753721377653,
"learning_rate": 2.4586354156096783e-07,
"loss": 0.0856,
"step": 790
},
{
"epoch": 0.9,
"grad_norm": 2.5235942296187353,
"learning_rate": 2.401590388889025e-07,
"loss": 0.0926,
"step": 791
},
{
"epoch": 0.91,
"grad_norm": 1.2341731250729187,
"learning_rate": 2.3451986431064276e-07,
"loss": 0.0676,
"step": 792
},
{
"epoch": 0.91,
"grad_norm": 1.380821266380361,
"learning_rate": 2.289460952230038e-07,
"loss": 0.0804,
"step": 793
},
{
"epoch": 0.91,
"grad_norm": 0.9195607263018777,
"learning_rate": 2.2343780812511819e-07,
"loss": 0.0807,
"step": 794
},
{
"epoch": 0.91,
"grad_norm": 1.3508840786940255,
"learning_rate": 2.179950786173879e-07,
"loss": 0.102,
"step": 795
},
{
"epoch": 0.91,
"grad_norm": 1.2203512555545983,
"learning_rate": 2.1261798140045086e-07,
"loss": 0.0804,
"step": 796
},
{
"epoch": 0.91,
"grad_norm": 0.947476542013538,
"learning_rate": 2.0730659027414723e-07,
"loss": 0.0627,
"step": 797
},
{
"epoch": 0.91,
"grad_norm": 1.2875569576022898,
"learning_rate": 2.020609781365135e-07,
"loss": 0.0697,
"step": 798
},
{
"epoch": 0.91,
"grad_norm": 1.0738316446109282,
"learning_rate": 1.9688121698277995e-07,
"loss": 0.0928,
"step": 799
},
{
"epoch": 0.91,
"grad_norm": 1.100302688905556,
"learning_rate": 1.9176737790437937e-07,
"loss": 0.0922,
"step": 800
},
{
"epoch": 0.92,
"grad_norm": 0.9144336671031666,
"learning_rate": 1.8671953108797825e-07,
"loss": 0.0847,
"step": 801
},
{
"epoch": 0.92,
"grad_norm": 0.8267226116425094,
"learning_rate": 1.8173774581450586e-07,
"loss": 0.068,
"step": 802
},
{
"epoch": 0.92,
"grad_norm": 1.2445599352791576,
"learning_rate": 1.7682209045820687e-07,
"loss": 0.0826,
"step": 803
},
{
"epoch": 0.92,
"grad_norm": 0.9745484531589232,
"learning_rate": 1.7197263248570517e-07,
"loss": 0.0767,
"step": 804
},
{
"epoch": 0.92,
"grad_norm": 0.8839595990343955,
"learning_rate": 1.671894384550743e-07,
"loss": 0.0772,
"step": 805
},
{
"epoch": 0.92,
"grad_norm": 0.9753602503357406,
"learning_rate": 1.6247257401492633e-07,
"loss": 0.0851,
"step": 806
},
{
"epoch": 0.92,
"grad_norm": 1.3608102557409236,
"learning_rate": 1.5782210390350717e-07,
"loss": 0.0838,
"step": 807
},
{
"epoch": 0.92,
"grad_norm": 1.003710736260583,
"learning_rate": 1.532380919478127e-07,
"loss": 0.0753,
"step": 808
},
{
"epoch": 0.92,
"grad_norm": 0.9986605939833132,
"learning_rate": 1.487206010627118e-07,
"loss": 0.0852,
"step": 809
},
{
"epoch": 0.93,
"grad_norm": 1.0489713487182382,
"learning_rate": 1.4426969325007812e-07,
"loss": 0.0829,
"step": 810
},
{
"epoch": 0.93,
"grad_norm": 1.1380778539184384,
"learning_rate": 1.3988542959794627e-07,
"loss": 0.0924,
"step": 811
},
{
"epoch": 0.93,
"grad_norm": 1.289574338056299,
"learning_rate": 1.3556787027966867e-07,
"loss": 0.081,
"step": 812
},
{
"epoch": 0.93,
"grad_norm": 1.5232518215408282,
"learning_rate": 1.3131707455309006e-07,
"loss": 0.0927,
"step": 813
},
{
"epoch": 0.93,
"grad_norm": 1.3190542373254943,
"learning_rate": 1.271331007597365e-07,
"loss": 0.085,
"step": 814
},
{
"epoch": 0.93,
"grad_norm": 0.823982217741129,
"learning_rate": 1.2301600632401212e-07,
"loss": 0.0583,
"step": 815
},
{
"epoch": 0.93,
"grad_norm": 1.9601283751011347,
"learning_rate": 1.1896584775241194e-07,
"loss": 0.0951,
"step": 816
},
{
"epoch": 0.93,
"grad_norm": 1.1845946794586921,
"learning_rate": 1.1498268063274697e-07,
"loss": 0.0807,
"step": 817
},
{
"epoch": 0.93,
"grad_norm": 2.048858092638161,
"learning_rate": 1.1106655963338154e-07,
"loss": 0.0735,
"step": 818
},
{
"epoch": 0.94,
"grad_norm": 1.0582211099338623,
"learning_rate": 1.0721753850247984e-07,
"loss": 0.0627,
"step": 819
},
{
"epoch": 0.94,
"grad_norm": 1.285727079837848,
"learning_rate": 1.034356700672734e-07,
"loss": 0.1099,
"step": 820
},
{
"epoch": 0.94,
"grad_norm": 1.5296721973274716,
"learning_rate": 9.972100623333036e-08,
"loss": 0.085,
"step": 821
},
{
"epoch": 0.94,
"grad_norm": 1.4156755488399408,
"learning_rate": 9.607359798384785e-08,
"loss": 0.0887,
"step": 822
},
{
"epoch": 0.94,
"grad_norm": 0.9732631646078624,
"learning_rate": 9.249349537894969e-08,
"loss": 0.0848,
"step": 823
},
{
"epoch": 0.94,
"grad_norm": 1.20599096937309,
"learning_rate": 8.89807475550003e-08,
"loss": 0.0869,
"step": 824
},
{
"epoch": 0.94,
"grad_norm": 0.8664219455219898,
"learning_rate": 8.553540272392969e-08,
"loss": 0.0682,
"step": 825
},
{
"epoch": 0.94,
"grad_norm": 0.766439476917645,
"learning_rate": 8.215750817257173e-08,
"loss": 0.0781,
"step": 826
},
{
"epoch": 0.95,
"grad_norm": 0.9177212304579919,
"learning_rate": 7.884711026201586e-08,
"loss": 0.0838,
"step": 827
},
{
"epoch": 0.95,
"grad_norm": 1.625903722521225,
"learning_rate": 7.560425442697028e-08,
"loss": 0.1111,
"step": 828
},
{
"epoch": 0.95,
"grad_norm": 1.0497108005642395,
"learning_rate": 7.242898517513864e-08,
"loss": 0.0921,
"step": 829
},
{
"epoch": 0.95,
"grad_norm": 1.0158040826453472,
"learning_rate": 6.932134608660934e-08,
"loss": 0.0764,
"step": 830
},
{
"epoch": 0.95,
"grad_norm": 1.8820874252726552,
"learning_rate": 6.628137981325611e-08,
"loss": 0.0859,
"step": 831
},
{
"epoch": 0.95,
"grad_norm": 1.1111712419679023,
"learning_rate": 6.330912807815338e-08,
"loss": 0.0755,
"step": 832
},
{
"epoch": 0.95,
"grad_norm": 0.9119864524126255,
"learning_rate": 6.04046316750051e-08,
"loss": 0.0828,
"step": 833
},
{
"epoch": 0.95,
"grad_norm": 1.400402904520679,
"learning_rate": 5.756793046758302e-08,
"loss": 0.0862,
"step": 834
},
{
"epoch": 0.95,
"grad_norm": 1.1178616108463992,
"learning_rate": 5.479906338917984e-08,
"loss": 0.0848,
"step": 835
},
{
"epoch": 0.96,
"grad_norm": 0.7790435488564559,
"learning_rate": 5.2098068442076325e-08,
"loss": 0.0657,
"step": 836
},
{
"epoch": 0.96,
"grad_norm": 0.9151934981942192,
"learning_rate": 4.946498269701616e-08,
"loss": 0.0808,
"step": 837
},
{
"epoch": 0.96,
"grad_norm": 1.6466988196620609,
"learning_rate": 4.6899842292702504e-08,
"loss": 0.096,
"step": 838
},
{
"epoch": 0.96,
"grad_norm": 0.9366718134612106,
"learning_rate": 4.4402682435296666e-08,
"loss": 0.0633,
"step": 839
},
{
"epoch": 0.96,
"grad_norm": 1.2794085716988426,
"learning_rate": 4.1973537397938524e-08,
"loss": 0.083,
"step": 840
},
{
"epoch": 0.96,
"grad_norm": 1.4323100350640852,
"learning_rate": 3.961244052027413e-08,
"loss": 0.0726,
"step": 841
},
{
"epoch": 0.96,
"grad_norm": 1.442592489577927,
"learning_rate": 3.731942420799939e-08,
"loss": 0.108,
"step": 842
},
{
"epoch": 0.96,
"grad_norm": 0.9558342121619565,
"learning_rate": 3.5094519932415417e-08,
"loss": 0.074,
"step": 843
},
{
"epoch": 0.96,
"grad_norm": 1.2267293855463146,
"learning_rate": 3.2937758229994455e-08,
"loss": 0.1133,
"step": 844
},
{
"epoch": 0.97,
"grad_norm": 1.377404962474636,
"learning_rate": 3.084916870196297e-08,
"loss": 0.0706,
"step": 845
},
{
"epoch": 0.97,
"grad_norm": 1.0880792886802064,
"learning_rate": 2.8828780013895885e-08,
"loss": 0.0749,
"step": 846
},
{
"epoch": 0.97,
"grad_norm": 1.127974831685117,
"learning_rate": 2.6876619895319646e-08,
"loss": 0.0868,
"step": 847
},
{
"epoch": 0.97,
"grad_norm": 1.177548020934705,
"learning_rate": 2.4992715139335877e-08,
"loss": 0.0752,
"step": 848
},
{
"epoch": 0.97,
"grad_norm": 1.7602271025103597,
"learning_rate": 2.3177091602251677e-08,
"loss": 0.0966,
"step": 849
},
{
"epoch": 0.97,
"grad_norm": 1.0131384087763258,
"learning_rate": 2.1429774203222674e-08,
"loss": 0.0825,
"step": 850
},
{
"epoch": 0.97,
"grad_norm": 1.1479626099193676,
"learning_rate": 1.975078692391552e-08,
"loss": 0.0751,
"step": 851
},
{
"epoch": 0.97,
"grad_norm": 1.0591293526686105,
"learning_rate": 1.814015280817538e-08,
"loss": 0.0802,
"step": 852
},
{
"epoch": 0.97,
"grad_norm": 1.3768999915261486,
"learning_rate": 1.659789396171063e-08,
"loss": 0.0772,
"step": 853
},
{
"epoch": 0.98,
"grad_norm": 1.9181306911762186,
"learning_rate": 1.5124031551789208e-08,
"loss": 0.0696,
"step": 854
},
{
"epoch": 0.98,
"grad_norm": 0.955991797622,
"learning_rate": 1.3718585806949403e-08,
"loss": 0.0624,
"step": 855
},
{
"epoch": 0.98,
"grad_norm": 0.9424787636427376,
"learning_rate": 1.2381576016720076e-08,
"loss": 0.09,
"step": 856
},
{
"epoch": 0.98,
"grad_norm": 1.2768624565283,
"learning_rate": 1.1113020531357543e-08,
"loss": 0.0679,
"step": 857
},
{
"epoch": 0.98,
"grad_norm": 0.8292588184596877,
"learning_rate": 9.91293676159244e-09,
"loss": 0.0762,
"step": 858
},
{
"epoch": 0.98,
"grad_norm": 1.4980250112396316,
"learning_rate": 8.781341178393244e-09,
"loss": 0.0896,
"step": 859
},
{
"epoch": 0.98,
"grad_norm": 1.0751217382850593,
"learning_rate": 7.718249312735903e-09,
"loss": 0.0669,
"step": 860
},
{
"epoch": 0.98,
"grad_norm": 0.6280685312977554,
"learning_rate": 6.7236757553962306e-09,
"loss": 0.0888,
"step": 861
},
{
"epoch": 0.99,
"grad_norm": 1.717122292617126,
"learning_rate": 5.797634156744502e-09,
"loss": 0.0992,
"step": 862
},
{
"epoch": 0.99,
"grad_norm": 1.9477118843467882,
"learning_rate": 4.940137226560615e-09,
"loss": 0.0892,
"step": 863
},
{
"epoch": 0.99,
"grad_norm": 1.3961547990365242,
"learning_rate": 4.151196733859775e-09,
"loss": 0.0694,
"step": 864
},
{
"epoch": 0.99,
"grad_norm": 2.039172931689318,
"learning_rate": 3.4308235067309624e-09,
"loss": 0.0766,
"step": 865
},
{
"epoch": 0.99,
"grad_norm": 1.5917603991318112,
"learning_rate": 2.7790274321859435e-09,
"loss": 0.1037,
"step": 866
},
{
"epoch": 0.99,
"grad_norm": 1.0235372378595196,
"learning_rate": 2.19581745602826e-09,
"loss": 0.1025,
"step": 867
},
{
"epoch": 0.99,
"grad_norm": 0.9414557507341034,
"learning_rate": 1.6812015827255557e-09,
"loss": 0.0913,
"step": 868
},
{
"epoch": 0.99,
"grad_norm": 0.9517346204680242,
"learning_rate": 1.2351868753018858e-09,
"loss": 0.084,
"step": 869
},
{
"epoch": 0.99,
"grad_norm": 1.0166083917147921,
"learning_rate": 8.577794552422359e-10,
"loss": 0.0556,
"step": 870
},
{
"epoch": 1.0,
"grad_norm": 1.1209124381768403,
"learning_rate": 5.489845024053698e-10,
"loss": 0.0786,
"step": 871
},
{
"epoch": 1.0,
"grad_norm": 2.4314007730116183,
"learning_rate": 3.088062549555515e-10,
"loss": 0.085,
"step": 872
},
{
"epoch": 1.0,
"grad_norm": 0.876419163965729,
"learning_rate": 1.3724800930314807e-10,
"loss": 0.0863,
"step": 873
},
{
"epoch": 1.0,
"grad_norm": 1.1111355248850858,
"learning_rate": 3.4312120057999886e-11,
"loss": 0.0886,
"step": 874
},
{
"epoch": 1.0,
"grad_norm": 0.9385207351245615,
"learning_rate": 0.0,
"loss": 0.0745,
"step": 875
},
{
"epoch": 1.0,
"step": 875,
"total_flos": 48930844000256.0,
"train_loss": 0.10205818943466459,
"train_runtime": 2840.5403,
"train_samples_per_second": 2.464,
"train_steps_per_second": 0.308
}
],
"logging_steps": 1.0,
"max_steps": 875,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 48930844000256.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}