{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.40100250626566414,
  "eval_steps": 500,
  "global_step": 320,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0012531328320802004,
      "grad_norm": 0.1519206017255783,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.8379,
      "step": 1
    },
    {
      "epoch": 0.002506265664160401,
      "grad_norm": 0.13846151530742645,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.6569,
      "step": 2
    },
    {
      "epoch": 0.0037593984962406013,
      "grad_norm": 0.13162349164485931,
      "learning_rate": 7.2e-06,
      "loss": 1.6083,
      "step": 3
    },
    {
      "epoch": 0.005012531328320802,
      "grad_norm": 0.1345846951007843,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.6603,
      "step": 4
    },
    {
      "epoch": 0.006265664160401002,
      "grad_norm": 0.1223267987370491,
      "learning_rate": 1.2e-05,
      "loss": 1.6579,
      "step": 5
    },
    {
      "epoch": 0.007518796992481203,
      "grad_norm": 0.09618138521909714,
      "learning_rate": 1.44e-05,
      "loss": 1.5689,
      "step": 6
    },
    {
      "epoch": 0.008771929824561403,
      "grad_norm": 0.09933976829051971,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 1.4663,
      "step": 7
    },
    {
      "epoch": 0.010025062656641603,
      "grad_norm": 0.1198974996805191,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 1.734,
      "step": 8
    },
    {
      "epoch": 0.011278195488721804,
      "grad_norm": 0.15424160659313202,
      "learning_rate": 2.16e-05,
      "loss": 1.6767,
      "step": 9
    },
    {
      "epoch": 0.012531328320802004,
      "grad_norm": 0.1701776534318924,
      "learning_rate": 2.4e-05,
      "loss": 1.5974,
      "step": 10
    },
    {
      "epoch": 0.013784461152882205,
      "grad_norm": 0.15073753893375397,
      "learning_rate": 2.64e-05,
      "loss": 1.5259,
      "step": 11
    },
    {
      "epoch": 0.015037593984962405,
      "grad_norm": 0.14940425753593445,
      "learning_rate": 2.88e-05,
      "loss": 1.6349,
      "step": 12
    },
    {
      "epoch": 0.016290726817042606,
      "grad_norm": 0.12604176998138428,
      "learning_rate": 3.12e-05,
      "loss": 1.6443,
      "step": 13
    },
    {
      "epoch": 0.017543859649122806,
      "grad_norm": 0.15434400737285614,
      "learning_rate": 3.3600000000000004e-05,
      "loss": 1.6378,
      "step": 14
    },
    {
      "epoch": 0.018796992481203006,
      "grad_norm": 0.12647251784801483,
      "learning_rate": 3.6e-05,
      "loss": 1.542,
      "step": 15
    },
    {
      "epoch": 0.020050125313283207,
      "grad_norm": 0.1258278489112854,
      "learning_rate": 3.8400000000000005e-05,
      "loss": 1.6091,
      "step": 16
    },
    {
      "epoch": 0.021303258145363407,
      "grad_norm": 0.09623159468173981,
      "learning_rate": 4.08e-05,
      "loss": 1.5555,
      "step": 17
    },
    {
      "epoch": 0.022556390977443608,
      "grad_norm": 0.10304850339889526,
      "learning_rate": 4.32e-05,
      "loss": 1.5517,
      "step": 18
    },
    {
      "epoch": 0.023809523809523808,
      "grad_norm": 0.09278815984725952,
      "learning_rate": 4.5600000000000004e-05,
      "loss": 1.6436,
      "step": 19
    },
    {
      "epoch": 0.02506265664160401,
      "grad_norm": 0.08530683070421219,
      "learning_rate": 4.8e-05,
      "loss": 1.5664,
      "step": 20
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 0.08876050263643265,
      "learning_rate": 5.04e-05,
      "loss": 1.5628,
      "step": 21
    },
    {
      "epoch": 0.02756892230576441,
      "grad_norm": 0.09901795536279678,
      "learning_rate": 5.28e-05,
      "loss": 1.5402,
      "step": 22
    },
    {
      "epoch": 0.02882205513784461,
      "grad_norm": 0.09156398475170135,
      "learning_rate": 5.520000000000001e-05,
      "loss": 1.606,
      "step": 23
    },
    {
      "epoch": 0.03007518796992481,
      "grad_norm": 0.09371288120746613,
      "learning_rate": 5.76e-05,
      "loss": 1.566,
      "step": 24
    },
    {
      "epoch": 0.03132832080200501,
      "grad_norm": 0.0892362967133522,
      "learning_rate": 6e-05,
      "loss": 1.6193,
      "step": 25
    },
    {
      "epoch": 0.03258145363408521,
      "grad_norm": 0.08784804493188858,
      "learning_rate": 5.99997522398708e-05,
      "loss": 1.5756,
      "step": 26
    },
    {
      "epoch": 0.03383458646616541,
      "grad_norm": 0.08121798187494278,
      "learning_rate": 5.999900896357553e-05,
      "loss": 1.5085,
      "step": 27
    },
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 0.09490139782428741,
      "learning_rate": 5.999777018339115e-05,
      "loss": 1.5692,
      "step": 28
    },
    {
      "epoch": 0.03634085213032581,
      "grad_norm": 0.1026797667145729,
      "learning_rate": 5.999603591977901e-05,
      "loss": 1.5761,
      "step": 29
    },
    {
      "epoch": 0.03759398496240601,
      "grad_norm": 0.09289643168449402,
      "learning_rate": 5.999380620138454e-05,
      "loss": 1.5612,
      "step": 30
    },
    {
      "epoch": 0.03884711779448621,
      "grad_norm": 0.09448801726102829,
      "learning_rate": 5.9991081065036745e-05,
      "loss": 1.6029,
      "step": 31
    },
    {
      "epoch": 0.040100250626566414,
      "grad_norm": 0.09210563451051712,
      "learning_rate": 5.998786055574766e-05,
      "loss": 1.5229,
      "step": 32
    },
    {
      "epoch": 0.041353383458646614,
      "grad_norm": 0.09483816474676132,
      "learning_rate": 5.998414472671151e-05,
      "loss": 1.5753,
      "step": 33
    },
    {
      "epoch": 0.042606516290726815,
      "grad_norm": 0.0975150614976883,
      "learning_rate": 5.997993363930393e-05,
      "loss": 1.654,
      "step": 34
    },
    {
      "epoch": 0.043859649122807015,
      "grad_norm": 0.10024327784776688,
      "learning_rate": 5.997522736308089e-05,
      "loss": 1.5868,
      "step": 35
    },
    {
      "epoch": 0.045112781954887216,
      "grad_norm": 0.08656363934278488,
      "learning_rate": 5.9970025975777576e-05,
      "loss": 1.5087,
      "step": 36
    },
    {
      "epoch": 0.046365914786967416,
      "grad_norm": 0.09871859848499298,
      "learning_rate": 5.996432956330705e-05,
      "loss": 1.4932,
      "step": 37
    },
    {
      "epoch": 0.047619047619047616,
      "grad_norm": 0.11055205762386322,
      "learning_rate": 5.9958138219758926e-05,
      "loss": 1.5467,
      "step": 38
    },
    {
      "epoch": 0.04887218045112782,
      "grad_norm": 0.11448398977518082,
      "learning_rate": 5.995145204739774e-05,
      "loss": 1.5205,
      "step": 39
    },
    {
      "epoch": 0.05012531328320802,
      "grad_norm": 0.09594336152076721,
      "learning_rate": 5.994427115666128e-05,
      "loss": 1.4101,
      "step": 40
    },
    {
      "epoch": 0.05137844611528822,
      "grad_norm": 0.1022314578294754,
      "learning_rate": 5.993659566615878e-05,
      "loss": 1.5726,
      "step": 41
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 0.0918896496295929,
      "learning_rate": 5.9928425702668936e-05,
      "loss": 1.4416,
      "step": 42
    },
    {
      "epoch": 0.05388471177944862,
      "grad_norm": 0.11478869616985321,
      "learning_rate": 5.9919761401137845e-05,
      "loss": 1.4871,
      "step": 43
    },
    {
      "epoch": 0.05513784461152882,
      "grad_norm": 0.09935441613197327,
      "learning_rate": 5.991060290467671e-05,
      "loss": 1.5057,
      "step": 44
    },
    {
      "epoch": 0.05639097744360902,
      "grad_norm": 0.1036807969212532,
      "learning_rate": 5.990095036455958e-05,
      "loss": 1.6244,
      "step": 45
    },
    {
      "epoch": 0.05764411027568922,
      "grad_norm": 0.10617007315158844,
      "learning_rate": 5.989080394022074e-05,
      "loss": 1.6287,
      "step": 46
    },
    {
      "epoch": 0.05889724310776942,
      "grad_norm": 0.09776347130537033,
      "learning_rate": 5.988016379925215e-05,
      "loss": 1.5421,
      "step": 47
    },
    {
      "epoch": 0.06015037593984962,
      "grad_norm": 0.10930298268795013,
      "learning_rate": 5.986903011740067e-05,
      "loss": 1.6162,
      "step": 48
    },
    {
      "epoch": 0.06140350877192982,
      "grad_norm": 0.10414919257164001,
      "learning_rate": 5.985740307856512e-05,
      "loss": 1.5381,
      "step": 49
    },
    {
      "epoch": 0.06265664160401002,
      "grad_norm": 0.10629701614379883,
      "learning_rate": 5.984528287479328e-05,
      "loss": 1.6127,
      "step": 50
    },
    {
      "epoch": 0.06390977443609022,
      "grad_norm": 0.11536388099193573,
      "learning_rate": 5.983266970627869e-05,
      "loss": 1.5455,
      "step": 51
    },
    {
      "epoch": 0.06516290726817042,
      "grad_norm": 0.12118639051914215,
      "learning_rate": 5.9819563781357385e-05,
      "loss": 1.5043,
      "step": 52
    },
    {
      "epoch": 0.06641604010025062,
      "grad_norm": 0.11645355075597763,
      "learning_rate": 5.98059653165044e-05,
      "loss": 1.5473,
      "step": 53
    },
    {
      "epoch": 0.06766917293233082,
      "grad_norm": 0.12099536508321762,
      "learning_rate": 5.9791874536330225e-05,
      "loss": 1.5482,
      "step": 54
    },
    {
      "epoch": 0.06892230576441102,
      "grad_norm": 0.10795404016971588,
      "learning_rate": 5.97772916735771e-05,
      "loss": 1.5326,
      "step": 55
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 0.09979701787233353,
      "learning_rate": 5.9762216969115154e-05,
      "loss": 1.4133,
      "step": 56
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 0.12733447551727295,
      "learning_rate": 5.974665067193844e-05,
      "loss": 1.5839,
      "step": 57
    },
    {
      "epoch": 0.07268170426065163,
      "grad_norm": 0.11343546956777573,
      "learning_rate": 5.97305930391608e-05,
      "loss": 1.524,
      "step": 58
    },
    {
      "epoch": 0.07393483709273183,
      "grad_norm": 0.11789534986019135,
      "learning_rate": 5.971404433601165e-05,
      "loss": 1.4677,
      "step": 59
    },
    {
      "epoch": 0.07518796992481203,
      "grad_norm": 0.10624619573354721,
      "learning_rate": 5.969700483583159e-05,
      "loss": 1.4893,
      "step": 60
    },
    {
      "epoch": 0.07644110275689223,
      "grad_norm": 0.10631600022315979,
      "learning_rate": 5.967947482006786e-05,
      "loss": 1.573,
      "step": 61
    },
    {
      "epoch": 0.07769423558897243,
      "grad_norm": 0.12548910081386566,
      "learning_rate": 5.9661454578269724e-05,
      "loss": 1.5625,
      "step": 62
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 0.1474038064479828,
      "learning_rate": 5.964294440808368e-05,
      "loss": 1.6886,
      "step": 63
    },
    {
      "epoch": 0.08020050125313283,
      "grad_norm": 0.12192469835281372,
      "learning_rate": 5.962394461524854e-05,
      "loss": 1.5252,
      "step": 64
    },
    {
      "epoch": 0.08145363408521303,
      "grad_norm": 0.11869639158248901,
      "learning_rate": 5.960445551359037e-05,
      "loss": 1.5262,
      "step": 65
    },
    {
      "epoch": 0.08270676691729323,
      "grad_norm": 0.12624335289001465,
      "learning_rate": 5.958447742501735e-05,
      "loss": 1.5645,
      "step": 66
    },
    {
      "epoch": 0.08395989974937343,
      "grad_norm": 0.11716689169406891,
      "learning_rate": 5.9564010679514376e-05,
      "loss": 1.47,
      "step": 67
    },
    {
      "epoch": 0.08521303258145363,
      "grad_norm": 0.12144263088703156,
      "learning_rate": 5.954305561513769e-05,
      "loss": 1.6181,
      "step": 68
    },
    {
      "epoch": 0.08646616541353383,
      "grad_norm": 0.12713098526000977,
      "learning_rate": 5.9521612578009255e-05,
      "loss": 1.6227,
      "step": 69
    },
    {
      "epoch": 0.08771929824561403,
      "grad_norm": 0.1043514758348465,
      "learning_rate": 5.9499681922311046e-05,
      "loss": 1.4522,
      "step": 70
    },
    {
      "epoch": 0.08897243107769423,
      "grad_norm": 0.11064239591360092,
      "learning_rate": 5.947726401027921e-05,
      "loss": 1.4685,
      "step": 71
    },
    {
      "epoch": 0.09022556390977443,
      "grad_norm": 0.11946432292461395,
      "learning_rate": 5.945435921219806e-05,
      "loss": 1.4165,
      "step": 72
    },
    {
      "epoch": 0.09147869674185463,
      "grad_norm": 0.11144128441810608,
      "learning_rate": 5.943096790639398e-05,
      "loss": 1.5081,
      "step": 73
    },
    {
      "epoch": 0.09273182957393483,
      "grad_norm": 0.11953038722276688,
      "learning_rate": 5.9407090479229166e-05,
      "loss": 1.5773,
      "step": 74
    },
    {
      "epoch": 0.09398496240601503,
      "grad_norm": 0.12432811409235,
      "learning_rate": 5.938272732509525e-05,
      "loss": 1.528,
      "step": 75
    },
    {
      "epoch": 0.09523809523809523,
      "grad_norm": 0.13081413507461548,
      "learning_rate": 5.9357878846406776e-05,
      "loss": 1.5024,
      "step": 76
    },
    {
      "epoch": 0.09649122807017543,
      "grad_norm": 0.12342929095029831,
      "learning_rate": 5.933254545359456e-05,
      "loss": 1.478,
      "step": 77
    },
    {
      "epoch": 0.09774436090225563,
      "grad_norm": 0.12129031121730804,
      "learning_rate": 5.9306727565098925e-05,
      "loss": 1.531,
      "step": 78
    },
    {
      "epoch": 0.09899749373433583,
      "grad_norm": 0.12284113466739655,
      "learning_rate": 5.928042560736275e-05,
      "loss": 1.6204,
      "step": 79
    },
    {
      "epoch": 0.10025062656641603,
      "grad_norm": 0.14579783380031586,
      "learning_rate": 5.9253640014824466e-05,
      "loss": 1.4382,
      "step": 80
    },
    {
      "epoch": 0.10150375939849623,
      "grad_norm": 0.13060207664966583,
      "learning_rate": 5.9226371229910885e-05,
      "loss": 1.5713,
      "step": 81
    },
    {
      "epoch": 0.10275689223057644,
      "grad_norm": 0.12609316408634186,
      "learning_rate": 5.919861970302982e-05,
      "loss": 1.5312,
      "step": 82
    },
    {
      "epoch": 0.10401002506265664,
      "grad_norm": 0.12477698922157288,
      "learning_rate": 5.9170385892562755e-05,
      "loss": 1.5538,
      "step": 83
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 0.14683856070041656,
      "learning_rate": 5.914167026485719e-05,
      "loss": 1.5318,
      "step": 84
    },
    {
      "epoch": 0.10651629072681704,
      "grad_norm": 0.12768588960170746,
      "learning_rate": 5.9112473294218954e-05,
      "loss": 1.5587,
      "step": 85
    },
    {
      "epoch": 0.10776942355889724,
      "grad_norm": 0.1492781639099121,
      "learning_rate": 5.9082795462904396e-05,
      "loss": 1.5623,
      "step": 86
    },
    {
      "epoch": 0.10902255639097744,
      "grad_norm": 0.12561501562595367,
      "learning_rate": 5.905263726111241e-05,
      "loss": 1.5232,
      "step": 87
    },
    {
      "epoch": 0.11027568922305764,
      "grad_norm": 0.11631227284669876,
      "learning_rate": 5.902199918697634e-05,
      "loss": 1.505,
      "step": 88
    },
    {
      "epoch": 0.11152882205513784,
      "grad_norm": 0.126926988363266,
      "learning_rate": 5.899088174655571e-05,
      "loss": 1.5001,
      "step": 89
    },
    {
      "epoch": 0.11278195488721804,
      "grad_norm": 0.13932526111602783,
      "learning_rate": 5.8959285453827936e-05,
      "loss": 1.4141,
      "step": 90
    },
    {
      "epoch": 0.11403508771929824,
      "grad_norm": 0.13378114998340607,
      "learning_rate": 5.8927210830679785e-05,
      "loss": 1.5173,
      "step": 91
    },
    {
      "epoch": 0.11528822055137844,
      "grad_norm": 0.12687304615974426,
      "learning_rate": 5.889465840689878e-05,
      "loss": 1.3759,
      "step": 92
    },
    {
      "epoch": 0.11654135338345864,
      "grad_norm": 0.12497436255216599,
      "learning_rate": 5.886162872016442e-05,
      "loss": 1.6235,
      "step": 93
    },
    {
      "epoch": 0.11779448621553884,
      "grad_norm": 0.12597259879112244,
      "learning_rate": 5.882812231603937e-05,
      "loss": 1.4533,
      "step": 94
    },
    {
      "epoch": 0.11904761904761904,
      "grad_norm": 0.14128872752189636,
      "learning_rate": 5.879413974796033e-05,
      "loss": 1.4717,
      "step": 95
    },
    {
      "epoch": 0.12030075187969924,
      "grad_norm": 0.13640277087688446,
      "learning_rate": 5.8759681577229014e-05,
      "loss": 1.4978,
      "step": 96
    },
    {
      "epoch": 0.12155388471177944,
      "grad_norm": 0.11883015185594559,
      "learning_rate": 5.8724748373002805e-05,
      "loss": 1.5333,
      "step": 97
    },
    {
      "epoch": 0.12280701754385964,
      "grad_norm": 0.14055690169334412,
      "learning_rate": 5.868934071228539e-05,
      "loss": 1.5967,
      "step": 98
    },
    {
      "epoch": 0.12406015037593984,
      "grad_norm": 0.1388673484325409,
      "learning_rate": 5.8653459179917196e-05,
      "loss": 1.4698,
      "step": 99
    },
    {
      "epoch": 0.12531328320802004,
      "grad_norm": 0.14991509914398193,
      "learning_rate": 5.861710436856577e-05,
      "loss": 1.565,
      "step": 100
    },
    {
      "epoch": 0.12656641604010024,
      "grad_norm": 0.15015804767608643,
      "learning_rate": 5.8580276878715964e-05,
      "loss": 1.5374,
      "step": 101
    },
    {
      "epoch": 0.12781954887218044,
      "grad_norm": 0.14958204329013824,
      "learning_rate": 5.854297731866002e-05,
      "loss": 1.3789,
      "step": 102
    },
    {
      "epoch": 0.12907268170426064,
      "grad_norm": 0.1282397210597992,
      "learning_rate": 5.850520630448752e-05,
      "loss": 1.5317,
      "step": 103
    },
    {
      "epoch": 0.13032581453634084,
      "grad_norm": 0.12608109414577484,
      "learning_rate": 5.8466964460075225e-05,
      "loss": 1.4828,
      "step": 104
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 0.17321082949638367,
      "learning_rate": 5.8428252417076766e-05,
      "loss": 1.4825,
      "step": 105
    },
    {
      "epoch": 0.13283208020050125,
      "grad_norm": 0.135195791721344,
      "learning_rate": 5.838907081491219e-05,
      "loss": 1.5631,
      "step": 106
    },
    {
      "epoch": 0.13408521303258145,
      "grad_norm": 0.12379708886146545,
      "learning_rate": 5.8349420300757393e-05,
      "loss": 1.5213,
      "step": 107
    },
    {
      "epoch": 0.13533834586466165,
      "grad_norm": 0.13356727361679077,
      "learning_rate": 5.830930152953351e-05,
      "loss": 1.5834,
      "step": 108
    },
    {
      "epoch": 0.13659147869674185,
      "grad_norm": 0.13438117504119873,
      "learning_rate": 5.8268715163895984e-05,
      "loss": 1.5039,
      "step": 109
    },
    {
      "epoch": 0.13784461152882205,
      "grad_norm": 0.14895889163017273,
      "learning_rate": 5.82276618742237e-05,
      "loss": 1.5213,
      "step": 110
    },
    {
      "epoch": 0.13909774436090225,
      "grad_norm": 0.13232296705245972,
      "learning_rate": 5.818614233860789e-05,
      "loss": 1.4048,
      "step": 111
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 0.13256017863750458,
      "learning_rate": 5.8144157242840904e-05,
      "loss": 1.5077,
      "step": 112
    },
    {
      "epoch": 0.14160401002506265,
      "grad_norm": 0.13263653218746185,
      "learning_rate": 5.810170728040494e-05,
      "loss": 1.3955,
      "step": 113
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 0.12680189311504364,
      "learning_rate": 5.8058793152460524e-05,
      "loss": 1.4194,
      "step": 114
    },
    {
      "epoch": 0.14411027568922305,
      "grad_norm": 0.13666822016239166,
      "learning_rate": 5.801541556783501e-05,
      "loss": 1.4234,
      "step": 115
    },
    {
      "epoch": 0.14536340852130325,
      "grad_norm": 0.14599356055259705,
      "learning_rate": 5.7971575243010775e-05,
      "loss": 1.5034,
      "step": 116
    },
    {
      "epoch": 0.14661654135338345,
      "grad_norm": 0.14143531024456024,
      "learning_rate": 5.792727290211347e-05,
      "loss": 1.4257,
      "step": 117
    },
    {
      "epoch": 0.14786967418546365,
      "grad_norm": 0.14613430202007294,
      "learning_rate": 5.7882509276899995e-05,
      "loss": 1.5734,
      "step": 118
    },
    {
      "epoch": 0.14912280701754385,
      "grad_norm": 0.13908414542675018,
      "learning_rate": 5.7837285106746455e-05,
      "loss": 1.4259,
      "step": 119
    },
    {
      "epoch": 0.15037593984962405,
      "grad_norm": 0.14299297332763672,
      "learning_rate": 5.779160113863594e-05,
      "loss": 1.6024,
      "step": 120
    },
    {
      "epoch": 0.15162907268170425,
      "grad_norm": 0.1389327496290207,
      "learning_rate": 5.774545812714617e-05,
      "loss": 1.5475,
      "step": 121
    },
    {
      "epoch": 0.15288220551378445,
      "grad_norm": 0.12398528307676315,
      "learning_rate": 5.769885683443704e-05,
      "loss": 1.4205,
      "step": 122
    },
    {
      "epoch": 0.15413533834586465,
      "grad_norm": 0.1524042785167694,
      "learning_rate": 5.765179803023805e-05,
      "loss": 1.5462,
      "step": 123
    },
    {
      "epoch": 0.15538847117794485,
      "grad_norm": 0.13744120299816132,
      "learning_rate": 5.7604282491835546e-05,
      "loss": 1.396,
      "step": 124
    },
    {
      "epoch": 0.15664160401002505,
      "grad_norm": 0.1439579576253891,
      "learning_rate": 5.755631100405994e-05,
      "loss": 1.3751,
      "step": 125
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 0.1428079754114151,
      "learning_rate": 5.750788435927268e-05,
      "loss": 1.4661,
      "step": 126
    },
    {
      "epoch": 0.15914786967418545,
      "grad_norm": 0.15680895745754242,
      "learning_rate": 5.7459003357353214e-05,
      "loss": 1.5036,
      "step": 127
    },
    {
      "epoch": 0.16040100250626566,
      "grad_norm": 0.1669396311044693,
      "learning_rate": 5.740966880568579e-05,
      "loss": 1.5688,
      "step": 128
    },
    {
      "epoch": 0.16165413533834586,
      "grad_norm": 0.14206421375274658,
      "learning_rate": 5.735988151914606e-05,
      "loss": 1.4673,
      "step": 129
    },
    {
      "epoch": 0.16290726817042606,
      "grad_norm": 0.14038872718811035,
      "learning_rate": 5.730964232008765e-05,
      "loss": 1.4081,
      "step": 130
    },
    {
      "epoch": 0.16416040100250626,
      "grad_norm": 0.14589321613311768,
      "learning_rate": 5.72589520383286e-05,
      "loss": 1.4524,
      "step": 131
    },
    {
      "epoch": 0.16541353383458646,
      "grad_norm": 0.12820565700531006,
      "learning_rate": 5.72078115111376e-05,
      "loss": 1.513,
      "step": 132
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.14205814898014069,
      "learning_rate": 5.715622158322027e-05,
      "loss": 1.4528,
      "step": 133
    },
    {
      "epoch": 0.16791979949874686,
      "grad_norm": 0.14166466891765594,
      "learning_rate": 5.7104183106705065e-05,
      "loss": 1.4185,
      "step": 134
    },
    {
      "epoch": 0.16917293233082706,
      "grad_norm": 0.1772213876247406,
      "learning_rate": 5.705169694112929e-05,
      "loss": 1.6193,
      "step": 135
    },
    {
      "epoch": 0.17042606516290726,
      "grad_norm": 0.14686815440654755,
      "learning_rate": 5.6998763953424906e-05,
      "loss": 1.4533,
      "step": 136
    },
    {
      "epoch": 0.17167919799498746,
      "grad_norm": 0.1334281861782074,
      "learning_rate": 5.694538501790417e-05,
      "loss": 1.4905,
      "step": 137
    },
    {
      "epoch": 0.17293233082706766,
      "grad_norm": 0.1628011018037796,
      "learning_rate": 5.689156101624519e-05,
      "loss": 1.4211,
      "step": 138
    },
    {
      "epoch": 0.17418546365914786,
      "grad_norm": 0.15260601043701172,
      "learning_rate": 5.683729283747743e-05,
      "loss": 1.487,
      "step": 139
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 0.15220171213150024,
      "learning_rate": 5.6782581377966954e-05,
      "loss": 1.4547,
      "step": 140
    },
    {
      "epoch": 0.17669172932330826,
      "grad_norm": 0.13987644016742706,
      "learning_rate": 5.672742754140162e-05,
      "loss": 1.4544,
      "step": 141
    },
    {
      "epoch": 0.17794486215538846,
      "grad_norm": 0.1494978666305542,
      "learning_rate": 5.6671832238776246e-05,
      "loss": 1.3525,
      "step": 142
    },
    {
      "epoch": 0.17919799498746866,
      "grad_norm": 0.15626025199890137,
      "learning_rate": 5.661579638837744e-05,
      "loss": 1.4859,
      "step": 143
    },
    {
      "epoch": 0.18045112781954886,
      "grad_norm": 0.14824387431144714,
      "learning_rate": 5.655932091576849e-05,
      "loss": 1.487,
      "step": 144
    },
    {
      "epoch": 0.18170426065162906,
      "grad_norm": 0.15267407894134521,
      "learning_rate": 5.6502406753774104e-05,
      "loss": 1.507,
      "step": 145
    },
    {
      "epoch": 0.18295739348370926,
      "grad_norm": 0.14779289066791534,
      "learning_rate": 5.644505484246495e-05,
      "loss": 1.4844,
      "step": 146
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 0.153567373752594,
      "learning_rate": 5.638726612914217e-05,
      "loss": 1.5149,
      "step": 147
    },
    {
      "epoch": 0.18546365914786966,
      "grad_norm": 0.13506165146827698,
      "learning_rate": 5.632904156832169e-05,
      "loss": 1.4841,
      "step": 148
    },
    {
      "epoch": 0.18671679197994986,
      "grad_norm": 0.153153657913208,
      "learning_rate": 5.62703821217185e-05,
      "loss": 1.5633,
      "step": 149
    },
    {
      "epoch": 0.18796992481203006,
      "grad_norm": 0.14939852058887482,
      "learning_rate": 5.621128875823073e-05,
      "loss": 1.3896,
      "step": 150
    },
    {
      "epoch": 0.18922305764411027,
      "grad_norm": 0.1525796800851822,
      "learning_rate": 5.615176245392367e-05,
      "loss": 1.5226,
      "step": 151
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 0.15289181470870972,
      "learning_rate": 5.609180419201366e-05,
      "loss": 1.5143,
      "step": 152
    },
    {
      "epoch": 0.19172932330827067,
      "grad_norm": 0.15484128892421722,
      "learning_rate": 5.603141496285179e-05,
      "loss": 1.3956,
      "step": 153
    },
    {
      "epoch": 0.19298245614035087,
      "grad_norm": 0.1458120048046112,
      "learning_rate": 5.597059576390762e-05,
      "loss": 1.453,
      "step": 154
    },
    {
      "epoch": 0.19423558897243107,
      "grad_norm": 0.14109253883361816,
      "learning_rate": 5.590934759975267e-05,
      "loss": 1.4137,
      "step": 155
    },
    {
      "epoch": 0.19548872180451127,
      "grad_norm": 0.14820784330368042,
      "learning_rate": 5.584767148204379e-05,
      "loss": 1.4878,
      "step": 156
    },
    {
      "epoch": 0.19674185463659147,
      "grad_norm": 0.16532014310359955,
      "learning_rate": 5.578556842950651e-05,
      "loss": 1.4942,
      "step": 157
    },
    {
      "epoch": 0.19799498746867167,
      "grad_norm": 0.15438464283943176,
      "learning_rate": 5.572303946791819e-05,
      "loss": 1.353,
      "step": 158
    },
    {
      "epoch": 0.19924812030075187,
      "grad_norm": 0.150332972407341,
      "learning_rate": 5.566008563009107e-05,
      "loss": 1.469,
      "step": 159
    },
    {
      "epoch": 0.20050125313283207,
      "grad_norm": 0.1449950933456421,
      "learning_rate": 5.5596707955855215e-05,
      "loss": 1.4347,
      "step": 160
    },
    {
      "epoch": 0.20175438596491227,
      "grad_norm": 0.15944981575012207,
      "learning_rate": 5.553290749204134e-05,
      "loss": 1.512,
      "step": 161
    },
    {
      "epoch": 0.20300751879699247,
      "grad_norm": 0.14288978278636932,
      "learning_rate": 5.546868529246352e-05,
      "loss": 1.4669,
      "step": 162
    },
    {
      "epoch": 0.20426065162907267,
      "grad_norm": 0.16595055162906647,
      "learning_rate": 5.54040424179018e-05,
      "loss": 1.6102,
      "step": 163
    },
    {
      "epoch": 0.20551378446115287,
      "grad_norm": 0.1445576548576355,
      "learning_rate": 5.533897993608463e-05,
      "loss": 1.3255,
      "step": 164
    },
    {
      "epoch": 0.20676691729323307,
      "grad_norm": 0.15171661972999573,
      "learning_rate": 5.527349892167127e-05,
      "loss": 1.4527,
      "step": 165
    },
    {
      "epoch": 0.20802005012531327,
      "grad_norm": 0.1350892335176468,
      "learning_rate": 5.520760045623403e-05,
      "loss": 1.524,
      "step": 166
    },
    {
      "epoch": 0.20927318295739347,
      "grad_norm": 0.15125590562820435,
      "learning_rate": 5.514128562824039e-05,
      "loss": 1.5476,
      "step": 167
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 0.18517212569713593,
      "learning_rate": 5.507455553303506e-05,
      "loss": 1.4797,
      "step": 168
    },
    {
      "epoch": 0.21177944862155387,
      "grad_norm": 0.16194948554039001,
      "learning_rate": 5.5007411272821826e-05,
      "loss": 1.4256,
      "step": 169
    },
    {
      "epoch": 0.21303258145363407,
      "grad_norm": 0.16133227944374084,
      "learning_rate": 5.493985395664539e-05,
      "loss": 1.5882,
      "step": 170
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 0.15564045310020447,
      "learning_rate": 5.487188470037305e-05,
      "loss": 1.5051,
      "step": 171
    },
    {
      "epoch": 0.21553884711779447,
      "grad_norm": 0.1621345579624176,
      "learning_rate": 5.480350462667625e-05,
      "loss": 1.4765,
      "step": 172
    },
    {
      "epoch": 0.21679197994987467,
      "grad_norm": 0.16004936397075653,
      "learning_rate": 5.473471486501206e-05,
      "loss": 1.4995,
      "step": 173
    },
    {
      "epoch": 0.21804511278195488,
      "grad_norm": 0.1602533906698227,
      "learning_rate": 5.466551655160449e-05,
      "loss": 1.5091,
      "step": 174
    },
    {
      "epoch": 0.21929824561403508,
      "grad_norm": 0.15978209674358368,
      "learning_rate": 5.459591082942574e-05,
      "loss": 1.5351,
      "step": 175
    },
    {
      "epoch": 0.22055137844611528,
      "grad_norm": 0.15208299458026886,
      "learning_rate": 5.452589884817733e-05,
      "loss": 1.5023,
      "step": 176
    },
    {
      "epoch": 0.22180451127819548,
      "grad_norm": 0.1487240493297577,
      "learning_rate": 5.445548176427108e-05,
      "loss": 1.4458,
      "step": 177
    },
    {
      "epoch": 0.22305764411027568,
      "grad_norm": 0.14227648079395294,
      "learning_rate": 5.4384660740810074e-05,
      "loss": 1.4342,
      "step": 178
    },
    {
      "epoch": 0.22431077694235588,
      "grad_norm": 0.13798528909683228,
      "learning_rate": 5.431343694756935e-05,
      "loss": 1.3625,
      "step": 179
    },
    {
      "epoch": 0.22556390977443608,
      "grad_norm": 0.16410118341445923,
      "learning_rate": 5.424181156097666e-05,
      "loss": 1.4706,
      "step": 180
    },
    {
      "epoch": 0.22681704260651628,
      "grad_norm": 0.15224584937095642,
      "learning_rate": 5.416978576409301e-05,
      "loss": 1.5443,
      "step": 181
    },
    {
      "epoch": 0.22807017543859648,
      "grad_norm": 0.15167997777462006,
      "learning_rate": 5.409736074659311e-05,
      "loss": 1.5039,
      "step": 182
    },
    {
      "epoch": 0.22932330827067668,
      "grad_norm": 0.16224287450313568,
      "learning_rate": 5.402453770474575e-05,
      "loss": 1.4773,
      "step": 183
    },
    {
      "epoch": 0.23057644110275688,
      "grad_norm": 0.16024534404277802,
      "learning_rate": 5.395131784139401e-05,
      "loss": 1.5209,
      "step": 184
    },
    {
      "epoch": 0.23182957393483708,
      "grad_norm": 0.15539222955703735,
      "learning_rate": 5.3877702365935404e-05,
      "loss": 1.4137,
      "step": 185
    },
    {
      "epoch": 0.23308270676691728,
      "grad_norm": 0.146916463971138,
      "learning_rate": 5.380369249430191e-05,
      "loss": 1.4557,
      "step": 186
    },
    {
      "epoch": 0.23433583959899748,
      "grad_norm": 0.16157691180706024,
      "learning_rate": 5.37292894489399e-05,
      "loss": 1.5541,
      "step": 187
    },
    {
      "epoch": 0.23558897243107768,
      "grad_norm": 0.15176048874855042,
      "learning_rate": 5.36544944587899e-05,
      "loss": 1.5486,
      "step": 188
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 0.17215217649936676,
      "learning_rate": 5.357930875926636e-05,
      "loss": 1.5725,
      "step": 189
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 0.16552142798900604,
      "learning_rate": 5.3503733592237174e-05,
      "loss": 1.5372,
      "step": 190
    },
    {
      "epoch": 0.23934837092731828,
      "grad_norm": 0.1510804295539856,
      "learning_rate": 5.342777020600321e-05,
      "loss": 1.3789,
      "step": 191
    },
    {
      "epoch": 0.24060150375939848,
      "grad_norm": 0.1481272578239441,
      "learning_rate": 5.335141985527771e-05,
      "loss": 1.7376,
      "step": 192
    },
    {
      "epoch": 0.24185463659147868,
      "grad_norm": 0.1423473209142685,
      "learning_rate": 5.327468380116551e-05,
      "loss": 1.4623,
      "step": 193
    },
    {
      "epoch": 0.24310776942355888,
      "grad_norm": 0.15733863413333893,
      "learning_rate": 5.319756331114227e-05,
      "loss": 1.4554,
      "step": 194
    },
    {
      "epoch": 0.24436090225563908,
      "grad_norm": 0.1579495519399643,
      "learning_rate": 5.3120059659033484e-05,
      "loss": 1.3653,
      "step": 195
    },
    {
      "epoch": 0.24561403508771928,
      "grad_norm": 0.16561569273471832,
      "learning_rate": 5.304217412499349e-05,
      "loss": 1.4189,
      "step": 196
    },
    {
      "epoch": 0.24686716791979949,
      "grad_norm": 0.16004639863967896,
      "learning_rate": 5.296390799548426e-05,
      "loss": 1.4953,
      "step": 197
    },
    {
      "epoch": 0.24812030075187969,
      "grad_norm": 0.14528200030326843,
      "learning_rate": 5.2885262563254246e-05,
      "loss": 1.2997,
      "step": 198
    },
    {
      "epoch": 0.24937343358395989,
      "grad_norm": 0.15132363140583038,
      "learning_rate": 5.280623912731693e-05,
      "loss": 1.4261,
      "step": 199
    },
    {
      "epoch": 0.2506265664160401,
      "grad_norm": 0.15129469335079193,
      "learning_rate": 5.272683899292943e-05,
      "loss": 1.4778,
      "step": 200
    },
    {
      "epoch": 0.2518796992481203,
      "grad_norm": 0.16739892959594727,
      "learning_rate": 5.2647063471570914e-05,
      "loss": 1.4741,
      "step": 201
    },
    {
      "epoch": 0.2531328320802005,
      "grad_norm": 0.1433250457048416,
      "learning_rate": 5.256691388092095e-05,
      "loss": 1.398,
      "step": 202
    },
    {
      "epoch": 0.2543859649122807,
      "grad_norm": 0.16294027864933014,
      "learning_rate": 5.248639154483773e-05,
      "loss": 1.4912,
      "step": 203
    },
    {
      "epoch": 0.2556390977443609,
      "grad_norm": 0.16056182980537415,
      "learning_rate": 5.240549779333621e-05,
      "loss": 1.5168,
      "step": 204
    },
    {
      "epoch": 0.2568922305764411,
      "grad_norm": 0.1551842838525772,
      "learning_rate": 5.2324233962566154e-05,
      "loss": 1.4244,
      "step": 205
    },
    {
      "epoch": 0.2581453634085213,
      "grad_norm": 0.15926840901374817,
      "learning_rate": 5.224260139479004e-05,
      "loss": 1.5547,
      "step": 206
    },
    {
      "epoch": 0.2593984962406015,
      "grad_norm": 0.1555553674697876,
      "learning_rate": 5.21606014383609e-05,
      "loss": 1.4263,
      "step": 207
    },
    {
      "epoch": 0.2606516290726817,
      "grad_norm": 0.1518319994211197,
      "learning_rate": 5.207823544770006e-05,
      "loss": 1.4604,
      "step": 208
    },
    {
      "epoch": 0.2619047619047619,
      "grad_norm": 0.1934310644865036,
      "learning_rate": 5.199550478327474e-05,
      "loss": 1.5793,
      "step": 209
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 0.16186797618865967,
      "learning_rate": 5.1912410811575635e-05,
      "loss": 1.5374,
      "step": 210
    },
    {
      "epoch": 0.2644110275689223,
      "grad_norm": 0.15732304751873016,
      "learning_rate": 5.182895490509427e-05,
      "loss": 1.4555,
      "step": 211
    },
    {
      "epoch": 0.2656641604010025,
      "grad_norm": 0.17242950201034546,
      "learning_rate": 5.1745138442300396e-05,
      "loss": 1.5941,
      "step": 212
    },
    {
      "epoch": 0.2669172932330827,
      "grad_norm": 0.16432379186153412,
      "learning_rate": 5.16609628076192e-05,
      "loss": 1.4662,
      "step": 213
    },
    {
      "epoch": 0.2681704260651629,
      "grad_norm": 0.15895594656467438,
      "learning_rate": 5.15764293914084e-05,
      "loss": 1.4611,
      "step": 214
    },
    {
      "epoch": 0.2694235588972431,
      "grad_norm": 0.17354588210582733,
      "learning_rate": 5.149153958993537e-05,
      "loss": 1.4325,
      "step": 215
    },
    {
      "epoch": 0.2706766917293233,
      "grad_norm": 0.15314188599586487,
      "learning_rate": 5.140629480535396e-05,
      "loss": 1.4666,
      "step": 216
    },
    {
      "epoch": 0.2719298245614035,
      "grad_norm": 0.16267825663089752,
      "learning_rate": 5.132069644568143e-05,
      "loss": 1.4234,
      "step": 217
    },
    {
      "epoch": 0.2731829573934837,
      "grad_norm": 0.16543705761432648,
      "learning_rate": 5.123474592477517e-05,
      "loss": 1.5027,
      "step": 218
    },
    {
      "epoch": 0.2744360902255639,
      "grad_norm": 0.1630410999059677,
      "learning_rate": 5.1148444662309326e-05,
      "loss": 1.4812,
      "step": 219
    },
    {
      "epoch": 0.2756892230576441,
      "grad_norm": 0.17135612666606903,
      "learning_rate": 5.106179408375135e-05,
      "loss": 1.4541,
      "step": 220
    },
    {
      "epoch": 0.2769423558897243,
      "grad_norm": 0.1608368307352066,
      "learning_rate": 5.097479562033847e-05,
      "loss": 1.4371,
      "step": 221
    },
    {
      "epoch": 0.2781954887218045,
      "grad_norm": 0.17631706595420837,
      "learning_rate": 5.0887450709054065e-05,
      "loss": 1.4593,
      "step": 222
    },
    {
      "epoch": 0.2794486215538847,
      "grad_norm": 0.1584499627351761,
      "learning_rate": 5.079976079260391e-05,
      "loss": 1.3738,
      "step": 223
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 0.1915035843849182,
      "learning_rate": 5.071172731939233e-05,
      "loss": 1.5473,
      "step": 224
    },
    {
      "epoch": 0.2819548872180451,
      "grad_norm": 0.17202699184417725,
      "learning_rate": 5.062335174349831e-05,
      "loss": 1.4301,
      "step": 225
    },
    {
      "epoch": 0.2832080200501253,
      "grad_norm": 0.1597733050584793,
      "learning_rate": 5.053463552465145e-05,
      "loss": 1.3866,
      "step": 226
    },
    {
      "epoch": 0.2844611528822055,
      "grad_norm": 0.16445639729499817,
      "learning_rate": 5.0445580128207885e-05,
      "loss": 1.4294,
      "step": 227
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.17147259414196014,
      "learning_rate": 5.0356187025126035e-05,
      "loss": 1.4303,
      "step": 228
    },
    {
      "epoch": 0.2869674185463659,
      "grad_norm": 0.18276292085647583,
      "learning_rate": 5.026645769194236e-05,
      "loss": 1.4647,
      "step": 229
    },
    {
      "epoch": 0.2882205513784461,
      "grad_norm": 0.1690540462732315,
      "learning_rate": 5.017639361074694e-05,
      "loss": 1.5525,
      "step": 230
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 0.1618894338607788,
      "learning_rate": 5.0085996269159e-05,
      "loss": 1.4743,
      "step": 231
    },
    {
      "epoch": 0.2907268170426065,
      "grad_norm": 0.16552814841270447,
      "learning_rate": 4.9995267160302336e-05,
      "loss": 1.4404,
      "step": 232
    },
    {
      "epoch": 0.29197994987468673,
      "grad_norm": 0.17212465405464172,
      "learning_rate": 4.990420778278067e-05,
      "loss": 1.4921,
      "step": 233
    },
    {
      "epoch": 0.2932330827067669,
      "grad_norm": 0.17030447721481323,
      "learning_rate": 4.981281964065287e-05,
      "loss": 1.4577,
      "step": 234
    },
    {
      "epoch": 0.29448621553884713,
      "grad_norm": 0.16432829201221466,
      "learning_rate": 4.972110424340815e-05,
      "loss": 1.3663,
      "step": 235
    },
    {
      "epoch": 0.2957393483709273,
      "grad_norm": 0.17019811272621155,
      "learning_rate": 4.962906310594106e-05,
      "loss": 1.3605,
      "step": 236
    },
    {
      "epoch": 0.29699248120300753,
      "grad_norm": 0.15439601242542267,
      "learning_rate": 4.953669774852657e-05,
      "loss": 1.3821,
      "step": 237
    },
    {
      "epoch": 0.2982456140350877,
      "grad_norm": 0.16957609355449677,
      "learning_rate": 4.9444009696794836e-05,
      "loss": 1.5436,
      "step": 238
    },
    {
      "epoch": 0.29949874686716793,
      "grad_norm": 0.17840701341629028,
      "learning_rate": 4.9351000481706135e-05,
      "loss": 1.5132,
      "step": 239
    },
    {
      "epoch": 0.3007518796992481,
      "grad_norm": 0.16516822576522827,
      "learning_rate": 4.925767163952547e-05,
      "loss": 1.5301,
      "step": 240
    },
    {
      "epoch": 0.30200501253132833,
      "grad_norm": 0.16189689934253693,
      "learning_rate": 4.916402471179723e-05,
      "loss": 1.4475,
      "step": 241
    },
    {
      "epoch": 0.3032581453634085,
      "grad_norm": 0.1689058095216751,
      "learning_rate": 4.907006124531975e-05,
      "loss": 1.4622,
      "step": 242
    },
    {
      "epoch": 0.30451127819548873,
      "grad_norm": 0.16913363337516785,
      "learning_rate": 4.897578279211975e-05,
      "loss": 1.5433,
      "step": 243
    },
    {
      "epoch": 0.3057644110275689,
      "grad_norm": 0.17671072483062744,
      "learning_rate": 4.888119090942665e-05,
      "loss": 1.4205,
      "step": 244
    },
    {
      "epoch": 0.30701754385964913,
      "grad_norm": 0.1828470230102539,
      "learning_rate": 4.878628715964693e-05,
      "loss": 1.4191,
      "step": 245
    },
    {
      "epoch": 0.3082706766917293,
      "grad_norm": 0.17133688926696777,
      "learning_rate": 4.86910731103383e-05,
      "loss": 1.5249,
      "step": 246
    },
    {
      "epoch": 0.30952380952380953,
      "grad_norm": 0.167369082570076,
      "learning_rate": 4.859555033418374e-05,
      "loss": 1.4614,
      "step": 247
    },
    {
      "epoch": 0.3107769423558897,
      "grad_norm": 0.18615829944610596,
      "learning_rate": 4.8499720408965634e-05,
      "loss": 1.5662,
      "step": 248
    },
    {
      "epoch": 0.31203007518796994,
      "grad_norm": 0.15995121002197266,
      "learning_rate": 4.840358491753961e-05,
      "loss": 1.4518,
      "step": 249
    },
    {
      "epoch": 0.3132832080200501,
      "grad_norm": 0.17739561200141907,
      "learning_rate": 4.830714544780846e-05,
      "loss": 1.4187,
      "step": 250
    },
    {
      "epoch": 0.31453634085213034,
      "grad_norm": 0.16059748828411102,
      "learning_rate": 4.821040359269587e-05,
      "loss": 1.3574,
      "step": 251
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 0.1768496036529541,
      "learning_rate": 4.811336095012015e-05,
      "loss": 1.4048,
      "step": 252
    },
    {
      "epoch": 0.31704260651629074,
      "grad_norm": 0.17573674023151398,
      "learning_rate": 4.8016019122967826e-05,
      "loss": 1.542,
      "step": 253
    },
    {
      "epoch": 0.3182957393483709,
      "grad_norm": 0.15960711240768433,
      "learning_rate": 4.791837971906711e-05,
      "loss": 1.3686,
      "step": 254
    },
    {
      "epoch": 0.31954887218045114,
      "grad_norm": 0.18074101209640503,
      "learning_rate": 4.782044435116145e-05,
      "loss": 1.5344,
      "step": 255
    },
    {
      "epoch": 0.3208020050125313,
      "grad_norm": 0.2042127400636673,
      "learning_rate": 4.7722214636882806e-05,
      "loss": 1.5176,
      "step": 256
    },
    {
      "epoch": 0.32205513784461154,
      "grad_norm": 0.16601479053497314,
      "learning_rate": 4.762369219872494e-05,
      "loss": 1.3895,
      "step": 257
    },
    {
      "epoch": 0.3233082706766917,
      "grad_norm": 0.18349255621433258,
      "learning_rate": 4.752487866401667e-05,
      "loss": 1.4652,
      "step": 258
    },
    {
      "epoch": 0.32456140350877194,
      "grad_norm": 0.18353460729122162,
      "learning_rate": 4.742577566489493e-05,
      "loss": 1.5181,
      "step": 259
    },
    {
      "epoch": 0.3258145363408521,
      "grad_norm": 0.18172797560691833,
      "learning_rate": 4.732638483827784e-05,
      "loss": 1.4484,
      "step": 260
    },
    {
      "epoch": 0.32706766917293234,
      "grad_norm": 0.17833280563354492,
      "learning_rate": 4.722670782583769e-05,
      "loss": 1.4063,
      "step": 261
    },
    {
      "epoch": 0.3283208020050125,
      "grad_norm": 0.17507833242416382,
      "learning_rate": 4.7126746273973745e-05,
      "loss": 1.5928,
      "step": 262
    },
    {
      "epoch": 0.32957393483709274,
      "grad_norm": 0.17333512008190155,
      "learning_rate": 4.702650183378517e-05,
      "loss": 1.526,
      "step": 263
    },
    {
      "epoch": 0.3308270676691729,
      "grad_norm": 0.19539874792099,
      "learning_rate": 4.692597616104364e-05,
      "loss": 1.4775,
      "step": 264
    },
    {
      "epoch": 0.33208020050125314,
      "grad_norm": 0.1711081713438034,
      "learning_rate": 4.682517091616608e-05,
      "loss": 1.2613,
      "step": 265
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.17969585955142975,
      "learning_rate": 4.672408776418718e-05,
      "loss": 1.5688,
      "step": 266
    },
    {
      "epoch": 0.33458646616541354,
      "grad_norm": 0.17389164865016937,
      "learning_rate": 4.6622728374731936e-05,
      "loss": 1.3871,
      "step": 267
    },
    {
      "epoch": 0.3358395989974937,
      "grad_norm": 0.16892869770526886,
      "learning_rate": 4.652109442198804e-05,
      "loss": 1.4454,
      "step": 268
    },
    {
      "epoch": 0.33709273182957394,
      "grad_norm": 0.18089257180690765,
      "learning_rate": 4.641918758467824e-05,
      "loss": 1.4727,
      "step": 269
    },
    {
      "epoch": 0.3383458646616541,
      "grad_norm": 0.18539738655090332,
      "learning_rate": 4.6317009546032615e-05,
      "loss": 1.4823,
      "step": 270
    },
    {
      "epoch": 0.33959899749373434,
      "grad_norm": 0.1722467839717865,
      "learning_rate": 4.621456199376077e-05,
      "loss": 1.4174,
      "step": 271
    },
    {
      "epoch": 0.3408521303258145,
      "grad_norm": 0.17670480906963348,
      "learning_rate": 4.611184662002396e-05,
      "loss": 1.5394,
      "step": 272
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 0.16371813416481018,
      "learning_rate": 4.6008865121407126e-05,
      "loss": 1.4326,
      "step": 273
    },
    {
      "epoch": 0.3433583959899749,
      "grad_norm": 0.17700393497943878,
      "learning_rate": 4.590561919889091e-05,
      "loss": 1.4755,
      "step": 274
    },
    {
      "epoch": 0.34461152882205515,
      "grad_norm": 0.1750127226114273,
      "learning_rate": 4.58021105578235e-05,
      "loss": 1.3065,
      "step": 275
    },
    {
      "epoch": 0.3458646616541353,
      "grad_norm": 0.18933042883872986,
      "learning_rate": 4.5698340907892534e-05,
      "loss": 1.5368,
      "step": 276
    },
    {
      "epoch": 0.34711779448621555,
      "grad_norm": 0.19926317036151886,
      "learning_rate": 4.55943119630968e-05,
      "loss": 1.5401,
      "step": 277
    },
    {
      "epoch": 0.3483709273182957,
      "grad_norm": 0.1745910942554474,
      "learning_rate": 4.5490025441717946e-05,
      "loss": 1.4631,
      "step": 278
    },
    {
      "epoch": 0.34962406015037595,
      "grad_norm": 0.1677342802286148,
      "learning_rate": 4.538548306629209e-05,
      "loss": 1.4973,
      "step": 279
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 0.17847084999084473,
      "learning_rate": 4.5280686563581425e-05,
      "loss": 1.5144,
      "step": 280
    },
    {
      "epoch": 0.35213032581453635,
      "grad_norm": 0.18145380914211273,
      "learning_rate": 4.517563766454561e-05,
      "loss": 1.4917,
      "step": 281
    },
    {
      "epoch": 0.3533834586466165,
      "grad_norm": 0.19056649506092072,
      "learning_rate": 4.5070338104313224e-05,
      "loss": 1.4575,
      "step": 282
    },
    {
      "epoch": 0.35463659147869675,
      "grad_norm": 0.17569053173065186,
      "learning_rate": 4.4964789622153116e-05,
      "loss": 1.5196,
      "step": 283
    },
    {
      "epoch": 0.3558897243107769,
      "grad_norm": 0.18289506435394287,
      "learning_rate": 4.485899396144566e-05,
      "loss": 1.402,
      "step": 284
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.17359083890914917,
      "learning_rate": 4.4752952869653964e-05,
      "loss": 1.4144,
      "step": 285
    },
    {
      "epoch": 0.3583959899749373,
      "grad_norm": 0.1759563386440277,
      "learning_rate": 4.464666809829499e-05,
      "loss": 1.5265,
      "step": 286
    },
    {
      "epoch": 0.35964912280701755,
      "grad_norm": 0.17689688503742218,
      "learning_rate": 4.4540141402910645e-05,
      "loss": 1.5254,
      "step": 287
    },
    {
      "epoch": 0.3609022556390977,
      "grad_norm": 0.18115444481372833,
      "learning_rate": 4.443337454303881e-05,
      "loss": 1.6087,
      "step": 288
    },
    {
      "epoch": 0.36215538847117795,
      "grad_norm": 0.17893773317337036,
      "learning_rate": 4.432636928218419e-05,
      "loss": 1.4052,
      "step": 289
    },
    {
      "epoch": 0.3634085213032581,
      "grad_norm": 0.2004009187221527,
      "learning_rate": 4.4219127387789293e-05,
      "loss": 1.503,
      "step": 290
    },
    {
      "epoch": 0.36466165413533835,
      "grad_norm": 0.16822977364063263,
      "learning_rate": 4.411165063120514e-05,
      "loss": 1.3674,
      "step": 291
    },
    {
      "epoch": 0.3659147869674185,
      "grad_norm": 0.1785370260477066,
      "learning_rate": 4.400394078766208e-05,
      "loss": 1.5148,
      "step": 292
    },
    {
      "epoch": 0.36716791979949875,
      "grad_norm": 0.16503627598285675,
      "learning_rate": 4.389599963624042e-05,
      "loss": 1.4331,
      "step": 293
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 0.1841992437839508,
      "learning_rate": 4.3787828959841084e-05,
      "loss": 1.3116,
      "step": 294
    },
    {
      "epoch": 0.36967418546365916,
      "grad_norm": 0.18730437755584717,
      "learning_rate": 4.367943054515611e-05,
      "loss": 1.3779,
      "step": 295
    },
    {
      "epoch": 0.37092731829573933,
      "grad_norm": 0.17390833795070648,
      "learning_rate": 4.3570806182639185e-05,
      "loss": 1.4706,
      "step": 296
    },
    {
      "epoch": 0.37218045112781956,
      "grad_norm": 0.16994328796863556,
      "learning_rate": 4.346195766647606e-05,
      "loss": 1.4047,
      "step": 297
    },
    {
      "epoch": 0.37343358395989973,
      "grad_norm": 0.17841129004955292,
      "learning_rate": 4.335288679455487e-05,
      "loss": 1.4686,
      "step": 298
    },
    {
      "epoch": 0.37468671679197996,
      "grad_norm": 0.19922415912151337,
      "learning_rate": 4.3243595368436517e-05,
      "loss": 1.5165,
      "step": 299
    },
    {
      "epoch": 0.37593984962406013,
      "grad_norm": 0.16899648308753967,
      "learning_rate": 4.3134085193324865e-05,
      "loss": 1.4881,
      "step": 300
    },
    {
      "epoch": 0.37719298245614036,
      "grad_norm": 0.16627173125743866,
      "learning_rate": 4.302435807803693e-05,
      "loss": 1.4882,
      "step": 301
    },
    {
      "epoch": 0.37844611528822053,
      "grad_norm": 0.19958005845546722,
      "learning_rate": 4.291441583497296e-05,
      "loss": 1.5203,
      "step": 302
    },
    {
      "epoch": 0.37969924812030076,
      "grad_norm": 0.19517013430595398,
      "learning_rate": 4.280426028008661e-05,
      "loss": 1.4329,
      "step": 303
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 0.1873486489057541,
      "learning_rate": 4.269389323285484e-05,
      "loss": 1.3892,
      "step": 304
    },
    {
      "epoch": 0.38220551378446116,
      "grad_norm": 0.17650927603244781,
      "learning_rate": 4.258331651624791e-05,
      "loss": 1.4937,
      "step": 305
    },
    {
      "epoch": 0.38345864661654133,
      "grad_norm": 0.17674066126346588,
      "learning_rate": 4.247253195669925e-05,
      "loss": 1.4761,
      "step": 306
    },
    {
      "epoch": 0.38471177944862156,
      "grad_norm": 0.19194164872169495,
      "learning_rate": 4.236154138407533e-05,
      "loss": 1.4483,
      "step": 307
    },
    {
      "epoch": 0.38596491228070173,
      "grad_norm": 0.18375438451766968,
      "learning_rate": 4.2250346631645374e-05,
      "loss": 1.458,
      "step": 308
    },
    {
      "epoch": 0.38721804511278196,
      "grad_norm": 0.1865517795085907,
      "learning_rate": 4.213894953605113e-05,
      "loss": 1.4413,
      "step": 309
    },
    {
      "epoch": 0.38847117794486213,
      "grad_norm": 0.18141886591911316,
      "learning_rate": 4.2027351937276525e-05,
      "loss": 1.4391,
      "step": 310
    },
    {
      "epoch": 0.38972431077694236,
      "grad_norm": 0.18934974074363708,
      "learning_rate": 4.191555567861728e-05,
      "loss": 1.4008,
      "step": 311
    },
    {
      "epoch": 0.39097744360902253,
      "grad_norm": 0.1794135719537735,
      "learning_rate": 4.1803562606650374e-05,
      "loss": 1.3936,
      "step": 312
    },
    {
      "epoch": 0.39223057644110276,
      "grad_norm": 0.17992708086967468,
      "learning_rate": 4.169137457120373e-05,
      "loss": 1.3573,
      "step": 313
    },
    {
      "epoch": 0.39348370927318294,
      "grad_norm": 0.1869203895330429,
      "learning_rate": 4.157899342532545e-05,
      "loss": 1.4579,
      "step": 314
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 0.17959001660346985,
      "learning_rate": 4.146642102525336e-05,
      "loss": 1.6471,
      "step": 315
    },
    {
      "epoch": 0.39598997493734334,
      "grad_norm": 0.18178239464759827,
      "learning_rate": 4.1353659230384306e-05,
      "loss": 1.5083,
      "step": 316
    },
    {
      "epoch": 0.39724310776942356,
      "grad_norm": 0.17811906337738037,
      "learning_rate": 4.124070990324339e-05,
      "loss": 1.3851,
      "step": 317
    },
    {
      "epoch": 0.39849624060150374,
      "grad_norm": 0.18983860313892365,
      "learning_rate": 4.1127574909453276e-05,
      "loss": 1.4802,
      "step": 318
    },
    {
      "epoch": 0.39974937343358397,
      "grad_norm": 0.19141417741775513,
      "learning_rate": 4.1014256117703344e-05,
      "loss": 1.4496,
      "step": 319
    },
    {
      "epoch": 0.40100250626566414,
      "grad_norm": 0.18502330780029297,
      "learning_rate": 4.090075539971883e-05,
      "loss": 1.3621,
      "step": 320
    }
  ],
  "logging_steps": 1,
  "max_steps": 798,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 32,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3421542911875482e+18,
  "train_batch_size": 12,
  "trial_name": null,
  "trial_params": null
}