{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 178,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 307.2925252001398,
      "learning_rate": 1.111111111111111e-06,
      "loss": 2.14,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 348.5579299962218,
      "learning_rate": 2.222222222222222e-06,
      "loss": 2.1592,
      "step": 2
    },
    {
      "epoch": 0.02,
      "grad_norm": 295.8217735171242,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.1112,
      "step": 3
    },
    {
      "epoch": 0.02,
      "grad_norm": 320.95278420676283,
      "learning_rate": 4.444444444444444e-06,
      "loss": 2.0727,
      "step": 4
    },
    {
      "epoch": 0.03,
      "grad_norm": 273.6994356101882,
      "learning_rate": 5.555555555555555e-06,
      "loss": 1.8522,
      "step": 5
    },
    {
      "epoch": 0.03,
      "grad_norm": 137.18652764811333,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.5844,
      "step": 6
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.105539597979915,
      "learning_rate": 7.777777777777777e-06,
      "loss": 1.4424,
      "step": 7
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.0951798632526275,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.4244,
      "step": 8
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.6078027918916717,
      "learning_rate": 9.999999999999999e-06,
      "loss": 1.3763,
      "step": 9
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.9798839986563381,
      "learning_rate": 1.111111111111111e-05,
      "loss": 1.3583,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.065159602417454,
      "learning_rate": 1.2222222222222222e-05,
      "loss": 1.3335,
      "step": 11
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.3002391180584008,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.309,
      "step": 12
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2825778137111357,
      "learning_rate": 1.4444444444444444e-05,
      "loss": 1.2932,
      "step": 13
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.0205928394685457,
      "learning_rate": 1.5555555555555555e-05,
      "loss": 1.2689,
      "step": 14
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.3373431587246087,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.2489,
      "step": 15
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.8043774075647928,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.2302,
      "step": 16
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8741531962350515,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.2195,
      "step": 17
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.7028376256413013,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 1.2027,
      "step": 18
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.6648050831010063,
      "learning_rate": 2.111111111111111e-05,
      "loss": 1.1887,
      "step": 19
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.7354468182477213,
      "learning_rate": 2.222222222222222e-05,
      "loss": 1.1704,
      "step": 20
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7839624362161648,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.1654,
      "step": 21
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7752033041911668,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 1.1544,
      "step": 22
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.7396236066452132,
      "learning_rate": 2.5555555555555557e-05,
      "loss": 1.138,
      "step": 23
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.748517600591021,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 1.133,
      "step": 24
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.7065074963471533,
      "learning_rate": 2.777777777777778e-05,
      "loss": 1.1119,
      "step": 25
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.7289352652301433,
      "learning_rate": 2.8888888888888888e-05,
      "loss": 1.1132,
      "step": 26
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.7324972388581718,
      "learning_rate": 3e-05,
      "loss": 1.1008,
      "step": 27
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7020306466156484,
      "learning_rate": 3.111111111111111e-05,
      "loss": 1.1067,
      "step": 28
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7080095311543003,
      "learning_rate": 3.222222222222223e-05,
      "loss": 1.1028,
      "step": 29
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.7149252394051553,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.0953,
      "step": 30
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.7285982439983161,
      "learning_rate": 3.444444444444445e-05,
      "loss": 1.0859,
      "step": 31
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.7057181936211316,
      "learning_rate": 3.555555555555555e-05,
      "loss": 1.0829,
      "step": 32
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.718290068719298,
      "learning_rate": 3.666666666666667e-05,
      "loss": 1.0658,
      "step": 33
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.6998775916382852,
      "learning_rate": 3.777777777777778e-05,
      "loss": 1.0687,
      "step": 34
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.6851057008807085,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.0701,
      "step": 35
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.6724489509428345,
      "learning_rate": 3.9999999999999996e-05,
      "loss": 1.0675,
      "step": 36
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.6480552435697329,
      "learning_rate": 4.1111111111111116e-05,
      "loss": 1.0537,
      "step": 37
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.6383559954105056,
      "learning_rate": 4.222222222222222e-05,
      "loss": 1.0463,
      "step": 38
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.6029067517028285,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 1.0422,
      "step": 39
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.5709974683191396,
      "learning_rate": 4.444444444444444e-05,
      "loss": 1.037,
      "step": 40
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.5363586512263623,
      "learning_rate": 4.555555555555556e-05,
      "loss": 1.0249,
      "step": 41
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.5064701225673895,
      "learning_rate": 4.666666666666667e-05,
      "loss": 1.0269,
      "step": 42
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.4492710010511058,
      "learning_rate": 4.777777777777778e-05,
      "loss": 1.014,
      "step": 43
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.4154607764986574,
      "learning_rate": 4.888888888888889e-05,
      "loss": 1.0209,
      "step": 44
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.3647804068160132,
      "learning_rate": 5e-05,
      "loss": 1.019,
      "step": 45
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.3247880218369176,
      "learning_rate": 5.1111111111111115e-05,
      "loss": 1.0142,
      "step": 46
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.3054048294238843,
      "learning_rate": 5.222222222222222e-05,
      "loss": 1.005,
      "step": 47
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.2911566588563454,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.0182,
      "step": 48
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.30175247842711633,
      "learning_rate": 5.4444444444444446e-05,
      "loss": 1.003,
      "step": 49
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.2957438299690764,
      "learning_rate": 5.555555555555556e-05,
      "loss": 1.0018,
      "step": 50
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.222811420448816,
      "learning_rate": 5.6666666666666664e-05,
      "loss": 0.9982,
      "step": 51
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.19282721337866324,
      "learning_rate": 5.7777777777777776e-05,
      "loss": 1.003,
      "step": 52
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.2271139316205582,
      "learning_rate": 5.888888888888889e-05,
      "loss": 0.9884,
      "step": 53
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.22400303611020278,
      "learning_rate": 6e-05,
      "loss": 0.9933,
      "step": 54
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.2640000653332148,
      "learning_rate": 5.999935744992388e-05,
      "loss": 0.9898,
      "step": 55
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.29683962789566454,
      "learning_rate": 5.999742982722021e-05,
      "loss": 0.9894,
      "step": 56
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.23953312072588218,
      "learning_rate": 5.999421721446195e-05,
      "loss": 0.9891,
      "step": 57
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.22760431232110737,
      "learning_rate": 5.9989719749266715e-05,
      "loss": 0.9794,
      "step": 58
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.28069530624064654,
      "learning_rate": 5.998393762429097e-05,
      "loss": 0.9827,
      "step": 59
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.33248489828390104,
      "learning_rate": 5.997687108722169e-05,
      "loss": 0.9829,
      "step": 60
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.37460157299926583,
      "learning_rate": 5.9968520440765807e-05,
      "loss": 0.9865,
      "step": 61
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.40204606775328766,
      "learning_rate": 5.9958886042637214e-05,
      "loss": 0.9872,
      "step": 62
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.2760468657673376,
      "learning_rate": 5.994796830554148e-05,
      "loss": 0.9825,
      "step": 63
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.25200297636338487,
      "learning_rate": 5.9935767697158103e-05,
      "loss": 0.9761,
      "step": 64
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.3387334098621161,
      "learning_rate": 5.992228474012056e-05,
      "loss": 0.9724,
      "step": 65
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.3352356569193807,
      "learning_rate": 5.990752001199384e-05,
      "loss": 0.9694,
      "step": 66
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.25307126239627026,
      "learning_rate": 5.989147414524976e-05,
      "loss": 0.9751,
      "step": 67
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.28603448445681706,
      "learning_rate": 5.987414782723985e-05,
      "loss": 0.9675,
      "step": 68
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.2512510471162242,
      "learning_rate": 5.985554180016591e-05,
      "loss": 0.9713,
      "step": 69
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.23308838498745504,
      "learning_rate": 5.98356568610482e-05,
      "loss": 0.9675,
      "step": 70
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.2507676754613168,
      "learning_rate": 5.981449386169134e-05,
      "loss": 0.9768,
      "step": 71
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.17834215895890057,
      "learning_rate": 5.979205370864779e-05,
      "loss": 0.9736,
      "step": 72
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.22157097251518248,
      "learning_rate": 5.976833736317901e-05,
      "loss": 0.9761,
      "step": 73
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.19478083716793773,
      "learning_rate": 5.9743345841214316e-05,
      "loss": 0.9578,
      "step": 74
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.23929984172471444,
      "learning_rate": 5.9717080213307314e-05,
      "loss": 0.9637,
      "step": 75
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1792504123152509,
      "learning_rate": 5.968954160459011e-05,
      "loss": 0.9694,
      "step": 76
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1942820411854134,
      "learning_rate": 5.966073119472502e-05,
      "loss": 0.9654,
      "step": 77
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.20225817833116058,
      "learning_rate": 5.963065021785414e-05,
      "loss": 0.9568,
      "step": 78
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.17871245685588816,
      "learning_rate": 5.9599299962546375e-05,
      "loss": 0.9672,
      "step": 79
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.19357967707631799,
      "learning_rate": 5.956668177174234e-05,
      "loss": 0.9581,
      "step": 80
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.17703904963665285,
      "learning_rate": 5.953279704269675e-05,
      "loss": 0.9399,
      "step": 81
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.18397738069475075,
      "learning_rate": 5.949764722691864e-05,
      "loss": 0.9582,
      "step": 82
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.1667458999382299,
      "learning_rate": 5.9461233830109117e-05,
      "loss": 0.9574,
      "step": 83
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.15246629979305598,
      "learning_rate": 5.9423558412096914e-05,
      "loss": 0.9624,
      "step": 84
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.147474418280348,
      "learning_rate": 5.938462258677154e-05,
      "loss": 0.9574,
      "step": 85
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.18908685471550463,
      "learning_rate": 5.934442802201417e-05,
      "loss": 0.9559,
      "step": 86
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.15962484297812873,
      "learning_rate": 5.930297643962617e-05,
      "loss": 0.9565,
      "step": 87
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.15626218745541218,
      "learning_rate": 5.926026961525538e-05,
      "loss": 0.9669,
      "step": 88
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.16363056001381104,
      "learning_rate": 5.921630937832001e-05,
      "loss": 0.9575,
      "step": 89
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.17266359267085402,
      "learning_rate": 5.91710976119303e-05,
      "loss": 0.9482,
      "step": 90
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.16154285700465842,
      "learning_rate": 5.9124636252807844e-05,
      "loss": 0.9486,
      "step": 91
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.1505238744935054,
      "learning_rate": 5.907692729120263e-05,
      "loss": 0.9465,
      "step": 92
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.1665515957602505,
      "learning_rate": 5.9027972770807796e-05,
      "loss": 0.9458,
      "step": 93
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.15288744931626277,
      "learning_rate": 5.897777478867205e-05,
      "loss": 0.9513,
      "step": 94
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.13893588088386338,
      "learning_rate": 5.892633549510988e-05,
      "loss": 0.9517,
      "step": 95
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.15518080707211024,
      "learning_rate": 5.887365709360941e-05,
      "loss": 0.956,
      "step": 96
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.16735904200015367,
      "learning_rate": 5.881974184073806e-05,
      "loss": 0.9644,
      "step": 97
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.16211582659946666,
      "learning_rate": 5.876459204604579e-05,
      "loss": 0.947,
      "step": 98
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16067404496452142,
      "learning_rate": 5.8708210071966266e-05,
      "loss": 0.9493,
      "step": 99
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16466229984590008,
      "learning_rate": 5.8650598333715604e-05,
      "loss": 0.9525,
      "step": 100
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.15635651077742227,
      "learning_rate": 5.8591759299188915e-05,
      "loss": 0.9462,
      "step": 101
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.1445739463712597,
      "learning_rate": 5.853169548885461e-05,
      "loss": 0.9557,
      "step": 102
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.16706069442335733,
      "learning_rate": 5.847040947564642e-05,
      "loss": 0.9571,
      "step": 103
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.14383289999902457,
      "learning_rate": 5.8407903884853173e-05,
      "loss": 0.9452,
      "step": 104
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.1409356084289515,
      "learning_rate": 5.8344181394006345e-05,
      "loss": 0.9452,
      "step": 105
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.1697312533943109,
      "learning_rate": 5.827924473276536e-05,
      "loss": 0.9567,
      "step": 106
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.17531082504359882,
      "learning_rate": 5.821309668280065e-05,
      "loss": 0.9462,
      "step": 107
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.1639474813042821,
      "learning_rate": 5.814574007767453e-05,
      "loss": 0.9485,
      "step": 108
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.17850681384081132,
      "learning_rate": 5.807717780271977e-05,
      "loss": 0.9366,
      "step": 109
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.16125613154173873,
      "learning_rate": 5.800741279491605e-05,
      "loss": 0.9451,
      "step": 110
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.197900179672024,
      "learning_rate": 5.7936448042764106e-05,
      "loss": 0.9495,
      "step": 111
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.21076815721637063,
      "learning_rate": 5.7864286586157726e-05,
      "loss": 0.9435,
      "step": 112
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.19782059920889028,
      "learning_rate": 5.7790931516253545e-05,
      "loss": 0.9416,
      "step": 113
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.1836244550876009,
      "learning_rate": 5.7716385975338605e-05,
      "loss": 0.9466,
      "step": 114
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.1613256822257701,
      "learning_rate": 5.764065315669578e-05,
      "loss": 0.9513,
      "step": 115
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.16741350820768655,
      "learning_rate": 5.756373630446695e-05,
      "loss": 0.9418,
      "step": 116
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.18843157688751017,
      "learning_rate": 5.748563871351408e-05,
      "loss": 0.945,
      "step": 117
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.15069085597597218,
      "learning_rate": 5.7406363729278026e-05,
      "loss": 0.9466,
      "step": 118
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.16662142102625724,
      "learning_rate": 5.7325914747635275e-05,
      "loss": 0.9486,
      "step": 119
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.19590155354881233,
      "learning_rate": 5.724429521475244e-05,
      "loss": 0.9435,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.15881882311241918,
      "learning_rate": 5.716150862693866e-05,
      "loss": 0.9466,
      "step": 121
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.14283264252478434,
      "learning_rate": 5.707755853049582e-05,
      "loss": 0.9412,
      "step": 122
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.16782472517291772,
      "learning_rate": 5.699244852156665e-05,
      "loss": 0.9382,
      "step": 123
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.16548141172284359,
      "learning_rate": 5.690618224598065e-05,
      "loss": 0.9479,
      "step": 124
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.13809707490800946,
      "learning_rate": 5.681876339909797e-05,
      "loss": 0.9429,
      "step": 125
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.18656335425800988,
      "learning_rate": 5.673019572565103e-05,
      "loss": 0.9381,
      "step": 126
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.1732296780937527,
      "learning_rate": 5.664048301958422e-05,
      "loss": 0.9431,
      "step": 127
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.14221356678314365,
      "learning_rate": 5.654962912389126e-05,
      "loss": 0.9523,
      "step": 128
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.15119656927035072,
      "learning_rate": 5.645763793045065e-05,
      "loss": 0.9392,
      "step": 129
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.14464956279442504,
      "learning_rate": 5.636451337985896e-05,
      "loss": 0.9384,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.13140580588464784,
      "learning_rate": 5.627025946126199e-05,
      "loss": 0.9372,
      "step": 131
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.14493562139149813,
      "learning_rate": 5.617488021218392e-05,
      "loss": 0.9358,
      "step": 132
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.14893216335321577,
      "learning_rate": 5.6078379718354315e-05,
      "loss": 0.9419,
      "step": 133
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.14722396099871643,
      "learning_rate": 5.5980762113533166e-05,
      "loss": 0.944,
      "step": 134
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.1484388680331261,
      "learning_rate": 5.588203157933376e-05,
      "loss": 0.946,
      "step": 135
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.14453486799975523,
      "learning_rate": 5.578219234504359e-05,
      "loss": 0.9502,
      "step": 136
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.13849264435281988,
      "learning_rate": 5.568124868744315e-05,
      "loss": 0.9339,
      "step": 137
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.16199659629172095,
      "learning_rate": 5.557920493062277e-05,
      "loss": 0.9238,
      "step": 138
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.16192446153254933,
      "learning_rate": 5.547606544579737e-05,
      "loss": 0.9336,
      "step": 139
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.14648464155352814,
      "learning_rate": 5.5371834651119204e-05,
      "loss": 0.9305,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.1383162638141149,
      "learning_rate": 5.5266517011488596e-05,
      "loss": 0.9391,
      "step": 141
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.1543998232523172,
      "learning_rate": 5.5160117038362726e-05,
      "loss": 0.9366,
      "step": 142
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.15730672921508043,
      "learning_rate": 5.5052639289562294e-05,
      "loss": 0.9346,
      "step": 143
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.13879299352222021,
      "learning_rate": 5.494408836907636e-05,
      "loss": 0.9364,
      "step": 144
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.15487093688296455,
      "learning_rate": 5.483446892686507e-05,
      "loss": 0.9246,
      "step": 145
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.14401066674945762,
      "learning_rate": 5.472378565866047e-05,
      "loss": 0.9361,
      "step": 146
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.14277478926628612,
      "learning_rate": 5.461204330576541e-05,
      "loss": 0.9389,
      "step": 147
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.16131149816932222,
      "learning_rate": 5.4499246654850374e-05,
      "loss": 0.9371,
      "step": 148
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.17407874387998404,
      "learning_rate": 5.4385400537748465e-05,
      "loss": 0.9372,
      "step": 149
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.17636478629943686,
      "learning_rate": 5.427050983124843e-05,
      "loss": 0.9343,
      "step": 150
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.16977563161518314,
      "learning_rate": 5.4154579456885744e-05,
      "loss": 0.9281,
      "step": 151
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.14318477872489785,
      "learning_rate": 5.403761438073182e-05,
      "loss": 0.9365,
      "step": 152
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.1397942734256781,
      "learning_rate": 5.3919619613181215e-05,
      "loss": 0.9469,
      "step": 153
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.137963649770748,
      "learning_rate": 5.3800600208737054e-05,
      "loss": 0.9359,
      "step": 154
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.15171364959388206,
      "learning_rate": 5.3680561265794496e-05,
      "loss": 0.9269,
      "step": 155
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.14709787438935637,
      "learning_rate": 5.3559507926422344e-05,
      "loss": 0.9383,
      "step": 156
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.12890500807018168,
      "learning_rate": 5.343744537614276e-05,
      "loss": 0.924,
      "step": 157
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.11549683107184264,
      "learning_rate": 5.331437884370913e-05,
      "loss": 0.9283,
      "step": 158
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.15022761689747757,
      "learning_rate": 5.319031360088211e-05,
      "loss": 0.9307,
      "step": 159
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.15714839886122223,
      "learning_rate": 5.306525496220379e-05,
      "loss": 0.935,
      "step": 160
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.16859132275494298,
      "learning_rate": 5.293920828477001e-05,
      "loss": 0.9239,
      "step": 161
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.13633581062447336,
      "learning_rate": 5.281217896800093e-05,
      "loss": 0.9414,
      "step": 162
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.1513556776118746,
      "learning_rate": 5.268417245340968e-05,
      "loss": 0.9338,
      "step": 163
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.1757338104436492,
      "learning_rate": 5.255519422436932e-05,
      "loss": 0.9351,
      "step": 164
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.19112425290854476,
      "learning_rate": 5.242524980587791e-05,
      "loss": 0.9333,
      "step": 165
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.17979027309488244,
      "learning_rate": 5.2294344764321825e-05,
      "loss": 0.9179,
      "step": 166
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.16529910589891425,
      "learning_rate": 5.2162484707237387e-05,
      "loss": 0.9356,
      "step": 167
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.20692577816446356,
      "learning_rate": 5.202967528307057e-05,
      "loss": 0.9276,
      "step": 168
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.197154695709334,
      "learning_rate": 5.1895922180935066e-05,
      "loss": 0.9303,
      "step": 169
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.14150974456692325,
      "learning_rate": 5.176123113036863e-05,
      "loss": 0.9364,
      "step": 170
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.15101923044037124,
      "learning_rate": 5.162560790108756e-05,
      "loss": 0.9219,
      "step": 171
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.19585100899813354,
      "learning_rate": 5.148905830273964e-05,
      "loss": 0.9282,
      "step": 172
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.16466476745138203,
      "learning_rate": 5.135158818465514e-05,
      "loss": 0.9267,
      "step": 173
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.13928826936678446,
      "learning_rate": 5.1213203435596425e-05,
      "loss": 0.9204,
      "step": 174
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.16777686357665667,
      "learning_rate": 5.107390998350555e-05,
      "loss": 0.9209,
      "step": 175
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.13941558577566035,
      "learning_rate": 5.093371379525041e-05,
      "loss": 0.933,
      "step": 176
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.1312376870568112,
      "learning_rate": 5.079262087636908e-05,
      "loss": 0.9273,
      "step": 177
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.13304976321730042,
      "learning_rate": 5.0650637270812615e-05,
      "loss": 0.9325,
      "step": 178
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.9060415029525757,
      "eval_runtime": 367.2491,
      "eval_samples_per_second": 35.657,
      "eval_steps_per_second": 0.054,
      "step": 178
    }
  ],
  "logging_steps": 1,
  "max_steps": 534,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1405957714214912.0,
  "train_batch_size": 42,
  "trial_name": null,
  "trial_params": null
}