{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.2892332907517655, |
|
"eval_steps": 500, |
|
"global_step": 4500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0006427406461150345, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 1e-05, |
|
"loss": 3.0419, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.001285481292230069, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 2e-05, |
|
"loss": 3.0641, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0019282219383451036, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 1.9999950454033063e-05, |
|
"loss": 3.2405, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.002570962584460138, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 1.9999801816623205e-05, |
|
"loss": 2.8447, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0032137032305751727, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 1.9999554089243305e-05, |
|
"loss": 2.9348, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.003856443876690207, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 1.9999207274348143e-05, |
|
"loss": 2.966, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.004499184522805242, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 1.9998761375374376e-05, |
|
"loss": 2.8849, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.005141925168920276, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.9998216396740497e-05, |
|
"loss": 2.8674, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0057846658150353105, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.9997572343846814e-05, |
|
"loss": 2.789, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.006427406461150345, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 1.9996829223075363e-05, |
|
"loss": 2.8306, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0070701471072653795, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 1.9995987041789876e-05, |
|
"loss": 2.5481, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.007712887753380414, |
|
"grad_norm": 0.84765625, |
|
"learning_rate": 1.999504580833569e-05, |
|
"loss": 2.6249, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.00835562839949545, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 1.9994005532039665e-05, |
|
"loss": 2.6078, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.008998369045610483, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.9992866223210105e-05, |
|
"loss": 2.4607, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.009641109691725517, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.999162789313664e-05, |
|
"loss": 2.3672, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.010283850337840551, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 1.9990290554090123e-05, |
|
"loss": 2.4135, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.010926590983955587, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.9988854219322507e-05, |
|
"loss": 2.5179, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.011569331630070621, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 1.9987318903066704e-05, |
|
"loss": 2.3391, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.012212072276185655, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.9985684620536466e-05, |
|
"loss": 2.4168, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01285481292230069, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.9983951387926216e-05, |
|
"loss": 2.2706, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.013497553568415725, |
|
"grad_norm": 0.68359375, |
|
"learning_rate": 1.998211922241088e-05, |
|
"loss": 2.2684, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.014140294214530759, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.9980188142145755e-05, |
|
"loss": 2.2022, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.014783034860645793, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 1.997815816626628e-05, |
|
"loss": 2.2821, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.015425775506760829, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.9976029314887882e-05, |
|
"loss": 2.1055, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.016068516152875863, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.9973801609105757e-05, |
|
"loss": 2.1481, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.0167112567989909, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.9971475070994675e-05, |
|
"loss": 2.1277, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01735399744510593, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 1.9969049723608753e-05, |
|
"loss": 2.1143, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.017996738091220967, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 1.9966525590981228e-05, |
|
"loss": 2.1824, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.018639478737336002, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.9963902698124212e-05, |
|
"loss": 2.1849, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.019282219383451035, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.996118107102847e-05, |
|
"loss": 2.1996, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.01992496002956607, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.9958360736663117e-05, |
|
"loss": 2.1914, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.020567700675681103, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 1.99554417229754e-05, |
|
"loss": 2.0519, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.02121044132179614, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.995242405889039e-05, |
|
"loss": 2.0804, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.021853181967911174, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 1.99493077743107e-05, |
|
"loss": 2.0424, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.022495922614026206, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.99460929001162e-05, |
|
"loss": 2.1151, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.023138663260141242, |
|
"grad_norm": 0.7421875, |
|
"learning_rate": 1.9942779468163696e-05, |
|
"loss": 1.8499, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.023781403906256278, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.9939367511286635e-05, |
|
"loss": 2.1271, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02442414455237131, |
|
"grad_norm": 0.75, |
|
"learning_rate": 1.993585706329475e-05, |
|
"loss": 2.0739, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.025066885198486346, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.9932248158973746e-05, |
|
"loss": 2.0379, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.02570962584460138, |
|
"grad_norm": 0.78515625, |
|
"learning_rate": 1.992854083408496e-05, |
|
"loss": 1.9566, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.026352366490716414, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.992473512536499e-05, |
|
"loss": 2.0377, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.02699510713683145, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 1.992083107052534e-05, |
|
"loss": 1.9839, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.027637847782946486, |
|
"grad_norm": 0.80859375, |
|
"learning_rate": 1.9916828708252046e-05, |
|
"loss": 1.9477, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.028280588429061518, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 1.9912728078205285e-05, |
|
"loss": 2.0033, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.028923329075176554, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.9908529221018994e-05, |
|
"loss": 1.9196, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.029566069721291586, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.9904232178300465e-05, |
|
"loss": 1.9712, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.03020881036740662, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.9899836992629922e-05, |
|
"loss": 1.9251, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.030851551013521657, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 1.989534370756011e-05, |
|
"loss": 1.9695, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.03149429165963669, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.989075236761586e-05, |
|
"loss": 1.9208, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.032137032305751725, |
|
"grad_norm": 0.74609375, |
|
"learning_rate": 1.988606301829365e-05, |
|
"loss": 1.9474, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03277977295186676, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 1.9881275706061146e-05, |
|
"loss": 1.8636, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.0334225135979818, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.987639047835675e-05, |
|
"loss": 1.9595, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03406525424409683, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 1.987140738358913e-05, |
|
"loss": 1.8648, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03470799489021186, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.9866326471136738e-05, |
|
"loss": 1.9641, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.0353507355363269, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.986114779134731e-05, |
|
"loss": 1.9649, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.03599347618244193, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.9855871395537395e-05, |
|
"loss": 1.9086, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.036636216828556965, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.9850497335991815e-05, |
|
"loss": 1.8802, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.037278957474672005, |
|
"grad_norm": 0.78125, |
|
"learning_rate": 1.984502566596317e-05, |
|
"loss": 1.8694, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.03792169812078704, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.9839456439671288e-05, |
|
"loss": 1.9474, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.03856443876690207, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.9833789712302714e-05, |
|
"loss": 1.8633, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.03920717941301711, |
|
"grad_norm": 0.8515625, |
|
"learning_rate": 1.9828025540010144e-05, |
|
"loss": 1.9034, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.03984992005913214, |
|
"grad_norm": 0.77734375, |
|
"learning_rate": 1.9822163979911878e-05, |
|
"loss": 1.9021, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.04049266070524717, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.981620509009125e-05, |
|
"loss": 1.8945, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.041135401351362205, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 1.9810148929596047e-05, |
|
"loss": 1.9007, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.041778141997477244, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 1.9803995558437938e-05, |
|
"loss": 1.8548, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.04242088264359228, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.9797745037591866e-05, |
|
"loss": 1.952, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.04306362328970731, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.979139742899545e-05, |
|
"loss": 1.8855, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.04370636393582235, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 1.9784952795548374e-05, |
|
"loss": 1.8523, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.04434910458193738, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 1.9778411201111755e-05, |
|
"loss": 1.808, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.04499184522805241, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.9771772710507516e-05, |
|
"loss": 1.8894, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.04563458587416745, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.976503738951775e-05, |
|
"loss": 1.8869, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.046277326520282484, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.9758205304884048e-05, |
|
"loss": 1.9048, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.04692006716639752, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.9751276524306865e-05, |
|
"loss": 1.848, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.047562807812512556, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.974425111644482e-05, |
|
"loss": 1.8434, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.04820554845862759, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 1.9737129150914045e-05, |
|
"loss": 1.9363, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04884828910474262, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.972991069828747e-05, |
|
"loss": 1.8212, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.04949102975085766, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.9722595830094138e-05, |
|
"loss": 1.8591, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.05013377039697269, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.9715184618818493e-05, |
|
"loss": 1.9277, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.050776511043087724, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.9707677137899662e-05, |
|
"loss": 1.8552, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.05141925168920276, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 1.9700073461730725e-05, |
|
"loss": 1.8265, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.052061992335317796, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.969237366565798e-05, |
|
"loss": 1.7932, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.05270473298143283, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 1.9684577825980192e-05, |
|
"loss": 1.8122, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.05334747362754787, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.9676686019947852e-05, |
|
"loss": 1.8671, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.0539902142736629, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.9668698325762378e-05, |
|
"loss": 1.884, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.05463295491977793, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.9660614822575394e-05, |
|
"loss": 1.9472, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.05527569556589297, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 1.9652435590487878e-05, |
|
"loss": 1.7799, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.055918436212008, |
|
"grad_norm": 0.8125, |
|
"learning_rate": 1.964416071054944e-05, |
|
"loss": 1.8548, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.056561176858123036, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.963579026475745e-05, |
|
"loss": 1.8213, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.05720391750423807, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.962732433605629e-05, |
|
"loss": 1.8675, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.05784665815035311, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.961876300833647e-05, |
|
"loss": 1.8382, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.05848939879646814, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 1.9610106366433846e-05, |
|
"loss": 1.8516, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.05913213944258317, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.9601354496128765e-05, |
|
"loss": 1.8137, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.05977488008869821, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 1.9592507484145193e-05, |
|
"loss": 1.9144, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.06041762073481324, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 1.9583565418149887e-05, |
|
"loss": 1.8284, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.061060361380928276, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.9574528386751507e-05, |
|
"loss": 1.7851, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.061703102027043315, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 1.9565396479499744e-05, |
|
"loss": 1.769, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.06234584267315835, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.9556169786884436e-05, |
|
"loss": 1.8202, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.06298858331927339, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.9546848400334658e-05, |
|
"loss": 1.8815, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.06363132396538841, |
|
"grad_norm": 0.82421875, |
|
"learning_rate": 1.953743241221784e-05, |
|
"loss": 1.866, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.06427406461150345, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 1.9527921915838827e-05, |
|
"loss": 1.8496, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.06491680525761849, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.9518317005438964e-05, |
|
"loss": 1.7597, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.06555954590373352, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.9508617776195167e-05, |
|
"loss": 1.8705, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.06620228654984855, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.9498824324218973e-05, |
|
"loss": 1.787, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.0668450271959636, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.9488936746555593e-05, |
|
"loss": 1.7781, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.06748776784207862, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 1.9478955141182948e-05, |
|
"loss": 1.7506, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.06813050848819366, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.946887960701069e-05, |
|
"loss": 1.8555, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.0687732491343087, |
|
"grad_norm": 0.8515625, |
|
"learning_rate": 1.9458710243879242e-05, |
|
"loss": 1.7989, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.06941598978042372, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.944844715255879e-05, |
|
"loss": 1.7939, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.07005873042653876, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.9438090434748285e-05, |
|
"loss": 1.7889, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.0707014710726538, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 1.942764019307445e-05, |
|
"loss": 1.7511, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.07134421171876883, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.9417096531090752e-05, |
|
"loss": 1.8022, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.07198695236488387, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.940645955327637e-05, |
|
"loss": 1.8737, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.0726296930109989, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.9395729365035185e-05, |
|
"loss": 1.764, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.07327243365711393, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.9384906072694703e-05, |
|
"loss": 1.8344, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.07391517430322897, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.9373989783505017e-05, |
|
"loss": 1.8173, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.07455791494934401, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.936298060563775e-05, |
|
"loss": 1.8271, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.07520065559545903, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 1.9351878648184973e-05, |
|
"loss": 1.8369, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.07584339624157407, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.9340684021158133e-05, |
|
"loss": 1.8255, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.07648613688768911, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.932939683548695e-05, |
|
"loss": 1.7836, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.07712887753380414, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 1.9318017203018334e-05, |
|
"loss": 1.7955, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.07777161817991918, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 1.9306545236515264e-05, |
|
"loss": 1.748, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.07841435882603422, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.929498104965567e-05, |
|
"loss": 1.7522, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.07905709947214924, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.928332475703132e-05, |
|
"loss": 1.8157, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.07969984011826428, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 1.9271576474146667e-05, |
|
"loss": 1.7609, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.0803425807643793, |
|
"grad_norm": 0.84765625, |
|
"learning_rate": 1.9259736317417723e-05, |
|
"loss": 1.8088, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.08098532141049435, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.9247804404170888e-05, |
|
"loss": 1.8767, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.08162806205660939, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.9235780852641798e-05, |
|
"loss": 1.7125, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.08227080270272441, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.9223665781974154e-05, |
|
"loss": 1.8067, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.08291354334883945, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.921145931221853e-05, |
|
"loss": 1.669, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.08355628399495449, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.9199161564331196e-05, |
|
"loss": 1.7822, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.08419902464106951, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.9186772660172916e-05, |
|
"loss": 1.8901, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.08484176528718455, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 1.9174292722507735e-05, |
|
"loss": 1.7516, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.08548450593329959, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.916172187500177e-05, |
|
"loss": 1.8496, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.08612724657941462, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.914906024222198e-05, |
|
"loss": 1.8261, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.08676998722552966, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.913630794963493e-05, |
|
"loss": 1.7513, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.0874127278716447, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 1.9123465123605558e-05, |
|
"loss": 1.6671, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.08805546851775972, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.9110531891395906e-05, |
|
"loss": 1.8489, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.08869820916387476, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 1.9097508381163877e-05, |
|
"loss": 1.7958, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.0893409498099898, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 1.9084394721961956e-05, |
|
"loss": 1.7382, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.08998369045610483, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 1.907119104373592e-05, |
|
"loss": 1.8107, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.09062643110221986, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.9057897477323572e-05, |
|
"loss": 1.829, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.0912691717483349, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.9044514154453434e-05, |
|
"loss": 1.7685, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.09191191239444993, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.903104120774344e-05, |
|
"loss": 1.8001, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.09255465304056497, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.9017478770699624e-05, |
|
"loss": 1.7632, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.09319739368668001, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 1.90038269777148e-05, |
|
"loss": 1.7482, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.09384013433279503, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.899008596406722e-05, |
|
"loss": 1.8016, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.09448287497891007, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.897625586591925e-05, |
|
"loss": 1.7998, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.09512561562502511, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 1.896233682031601e-05, |
|
"loss": 1.7887, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.09576835627114014, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.8948328965184004e-05, |
|
"loss": 1.7226, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.09641109691725518, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.8934232439329786e-05, |
|
"loss": 1.7656, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.09705383756337022, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 1.8920047382438554e-05, |
|
"loss": 1.7482, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.09769657820948524, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 1.890577393507278e-05, |
|
"loss": 1.7171, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.09833931885560028, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 1.8891412238670814e-05, |
|
"loss": 1.7527, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.09898205950171532, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.887696243554549e-05, |
|
"loss": 1.7824, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.09962480014783034, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.8862424668882687e-05, |
|
"loss": 1.8317, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.10026754079394538, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 1.8847799082739957e-05, |
|
"loss": 1.785, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.10091028144006042, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.8833085822045062e-05, |
|
"loss": 1.7549, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.10155302208617545, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.8818285032594537e-05, |
|
"loss": 1.803, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.10219576273229049, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.8803396861052284e-05, |
|
"loss": 1.7646, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.10283850337840553, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.878842145494806e-05, |
|
"loss": 1.7383, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.10348124402452055, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.8773358962676066e-05, |
|
"loss": 1.7851, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.10412398467063559, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.8758209533493447e-05, |
|
"loss": 1.8107, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.10476672531675063, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.874297331751883e-05, |
|
"loss": 1.8237, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.10540946596286566, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.8727650465730827e-05, |
|
"loss": 1.8021, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.1060522066089807, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.8712241129966532e-05, |
|
"loss": 1.7842, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.10669494725509573, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.8696745462920046e-05, |
|
"loss": 1.7979, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.10733768790121076, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.868116361814092e-05, |
|
"loss": 1.746, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.1079804285473258, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.8665495750032664e-05, |
|
"loss": 1.8202, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.10862316919344084, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 1.8649742013851225e-05, |
|
"loss": 1.7969, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.10926590983955586, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.8633902565703415e-05, |
|
"loss": 1.7795, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.1099086504856709, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.861797756254539e-05, |
|
"loss": 1.7474, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.11055139113178594, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.8601967162181082e-05, |
|
"loss": 1.8134, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.11119413177790097, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.8585871523260653e-05, |
|
"loss": 1.7977, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.111836872424016, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 1.8569690805278894e-05, |
|
"loss": 1.7256, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.11247961307013103, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 1.8553425168573667e-05, |
|
"loss": 1.7866, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.11312235371624607, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.8537074774324318e-05, |
|
"loss": 1.7567, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.11376509436236111, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.8520639784550068e-05, |
|
"loss": 1.769, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.11440783500847614, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.85041203621084e-05, |
|
"loss": 1.7404, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.11505057565459118, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.848751667069347e-05, |
|
"loss": 1.785, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.11569331630070621, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 1.847082887483447e-05, |
|
"loss": 1.6875, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.11633605694682124, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 1.8454057139893992e-05, |
|
"loss": 1.7259, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.11697879759293628, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.84372016320664e-05, |
|
"loss": 1.7666, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.11762153823905132, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.842026251837619e-05, |
|
"loss": 1.7275, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.11826427888516634, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.8403239966676304e-05, |
|
"loss": 1.8194, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.11890701953128138, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.8386134145646505e-05, |
|
"loss": 1.7619, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.11954976017739642, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 1.836894522479168e-05, |
|
"loss": 1.761, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.12019250082351145, |
|
"grad_norm": 0.82421875, |
|
"learning_rate": 1.835167337444017e-05, |
|
"loss": 1.722, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.12083524146962649, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.8334318765742078e-05, |
|
"loss": 1.7103, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.12147798211574153, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.8316881570667583e-05, |
|
"loss": 1.7891, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.12212072276185655, |
|
"grad_norm": 0.77734375, |
|
"learning_rate": 1.8299361962005218e-05, |
|
"loss": 1.7342, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.12276346340797159, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.8281760113360177e-05, |
|
"loss": 1.6901, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.12340620405408663, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.8264076199152582e-05, |
|
"loss": 1.7208, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.12404894470020165, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.8246310394615753e-05, |
|
"loss": 1.7616, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.1246916853463167, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 1.822846287579449e-05, |
|
"loss": 1.7558, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.12533442599243172, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.8210533819543304e-05, |
|
"loss": 1.7703, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.12597716663854677, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 1.8192523403524685e-05, |
|
"loss": 1.7272, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.1266199072846618, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 1.8174431806207326e-05, |
|
"loss": 1.7556, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.12726264793077682, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.815625920686436e-05, |
|
"loss": 1.7382, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.12790538857689188, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.813800578557159e-05, |
|
"loss": 1.754, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.1285481292230069, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.8119671723205708e-05, |
|
"loss": 1.8341, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.12919086986912193, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.8101257201442468e-05, |
|
"loss": 1.8343, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.12983361051523698, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.8082762402754936e-05, |
|
"loss": 1.7762, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.130476351161352, |
|
"grad_norm": 0.8125, |
|
"learning_rate": 1.8064187510411646e-05, |
|
"loss": 1.7232, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.13111909180746703, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.80455327084748e-05, |
|
"loss": 1.7593, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.13176183245358208, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.8026798181798434e-05, |
|
"loss": 1.6904, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.1324045730996971, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.8007984116026604e-05, |
|
"loss": 1.7421, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.13304731374581213, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.7989090697591517e-05, |
|
"loss": 1.7673, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.1336900543919272, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 1.7970118113711715e-05, |
|
"loss": 1.7927, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.1343327950380422, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.7951066552390204e-05, |
|
"loss": 1.781, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.13497553568415724, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.7931936202412582e-05, |
|
"loss": 1.6528, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.1356182763302723, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 1.791272725334519e-05, |
|
"loss": 1.7846, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.13626101697638732, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.789343989553322e-05, |
|
"loss": 1.7236, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.13690375762250234, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.787407432009883e-05, |
|
"loss": 1.7879, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.1375464982686174, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 1.7854630718939254e-05, |
|
"loss": 1.6429, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.13818923891473242, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.7835109284724886e-05, |
|
"loss": 1.7904, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.13883197956084745, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 1.7815510210897407e-05, |
|
"loss": 1.7258, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.1394747202069625, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.779583369166782e-05, |
|
"loss": 1.6941, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.14011746085307752, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.7776079922014564e-05, |
|
"loss": 1.7171, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.14076020149919255, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.7756249097681563e-05, |
|
"loss": 1.7273, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.1414029421453076, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.7736341415176286e-05, |
|
"loss": 1.722, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.14204568279142263, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 1.7716357071767812e-05, |
|
"loss": 1.711, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.14268842343753765, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 1.7696296265484863e-05, |
|
"loss": 1.7142, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.1433311640836527, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.7676159195113853e-05, |
|
"loss": 1.7341, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.14397390472976773, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 1.7655946060196894e-05, |
|
"loss": 1.7583, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.14461664537588276, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.7635657061029863e-05, |
|
"loss": 1.6996, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.1452593860219978, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.7615292398660372e-05, |
|
"loss": 1.7818, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.14590212666811284, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.7594852274885795e-05, |
|
"loss": 1.7629, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.14654486731422786, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.7574336892251277e-05, |
|
"loss": 1.7498, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.14718760796034291, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.7553746454047705e-05, |
|
"loss": 1.7655, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.14783034860645794, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.753308116430972e-05, |
|
"loss": 1.7429, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.14847308925257297, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.751234122781367e-05, |
|
"loss": 1.7795, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.14911582989868802, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.7491526850075593e-05, |
|
"loss": 1.7375, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.14975857054480304, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 1.7470638237349195e-05, |
|
"loss": 1.7663, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.15040131119091807, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 1.7449675596623765e-05, |
|
"loss": 1.7934, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.15104405183703312, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.742863913562218e-05, |
|
"loss": 1.7493, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.15168679248314815, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.7407529062798784e-05, |
|
"loss": 1.7814, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.15232953312926317, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.7386345587337382e-05, |
|
"loss": 1.7127, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.15297227377537823, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.7365088919149124e-05, |
|
"loss": 1.7743, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.15361501442149325, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.734375926887045e-05, |
|
"loss": 1.7204, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.15425775506760828, |
|
"grad_norm": 2.25, |
|
"learning_rate": 1.7322356847860985e-05, |
|
"loss": 1.8416, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.15490049571372333, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.7300881868201456e-05, |
|
"loss": 1.7365, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.15554323635983835, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.7279334542691596e-05, |
|
"loss": 1.7371, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.15618597700595338, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.725771508484802e-05, |
|
"loss": 1.7034, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.15682871765206843, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.7236023708902113e-05, |
|
"loss": 1.7502, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.15747145829818346, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 1.7214260629797913e-05, |
|
"loss": 1.681, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.15811419894429848, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.7192426063189982e-05, |
|
"loss": 1.6597, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.1587569395904135, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.7170520225441264e-05, |
|
"loss": 1.7344, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.15939968023652856, |
|
"grad_norm": 0.765625, |
|
"learning_rate": 1.714854333362094e-05, |
|
"loss": 1.7025, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.1600424208826436, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.712649560550228e-05, |
|
"loss": 1.7985, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.1606851615287586, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.710437725956049e-05, |
|
"loss": 1.7754, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.16132790217487367, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.7082188514970534e-05, |
|
"loss": 1.7558, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.1619706428209887, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.705992959160497e-05, |
|
"loss": 1.6337, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.16261338346710372, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.7037600710031783e-05, |
|
"loss": 1.7362, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.16325612411321877, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 1.701520209151217e-05, |
|
"loss": 1.7269, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.1638988647593338, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.699273395799838e-05, |
|
"loss": 1.8314, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.16454160540544882, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.697019653213149e-05, |
|
"loss": 1.7012, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.16518434605156387, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.694759003723921e-05, |
|
"loss": 1.7954, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.1658270866976789, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.692491469733367e-05, |
|
"loss": 1.6907, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.16646982734379392, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.6902170737109203e-05, |
|
"loss": 1.6991, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.16711256798990898, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.6879358381940103e-05, |
|
"loss": 1.715, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.167755308636024, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.6856477857878415e-05, |
|
"loss": 1.7482, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.16839804928213903, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.683352939165167e-05, |
|
"loss": 1.7095, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.16904078992825408, |
|
"grad_norm": 0.8515625, |
|
"learning_rate": 1.6810513210660665e-05, |
|
"loss": 1.6142, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.1696835305743691, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.678742954297718e-05, |
|
"loss": 1.7428, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.17032627122048413, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.676427861734175e-05, |
|
"loss": 1.7764, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.17096901186659919, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.674106066316137e-05, |
|
"loss": 1.7345, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.1716117525127142, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.671777591050724e-05, |
|
"loss": 1.7397, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.17225449315882924, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.669442459011248e-05, |
|
"loss": 1.7448, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.1728972338049443, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.667100693336983e-05, |
|
"loss": 1.747, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.17353997445105931, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.664752317232939e-05, |
|
"loss": 1.7379, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.17418271509717434, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.6623973539696283e-05, |
|
"loss": 1.8181, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.1748254557432894, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.6600358268828376e-05, |
|
"loss": 1.6882, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.17546819638940442, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.6576677593733955e-05, |
|
"loss": 1.7557, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.17611093703551944, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.6552931749069403e-05, |
|
"loss": 1.7477, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.1767536776816345, |
|
"grad_norm": 0.77734375, |
|
"learning_rate": 1.6529120970136893e-05, |
|
"loss": 1.7788, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.17739641832774952, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 1.6505245492882044e-05, |
|
"loss": 1.7058, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.17803915897386455, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.6481305553891563e-05, |
|
"loss": 1.8183, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.1786818996199796, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.6457301390390945e-05, |
|
"loss": 1.7238, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.17932464026609463, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 1.6433233240242085e-05, |
|
"loss": 1.7097, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.17996738091220965, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.6409101341940937e-05, |
|
"loss": 1.7503, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.1806101215583247, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.638490593461515e-05, |
|
"loss": 1.7222, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.18125286220443973, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.6360647258021698e-05, |
|
"loss": 1.67, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.18189560285055476, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.633632555254449e-05, |
|
"loss": 1.741, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.1825383434966698, |
|
"grad_norm": 0.74609375, |
|
"learning_rate": 1.6311941059192015e-05, |
|
"loss": 1.698, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.18318108414278483, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.6287494019594928e-05, |
|
"loss": 1.7521, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.18382382478889986, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.626298467600368e-05, |
|
"loss": 1.7319, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.1844665654350149, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.623841327128609e-05, |
|
"loss": 1.7694, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.18510930608112994, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.6213780048924964e-05, |
|
"loss": 1.774, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.18575204672724496, |
|
"grad_norm": 0.8203125, |
|
"learning_rate": 1.6189085253015656e-05, |
|
"loss": 1.7477, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.18639478737336002, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 1.616432912826369e-05, |
|
"loss": 1.6605, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.18703752801947504, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.613951191998228e-05, |
|
"loss": 1.7154, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.18768026866559007, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.6114633874089955e-05, |
|
"loss": 1.7665, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.18832300931170512, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.608969523710807e-05, |
|
"loss": 1.6787, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.18896574995782014, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.6064696256158408e-05, |
|
"loss": 1.6682, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.18960849060393517, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 1.6039637178960704e-05, |
|
"loss": 1.7495, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.19025123125005022, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.6014518253830204e-05, |
|
"loss": 1.6987, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.19089397189616525, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.5989339729675187e-05, |
|
"loss": 1.6799, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.19153671254228027, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.5964101855994527e-05, |
|
"loss": 1.7866, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.19217945318839533, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.593880488287519e-05, |
|
"loss": 1.8115, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.19282219383451035, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.5913449060989777e-05, |
|
"loss": 1.7679, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.19346493448062538, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.5888034641594024e-05, |
|
"loss": 1.7431, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.19410767512674043, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.5862561876524337e-05, |
|
"loss": 1.7064, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.19475041577285546, |
|
"grad_norm": 0.84375, |
|
"learning_rate": 1.5837031018195266e-05, |
|
"loss": 1.7073, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.19539315641897048, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.5811442319597028e-05, |
|
"loss": 1.6424, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.19603589706508553, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.578579603429298e-05, |
|
"loss": 1.7724, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.19667863771120056, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.5760092416417124e-05, |
|
"loss": 1.7448, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.19732137835731559, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.5734331720671584e-05, |
|
"loss": 1.7734, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.19796411900343064, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.5708514202324074e-05, |
|
"loss": 1.7086, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.19860685964954566, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.5682640117205376e-05, |
|
"loss": 1.7351, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.1992496002956607, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 1.56567097217068e-05, |
|
"loss": 1.6818, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.19989234094177574, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.5630723272777656e-05, |
|
"loss": 1.802, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.20053508158789077, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 1.5604681027922686e-05, |
|
"loss": 1.7713, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.2011778222340058, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.557858324519953e-05, |
|
"loss": 1.7553, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.20182056288012085, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.5552430183216173e-05, |
|
"loss": 1.7304, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.20246330352623587, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.5526222101128355e-05, |
|
"loss": 1.7812, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.2031060441723509, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.5499959258637033e-05, |
|
"loss": 1.6848, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.20374878481846595, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.5473641915985792e-05, |
|
"loss": 1.6899, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.20439152546458098, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.5447270333958265e-05, |
|
"loss": 1.7511, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.205034266110696, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.542084477387557e-05, |
|
"loss": 1.7477, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.20567700675681105, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.5394365497593683e-05, |
|
"loss": 1.7675, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.20631974740292608, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.5367832767500873e-05, |
|
"loss": 1.6689, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.2069624880490411, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.5341246846515096e-05, |
|
"loss": 1.7262, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.20760522869515616, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.5314607998081385e-05, |
|
"loss": 1.7819, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.20824796934127118, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 1.528791648616924e-05, |
|
"loss": 1.71, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.2088907099873862, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.5261172575270014e-05, |
|
"loss": 1.8036, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.20953345063350126, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 1.5234376530394297e-05, |
|
"loss": 1.7522, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.2101761912796163, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.5207528617069272e-05, |
|
"loss": 1.7288, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.2108189319257313, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.5180629101336109e-05, |
|
"loss": 1.7152, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.21146167257184637, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.5153678249747307e-05, |
|
"loss": 1.7825, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.2121044132179614, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 1.5126676329364072e-05, |
|
"loss": 1.6981, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.21274715386407642, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.5099623607753651e-05, |
|
"loss": 1.7109, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.21338989451019147, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.5072520352986696e-05, |
|
"loss": 1.7009, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.2140326351563065, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.50453668336346e-05, |
|
"loss": 1.6994, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.21467537580242152, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.5018163318766835e-05, |
|
"loss": 1.727, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.21531811644853657, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.4990910077948293e-05, |
|
"loss": 1.7093, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.2159608570946516, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.4963607381236608e-05, |
|
"loss": 1.7136, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.21660359774076662, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 1.4936255499179479e-05, |
|
"loss": 1.745, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.21724633838688168, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.4908854702812e-05, |
|
"loss": 1.7144, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.2178890790329967, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.4881405263653955e-05, |
|
"loss": 1.7291, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.21853181967911173, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.4853907453707148e-05, |
|
"loss": 1.8049, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.21917456032522678, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.4826361545452697e-05, |
|
"loss": 1.8222, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.2198173009713418, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.4798767811848331e-05, |
|
"loss": 1.7501, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.22046004161745683, |
|
"grad_norm": 0.80859375, |
|
"learning_rate": 1.4771126526325695e-05, |
|
"loss": 1.7232, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.22110278226357188, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 1.474343796278763e-05, |
|
"loss": 1.7281, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.2217455229096869, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 1.4715702395605468e-05, |
|
"loss": 1.7007, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.22238826355580193, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.4687920099616311e-05, |
|
"loss": 1.6244, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.22303100420191696, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.4660091350120297e-05, |
|
"loss": 1.657, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.223673744848032, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 1.4632216422877891e-05, |
|
"loss": 1.7143, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.22431648549414704, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.4604295594107139e-05, |
|
"loss": 1.7564, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.22495922614026206, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.4576329140480925e-05, |
|
"loss": 1.6929, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.22560196678637712, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.4548317339124251e-05, |
|
"loss": 1.7734, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.22624470743249214, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.452026046761148e-05, |
|
"loss": 1.714, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.22688744807860717, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 1.4492158803963568e-05, |
|
"loss": 1.7787, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.22753018872472222, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.4464012626645336e-05, |
|
"loss": 1.7508, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.22817292937083725, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.4435822214562705e-05, |
|
"loss": 1.6806, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.22881567001695227, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.4407587847059914e-05, |
|
"loss": 1.7545, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.22945841066306732, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.4379309803916772e-05, |
|
"loss": 1.7719, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.23010115130918235, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.4350988365345879e-05, |
|
"loss": 1.7533, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.23074389195529738, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.4322623811989844e-05, |
|
"loss": 1.6717, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.23138663260141243, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.4294216424918515e-05, |
|
"loss": 1.8084, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.23202937324752745, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.426576648562618e-05, |
|
"loss": 1.7027, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.23267211389364248, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.4237274276028792e-05, |
|
"loss": 1.7553, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.23331485453975753, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.4208740078461168e-05, |
|
"loss": 1.7069, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.23395759518587256, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 1.4180164175674184e-05, |
|
"loss": 1.6952, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.23460033583198758, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.4151546850831989e-05, |
|
"loss": 1.6905, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.23524307647810264, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.4122888387509193e-05, |
|
"loss": 1.7128, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.23588581712421766, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.4094189069688046e-05, |
|
"loss": 1.7267, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.2365285577703327, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.4065449181755644e-05, |
|
"loss": 1.7449, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.23717129841644774, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.403666900850109e-05, |
|
"loss": 1.7336, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.23781403906256277, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.4007848835112688e-05, |
|
"loss": 1.6449, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.2384567797086778, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 1.397898894717511e-05, |
|
"loss": 1.7207, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.23909952035479284, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.3950089630666564e-05, |
|
"loss": 1.7177, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.23974226100090787, |
|
"grad_norm": 0.84765625, |
|
"learning_rate": 1.392115117195597e-05, |
|
"loss": 1.7184, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.2403850016470229, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.3892173857800108e-05, |
|
"loss": 1.7254, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.24102774229313795, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.386315797534079e-05, |
|
"loss": 1.7199, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.24167048293925297, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.3834103812102012e-05, |
|
"loss": 1.6975, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.242313223585368, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.3805011655987084e-05, |
|
"loss": 1.7183, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.24295596423148305, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.3775881795275817e-05, |
|
"loss": 1.6751, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.24359870487759808, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.3746714518621631e-05, |
|
"loss": 1.6311, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.2442414455237131, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.3717510115048709e-05, |
|
"loss": 1.7294, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.24488418616982816, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.3688268873949135e-05, |
|
"loss": 1.6922, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.24552692681594318, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.3658991085080027e-05, |
|
"loss": 1.6738, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.2461696674620582, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.362967703856065e-05, |
|
"loss": 1.6963, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.24681240810817326, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.360032702486956e-05, |
|
"loss": 1.8388, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.24745514875428828, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.3570941334841722e-05, |
|
"loss": 1.7457, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.2480978894004033, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.3541520259665621e-05, |
|
"loss": 1.6294, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.24874063004651836, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.3512064090880381e-05, |
|
"loss": 1.7499, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.2493833706926334, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.3482573120372873e-05, |
|
"loss": 1.7487, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.2500261113387484, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.3453047640374828e-05, |
|
"loss": 1.7238, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.25066885198486344, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.3423487943459934e-05, |
|
"loss": 1.7135, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.2513115926309785, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.3393894322540952e-05, |
|
"loss": 1.6844, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.25195433327709355, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.3364267070866788e-05, |
|
"loss": 1.7126, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.25259707392320857, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.3334606482019607e-05, |
|
"loss": 1.7103, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.2532398145693236, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.3304912849911929e-05, |
|
"loss": 1.694, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2538825552154386, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.3275186468783688e-05, |
|
"loss": 1.7423, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.25452529586155365, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 1.3245427633199347e-05, |
|
"loss": 1.7131, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.25516803650766867, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.3215636638044962e-05, |
|
"loss": 1.7375, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.25581077715378375, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.3185813778525265e-05, |
|
"loss": 1.7256, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.2564535177998988, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.3155959350160739e-05, |
|
"loss": 1.6983, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.2570962584460138, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.312607364878469e-05, |
|
"loss": 1.6608, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.25773899909212883, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.3096156970540305e-05, |
|
"loss": 1.6938, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.25838173973824385, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 1.3066209611877748e-05, |
|
"loss": 1.7964, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.2590244803843589, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 1.3036231869551173e-05, |
|
"loss": 1.6742, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.25966722103047396, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.3006224040615833e-05, |
|
"loss": 1.621, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.260309961676589, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.297618642242511e-05, |
|
"loss": 1.7152, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.260952702322704, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.2946119312627565e-05, |
|
"loss": 1.7553, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.26159544296881904, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.291602300916401e-05, |
|
"loss": 1.7938, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.26223818361493406, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.2885897810264534e-05, |
|
"loss": 1.794, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.2628809242610491, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.2855744014445554e-05, |
|
"loss": 1.7033, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.26352366490716417, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.2825561920506873e-05, |
|
"loss": 1.6797, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.2641664055532792, |
|
"grad_norm": 0.875, |
|
"learning_rate": 1.2795351827528693e-05, |
|
"loss": 1.8342, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.2648091461993942, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.2765114034868664e-05, |
|
"loss": 1.7093, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.26545188684550924, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.2734848842158925e-05, |
|
"loss": 1.7036, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.26609462749162427, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.2704556549303124e-05, |
|
"loss": 1.6945, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.2667373681377393, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.2674237456473444e-05, |
|
"loss": 1.6948, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.2673801087838544, |
|
"grad_norm": 0.75, |
|
"learning_rate": 1.2643891864107646e-05, |
|
"loss": 1.6862, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.2680228494299694, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.2613520072906069e-05, |
|
"loss": 1.7412, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.2686655900760844, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.2583122383828665e-05, |
|
"loss": 1.7044, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.26930833072219945, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.2552699098092016e-05, |
|
"loss": 1.7292, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.2699510713683145, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.2522250517166344e-05, |
|
"loss": 1.7285, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.2705938120144295, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.2491776942772525e-05, |
|
"loss": 1.7442, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.2712365526605446, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 1.2461278676879099e-05, |
|
"loss": 1.717, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.2718792933066596, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.2430756021699283e-05, |
|
"loss": 1.7403, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.27252203395277463, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 1.2400209279687968e-05, |
|
"loss": 1.7318, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.27316477459888966, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.2369638753538728e-05, |
|
"loss": 1.6867, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.2738075152450047, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 1.2339044746180826e-05, |
|
"loss": 1.7731, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.2744502558911197, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.2308427560776187e-05, |
|
"loss": 1.6815, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.2750929965372348, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.2277787500716424e-05, |
|
"loss": 1.7348, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.2757357371833498, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.2247124869619823e-05, |
|
"loss": 1.7091, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.27637847782946484, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 1.2216439971328323e-05, |
|
"loss": 1.6958, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.27702121847557987, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.2185733109904512e-05, |
|
"loss": 1.7388, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.2776639591216949, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.2155004589628622e-05, |
|
"loss": 1.6458, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.2783066997678099, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 1.21242547149955e-05, |
|
"loss": 1.679, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.278949440413925, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.2093483790711604e-05, |
|
"loss": 1.691, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.27959218106004, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.2062692121691972e-05, |
|
"loss": 1.7202, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.28023492170615505, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 1.2031880013057204e-05, |
|
"loss": 1.6382, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.2808776623522701, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.2001047770130448e-05, |
|
"loss": 1.7238, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.2815204029983851, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 1.197019569843436e-05, |
|
"loss": 1.7531, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.2821631436445001, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.1939324103688081e-05, |
|
"loss": 1.6925, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.2828058842906152, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.1908433291804217e-05, |
|
"loss": 1.8275, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.28344862493673023, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.18775235688858e-05, |
|
"loss": 1.7867, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.28409136558284526, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.1846595241223246e-05, |
|
"loss": 1.6453, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.2847341062289603, |
|
"grad_norm": 2.625, |
|
"learning_rate": 1.1815648615291337e-05, |
|
"loss": 1.7489, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.2853768468750753, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.1784683997746178e-05, |
|
"loss": 1.7083, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.28601958752119033, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.1753701695422146e-05, |
|
"loss": 1.7238, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.2866623281673054, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.1722702015328868e-05, |
|
"loss": 1.7005, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.28730506881342044, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.1691685264648176e-05, |
|
"loss": 1.731, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.28794780945953546, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.166065175073104e-05, |
|
"loss": 1.715, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.2885905501056505, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.1629601781094557e-05, |
|
"loss": 1.7201, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.2892332907517655, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.1598535663418884e-05, |
|
"loss": 1.7189, |
|
"step": 4500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 10000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"total_flos": 1.7054847722533683e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|