{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.06427406461150345,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006427406461150345,
      "grad_norm": 0.458984375,
      "learning_rate": 1e-05,
      "loss": 3.0419,
      "step": 10
    },
    {
      "epoch": 0.001285481292230069,
      "grad_norm": 8.8125,
      "learning_rate": 2e-05,
      "loss": 3.0641,
      "step": 20
    },
    {
      "epoch": 0.0019282219383451036,
      "grad_norm": 0.62109375,
      "learning_rate": 1.9999950454033063e-05,
      "loss": 3.2405,
      "step": 30
    },
    {
      "epoch": 0.002570962584460138,
      "grad_norm": 0.5625,
      "learning_rate": 1.9999801816623205e-05,
      "loss": 2.8447,
      "step": 40
    },
    {
      "epoch": 0.0032137032305751727,
      "grad_norm": 0.60546875,
      "learning_rate": 1.9999554089243305e-05,
      "loss": 2.9348,
      "step": 50
    },
    {
      "epoch": 0.003856443876690207,
      "grad_norm": 0.69921875,
      "learning_rate": 1.9999207274348143e-05,
      "loss": 2.966,
      "step": 60
    },
    {
      "epoch": 0.004499184522805242,
      "grad_norm": 2.9375,
      "learning_rate": 1.9998761375374376e-05,
      "loss": 2.8849,
      "step": 70
    },
    {
      "epoch": 0.005141925168920276,
      "grad_norm": 0.953125,
      "learning_rate": 1.9998216396740497e-05,
      "loss": 2.8674,
      "step": 80
    },
    {
      "epoch": 0.0057846658150353105,
      "grad_norm": 0.96484375,
      "learning_rate": 1.9997572343846814e-05,
      "loss": 2.789,
      "step": 90
    },
    {
      "epoch": 0.006427406461150345,
      "grad_norm": 0.63671875,
      "learning_rate": 1.9996829223075363e-05,
      "loss": 2.8306,
      "step": 100
    },
    {
      "epoch": 0.0070701471072653795,
      "grad_norm": 0.91796875,
      "learning_rate": 1.9995987041789876e-05,
      "loss": 2.5481,
      "step": 110
    },
    {
      "epoch": 0.007712887753380414,
      "grad_norm": 0.84765625,
      "learning_rate": 1.999504580833569e-05,
      "loss": 2.6249,
      "step": 120
    },
    {
      "epoch": 0.00835562839949545,
      "grad_norm": 0.54296875,
      "learning_rate": 1.9994005532039665e-05,
      "loss": 2.6078,
      "step": 130
    },
    {
      "epoch": 0.008998369045610483,
      "grad_norm": 1.328125,
      "learning_rate": 1.9992866223210105e-05,
      "loss": 2.4607,
      "step": 140
    },
    {
      "epoch": 0.009641109691725517,
      "grad_norm": 1.0703125,
      "learning_rate": 1.999162789313664e-05,
      "loss": 2.3672,
      "step": 150
    },
    {
      "epoch": 0.010283850337840551,
      "grad_norm": 0.796875,
      "learning_rate": 1.9990290554090123e-05,
      "loss": 2.4135,
      "step": 160
    },
    {
      "epoch": 0.010926590983955587,
      "grad_norm": 1.1484375,
      "learning_rate": 1.9988854219322507e-05,
      "loss": 2.5179,
      "step": 170
    },
    {
      "epoch": 0.011569331630070621,
      "grad_norm": 0.55078125,
      "learning_rate": 1.9987318903066704e-05,
      "loss": 2.3391,
      "step": 180
    },
    {
      "epoch": 0.012212072276185655,
      "grad_norm": 1.171875,
      "learning_rate": 1.9985684620536466e-05,
      "loss": 2.4168,
      "step": 190
    },
    {
      "epoch": 0.01285481292230069,
      "grad_norm": 1.4609375,
      "learning_rate": 1.9983951387926216e-05,
      "loss": 2.2706,
      "step": 200
    },
    {
      "epoch": 0.013497553568415725,
      "grad_norm": 0.68359375,
      "learning_rate": 1.998211922241088e-05,
      "loss": 2.2684,
      "step": 210
    },
    {
      "epoch": 0.014140294214530759,
      "grad_norm": 1.1640625,
      "learning_rate": 1.9980188142145755e-05,
      "loss": 2.2022,
      "step": 220
    },
    {
      "epoch": 0.014783034860645793,
      "grad_norm": 0.52734375,
      "learning_rate": 1.997815816626628e-05,
      "loss": 2.2821,
      "step": 230
    },
    {
      "epoch": 0.015425775506760829,
      "grad_norm": 1.359375,
      "learning_rate": 1.9976029314887882e-05,
      "loss": 2.1055,
      "step": 240
    },
    {
      "epoch": 0.016068516152875863,
      "grad_norm": 0.890625,
      "learning_rate": 1.9973801609105757e-05,
      "loss": 2.1481,
      "step": 250
    },
    {
      "epoch": 0.0167112567989909,
      "grad_norm": 1.2265625,
      "learning_rate": 1.9971475070994675e-05,
      "loss": 2.1277,
      "step": 260
    },
    {
      "epoch": 0.01735399744510593,
      "grad_norm": 0.6796875,
      "learning_rate": 1.9969049723608753e-05,
      "loss": 2.1143,
      "step": 270
    },
    {
      "epoch": 0.017996738091220967,
      "grad_norm": 0.828125,
      "learning_rate": 1.9966525590981228e-05,
      "loss": 2.1824,
      "step": 280
    },
    {
      "epoch": 0.018639478737336002,
      "grad_norm": 1.6171875,
      "learning_rate": 1.9963902698124212e-05,
      "loss": 2.1849,
      "step": 290
    },
    {
      "epoch": 0.019282219383451035,
      "grad_norm": 0.96875,
      "learning_rate": 1.996118107102847e-05,
      "loss": 2.1996,
      "step": 300
    },
    {
      "epoch": 0.01992496002956607,
      "grad_norm": 1.1171875,
      "learning_rate": 1.9958360736663117e-05,
      "loss": 2.1914,
      "step": 310
    },
    {
      "epoch": 0.020567700675681103,
      "grad_norm": 0.62109375,
      "learning_rate": 1.99554417229754e-05,
      "loss": 2.0519,
      "step": 320
    },
    {
      "epoch": 0.02121044132179614,
      "grad_norm": 0.890625,
      "learning_rate": 1.995242405889039e-05,
      "loss": 2.0804,
      "step": 330
    },
    {
      "epoch": 0.021853181967911174,
      "grad_norm": 0.71875,
      "learning_rate": 1.99493077743107e-05,
      "loss": 2.0424,
      "step": 340
    },
    {
      "epoch": 0.022495922614026206,
      "grad_norm": 1.4296875,
      "learning_rate": 1.99460929001162e-05,
      "loss": 2.1151,
      "step": 350
    },
    {
      "epoch": 0.023138663260141242,
      "grad_norm": 0.7421875,
      "learning_rate": 1.9942779468163696e-05,
      "loss": 1.8499,
      "step": 360
    },
    {
      "epoch": 0.023781403906256278,
      "grad_norm": 0.8671875,
      "learning_rate": 1.9939367511286635e-05,
      "loss": 2.1271,
      "step": 370
    },
    {
      "epoch": 0.02442414455237131,
      "grad_norm": 0.75,
      "learning_rate": 1.993585706329475e-05,
      "loss": 2.0739,
      "step": 380
    },
    {
      "epoch": 0.025066885198486346,
      "grad_norm": 1.3671875,
      "learning_rate": 1.9932248158973746e-05,
      "loss": 2.0379,
      "step": 390
    },
    {
      "epoch": 0.02570962584460138,
      "grad_norm": 0.78515625,
      "learning_rate": 1.992854083408496e-05,
      "loss": 1.9566,
      "step": 400
    },
    {
      "epoch": 0.026352366490716414,
      "grad_norm": 0.859375,
      "learning_rate": 1.992473512536499e-05,
      "loss": 2.0377,
      "step": 410
    },
    {
      "epoch": 0.02699510713683145,
      "grad_norm": 0.81640625,
      "learning_rate": 1.992083107052534e-05,
      "loss": 1.9839,
      "step": 420
    },
    {
      "epoch": 0.027637847782946486,
      "grad_norm": 0.80859375,
      "learning_rate": 1.9916828708252046e-05,
      "loss": 1.9477,
      "step": 430
    },
    {
      "epoch": 0.028280588429061518,
      "grad_norm": 0.703125,
      "learning_rate": 1.9912728078205285e-05,
      "loss": 2.0033,
      "step": 440
    },
    {
      "epoch": 0.028923329075176554,
      "grad_norm": 1.03125,
      "learning_rate": 1.9908529221018994e-05,
      "loss": 1.9196,
      "step": 450
    },
    {
      "epoch": 0.029566069721291586,
      "grad_norm": 1.0546875,
      "learning_rate": 1.9904232178300465e-05,
      "loss": 1.9712,
      "step": 460
    },
    {
      "epoch": 0.03020881036740662,
      "grad_norm": 1.1875,
      "learning_rate": 1.9899836992629922e-05,
      "loss": 1.9251,
      "step": 470
    },
    {
      "epoch": 0.030851551013521657,
      "grad_norm": 0.71484375,
      "learning_rate": 1.989534370756011e-05,
      "loss": 1.9695,
      "step": 480
    },
    {
      "epoch": 0.03149429165963669,
      "grad_norm": 0.9765625,
      "learning_rate": 1.989075236761586e-05,
      "loss": 1.9208,
      "step": 490
    },
    {
      "epoch": 0.032137032305751725,
      "grad_norm": 0.74609375,
      "learning_rate": 1.988606301829365e-05,
      "loss": 1.9474,
      "step": 500
    },
    {
      "epoch": 0.03277977295186676,
      "grad_norm": 0.6796875,
      "learning_rate": 1.9881275706061146e-05,
      "loss": 1.8636,
      "step": 510
    },
    {
      "epoch": 0.0334225135979818,
      "grad_norm": 0.9453125,
      "learning_rate": 1.987639047835675e-05,
      "loss": 1.9595,
      "step": 520
    },
    {
      "epoch": 0.03406525424409683,
      "grad_norm": 0.9921875,
      "learning_rate": 1.987140738358913e-05,
      "loss": 1.8648,
      "step": 530
    },
    {
      "epoch": 0.03470799489021186,
      "grad_norm": 1.453125,
      "learning_rate": 1.9866326471136738e-05,
      "loss": 1.9641,
      "step": 540
    },
    {
      "epoch": 0.0353507355363269,
      "grad_norm": 1.1328125,
      "learning_rate": 1.986114779134731e-05,
      "loss": 1.9649,
      "step": 550
    },
    {
      "epoch": 0.03599347618244193,
      "grad_norm": 0.921875,
      "learning_rate": 1.9855871395537395e-05,
      "loss": 1.9086,
      "step": 560
    },
    {
      "epoch": 0.036636216828556965,
      "grad_norm": 1.6875,
      "learning_rate": 1.9850497335991815e-05,
      "loss": 1.8802,
      "step": 570
    },
    {
      "epoch": 0.037278957474672005,
      "grad_norm": 0.78125,
      "learning_rate": 1.984502566596317e-05,
      "loss": 1.8694,
      "step": 580
    },
    {
      "epoch": 0.03792169812078704,
      "grad_norm": 0.984375,
      "learning_rate": 1.9839456439671288e-05,
      "loss": 1.9474,
      "step": 590
    },
    {
      "epoch": 0.03856443876690207,
      "grad_norm": 0.87890625,
      "learning_rate": 1.9833789712302714e-05,
      "loss": 1.8633,
      "step": 600
    },
    {
      "epoch": 0.03920717941301711,
      "grad_norm": 0.8515625,
      "learning_rate": 1.9828025540010144e-05,
      "loss": 1.9034,
      "step": 610
    },
    {
      "epoch": 0.03984992005913214,
      "grad_norm": 0.77734375,
      "learning_rate": 1.9822163979911878e-05,
      "loss": 1.9021,
      "step": 620
    },
    {
      "epoch": 0.04049266070524717,
      "grad_norm": 1.2421875,
      "learning_rate": 1.981620509009125e-05,
      "loss": 1.8945,
      "step": 630
    },
    {
      "epoch": 0.041135401351362205,
      "grad_norm": 0.87109375,
      "learning_rate": 1.9810148929596047e-05,
      "loss": 1.9007,
      "step": 640
    },
    {
      "epoch": 0.041778141997477244,
      "grad_norm": 0.85546875,
      "learning_rate": 1.9803995558437938e-05,
      "loss": 1.8548,
      "step": 650
    },
    {
      "epoch": 0.04242088264359228,
      "grad_norm": 0.87890625,
      "learning_rate": 1.9797745037591866e-05,
      "loss": 1.952,
      "step": 660
    },
    {
      "epoch": 0.04306362328970731,
      "grad_norm": 1.265625,
      "learning_rate": 1.979139742899545e-05,
      "loss": 1.8855,
      "step": 670
    },
    {
      "epoch": 0.04370636393582235,
      "grad_norm": 0.7734375,
      "learning_rate": 1.9784952795548374e-05,
      "loss": 1.8523,
      "step": 680
    },
    {
      "epoch": 0.04434910458193738,
      "grad_norm": 0.99609375,
      "learning_rate": 1.9778411201111755e-05,
      "loss": 1.808,
      "step": 690
    },
    {
      "epoch": 0.04499184522805241,
      "grad_norm": 1.4296875,
      "learning_rate": 1.9771772710507516e-05,
      "loss": 1.8894,
      "step": 700
    },
    {
      "epoch": 0.04563458587416745,
      "grad_norm": 1.0390625,
      "learning_rate": 1.976503738951775e-05,
      "loss": 1.8869,
      "step": 710
    },
    {
      "epoch": 0.046277326520282484,
      "grad_norm": 1.1640625,
      "learning_rate": 1.9758205304884048e-05,
      "loss": 1.9048,
      "step": 720
    },
    {
      "epoch": 0.04692006716639752,
      "grad_norm": 1.015625,
      "learning_rate": 1.9751276524306865e-05,
      "loss": 1.848,
      "step": 730
    },
    {
      "epoch": 0.047562807812512556,
      "grad_norm": 1.0546875,
      "learning_rate": 1.974425111644482e-05,
      "loss": 1.8434,
      "step": 740
    },
    {
      "epoch": 0.04820554845862759,
      "grad_norm": 0.8828125,
      "learning_rate": 1.9737129150914045e-05,
      "loss": 1.9363,
      "step": 750
    },
    {
      "epoch": 0.04884828910474262,
      "grad_norm": 1.109375,
      "learning_rate": 1.972991069828747e-05,
      "loss": 1.8212,
      "step": 760
    },
    {
      "epoch": 0.04949102975085766,
      "grad_norm": 1.0390625,
      "learning_rate": 1.9722595830094138e-05,
      "loss": 1.8591,
      "step": 770
    },
    {
      "epoch": 0.05013377039697269,
      "grad_norm": 1.0,
      "learning_rate": 1.9715184618818493e-05,
      "loss": 1.9277,
      "step": 780
    },
    {
      "epoch": 0.050776511043087724,
      "grad_norm": 0.921875,
      "learning_rate": 1.9707677137899662e-05,
      "loss": 1.8552,
      "step": 790
    },
    {
      "epoch": 0.05141925168920276,
      "grad_norm": 0.89453125,
      "learning_rate": 1.9700073461730725e-05,
      "loss": 1.8265,
      "step": 800
    },
    {
      "epoch": 0.052061992335317796,
      "grad_norm": 1.3125,
      "learning_rate": 1.969237366565798e-05,
      "loss": 1.7932,
      "step": 810
    },
    {
      "epoch": 0.05270473298143283,
      "grad_norm": 0.76171875,
      "learning_rate": 1.9684577825980192e-05,
      "loss": 1.8122,
      "step": 820
    },
    {
      "epoch": 0.05334747362754787,
      "grad_norm": 0.9296875,
      "learning_rate": 1.9676686019947852e-05,
      "loss": 1.8671,
      "step": 830
    },
    {
      "epoch": 0.0539902142736629,
      "grad_norm": 0.9765625,
      "learning_rate": 1.9668698325762378e-05,
      "loss": 1.884,
      "step": 840
    },
    {
      "epoch": 0.05463295491977793,
      "grad_norm": 1.0234375,
      "learning_rate": 1.9660614822575394e-05,
      "loss": 1.9472,
      "step": 850
    },
    {
      "epoch": 0.05527569556589297,
      "grad_norm": 1.8984375,
      "learning_rate": 1.9652435590487878e-05,
      "loss": 1.7799,
      "step": 860
    },
    {
      "epoch": 0.055918436212008,
      "grad_norm": 0.8125,
      "learning_rate": 1.964416071054944e-05,
      "loss": 1.8548,
      "step": 870
    },
    {
      "epoch": 0.056561176858123036,
      "grad_norm": 1.15625,
      "learning_rate": 1.963579026475745e-05,
      "loss": 1.8213,
      "step": 880
    },
    {
      "epoch": 0.05720391750423807,
      "grad_norm": 1.0078125,
      "learning_rate": 1.962732433605629e-05,
      "loss": 1.8675,
      "step": 890
    },
    {
      "epoch": 0.05784665815035311,
      "grad_norm": 1.0625,
      "learning_rate": 1.961876300833647e-05,
      "loss": 1.8382,
      "step": 900
    },
    {
      "epoch": 0.05848939879646814,
      "grad_norm": 1.3359375,
      "learning_rate": 1.9610106366433846e-05,
      "loss": 1.8516,
      "step": 910
    },
    {
      "epoch": 0.05913213944258317,
      "grad_norm": 1.1796875,
      "learning_rate": 1.9601354496128765e-05,
      "loss": 1.8137,
      "step": 920
    },
    {
      "epoch": 0.05977488008869821,
      "grad_norm": 0.8046875,
      "learning_rate": 1.9592507484145193e-05,
      "loss": 1.9144,
      "step": 930
    },
    {
      "epoch": 0.06041762073481324,
      "grad_norm": 0.796875,
      "learning_rate": 1.9583565418149887e-05,
      "loss": 1.8284,
      "step": 940
    },
    {
      "epoch": 0.061060361380928276,
      "grad_norm": 0.90234375,
      "learning_rate": 1.9574528386751507e-05,
      "loss": 1.7851,
      "step": 950
    },
    {
      "epoch": 0.061703102027043315,
      "grad_norm": 0.8828125,
      "learning_rate": 1.9565396479499744e-05,
      "loss": 1.769,
      "step": 960
    },
    {
      "epoch": 0.06234584267315835,
      "grad_norm": 0.921875,
      "learning_rate": 1.9556169786884436e-05,
      "loss": 1.8202,
      "step": 970
    },
    {
      "epoch": 0.06298858331927339,
      "grad_norm": 1.015625,
      "learning_rate": 1.9546848400334658e-05,
      "loss": 1.8815,
      "step": 980
    },
    {
      "epoch": 0.06363132396538841,
      "grad_norm": 0.82421875,
      "learning_rate": 1.953743241221784e-05,
      "loss": 1.866,
      "step": 990
    },
    {
      "epoch": 0.06427406461150345,
      "grad_norm": 0.87109375,
      "learning_rate": 1.9527921915838827e-05,
      "loss": 1.8496,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 3.83552809116631e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}