{
  "best_metric": 0.3821594715118408,
  "best_model_checkpoint": "classify-google-augment-3/checkpoint-4950",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 4950,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015151515151515152,
      "grad_norm": 12.085535049438477,
      "learning_rate": 2.3232323232323234e-06,
      "loss": 2.4513,
      "step": 25
    },
    {
      "epoch": 0.030303030303030304,
      "grad_norm": 10.15959358215332,
      "learning_rate": 4.848484848484849e-06,
      "loss": 2.234,
      "step": 50
    },
    {
      "epoch": 0.045454545454545456,
      "grad_norm": 12.21349048614502,
      "learning_rate": 7.3737373737373745e-06,
      "loss": 1.9342,
      "step": 75
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 13.986493110656738,
      "learning_rate": 9.898989898989899e-06,
      "loss": 1.7956,
      "step": 100
    },
    {
      "epoch": 0.07575757575757576,
      "grad_norm": 13.179245948791504,
      "learning_rate": 1.2424242424242424e-05,
      "loss": 1.7165,
      "step": 125
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 15.372575759887695,
      "learning_rate": 1.494949494949495e-05,
      "loss": 1.519,
      "step": 150
    },
    {
      "epoch": 0.10606060606060606,
      "grad_norm": 9.555817604064941,
      "learning_rate": 1.7474747474747475e-05,
      "loss": 1.4716,
      "step": 175
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 14.15468692779541,
      "learning_rate": 2e-05,
      "loss": 1.5611,
      "step": 200
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 9.72482681274414,
      "learning_rate": 2.2525252525252528e-05,
      "loss": 1.3692,
      "step": 225
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 11.219098091125488,
      "learning_rate": 2.505050505050505e-05,
      "loss": 1.3761,
      "step": 250
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 14.293442726135254,
      "learning_rate": 2.7575757575757578e-05,
      "loss": 1.5677,
      "step": 275
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 12.68210506439209,
      "learning_rate": 3.01010101010101e-05,
      "loss": 1.4212,
      "step": 300
    },
    {
      "epoch": 0.19696969696969696,
      "grad_norm": 10.856423377990723,
      "learning_rate": 3.2626262626262624e-05,
      "loss": 1.3204,
      "step": 325
    },
    {
      "epoch": 0.21212121212121213,
      "grad_norm": 11.644044876098633,
      "learning_rate": 3.515151515151515e-05,
      "loss": 1.3228,
      "step": 350
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 7.3117356300354,
      "learning_rate": 3.767676767676768e-05,
      "loss": 1.4005,
      "step": 375
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 12.309542655944824,
      "learning_rate": 4.0202020202020204e-05,
      "loss": 1.2739,
      "step": 400
    },
    {
      "epoch": 0.25757575757575757,
      "grad_norm": 11.981978416442871,
      "learning_rate": 4.2727272727272724e-05,
      "loss": 1.3082,
      "step": 425
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 8.469183921813965,
      "learning_rate": 4.525252525252526e-05,
      "loss": 1.1932,
      "step": 450
    },
    {
      "epoch": 0.2878787878787879,
      "grad_norm": 13.018028259277344,
      "learning_rate": 4.7777777777777784e-05,
      "loss": 1.2612,
      "step": 475
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 11.944835662841797,
      "learning_rate": 4.9966329966329964e-05,
      "loss": 1.3266,
      "step": 500
    },
    {
      "epoch": 0.3181818181818182,
      "grad_norm": 11.489705085754395,
      "learning_rate": 4.968574635241302e-05,
      "loss": 1.3537,
      "step": 525
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 14.265945434570312,
      "learning_rate": 4.940516273849607e-05,
      "loss": 1.4162,
      "step": 550
    },
    {
      "epoch": 0.3484848484848485,
      "grad_norm": 10.913326263427734,
      "learning_rate": 4.912457912457913e-05,
      "loss": 1.0754,
      "step": 575
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 7.5248236656188965,
      "learning_rate": 4.884399551066218e-05,
      "loss": 1.3724,
      "step": 600
    },
    {
      "epoch": 0.3787878787878788,
      "grad_norm": 9.98138427734375,
      "learning_rate": 4.856341189674523e-05,
      "loss": 1.184,
      "step": 625
    },
    {
      "epoch": 0.3939393939393939,
      "grad_norm": 9.07705307006836,
      "learning_rate": 4.828282828282829e-05,
      "loss": 1.0418,
      "step": 650
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 8.512435913085938,
      "learning_rate": 4.800224466891134e-05,
      "loss": 1.1425,
      "step": 675
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 13.559457778930664,
      "learning_rate": 4.7721661054994394e-05,
      "loss": 1.4285,
      "step": 700
    },
    {
      "epoch": 0.4393939393939394,
      "grad_norm": 7.820692539215088,
      "learning_rate": 4.7441077441077445e-05,
      "loss": 1.1221,
      "step": 725
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 6.074573040008545,
      "learning_rate": 4.7160493827160495e-05,
      "loss": 0.9862,
      "step": 750
    },
    {
      "epoch": 0.4696969696969697,
      "grad_norm": 6.816392421722412,
      "learning_rate": 4.687991021324355e-05,
      "loss": 1.1894,
      "step": 775
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 9.136533737182617,
      "learning_rate": 4.65993265993266e-05,
      "loss": 1.0674,
      "step": 800
    },
    {
      "epoch": 0.5,
      "grad_norm": 8.841058731079102,
      "learning_rate": 4.631874298540965e-05,
      "loss": 1.0798,
      "step": 825
    },
    {
      "epoch": 0.5151515151515151,
      "grad_norm": 10.076774597167969,
      "learning_rate": 4.60381593714927e-05,
      "loss": 1.0903,
      "step": 850
    },
    {
      "epoch": 0.5303030303030303,
      "grad_norm": 13.101428985595703,
      "learning_rate": 4.575757575757576e-05,
      "loss": 1.0672,
      "step": 875
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 10.879769325256348,
      "learning_rate": 4.547699214365882e-05,
      "loss": 1.005,
      "step": 900
    },
    {
      "epoch": 0.5606060606060606,
      "grad_norm": 5.972232341766357,
      "learning_rate": 4.519640852974186e-05,
      "loss": 1.026,
      "step": 925
    },
    {
      "epoch": 0.5757575757575758,
      "grad_norm": 12.992452621459961,
      "learning_rate": 4.491582491582492e-05,
      "loss": 1.0436,
      "step": 950
    },
    {
      "epoch": 0.5909090909090909,
      "grad_norm": 11.431863784790039,
      "learning_rate": 4.463524130190797e-05,
      "loss": 1.006,
      "step": 975
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 13.05385971069336,
      "learning_rate": 4.4354657687991025e-05,
      "loss": 1.1255,
      "step": 1000
    },
    {
      "epoch": 0.6212121212121212,
      "grad_norm": 9.93174934387207,
      "learning_rate": 4.4074074074074076e-05,
      "loss": 1.0457,
      "step": 1025
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 14.331412315368652,
      "learning_rate": 4.3793490460157126e-05,
      "loss": 0.997,
      "step": 1050
    },
    {
      "epoch": 0.6515151515151515,
      "grad_norm": 11.081893920898438,
      "learning_rate": 4.351290684624018e-05,
      "loss": 0.999,
      "step": 1075
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 8.453628540039062,
      "learning_rate": 4.3232323232323234e-05,
      "loss": 1.0695,
      "step": 1100
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 8.907005310058594,
      "learning_rate": 4.295173961840629e-05,
      "loss": 0.967,
      "step": 1125
    },
    {
      "epoch": 0.696969696969697,
      "grad_norm": 10.75546646118164,
      "learning_rate": 4.267115600448934e-05,
      "loss": 1.0517,
      "step": 1150
    },
    {
      "epoch": 0.7121212121212122,
      "grad_norm": 9.127320289611816,
      "learning_rate": 4.239057239057239e-05,
      "loss": 1.0049,
      "step": 1175
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 10.124237060546875,
      "learning_rate": 4.210998877665545e-05,
      "loss": 0.969,
      "step": 1200
    },
    {
      "epoch": 0.7424242424242424,
      "grad_norm": 5.617876052856445,
      "learning_rate": 4.18294051627385e-05,
      "loss": 1.0031,
      "step": 1225
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 11.184277534484863,
      "learning_rate": 4.154882154882155e-05,
      "loss": 1.0209,
      "step": 1250
    },
    {
      "epoch": 0.7727272727272727,
      "grad_norm": 6.545035362243652,
      "learning_rate": 4.12682379349046e-05,
      "loss": 0.8416,
      "step": 1275
    },
    {
      "epoch": 0.7878787878787878,
      "grad_norm": 4.944152355194092,
      "learning_rate": 4.0987654320987657e-05,
      "loss": 1.0029,
      "step": 1300
    },
    {
      "epoch": 0.803030303030303,
      "grad_norm": 8.916969299316406,
      "learning_rate": 4.070707070707071e-05,
      "loss": 0.8808,
      "step": 1325
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 4.9258713722229,
      "learning_rate": 4.0426487093153764e-05,
      "loss": 0.9536,
      "step": 1350
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 11.556329727172852,
      "learning_rate": 4.0145903479236814e-05,
      "loss": 0.937,
      "step": 1375
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 10.06991958618164,
      "learning_rate": 3.9865319865319865e-05,
      "loss": 0.8424,
      "step": 1400
    },
    {
      "epoch": 0.8636363636363636,
      "grad_norm": 8.390497207641602,
      "learning_rate": 3.958473625140292e-05,
      "loss": 0.9453,
      "step": 1425
    },
    {
      "epoch": 0.8787878787878788,
      "grad_norm": 8.02001667022705,
      "learning_rate": 3.930415263748597e-05,
      "loss": 0.8522,
      "step": 1450
    },
    {
      "epoch": 0.8939393939393939,
      "grad_norm": 8.541698455810547,
      "learning_rate": 3.902356902356902e-05,
      "loss": 0.9519,
      "step": 1475
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 7.404212951660156,
      "learning_rate": 3.874298540965208e-05,
      "loss": 0.9692,
      "step": 1500
    },
    {
      "epoch": 0.9242424242424242,
      "grad_norm": 10.684733390808105,
      "learning_rate": 3.846240179573513e-05,
      "loss": 1.0417,
      "step": 1525
    },
    {
      "epoch": 0.9393939393939394,
      "grad_norm": 6.182531833648682,
      "learning_rate": 3.818181818181819e-05,
      "loss": 0.8632,
      "step": 1550
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 8.63337516784668,
      "learning_rate": 3.790123456790123e-05,
      "loss": 0.9318,
      "step": 1575
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 11.64743709564209,
      "learning_rate": 3.762065095398429e-05,
      "loss": 0.8535,
      "step": 1600
    },
    {
      "epoch": 0.9848484848484849,
      "grad_norm": 11.15438461303711,
      "learning_rate": 3.7340067340067345e-05,
      "loss": 0.9558,
      "step": 1625
    },
    {
      "epoch": 1.0,
      "grad_norm": 7.817443370819092,
      "learning_rate": 3.7059483726150395e-05,
      "loss": 0.9244,
      "step": 1650
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7645454545454545,
      "eval_f1_macro": 0.6989492792384068,
      "eval_f1_micro": 0.7645454545454545,
      "eval_f1_weighted": 0.7496986753676582,
      "eval_loss": 0.752944827079773,
      "eval_precision_macro": 0.8012284685903541,
      "eval_precision_micro": 0.7645454545454545,
      "eval_precision_weighted": 0.817408067065806,
      "eval_recall_macro": 0.7035,
      "eval_recall_micro": 0.7645454545454545,
      "eval_recall_weighted": 0.7645454545454545,
      "eval_runtime": 18.9801,
      "eval_samples_per_second": 173.866,
      "eval_steps_per_second": 10.906,
      "step": 1650
    },
    {
      "epoch": 1.0151515151515151,
      "grad_norm": 10.560444831848145,
      "learning_rate": 3.677890011223345e-05,
      "loss": 0.7833,
      "step": 1675
    },
    {
      "epoch": 1.0303030303030303,
      "grad_norm": 4.251949310302734,
      "learning_rate": 3.6498316498316496e-05,
      "loss": 0.7379,
      "step": 1700
    },
    {
      "epoch": 1.0454545454545454,
      "grad_norm": 10.605439186096191,
      "learning_rate": 3.621773288439955e-05,
      "loss": 0.7779,
      "step": 1725
    },
    {
      "epoch": 1.0606060606060606,
      "grad_norm": 7.418851375579834,
      "learning_rate": 3.59371492704826e-05,
      "loss": 0.7347,
      "step": 1750
    },
    {
      "epoch": 1.0757575757575757,
      "grad_norm": 10.209125518798828,
      "learning_rate": 3.565656565656566e-05,
      "loss": 0.8636,
      "step": 1775
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 5.001626491546631,
      "learning_rate": 3.537598204264871e-05,
      "loss": 0.8716,
      "step": 1800
    },
    {
      "epoch": 1.106060606060606,
      "grad_norm": 5.035640239715576,
      "learning_rate": 3.509539842873176e-05,
      "loss": 0.7529,
      "step": 1825
    },
    {
      "epoch": 1.121212121212121,
      "grad_norm": 7.653929710388184,
      "learning_rate": 3.481481481481482e-05,
      "loss": 0.8541,
      "step": 1850
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 11.623481750488281,
      "learning_rate": 3.453423120089787e-05,
      "loss": 0.8159,
      "step": 1875
    },
    {
      "epoch": 1.1515151515151516,
      "grad_norm": 9.897392272949219,
      "learning_rate": 3.425364758698092e-05,
      "loss": 0.5642,
      "step": 1900
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 8.24569320678711,
      "learning_rate": 3.3973063973063976e-05,
      "loss": 1.0238,
      "step": 1925
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 6.044914245605469,
      "learning_rate": 3.3692480359147026e-05,
      "loss": 0.8917,
      "step": 1950
    },
    {
      "epoch": 1.196969696969697,
      "grad_norm": 5.3622002601623535,
      "learning_rate": 3.3411896745230084e-05,
      "loss": 0.7951,
      "step": 1975
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 8.479634284973145,
      "learning_rate": 3.3131313131313134e-05,
      "loss": 0.9219,
      "step": 2000
    },
    {
      "epoch": 1.2272727272727273,
      "grad_norm": 10.730965614318848,
      "learning_rate": 3.2850729517396184e-05,
      "loss": 0.6391,
      "step": 2025
    },
    {
      "epoch": 1.2424242424242424,
      "grad_norm": 2.901815891265869,
      "learning_rate": 3.2570145903479235e-05,
      "loss": 0.7946,
      "step": 2050
    },
    {
      "epoch": 1.2575757575757576,
      "grad_norm": 7.174593448638916,
      "learning_rate": 3.228956228956229e-05,
      "loss": 0.7458,
      "step": 2075
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 5.743284702301025,
      "learning_rate": 3.200897867564535e-05,
      "loss": 0.8514,
      "step": 2100
    },
    {
      "epoch": 1.2878787878787878,
      "grad_norm": 5.6034464836120605,
      "learning_rate": 3.172839506172839e-05,
      "loss": 0.5711,
      "step": 2125
    },
    {
      "epoch": 1.303030303030303,
      "grad_norm": 6.2251996994018555,
      "learning_rate": 3.144781144781145e-05,
      "loss": 0.8115,
      "step": 2150
    },
    {
      "epoch": 1.3181818181818181,
      "grad_norm": 6.7410078048706055,
      "learning_rate": 3.11672278338945e-05,
      "loss": 0.7262,
      "step": 2175
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 5.232451438903809,
      "learning_rate": 3.088664421997756e-05,
      "loss": 0.7573,
      "step": 2200
    },
    {
      "epoch": 1.3484848484848486,
      "grad_norm": 4.0043439865112305,
      "learning_rate": 3.060606060606061e-05,
      "loss": 0.9982,
      "step": 2225
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 8.409883499145508,
      "learning_rate": 3.0325476992143658e-05,
      "loss": 0.7902,
      "step": 2250
    },
    {
      "epoch": 1.378787878787879,
      "grad_norm": 8.475225448608398,
      "learning_rate": 3.004489337822671e-05,
      "loss": 0.8343,
      "step": 2275
    },
    {
      "epoch": 1.393939393939394,
      "grad_norm": 4.989223480224609,
      "learning_rate": 2.976430976430977e-05,
      "loss": 0.8126,
      "step": 2300
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 5.526180267333984,
      "learning_rate": 2.9483726150392822e-05,
      "loss": 0.6589,
      "step": 2325
    },
    {
      "epoch": 1.4242424242424243,
      "grad_norm": 7.382469177246094,
      "learning_rate": 2.920314253647587e-05,
      "loss": 0.7384,
      "step": 2350
    },
    {
      "epoch": 1.4393939393939394,
      "grad_norm": 5.158092498779297,
      "learning_rate": 2.8922558922558923e-05,
      "loss": 0.6578,
      "step": 2375
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 4.976596832275391,
      "learning_rate": 2.8641975308641977e-05,
      "loss": 0.8378,
      "step": 2400
    },
    {
      "epoch": 1.4696969696969697,
      "grad_norm": 9.176383018493652,
      "learning_rate": 2.836139169472503e-05,
      "loss": 0.7935,
      "step": 2425
    },
    {
      "epoch": 1.4848484848484849,
      "grad_norm": 10.877906799316406,
      "learning_rate": 2.808080808080808e-05,
      "loss": 0.8241,
      "step": 2450
    },
    {
      "epoch": 1.5,
      "grad_norm": 12.3035249710083,
      "learning_rate": 2.7800224466891134e-05,
      "loss": 0.8298,
      "step": 2475
    },
    {
      "epoch": 1.5151515151515151,
      "grad_norm": 7.093743324279785,
      "learning_rate": 2.7519640852974188e-05,
      "loss": 0.7239,
      "step": 2500
    },
    {
      "epoch": 1.5303030303030303,
      "grad_norm": 14.64828872680664,
      "learning_rate": 2.7239057239057242e-05,
      "loss": 0.7086,
      "step": 2525
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 5.189187526702881,
      "learning_rate": 2.6958473625140296e-05,
      "loss": 0.7458,
      "step": 2550
    },
    {
      "epoch": 1.5606060606060606,
      "grad_norm": 7.915388107299805,
      "learning_rate": 2.6677890011223346e-05,
      "loss": 0.765,
      "step": 2575
    },
    {
      "epoch": 1.5757575757575757,
      "grad_norm": 5.491411209106445,
      "learning_rate": 2.63973063973064e-05,
      "loss": 0.8382,
      "step": 2600
    },
    {
      "epoch": 1.5909090909090908,
      "grad_norm": 10.231388092041016,
      "learning_rate": 2.6116722783389453e-05,
      "loss": 0.6905,
      "step": 2625
    },
    {
      "epoch": 1.606060606060606,
      "grad_norm": 4.599724769592285,
      "learning_rate": 2.5836139169472507e-05,
      "loss": 0.7544,
      "step": 2650
    },
    {
      "epoch": 1.621212121212121,
      "grad_norm": 9.038063049316406,
      "learning_rate": 2.5555555555555554e-05,
      "loss": 0.5843,
      "step": 2675
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 7.875892639160156,
      "learning_rate": 2.5274971941638608e-05,
      "loss": 0.7318,
      "step": 2700
    },
    {
      "epoch": 1.6515151515151514,
      "grad_norm": 7.997163772583008,
      "learning_rate": 2.499438832772166e-05,
      "loss": 0.6789,
      "step": 2725
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 12.051643371582031,
      "learning_rate": 2.4725028058361395e-05,
      "loss": 0.8427,
      "step": 2750
    },
    {
      "epoch": 1.6818181818181817,
      "grad_norm": 18.78903579711914,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 0.7387,
      "step": 2775
    },
    {
      "epoch": 1.696969696969697,
      "grad_norm": 5.628903865814209,
      "learning_rate": 2.41638608305275e-05,
      "loss": 0.6316,
      "step": 2800
    },
    {
      "epoch": 1.7121212121212122,
      "grad_norm": 6.309241771697998,
      "learning_rate": 2.388327721661055e-05,
      "loss": 0.6701,
      "step": 2825
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 6.3270392417907715,
      "learning_rate": 2.3602693602693603e-05,
      "loss": 0.5524,
      "step": 2850
    },
    {
      "epoch": 1.7424242424242424,
      "grad_norm": 3.400372266769409,
      "learning_rate": 2.3322109988776656e-05,
      "loss": 0.6322,
      "step": 2875
    },
    {
      "epoch": 1.7575757575757576,
      "grad_norm": 10.937955856323242,
      "learning_rate": 2.304152637485971e-05,
      "loss": 0.5497,
      "step": 2900
    },
    {
      "epoch": 1.7727272727272727,
      "grad_norm": 9.74146842956543,
      "learning_rate": 2.2760942760942764e-05,
      "loss": 0.7272,
      "step": 2925
    },
    {
      "epoch": 1.7878787878787878,
      "grad_norm": 24.714502334594727,
      "learning_rate": 2.2480359147025814e-05,
      "loss": 0.7961,
      "step": 2950
    },
    {
      "epoch": 1.803030303030303,
      "grad_norm": 3.8761136531829834,
      "learning_rate": 2.2199775533108868e-05,
      "loss": 0.6801,
      "step": 2975
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 5.883018493652344,
      "learning_rate": 2.191919191919192e-05,
      "loss": 0.7082,
      "step": 3000
    },
    {
      "epoch": 1.8333333333333335,
      "grad_norm": 7.082830905914307,
      "learning_rate": 2.1638608305274975e-05,
      "loss": 0.5892,
      "step": 3025
    },
    {
      "epoch": 1.8484848484848486,
      "grad_norm": 10.057491302490234,
      "learning_rate": 2.1358024691358026e-05,
      "loss": 0.7226,
      "step": 3050
    },
    {
      "epoch": 1.8636363636363638,
      "grad_norm": 3.2809817790985107,
      "learning_rate": 2.107744107744108e-05,
      "loss": 0.546,
      "step": 3075
    },
    {
      "epoch": 1.878787878787879,
      "grad_norm": 8.243097305297852,
      "learning_rate": 2.079685746352413e-05,
      "loss": 0.6747,
      "step": 3100
    },
    {
      "epoch": 1.893939393939394,
      "grad_norm": 7.2149457931518555,
      "learning_rate": 2.0516273849607184e-05,
      "loss": 0.6502,
      "step": 3125
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 5.218500137329102,
      "learning_rate": 2.0235690235690234e-05,
      "loss": 0.5866,
      "step": 3150
    },
    {
      "epoch": 1.9242424242424243,
      "grad_norm": 8.202606201171875,
      "learning_rate": 1.995510662177329e-05,
      "loss": 0.7029,
      "step": 3175
    },
    {
      "epoch": 1.9393939393939394,
      "grad_norm": 10.254151344299316,
      "learning_rate": 1.967452300785634e-05,
      "loss": 0.6972,
      "step": 3200
    },
    {
      "epoch": 1.9545454545454546,
      "grad_norm": 3.413534641265869,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.6599,
      "step": 3225
    },
    {
      "epoch": 1.9696969696969697,
      "grad_norm": 5.466010570526123,
      "learning_rate": 1.911335578002245e-05,
      "loss": 0.6938,
      "step": 3250
    },
    {
      "epoch": 1.9848484848484849,
      "grad_norm": 4.317994594573975,
      "learning_rate": 1.88327721661055e-05,
      "loss": 0.7309,
      "step": 3275
    },
    {
      "epoch": 2.0,
      "grad_norm": 7.49802303314209,
      "learning_rate": 1.8552188552188553e-05,
      "loss": 0.5147,
      "step": 3300
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8475757575757575,
      "eval_f1_macro": 0.8117394542384239,
      "eval_f1_micro": 0.8475757575757575,
      "eval_f1_weighted": 0.840866356559439,
      "eval_loss": 0.5004541873931885,
      "eval_precision_macro": 0.8356258037544531,
      "eval_precision_micro": 0.8475757575757575,
      "eval_precision_weighted": 0.8505208734646063,
      "eval_recall_macro": 0.8084166666666667,
      "eval_recall_micro": 0.8475757575757575,
      "eval_recall_weighted": 0.8475757575757575,
      "eval_runtime": 18.9039,
      "eval_samples_per_second": 174.567,
      "eval_steps_per_second": 10.95,
      "step": 3300
    },
    {
      "epoch": 2.015151515151515,
      "grad_norm": 4.281924247741699,
      "learning_rate": 1.8271604938271607e-05,
      "loss": 0.4413,
      "step": 3325
    },
    {
      "epoch": 2.0303030303030303,
      "grad_norm": 11.078526496887207,
      "learning_rate": 1.799102132435466e-05,
      "loss": 0.516,
      "step": 3350
    },
    {
      "epoch": 2.0454545454545454,
      "grad_norm": 12.010851860046387,
      "learning_rate": 1.771043771043771e-05,
      "loss": 0.6546,
      "step": 3375
    },
    {
      "epoch": 2.0606060606060606,
      "grad_norm": 6.438499450683594,
      "learning_rate": 1.7429854096520764e-05,
      "loss": 0.6509,
      "step": 3400
    },
    {
      "epoch": 2.0757575757575757,
      "grad_norm": 2.6832661628723145,
      "learning_rate": 1.7149270482603815e-05,
      "loss": 0.6495,
      "step": 3425
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 6.37797737121582,
      "learning_rate": 1.686868686868687e-05,
      "loss": 0.6738,
      "step": 3450
    },
    {
      "epoch": 2.106060606060606,
      "grad_norm": 5.069665908813477,
      "learning_rate": 1.6588103254769922e-05,
      "loss": 0.5145,
      "step": 3475
    },
    {
      "epoch": 2.121212121212121,
      "grad_norm": 12.002437591552734,
      "learning_rate": 1.6307519640852976e-05,
      "loss": 0.7411,
      "step": 3500
    },
    {
      "epoch": 2.1363636363636362,
      "grad_norm": 4.099394798278809,
      "learning_rate": 1.6026936026936026e-05,
      "loss": 0.6736,
      "step": 3525
    },
    {
      "epoch": 2.1515151515151514,
      "grad_norm": 11.855905532836914,
      "learning_rate": 1.574635241301908e-05,
      "loss": 0.4738,
      "step": 3550
    },
    {
      "epoch": 2.1666666666666665,
      "grad_norm": 9.715377807617188,
      "learning_rate": 1.5465768799102134e-05,
      "loss": 0.5321,
      "step": 3575
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 10.646961212158203,
      "learning_rate": 1.5185185185185186e-05,
      "loss": 0.5856,
      "step": 3600
    },
    {
      "epoch": 2.196969696969697,
      "grad_norm": 9.710843086242676,
      "learning_rate": 1.490460157126824e-05,
      "loss": 0.6343,
      "step": 3625
    },
    {
      "epoch": 2.212121212121212,
      "grad_norm": 5.35206937789917,
      "learning_rate": 1.4624017957351292e-05,
      "loss": 0.7256,
      "step": 3650
    },
    {
      "epoch": 2.227272727272727,
      "grad_norm": 6.582408428192139,
      "learning_rate": 1.4343434343434345e-05,
      "loss": 0.5843,
      "step": 3675
    },
    {
      "epoch": 2.242424242424242,
      "grad_norm": 11.637269973754883,
      "learning_rate": 1.4062850729517396e-05,
      "loss": 0.5725,
      "step": 3700
    },
    {
      "epoch": 2.257575757575758,
      "grad_norm": 11.256560325622559,
      "learning_rate": 1.378226711560045e-05,
      "loss": 0.5129,
      "step": 3725
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 4.118545055389404,
      "learning_rate": 1.3501683501683501e-05,
      "loss": 0.6303,
      "step": 3750
    },
    {
      "epoch": 2.287878787878788,
      "grad_norm": 8.819905281066895,
      "learning_rate": 1.3221099887766555e-05,
      "loss": 0.4861,
      "step": 3775
    },
    {
      "epoch": 2.303030303030303,
      "grad_norm": 11.89466667175293,
      "learning_rate": 1.2940516273849607e-05,
      "loss": 0.6803,
      "step": 3800
    },
    {
      "epoch": 2.3181818181818183,
      "grad_norm": 14.192046165466309,
      "learning_rate": 1.2659932659932661e-05,
      "loss": 0.6194,
      "step": 3825
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 8.376368522644043,
      "learning_rate": 1.2379349046015713e-05,
      "loss": 0.5394,
      "step": 3850
    },
    {
      "epoch": 2.3484848484848486,
      "grad_norm": 2.375666856765747,
      "learning_rate": 1.2098765432098767e-05,
      "loss": 0.5348,
      "step": 3875
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 6.248928070068359,
      "learning_rate": 1.1818181818181819e-05,
      "loss": 0.6926,
      "step": 3900
    },
    {
      "epoch": 2.378787878787879,
      "grad_norm": 3.3347110748291016,
      "learning_rate": 1.153759820426487e-05,
      "loss": 0.5196,
      "step": 3925
    },
    {
      "epoch": 2.393939393939394,
      "grad_norm": 7.418989658355713,
      "learning_rate": 1.1257014590347924e-05,
      "loss": 0.5975,
      "step": 3950
    },
    {
      "epoch": 2.409090909090909,
      "grad_norm": 17.091835021972656,
      "learning_rate": 1.0976430976430976e-05,
      "loss": 0.5818,
      "step": 3975
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 4.748253345489502,
      "learning_rate": 1.0695847362514029e-05,
      "loss": 0.5008,
      "step": 4000
    },
    {
      "epoch": 2.4393939393939394,
      "grad_norm": 2.7156593799591064,
      "learning_rate": 1.0415263748597082e-05,
      "loss": 0.5977,
      "step": 4025
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 3.704885482788086,
      "learning_rate": 1.0134680134680136e-05,
      "loss": 0.428,
      "step": 4050
    },
    {
      "epoch": 2.4696969696969697,
      "grad_norm": 6.289621829986572,
      "learning_rate": 9.854096520763188e-06,
      "loss": 0.6108,
      "step": 4075
    },
    {
      "epoch": 2.484848484848485,
      "grad_norm": 11.862101554870605,
      "learning_rate": 9.573512906846242e-06,
      "loss": 0.6182,
      "step": 4100
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.4363837242126465,
      "learning_rate": 9.292929292929294e-06,
      "loss": 0.6187,
      "step": 4125
    },
    {
      "epoch": 2.515151515151515,
      "grad_norm": 4.281114101409912,
      "learning_rate": 9.012345679012346e-06,
      "loss": 0.5247,
      "step": 4150
    },
    {
      "epoch": 2.5303030303030303,
      "grad_norm": 7.008238315582275,
      "learning_rate": 8.7317620650954e-06,
      "loss": 0.4063,
      "step": 4175
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 10.460261344909668,
      "learning_rate": 8.451178451178452e-06,
      "loss": 0.4699,
      "step": 4200
    },
    {
      "epoch": 2.5606060606060606,
      "grad_norm": 6.779576301574707,
      "learning_rate": 8.170594837261504e-06,
      "loss": 0.632,
      "step": 4225
    },
    {
      "epoch": 2.5757575757575757,
      "grad_norm": 12.081551551818848,
      "learning_rate": 7.890011223344557e-06,
      "loss": 0.5803,
      "step": 4250
    },
    {
      "epoch": 2.590909090909091,
      "grad_norm": 14.813763618469238,
      "learning_rate": 7.609427609427609e-06,
      "loss": 0.5825,
      "step": 4275
    },
    {
      "epoch": 2.606060606060606,
      "grad_norm": 3.7761809825897217,
      "learning_rate": 7.328843995510662e-06,
      "loss": 0.4587,
      "step": 4300
    },
    {
      "epoch": 2.621212121212121,
      "grad_norm": 2.71801495552063,
      "learning_rate": 7.048260381593716e-06,
      "loss": 0.5465,
      "step": 4325
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 5.783569812774658,
      "learning_rate": 6.767676767676769e-06,
      "loss": 0.5003,
      "step": 4350
    },
    {
      "epoch": 2.6515151515151514,
      "grad_norm": 12.182541847229004,
      "learning_rate": 6.487093153759821e-06,
      "loss": 0.5783,
      "step": 4375
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 12.15306282043457,
      "learning_rate": 6.206509539842874e-06,
      "loss": 0.5159,
      "step": 4400
    },
    {
      "epoch": 2.6818181818181817,
      "grad_norm": 8.337636947631836,
      "learning_rate": 5.925925925925927e-06,
      "loss": 0.4119,
      "step": 4425
    },
    {
      "epoch": 2.6969696969696972,
      "grad_norm": 3.530006170272827,
      "learning_rate": 5.645342312008979e-06,
      "loss": 0.4734,
      "step": 4450
    },
    {
      "epoch": 2.712121212121212,
      "grad_norm": 7.290157794952393,
      "learning_rate": 5.364758698092032e-06,
      "loss": 0.5549,
      "step": 4475
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 6.58420467376709,
      "learning_rate": 5.0841750841750845e-06,
      "loss": 0.4441,
      "step": 4500
    },
    {
      "epoch": 2.742424242424242,
      "grad_norm": 4.345494747161865,
      "learning_rate": 4.803591470258137e-06,
      "loss": 0.48,
      "step": 4525
    },
    {
      "epoch": 2.757575757575758,
      "grad_norm": 10.148066520690918,
      "learning_rate": 4.523007856341189e-06,
      "loss": 0.4423,
      "step": 4550
    },
    {
      "epoch": 2.7727272727272725,
      "grad_norm": 6.80890417098999,
      "learning_rate": 4.242424242424243e-06,
      "loss": 0.4497,
      "step": 4575
    },
    {
      "epoch": 2.787878787878788,
      "grad_norm": 9.686090469360352,
      "learning_rate": 3.961840628507295e-06,
      "loss": 0.3885,
      "step": 4600
    },
    {
      "epoch": 2.8030303030303028,
      "grad_norm": 6.605715274810791,
      "learning_rate": 3.681257014590348e-06,
      "loss": 0.4989,
      "step": 4625
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 5.217257499694824,
      "learning_rate": 3.400673400673401e-06,
      "loss": 0.5379,
      "step": 4650
    },
    {
      "epoch": 2.8333333333333335,
      "grad_norm": 8.507195472717285,
      "learning_rate": 3.1200897867564538e-06,
      "loss": 0.6228,
      "step": 4675
    },
    {
      "epoch": 2.8484848484848486,
      "grad_norm": 5.713125228881836,
      "learning_rate": 2.8395061728395062e-06,
      "loss": 0.4659,
      "step": 4700
    },
    {
      "epoch": 2.8636363636363638,
      "grad_norm": 13.924797058105469,
      "learning_rate": 2.558922558922559e-06,
      "loss": 0.5267,
      "step": 4725
    },
    {
      "epoch": 2.878787878787879,
      "grad_norm": 9.560832023620605,
      "learning_rate": 2.2783389450056116e-06,
      "loss": 0.5839,
      "step": 4750
    },
    {
      "epoch": 2.893939393939394,
      "grad_norm": 4.680112361907959,
      "learning_rate": 1.9977553310886645e-06,
      "loss": 0.3282,
      "step": 4775
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 3.320807695388794,
      "learning_rate": 1.7171717171717171e-06,
      "loss": 0.4741,
      "step": 4800
    },
    {
      "epoch": 2.9242424242424243,
      "grad_norm": 6.167539119720459,
      "learning_rate": 1.43658810325477e-06,
      "loss": 0.5005,
      "step": 4825
    },
    {
      "epoch": 2.9393939393939394,
      "grad_norm": 6.241283893585205,
      "learning_rate": 1.156004489337823e-06,
      "loss": 0.4634,
      "step": 4850
    },
    {
      "epoch": 2.9545454545454546,
      "grad_norm": 2.0378332138061523,
      "learning_rate": 8.754208754208755e-07,
      "loss": 0.4892,
      "step": 4875
    },
    {
      "epoch": 2.9696969696969697,
      "grad_norm": 8.896339416503906,
      "learning_rate": 5.948372615039282e-07,
      "loss": 0.5611,
      "step": 4900
    },
    {
      "epoch": 2.984848484848485,
      "grad_norm": 2.1297545433044434,
      "learning_rate": 3.1425364758698096e-07,
      "loss": 0.516,
      "step": 4925
    },
    {
      "epoch": 3.0,
      "grad_norm": 10.862909317016602,
      "learning_rate": 3.367003367003367e-08,
      "loss": 0.4453,
      "step": 4950
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.886969696969697,
      "eval_f1_macro": 0.8644630522360383,
      "eval_f1_micro": 0.886969696969697,
      "eval_f1_weighted": 0.8837489529217776,
      "eval_loss": 0.3821594715118408,
      "eval_precision_macro": 0.8700338902181693,
      "eval_precision_micro": 0.886969696969697,
      "eval_precision_weighted": 0.8838390180385471,
      "eval_recall_macro": 0.8628333333333335,
      "eval_recall_micro": 0.886969696969697,
      "eval_recall_weighted": 0.886969696969697,
      "eval_runtime": 18.9146,
      "eval_samples_per_second": 174.469,
      "eval_steps_per_second": 10.944,
      "step": 4950
    }
  ],
  "logging_steps": 25,
  "max_steps": 4950,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.01
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.084816011852841e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|