{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 492,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0040650406504065045,
      "grad_norm": 16.96013186396094,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.5409,
      "step": 1
    },
    {
      "epoch": 0.02032520325203252,
      "grad_norm": 16.916170957900782,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.5345,
      "step": 5
    },
    {
      "epoch": 0.04065040650406504,
      "grad_norm": 4.992436085095587,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.3026,
      "step": 10
    },
    {
      "epoch": 0.06097560975609756,
      "grad_norm": 2.654639415277273,
      "learning_rate": 6e-06,
      "loss": 1.2217,
      "step": 15
    },
    {
      "epoch": 0.08130081300813008,
      "grad_norm": 2.2317048430147577,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.162,
      "step": 20
    },
    {
      "epoch": 0.1016260162601626,
      "grad_norm": 1.3761478430076108,
      "learning_rate": 1e-05,
      "loss": 1.124,
      "step": 25
    },
    {
      "epoch": 0.12195121951219512,
      "grad_norm": 1.1540940483296693,
      "learning_rate": 1.2e-05,
      "loss": 1.1148,
      "step": 30
    },
    {
      "epoch": 0.14227642276422764,
      "grad_norm": 1.1928473894163938,
      "learning_rate": 1.4e-05,
      "loss": 1.1046,
      "step": 35
    },
    {
      "epoch": 0.16260162601626016,
      "grad_norm": 1.185131002730624,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.1034,
      "step": 40
    },
    {
      "epoch": 0.18292682926829268,
      "grad_norm": 1.3370638997381883,
      "learning_rate": 1.8e-05,
      "loss": 1.0685,
      "step": 45
    },
    {
      "epoch": 0.2032520325203252,
      "grad_norm": 1.4250484136195543,
      "learning_rate": 2e-05,
      "loss": 1.0857,
      "step": 50
    },
    {
      "epoch": 0.22357723577235772,
      "grad_norm": 1.3017876025421216,
      "learning_rate": 1.9993685782892184e-05,
      "loss": 1.0214,
      "step": 55
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 1.486201507551871,
      "learning_rate": 1.9974751105436266e-05,
      "loss": 1.0418,
      "step": 60
    },
    {
      "epoch": 0.26422764227642276,
      "grad_norm": 1.2014188024997923,
      "learning_rate": 1.9943219879165113e-05,
      "loss": 1.0753,
      "step": 65
    },
    {
      "epoch": 0.2845528455284553,
      "grad_norm": 1.3819404641565476,
      "learning_rate": 1.98991319230804e-05,
      "loss": 1.0214,
      "step": 70
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 1.1959378462736852,
      "learning_rate": 1.984254291336743e-05,
      "loss": 1.0168,
      "step": 75
    },
    {
      "epoch": 0.3252032520325203,
      "grad_norm": 1.183753776787632,
      "learning_rate": 1.9773524313084857e-05,
      "loss": 1.0293,
      "step": 80
    },
    {
      "epoch": 0.34552845528455284,
      "grad_norm": 1.2267547234529148,
      "learning_rate": 1.9692163281918016e-05,
      "loss": 1.0394,
      "step": 85
    },
    {
      "epoch": 0.36585365853658536,
      "grad_norm": 1.1369844018691624,
      "learning_rate": 1.959856256610988e-05,
      "loss": 1.0491,
      "step": 90
    },
    {
      "epoch": 0.3861788617886179,
      "grad_norm": 1.1999534529754976,
      "learning_rate": 1.9492840368708668e-05,
      "loss": 1.0136,
      "step": 95
    },
    {
      "epoch": 0.4065040650406504,
      "grad_norm": 1.1735600472638217,
      "learning_rate": 1.937513020029588e-05,
      "loss": 1.0361,
      "step": 100
    },
    {
      "epoch": 0.4268292682926829,
      "grad_norm": 1.250115647846771,
      "learning_rate": 1.9245580710383344e-05,
      "loss": 1.0283,
      "step": 105
    },
    {
      "epoch": 0.44715447154471544,
      "grad_norm": 1.1840318671469985,
      "learning_rate": 1.9104355499692166e-05,
      "loss": 1.0483,
      "step": 110
    },
    {
      "epoch": 0.46747967479674796,
      "grad_norm": 1.1355456855985984,
      "learning_rate": 1.8951632913550625e-05,
      "loss": 1.0076,
      "step": 115
    },
    {
      "epoch": 0.4878048780487805,
      "grad_norm": 1.176376978993297,
      "learning_rate": 1.8787605816671956e-05,
      "loss": 1.0451,
      "step": 120
    },
    {
      "epoch": 0.508130081300813,
      "grad_norm": 1.2157595019570657,
      "learning_rate": 1.8612481349596406e-05,
      "loss": 0.9807,
      "step": 125
    },
    {
      "epoch": 0.5284552845528455,
      "grad_norm": 1.617382778432822,
      "learning_rate": 1.8426480667105178e-05,
      "loss": 1.0273,
      "step": 130
    },
    {
      "epoch": 0.5487804878048781,
      "grad_norm": 1.072969358036799,
      "learning_rate": 1.8229838658936566e-05,
      "loss": 1.0239,
      "step": 135
    },
    {
      "epoch": 0.5691056910569106,
      "grad_norm": 1.1349667239072485,
      "learning_rate": 1.8022803653156983e-05,
      "loss": 1.0191,
      "step": 140
    },
    {
      "epoch": 0.5894308943089431,
      "grad_norm": 1.061992535593682,
      "learning_rate": 1.7805637102561516e-05,
      "loss": 1.004,
      "step": 145
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 1.1127139653197202,
      "learning_rate": 1.757861325449997e-05,
      "loss": 0.9924,
      "step": 150
    },
    {
      "epoch": 0.6300813008130082,
      "grad_norm": 1.1022251104365055,
      "learning_rate": 1.73420188045454e-05,
      "loss": 0.9971,
      "step": 155
    },
    {
      "epoch": 0.6504065040650406,
      "grad_norm": 1.273269943630818,
      "learning_rate": 1.7096152534442515e-05,
      "loss": 1.0056,
      "step": 160
    },
    {
      "epoch": 0.6707317073170732,
      "grad_norm": 1.0563584964584172,
      "learning_rate": 1.6841324934793096e-05,
      "loss": 0.9856,
      "step": 165
    },
    {
      "epoch": 0.6910569105691057,
      "grad_norm": 1.145292649525439,
      "learning_rate": 1.6577857812954994e-05,
      "loss": 1.0083,
      "step": 170
    },
    {
      "epoch": 0.7113821138211383,
      "grad_norm": 1.0928885418057457,
      "learning_rate": 1.6306083886649823e-05,
      "loss": 0.9936,
      "step": 175
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 1.13829615874657,
      "learning_rate": 1.6026346363792565e-05,
      "loss": 0.9874,
      "step": 180
    },
    {
      "epoch": 0.7520325203252033,
      "grad_norm": 1.0862202261960012,
      "learning_rate": 1.573899850907373e-05,
      "loss": 0.9856,
      "step": 185
    },
    {
      "epoch": 0.7723577235772358,
      "grad_norm": 1.0544760585534974,
      "learning_rate": 1.5444403197841345e-05,
      "loss": 1.0079,
      "step": 190
    },
    {
      "epoch": 0.7926829268292683,
      "grad_norm": 1.1972116619581246,
      "learning_rate": 1.514293245784623e-05,
      "loss": 0.9818,
      "step": 195
    },
    {
      "epoch": 0.8130081300813008,
      "grad_norm": 1.0551988260235088,
      "learning_rate": 1.4834966999429179e-05,
      "loss": 0.9762,
      "step": 200
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.0937895071472443,
      "learning_rate": 1.4520895734743419e-05,
      "loss": 0.9967,
      "step": 205
    },
    {
      "epoch": 0.8536585365853658,
      "grad_norm": 2.1677819071037283,
      "learning_rate": 1.4201115286619464e-05,
      "loss": 0.9642,
      "step": 210
    },
    {
      "epoch": 0.8739837398373984,
      "grad_norm": 1.0198723964107377,
      "learning_rate": 1.387602948769257e-05,
      "loss": 0.9717,
      "step": 215
    },
    {
      "epoch": 0.8943089430894309,
      "grad_norm": 1.0362342965679063,
      "learning_rate": 1.3546048870425356e-05,
      "loss": 0.9714,
      "step": 220
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 0.997164008308899,
      "learning_rate": 1.3211590148669586e-05,
      "loss": 0.9713,
      "step": 225
    },
    {
      "epoch": 0.9349593495934959,
      "grad_norm": 1.1623269423245046,
      "learning_rate": 1.2873075691421808e-05,
      "loss": 0.9454,
      "step": 230
    },
    {
      "epoch": 0.9552845528455285,
      "grad_norm": 1.0749975590040124,
      "learning_rate": 1.2530932989437463e-05,
      "loss": 0.9845,
      "step": 235
    },
    {
      "epoch": 0.975609756097561,
      "grad_norm": 0.988624506908827,
      "learning_rate": 1.2185594115376991e-05,
      "loss": 0.9547,
      "step": 240
    },
    {
      "epoch": 0.9959349593495935,
      "grad_norm": 1.0165003436015188,
      "learning_rate": 1.1837495178165706e-05,
      "loss": 0.9962,
      "step": 245
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.968990683555603,
      "eval_runtime": 4.2249,
      "eval_samples_per_second": 37.397,
      "eval_steps_per_second": 0.71,
      "step": 246
    },
    {
      "epoch": 1.016260162601626,
      "grad_norm": 2.2206925031006675,
      "learning_rate": 1.1487075772256517e-05,
      "loss": 0.7411,
      "step": 250
    },
    {
      "epoch": 1.0365853658536586,
      "grad_norm": 1.5057083208667505,
      "learning_rate": 1.1134778422490971e-05,
      "loss": 0.6718,
      "step": 255
    },
    {
      "epoch": 1.056910569105691,
      "grad_norm": 1.1997738500067352,
      "learning_rate": 1.0781048025259648e-05,
      "loss": 0.6885,
      "step": 260
    },
    {
      "epoch": 1.0772357723577235,
      "grad_norm": 1.129770949868323,
      "learning_rate": 1.0426331286667701e-05,
      "loss": 0.6459,
      "step": 265
    },
    {
      "epoch": 1.0975609756097562,
      "grad_norm": 1.0821575635964367,
      "learning_rate": 1.0071076158414977e-05,
      "loss": 0.6578,
      "step": 270
    },
    {
      "epoch": 1.1178861788617886,
      "grad_norm": 0.9858678711249164,
      "learning_rate": 9.715731272103172e-06,
      "loss": 0.6665,
      "step": 275
    },
    {
      "epoch": 1.1382113821138211,
      "grad_norm": 1.0404542015906177,
      "learning_rate": 9.360745372684346e-06,
      "loss": 0.6682,
      "step": 280
    },
    {
      "epoch": 1.1585365853658536,
      "grad_norm": 1.0009568415817596,
      "learning_rate": 9.00656675176633e-06,
      "loss": 0.6557,
      "step": 285
    },
    {
      "epoch": 1.1788617886178863,
      "grad_norm": 1.0425691363766654,
      "learning_rate": 8.653642681490608e-06,
      "loss": 0.6632,
      "step": 290
    },
    {
      "epoch": 1.1991869918699187,
      "grad_norm": 0.9980360375332701,
      "learning_rate": 8.30241884969764e-06,
      "loss": 0.6557,
      "step": 295
    },
    {
      "epoch": 1.2195121951219512,
      "grad_norm": 1.1151520476299475,
      "learning_rate": 7.953338797092902e-06,
      "loss": 0.676,
      "step": 300
    },
    {
      "epoch": 1.2398373983739837,
      "grad_norm": 1.1095243103350756,
      "learning_rate": 7.606843357124426e-06,
      "loss": 0.6514,
      "step": 305
    },
    {
      "epoch": 1.2601626016260163,
      "grad_norm": 1.0773726823725258,
      "learning_rate": 7.263370099279173e-06,
      "loss": 0.6592,
      "step": 310
    },
    {
      "epoch": 1.2804878048780488,
      "grad_norm": 1.0415539344975482,
      "learning_rate": 6.923352776501302e-06,
      "loss": 0.663,
      "step": 315
    },
    {
      "epoch": 1.3008130081300813,
      "grad_norm": 0.9921263103830474,
      "learning_rate": 6.587220777430097e-06,
      "loss": 0.6554,
      "step": 320
    },
    {
      "epoch": 1.321138211382114,
      "grad_norm": 1.0067612241699029,
      "learning_rate": 6.255398584149366e-06,
      "loss": 0.6592,
      "step": 325
    },
    {
      "epoch": 1.3414634146341464,
      "grad_norm": 0.9539630263562321,
      "learning_rate": 5.928305236133016e-06,
      "loss": 0.638,
      "step": 330
    },
    {
      "epoch": 1.3617886178861789,
      "grad_norm": 0.9724353060112348,
      "learning_rate": 5.60635380106383e-06,
      "loss": 0.6664,
      "step": 335
    },
    {
      "epoch": 1.3821138211382114,
      "grad_norm": 0.9667751887299418,
      "learning_rate": 5.2899508531936526e-06,
      "loss": 0.6582,
      "step": 340
    },
    {
      "epoch": 1.4024390243902438,
      "grad_norm": 1.0179726164365328,
      "learning_rate": 4.979495959903759e-06,
      "loss": 0.658,
      "step": 345
    },
    {
      "epoch": 1.4227642276422765,
      "grad_norm": 0.9892639534813169,
      "learning_rate": 4.675381177113837e-06,
      "loss": 0.6588,
      "step": 350
    },
    {
      "epoch": 1.443089430894309,
      "grad_norm": 0.9663435093514263,
      "learning_rate": 4.377990554176729e-06,
      "loss": 0.6295,
      "step": 355
    },
    {
      "epoch": 1.4634146341463414,
      "grad_norm": 0.9475872699295589,
      "learning_rate": 4.087699648884248e-06,
      "loss": 0.6309,
      "step": 360
    },
    {
      "epoch": 1.4837398373983741,
      "grad_norm": 1.0174077533995098,
      "learning_rate": 3.804875053196477e-06,
      "loss": 0.6548,
      "step": 365
    },
    {
      "epoch": 1.5040650406504064,
      "grad_norm": 0.9473261301601033,
      "learning_rate": 3.529873930293546e-06,
      "loss": 0.6585,
      "step": 370
    },
    {
      "epoch": 1.524390243902439,
      "grad_norm": 0.9724565020578093,
      "learning_rate": 3.2630435635344283e-06,
      "loss": 0.6579,
      "step": 375
    },
    {
      "epoch": 1.5447154471544715,
      "grad_norm": 0.9377507057850171,
      "learning_rate": 3.004720917892464e-06,
      "loss": 0.6325,
      "step": 380
    },
    {
      "epoch": 1.565040650406504,
      "grad_norm": 1.220689589737471,
      "learning_rate": 2.7552322144213405e-06,
      "loss": 0.6656,
      "step": 385
    },
    {
      "epoch": 1.5853658536585367,
      "grad_norm": 0.9445474318613094,
      "learning_rate": 2.514892518288988e-06,
      "loss": 0.6484,
      "step": 390
    },
    {
      "epoch": 1.6056910569105691,
      "grad_norm": 0.915729013522606,
      "learning_rate": 2.2840053408996154e-06,
      "loss": 0.6611,
      "step": 395
    },
    {
      "epoch": 1.6260162601626016,
      "grad_norm": 0.9261545603143331,
      "learning_rate": 2.0628622566063063e-06,
      "loss": 0.6366,
      "step": 400
    },
    {
      "epoch": 1.6463414634146343,
      "grad_norm": 0.9879532897121964,
      "learning_rate": 1.8517425344982831e-06,
      "loss": 0.6349,
      "step": 405
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.9360662238523524,
      "learning_rate": 1.6509127857277784e-06,
      "loss": 0.654,
      "step": 410
    },
    {
      "epoch": 1.6869918699186992,
      "grad_norm": 0.9548880407020373,
      "learning_rate": 1.4606266268218783e-06,
      "loss": 0.6625,
      "step": 415
    },
    {
      "epoch": 1.7073170731707317,
      "grad_norm": 0.9497166098139703,
      "learning_rate": 1.2811243594045697e-06,
      "loss": 0.6593,
      "step": 420
    },
    {
      "epoch": 1.7276422764227641,
      "grad_norm": 0.937994818106022,
      "learning_rate": 1.1126326667334196e-06,
      "loss": 0.657,
      "step": 425
    },
    {
      "epoch": 1.7479674796747968,
      "grad_norm": 0.9426215224621286,
      "learning_rate": 9.55364327434105e-07,
      "loss": 0.6572,
      "step": 430
    },
    {
      "epoch": 1.7682926829268293,
      "grad_norm": 0.956528393469926,
      "learning_rate": 8.095179467943293e-07,
      "loss": 0.6538,
      "step": 435
    },
    {
      "epoch": 1.7886178861788617,
      "grad_norm": 0.9816007976764832,
      "learning_rate": 6.752777059564431e-07,
      "loss": 0.6683,
      "step": 440
    },
    {
      "epoch": 1.8089430894308944,
      "grad_norm": 0.9327650063702297,
      "learning_rate": 5.528131293254957e-07,
      "loss": 0.6204,
      "step": 445
    },
    {
      "epoch": 1.8292682926829267,
      "grad_norm": 0.9411103321287626,
      "learning_rate": 4.4227887048646335e-07,
      "loss": 0.6595,
      "step": 450
    },
    {
      "epoch": 1.8495934959349594,
      "grad_norm": 0.9788741395392418,
      "learning_rate": 3.4381451690097653e-07,
      "loss": 0.6425,
      "step": 455
    },
    {
      "epoch": 1.8699186991869918,
      "grad_norm": 0.9336996330937468,
      "learning_rate": 2.5754441363021854e-07,
      "loss": 0.6345,
      "step": 460
    },
    {
      "epoch": 1.8902439024390243,
      "grad_norm": 0.9453351615169212,
      "learning_rate": 1.8357750630658367e-07,
      "loss": 0.6489,
      "step": 465
    },
    {
      "epoch": 1.910569105691057,
      "grad_norm": 0.9708668647842268,
      "learning_rate": 1.2200720355239893e-07,
      "loss": 0.6509,
      "step": 470
    },
    {
      "epoch": 1.9308943089430894,
      "grad_norm": 0.984113893821567,
      "learning_rate": 7.291125901946027e-08,
      "loss": 0.6546,
      "step": 475
    },
    {
      "epoch": 1.951219512195122,
      "grad_norm": 0.9685414915217074,
      "learning_rate": 3.635167319834709e-08,
      "loss": 0.6384,
      "step": 480
    },
    {
      "epoch": 1.9715447154471546,
      "grad_norm": 0.9169491016884999,
      "learning_rate": 1.2374615121508726e-08,
      "loss": 0.6367,
      "step": 485
    },
    {
      "epoch": 1.9918699186991868,
      "grad_norm": 0.9084443808404363,
      "learning_rate": 1.0103640590064524e-09,
      "loss": 0.6291,
      "step": 490
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.9818947911262512,
      "eval_runtime": 6.2779,
      "eval_samples_per_second": 25.167,
      "eval_steps_per_second": 0.478,
      "step": 492
    },
    {
      "epoch": 2.0,
      "step": 492,
      "total_flos": 103014790594560.0,
      "train_loss": 0.8469056426509609,
      "train_runtime": 3499.6534,
      "train_samples_per_second": 8.98,
      "train_steps_per_second": 0.141
    }
  ],
  "logging_steps": 5,
  "max_steps": 492,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 103014790594560.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}