{
  "best_metric": 0.5245915055274963,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.11245431543435479,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.000562271577171774,
      "grad_norm": 0.9277647137641907,
      "learning_rate": 1e-05,
      "loss": 0.9523,
      "step": 1
    },
    {
      "epoch": 0.000562271577171774,
      "eval_loss": 1.107572078704834,
      "eval_runtime": 60.3006,
      "eval_samples_per_second": 49.668,
      "eval_steps_per_second": 12.421,
      "step": 1
    },
    {
      "epoch": 0.001124543154343548,
      "grad_norm": 1.1947993040084839,
      "learning_rate": 2e-05,
      "loss": 0.9678,
      "step": 2
    },
    {
      "epoch": 0.0016868147315153219,
      "grad_norm": 1.1136486530303955,
      "learning_rate": 3e-05,
      "loss": 0.9377,
      "step": 3
    },
    {
      "epoch": 0.002249086308687096,
      "grad_norm": 1.3076503276824951,
      "learning_rate": 4e-05,
      "loss": 1.055,
      "step": 4
    },
    {
      "epoch": 0.0028113578858588698,
      "grad_norm": 1.0048154592514038,
      "learning_rate": 5e-05,
      "loss": 0.9123,
      "step": 5
    },
    {
      "epoch": 0.0033736294630306437,
      "grad_norm": 1.0582215785980225,
      "learning_rate": 6e-05,
      "loss": 0.9838,
      "step": 6
    },
    {
      "epoch": 0.003935901040202418,
      "grad_norm": 1.1351639032363892,
      "learning_rate": 7e-05,
      "loss": 0.9163,
      "step": 7
    },
    {
      "epoch": 0.004498172617374192,
      "grad_norm": 1.0295337438583374,
      "learning_rate": 8e-05,
      "loss": 0.9018,
      "step": 8
    },
    {
      "epoch": 0.005060444194545966,
      "grad_norm": 0.9168885946273804,
      "learning_rate": 9e-05,
      "loss": 0.8344,
      "step": 9
    },
    {
      "epoch": 0.0056227157717177395,
      "grad_norm": 1.0283502340316772,
      "learning_rate": 0.0001,
      "loss": 0.8446,
      "step": 10
    },
    {
      "epoch": 0.006184987348889514,
      "grad_norm": 1.1670539379119873,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.787,
      "step": 11
    },
    {
      "epoch": 0.0067472589260612875,
      "grad_norm": 0.9883449673652649,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.7885,
      "step": 12
    },
    {
      "epoch": 0.007309530503233062,
      "grad_norm": 1.1391271352767944,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.8175,
      "step": 13
    },
    {
      "epoch": 0.007871802080404836,
      "grad_norm": 0.9470503330230713,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.7658,
      "step": 14
    },
    {
      "epoch": 0.00843407365757661,
      "grad_norm": 0.8227817416191101,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.6894,
      "step": 15
    },
    {
      "epoch": 0.008996345234748383,
      "grad_norm": 0.8031889200210571,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.753,
      "step": 16
    },
    {
      "epoch": 0.009558616811920157,
      "grad_norm": 0.8018306493759155,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.7226,
      "step": 17
    },
    {
      "epoch": 0.010120888389091932,
      "grad_norm": 0.867193341255188,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.6762,
      "step": 18
    },
    {
      "epoch": 0.010683159966263706,
      "grad_norm": 0.7935461401939392,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.7003,
      "step": 19
    },
    {
      "epoch": 0.011245431543435479,
      "grad_norm": 0.722403883934021,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.5762,
      "step": 20
    },
    {
      "epoch": 0.011807703120607253,
      "grad_norm": 0.7102622389793396,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.7118,
      "step": 21
    },
    {
      "epoch": 0.012369974697779028,
      "grad_norm": 0.8430309295654297,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.8028,
      "step": 22
    },
    {
      "epoch": 0.012932246274950801,
      "grad_norm": 0.7064953446388245,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.6563,
      "step": 23
    },
    {
      "epoch": 0.013494517852122575,
      "grad_norm": 0.7702734470367432,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.5956,
      "step": 24
    },
    {
      "epoch": 0.014056789429294348,
      "grad_norm": 0.8048498034477234,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.6192,
      "step": 25
    },
    {
      "epoch": 0.014619061006466124,
      "grad_norm": 0.769148588180542,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.6339,
      "step": 26
    },
    {
      "epoch": 0.015181332583637897,
      "grad_norm": 0.744289755821228,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.5355,
      "step": 27
    },
    {
      "epoch": 0.015743604160809672,
      "grad_norm": 0.7577502131462097,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.6171,
      "step": 28
    },
    {
      "epoch": 0.016305875737981444,
      "grad_norm": 0.824184238910675,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.6907,
      "step": 29
    },
    {
      "epoch": 0.01686814731515322,
      "grad_norm": 0.8315814733505249,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.5212,
      "step": 30
    },
    {
      "epoch": 0.01743041889232499,
      "grad_norm": 0.8605427742004395,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.5392,
      "step": 31
    },
    {
      "epoch": 0.017992690469496767,
      "grad_norm": 0.7918468117713928,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.4832,
      "step": 32
    },
    {
      "epoch": 0.018554962046668542,
      "grad_norm": 0.8169512152671814,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.5747,
      "step": 33
    },
    {
      "epoch": 0.019117233623840314,
      "grad_norm": 0.8559743165969849,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.5924,
      "step": 34
    },
    {
      "epoch": 0.01967950520101209,
      "grad_norm": 0.801286518573761,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.5012,
      "step": 35
    },
    {
      "epoch": 0.020241776778183864,
      "grad_norm": 0.8353800177574158,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.4738,
      "step": 36
    },
    {
      "epoch": 0.020804048355355636,
      "grad_norm": 0.7663769125938416,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.5555,
      "step": 37
    },
    {
      "epoch": 0.02136631993252741,
      "grad_norm": 0.7065837383270264,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.4453,
      "step": 38
    },
    {
      "epoch": 0.021928591509699186,
      "grad_norm": 0.8448126912117004,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.4219,
      "step": 39
    },
    {
      "epoch": 0.022490863086870958,
      "grad_norm": 0.8258911967277527,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.4561,
      "step": 40
    },
    {
      "epoch": 0.023053134664042733,
      "grad_norm": 0.8381558656692505,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.5233,
      "step": 41
    },
    {
      "epoch": 0.023615406241214505,
      "grad_norm": 0.9373324513435364,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.5464,
      "step": 42
    },
    {
      "epoch": 0.02417767781838628,
      "grad_norm": 0.9225761890411377,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.5119,
      "step": 43
    },
    {
      "epoch": 0.024739949395558056,
      "grad_norm": 0.9429646730422974,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.4632,
      "step": 44
    },
    {
      "epoch": 0.025302220972729828,
      "grad_norm": 0.9394980072975159,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.5444,
      "step": 45
    },
    {
      "epoch": 0.025864492549901603,
      "grad_norm": 1.0874422788619995,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.6728,
      "step": 46
    },
    {
      "epoch": 0.026426764127073378,
      "grad_norm": 1.0107288360595703,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.5739,
      "step": 47
    },
    {
      "epoch": 0.02698903570424515,
      "grad_norm": 1.1551225185394287,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.4969,
      "step": 48
    },
    {
      "epoch": 0.027551307281416925,
      "grad_norm": 1.3663979768753052,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.7977,
      "step": 49
    },
    {
      "epoch": 0.028113578858588697,
      "grad_norm": 2.2082934379577637,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.7978,
      "step": 50
    },
    {
      "epoch": 0.028113578858588697,
      "eval_loss": 0.6958192586898804,
      "eval_runtime": 60.4229,
      "eval_samples_per_second": 49.567,
      "eval_steps_per_second": 12.396,
      "step": 50
    },
    {
      "epoch": 0.028675850435760472,
      "grad_norm": 1.617163062095642,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.8293,
      "step": 51
    },
    {
      "epoch": 0.029238122012932247,
      "grad_norm": 1.5228155851364136,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.834,
      "step": 52
    },
    {
      "epoch": 0.02980039359010402,
      "grad_norm": 1.3696954250335693,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.8516,
      "step": 53
    },
    {
      "epoch": 0.030362665167275794,
      "grad_norm": 0.9437366724014282,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.7302,
      "step": 54
    },
    {
      "epoch": 0.03092493674444757,
      "grad_norm": 0.7396307587623596,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.6234,
      "step": 55
    },
    {
      "epoch": 0.031487208321619345,
      "grad_norm": 0.6352867484092712,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.6283,
      "step": 56
    },
    {
      "epoch": 0.03204947989879112,
      "grad_norm": 0.6413976550102234,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.7315,
      "step": 57
    },
    {
      "epoch": 0.03261175147596289,
      "grad_norm": 0.6398018598556519,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.5883,
      "step": 58
    },
    {
      "epoch": 0.03317402305313467,
      "grad_norm": 0.6652855277061462,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.6167,
      "step": 59
    },
    {
      "epoch": 0.03373629463030644,
      "grad_norm": 0.6662874817848206,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.6626,
      "step": 60
    },
    {
      "epoch": 0.03429856620747821,
      "grad_norm": 0.6972583532333374,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.6922,
      "step": 61
    },
    {
      "epoch": 0.03486083778464998,
      "grad_norm": 0.5826914310455322,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.5906,
      "step": 62
    },
    {
      "epoch": 0.03542310936182176,
      "grad_norm": 0.6414178013801575,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.5979,
      "step": 63
    },
    {
      "epoch": 0.03598538093899353,
      "grad_norm": 0.614965558052063,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.6806,
      "step": 64
    },
    {
      "epoch": 0.036547652516165305,
      "grad_norm": 0.6080766320228577,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.6056,
      "step": 65
    },
    {
      "epoch": 0.037109924093337084,
      "grad_norm": 0.599414587020874,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.5893,
      "step": 66
    },
    {
      "epoch": 0.037672195670508855,
      "grad_norm": 0.6107190847396851,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.6567,
      "step": 67
    },
    {
      "epoch": 0.03823446724768063,
      "grad_norm": 0.6296833157539368,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.6634,
      "step": 68
    },
    {
      "epoch": 0.038796738824852406,
      "grad_norm": 0.6974261999130249,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.5943,
      "step": 69
    },
    {
      "epoch": 0.03935901040202418,
      "grad_norm": 0.650062084197998,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.5836,
      "step": 70
    },
    {
      "epoch": 0.03992128197919595,
      "grad_norm": 0.6339553594589233,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.5484,
      "step": 71
    },
    {
      "epoch": 0.04048355355636773,
      "grad_norm": 0.6875646710395813,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.6858,
      "step": 72
    },
    {
      "epoch": 0.0410458251335395,
      "grad_norm": 0.64603191614151,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.623,
      "step": 73
    },
    {
      "epoch": 0.04160809671071127,
      "grad_norm": 0.6082363128662109,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.5215,
      "step": 74
    },
    {
      "epoch": 0.04217036828788305,
      "grad_norm": 0.6290409564971924,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.4936,
      "step": 75
    },
    {
      "epoch": 0.04273263986505482,
      "grad_norm": 0.6085454821586609,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.5462,
      "step": 76
    },
    {
      "epoch": 0.043294911442226594,
      "grad_norm": 0.7018570303916931,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.6391,
      "step": 77
    },
    {
      "epoch": 0.04385718301939837,
      "grad_norm": 0.5906082391738892,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.4946,
      "step": 78
    },
    {
      "epoch": 0.044419454596570145,
      "grad_norm": 0.645088791847229,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.4762,
      "step": 79
    },
    {
      "epoch": 0.044981726173741916,
      "grad_norm": 0.6799085140228271,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.543,
      "step": 80
    },
    {
      "epoch": 0.04554399775091369,
      "grad_norm": 0.6760143041610718,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.5326,
      "step": 81
    },
    {
      "epoch": 0.04610626932808547,
      "grad_norm": 0.6451261639595032,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.4561,
      "step": 82
    },
    {
      "epoch": 0.04666854090525724,
      "grad_norm": 0.6407421231269836,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.4899,
      "step": 83
    },
    {
      "epoch": 0.04723081248242901,
      "grad_norm": 0.659145712852478,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.4942,
      "step": 84
    },
    {
      "epoch": 0.04779308405960079,
      "grad_norm": 0.6394976377487183,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.4354,
      "step": 85
    },
    {
      "epoch": 0.04835535563677256,
      "grad_norm": 0.6170784831047058,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.455,
      "step": 86
    },
    {
      "epoch": 0.04891762721394433,
      "grad_norm": 0.6625480651855469,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.458,
      "step": 87
    },
    {
      "epoch": 0.04947989879111611,
      "grad_norm": 0.6765840649604797,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.3961,
      "step": 88
    },
    {
      "epoch": 0.05004217036828788,
      "grad_norm": 0.6819695234298706,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.5176,
      "step": 89
    },
    {
      "epoch": 0.050604441945459655,
      "grad_norm": 0.7992807626724243,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.4787,
      "step": 90
    },
    {
      "epoch": 0.051166713522631434,
      "grad_norm": 0.6881734132766724,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.4361,
      "step": 91
    },
    {
      "epoch": 0.051728985099803206,
      "grad_norm": 0.7603946328163147,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.4724,
      "step": 92
    },
    {
      "epoch": 0.05229125667697498,
      "grad_norm": 0.8886284828186035,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.5389,
      "step": 93
    },
    {
      "epoch": 0.052853528254146756,
      "grad_norm": 0.7603895664215088,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.5042,
      "step": 94
    },
    {
      "epoch": 0.05341579983131853,
      "grad_norm": 0.8904877305030823,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.6202,
      "step": 95
    },
    {
      "epoch": 0.0539780714084903,
      "grad_norm": 0.726782500743866,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.5166,
      "step": 96
    },
    {
      "epoch": 0.05454034298566207,
      "grad_norm": 0.8900876045227051,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.6052,
      "step": 97
    },
    {
      "epoch": 0.05510261456283385,
      "grad_norm": 0.919487714767456,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.5764,
      "step": 98
    },
    {
      "epoch": 0.05566488614000562,
      "grad_norm": 1.2879389524459839,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.6897,
      "step": 99
    },
    {
      "epoch": 0.056227157717177394,
      "grad_norm": 1.9414801597595215,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.4912,
      "step": 100
    },
    {
      "epoch": 0.056227157717177394,
      "eval_loss": 0.5721317529678345,
      "eval_runtime": 60.446,
      "eval_samples_per_second": 49.548,
      "eval_steps_per_second": 12.391,
      "step": 100
    },
    {
      "epoch": 0.05678942929434917,
      "grad_norm": 0.7900543212890625,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.7565,
      "step": 101
    },
    {
      "epoch": 0.057351700871520944,
      "grad_norm": 0.7188913226127625,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.6344,
      "step": 102
    },
    {
      "epoch": 0.057913972448692716,
      "grad_norm": 0.7000232338905334,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.7186,
      "step": 103
    },
    {
      "epoch": 0.058476244025864495,
      "grad_norm": 0.7585048675537109,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.742,
      "step": 104
    },
    {
      "epoch": 0.059038515603036266,
      "grad_norm": 0.6727244257926941,
      "learning_rate": 5e-05,
      "loss": 0.6335,
      "step": 105
    },
    {
      "epoch": 0.05960078718020804,
      "grad_norm": 0.635672926902771,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.7268,
      "step": 106
    },
    {
      "epoch": 0.06016305875737982,
      "grad_norm": 0.5601180791854858,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.6556,
      "step": 107
    },
    {
      "epoch": 0.06072533033455159,
      "grad_norm": 0.5831847786903381,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.5855,
      "step": 108
    },
    {
      "epoch": 0.06128760191172336,
      "grad_norm": 0.5406240224838257,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.663,
      "step": 109
    },
    {
      "epoch": 0.06184987348889514,
      "grad_norm": 0.5173934102058411,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.5705,
      "step": 110
    },
    {
      "epoch": 0.06241214506606691,
      "grad_norm": 0.5559889674186707,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.591,
      "step": 111
    },
    {
      "epoch": 0.06297441664323869,
      "grad_norm": 0.5619823336601257,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.5909,
      "step": 112
    },
    {
      "epoch": 0.06353668822041046,
      "grad_norm": 0.5657660365104675,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.6236,
      "step": 113
    },
    {
      "epoch": 0.06409895979758223,
      "grad_norm": 0.6045258641242981,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.5462,
      "step": 114
    },
    {
      "epoch": 0.064661231374754,
      "grad_norm": 0.583362340927124,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.5561,
      "step": 115
    },
    {
      "epoch": 0.06522350295192578,
      "grad_norm": 0.6070014834403992,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.6116,
      "step": 116
    },
    {
      "epoch": 0.06578577452909755,
      "grad_norm": 0.5519648194313049,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.591,
      "step": 117
    },
    {
      "epoch": 0.06634804610626933,
      "grad_norm": 0.5768078565597534,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.5516,
      "step": 118
    },
    {
      "epoch": 0.0669103176834411,
      "grad_norm": 0.5689049959182739,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.5451,
      "step": 119
    },
    {
      "epoch": 0.06747258926061288,
      "grad_norm": 0.611607551574707,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.5779,
      "step": 120
    },
    {
      "epoch": 0.06803486083778465,
      "grad_norm": 0.6133226752281189,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.5086,
      "step": 121
    },
    {
      "epoch": 0.06859713241495642,
      "grad_norm": 0.6262627243995667,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.606,
      "step": 122
    },
    {
      "epoch": 0.0691594039921282,
      "grad_norm": 0.5980281829833984,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.5636,
      "step": 123
    },
    {
      "epoch": 0.06972167556929997,
      "grad_norm": 0.6119388341903687,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.5313,
      "step": 124
    },
    {
      "epoch": 0.07028394714647175,
      "grad_norm": 0.5762614607810974,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.4927,
      "step": 125
    },
    {
      "epoch": 0.07084621872364352,
      "grad_norm": 0.656383752822876,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.632,
      "step": 126
    },
    {
      "epoch": 0.0714084903008153,
      "grad_norm": 0.5831140279769897,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.4951,
      "step": 127
    },
    {
      "epoch": 0.07197076187798707,
      "grad_norm": 0.6357362270355225,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.5096,
      "step": 128
    },
    {
      "epoch": 0.07253303345515884,
      "grad_norm": 0.6704001426696777,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.6236,
      "step": 129
    },
    {
      "epoch": 0.07309530503233061,
      "grad_norm": 0.5634742975234985,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.4083,
      "step": 130
    },
    {
      "epoch": 0.0736575766095024,
      "grad_norm": 0.6350815892219543,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.4295,
      "step": 131
    },
    {
      "epoch": 0.07421984818667417,
      "grad_norm": 0.6566004157066345,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.4454,
      "step": 132
    },
    {
      "epoch": 0.07478211976384594,
      "grad_norm": 0.6152843236923218,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.4091,
      "step": 133
    },
    {
      "epoch": 0.07534439134101771,
      "grad_norm": 0.7407167553901672,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.5535,
      "step": 134
    },
    {
      "epoch": 0.07590666291818948,
      "grad_norm": 0.6638815402984619,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.4944,
      "step": 135
    },
    {
      "epoch": 0.07646893449536125,
      "grad_norm": 0.6325174570083618,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.399,
      "step": 136
    },
    {
      "epoch": 0.07703120607253304,
      "grad_norm": 0.683302640914917,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.4391,
      "step": 137
    },
    {
      "epoch": 0.07759347764970481,
      "grad_norm": 0.6366162300109863,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.3555,
      "step": 138
    },
    {
      "epoch": 0.07815574922687658,
      "grad_norm": 0.67439866065979,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.4154,
      "step": 139
    },
    {
      "epoch": 0.07871802080404836,
      "grad_norm": 0.6465699076652527,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.378,
      "step": 140
    },
    {
      "epoch": 0.07928029238122013,
      "grad_norm": 0.6464277505874634,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.3991,
      "step": 141
    },
    {
      "epoch": 0.0798425639583919,
      "grad_norm": 0.6784489154815674,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.3894,
      "step": 142
    },
    {
      "epoch": 0.08040483553556367,
      "grad_norm": 0.7543945908546448,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.4289,
      "step": 143
    },
    {
      "epoch": 0.08096710711273546,
      "grad_norm": 0.728791356086731,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.4247,
      "step": 144
    },
    {
      "epoch": 0.08152937868990723,
      "grad_norm": 0.8233468532562256,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.5633,
      "step": 145
    },
    {
      "epoch": 0.082091650267079,
      "grad_norm": 0.7600351572036743,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.4912,
      "step": 146
    },
    {
      "epoch": 0.08265392184425077,
      "grad_norm": 0.823025107383728,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.5105,
      "step": 147
    },
    {
      "epoch": 0.08321619342142254,
      "grad_norm": 0.9203780889511108,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.5627,
      "step": 148
    },
    {
      "epoch": 0.08377846499859432,
      "grad_norm": 1.0546104907989502,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.644,
      "step": 149
    },
    {
      "epoch": 0.0843407365757661,
      "grad_norm": 1.2494205236434937,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.3982,
      "step": 150
    },
    {
      "epoch": 0.0843407365757661,
      "eval_loss": 0.5304158329963684,
      "eval_runtime": 60.4588,
      "eval_samples_per_second": 49.538,
      "eval_steps_per_second": 12.389,
      "step": 150
    },
    {
      "epoch": 0.08490300815293787,
      "grad_norm": 0.4984513819217682,
      "learning_rate": 1.553232954407171e-05,
      "loss": 0.6096,
      "step": 151
    },
    {
      "epoch": 0.08546527973010964,
      "grad_norm": 0.5627269744873047,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 0.6625,
      "step": 152
    },
    {
      "epoch": 0.08602755130728142,
      "grad_norm": 0.5592852234840393,
      "learning_rate": 1.435357758543015e-05,
      "loss": 0.6395,
      "step": 153
    },
    {
      "epoch": 0.08658982288445319,
      "grad_norm": 0.5454599857330322,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 0.6092,
      "step": 154
    },
    {
      "epoch": 0.08715209446162496,
      "grad_norm": 0.5715036392211914,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 0.6648,
      "step": 155
    },
    {
      "epoch": 0.08771436603879675,
      "grad_norm": 0.5295091271400452,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 0.5753,
      "step": 156
    },
    {
      "epoch": 0.08827663761596852,
      "grad_norm": 0.5699715614318848,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 0.6065,
      "step": 157
    },
    {
      "epoch": 0.08883890919314029,
      "grad_norm": 0.5259392261505127,
      "learning_rate": 1.157994445715706e-05,
      "loss": 0.5584,
      "step": 158
    },
    {
      "epoch": 0.08940118077031206,
      "grad_norm": 0.6719173192977905,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 0.613,
      "step": 159
    },
    {
      "epoch": 0.08996345234748383,
      "grad_norm": 0.5674479007720947,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 0.5934,
      "step": 160
    },
    {
      "epoch": 0.0905257239246556,
      "grad_norm": 0.6157873868942261,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 0.6343,
      "step": 161
    },
    {
      "epoch": 0.09108799550182738,
      "grad_norm": 0.6046155691146851,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.6198,
      "step": 162
    },
    {
      "epoch": 0.09165026707899916,
      "grad_norm": 0.5647667646408081,
      "learning_rate": 9.068759265665384e-06,
      "loss": 0.6186,
      "step": 163
    },
    {
      "epoch": 0.09221253865617093,
      "grad_norm": 0.5314237475395203,
      "learning_rate": 8.599558442598998e-06,
      "loss": 0.4828,
      "step": 164
    },
    {
      "epoch": 0.0927748102333427,
      "grad_norm": 0.5461083650588989,
      "learning_rate": 8.141676086873572e-06,
      "loss": 0.5188,
      "step": 165
    },
    {
      "epoch": 0.09333708181051448,
      "grad_norm": 0.5884983539581299,
      "learning_rate": 7.695237378953223e-06,
      "loss": 0.6037,
      "step": 166
    },
    {
      "epoch": 0.09389935338768625,
      "grad_norm": 0.5966055393218994,
      "learning_rate": 7.260364370723044e-06,
      "loss": 0.5606,
      "step": 167
    },
    {
      "epoch": 0.09446162496485802,
      "grad_norm": 0.5838494300842285,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.5659,
      "step": 168
    },
    {
      "epoch": 0.0950238965420298,
      "grad_norm": 0.5209010243415833,
      "learning_rate": 6.425787818636131e-06,
      "loss": 0.4508,
      "step": 169
    },
    {
      "epoch": 0.09558616811920158,
      "grad_norm": 0.5918200612068176,
      "learning_rate": 6.026312439675552e-06,
      "loss": 0.539,
      "step": 170
    },
    {
      "epoch": 0.09614843969637335,
      "grad_norm": 0.6243396997451782,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 0.608,
      "step": 171
    },
    {
      "epoch": 0.09671071127354512,
      "grad_norm": 0.6295937299728394,
      "learning_rate": 5.263533508961827e-06,
      "loss": 0.6011,
      "step": 172
    },
    {
      "epoch": 0.0972729828507169,
      "grad_norm": 0.6207840442657471,
      "learning_rate": 4.900438493352055e-06,
      "loss": 0.5537,
      "step": 173
    },
    {
      "epoch": 0.09783525442788867,
      "grad_norm": 0.5702746510505676,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.4806,
      "step": 174
    },
    {
      "epoch": 0.09839752600506045,
      "grad_norm": 0.6419647932052612,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.5246,
      "step": 175
    },
    {
      "epoch": 0.09895979758223222,
      "grad_norm": 0.5579126477241516,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.4836,
      "step": 176
    },
    {
      "epoch": 0.099522069159404,
      "grad_norm": 0.666792094707489,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.562,
      "step": 177
    },
    {
      "epoch": 0.10008434073657577,
      "grad_norm": 0.6097604036331177,
      "learning_rate": 3.271776770026963e-06,
      "loss": 0.4448,
      "step": 178
    },
    {
      "epoch": 0.10064661231374754,
      "grad_norm": 0.6671866774559021,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 0.534,
      "step": 179
    },
    {
      "epoch": 0.10120888389091931,
      "grad_norm": 0.6225870847702026,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 0.4926,
      "step": 180
    },
    {
      "epoch": 0.10177115546809108,
      "grad_norm": 0.6297417283058167,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.4599,
      "step": 181
    },
    {
      "epoch": 0.10233342704526287,
      "grad_norm": 0.5996748208999634,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 0.4007,
      "step": 182
    },
    {
      "epoch": 0.10289569862243464,
      "grad_norm": 0.7020567059516907,
      "learning_rate": 1.962316193157593e-06,
      "loss": 0.467,
      "step": 183
    },
    {
      "epoch": 0.10345797019960641,
      "grad_norm": 0.6249246001243591,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 0.4417,
      "step": 184
    },
    {
      "epoch": 0.10402024177677818,
      "grad_norm": 0.6193715333938599,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 0.4229,
      "step": 185
    },
    {
      "epoch": 0.10458251335394995,
      "grad_norm": 0.6512302756309509,
      "learning_rate": 1.333670137599713e-06,
      "loss": 0.4499,
      "step": 186
    },
    {
      "epoch": 0.10514478493112173,
      "grad_norm": 0.6990061402320862,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 0.4468,
      "step": 187
    },
    {
      "epoch": 0.10570705650829351,
      "grad_norm": 0.7123408317565918,
      "learning_rate": 9.810017062595322e-07,
      "loss": 0.4648,
      "step": 188
    },
    {
      "epoch": 0.10626932808546528,
      "grad_norm": 0.6349745392799377,
      "learning_rate": 8.247462563808817e-07,
      "loss": 0.3529,
      "step": 189
    },
    {
      "epoch": 0.10683159966263706,
      "grad_norm": 0.682070791721344,
      "learning_rate": 6.819348298638839e-07,
      "loss": 0.4053,
      "step": 190
    },
    {
      "epoch": 0.10739387123980883,
      "grad_norm": 0.7470072507858276,
      "learning_rate": 5.526064699265753e-07,
      "loss": 0.4741,
      "step": 191
    },
    {
      "epoch": 0.1079561428169806,
      "grad_norm": 0.7900189757347107,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.4458,
      "step": 192
    },
    {
      "epoch": 0.10851841439415237,
      "grad_norm": 0.7109037041664124,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 0.3639,
      "step": 193
    },
    {
      "epoch": 0.10908068597132414,
      "grad_norm": 0.8317274451255798,
      "learning_rate": 2.458548727494292e-07,
      "loss": 0.4855,
      "step": 194
    },
    {
      "epoch": 0.10964295754849593,
      "grad_norm": 0.80501788854599,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 0.5061,
      "step": 195
    },
    {
      "epoch": 0.1102052291256677,
      "grad_norm": 0.7943066954612732,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 0.4833,
      "step": 196
    },
    {
      "epoch": 0.11076750070283947,
      "grad_norm": 0.8239003419876099,
      "learning_rate": 6.150154258476315e-08,
      "loss": 0.5212,
      "step": 197
    },
    {
      "epoch": 0.11132977228001124,
      "grad_norm": 1.0709213018417358,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 0.6827,
      "step": 198
    },
    {
      "epoch": 0.11189204385718302,
      "grad_norm": 0.9007803797721863,
      "learning_rate": 6.834750376549792e-09,
      "loss": 0.5218,
      "step": 199
    },
    {
      "epoch": 0.11245431543435479,
      "grad_norm": 1.3899116516113281,
      "learning_rate": 0.0,
      "loss": 0.5689,
      "step": 200
    },
    {
      "epoch": 0.11245431543435479,
      "eval_loss": 0.5245915055274963,
      "eval_runtime": 60.4684,
      "eval_samples_per_second": 49.53,
      "eval_steps_per_second": 12.387,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.26629570494464e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}