{
  "best_metric": 0.5461220145225525,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.0203190084323885,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.0001015950421619425, |
|
"grad_norm": 2.2005860805511475, |
|
"learning_rate": 1.001e-05, |
|
"loss": 0.646, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0001015950421619425, |
|
"eval_loss": 0.6843345165252686, |
|
"eval_runtime": 515.2648, |
|
"eval_samples_per_second": 8.044, |
|
"eval_steps_per_second": 2.013, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.000203190084323885, |
|
"grad_norm": 2.192756175994873, |
|
"learning_rate": 2.002e-05, |
|
"loss": 0.7142, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00030478512648582747, |
|
"grad_norm": 2.6510324478149414, |
|
"learning_rate": 3.0029999999999995e-05, |
|
"loss": 0.8049, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.00040638016864777, |
|
"grad_norm": 2.7101404666900635, |
|
"learning_rate": 4.004e-05, |
|
"loss": 0.7722, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0005079752108097125, |
|
"grad_norm": 3.240100145339966, |
|
"learning_rate": 5.005e-05, |
|
"loss": 0.79, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0006095702529716549, |
|
"grad_norm": 3.555654525756836, |
|
"learning_rate": 6.005999999999999e-05, |
|
"loss": 1.065, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0007111652951335975, |
|
"grad_norm": 4.47487735748291, |
|
"learning_rate": 7.006999999999998e-05, |
|
"loss": 0.863, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.00081276033729554, |
|
"grad_norm": 2.9086074829101562, |
|
"learning_rate": 8.008e-05, |
|
"loss": 0.9118, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0009143553794574825, |
|
"grad_norm": 3.0420775413513184, |
|
"learning_rate": 9.009e-05, |
|
"loss": 0.7969, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.001015950421619425, |
|
"grad_norm": 3.648852825164795, |
|
"learning_rate": 0.0001001, |
|
"loss": 1.228, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0011175454637813674, |
|
"grad_norm": 3.1643669605255127, |
|
"learning_rate": 9.957315789473684e-05, |
|
"loss": 0.9534, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0012191405059433099, |
|
"grad_norm": 3.5377748012542725, |
|
"learning_rate": 9.904631578947367e-05, |
|
"loss": 0.9373, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0013207355481052526, |
|
"grad_norm": 3.505016565322876, |
|
"learning_rate": 9.851947368421052e-05, |
|
"loss": 1.179, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.001422330590267195, |
|
"grad_norm": 3.4394781589508057, |
|
"learning_rate": 9.799263157894736e-05, |
|
"loss": 1.1948, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0015239256324291375, |
|
"grad_norm": 3.656883955001831, |
|
"learning_rate": 9.746578947368421e-05, |
|
"loss": 1.1357, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.00162552067459108, |
|
"grad_norm": 4.39939546585083, |
|
"learning_rate": 9.693894736842104e-05, |
|
"loss": 1.2653, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0017271157167530224, |
|
"grad_norm": 3.9846978187561035, |
|
"learning_rate": 9.641210526315789e-05, |
|
"loss": 1.1856, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.001828710758914965, |
|
"grad_norm": 3.4548585414886475, |
|
"learning_rate": 9.588526315789473e-05, |
|
"loss": 1.1261, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0019303058010769075, |
|
"grad_norm": 3.6354310512542725, |
|
"learning_rate": 9.535842105263157e-05, |
|
"loss": 1.0152, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.00203190084323885, |
|
"grad_norm": 3.5814967155456543, |
|
"learning_rate": 9.483157894736841e-05, |
|
"loss": 1.1737, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0021334958854007926, |
|
"grad_norm": 3.7806692123413086, |
|
"learning_rate": 9.430473684210526e-05, |
|
"loss": 1.1642, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.002235090927562735, |
|
"grad_norm": 3.724287509918213, |
|
"learning_rate": 9.37778947368421e-05, |
|
"loss": 1.2753, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0023366859697246775, |
|
"grad_norm": 3.3061375617980957, |
|
"learning_rate": 9.325105263157894e-05, |
|
"loss": 0.9576, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0024382810118866198, |
|
"grad_norm": 4.0224714279174805, |
|
"learning_rate": 9.272421052631578e-05, |
|
"loss": 1.28, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0025398760540485624, |
|
"grad_norm": 3.936861991882324, |
|
"learning_rate": 9.219736842105263e-05, |
|
"loss": 0.9267, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.002641471096210505, |
|
"grad_norm": 4.16921854019165, |
|
"learning_rate": 9.167052631578946e-05, |
|
"loss": 1.2012, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.0027430661383724473, |
|
"grad_norm": 3.989863157272339, |
|
"learning_rate": 9.114368421052632e-05, |
|
"loss": 1.2667, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.00284466118053439, |
|
"grad_norm": 4.120609760284424, |
|
"learning_rate": 9.061684210526315e-05, |
|
"loss": 1.1629, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.0029462562226963323, |
|
"grad_norm": 3.723917245864868, |
|
"learning_rate": 9.009e-05, |
|
"loss": 1.1301, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.003047851264858275, |
|
"grad_norm": 4.069361686706543, |
|
"learning_rate": 8.956315789473683e-05, |
|
"loss": 1.1755, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0031494463070202176, |
|
"grad_norm": 3.857142925262451, |
|
"learning_rate": 8.903631578947368e-05, |
|
"loss": 1.1941, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.00325104134918216, |
|
"grad_norm": 4.503450870513916, |
|
"learning_rate": 8.850947368421052e-05, |
|
"loss": 1.3796, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0033526363913441025, |
|
"grad_norm": 5.638123989105225, |
|
"learning_rate": 8.798263157894736e-05, |
|
"loss": 1.2541, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0034542314335060447, |
|
"grad_norm": 4.330247402191162, |
|
"learning_rate": 8.745578947368422e-05, |
|
"loss": 1.133, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0035558264756679874, |
|
"grad_norm": 5.019191741943359, |
|
"learning_rate": 8.692894736842105e-05, |
|
"loss": 1.5509, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.00365742151782993, |
|
"grad_norm": 5.650910377502441, |
|
"learning_rate": 8.64021052631579e-05, |
|
"loss": 1.3531, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.0037590165599918723, |
|
"grad_norm": 6.807903289794922, |
|
"learning_rate": 8.587526315789473e-05, |
|
"loss": 1.4887, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.003860611602153815, |
|
"grad_norm": 5.474514484405518, |
|
"learning_rate": 8.534842105263157e-05, |
|
"loss": 1.1593, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.003962206644315757, |
|
"grad_norm": 7.569794178009033, |
|
"learning_rate": 8.482157894736842e-05, |
|
"loss": 1.5063, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0040638016864777, |
|
"grad_norm": 6.703556060791016, |
|
"learning_rate": 8.429473684210525e-05, |
|
"loss": 1.2757, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.004165396728639643, |
|
"grad_norm": 7.127315044403076, |
|
"learning_rate": 8.376789473684211e-05, |
|
"loss": 1.5994, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.004266991770801585, |
|
"grad_norm": 7.127439975738525, |
|
"learning_rate": 8.324105263157894e-05, |
|
"loss": 1.5217, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.004368586812963527, |
|
"grad_norm": 7.988884449005127, |
|
"learning_rate": 8.271421052631579e-05, |
|
"loss": 1.5936, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.00447018185512547, |
|
"grad_norm": 6.814123153686523, |
|
"learning_rate": 8.218736842105262e-05, |
|
"loss": 1.3053, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.004571776897287412, |
|
"grad_norm": 7.040807723999023, |
|
"learning_rate": 8.166052631578947e-05, |
|
"loss": 1.2787, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.004673371939449355, |
|
"grad_norm": 7.2124834060668945, |
|
"learning_rate": 8.113368421052631e-05, |
|
"loss": 0.9922, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.004774966981611298, |
|
"grad_norm": 11.030911445617676, |
|
"learning_rate": 8.060684210526315e-05, |
|
"loss": 1.3986, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.0048765620237732395, |
|
"grad_norm": 10.35793685913086, |
|
"learning_rate": 8.008e-05, |
|
"loss": 1.0637, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.004978157065935182, |
|
"grad_norm": 11.990375518798828, |
|
"learning_rate": 7.955315789473684e-05, |
|
"loss": 1.4477, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.005079752108097125, |
|
"grad_norm": 15.286636352539062, |
|
"learning_rate": 7.902631578947368e-05, |
|
"loss": 2.0203, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.005079752108097125, |
|
"eval_loss": 0.6280013918876648, |
|
"eval_runtime": 518.3017, |
|
"eval_samples_per_second": 7.997, |
|
"eval_steps_per_second": 2.001, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0051813471502590676, |
|
"grad_norm": 2.2037503719329834, |
|
"learning_rate": 7.849947368421052e-05, |
|
"loss": 0.6711, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.00528294219242101, |
|
"grad_norm": 1.9842642545700073, |
|
"learning_rate": 7.797263157894736e-05, |
|
"loss": 0.6906, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.005384537234582952, |
|
"grad_norm": 2.058812379837036, |
|
"learning_rate": 7.744578947368421e-05, |
|
"loss": 0.5586, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.005486132276744895, |
|
"grad_norm": 2.2531464099884033, |
|
"learning_rate": 7.691894736842104e-05, |
|
"loss": 0.9127, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.005587727318906837, |
|
"grad_norm": 2.2486493587493896, |
|
"learning_rate": 7.63921052631579e-05, |
|
"loss": 0.8567, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.00568932236106878, |
|
"grad_norm": 2.560076951980591, |
|
"learning_rate": 7.586526315789473e-05, |
|
"loss": 0.8653, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.005790917403230723, |
|
"grad_norm": 2.5297718048095703, |
|
"learning_rate": 7.533842105263158e-05, |
|
"loss": 0.8544, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.0058925124453926645, |
|
"grad_norm": 2.6768887042999268, |
|
"learning_rate": 7.481157894736841e-05, |
|
"loss": 1.1578, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.005994107487554607, |
|
"grad_norm": 2.5234925746917725, |
|
"learning_rate": 7.428473684210526e-05, |
|
"loss": 1.0016, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.00609570252971655, |
|
"grad_norm": 2.62858247756958, |
|
"learning_rate": 7.375789473684209e-05, |
|
"loss": 1.0307, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0061972975718784925, |
|
"grad_norm": 2.6815719604492188, |
|
"learning_rate": 7.323105263157895e-05, |
|
"loss": 1.0425, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.006298892614040435, |
|
"grad_norm": 2.671543836593628, |
|
"learning_rate": 7.270421052631578e-05, |
|
"loss": 0.9924, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.006400487656202377, |
|
"grad_norm": 2.5434019565582275, |
|
"learning_rate": 7.217736842105263e-05, |
|
"loss": 0.962, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.00650208269836432, |
|
"grad_norm": 3.021360397338867, |
|
"learning_rate": 7.165052631578947e-05, |
|
"loss": 1.1692, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.006603677740526262, |
|
"grad_norm": 2.617884397506714, |
|
"learning_rate": 7.11236842105263e-05, |
|
"loss": 0.9373, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.006705272782688205, |
|
"grad_norm": 2.834733247756958, |
|
"learning_rate": 7.059684210526315e-05, |
|
"loss": 1.0911, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.006806867824850148, |
|
"grad_norm": 3.138725996017456, |
|
"learning_rate": 7.006999999999998e-05, |
|
"loss": 1.0814, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.0069084628670120895, |
|
"grad_norm": 3.1022236347198486, |
|
"learning_rate": 6.954315789473684e-05, |
|
"loss": 1.2817, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.007010057909174032, |
|
"grad_norm": 2.899486541748047, |
|
"learning_rate": 6.901631578947368e-05, |
|
"loss": 0.9868, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.007111652951335975, |
|
"grad_norm": 3.156597137451172, |
|
"learning_rate": 6.848947368421052e-05, |
|
"loss": 1.0047, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0072132479934979175, |
|
"grad_norm": 3.3846867084503174, |
|
"learning_rate": 6.796263157894737e-05, |
|
"loss": 1.143, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.00731484303565986, |
|
"grad_norm": 3.0827393531799316, |
|
"learning_rate": 6.74357894736842e-05, |
|
"loss": 1.1043, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.007416438077821802, |
|
"grad_norm": 2.9473469257354736, |
|
"learning_rate": 6.690894736842105e-05, |
|
"loss": 1.074, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.007518033119983745, |
|
"grad_norm": 3.0625174045562744, |
|
"learning_rate": 6.638210526315788e-05, |
|
"loss": 1.0141, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.007619628162145687, |
|
"grad_norm": 3.610325813293457, |
|
"learning_rate": 6.585526315789474e-05, |
|
"loss": 1.2885, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.00772122320430763, |
|
"grad_norm": 3.014136791229248, |
|
"learning_rate": 6.532842105263157e-05, |
|
"loss": 0.9769, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.007822818246469573, |
|
"grad_norm": 3.436788558959961, |
|
"learning_rate": 6.480157894736842e-05, |
|
"loss": 1.1911, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.007924413288631514, |
|
"grad_norm": 3.2950663566589355, |
|
"learning_rate": 6.427473684210526e-05, |
|
"loss": 1.2536, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.008026008330793458, |
|
"grad_norm": 3.095257043838501, |
|
"learning_rate": 6.37478947368421e-05, |
|
"loss": 1.0352, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.0081276033729554, |
|
"grad_norm": 3.387840509414673, |
|
"learning_rate": 6.322105263157894e-05, |
|
"loss": 1.1025, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.008229198415117342, |
|
"grad_norm": 3.795701503753662, |
|
"learning_rate": 6.269421052631577e-05, |
|
"loss": 1.1707, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.008330793457279285, |
|
"grad_norm": 3.4337081909179688, |
|
"learning_rate": 6.216736842105263e-05, |
|
"loss": 0.9816, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.008432388499441227, |
|
"grad_norm": 3.789355754852295, |
|
"learning_rate": 6.164052631578947e-05, |
|
"loss": 1.3528, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.00853398354160317, |
|
"grad_norm": 4.3252434730529785, |
|
"learning_rate": 6.111368421052631e-05, |
|
"loss": 1.3132, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.008635578583765112, |
|
"grad_norm": 3.853529214859009, |
|
"learning_rate": 6.058684210526315e-05, |
|
"loss": 1.0348, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.008737173625927054, |
|
"grad_norm": 4.666858673095703, |
|
"learning_rate": 6.005999999999999e-05, |
|
"loss": 1.0574, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.008838768668088998, |
|
"grad_norm": 3.7602200508117676, |
|
"learning_rate": 5.953315789473684e-05, |
|
"loss": 1.2996, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.00894036371025094, |
|
"grad_norm": 4.555033206939697, |
|
"learning_rate": 5.9006315789473676e-05, |
|
"loss": 1.4222, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.009041958752412883, |
|
"grad_norm": 4.328731060028076, |
|
"learning_rate": 5.847947368421053e-05, |
|
"loss": 1.1422, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.009143553794574825, |
|
"grad_norm": 4.52231502532959, |
|
"learning_rate": 5.795263157894737e-05, |
|
"loss": 1.3654, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.009245148836736767, |
|
"grad_norm": 5.197935104370117, |
|
"learning_rate": 5.742578947368421e-05, |
|
"loss": 1.2897, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.00934674387889871, |
|
"grad_norm": 5.237155914306641, |
|
"learning_rate": 5.6898947368421046e-05, |
|
"loss": 1.4891, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.009448338921060652, |
|
"grad_norm": 5.300315856933594, |
|
"learning_rate": 5.6372105263157886e-05, |
|
"loss": 1.2487, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.009549933963222595, |
|
"grad_norm": 5.781769752502441, |
|
"learning_rate": 5.584526315789473e-05, |
|
"loss": 1.1509, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.009651529005384537, |
|
"grad_norm": 6.380777359008789, |
|
"learning_rate": 5.531842105263158e-05, |
|
"loss": 1.4483, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.009753124047546479, |
|
"grad_norm": 7.0414204597473145, |
|
"learning_rate": 5.4791578947368424e-05, |
|
"loss": 1.4183, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.009854719089708423, |
|
"grad_norm": 6.7459635734558105, |
|
"learning_rate": 5.426473684210526e-05, |
|
"loss": 1.2053, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.009956314131870364, |
|
"grad_norm": 8.890039443969727, |
|
"learning_rate": 5.37378947368421e-05, |
|
"loss": 1.4122, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.010057909174032308, |
|
"grad_norm": 7.953348636627197, |
|
"learning_rate": 5.321105263157894e-05, |
|
"loss": 0.6997, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.01015950421619425, |
|
"grad_norm": 8.372994422912598, |
|
"learning_rate": 5.268421052631578e-05, |
|
"loss": 0.9565, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.01015950421619425, |
|
"eval_loss": 0.5977502465248108, |
|
"eval_runtime": 519.035, |
|
"eval_samples_per_second": 7.986, |
|
"eval_steps_per_second": 1.998, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.010261099258356192, |
|
"grad_norm": 1.7353254556655884, |
|
"learning_rate": 5.2157368421052626e-05, |
|
"loss": 0.6462, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.010362694300518135, |
|
"grad_norm": 1.8115421533584595, |
|
"learning_rate": 5.163052631578947e-05, |
|
"loss": 0.7568, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.010464289342680077, |
|
"grad_norm": 2.0411505699157715, |
|
"learning_rate": 5.110368421052632e-05, |
|
"loss": 0.8423, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.01056588438484202, |
|
"grad_norm": 1.840236783027649, |
|
"learning_rate": 5.057684210526316e-05, |
|
"loss": 0.6614, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.010667479427003962, |
|
"grad_norm": 2.0278501510620117, |
|
"learning_rate": 5.005e-05, |
|
"loss": 0.9404, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.010769074469165904, |
|
"grad_norm": 2.038224935531616, |
|
"learning_rate": 4.9523157894736836e-05, |
|
"loss": 0.7997, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.010870669511327848, |
|
"grad_norm": 1.961540937423706, |
|
"learning_rate": 4.899631578947368e-05, |
|
"loss": 0.88, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.01097226455348979, |
|
"grad_norm": 1.8542038202285767, |
|
"learning_rate": 4.846947368421052e-05, |
|
"loss": 0.7586, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.011073859595651733, |
|
"grad_norm": 2.5004920959472656, |
|
"learning_rate": 4.794263157894737e-05, |
|
"loss": 1.1421, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.011175454637813675, |
|
"grad_norm": 2.2767207622528076, |
|
"learning_rate": 4.7415789473684206e-05, |
|
"loss": 0.8825, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.011277049679975617, |
|
"grad_norm": 2.6416478157043457, |
|
"learning_rate": 4.688894736842105e-05, |
|
"loss": 1.0648, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.01137864472213756, |
|
"grad_norm": 2.6188547611236572, |
|
"learning_rate": 4.636210526315789e-05, |
|
"loss": 1.0757, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.011480239764299502, |
|
"grad_norm": 2.7085273265838623, |
|
"learning_rate": 4.583526315789473e-05, |
|
"loss": 1.0747, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.011581834806461445, |
|
"grad_norm": 2.96585750579834, |
|
"learning_rate": 4.530842105263158e-05, |
|
"loss": 1.1136, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.011683429848623387, |
|
"grad_norm": 3.3075642585754395, |
|
"learning_rate": 4.4781578947368416e-05, |
|
"loss": 1.1926, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.011785024890785329, |
|
"grad_norm": 2.8126935958862305, |
|
"learning_rate": 4.425473684210526e-05, |
|
"loss": 1.2221, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.011886619932947273, |
|
"grad_norm": 2.7019283771514893, |
|
"learning_rate": 4.372789473684211e-05, |
|
"loss": 1.0727, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.011988214975109214, |
|
"grad_norm": 3.095580577850342, |
|
"learning_rate": 4.320105263157895e-05, |
|
"loss": 1.0674, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.012089810017271158, |
|
"grad_norm": 3.138235330581665, |
|
"learning_rate": 4.2674210526315786e-05, |
|
"loss": 1.3973, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.0121914050594331, |
|
"grad_norm": 2.981416702270508, |
|
"learning_rate": 4.2147368421052626e-05, |
|
"loss": 1.2259, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.012293000101595042, |
|
"grad_norm": 2.793827772140503, |
|
"learning_rate": 4.162052631578947e-05, |
|
"loss": 1.1706, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.012394595143756985, |
|
"grad_norm": 3.055459976196289, |
|
"learning_rate": 4.109368421052631e-05, |
|
"loss": 1.1658, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.012496190185918927, |
|
"grad_norm": 2.991947889328003, |
|
"learning_rate": 4.056684210526316e-05, |
|
"loss": 1.1881, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.01259778522808087, |
|
"grad_norm": 2.9653897285461426, |
|
"learning_rate": 4.004e-05, |
|
"loss": 1.1678, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.012699380270242812, |
|
"grad_norm": 2.954136371612549, |
|
"learning_rate": 3.951315789473684e-05, |
|
"loss": 1.0478, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.012800975312404754, |
|
"grad_norm": 3.4325461387634277, |
|
"learning_rate": 3.898631578947368e-05, |
|
"loss": 1.2497, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.012902570354566698, |
|
"grad_norm": 4.589744567871094, |
|
"learning_rate": 3.845947368421052e-05, |
|
"loss": 1.2284, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.01300416539672864, |
|
"grad_norm": 3.4360063076019287, |
|
"learning_rate": 3.7932631578947367e-05, |
|
"loss": 1.1895, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.013105760438890583, |
|
"grad_norm": 3.6225502490997314, |
|
"learning_rate": 3.7405789473684206e-05, |
|
"loss": 1.182, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.013207355481052525, |
|
"grad_norm": 3.9139211177825928, |
|
"learning_rate": 3.6878947368421045e-05, |
|
"loss": 1.4435, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.013308950523214466, |
|
"grad_norm": 3.7881407737731934, |
|
"learning_rate": 3.635210526315789e-05, |
|
"loss": 1.1918, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.01341054556537641, |
|
"grad_norm": 3.863058567047119, |
|
"learning_rate": 3.582526315789474e-05, |
|
"loss": 1.1332, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.013512140607538352, |
|
"grad_norm": 3.9372336864471436, |
|
"learning_rate": 3.5298421052631576e-05, |
|
"loss": 1.0857, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.013613735649700295, |
|
"grad_norm": 4.330385684967041, |
|
"learning_rate": 3.477157894736842e-05, |
|
"loss": 1.1804, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.013715330691862237, |
|
"grad_norm": 4.239872455596924, |
|
"learning_rate": 3.424473684210526e-05, |
|
"loss": 1.2745, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.013816925734024179, |
|
"grad_norm": 4.237067699432373, |
|
"learning_rate": 3.37178947368421e-05, |
|
"loss": 1.0984, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.013918520776186123, |
|
"grad_norm": 4.873085975646973, |
|
"learning_rate": 3.319105263157894e-05, |
|
"loss": 1.4904, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.014020115818348064, |
|
"grad_norm": 5.1811842918396, |
|
"learning_rate": 3.2664210526315786e-05, |
|
"loss": 1.3309, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.014121710860510008, |
|
"grad_norm": 5.059803009033203, |
|
"learning_rate": 3.213736842105263e-05, |
|
"loss": 1.1333, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.01422330590267195, |
|
"grad_norm": 5.975893974304199, |
|
"learning_rate": 3.161052631578947e-05, |
|
"loss": 1.387, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.014324900944833891, |
|
"grad_norm": 6.209765911102295, |
|
"learning_rate": 3.108368421052632e-05, |
|
"loss": 1.681, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.014426495986995835, |
|
"grad_norm": 5.893293857574463, |
|
"learning_rate": 3.0556842105263156e-05, |
|
"loss": 1.4305, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.014528091029157777, |
|
"grad_norm": 8.19773006439209, |
|
"learning_rate": 3.0029999999999995e-05, |
|
"loss": 1.5269, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.01462968607131972, |
|
"grad_norm": 7.0231547355651855, |
|
"learning_rate": 2.9503157894736838e-05, |
|
"loss": 1.4278, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.014731281113481662, |
|
"grad_norm": 6.790130615234375, |
|
"learning_rate": 2.8976315789473684e-05, |
|
"loss": 1.2506, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.014832876155643604, |
|
"grad_norm": 6.087902069091797, |
|
"learning_rate": 2.8449473684210523e-05, |
|
"loss": 0.8843, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.014934471197805547, |
|
"grad_norm": 7.544525623321533, |
|
"learning_rate": 2.7922631578947366e-05, |
|
"loss": 0.9716, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.01503606623996749, |
|
"grad_norm": 7.611911296844482, |
|
"learning_rate": 2.7395789473684212e-05, |
|
"loss": 1.0314, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.015137661282129433, |
|
"grad_norm": 8.211386680603027, |
|
"learning_rate": 2.686894736842105e-05, |
|
"loss": 0.9311, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.015239256324291375, |
|
"grad_norm": 12.661590576171875, |
|
"learning_rate": 2.634210526315789e-05, |
|
"loss": 0.8717, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.015239256324291375, |
|
"eval_loss": 0.5625200271606445, |
|
"eval_runtime": 517.8623, |
|
"eval_samples_per_second": 8.004, |
|
"eval_steps_per_second": 2.002, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.015340851366453316, |
|
"grad_norm": 1.5939819812774658, |
|
"learning_rate": 2.5815263157894736e-05, |
|
"loss": 0.7066, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.01544244640861526, |
|
"grad_norm": 1.6483330726623535, |
|
"learning_rate": 2.528842105263158e-05, |
|
"loss": 0.6955, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.015544041450777202, |
|
"grad_norm": 1.583403468132019, |
|
"learning_rate": 2.4761578947368418e-05, |
|
"loss": 0.6968, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.015645636492939145, |
|
"grad_norm": 1.8011175394058228, |
|
"learning_rate": 2.423473684210526e-05, |
|
"loss": 0.7792, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.015747231535101085, |
|
"grad_norm": 1.8397107124328613, |
|
"learning_rate": 2.3707894736842103e-05, |
|
"loss": 0.7355, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.01584882657726303, |
|
"grad_norm": 1.9635233879089355, |
|
"learning_rate": 2.3181052631578946e-05, |
|
"loss": 0.843, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.015950421619424972, |
|
"grad_norm": 2.132654905319214, |
|
"learning_rate": 2.265421052631579e-05, |
|
"loss": 0.9858, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.016052016661586916, |
|
"grad_norm": 2.553905487060547, |
|
"learning_rate": 2.212736842105263e-05, |
|
"loss": 1.091, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.016153611703748856, |
|
"grad_norm": 1.965105414390564, |
|
"learning_rate": 2.1600526315789474e-05, |
|
"loss": 0.8878, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.0162552067459108, |
|
"grad_norm": 2.0289783477783203, |
|
"learning_rate": 2.1073684210526313e-05, |
|
"loss": 0.8764, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.016356801788072743, |
|
"grad_norm": 2.401559352874756, |
|
"learning_rate": 2.0546842105263155e-05, |
|
"loss": 1.0646, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.016458396830234683, |
|
"grad_norm": 2.2588117122650146, |
|
"learning_rate": 2.002e-05, |
|
"loss": 0.9658, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.016559991872396627, |
|
"grad_norm": 2.1253316402435303, |
|
"learning_rate": 1.949315789473684e-05, |
|
"loss": 0.8729, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.01666158691455857, |
|
"grad_norm": 2.832216739654541, |
|
"learning_rate": 1.8966315789473683e-05, |
|
"loss": 1.2091, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.01676318195672051, |
|
"grad_norm": 2.343205690383911, |
|
"learning_rate": 1.8439473684210522e-05, |
|
"loss": 1.0685, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.016864776998882454, |
|
"grad_norm": 2.5135369300842285, |
|
"learning_rate": 1.791263157894737e-05, |
|
"loss": 0.9415, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.016966372041044397, |
|
"grad_norm": 2.691727876663208, |
|
"learning_rate": 1.738578947368421e-05, |
|
"loss": 1.0256, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.01706796708320634, |
|
"grad_norm": 2.4180541038513184, |
|
"learning_rate": 1.685894736842105e-05, |
|
"loss": 0.9336, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.01716956212536828, |
|
"grad_norm": 2.8233697414398193, |
|
"learning_rate": 1.6332105263157893e-05, |
|
"loss": 1.0405, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.017271157167530225, |
|
"grad_norm": 2.802211284637451, |
|
"learning_rate": 1.5805263157894735e-05, |
|
"loss": 1.0237, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.017372752209692168, |
|
"grad_norm": 2.543400764465332, |
|
"learning_rate": 1.5278421052631578e-05, |
|
"loss": 1.0185, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.017474347251854108, |
|
"grad_norm": 2.7977938652038574, |
|
"learning_rate": 1.4751578947368419e-05, |
|
"loss": 1.0385, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.017575942294016052, |
|
"grad_norm": 3.0091969966888428, |
|
"learning_rate": 1.4224736842105262e-05, |
|
"loss": 1.184, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.017677537336177995, |
|
"grad_norm": 2.7623536586761475, |
|
"learning_rate": 1.3697894736842106e-05, |
|
"loss": 1.0893, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.017779132378339935, |
|
"grad_norm": 3.233461618423462, |
|
"learning_rate": 1.3171052631578945e-05, |
|
"loss": 1.2953, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.01788072742050188, |
|
"grad_norm": 2.841423511505127, |
|
"learning_rate": 1.264421052631579e-05, |
|
"loss": 1.1212, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.017982322462663822, |
|
"grad_norm": 3.207925796508789, |
|
"learning_rate": 1.211736842105263e-05, |
|
"loss": 1.2039, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.018083917504825766, |
|
"grad_norm": 2.9837098121643066, |
|
"learning_rate": 1.1590526315789473e-05, |
|
"loss": 1.0074, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.018185512546987706, |
|
"grad_norm": 3.340939521789551, |
|
"learning_rate": 1.1063684210526316e-05, |
|
"loss": 1.2716, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.01828710758914965, |
|
"grad_norm": 3.5256423950195312, |
|
"learning_rate": 1.0536842105263156e-05, |
|
"loss": 1.0574, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.018388702631311593, |
|
"grad_norm": 3.2755520343780518, |
|
"learning_rate": 1.001e-05, |
|
"loss": 1.1183, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.018490297673473533, |
|
"grad_norm": 3.5652754306793213, |
|
"learning_rate": 9.483157894736842e-06, |
|
"loss": 1.1908, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.018591892715635477, |
|
"grad_norm": 3.5001885890960693, |
|
"learning_rate": 8.956315789473684e-06, |
|
"loss": 1.3586, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.01869348775779742, |
|
"grad_norm": 3.986107110977173, |
|
"learning_rate": 8.429473684210525e-06, |
|
"loss": 1.4174, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.01879508279995936, |
|
"grad_norm": 3.2602295875549316, |
|
"learning_rate": 7.902631578947368e-06, |
|
"loss": 1.0629, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.018896677842121304, |
|
"grad_norm": 3.6767148971557617, |
|
"learning_rate": 7.3757894736842095e-06, |
|
"loss": 1.2673, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.018998272884283247, |
|
"grad_norm": 4.002993106842041, |
|
"learning_rate": 6.848947368421053e-06, |
|
"loss": 1.2915, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.01909986792644519, |
|
"grad_norm": 4.814608097076416, |
|
"learning_rate": 6.322105263157895e-06, |
|
"loss": 1.4219, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.01920146296860713, |
|
"grad_norm": 4.147160530090332, |
|
"learning_rate": 5.7952631578947365e-06, |
|
"loss": 1.1889, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.019303058010769075, |
|
"grad_norm": 5.3963470458984375, |
|
"learning_rate": 5.268421052631578e-06, |
|
"loss": 1.4151, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.019404653052931018, |
|
"grad_norm": 5.4656982421875, |
|
"learning_rate": 4.741578947368421e-06, |
|
"loss": 1.2987, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.019506248095092958, |
|
"grad_norm": 5.874293327331543, |
|
"learning_rate": 4.2147368421052626e-06, |
|
"loss": 1.3427, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.0196078431372549, |
|
"grad_norm": 5.825425148010254, |
|
"learning_rate": 3.6878947368421047e-06, |
|
"loss": 1.0556, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.019709438179416845, |
|
"grad_norm": 5.29592227935791, |
|
"learning_rate": 3.1610526315789474e-06, |
|
"loss": 0.9699, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.019811033221578785, |
|
"grad_norm": 6.861169815063477, |
|
"learning_rate": 2.634210526315789e-06, |
|
"loss": 1.2375, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.01991262826374073, |
|
"grad_norm": 7.811491966247559, |
|
"learning_rate": 2.1073684210526313e-06, |
|
"loss": 0.8911, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.020014223305902672, |
|
"grad_norm": 8.977100372314453, |
|
"learning_rate": 1.5805263157894737e-06, |
|
"loss": 1.1859, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.020115818348064616, |
|
"grad_norm": 9.319770812988281, |
|
"learning_rate": 1.0536842105263156e-06, |
|
"loss": 1.5089, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.020217413390226556, |
|
"grad_norm": 6.771570205688477, |
|
"learning_rate": 5.268421052631578e-07, |
|
"loss": 0.5158, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.0203190084323885, |
|
"grad_norm": 9.172361373901367, |
|
"learning_rate": 0.0, |
|
"loss": 0.8363, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.0203190084323885, |
|
"eval_loss": 0.5461220145225525, |
|
"eval_runtime": 519.175, |
|
"eval_samples_per_second": 7.984, |
|
"eval_steps_per_second": 1.997, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.71283380904919e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|