{
  "best_metric": 10.70785140991211,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.07426661715558856,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003713330857779428,
      "grad_norm": 0.47366201877593994,
      "learning_rate": 1.009e-05,
      "loss": 10.835,
      "step": 1
    },
    {
      "epoch": 0.0003713330857779428,
      "eval_loss": 10.832501411437988,
      "eval_runtime": 8.632,
      "eval_samples_per_second": 131.372,
      "eval_steps_per_second": 32.901,
      "step": 1
    },
    {
      "epoch": 0.0007426661715558856,
      "grad_norm": 0.39668479561805725,
      "learning_rate": 2.018e-05,
      "loss": 10.8341,
      "step": 2
    },
    {
      "epoch": 0.0011139992573338284,
      "grad_norm": 0.36069685220718384,
      "learning_rate": 3.027e-05,
      "loss": 10.8294,
      "step": 3
    },
    {
      "epoch": 0.0014853323431117712,
      "grad_norm": 0.3754219710826874,
      "learning_rate": 4.036e-05,
      "loss": 10.8319,
      "step": 4
    },
    {
      "epoch": 0.001856665428889714,
      "grad_norm": 0.4531574547290802,
      "learning_rate": 5.045e-05,
      "loss": 10.8283,
      "step": 5
    },
    {
      "epoch": 0.0022279985146676567,
      "grad_norm": 0.46290841698646545,
      "learning_rate": 6.054e-05,
      "loss": 10.8327,
      "step": 6
    },
    {
      "epoch": 0.0025993316004456,
      "grad_norm": 0.3586803674697876,
      "learning_rate": 7.062999999999999e-05,
      "loss": 10.8329,
      "step": 7
    },
    {
      "epoch": 0.0029706646862235424,
      "grad_norm": 0.4396965205669403,
      "learning_rate": 8.072e-05,
      "loss": 10.8277,
      "step": 8
    },
    {
      "epoch": 0.0033419977720014855,
      "grad_norm": 0.5143634676933289,
      "learning_rate": 9.081e-05,
      "loss": 10.8298,
      "step": 9
    },
    {
      "epoch": 0.003713330857779428,
      "grad_norm": 0.41205930709838867,
      "learning_rate": 0.0001009,
      "loss": 10.8375,
      "step": 10
    },
    {
      "epoch": 0.004084663943557371,
      "grad_norm": 0.4410165548324585,
      "learning_rate": 0.00010036894736842106,
      "loss": 10.8263,
      "step": 11
    },
    {
      "epoch": 0.004455997029335313,
      "grad_norm": 0.47727352380752563,
      "learning_rate": 9.98378947368421e-05,
      "loss": 10.8128,
      "step": 12
    },
    {
      "epoch": 0.004827330115113257,
      "grad_norm": 0.48599427938461304,
      "learning_rate": 9.930684210526315e-05,
      "loss": 10.8048,
      "step": 13
    },
    {
      "epoch": 0.0051986632008912,
      "grad_norm": 0.5060449838638306,
      "learning_rate": 9.877578947368421e-05,
      "loss": 10.8201,
      "step": 14
    },
    {
      "epoch": 0.005569996286669142,
      "grad_norm": 0.4900880455970764,
      "learning_rate": 9.824473684210527e-05,
      "loss": 10.8079,
      "step": 15
    },
    {
      "epoch": 0.005941329372447085,
      "grad_norm": 0.5277690291404724,
      "learning_rate": 9.771368421052632e-05,
      "loss": 10.8078,
      "step": 16
    },
    {
      "epoch": 0.0063126624582250275,
      "grad_norm": 0.5447446703910828,
      "learning_rate": 9.718263157894736e-05,
      "loss": 10.8192,
      "step": 17
    },
    {
      "epoch": 0.006683995544002971,
      "grad_norm": 0.5573148131370544,
      "learning_rate": 9.665157894736842e-05,
      "loss": 10.798,
      "step": 18
    },
    {
      "epoch": 0.007055328629780914,
      "grad_norm": 0.5032119750976562,
      "learning_rate": 9.612052631578948e-05,
      "loss": 10.7994,
      "step": 19
    },
    {
      "epoch": 0.007426661715558856,
      "grad_norm": 0.46621081233024597,
      "learning_rate": 9.558947368421052e-05,
      "loss": 10.8053,
      "step": 20
    },
    {
      "epoch": 0.007797994801336799,
      "grad_norm": 0.4873751103878021,
      "learning_rate": 9.505842105263159e-05,
      "loss": 10.8143,
      "step": 21
    },
    {
      "epoch": 0.008169327887114742,
      "grad_norm": 0.618602454662323,
      "learning_rate": 9.452736842105263e-05,
      "loss": 10.7913,
      "step": 22
    },
    {
      "epoch": 0.008540660972892685,
      "grad_norm": 0.5478153228759766,
      "learning_rate": 9.399631578947368e-05,
      "loss": 10.7964,
      "step": 23
    },
    {
      "epoch": 0.008911994058670627,
      "grad_norm": 0.5051780939102173,
      "learning_rate": 9.346526315789474e-05,
      "loss": 10.7684,
      "step": 24
    },
    {
      "epoch": 0.00928332714444857,
      "grad_norm": 0.5710480213165283,
      "learning_rate": 9.293421052631578e-05,
      "loss": 10.7952,
      "step": 25
    },
    {
      "epoch": 0.009654660230226514,
      "grad_norm": 0.5759515166282654,
      "learning_rate": 9.240315789473684e-05,
      "loss": 10.7925,
      "step": 26
    },
    {
      "epoch": 0.010025993316004456,
      "grad_norm": 0.5633918642997742,
      "learning_rate": 9.18721052631579e-05,
      "loss": 10.7648,
      "step": 27
    },
    {
      "epoch": 0.0103973264017824,
      "grad_norm": 0.5759943723678589,
      "learning_rate": 9.134105263157895e-05,
      "loss": 10.756,
      "step": 28
    },
    {
      "epoch": 0.010768659487560341,
      "grad_norm": 0.516233503818512,
      "learning_rate": 9.081e-05,
      "loss": 10.7807,
      "step": 29
    },
    {
      "epoch": 0.011139992573338284,
      "grad_norm": 0.5473969578742981,
      "learning_rate": 9.027894736842105e-05,
      "loss": 10.7649,
      "step": 30
    },
    {
      "epoch": 0.011511325659116228,
      "grad_norm": 0.46038320660591125,
      "learning_rate": 8.97478947368421e-05,
      "loss": 10.7741,
      "step": 31
    },
    {
      "epoch": 0.01188265874489417,
      "grad_norm": 0.5598097443580627,
      "learning_rate": 8.921684210526316e-05,
      "loss": 10.7898,
      "step": 32
    },
    {
      "epoch": 0.012253991830672113,
      "grad_norm": 0.5376091003417969,
      "learning_rate": 8.86857894736842e-05,
      "loss": 10.7795,
      "step": 33
    },
    {
      "epoch": 0.012625324916450055,
      "grad_norm": 0.4991442859172821,
      "learning_rate": 8.815473684210527e-05,
      "loss": 10.7416,
      "step": 34
    },
    {
      "epoch": 0.012996658002227999,
      "grad_norm": 0.5336849093437195,
      "learning_rate": 8.762368421052631e-05,
      "loss": 10.7558,
      "step": 35
    },
    {
      "epoch": 0.013367991088005942,
      "grad_norm": 0.55255526304245,
      "learning_rate": 8.709263157894737e-05,
      "loss": 10.743,
      "step": 36
    },
    {
      "epoch": 0.013739324173783884,
      "grad_norm": 0.5434712767601013,
      "learning_rate": 8.656157894736843e-05,
      "loss": 10.7494,
      "step": 37
    },
    {
      "epoch": 0.014110657259561827,
      "grad_norm": 0.5447563529014587,
      "learning_rate": 8.603052631578947e-05,
      "loss": 10.7471,
      "step": 38
    },
    {
      "epoch": 0.014481990345339769,
      "grad_norm": 0.5778723955154419,
      "learning_rate": 8.549947368421052e-05,
      "loss": 10.732,
      "step": 39
    },
    {
      "epoch": 0.014853323431117713,
      "grad_norm": 0.5451592206954956,
      "learning_rate": 8.496842105263158e-05,
      "loss": 10.7317,
      "step": 40
    },
    {
      "epoch": 0.015224656516895656,
      "grad_norm": 0.509575605392456,
      "learning_rate": 8.443736842105264e-05,
      "loss": 10.76,
      "step": 41
    },
    {
      "epoch": 0.015595989602673598,
      "grad_norm": 0.5755866765975952,
      "learning_rate": 8.390631578947369e-05,
      "loss": 10.7328,
      "step": 42
    },
    {
      "epoch": 0.01596732268845154,
      "grad_norm": 0.5568187832832336,
      "learning_rate": 8.337526315789473e-05,
      "loss": 10.7154,
      "step": 43
    },
    {
      "epoch": 0.016338655774229483,
      "grad_norm": 0.5118264555931091,
      "learning_rate": 8.284421052631579e-05,
      "loss": 10.7452,
      "step": 44
    },
    {
      "epoch": 0.01670998886000743,
      "grad_norm": 0.559354841709137,
      "learning_rate": 8.231315789473685e-05,
      "loss": 10.731,
      "step": 45
    },
    {
      "epoch": 0.01708132194578537,
      "grad_norm": 0.5593016147613525,
      "learning_rate": 8.178210526315789e-05,
      "loss": 10.7448,
      "step": 46
    },
    {
      "epoch": 0.017452655031563312,
      "grad_norm": 0.6811694502830505,
      "learning_rate": 8.125105263157894e-05,
      "loss": 10.7603,
      "step": 47
    },
    {
      "epoch": 0.017823988117341254,
      "grad_norm": 0.4952441155910492,
      "learning_rate": 8.072e-05,
      "loss": 10.7203,
      "step": 48
    },
    {
      "epoch": 0.0181953212031192,
      "grad_norm": 0.5311501026153564,
      "learning_rate": 8.018894736842106e-05,
      "loss": 10.7215,
      "step": 49
    },
    {
      "epoch": 0.01856665428889714,
      "grad_norm": 0.5852133631706238,
      "learning_rate": 7.965789473684211e-05,
      "loss": 10.728,
      "step": 50
    },
    {
      "epoch": 0.01856665428889714,
      "eval_loss": 10.737244606018066,
      "eval_runtime": 8.5322,
      "eval_samples_per_second": 132.909,
      "eval_steps_per_second": 33.286,
      "step": 50
    },
    {
      "epoch": 0.018937987374675083,
      "grad_norm": 0.42146560549736023,
      "learning_rate": 7.912684210526315e-05,
      "loss": 10.7457,
      "step": 51
    },
    {
      "epoch": 0.019309320460453028,
      "grad_norm": 0.30984774231910706,
      "learning_rate": 7.859578947368421e-05,
      "loss": 10.7561,
      "step": 52
    },
    {
      "epoch": 0.01968065354623097,
      "grad_norm": 0.3268715441226959,
      "learning_rate": 7.806473684210527e-05,
      "loss": 10.745,
      "step": 53
    },
    {
      "epoch": 0.02005198663200891,
      "grad_norm": 0.33481699228286743,
      "learning_rate": 7.753368421052631e-05,
      "loss": 10.7458,
      "step": 54
    },
    {
      "epoch": 0.020423319717786857,
      "grad_norm": 0.33349162340164185,
      "learning_rate": 7.700263157894738e-05,
      "loss": 10.7573,
      "step": 55
    },
    {
      "epoch": 0.0207946528035648,
      "grad_norm": 0.340987890958786,
      "learning_rate": 7.647157894736842e-05,
      "loss": 10.7356,
      "step": 56
    },
    {
      "epoch": 0.02116598588934274,
      "grad_norm": 0.38484814763069153,
      "learning_rate": 7.594052631578948e-05,
      "loss": 10.7508,
      "step": 57
    },
    {
      "epoch": 0.021537318975120682,
      "grad_norm": 0.37901782989501953,
      "learning_rate": 7.540947368421053e-05,
      "loss": 10.7437,
      "step": 58
    },
    {
      "epoch": 0.021908652060898627,
      "grad_norm": 0.3089980483055115,
      "learning_rate": 7.487842105263157e-05,
      "loss": 10.7464,
      "step": 59
    },
    {
      "epoch": 0.02227998514667657,
      "grad_norm": 0.3815767168998718,
      "learning_rate": 7.434736842105263e-05,
      "loss": 10.7555,
      "step": 60
    },
    {
      "epoch": 0.02265131823245451,
      "grad_norm": 0.312149316072464,
      "learning_rate": 7.381631578947368e-05,
      "loss": 10.7363,
      "step": 61
    },
    {
      "epoch": 0.023022651318232456,
      "grad_norm": 0.31698325276374817,
      "learning_rate": 7.328526315789474e-05,
      "loss": 10.7385,
      "step": 62
    },
    {
      "epoch": 0.023393984404010398,
      "grad_norm": 0.3198413848876953,
      "learning_rate": 7.27542105263158e-05,
      "loss": 10.7304,
      "step": 63
    },
    {
      "epoch": 0.02376531748978834,
      "grad_norm": 0.32194074988365173,
      "learning_rate": 7.222315789473684e-05,
      "loss": 10.7269,
      "step": 64
    },
    {
      "epoch": 0.024136650575566285,
      "grad_norm": 0.31859320402145386,
      "learning_rate": 7.16921052631579e-05,
      "loss": 10.7275,
      "step": 65
    },
    {
      "epoch": 0.024507983661344226,
      "grad_norm": 0.3795001804828644,
      "learning_rate": 7.116105263157895e-05,
      "loss": 10.757,
      "step": 66
    },
    {
      "epoch": 0.024879316747122168,
      "grad_norm": 0.35307377576828003,
      "learning_rate": 7.062999999999999e-05,
      "loss": 10.7512,
      "step": 67
    },
    {
      "epoch": 0.02525064983290011,
      "grad_norm": 0.3632878065109253,
      "learning_rate": 7.009894736842106e-05,
      "loss": 10.7457,
      "step": 68
    },
    {
      "epoch": 0.025621982918678055,
      "grad_norm": 0.29969894886016846,
      "learning_rate": 6.95678947368421e-05,
      "loss": 10.7376,
      "step": 69
    },
    {
      "epoch": 0.025993316004455997,
      "grad_norm": 0.32437172532081604,
      "learning_rate": 6.903684210526316e-05,
      "loss": 10.7413,
      "step": 70
    },
    {
      "epoch": 0.02636464909023394,
      "grad_norm": 0.297307550907135,
      "learning_rate": 6.850578947368422e-05,
      "loss": 10.7514,
      "step": 71
    },
    {
      "epoch": 0.026735982176011884,
      "grad_norm": 0.3031882047653198,
      "learning_rate": 6.797473684210526e-05,
      "loss": 10.7323,
      "step": 72
    },
    {
      "epoch": 0.027107315261789826,
      "grad_norm": 0.31023070216178894,
      "learning_rate": 6.744368421052631e-05,
      "loss": 10.7279,
      "step": 73
    },
    {
      "epoch": 0.027478648347567768,
      "grad_norm": 0.3623669445514679,
      "learning_rate": 6.691263157894736e-05,
      "loss": 10.7336,
      "step": 74
    },
    {
      "epoch": 0.027849981433345713,
      "grad_norm": 0.3755355477333069,
      "learning_rate": 6.638157894736843e-05,
      "loss": 10.7147,
      "step": 75
    },
    {
      "epoch": 0.028221314519123655,
      "grad_norm": 0.317091703414917,
      "learning_rate": 6.585052631578948e-05,
      "loss": 10.738,
      "step": 76
    },
    {
      "epoch": 0.028592647604901596,
      "grad_norm": 0.3252423107624054,
      "learning_rate": 6.531947368421052e-05,
      "loss": 10.736,
      "step": 77
    },
    {
      "epoch": 0.028963980690679538,
      "grad_norm": 0.4029505252838135,
      "learning_rate": 6.478842105263158e-05,
      "loss": 10.7214,
      "step": 78
    },
    {
      "epoch": 0.029335313776457483,
      "grad_norm": 0.37360379099845886,
      "learning_rate": 6.425736842105264e-05,
      "loss": 10.7328,
      "step": 79
    },
    {
      "epoch": 0.029706646862235425,
      "grad_norm": 0.35306406021118164,
      "learning_rate": 6.372631578947368e-05,
      "loss": 10.7361,
      "step": 80
    },
    {
      "epoch": 0.030077979948013367,
      "grad_norm": 0.35350292921066284,
      "learning_rate": 6.319526315789473e-05,
      "loss": 10.7224,
      "step": 81
    },
    {
      "epoch": 0.030449313033791312,
      "grad_norm": 0.3728892207145691,
      "learning_rate": 6.266421052631579e-05,
      "loss": 10.7082,
      "step": 82
    },
    {
      "epoch": 0.030820646119569254,
      "grad_norm": 0.38188886642456055,
      "learning_rate": 6.213315789473685e-05,
      "loss": 10.7431,
      "step": 83
    },
    {
      "epoch": 0.031191979205347196,
      "grad_norm": 0.41717568039894104,
      "learning_rate": 6.16021052631579e-05,
      "loss": 10.7524,
      "step": 84
    },
    {
      "epoch": 0.03156331229112514,
      "grad_norm": 0.39692479372024536,
      "learning_rate": 6.107105263157894e-05,
      "loss": 10.7133,
      "step": 85
    },
    {
      "epoch": 0.03193464537690308,
      "grad_norm": 0.4064257740974426,
      "learning_rate": 6.054e-05,
      "loss": 10.7193,
      "step": 86
    },
    {
      "epoch": 0.032305978462681025,
      "grad_norm": 0.41969412565231323,
      "learning_rate": 6.000894736842105e-05,
      "loss": 10.7052,
      "step": 87
    },
    {
      "epoch": 0.032677311548458966,
      "grad_norm": 0.4684526026248932,
      "learning_rate": 5.94778947368421e-05,
      "loss": 10.7227,
      "step": 88
    },
    {
      "epoch": 0.03304864463423691,
      "grad_norm": 0.4255547821521759,
      "learning_rate": 5.894684210526316e-05,
      "loss": 10.7029,
      "step": 89
    },
    {
      "epoch": 0.03341997772001486,
      "grad_norm": 0.47345179319381714,
      "learning_rate": 5.841578947368421e-05,
      "loss": 10.7109,
      "step": 90
    },
    {
      "epoch": 0.0337913108057928,
      "grad_norm": 0.47380679845809937,
      "learning_rate": 5.7884736842105265e-05,
      "loss": 10.7027,
      "step": 91
    },
    {
      "epoch": 0.03416264389157074,
      "grad_norm": 0.45350202918052673,
      "learning_rate": 5.7353684210526314e-05,
      "loss": 10.6865,
      "step": 92
    },
    {
      "epoch": 0.03453397697734868,
      "grad_norm": 0.5256607532501221,
      "learning_rate": 5.6822631578947364e-05,
      "loss": 10.7193,
      "step": 93
    },
    {
      "epoch": 0.034905310063126624,
      "grad_norm": 0.540717363357544,
      "learning_rate": 5.629157894736842e-05,
      "loss": 10.7297,
      "step": 94
    },
    {
      "epoch": 0.035276643148904566,
      "grad_norm": 0.4825849235057831,
      "learning_rate": 5.576052631578948e-05,
      "loss": 10.7247,
      "step": 95
    },
    {
      "epoch": 0.03564797623468251,
      "grad_norm": 0.5021780729293823,
      "learning_rate": 5.522947368421053e-05,
      "loss": 10.6791,
      "step": 96
    },
    {
      "epoch": 0.036019309320460456,
      "grad_norm": 0.4982372224330902,
      "learning_rate": 5.469842105263158e-05,
      "loss": 10.714,
      "step": 97
    },
    {
      "epoch": 0.0363906424062384,
      "grad_norm": 0.5147339701652527,
      "learning_rate": 5.416736842105263e-05,
      "loss": 10.7015,
      "step": 98
    },
    {
      "epoch": 0.03676197549201634,
      "grad_norm": 0.6090283989906311,
      "learning_rate": 5.3636315789473685e-05,
      "loss": 10.712,
      "step": 99
    },
    {
      "epoch": 0.03713330857779428,
      "grad_norm": 0.757976233959198,
      "learning_rate": 5.3105263157894734e-05,
      "loss": 10.697,
      "step": 100
    },
    {
      "epoch": 0.03713330857779428,
      "eval_loss": 10.718417167663574,
      "eval_runtime": 8.5984,
      "eval_samples_per_second": 131.886,
      "eval_steps_per_second": 33.03,
      "step": 100
    },
    {
      "epoch": 0.03750464166357222,
      "grad_norm": 0.296906054019928,
      "learning_rate": 5.257421052631578e-05,
      "loss": 10.7377,
      "step": 101
    },
    {
      "epoch": 0.037875974749350165,
      "grad_norm": 0.3151768445968628,
      "learning_rate": 5.2043157894736846e-05,
      "loss": 10.7402,
      "step": 102
    },
    {
      "epoch": 0.03824730783512811,
      "grad_norm": 0.3774714171886444,
      "learning_rate": 5.1512105263157895e-05,
      "loss": 10.7228,
      "step": 103
    },
    {
      "epoch": 0.038618640920906055,
      "grad_norm": 0.37634870409965515,
      "learning_rate": 5.098105263157895e-05,
      "loss": 10.7249,
      "step": 104
    },
    {
      "epoch": 0.038989974006684,
      "grad_norm": 0.2864985764026642,
      "learning_rate": 5.045e-05,
      "loss": 10.7256,
      "step": 105
    },
    {
      "epoch": 0.03936130709246194,
      "grad_norm": 0.3519158363342285,
      "learning_rate": 4.991894736842105e-05,
      "loss": 10.7307,
      "step": 106
    },
    {
      "epoch": 0.03973264017823988,
      "grad_norm": 0.39623257517814636,
      "learning_rate": 4.9387894736842105e-05,
      "loss": 10.723,
      "step": 107
    },
    {
      "epoch": 0.04010397326401782,
      "grad_norm": 0.39145541191101074,
      "learning_rate": 4.885684210526316e-05,
      "loss": 10.7214,
      "step": 108
    },
    {
      "epoch": 0.040475306349795764,
      "grad_norm": 0.3571566939353943,
      "learning_rate": 4.832578947368421e-05,
      "loss": 10.7348,
      "step": 109
    },
    {
      "epoch": 0.04084663943557371,
      "grad_norm": 0.3960326015949249,
      "learning_rate": 4.779473684210526e-05,
      "loss": 10.7309,
      "step": 110
    },
    {
      "epoch": 0.041217972521351655,
      "grad_norm": 0.4168102443218231,
      "learning_rate": 4.7263684210526315e-05,
      "loss": 10.7314,
      "step": 111
    },
    {
      "epoch": 0.0415893056071296,
      "grad_norm": 0.3861686885356903,
      "learning_rate": 4.673263157894737e-05,
      "loss": 10.7539,
      "step": 112
    },
    {
      "epoch": 0.04196063869290754,
      "grad_norm": 0.34592297673225403,
      "learning_rate": 4.620157894736842e-05,
      "loss": 10.7263,
      "step": 113
    },
    {
      "epoch": 0.04233197177868548,
      "grad_norm": 0.3721361458301544,
      "learning_rate": 4.5670526315789475e-05,
      "loss": 10.7193,
      "step": 114
    },
    {
      "epoch": 0.04270330486446342,
      "grad_norm": 0.3191013038158417,
      "learning_rate": 4.5139473684210524e-05,
      "loss": 10.7208,
      "step": 115
    },
    {
      "epoch": 0.043074637950241364,
      "grad_norm": 0.32601889967918396,
      "learning_rate": 4.460842105263158e-05,
      "loss": 10.7358,
      "step": 116
    },
    {
      "epoch": 0.04344597103601931,
      "grad_norm": 0.3087843954563141,
      "learning_rate": 4.4077368421052636e-05,
      "loss": 10.7348,
      "step": 117
    },
    {
      "epoch": 0.043817304121797254,
      "grad_norm": 0.33753201365470886,
      "learning_rate": 4.3546315789473685e-05,
      "loss": 10.7203,
      "step": 118
    },
    {
      "epoch": 0.044188637207575196,
      "grad_norm": 0.3254026174545288,
      "learning_rate": 4.3015263157894734e-05,
      "loss": 10.7333,
      "step": 119
    },
    {
      "epoch": 0.04455997029335314,
      "grad_norm": 0.3384432792663574,
      "learning_rate": 4.248421052631579e-05,
      "loss": 10.7171,
      "step": 120
    },
    {
      "epoch": 0.04493130337913108,
      "grad_norm": 0.3901914954185486,
      "learning_rate": 4.1953157894736846e-05,
      "loss": 10.7343,
      "step": 121
    },
    {
      "epoch": 0.04530263646490902,
      "grad_norm": 0.31527823209762573,
      "learning_rate": 4.1422105263157895e-05,
      "loss": 10.7208,
      "step": 122
    },
    {
      "epoch": 0.04567396955068696,
      "grad_norm": 0.38066819310188293,
      "learning_rate": 4.0891052631578944e-05,
      "loss": 10.7062,
      "step": 123
    },
    {
      "epoch": 0.04604530263646491,
      "grad_norm": 0.3694753646850586,
      "learning_rate": 4.036e-05,
      "loss": 10.7212,
      "step": 124
    },
    {
      "epoch": 0.046416635722242854,
      "grad_norm": 0.358316570520401,
      "learning_rate": 3.9828947368421056e-05,
      "loss": 10.7281,
      "step": 125
    },
    {
      "epoch": 0.046787968808020795,
      "grad_norm": 0.32940492033958435,
      "learning_rate": 3.9297894736842105e-05,
      "loss": 10.7151,
      "step": 126
    },
    {
      "epoch": 0.04715930189379874,
      "grad_norm": 0.3700310289859772,
      "learning_rate": 3.8766842105263154e-05,
      "loss": 10.7148,
      "step": 127
    },
    {
      "epoch": 0.04753063497957668,
      "grad_norm": 0.3548131585121155,
      "learning_rate": 3.823578947368421e-05,
      "loss": 10.7201,
      "step": 128
    },
    {
      "epoch": 0.04790196806535462,
      "grad_norm": 0.3971237242221832,
      "learning_rate": 3.7704736842105265e-05,
      "loss": 10.7027,
      "step": 129
    },
    {
      "epoch": 0.04827330115113257,
      "grad_norm": 0.41516757011413574,
      "learning_rate": 3.7173684210526315e-05,
      "loss": 10.7175,
      "step": 130
    },
    {
      "epoch": 0.04864463423691051,
      "grad_norm": 0.39661961793899536,
      "learning_rate": 3.664263157894737e-05,
      "loss": 10.6913,
      "step": 131
    },
    {
      "epoch": 0.04901596732268845,
      "grad_norm": 0.34752222895622253,
      "learning_rate": 3.611157894736842e-05,
      "loss": 10.707,
      "step": 132
    },
    {
      "epoch": 0.049387300408466395,
      "grad_norm": 0.3332189917564392,
      "learning_rate": 3.5580526315789475e-05,
      "loss": 10.717,
      "step": 133
    },
    {
      "epoch": 0.049758633494244336,
      "grad_norm": 0.31573349237442017,
      "learning_rate": 3.504947368421053e-05,
      "loss": 10.695,
      "step": 134
    },
    {
      "epoch": 0.05012996658002228,
      "grad_norm": 0.40504634380340576,
      "learning_rate": 3.451842105263158e-05,
      "loss": 10.6974,
      "step": 135
    },
    {
      "epoch": 0.05050129966580022,
      "grad_norm": 0.36559829115867615,
      "learning_rate": 3.398736842105263e-05,
      "loss": 10.7029,
      "step": 136
    },
    {
      "epoch": 0.05087263275157817,
      "grad_norm": 0.34160315990448,
      "learning_rate": 3.345631578947368e-05,
      "loss": 10.7138,
      "step": 137
    },
    {
      "epoch": 0.05124396583735611,
      "grad_norm": 0.49344658851623535,
      "learning_rate": 3.292526315789474e-05,
      "loss": 10.6774,
      "step": 138
    },
    {
      "epoch": 0.05161529892313405,
      "grad_norm": 0.41159558296203613,
      "learning_rate": 3.239421052631579e-05,
      "loss": 10.698,
      "step": 139
    },
    {
      "epoch": 0.051986632008911994,
      "grad_norm": 0.4433419406414032,
      "learning_rate": 3.186315789473684e-05,
      "loss": 10.7159,
      "step": 140
    },
    {
      "epoch": 0.052357965094689936,
      "grad_norm": 0.38202357292175293,
      "learning_rate": 3.1332105263157895e-05,
      "loss": 10.6968,
      "step": 141
    },
    {
      "epoch": 0.05272929818046788,
      "grad_norm": 0.3602319061756134,
      "learning_rate": 3.080105263157895e-05,
      "loss": 10.6922,
      "step": 142
    },
    {
      "epoch": 0.05310063126624582,
      "grad_norm": 0.45938384532928467,
      "learning_rate": 3.027e-05,
      "loss": 10.6852,
      "step": 143
    },
    {
      "epoch": 0.05347196435202377,
      "grad_norm": 0.4686320126056671,
      "learning_rate": 2.973894736842105e-05,
      "loss": 10.7081,
      "step": 144
    },
    {
      "epoch": 0.05384329743780171,
      "grad_norm": 0.36970701813697815,
      "learning_rate": 2.9207894736842105e-05,
      "loss": 10.6916,
      "step": 145
    },
    {
      "epoch": 0.05421463052357965,
      "grad_norm": 0.545552670955658,
      "learning_rate": 2.8676842105263157e-05,
      "loss": 10.7078,
      "step": 146
    },
    {
      "epoch": 0.05458596360935759,
      "grad_norm": 0.43362295627593994,
      "learning_rate": 2.814578947368421e-05,
      "loss": 10.6728,
      "step": 147
    },
    {
      "epoch": 0.054957296695135535,
      "grad_norm": 0.5415939688682556,
      "learning_rate": 2.7614736842105266e-05,
      "loss": 10.6628,
      "step": 148
    },
    {
      "epoch": 0.05532862978091348,
      "grad_norm": 0.5985761284828186,
      "learning_rate": 2.7083684210526315e-05,
      "loss": 10.6833,
      "step": 149
    },
    {
      "epoch": 0.055699962866691426,
      "grad_norm": 0.6192039847373962,
      "learning_rate": 2.6552631578947367e-05,
      "loss": 10.6364,
      "step": 150
    },
    {
      "epoch": 0.055699962866691426,
      "eval_loss": 10.709182739257812,
      "eval_runtime": 8.5261,
      "eval_samples_per_second": 133.003,
      "eval_steps_per_second": 33.309,
      "step": 150
    },
    {
      "epoch": 0.05607129595246937,
      "grad_norm": 0.2916700541973114,
      "learning_rate": 2.6021578947368423e-05,
      "loss": 10.7382,
      "step": 151
    },
    {
      "epoch": 0.05644262903824731,
      "grad_norm": 0.29082828760147095,
      "learning_rate": 2.5490526315789475e-05,
      "loss": 10.7209,
      "step": 152
    },
    {
      "epoch": 0.05681396212402525,
      "grad_norm": 0.25077447295188904,
      "learning_rate": 2.4959473684210524e-05,
      "loss": 10.7209,
      "step": 153
    },
    {
      "epoch": 0.05718529520980319,
      "grad_norm": 0.2654978632926941,
      "learning_rate": 2.442842105263158e-05,
      "loss": 10.7401,
      "step": 154
    },
    {
      "epoch": 0.057556628295581135,
      "grad_norm": 0.2980552315711975,
      "learning_rate": 2.389736842105263e-05,
      "loss": 10.7409,
      "step": 155
    },
    {
      "epoch": 0.057927961381359076,
      "grad_norm": 0.28160735964775085,
      "learning_rate": 2.3366315789473685e-05,
      "loss": 10.7515,
      "step": 156
    },
    {
      "epoch": 0.058299294467137025,
      "grad_norm": 0.32138633728027344,
      "learning_rate": 2.2835263157894738e-05,
      "loss": 10.733,
      "step": 157
    },
    {
      "epoch": 0.05867062755291497,
      "grad_norm": 0.24646006524562836,
      "learning_rate": 2.230421052631579e-05,
      "loss": 10.7129,
      "step": 158
    },
    {
      "epoch": 0.05904196063869291,
      "grad_norm": 0.2982631027698517,
      "learning_rate": 2.1773157894736843e-05,
      "loss": 10.7029,
      "step": 159
    },
    {
      "epoch": 0.05941329372447085,
      "grad_norm": 0.23823799192905426,
      "learning_rate": 2.1242105263157895e-05,
      "loss": 10.7194,
      "step": 160
    },
    {
      "epoch": 0.05978462681024879,
      "grad_norm": 0.30195873975753784,
      "learning_rate": 2.0711052631578947e-05,
      "loss": 10.7269,
      "step": 161
    },
    {
      "epoch": 0.060155959896026734,
      "grad_norm": 0.2737445533275604,
      "learning_rate": 2.018e-05,
      "loss": 10.7119,
      "step": 162
    },
    {
      "epoch": 0.060527292981804676,
      "grad_norm": 0.29084184765815735,
      "learning_rate": 1.9648947368421052e-05,
      "loss": 10.739,
      "step": 163
    },
    {
      "epoch": 0.060898626067582624,
      "grad_norm": 0.365295946598053,
      "learning_rate": 1.9117894736842105e-05,
      "loss": 10.7013,
      "step": 164
    },
    {
      "epoch": 0.061269959153360566,
      "grad_norm": 0.31969207525253296,
      "learning_rate": 1.8586842105263157e-05,
      "loss": 10.7008,
      "step": 165
    },
    {
      "epoch": 0.06164129223913851,
      "grad_norm": 0.2848905026912689,
      "learning_rate": 1.805578947368421e-05,
      "loss": 10.7239,
      "step": 166
    },
    {
      "epoch": 0.06201262532491645,
      "grad_norm": 0.3124282658100128,
      "learning_rate": 1.7524736842105266e-05,
      "loss": 10.7078,
      "step": 167
    },
    {
      "epoch": 0.06238395841069439,
      "grad_norm": 0.3112058639526367,
      "learning_rate": 1.6993684210526315e-05,
      "loss": 10.7357,
      "step": 168
    },
    {
      "epoch": 0.06275529149647234,
      "grad_norm": 0.35105863213539124,
      "learning_rate": 1.646263157894737e-05,
      "loss": 10.7035,
      "step": 169
    },
    {
      "epoch": 0.06312662458225028,
      "grad_norm": 0.36087125539779663,
      "learning_rate": 1.593157894736842e-05,
      "loss": 10.7081,
      "step": 170
    },
    {
      "epoch": 0.06349795766802822,
      "grad_norm": 0.3364165127277374,
      "learning_rate": 1.5400526315789475e-05,
      "loss": 10.7028,
      "step": 171
    },
    {
      "epoch": 0.06386929075380617,
      "grad_norm": 0.3719669282436371,
      "learning_rate": 1.4869473684210524e-05,
      "loss": 10.7114,
      "step": 172
    },
    {
      "epoch": 0.06424062383958411,
      "grad_norm": 0.3243717849254608,
      "learning_rate": 1.4338421052631579e-05,
      "loss": 10.7056,
      "step": 173
    },
    {
      "epoch": 0.06461195692536205,
      "grad_norm": 0.34301552176475525,
      "learning_rate": 1.3807368421052633e-05,
      "loss": 10.7298,
      "step": 174
    },
    {
      "epoch": 0.06498329001113999,
      "grad_norm": 0.33205053210258484,
      "learning_rate": 1.3276315789473684e-05,
      "loss": 10.7258,
      "step": 175
    },
    {
      "epoch": 0.06535462309691793,
      "grad_norm": 0.32598814368247986,
      "learning_rate": 1.2745263157894738e-05,
      "loss": 10.7148,
      "step": 176
    },
    {
      "epoch": 0.06572595618269587,
      "grad_norm": 0.3364621698856354,
      "learning_rate": 1.221421052631579e-05,
      "loss": 10.7189,
      "step": 177
    },
    {
      "epoch": 0.06609728926847382,
      "grad_norm": 0.30798718333244324,
      "learning_rate": 1.1683157894736843e-05,
      "loss": 10.7077,
      "step": 178
    },
    {
      "epoch": 0.06646862235425176,
      "grad_norm": 0.45846202969551086,
      "learning_rate": 1.1152105263157895e-05,
      "loss": 10.689,
      "step": 179
    },
    {
      "epoch": 0.06683995544002971,
      "grad_norm": 0.32936862111091614,
      "learning_rate": 1.0621052631578948e-05,
      "loss": 10.7056,
      "step": 180
    },
    {
      "epoch": 0.06721128852580766,
      "grad_norm": 0.4312770664691925,
      "learning_rate": 1.009e-05,
      "loss": 10.7056,
      "step": 181
    },
    {
      "epoch": 0.0675826216115856,
      "grad_norm": 0.33121156692504883,
      "learning_rate": 9.558947368421052e-06,
      "loss": 10.7258,
      "step": 182
    },
    {
      "epoch": 0.06795395469736354,
      "grad_norm": 0.3501925468444824,
      "learning_rate": 9.027894736842105e-06,
      "loss": 10.6905,
      "step": 183
    },
    {
      "epoch": 0.06832528778314148,
      "grad_norm": 0.3568207025527954,
      "learning_rate": 8.496842105263157e-06,
      "loss": 10.7212,
      "step": 184
    },
    {
      "epoch": 0.06869662086891942,
      "grad_norm": 0.334271103143692,
      "learning_rate": 7.96578947368421e-06,
      "loss": 10.7061,
      "step": 185
    },
    {
      "epoch": 0.06906795395469736,
      "grad_norm": 0.37924298644065857,
      "learning_rate": 7.434736842105262e-06,
      "loss": 10.7275,
      "step": 186
    },
    {
      "epoch": 0.0694392870404753,
      "grad_norm": 0.375325471162796,
      "learning_rate": 6.903684210526316e-06,
      "loss": 10.701,
      "step": 187
    },
    {
      "epoch": 0.06981062012625325,
      "grad_norm": 0.3595587909221649,
      "learning_rate": 6.372631578947369e-06,
      "loss": 10.7056,
      "step": 188
    },
    {
      "epoch": 0.07018195321203119,
      "grad_norm": 0.43467050790786743,
      "learning_rate": 5.841578947368421e-06,
      "loss": 10.7071,
      "step": 189
    },
    {
      "epoch": 0.07055328629780913,
      "grad_norm": 0.3964037597179413,
      "learning_rate": 5.310526315789474e-06,
      "loss": 10.7053,
      "step": 190
    },
    {
      "epoch": 0.07092461938358707,
      "grad_norm": 0.4399015009403229,
      "learning_rate": 4.779473684210526e-06,
      "loss": 10.6696,
      "step": 191
    },
    {
      "epoch": 0.07129595246936501,
      "grad_norm": 0.3865761160850525,
      "learning_rate": 4.248421052631579e-06,
      "loss": 10.7024,
      "step": 192
    },
    {
      "epoch": 0.07166728555514296,
      "grad_norm": 0.4131179451942444,
      "learning_rate": 3.717368421052631e-06,
      "loss": 10.6978,
      "step": 193
    },
    {
      "epoch": 0.07203861864092091,
      "grad_norm": 0.4591720700263977,
      "learning_rate": 3.1863157894736844e-06,
      "loss": 10.6869,
      "step": 194
    },
    {
      "epoch": 0.07240995172669885,
      "grad_norm": 0.4387078881263733,
      "learning_rate": 2.655263157894737e-06,
      "loss": 10.6734,
      "step": 195
    },
    {
      "epoch": 0.0727812848124768,
      "grad_norm": 0.5036574006080627,
      "learning_rate": 2.1242105263157893e-06,
      "loss": 10.7041,
      "step": 196
    },
    {
      "epoch": 0.07315261789825474,
      "grad_norm": 0.5212882161140442,
      "learning_rate": 1.5931578947368422e-06,
      "loss": 10.7093,
      "step": 197
    },
    {
      "epoch": 0.07352395098403268,
      "grad_norm": 0.5136047005653381,
      "learning_rate": 1.0621052631578947e-06,
      "loss": 10.6584,
      "step": 198
    },
    {
      "epoch": 0.07389528406981062,
      "grad_norm": 0.6663255095481873,
      "learning_rate": 5.310526315789473e-07,
      "loss": 10.6604,
      "step": 199
    },
    {
      "epoch": 0.07426661715558856,
      "grad_norm": 0.7245345711708069,
      "learning_rate": 0.0,
      "loss": 10.6654,
      "step": 200
    },
    {
      "epoch": 0.07426661715558856,
      "eval_loss": 10.70785140991211,
      "eval_runtime": 8.53,
      "eval_samples_per_second": 132.942,
      "eval_steps_per_second": 33.294,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 34155056332800.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}