{
  "best_metric": 11.062679290771484,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.5899705014749262,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0058997050147492625,
      "grad_norm": 10.886664390563965,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 44.8554,
      "step": 1
    },
    {
      "epoch": 0.0058997050147492625,
      "eval_loss": 11.221614837646484,
      "eval_runtime": 1.0489,
      "eval_samples_per_second": 272.669,
      "eval_steps_per_second": 34.322,
      "step": 1
    },
    {
      "epoch": 0.011799410029498525,
      "grad_norm": 10.930596351623535,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 44.876,
      "step": 2
    },
    {
      "epoch": 0.017699115044247787,
      "grad_norm": 10.833826065063477,
      "learning_rate": 5.000000000000001e-07,
      "loss": 44.8799,
      "step": 3
    },
    {
      "epoch": 0.02359882005899705,
      "grad_norm": 10.970483779907227,
      "learning_rate": 6.666666666666667e-07,
      "loss": 44.8848,
      "step": 4
    },
    {
      "epoch": 0.029498525073746312,
      "grad_norm": 11.25637435913086,
      "learning_rate": 8.333333333333333e-07,
      "loss": 44.904,
      "step": 5
    },
    {
      "epoch": 0.035398230088495575,
      "grad_norm": 11.155887603759766,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 44.8352,
      "step": 6
    },
    {
      "epoch": 0.04129793510324484,
      "grad_norm": 10.9830322265625,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 44.8633,
      "step": 7
    },
    {
      "epoch": 0.0471976401179941,
      "grad_norm": 11.078715324401855,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 44.8707,
      "step": 8
    },
    {
      "epoch": 0.05309734513274336,
      "grad_norm": 11.469353675842285,
      "learning_rate": 1.5e-06,
      "loss": 44.8924,
      "step": 9
    },
    {
      "epoch": 0.058997050147492625,
      "grad_norm": 11.133667945861816,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 44.8986,
      "step": 10
    },
    {
      "epoch": 0.06489675516224189,
      "grad_norm": 11.04592227935791,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 44.9135,
      "step": 11
    },
    {
      "epoch": 0.07079646017699115,
      "grad_norm": 11.071782112121582,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 44.8871,
      "step": 12
    },
    {
      "epoch": 0.07669616519174041,
      "grad_norm": 11.084949493408203,
      "learning_rate": 2.166666666666667e-06,
      "loss": 44.8941,
      "step": 13
    },
    {
      "epoch": 0.08259587020648967,
      "grad_norm": 10.90757942199707,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 44.849,
      "step": 14
    },
    {
      "epoch": 0.08849557522123894,
      "grad_norm": 10.782208442687988,
      "learning_rate": 2.5e-06,
      "loss": 44.8656,
      "step": 15
    },
    {
      "epoch": 0.0943952802359882,
      "grad_norm": 10.914060592651367,
      "learning_rate": 2.666666666666667e-06,
      "loss": 44.8329,
      "step": 16
    },
    {
      "epoch": 0.10029498525073746,
      "grad_norm": 10.9237642288208,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 44.8753,
      "step": 17
    },
    {
      "epoch": 0.10619469026548672,
      "grad_norm": 10.91445255279541,
      "learning_rate": 3e-06,
      "loss": 44.8781,
      "step": 18
    },
    {
      "epoch": 0.11209439528023599,
      "grad_norm": 11.406682014465332,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 44.7866,
      "step": 19
    },
    {
      "epoch": 0.11799410029498525,
      "grad_norm": 10.877439498901367,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 44.785,
      "step": 20
    },
    {
      "epoch": 0.12389380530973451,
      "grad_norm": 11.090054512023926,
      "learning_rate": 3.5e-06,
      "loss": 44.8091,
      "step": 21
    },
    {
      "epoch": 0.12979351032448377,
      "grad_norm": 10.820998191833496,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 44.7878,
      "step": 22
    },
    {
      "epoch": 0.13569321533923304,
      "grad_norm": 11.049379348754883,
      "learning_rate": 3.833333333333334e-06,
      "loss": 44.7947,
      "step": 23
    },
    {
      "epoch": 0.1415929203539823,
      "grad_norm": 11.088188171386719,
      "learning_rate": 4.000000000000001e-06,
      "loss": 44.7741,
      "step": 24
    },
    {
      "epoch": 0.14749262536873156,
      "grad_norm": 11.064141273498535,
      "learning_rate": 4.166666666666667e-06,
      "loss": 44.8164,
      "step": 25
    },
    {
      "epoch": 0.15339233038348082,
      "grad_norm": 11.089920043945312,
      "learning_rate": 4.333333333333334e-06,
      "loss": 44.7799,
      "step": 26
    },
    {
      "epoch": 0.1592920353982301,
      "grad_norm": 11.03174877166748,
      "learning_rate": 4.5e-06,
      "loss": 44.7448,
      "step": 27
    },
    {
      "epoch": 0.16519174041297935,
      "grad_norm": 10.936544418334961,
      "learning_rate": 4.666666666666667e-06,
      "loss": 44.7058,
      "step": 28
    },
    {
      "epoch": 0.1710914454277286,
      "grad_norm": 11.263934135437012,
      "learning_rate": 4.833333333333333e-06,
      "loss": 44.7484,
      "step": 29
    },
    {
      "epoch": 0.17699115044247787,
      "grad_norm": 11.224115371704102,
      "learning_rate": 5e-06,
      "loss": 44.7395,
      "step": 30
    },
    {
      "epoch": 0.18289085545722714,
      "grad_norm": 11.245924949645996,
      "learning_rate": 4.997482666353287e-06,
      "loss": 44.6893,
      "step": 31
    },
    {
      "epoch": 0.1887905604719764,
      "grad_norm": 11.2431640625,
      "learning_rate": 4.989935734988098e-06,
      "loss": 44.6915,
      "step": 32
    },
    {
      "epoch": 0.19469026548672566,
      "grad_norm": 10.986893653869629,
      "learning_rate": 4.977374404419838e-06,
      "loss": 44.6643,
      "step": 33
    },
    {
      "epoch": 0.20058997050147492,
      "grad_norm": 11.198678016662598,
      "learning_rate": 4.959823971496575e-06,
      "loss": 44.7171,
      "step": 34
    },
    {
      "epoch": 0.20648967551622419,
      "grad_norm": 11.15566349029541,
      "learning_rate": 4.937319780454559e-06,
      "loss": 44.69,
      "step": 35
    },
    {
      "epoch": 0.21238938053097345,
      "grad_norm": 11.303568840026855,
      "learning_rate": 4.909907151739634e-06,
      "loss": 44.6101,
      "step": 36
    },
    {
      "epoch": 0.2182890855457227,
      "grad_norm": 11.348630905151367,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 44.6683,
      "step": 37
    },
    {
      "epoch": 0.22418879056047197,
      "grad_norm": 11.226109504699707,
      "learning_rate": 4.8405871765993435e-06,
      "loss": 44.5979,
      "step": 38
    },
    {
      "epoch": 0.23008849557522124,
      "grad_norm": 11.195021629333496,
      "learning_rate": 4.7988194313786275e-06,
      "loss": 44.6171,
      "step": 39
    },
    {
      "epoch": 0.2359882005899705,
      "grad_norm": 11.280457496643066,
      "learning_rate": 4.752422169756048e-06,
      "loss": 44.602,
      "step": 40
    },
    {
      "epoch": 0.24188790560471976,
      "grad_norm": 11.282366752624512,
      "learning_rate": 4.701488829641845e-06,
      "loss": 44.5678,
      "step": 41
    },
    {
      "epoch": 0.24778761061946902,
      "grad_norm": 11.656867027282715,
      "learning_rate": 4.646121984004666e-06,
      "loss": 44.5781,
      "step": 42
    },
    {
      "epoch": 0.2536873156342183,
      "grad_norm": 10.911307334899902,
      "learning_rate": 4.586433134303257e-06,
      "loss": 44.5528,
      "step": 43
    },
    {
      "epoch": 0.25958702064896755,
      "grad_norm": 11.045024871826172,
      "learning_rate": 4.522542485937369e-06,
      "loss": 44.5775,
      "step": 44
    },
    {
      "epoch": 0.26548672566371684,
      "grad_norm": 10.932096481323242,
      "learning_rate": 4.454578706170075e-06,
      "loss": 44.5573,
      "step": 45
    },
    {
      "epoch": 0.2713864306784661,
      "grad_norm": 11.110983848571777,
      "learning_rate": 4.382678665009028e-06,
      "loss": 44.5243,
      "step": 46
    },
    {
      "epoch": 0.27728613569321536,
      "grad_norm": 11.290690422058105,
      "learning_rate": 4.3069871595684795e-06,
      "loss": 44.4917,
      "step": 47
    },
    {
      "epoch": 0.2831858407079646,
      "grad_norm": 11.164863586425781,
      "learning_rate": 4.227656622467162e-06,
      "loss": 44.4501,
      "step": 48
    },
    {
      "epoch": 0.2890855457227139,
      "grad_norm": 11.13447380065918,
      "learning_rate": 4.144846814849282e-06,
      "loss": 44.4477,
      "step": 49
    },
    {
      "epoch": 0.2949852507374631,
      "grad_norm": 11.295125007629395,
      "learning_rate": 4.058724504646834e-06,
      "loss": 44.4398,
      "step": 50
    },
    {
      "epoch": 0.2949852507374631,
      "eval_loss": 11.117125511169434,
      "eval_runtime": 0.6596,
      "eval_samples_per_second": 433.596,
      "eval_steps_per_second": 54.579,
      "step": 50
    },
    {
      "epoch": 0.3008849557522124,
      "grad_norm": 11.262834548950195,
      "learning_rate": 3.969463130731183e-06,
      "loss": 44.5064,
      "step": 51
    },
    {
      "epoch": 0.30678466076696165,
      "grad_norm": 11.28780460357666,
      "learning_rate": 3.8772424536302565e-06,
      "loss": 44.4551,
      "step": 52
    },
    {
      "epoch": 0.31268436578171094,
      "grad_norm": 11.082712173461914,
      "learning_rate": 3.782248193514766e-06,
      "loss": 44.4367,
      "step": 53
    },
    {
      "epoch": 0.3185840707964602,
      "grad_norm": 11.272944450378418,
      "learning_rate": 3.684671656182497e-06,
      "loss": 44.495,
      "step": 54
    },
    {
      "epoch": 0.32448377581120946,
      "grad_norm": 11.367984771728516,
      "learning_rate": 3.5847093477938955e-06,
      "loss": 44.4521,
      "step": 55
    },
    {
      "epoch": 0.3303834808259587,
      "grad_norm": 11.50567626953125,
      "learning_rate": 3.4825625791348093e-06,
      "loss": 44.4069,
      "step": 56
    },
    {
      "epoch": 0.336283185840708,
      "grad_norm": 11.37206745147705,
      "learning_rate": 3.3784370602033572e-06,
      "loss": 44.4106,
      "step": 57
    },
    {
      "epoch": 0.3421828908554572,
      "grad_norm": 11.600016593933105,
      "learning_rate": 3.272542485937369e-06,
      "loss": 44.3691,
      "step": 58
    },
    {
      "epoch": 0.3480825958702065,
      "grad_norm": 11.29223918914795,
      "learning_rate": 3.165092113916688e-06,
      "loss": 44.3776,
      "step": 59
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 11.168807983398438,
      "learning_rate": 3.056302334890786e-06,
      "loss": 44.3662,
      "step": 60
    },
    {
      "epoch": 0.35988200589970504,
      "grad_norm": 11.356904029846191,
      "learning_rate": 2.946392236996592e-06,
      "loss": 44.3792,
      "step": 61
    },
    {
      "epoch": 0.36578171091445427,
      "grad_norm": 11.171018600463867,
      "learning_rate": 2.835583164544139e-06,
      "loss": 44.3577,
      "step": 62
    },
    {
      "epoch": 0.37168141592920356,
      "grad_norm": 11.397581100463867,
      "learning_rate": 2.724098272258584e-06,
      "loss": 44.3313,
      "step": 63
    },
    {
      "epoch": 0.3775811209439528,
      "grad_norm": 11.264802932739258,
      "learning_rate": 2.6121620758762877e-06,
      "loss": 44.33,
      "step": 64
    },
    {
      "epoch": 0.3834808259587021,
      "grad_norm": 11.5596923828125,
      "learning_rate": 2.5e-06,
      "loss": 44.3594,
      "step": 65
    },
    {
      "epoch": 0.3893805309734513,
      "grad_norm": 11.320469856262207,
      "learning_rate": 2.3878379241237136e-06,
      "loss": 44.3022,
      "step": 66
    },
    {
      "epoch": 0.3952802359882006,
      "grad_norm": 11.212388038635254,
      "learning_rate": 2.2759017277414165e-06,
      "loss": 44.3685,
      "step": 67
    },
    {
      "epoch": 0.40117994100294985,
      "grad_norm": 11.267321586608887,
      "learning_rate": 2.1644168354558623e-06,
      "loss": 44.3347,
      "step": 68
    },
    {
      "epoch": 0.40707964601769914,
      "grad_norm": 11.387954711914062,
      "learning_rate": 2.053607763003409e-06,
      "loss": 44.2862,
      "step": 69
    },
    {
      "epoch": 0.41297935103244837,
      "grad_norm": 11.291707992553711,
      "learning_rate": 1.9436976651092143e-06,
      "loss": 44.3167,
      "step": 70
    },
    {
      "epoch": 0.41887905604719766,
      "grad_norm": 11.472674369812012,
      "learning_rate": 1.8349078860833125e-06,
      "loss": 44.3085,
      "step": 71
    },
    {
      "epoch": 0.4247787610619469,
      "grad_norm": 11.46934700012207,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 44.3146,
      "step": 72
    },
    {
      "epoch": 0.4306784660766962,
      "grad_norm": 11.346488952636719,
      "learning_rate": 1.6215629397966432e-06,
      "loss": 44.2872,
      "step": 73
    },
    {
      "epoch": 0.4365781710914454,
      "grad_norm": 11.554361343383789,
      "learning_rate": 1.5174374208651913e-06,
      "loss": 44.2945,
      "step": 74
    },
    {
      "epoch": 0.4424778761061947,
      "grad_norm": 11.511228561401367,
      "learning_rate": 1.415290652206105e-06,
      "loss": 44.3073,
      "step": 75
    },
    {
      "epoch": 0.44837758112094395,
      "grad_norm": 11.469541549682617,
      "learning_rate": 1.3153283438175036e-06,
      "loss": 44.3281,
      "step": 76
    },
    {
      "epoch": 0.45427728613569324,
      "grad_norm": 11.547125816345215,
      "learning_rate": 1.217751806485235e-06,
      "loss": 44.3098,
      "step": 77
    },
    {
      "epoch": 0.46017699115044247,
      "grad_norm": 11.442727088928223,
      "learning_rate": 1.122757546369744e-06,
      "loss": 44.2586,
      "step": 78
    },
    {
      "epoch": 0.46607669616519176,
      "grad_norm": 11.528830528259277,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 44.2821,
      "step": 79
    },
    {
      "epoch": 0.471976401179941,
      "grad_norm": 11.535983085632324,
      "learning_rate": 9.412754953531664e-07,
      "loss": 44.3261,
      "step": 80
    },
    {
      "epoch": 0.4778761061946903,
      "grad_norm": 11.634220123291016,
      "learning_rate": 8.551531851507186e-07,
      "loss": 44.275,
      "step": 81
    },
    {
      "epoch": 0.4837758112094395,
      "grad_norm": 11.648545265197754,
      "learning_rate": 7.723433775328385e-07,
      "loss": 44.2439,
      "step": 82
    },
    {
      "epoch": 0.4896755162241888,
      "grad_norm": 11.617422103881836,
      "learning_rate": 6.930128404315214e-07,
      "loss": 44.3185,
      "step": 83
    },
    {
      "epoch": 0.49557522123893805,
      "grad_norm": 11.682761192321777,
      "learning_rate": 6.17321334990973e-07,
      "loss": 44.2801,
      "step": 84
    },
    {
      "epoch": 0.5014749262536873,
      "grad_norm": 11.089653968811035,
      "learning_rate": 5.454212938299256e-07,
      "loss": 44.2543,
      "step": 85
    },
    {
      "epoch": 0.5073746312684366,
      "grad_norm": 11.051488876342773,
      "learning_rate": 4.774575140626317e-07,
      "loss": 44.2673,
      "step": 86
    },
    {
      "epoch": 0.5132743362831859,
      "grad_norm": 11.184460639953613,
      "learning_rate": 4.1356686569674344e-07,
      "loss": 44.2306,
      "step": 87
    },
    {
      "epoch": 0.5191740412979351,
      "grad_norm": 11.230326652526855,
      "learning_rate": 3.538780159953348e-07,
      "loss": 44.2451,
      "step": 88
    },
    {
      "epoch": 0.5250737463126843,
      "grad_norm": 11.29784107208252,
      "learning_rate": 2.98511170358155e-07,
      "loss": 44.2646,
      "step": 89
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 11.078227996826172,
      "learning_rate": 2.4757783024395244e-07,
      "loss": 44.2559,
      "step": 90
    },
    {
      "epoch": 0.5368731563421829,
      "grad_norm": 11.108466148376465,
      "learning_rate": 2.0118056862137358e-07,
      "loss": 44.2589,
      "step": 91
    },
    {
      "epoch": 0.5427728613569321,
      "grad_norm": 11.564451217651367,
      "learning_rate": 1.59412823400657e-07,
      "loss": 44.2609,
      "step": 92
    },
    {
      "epoch": 0.5486725663716814,
      "grad_norm": 11.223318099975586,
      "learning_rate": 1.223587092621162e-07,
      "loss": 44.2762,
      "step": 93
    },
    {
      "epoch": 0.5545722713864307,
      "grad_norm": 11.573466300964355,
      "learning_rate": 9.00928482603669e-08,
      "loss": 44.2708,
      "step": 94
    },
    {
      "epoch": 0.56047197640118,
      "grad_norm": 11.500224113464355,
      "learning_rate": 6.268021954544095e-08,
      "loss": 44.2757,
      "step": 95
    },
    {
      "epoch": 0.5663716814159292,
      "grad_norm": 11.30689525604248,
      "learning_rate": 4.017602850342584e-08,
      "loss": 44.224,
      "step": 96
    },
    {
      "epoch": 0.5722713864306784,
      "grad_norm": 11.348673820495605,
      "learning_rate": 2.262559558016325e-08,
      "loss": 44.2502,
      "step": 97
    },
    {
      "epoch": 0.5781710914454278,
      "grad_norm": 11.519439697265625,
      "learning_rate": 1.006426501190233e-08,
      "loss": 44.2666,
      "step": 98
    },
    {
      "epoch": 0.584070796460177,
      "grad_norm": 11.413444519042969,
      "learning_rate": 2.5173336467135266e-09,
      "loss": 44.3009,
      "step": 99
    },
    {
      "epoch": 0.5899705014749262,
      "grad_norm": 11.36845588684082,
      "learning_rate": 0.0,
      "loss": 44.1867,
      "step": 100
    },
    {
      "epoch": 0.5899705014749262,
      "eval_loss": 11.062679290771484,
      "eval_runtime": 0.6567,
      "eval_samples_per_second": 435.541,
      "eval_steps_per_second": 54.823,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 11943700070400.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|