{
  "best_metric": 1.2364895343780518,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.3420265070542967,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0017101325352714834,
      "grad_norm": 14.647956848144531,
      "learning_rate": 1e-05,
      "loss": 5.4392,
      "step": 1
    },
    {
      "epoch": 0.0017101325352714834,
      "eval_loss": 2.389522075653076,
      "eval_runtime": 70.0498,
      "eval_samples_per_second": 14.061,
      "eval_steps_per_second": 3.526,
      "step": 1
    },
    {
      "epoch": 0.003420265070542967,
      "grad_norm": 20.373123168945312,
      "learning_rate": 2e-05,
      "loss": 6.512,
      "step": 2
    },
    {
      "epoch": 0.005130397605814451,
      "grad_norm": 18.366159439086914,
      "learning_rate": 3e-05,
      "loss": 6.811,
      "step": 3
    },
    {
      "epoch": 0.006840530141085934,
      "grad_norm": 16.541837692260742,
      "learning_rate": 4e-05,
      "loss": 7.0807,
      "step": 4
    },
    {
      "epoch": 0.008550662676357419,
      "grad_norm": 16.134265899658203,
      "learning_rate": 5e-05,
      "loss": 6.9222,
      "step": 5
    },
    {
      "epoch": 0.010260795211628902,
      "grad_norm": 13.496607780456543,
      "learning_rate": 6e-05,
      "loss": 6.535,
      "step": 6
    },
    {
      "epoch": 0.011970927746900385,
      "grad_norm": 12.634574890136719,
      "learning_rate": 7e-05,
      "loss": 6.4888,
      "step": 7
    },
    {
      "epoch": 0.013681060282171868,
      "grad_norm": 12.351262092590332,
      "learning_rate": 8e-05,
      "loss": 6.2987,
      "step": 8
    },
    {
      "epoch": 0.015391192817443352,
      "grad_norm": 10.987244606018066,
      "learning_rate": 9e-05,
      "loss": 6.2655,
      "step": 9
    },
    {
      "epoch": 0.017101325352714837,
      "grad_norm": 10.413485527038574,
      "learning_rate": 0.0001,
      "loss": 6.3182,
      "step": 10
    },
    {
      "epoch": 0.01881145788798632,
      "grad_norm": 9.127406120300293,
      "learning_rate": 9.999316524962345e-05,
      "loss": 5.9941,
      "step": 11
    },
    {
      "epoch": 0.020521590423257803,
      "grad_norm": 10.038461685180664,
      "learning_rate": 9.997266286704631e-05,
      "loss": 6.13,
      "step": 12
    },
    {
      "epoch": 0.022231722958529286,
      "grad_norm": 8.865433692932129,
      "learning_rate": 9.993849845741524e-05,
      "loss": 5.7875,
      "step": 13
    },
    {
      "epoch": 0.02394185549380077,
      "grad_norm": 9.137649536132812,
      "learning_rate": 9.989068136093873e-05,
      "loss": 5.8117,
      "step": 14
    },
    {
      "epoch": 0.025651988029072252,
      "grad_norm": 9.664212226867676,
      "learning_rate": 9.98292246503335e-05,
      "loss": 5.9177,
      "step": 15
    },
    {
      "epoch": 0.027362120564343735,
      "grad_norm": 11.016179084777832,
      "learning_rate": 9.975414512725057e-05,
      "loss": 5.8847,
      "step": 16
    },
    {
      "epoch": 0.02907225309961522,
      "grad_norm": 9.859611511230469,
      "learning_rate": 9.966546331768191e-05,
      "loss": 5.6148,
      "step": 17
    },
    {
      "epoch": 0.030782385634886705,
      "grad_norm": 9.732139587402344,
      "learning_rate": 9.956320346634876e-05,
      "loss": 5.6255,
      "step": 18
    },
    {
      "epoch": 0.032492518170158184,
      "grad_norm": 8.641677856445312,
      "learning_rate": 9.944739353007344e-05,
      "loss": 5.7555,
      "step": 19
    },
    {
      "epoch": 0.034202650705429674,
      "grad_norm": 8.856379508972168,
      "learning_rate": 9.931806517013612e-05,
      "loss": 5.6799,
      "step": 20
    },
    {
      "epoch": 0.03591278324070116,
      "grad_norm": 9.629952430725098,
      "learning_rate": 9.917525374361912e-05,
      "loss": 5.9026,
      "step": 21
    },
    {
      "epoch": 0.03762291577597264,
      "grad_norm": 9.597189903259277,
      "learning_rate": 9.901899829374047e-05,
      "loss": 5.8821,
      "step": 22
    },
    {
      "epoch": 0.03933304831124412,
      "grad_norm": 8.446582794189453,
      "learning_rate": 9.884934153917997e-05,
      "loss": 5.8156,
      "step": 23
    },
    {
      "epoch": 0.041043180846515606,
      "grad_norm": 7.964376926422119,
      "learning_rate": 9.86663298624003e-05,
      "loss": 5.4823,
      "step": 24
    },
    {
      "epoch": 0.04275331338178709,
      "grad_norm": 8.392891883850098,
      "learning_rate": 9.847001329696653e-05,
      "loss": 5.8879,
      "step": 25
    },
    {
      "epoch": 0.04446344591705857,
      "grad_norm": 9.026515007019043,
      "learning_rate": 9.826044551386744e-05,
      "loss": 5.7524,
      "step": 26
    },
    {
      "epoch": 0.046173578452330055,
      "grad_norm": 8.499021530151367,
      "learning_rate": 9.803768380684242e-05,
      "loss": 5.3138,
      "step": 27
    },
    {
      "epoch": 0.04788371098760154,
      "grad_norm": 9.321228981018066,
      "learning_rate": 9.780178907671789e-05,
      "loss": 5.3142,
      "step": 28
    },
    {
      "epoch": 0.04959384352287302,
      "grad_norm": 8.147331237792969,
      "learning_rate": 9.755282581475769e-05,
      "loss": 5.7566,
      "step": 29
    },
    {
      "epoch": 0.051303976058144504,
      "grad_norm": 8.19190502166748,
      "learning_rate": 9.729086208503174e-05,
      "loss": 5.7796,
      "step": 30
    },
    {
      "epoch": 0.05301410859341599,
      "grad_norm": 8.113025665283203,
      "learning_rate": 9.701596950580806e-05,
      "loss": 5.8476,
      "step": 31
    },
    {
      "epoch": 0.05472424112868747,
      "grad_norm": 9.073809623718262,
      "learning_rate": 9.672822322997305e-05,
      "loss": 5.7916,
      "step": 32
    },
    {
      "epoch": 0.05643437366395896,
      "grad_norm": 8.299051284790039,
      "learning_rate": 9.642770192448536e-05,
      "loss": 6.0941,
      "step": 33
    },
    {
      "epoch": 0.05814450619923044,
      "grad_norm": 8.128596305847168,
      "learning_rate": 9.611448774886924e-05,
      "loss": 5.2846,
      "step": 34
    },
    {
      "epoch": 0.059854638734501926,
      "grad_norm": 9.958587646484375,
      "learning_rate": 9.578866633275288e-05,
      "loss": 5.5907,
      "step": 35
    },
    {
      "epoch": 0.06156477126977341,
      "grad_norm": 8.5099515914917,
      "learning_rate": 9.545032675245813e-05,
      "loss": 5.8238,
      "step": 36
    },
    {
      "epoch": 0.06327490380504489,
      "grad_norm": 9.221707344055176,
      "learning_rate": 9.509956150664796e-05,
      "loss": 6.0025,
      "step": 37
    },
    {
      "epoch": 0.06498503634031637,
      "grad_norm": 8.256094932556152,
      "learning_rate": 9.473646649103818e-05,
      "loss": 5.2493,
      "step": 38
    },
    {
      "epoch": 0.06669516887558785,
      "grad_norm": 8.52989673614502,
      "learning_rate": 9.43611409721806e-05,
      "loss": 5.725,
      "step": 39
    },
    {
      "epoch": 0.06840530141085935,
      "grad_norm": 8.509799003601074,
      "learning_rate": 9.397368756032445e-05,
      "loss": 5.6857,
      "step": 40
    },
    {
      "epoch": 0.07011543394613083,
      "grad_norm": 8.758727073669434,
      "learning_rate": 9.357421218136386e-05,
      "loss": 6.0527,
      "step": 41
    },
    {
      "epoch": 0.07182556648140231,
      "grad_norm": 8.856103897094727,
      "learning_rate": 9.316282404787871e-05,
      "loss": 5.8952,
      "step": 42
    },
    {
      "epoch": 0.0735356990166738,
      "grad_norm": 8.542580604553223,
      "learning_rate": 9.273963562927695e-05,
      "loss": 5.5058,
      "step": 43
    },
    {
      "epoch": 0.07524583155194528,
      "grad_norm": 8.372459411621094,
      "learning_rate": 9.230476262104677e-05,
      "loss": 5.697,
      "step": 44
    },
    {
      "epoch": 0.07695596408721676,
      "grad_norm": 8.722542762756348,
      "learning_rate": 9.185832391312644e-05,
      "loss": 5.9055,
      "step": 45
    },
    {
      "epoch": 0.07866609662248825,
      "grad_norm": 9.559613227844238,
      "learning_rate": 9.140044155740101e-05,
      "loss": 5.9552,
      "step": 46
    },
    {
      "epoch": 0.08037622915775973,
      "grad_norm": 10.255206108093262,
      "learning_rate": 9.093124073433463e-05,
      "loss": 6.3674,
      "step": 47
    },
    {
      "epoch": 0.08208636169303121,
      "grad_norm": 10.23193359375,
      "learning_rate": 9.045084971874738e-05,
      "loss": 6.1721,
      "step": 48
    },
    {
      "epoch": 0.0837964942283027,
      "grad_norm": 9.893390655517578,
      "learning_rate": 8.995939984474624e-05,
      "loss": 6.5567,
      "step": 49
    },
    {
      "epoch": 0.08550662676357418,
      "grad_norm": 11.815103530883789,
      "learning_rate": 8.945702546981969e-05,
      "loss": 6.6826,
      "step": 50
    },
    {
      "epoch": 0.08550662676357418,
      "eval_loss": 1.5590479373931885,
      "eval_runtime": 71.164,
      "eval_samples_per_second": 13.841,
      "eval_steps_per_second": 3.471,
      "step": 50
    },
    {
      "epoch": 0.08721675929884566,
      "grad_norm": 11.670133590698242,
      "learning_rate": 8.894386393810563e-05,
      "loss": 4.8665,
      "step": 51
    },
    {
      "epoch": 0.08892689183411714,
      "grad_norm": 10.765670776367188,
      "learning_rate": 8.842005554284296e-05,
      "loss": 5.6777,
      "step": 52
    },
    {
      "epoch": 0.09063702436938863,
      "grad_norm": 7.763930797576904,
      "learning_rate": 8.788574348801675e-05,
      "loss": 5.6151,
      "step": 53
    },
    {
      "epoch": 0.09234715690466011,
      "grad_norm": 5.661214828491211,
      "learning_rate": 8.73410738492077e-05,
      "loss": 5.0488,
      "step": 54
    },
    {
      "epoch": 0.0940572894399316,
      "grad_norm": 6.892148017883301,
      "learning_rate": 8.678619553365659e-05,
      "loss": 5.642,
      "step": 55
    },
    {
      "epoch": 0.09576742197520308,
      "grad_norm": 6.748917102813721,
      "learning_rate": 8.622126023955446e-05,
      "loss": 5.4259,
      "step": 56
    },
    {
      "epoch": 0.09747755451047456,
      "grad_norm": 6.506460189819336,
      "learning_rate": 8.564642241456986e-05,
      "loss": 5.5046,
      "step": 57
    },
    {
      "epoch": 0.09918768704574604,
      "grad_norm": 6.217710971832275,
      "learning_rate": 8.506183921362443e-05,
      "loss": 5.0724,
      "step": 58
    },
    {
      "epoch": 0.10089781958101753,
      "grad_norm": 6.443725109100342,
      "learning_rate": 8.44676704559283e-05,
      "loss": 5.0304,
      "step": 59
    },
    {
      "epoch": 0.10260795211628901,
      "grad_norm": 6.875208377838135,
      "learning_rate": 8.386407858128706e-05,
      "loss": 5.1855,
      "step": 60
    },
    {
      "epoch": 0.10431808465156049,
      "grad_norm": 6.576230049133301,
      "learning_rate": 8.32512286056924e-05,
      "loss": 5.2583,
      "step": 61
    },
    {
      "epoch": 0.10602821718683197,
      "grad_norm": 6.3259429931640625,
      "learning_rate": 8.262928807620843e-05,
      "loss": 5.1767,
      "step": 62
    },
    {
      "epoch": 0.10773834972210346,
      "grad_norm": 6.573944091796875,
      "learning_rate": 8.199842702516583e-05,
      "loss": 5.4993,
      "step": 63
    },
    {
      "epoch": 0.10944848225737494,
      "grad_norm": 6.2207231521606445,
      "learning_rate": 8.135881792367686e-05,
      "loss": 5.1262,
      "step": 64
    },
    {
      "epoch": 0.11115861479264642,
      "grad_norm": 6.0934553146362305,
      "learning_rate": 8.07106356344834e-05,
      "loss": 5.1846,
      "step": 65
    },
    {
      "epoch": 0.11286874732791792,
      "grad_norm": 6.30827522277832,
      "learning_rate": 8.005405736415126e-05,
      "loss": 5.1388,
      "step": 66
    },
    {
      "epoch": 0.1145788798631894,
      "grad_norm": 6.256842136383057,
      "learning_rate": 7.938926261462366e-05,
      "loss": 5.1978,
      "step": 67
    },
    {
      "epoch": 0.11628901239846089,
      "grad_norm": 6.585797309875488,
      "learning_rate": 7.871643313414718e-05,
      "loss": 5.0545,
      "step": 68
    },
    {
      "epoch": 0.11799914493373237,
      "grad_norm": 6.512587070465088,
      "learning_rate": 7.803575286758364e-05,
      "loss": 5.0827,
      "step": 69
    },
    {
      "epoch": 0.11970927746900385,
      "grad_norm": 7.0146260261535645,
      "learning_rate": 7.734740790612136e-05,
      "loss": 5.3634,
      "step": 70
    },
    {
      "epoch": 0.12141941000427534,
      "grad_norm": 7.003252983093262,
      "learning_rate": 7.66515864363997e-05,
      "loss": 5.5665,
      "step": 71
    },
    {
      "epoch": 0.12312954253954682,
      "grad_norm": 6.686169147491455,
      "learning_rate": 7.594847868906076e-05,
      "loss": 5.0258,
      "step": 72
    },
    {
      "epoch": 0.1248396750748183,
      "grad_norm": 6.716172218322754,
      "learning_rate": 7.52382768867422e-05,
      "loss": 5.4769,
      "step": 73
    },
    {
      "epoch": 0.12654980761008977,
      "grad_norm": 6.407820701599121,
      "learning_rate": 7.452117519152542e-05,
      "loss": 5.2346,
      "step": 74
    },
    {
      "epoch": 0.12825994014536127,
      "grad_norm": 6.440876483917236,
      "learning_rate": 7.379736965185368e-05,
      "loss": 5.3857,
      "step": 75
    },
    {
      "epoch": 0.12997007268063274,
      "grad_norm": 6.7987236976623535,
      "learning_rate": 7.30670581489344e-05,
      "loss": 5.5307,
      "step": 76
    },
    {
      "epoch": 0.13168020521590423,
      "grad_norm": 6.891758441925049,
      "learning_rate": 7.233044034264034e-05,
      "loss": 5.6692,
      "step": 77
    },
    {
      "epoch": 0.1333903377511757,
      "grad_norm": 6.718648910522461,
      "learning_rate": 7.158771761692464e-05,
      "loss": 5.5319,
      "step": 78
    },
    {
      "epoch": 0.1351004702864472,
      "grad_norm": 6.724673271179199,
      "learning_rate": 7.083909302476453e-05,
      "loss": 5.5484,
      "step": 79
    },
    {
      "epoch": 0.1368106028217187,
      "grad_norm": 7.216301441192627,
      "learning_rate": 7.008477123264848e-05,
      "loss": 5.4996,
      "step": 80
    },
    {
      "epoch": 0.13852073535699017,
      "grad_norm": 6.592189788818359,
      "learning_rate": 6.932495846462261e-05,
      "loss": 5.1591,
      "step": 81
    },
    {
      "epoch": 0.14023086789226166,
      "grad_norm": 6.543399333953857,
      "learning_rate": 6.855986244591104e-05,
      "loss": 4.927,
      "step": 82
    },
    {
      "epoch": 0.14194100042753313,
      "grad_norm": 7.43008279800415,
      "learning_rate": 6.778969234612584e-05,
      "loss": 5.4799,
      "step": 83
    },
    {
      "epoch": 0.14365113296280463,
      "grad_norm": 7.017084121704102,
      "learning_rate": 6.701465872208216e-05,
      "loss": 5.1831,
      "step": 84
    },
    {
      "epoch": 0.1453612654980761,
      "grad_norm": 7.088075637817383,
      "learning_rate": 6.623497346023418e-05,
      "loss": 5.3733,
      "step": 85
    },
    {
      "epoch": 0.1470713980333476,
      "grad_norm": 7.448460102081299,
      "learning_rate": 6.545084971874738e-05,
      "loss": 5.3094,
      "step": 86
    },
    {
      "epoch": 0.14878153056861906,
      "grad_norm": 7.983953475952148,
      "learning_rate": 6.466250186922325e-05,
      "loss": 5.5923,
      "step": 87
    },
    {
      "epoch": 0.15049166310389056,
      "grad_norm": 7.6488261222839355,
      "learning_rate": 6.387014543809223e-05,
      "loss": 5.4021,
      "step": 88
    },
    {
      "epoch": 0.15220179563916203,
      "grad_norm": 7.249059677124023,
      "learning_rate": 6.307399704769099e-05,
      "loss": 5.1865,
      "step": 89
    },
    {
      "epoch": 0.15391192817443353,
      "grad_norm": 7.189175605773926,
      "learning_rate": 6.227427435703997e-05,
      "loss": 5.6287,
      "step": 90
    },
    {
      "epoch": 0.155622060709705,
      "grad_norm": 7.227424144744873,
      "learning_rate": 6.147119600233758e-05,
      "loss": 5.4788,
      "step": 91
    },
    {
      "epoch": 0.1573321932449765,
      "grad_norm": 7.495189189910889,
      "learning_rate": 6.066498153718735e-05,
      "loss": 5.2419,
      "step": 92
    },
    {
      "epoch": 0.15904232578024796,
      "grad_norm": 7.223033905029297,
      "learning_rate": 5.985585137257401e-05,
      "loss": 5.4925,
      "step": 93
    },
    {
      "epoch": 0.16075245831551946,
      "grad_norm": 7.196437835693359,
      "learning_rate": 5.90440267166055e-05,
      "loss": 5.1935,
      "step": 94
    },
    {
      "epoch": 0.16246259085079093,
      "grad_norm": 8.053117752075195,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 5.5453,
      "step": 95
    },
    {
      "epoch": 0.16417272338606242,
      "grad_norm": 8.177412033081055,
      "learning_rate": 5.74131823855921e-05,
      "loss": 5.8953,
      "step": 96
    },
    {
      "epoch": 0.1658828559213339,
      "grad_norm": 8.064677238464355,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 5.5499,
      "step": 97
    },
    {
      "epoch": 0.1675929884566054,
      "grad_norm": 9.8474760055542,
      "learning_rate": 5.577423184847932e-05,
      "loss": 6.354,
      "step": 98
    },
    {
      "epoch": 0.16930312099187686,
      "grad_norm": 9.995335578918457,
      "learning_rate": 5.495227651252315e-05,
      "loss": 6.3403,
      "step": 99
    },
    {
      "epoch": 0.17101325352714836,
      "grad_norm": 11.376033782958984,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 6.1669,
      "step": 100
    },
    {
      "epoch": 0.17101325352714836,
      "eval_loss": 1.382412314414978,
      "eval_runtime": 71.2056,
      "eval_samples_per_second": 13.833,
      "eval_steps_per_second": 3.469,
      "step": 100
    },
    {
      "epoch": 0.17272338606241983,
      "grad_norm": 5.957542896270752,
      "learning_rate": 5.330452921628497e-05,
      "loss": 4.1305,
      "step": 101
    },
    {
      "epoch": 0.17443351859769132,
      "grad_norm": 6.4855732917785645,
      "learning_rate": 5.247918773366112e-05,
      "loss": 4.5345,
      "step": 102
    },
    {
      "epoch": 0.1761436511329628,
      "grad_norm": 6.2549519538879395,
      "learning_rate": 5.165316846586541e-05,
      "loss": 4.9724,
      "step": 103
    },
    {
      "epoch": 0.1778537836682343,
      "grad_norm": 5.879164218902588,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 4.9681,
      "step": 104
    },
    {
      "epoch": 0.17956391620350576,
      "grad_norm": 5.322740077972412,
      "learning_rate": 5e-05,
      "loss": 4.7922,
      "step": 105
    },
    {
      "epoch": 0.18127404873877725,
      "grad_norm": 5.165755271911621,
      "learning_rate": 4.917330276168208e-05,
      "loss": 4.9845,
      "step": 106
    },
    {
      "epoch": 0.18298418127404875,
      "grad_norm": 5.382776260375977,
      "learning_rate": 4.834683153413459e-05,
      "loss": 5.2145,
      "step": 107
    },
    {
      "epoch": 0.18469431380932022,
      "grad_norm": 5.669363975524902,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 5.1168,
      "step": 108
    },
    {
      "epoch": 0.18640444634459172,
      "grad_norm": 5.768848896026611,
      "learning_rate": 4.669547078371504e-05,
      "loss": 4.7837,
      "step": 109
    },
    {
      "epoch": 0.1881145788798632,
      "grad_norm": 5.866610527038574,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 5.1897,
      "step": 110
    },
    {
      "epoch": 0.18982471141513468,
      "grad_norm": 5.909623622894287,
      "learning_rate": 4.504772348747687e-05,
      "loss": 5.0922,
      "step": 111
    },
    {
      "epoch": 0.19153484395040615,
      "grad_norm": 5.68808126449585,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 5.1415,
      "step": 112
    },
    {
      "epoch": 0.19324497648567765,
      "grad_norm": 5.953550338745117,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 5.3953,
      "step": 113
    },
    {
      "epoch": 0.19495510902094912,
      "grad_norm": 5.5413665771484375,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 4.8691,
      "step": 114
    },
    {
      "epoch": 0.19666524155622062,
      "grad_norm": 5.718400955200195,
      "learning_rate": 4.17702704859633e-05,
      "loss": 4.9716,
      "step": 115
    },
    {
      "epoch": 0.19837537409149208,
      "grad_norm": 6.088201522827148,
      "learning_rate": 4.095597328339452e-05,
      "loss": 5.0398,
      "step": 116
    },
    {
      "epoch": 0.20008550662676358,
      "grad_norm": 5.787220478057861,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 4.921,
      "step": 117
    },
    {
      "epoch": 0.20179563916203505,
      "grad_norm": 6.0000319480896,
      "learning_rate": 3.933501846281267e-05,
      "loss": 5.1257,
      "step": 118
    },
    {
      "epoch": 0.20350577169730655,
      "grad_norm": 5.856043815612793,
      "learning_rate": 3.852880399766243e-05,
      "loss": 4.9343,
      "step": 119
    },
    {
      "epoch": 0.20521590423257802,
      "grad_norm": 5.995108604431152,
      "learning_rate": 3.772572564296005e-05,
      "loss": 5.0141,
      "step": 120
    },
    {
      "epoch": 0.2069260367678495,
      "grad_norm": 5.846787929534912,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 4.7522,
      "step": 121
    },
    {
      "epoch": 0.20863616930312098,
      "grad_norm": 6.454184532165527,
      "learning_rate": 3.612985456190778e-05,
      "loss": 5.1337,
      "step": 122
    },
    {
      "epoch": 0.21034630183839248,
      "grad_norm": 6.224088668823242,
      "learning_rate": 3.533749813077677e-05,
      "loss": 5.1751,
      "step": 123
    },
    {
      "epoch": 0.21205643437366395,
      "grad_norm": 6.210409164428711,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 5.0064,
      "step": 124
    },
    {
      "epoch": 0.21376656690893545,
      "grad_norm": 6.251707553863525,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 5.0831,
      "step": 125
    },
    {
      "epoch": 0.21547669944420692,
      "grad_norm": 6.671133518218994,
      "learning_rate": 3.298534127791785e-05,
      "loss": 5.3674,
      "step": 126
    },
    {
      "epoch": 0.2171868319794784,
      "grad_norm": 6.227624893188477,
      "learning_rate": 3.221030765387417e-05,
      "loss": 5.0815,
      "step": 127
    },
    {
      "epoch": 0.21889696451474988,
      "grad_norm": 6.270323276519775,
      "learning_rate": 3.144013755408895e-05,
      "loss": 5.2016,
      "step": 128
    },
    {
      "epoch": 0.22060709705002138,
      "grad_norm": 6.1814284324646,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 4.7255,
      "step": 129
    },
    {
      "epoch": 0.22231722958529285,
      "grad_norm": 6.574129581451416,
      "learning_rate": 2.991522876735154e-05,
      "loss": 5.1615,
      "step": 130
    },
    {
      "epoch": 0.22402736212056434,
      "grad_norm": 6.457674026489258,
      "learning_rate": 2.916090697523549e-05,
      "loss": 5.606,
      "step": 131
    },
    {
      "epoch": 0.22573749465583584,
      "grad_norm": 6.920170783996582,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 5.5128,
      "step": 132
    },
    {
      "epoch": 0.2274476271911073,
      "grad_norm": 6.169193744659424,
      "learning_rate": 2.766955965735968e-05,
      "loss": 4.9557,
      "step": 133
    },
    {
      "epoch": 0.2291577597263788,
      "grad_norm": 6.105835914611816,
      "learning_rate": 2.693294185106562e-05,
      "loss": 5.1213,
      "step": 134
    },
    {
      "epoch": 0.23086789226165028,
      "grad_norm": 6.616054534912109,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 5.1441,
      "step": 135
    },
    {
      "epoch": 0.23257802479692177,
      "grad_norm": 6.647872447967529,
      "learning_rate": 2.547882480847461e-05,
      "loss": 5.3619,
      "step": 136
    },
    {
      "epoch": 0.23428815733219324,
      "grad_norm": 6.889586448669434,
      "learning_rate": 2.476172311325783e-05,
      "loss": 5.4472,
      "step": 137
    },
    {
      "epoch": 0.23599828986746474,
      "grad_norm": 6.807645320892334,
      "learning_rate": 2.405152131093926e-05,
      "loss": 5.2571,
      "step": 138
    },
    {
      "epoch": 0.2377084224027362,
      "grad_norm": 7.267321586608887,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 5.4178,
      "step": 139
    },
    {
      "epoch": 0.2394185549380077,
      "grad_norm": 7.587237358093262,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 5.2038,
      "step": 140
    },
    {
      "epoch": 0.24112868747327917,
      "grad_norm": 6.994505882263184,
      "learning_rate": 2.196424713241637e-05,
      "loss": 5.3623,
      "step": 141
    },
    {
      "epoch": 0.24283882000855067,
      "grad_norm": 7.791904926300049,
      "learning_rate": 2.128356686585282e-05,
      "loss": 5.3432,
      "step": 142
    },
    {
      "epoch": 0.24454895254382214,
      "grad_norm": 7.552254676818848,
      "learning_rate": 2.061073738537635e-05,
      "loss": 5.6422,
      "step": 143
    },
    {
      "epoch": 0.24625908507909364,
      "grad_norm": 7.126366138458252,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 5.6549,
      "step": 144
    },
    {
      "epoch": 0.2479692176143651,
      "grad_norm": 8.048868179321289,
      "learning_rate": 1.928936436551661e-05,
      "loss": 5.4633,
      "step": 145
    },
    {
      "epoch": 0.2496793501496366,
      "grad_norm": 7.92402982711792,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 5.3619,
      "step": 146
    },
    {
      "epoch": 0.25138948268490807,
      "grad_norm": 7.74806547164917,
      "learning_rate": 1.800157297483417e-05,
      "loss": 5.1205,
      "step": 147
    },
    {
      "epoch": 0.25309961522017954,
      "grad_norm": 8.040278434753418,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 5.6218,
      "step": 148
    },
    {
      "epoch": 0.25480974775545107,
      "grad_norm": 8.707082748413086,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 6.0942,
      "step": 149
    },
    {
      "epoch": 0.25651988029072254,
      "grad_norm": 11.835229873657227,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 6.0573,
      "step": 150
    },
    {
      "epoch": 0.25651988029072254,
      "eval_loss": 1.2828744649887085,
      "eval_runtime": 71.1991,
      "eval_samples_per_second": 13.834,
      "eval_steps_per_second": 3.469,
      "step": 150
    },
    {
      "epoch": 0.258230012825994,
      "grad_norm": 4.506284236907959,
      "learning_rate": 1.553232954407171e-05,
      "loss": 3.7777,
      "step": 151
    },
    {
      "epoch": 0.2599401453612655,
      "grad_norm": 5.299330234527588,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 4.2758,
      "step": 152
    },
    {
      "epoch": 0.261650277896537,
      "grad_norm": 5.438675403594971,
      "learning_rate": 1.435357758543015e-05,
      "loss": 4.649,
      "step": 153
    },
    {
      "epoch": 0.26336041043180847,
      "grad_norm": 6.68652868270874,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 4.8162,
      "step": 154
    },
    {
      "epoch": 0.26507054296707994,
      "grad_norm": 6.45853853225708,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 4.8465,
      "step": 155
    },
    {
      "epoch": 0.2667806755023514,
      "grad_norm": 6.240671634674072,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 5.0581,
      "step": 156
    },
    {
      "epoch": 0.26849080803762293,
      "grad_norm": 6.3597331047058105,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 5.442,
      "step": 157
    },
    {
      "epoch": 0.2702009405728944,
      "grad_norm": 5.745226860046387,
      "learning_rate": 1.157994445715706e-05,
      "loss": 4.8162,
      "step": 158
    },
    {
      "epoch": 0.27191107310816587,
      "grad_norm": 5.929082870483398,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 5.2591,
      "step": 159
    },
    {
      "epoch": 0.2736212056434374,
      "grad_norm": 5.630734920501709,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 5.033,
      "step": 160
    },
    {
      "epoch": 0.27533133817870886,
      "grad_norm": 5.56561279296875,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 4.8257,
      "step": 161
    },
    {
      "epoch": 0.27704147071398033,
      "grad_norm": 5.408742904663086,
      "learning_rate": 9.549150281252633e-06,
      "loss": 4.6649,
      "step": 162
    },
    {
      "epoch": 0.2787516032492518,
      "grad_norm": 5.555550575256348,
      "learning_rate": 9.068759265665384e-06,
      "loss": 4.7131,
      "step": 163
    },
    {
      "epoch": 0.2804617357845233,
      "grad_norm": 5.2834014892578125,
      "learning_rate": 8.599558442598998e-06,
      "loss": 4.8631,
      "step": 164
    },
    {
      "epoch": 0.2821718683197948,
      "grad_norm": 5.39652156829834,
      "learning_rate": 8.141676086873572e-06,
      "loss": 4.8836,
      "step": 165
    },
    {
      "epoch": 0.28388200085506626,
      "grad_norm": 5.338542938232422,
      "learning_rate": 7.695237378953223e-06,
      "loss": 4.8909,
      "step": 166
    },
    {
      "epoch": 0.28559213339033773,
      "grad_norm": 5.798850059509277,
      "learning_rate": 7.260364370723044e-06,
      "loss": 5.0611,
      "step": 167
    },
    {
      "epoch": 0.28730226592560926,
      "grad_norm": 5.571052074432373,
      "learning_rate": 6.837175952121306e-06,
      "loss": 4.8516,
      "step": 168
    },
    {
      "epoch": 0.2890123984608807,
      "grad_norm": 5.58050537109375,
      "learning_rate": 6.425787818636131e-06,
      "loss": 4.6637,
      "step": 169
    },
    {
      "epoch": 0.2907225309961522,
      "grad_norm": 5.560103893280029,
      "learning_rate": 6.026312439675552e-06,
      "loss": 4.8089,
      "step": 170
    },
    {
      "epoch": 0.29243266353142366,
      "grad_norm": 5.654231071472168,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 5.0751,
      "step": 171
    },
    {
      "epoch": 0.2941427960666952,
      "grad_norm": 5.748037338256836,
      "learning_rate": 5.263533508961827e-06,
      "loss": 5.0826,
      "step": 172
    },
    {
      "epoch": 0.29585292860196666,
      "grad_norm": 5.714996814727783,
      "learning_rate": 4.900438493352055e-06,
      "loss": 4.8812,
      "step": 173
    },
    {
      "epoch": 0.2975630611372381,
      "grad_norm": 5.568888187408447,
      "learning_rate": 4.549673247541875e-06,
      "loss": 4.9249,
      "step": 174
    },
    {
      "epoch": 0.2992731936725096,
      "grad_norm": 5.951992034912109,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 4.95,
      "step": 175
    },
    {
      "epoch": 0.3009833262077811,
      "grad_norm": 6.12166690826416,
      "learning_rate": 3.885512251130763e-06,
      "loss": 4.8529,
      "step": 176
    },
    {
      "epoch": 0.3026934587430526,
      "grad_norm": 6.451945781707764,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 5.3523,
      "step": 177
    },
    {
      "epoch": 0.30440359127832406,
      "grad_norm": 6.277839183807373,
      "learning_rate": 3.271776770026963e-06,
      "loss": 5.2762,
      "step": 178
    },
    {
      "epoch": 0.30611372381359553,
      "grad_norm": 6.195245265960693,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 4.9359,
      "step": 179
    },
    {
      "epoch": 0.30782385634886705,
      "grad_norm": 5.98644495010376,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 5.1152,
      "step": 180
    },
    {
      "epoch": 0.3095339888841385,
      "grad_norm": 6.431697845458984,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 5.1676,
      "step": 181
    },
    {
      "epoch": 0.31124412141941,
      "grad_norm": 5.98140811920166,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 4.9523,
      "step": 182
    },
    {
      "epoch": 0.31295425395468146,
      "grad_norm": 6.290094375610352,
      "learning_rate": 1.962316193157593e-06,
      "loss": 4.9884,
      "step": 183
    },
    {
      "epoch": 0.314664386489953,
      "grad_norm": 6.388920783996582,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 5.0668,
      "step": 184
    },
    {
      "epoch": 0.31637451902522445,
      "grad_norm": 6.373295307159424,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 5.0978,
      "step": 185
    },
    {
      "epoch": 0.3180846515604959,
      "grad_norm": 6.373231410980225,
      "learning_rate": 1.333670137599713e-06,
      "loss": 5.2877,
      "step": 186
    },
    {
      "epoch": 0.31979478409576745,
      "grad_norm": 6.621831893920898,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 5.1399,
      "step": 187
    },
    {
      "epoch": 0.3215049166310389,
      "grad_norm": 7.05682897567749,
      "learning_rate": 9.810017062595322e-07,
      "loss": 5.4095,
      "step": 188
    },
    {
      "epoch": 0.3232150491663104,
      "grad_norm": 6.611572265625,
      "learning_rate": 8.247462563808817e-07,
      "loss": 5.2426,
      "step": 189
    },
    {
      "epoch": 0.32492518170158186,
      "grad_norm": 7.512360572814941,
      "learning_rate": 6.819348298638839e-07,
      "loss": 5.7162,
      "step": 190
    },
    {
      "epoch": 0.3266353142368534,
      "grad_norm": 6.702224254608154,
      "learning_rate": 5.526064699265753e-07,
      "loss": 5.1063,
      "step": 191
    },
    {
      "epoch": 0.32834544677212485,
      "grad_norm": 7.078572750091553,
      "learning_rate": 4.367965336512403e-07,
      "loss": 5.3977,
      "step": 192
    },
    {
      "epoch": 0.3300555793073963,
      "grad_norm": 7.9836955070495605,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 5.4685,
      "step": 193
    },
    {
      "epoch": 0.3317657118426678,
      "grad_norm": 7.350215911865234,
      "learning_rate": 2.458548727494292e-07,
      "loss": 5.2704,
      "step": 194
    },
    {
      "epoch": 0.3334758443779393,
      "grad_norm": 8.438230514526367,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 5.3578,
      "step": 195
    },
    {
      "epoch": 0.3351859769132108,
      "grad_norm": 7.477552890777588,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 5.3017,
      "step": 196
    },
    {
      "epoch": 0.33689610944848225,
      "grad_norm": 7.963863372802734,
      "learning_rate": 6.150154258476315e-08,
      "loss": 5.5124,
      "step": 197
    },
    {
      "epoch": 0.3386062419837537,
      "grad_norm": 9.603384017944336,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 6.0447,
      "step": 198
    },
    {
      "epoch": 0.34031637451902524,
      "grad_norm": 9.086202621459961,
      "learning_rate": 6.834750376549792e-09,
      "loss": 6.057,
      "step": 199
    },
    {
      "epoch": 0.3420265070542967,
      "grad_norm": 12.168228149414062,
      "learning_rate": 0.0,
      "loss": 6.3207,
      "step": 200
    },
    {
      "epoch": 0.3420265070542967,
      "eval_loss": 1.2364895343780518,
      "eval_runtime": 71.0504,
      "eval_samples_per_second": 13.863,
      "eval_steps_per_second": 3.476,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.863232968556544e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}