{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 125,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "grad_norm": 0.5675837397575378,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.6417,
      "step": 1
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5383580923080444,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.6334,
      "step": 2
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.523246705532074,
      "learning_rate": 8.571428571428571e-05,
      "loss": 0.6413,
      "step": 3
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.4905203580856323,
      "learning_rate": 0.00011428571428571428,
      "loss": 0.665,
      "step": 4
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.5177513957023621,
      "learning_rate": 0.00014285714285714287,
      "loss": 0.6041,
      "step": 5
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.482448011636734,
      "learning_rate": 0.00017142857142857143,
      "loss": 0.6078,
      "step": 6
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.49536892771720886,
      "learning_rate": 0.0002,
      "loss": 0.6412,
      "step": 7
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.5152004361152649,
      "learning_rate": 0.00019996456111234527,
      "loss": 0.607,
      "step": 8
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.496277391910553,
      "learning_rate": 0.0001998582695676762,
      "loss": 0.6227,
      "step": 9
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.5162248611450195,
      "learning_rate": 0.000199681200703075,
      "loss": 0.6012,
      "step": 10
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.5029735565185547,
      "learning_rate": 0.00019943348002101371,
      "loss": 0.5842,
      "step": 11
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.5039064884185791,
      "learning_rate": 0.00019911528310040074,
      "loss": 0.5833,
      "step": 12
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.486408531665802,
      "learning_rate": 0.00019872683547213446,
      "loss": 0.57,
      "step": 13
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.48064252734184265,
      "learning_rate": 0.00019826841245925212,
      "loss": 0.5857,
      "step": 14
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.4762842655181885,
      "learning_rate": 0.00019774033898178667,
      "loss": 0.5434,
      "step": 15
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.4918793737888336,
      "learning_rate": 0.00019714298932647098,
      "loss": 0.5295,
      "step": 16
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.4949023425579071,
      "learning_rate": 0.0001964767868814516,
      "loss": 0.5382,
      "step": 17
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.4618944227695465,
      "learning_rate": 0.00019574220383620055,
      "loss": 0.517,
      "step": 18
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.4404259920120239,
      "learning_rate": 0.00019493976084683813,
      "loss": 0.5089,
      "step": 19
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.47014883160591125,
      "learning_rate": 0.00019407002666710336,
      "loss": 0.5412,
      "step": 20
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.49193090200424194,
      "learning_rate": 0.00019313361774523385,
      "loss": 0.5533,
      "step": 21
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.47287049889564514,
      "learning_rate": 0.00019213119778704128,
      "loss": 0.486,
      "step": 22
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.4395417273044586,
      "learning_rate": 0.00019106347728549135,
      "loss": 0.4964,
      "step": 23
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.4677005708217621,
      "learning_rate": 0.00018993121301712193,
      "loss": 0.5101,
      "step": 24
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4530835449695587,
      "learning_rate": 0.00018873520750565718,
      "loss": 0.4903,
      "step": 25
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.41354307532310486,
      "learning_rate": 0.00018747630845319612,
      "loss": 0.3837,
      "step": 26
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.4053184986114502,
      "learning_rate": 0.0001861554081393806,
      "loss": 0.3586,
      "step": 27
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.39625370502471924,
      "learning_rate": 0.0001847734427889671,
      "loss": 0.3408,
      "step": 28
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.3945152759552002,
      "learning_rate": 0.0001833313919082515,
      "loss": 0.3329,
      "step": 29
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.45374399423599243,
      "learning_rate": 0.0001818302775908169,
      "loss": 0.3273,
      "step": 30
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.44107717275619507,
      "learning_rate": 0.00018027116379309638,
      "loss": 0.3397,
      "step": 31
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.46182313561439514,
      "learning_rate": 0.00017865515558026428,
      "loss": 0.3439,
      "step": 32
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.4914720356464386,
      "learning_rate": 0.00017698339834299061,
      "loss": 0.3415,
      "step": 33
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 0.46283280849456787,
      "learning_rate": 0.00017525707698561385,
      "loss": 0.3309,
      "step": 34
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.47037574648857117,
      "learning_rate": 0.00017347741508630672,
      "loss": 0.3311,
      "step": 35
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.4362049102783203,
      "learning_rate": 0.00017164567402983152,
      "loss": 0.3185,
      "step": 36
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.4418430030345917,
      "learning_rate": 0.0001697631521134985,
      "loss": 0.3189,
      "step": 37
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.4334975779056549,
      "learning_rate": 0.00016783118362696163,
      "loss": 0.3152,
      "step": 38
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.42978766560554504,
      "learning_rate": 0.00016585113790650388,
      "loss": 0.3302,
      "step": 39
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.4448607563972473,
      "learning_rate": 0.00016382441836448202,
      "loss": 0.3311,
      "step": 40
    },
    {
      "epoch": 1.6400000000000001,
      "grad_norm": 0.40594780445098877,
      "learning_rate": 0.0001617524614946192,
      "loss": 0.326,
      "step": 41
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 0.4091069996356964,
      "learning_rate": 0.00015963673585385016,
      "loss": 0.3374,
      "step": 42
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.42589232325553894,
      "learning_rate": 0.0001574787410214407,
      "loss": 0.3228,
      "step": 43
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.413335919380188,
      "learning_rate": 0.00015528000653611935,
      "loss": 0.3234,
      "step": 44
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.44703832268714905,
      "learning_rate": 0.00015304209081197425,
      "loss": 0.3352,
      "step": 45
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 0.42445358633995056,
      "learning_rate": 0.000150766580033884,
      "loss": 0.3429,
      "step": 46
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.39593955874443054,
      "learning_rate": 0.00014845508703326504,
      "loss": 0.3247,
      "step": 47
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.40969106554985046,
      "learning_rate": 0.0001461092501449326,
      "loss": 0.3126,
      "step": 48
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.4226933717727661,
      "learning_rate": 0.00014373073204588556,
      "loss": 0.3184,
      "step": 49
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.4182460308074951,
      "learning_rate": 0.00014132121857683783,
      "loss": 0.3263,
      "step": 50
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.3583109676837921,
      "learning_rate": 0.00013888241754733208,
      "loss": 0.2484,
      "step": 51
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.3850906491279602,
      "learning_rate": 0.00013641605752528224,
      "loss": 0.24,
      "step": 52
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.3770233392715454,
      "learning_rate": 0.00013392388661180303,
      "loss": 0.2334,
      "step": 53
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.374737024307251,
      "learning_rate": 0.0001314076712021949,
      "loss": 0.244,
      "step": 54
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.37868672609329224,
      "learning_rate": 0.0001288691947339621,
      "loss": 0.2134,
      "step": 55
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.41098785400390625,
      "learning_rate": 0.00012631025642275212,
      "loss": 0.2373,
      "step": 56
    },
    {
      "epoch": 2.2800000000000002,
      "grad_norm": 0.3870932161808014,
      "learning_rate": 0.0001237326699871115,
      "loss": 0.2238,
      "step": 57
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.4083153307437897,
      "learning_rate": 0.00012113826236296244,
      "loss": 0.2141,
      "step": 58
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.412580668926239,
      "learning_rate": 0.00011852887240871145,
      "loss": 0.2201,
      "step": 59
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.4499835968017578,
      "learning_rate": 0.00011590634960190721,
      "loss": 0.2215,
      "step": 60
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.48053982853889465,
      "learning_rate": 0.00011327255272837221,
      "loss": 0.217,
      "step": 61
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.44269296526908875,
      "learning_rate": 0.00011062934856473655,
      "loss": 0.2212,
      "step": 62
    },
    {
      "epoch": 2.52,
      "grad_norm": 0.4232514202594757,
      "learning_rate": 0.00010797861055530831,
      "loss": 0.2289,
      "step": 63
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.41652482748031616,
      "learning_rate": 0.00010532221748421787,
      "loss": 0.2098,
      "step": 64
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.39650851488113403,
      "learning_rate": 0.00010266205214377748,
      "loss": 0.195,
      "step": 65
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.39610394835472107,
      "learning_rate": 0.0001,
      "loss": 0.1996,
      "step": 66
    },
    {
      "epoch": 2.68,
      "grad_norm": 0.3933280110359192,
      "learning_rate": 9.733794785622253e-05,
      "loss": 0.2243,
      "step": 67
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 0.3946898579597473,
      "learning_rate": 9.467778251578217e-05,
      "loss": 0.2216,
      "step": 68
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.4111415147781372,
      "learning_rate": 9.202138944469168e-05,
      "loss": 0.2261,
      "step": 69
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.4122645854949951,
      "learning_rate": 8.937065143526347e-05,
      "loss": 0.226,
      "step": 70
    },
    {
      "epoch": 2.84,
      "grad_norm": 0.4040912091732025,
      "learning_rate": 8.672744727162781e-05,
      "loss": 0.2397,
      "step": 71
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.4039236605167389,
      "learning_rate": 8.409365039809281e-05,
      "loss": 0.214,
      "step": 72
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.39346644282341003,
      "learning_rate": 8.147112759128859e-05,
      "loss": 0.2211,
      "step": 73
    },
    {
      "epoch": 2.96,
      "grad_norm": 0.4206030070781708,
      "learning_rate": 7.886173763703757e-05,
      "loss": 0.2193,
      "step": 74
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.3998810648918152,
      "learning_rate": 7.626733001288851e-05,
      "loss": 0.2129,
      "step": 75
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.3447999060153961,
      "learning_rate": 7.368974357724789e-05,
      "loss": 0.1809,
      "step": 76
    },
    {
      "epoch": 3.08,
      "grad_norm": 0.3176288306713104,
      "learning_rate": 7.113080526603792e-05,
      "loss": 0.1648,
      "step": 77
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.32004985213279724,
      "learning_rate": 6.859232879780515e-05,
      "loss": 0.1582,
      "step": 78
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.31019288301467896,
      "learning_rate": 6.607611338819697e-05,
      "loss": 0.1502,
      "step": 79
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.31684502959251404,
      "learning_rate": 6.358394247471778e-05,
      "loss": 0.1514,
      "step": 80
    },
    {
      "epoch": 3.24,
      "grad_norm": 0.3580498695373535,
      "learning_rate": 6.111758245266794e-05,
      "loss": 0.1533,
      "step": 81
    },
    {
      "epoch": 3.2800000000000002,
      "grad_norm": 0.3377327024936676,
      "learning_rate": 5.867878142316221e-05,
      "loss": 0.1522,
      "step": 82
    },
    {
      "epoch": 3.32,
      "grad_norm": 0.36469805240631104,
      "learning_rate": 5.626926795411447e-05,
      "loss": 0.1625,
      "step": 83
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.3576640486717224,
      "learning_rate": 5.38907498550674e-05,
      "loss": 0.1491,
      "step": 84
    },
    {
      "epoch": 3.4,
      "grad_norm": 0.36043015122413635,
      "learning_rate": 5.1544912966734994e-05,
      "loss": 0.1561,
      "step": 85
    },
    {
      "epoch": 3.44,
      "grad_norm": 0.3925049602985382,
      "learning_rate": 4.9233419966116036e-05,
      "loss": 0.1526,
      "step": 86
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.3896467685699463,
      "learning_rate": 4.695790918802576e-05,
      "loss": 0.1626,
      "step": 87
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.38003793358802795,
      "learning_rate": 4.47199934638807e-05,
      "loss": 0.1483,
      "step": 88
    },
    {
      "epoch": 3.56,
      "grad_norm": 0.3852701187133789,
      "learning_rate": 4.252125897855932e-05,
      "loss": 0.1494,
      "step": 89
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.38289740681648254,
      "learning_rate": 4.036326414614985e-05,
      "loss": 0.1694,
      "step": 90
    },
    {
      "epoch": 3.64,
      "grad_norm": 0.3584575057029724,
      "learning_rate": 3.824753850538082e-05,
      "loss": 0.1581,
      "step": 91
    },
    {
      "epoch": 3.68,
      "grad_norm": 0.36479833722114563,
      "learning_rate": 3.617558163551802e-05,
      "loss": 0.1567,
      "step": 92
    },
    {
      "epoch": 3.7199999999999998,
      "grad_norm": 0.3648757338523865,
      "learning_rate": 3.414886209349615e-05,
      "loss": 0.1539,
      "step": 93
    },
    {
      "epoch": 3.76,
      "grad_norm": 0.3735385835170746,
      "learning_rate": 3.216881637303839e-05,
      "loss": 0.1455,
      "step": 94
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.3788692355155945,
      "learning_rate": 3.0236847886501542e-05,
      "loss": 0.1512,
      "step": 95
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.36534735560417175,
      "learning_rate": 2.8354325970168484e-05,
      "loss": 0.1539,
      "step": 96
    },
    {
      "epoch": 3.88,
      "grad_norm": 0.3561382591724396,
      "learning_rate": 2.6522584913693294e-05,
      "loss": 0.1613,
      "step": 97
    },
    {
      "epoch": 3.92,
      "grad_norm": 0.36043405532836914,
      "learning_rate": 2.4742923014386156e-05,
      "loss": 0.1399,
      "step": 98
    },
    {
      "epoch": 3.96,
      "grad_norm": 0.3461771011352539,
      "learning_rate": 2.301660165700936e-05,
      "loss": 0.1446,
      "step": 99
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.3610660135746002,
      "learning_rate": 2.1344844419735755e-05,
      "loss": 0.1669,
      "step": 100
    },
    {
      "epoch": 4.04,
      "grad_norm": 0.3192867636680603,
      "learning_rate": 1.9728836206903656e-05,
      "loss": 0.138,
      "step": 101
    },
    {
      "epoch": 4.08,
      "grad_norm": 0.2961142957210541,
      "learning_rate": 1.8169722409183097e-05,
      "loss": 0.1251,
      "step": 102
    },
    {
      "epoch": 4.12,
      "grad_norm": 0.33224716782569885,
      "learning_rate": 1.6668608091748495e-05,
      "loss": 0.1307,
      "step": 103
    },
    {
      "epoch": 4.16,
      "grad_norm": 0.30373403429985046,
      "learning_rate": 1.522655721103291e-05,
      "loss": 0.1259,
      "step": 104
    },
    {
      "epoch": 4.2,
      "grad_norm": 0.31472688913345337,
      "learning_rate": 1.3844591860619383e-05,
      "loss": 0.1267,
      "step": 105
    },
    {
      "epoch": 4.24,
      "grad_norm": 0.3078845739364624,
      "learning_rate": 1.2523691546803873e-05,
      "loss": 0.123,
      "step": 106
    },
    {
      "epoch": 4.28,
      "grad_norm": 0.31925857067108154,
      "learning_rate": 1.1264792494342857e-05,
      "loss": 0.1326,
      "step": 107
    },
    {
      "epoch": 4.32,
      "grad_norm": 0.29572564363479614,
      "learning_rate": 1.0068786982878087e-05,
      "loss": 0.1162,
      "step": 108
    },
    {
      "epoch": 4.36,
      "grad_norm": 0.29332664608955383,
      "learning_rate": 8.936522714508678e-06,
      "loss": 0.1153,
      "step": 109
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.29813987016677856,
      "learning_rate": 7.868802212958703e-06,
      "loss": 0.1166,
      "step": 110
    },
    {
      "epoch": 4.44,
      "grad_norm": 0.2909725308418274,
      "learning_rate": 6.866382254766157e-06,
      "loss": 0.108,
      "step": 111
    },
    {
      "epoch": 4.48,
      "grad_norm": 0.31103241443634033,
      "learning_rate": 5.929973332896677e-06,
      "loss": 0.1228,
      "step": 112
    },
    {
      "epoch": 4.52,
      "grad_norm": 0.29073992371559143,
      "learning_rate": 5.060239153161872e-06,
      "loss": 0.118,
      "step": 113
    },
    {
      "epoch": 4.5600000000000005,
      "grad_norm": 0.288641095161438,
      "learning_rate": 4.257796163799455e-06,
      "loss": 0.1184,
      "step": 114
    },
    {
      "epoch": 4.6,
      "grad_norm": 0.3088034689426422,
      "learning_rate": 3.5232131185484076e-06,
      "loss": 0.1285,
      "step": 115
    },
    {
      "epoch": 4.64,
      "grad_norm": 0.30730554461479187,
      "learning_rate": 2.857010673529015e-06,
      "loss": 0.1263,
      "step": 116
    },
    {
      "epoch": 4.68,
      "grad_norm": 0.291875422000885,
      "learning_rate": 2.259661018213333e-06,
      "loss": 0.108,
      "step": 117
    },
    {
      "epoch": 4.72,
      "grad_norm": 0.31049686670303345,
      "learning_rate": 1.7315875407479032e-06,
      "loss": 0.1225,
      "step": 118
    },
    {
      "epoch": 4.76,
      "grad_norm": 0.32437863945961,
      "learning_rate": 1.2731645278655445e-06,
      "loss": 0.1431,
      "step": 119
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.3190011978149414,
      "learning_rate": 8.847168995992916e-07,
      "loss": 0.1298,
      "step": 120
    },
    {
      "epoch": 4.84,
      "grad_norm": 0.3092647194862366,
      "learning_rate": 5.665199789862907e-07,
      "loss": 0.1333,
      "step": 121
    },
    {
      "epoch": 4.88,
      "grad_norm": 0.3259420096874237,
      "learning_rate": 3.1879929692498757e-07,
      "loss": 0.1312,
      "step": 122
    },
    {
      "epoch": 4.92,
      "grad_norm": 0.31257393956184387,
      "learning_rate": 1.4173043232380557e-07,
      "loss": 0.1255,
      "step": 123
    },
    {
      "epoch": 4.96,
      "grad_norm": 0.310062438249588,
      "learning_rate": 3.5438887654737355e-08,
      "loss": 0.1155,
      "step": 124
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.3081853687763214,
      "learning_rate": 0.0,
      "loss": 0.1245,
      "step": 125
    }
  ],
  "logging_steps": 1,
  "max_steps": 125,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8813347440214016e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}