|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004,
      "grad_norm": 2.7120276745865266,
      "learning_rate": 1.2500000000000002e-07,
      "loss": 0.9478,
      "step": 1
    },
    {
      "epoch": 0.008,
      "grad_norm": 2.5535110233770237,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.962,
      "step": 2
    },
    {
      "epoch": 0.012,
      "grad_norm": 2.6421564999361533,
      "learning_rate": 3.75e-07,
      "loss": 0.9483,
      "step": 3
    },
    {
      "epoch": 0.016,
      "grad_norm": 2.5842713407482596,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.9484,
      "step": 4
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.5281267639976615,
      "learning_rate": 6.25e-07,
      "loss": 0.9512,
      "step": 5
    },
    {
      "epoch": 0.024,
      "grad_norm": 2.55003561639048,
      "learning_rate": 7.5e-07,
      "loss": 0.9185,
      "step": 6
    },
    {
      "epoch": 0.028,
      "grad_norm": 2.5014092783190947,
      "learning_rate": 8.75e-07,
      "loss": 0.9411,
      "step": 7
    },
    {
      "epoch": 0.032,
      "grad_norm": 2.5247715994428046,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.9474,
      "step": 8
    },
    {
      "epoch": 0.036,
      "grad_norm": 2.564090844776098,
      "learning_rate": 1.125e-06,
      "loss": 0.9246,
      "step": 9
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.5258707026382154,
      "learning_rate": 1.25e-06,
      "loss": 0.9078,
      "step": 10
    },
    {
      "epoch": 0.044,
      "grad_norm": 2.5471156175078464,
      "learning_rate": 1.3750000000000002e-06,
      "loss": 0.9134,
      "step": 11
    },
    {
      "epoch": 0.048,
      "grad_norm": 2.7125301372042467,
      "learning_rate": 1.5e-06,
      "loss": 0.8776,
      "step": 12
    },
    {
      "epoch": 0.052,
      "grad_norm": 2.150392048325219,
      "learning_rate": 1.6250000000000001e-06,
      "loss": 0.8549,
      "step": 13
    },
    {
      "epoch": 0.056,
      "grad_norm": 1.8184980673856375,
      "learning_rate": 1.75e-06,
      "loss": 0.8169,
      "step": 14
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.7529184087805771,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 0.7975,
      "step": 15
    },
    {
      "epoch": 0.064,
      "grad_norm": 1.7474840928555682,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.8204,
      "step": 16
    },
    {
      "epoch": 0.068,
      "grad_norm": 1.522251241150409,
      "learning_rate": 2.125e-06,
      "loss": 0.8301,
      "step": 17
    },
    {
      "epoch": 0.072,
      "grad_norm": 0.9782588646220091,
      "learning_rate": 2.25e-06,
      "loss": 0.7839,
      "step": 18
    },
    {
      "epoch": 0.076,
      "grad_norm": 0.975162832649703,
      "learning_rate": 2.375e-06,
      "loss": 0.7876,
      "step": 19
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.8952616849532462,
      "learning_rate": 2.5e-06,
      "loss": 0.7589,
      "step": 20
    },
    {
      "epoch": 0.084,
      "grad_norm": 0.8469187411478404,
      "learning_rate": 2.6250000000000003e-06,
      "loss": 0.7567,
      "step": 21
    },
    {
      "epoch": 0.088,
      "grad_norm": 0.7683035597272257,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.7598,
      "step": 22
    },
    {
      "epoch": 0.092,
      "grad_norm": 0.6779719620181055,
      "learning_rate": 2.875e-06,
      "loss": 0.7431,
      "step": 23
    },
    {
      "epoch": 0.096,
      "grad_norm": 0.5998651036445499,
      "learning_rate": 3e-06,
      "loss": 0.7348,
      "step": 24
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.6798779251188986,
      "learning_rate": 3.125e-06,
      "loss": 0.7271,
      "step": 25
    },
    {
      "epoch": 0.104,
      "grad_norm": 0.7459028436934305,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.7011,
      "step": 26
    },
    {
      "epoch": 0.108,
      "grad_norm": 0.7661666147630528,
      "learning_rate": 3.3750000000000003e-06,
      "loss": 0.7209,
      "step": 27
    },
    {
      "epoch": 0.112,
      "grad_norm": 0.7109730987249342,
      "learning_rate": 3.5e-06,
      "loss": 0.7118,
      "step": 28
    },
    {
      "epoch": 0.116,
      "grad_norm": 0.6778217985942258,
      "learning_rate": 3.625e-06,
      "loss": 0.7295,
      "step": 29
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.6073429084137083,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.7114,
      "step": 30
    },
    {
      "epoch": 0.124,
      "grad_norm": 0.5325413632521706,
      "learning_rate": 3.875e-06,
      "loss": 0.7117,
      "step": 31
    },
    {
      "epoch": 0.128,
      "grad_norm": 0.5170445125401208,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.7132,
      "step": 32
    },
    {
      "epoch": 0.132,
      "grad_norm": 0.47459002896590347,
      "learning_rate": 4.125e-06,
      "loss": 0.6913,
      "step": 33
    },
    {
      "epoch": 0.136,
      "grad_norm": 0.46611890152884594,
      "learning_rate": 4.25e-06,
      "loss": 0.6893,
      "step": 34
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.5282002224942279,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.7059,
      "step": 35
    },
    {
      "epoch": 0.144,
      "grad_norm": 0.516098581069478,
      "learning_rate": 4.5e-06,
      "loss": 0.7068,
      "step": 36
    },
    {
      "epoch": 0.148,
      "grad_norm": 0.4946613953020982,
      "learning_rate": 4.625000000000001e-06,
      "loss": 0.6854,
      "step": 37
    },
    {
      "epoch": 0.152,
      "grad_norm": 0.45539519616935026,
      "learning_rate": 4.75e-06,
      "loss": 0.689,
      "step": 38
    },
    {
      "epoch": 0.156,
      "grad_norm": 0.43098619176364883,
      "learning_rate": 4.875e-06,
      "loss": 0.6904,
      "step": 39
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.42693567901762375,
      "learning_rate": 5e-06,
      "loss": 0.6713,
      "step": 40
    },
    {
      "epoch": 0.164,
      "grad_norm": 0.42916236211888303,
      "learning_rate": 4.999941696797974e-06,
      "loss": 0.6929,
      "step": 41
    },
    {
      "epoch": 0.168,
      "grad_norm": 0.46320958381108696,
      "learning_rate": 4.9997667899113055e-06,
      "loss": 0.6601,
      "step": 42
    },
    {
      "epoch": 0.172,
      "grad_norm": 0.4650598350234117,
      "learning_rate": 4.9994752874981e-06,
      "loss": 0.6627,
      "step": 43
    },
    {
      "epoch": 0.176,
      "grad_norm": 0.4899967026584299,
      "learning_rate": 4.999067203154777e-06,
      "loss": 0.6583,
      "step": 44
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.4562804538758281,
      "learning_rate": 4.998542555915435e-06,
      "loss": 0.6712,
      "step": 45
    },
    {
      "epoch": 0.184,
      "grad_norm": 0.36547047335003885,
      "learning_rate": 4.997901370250966e-06,
      "loss": 0.6495,
      "step": 46
    },
    {
      "epoch": 0.188,
      "grad_norm": 0.37624066260466155,
      "learning_rate": 4.997143676067913e-06,
      "loss": 0.6703,
      "step": 47
    },
    {
      "epoch": 0.192,
      "grad_norm": 0.36653954523845933,
      "learning_rate": 4.99626950870707e-06,
      "loss": 0.6781,
      "step": 48
    },
    {
      "epoch": 0.196,
      "grad_norm": 0.3477765639450634,
      "learning_rate": 4.995278908941845e-06,
      "loss": 0.6745,
      "step": 49
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.3342296714847872,
      "learning_rate": 4.994171922976349e-06,
      "loss": 0.6771,
      "step": 50
    },
    {
      "epoch": 0.204,
      "grad_norm": 0.37958792690250714,
      "learning_rate": 4.9929486024432405e-06,
      "loss": 0.6546,
      "step": 51
    },
    {
      "epoch": 0.208,
      "grad_norm": 0.40870653964939113,
      "learning_rate": 4.991609004401324e-06,
      "loss": 0.6528,
      "step": 52
    },
    {
      "epoch": 0.212,
      "grad_norm": 0.364192633459814,
      "learning_rate": 4.990153191332885e-06,
      "loss": 0.6458,
      "step": 53
    },
    {
      "epoch": 0.216,
      "grad_norm": 0.30668416089546924,
      "learning_rate": 4.988581231140772e-06,
      "loss": 0.672,
      "step": 54
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.33255921507240616,
      "learning_rate": 4.986893197145238e-06,
      "loss": 0.6575,
      "step": 55
    },
    {
      "epoch": 0.224,
      "grad_norm": 0.312428536241603,
      "learning_rate": 4.985089168080509e-06,
      "loss": 0.6629,
      "step": 56
    },
    {
      "epoch": 0.228,
      "grad_norm": 0.2961748326875195,
      "learning_rate": 4.983169228091125e-06,
      "loss": 0.6514,
      "step": 57
    },
    {
      "epoch": 0.232,
      "grad_norm": 0.30513153025857964,
      "learning_rate": 4.981133466728004e-06,
      "loss": 0.6547,
      "step": 58
    },
    {
      "epoch": 0.236,
      "grad_norm": 0.3274000044829569,
      "learning_rate": 4.978981978944271e-06,
      "loss": 0.6514,
      "step": 59
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.3083977406610579,
      "learning_rate": 4.976714865090827e-06,
      "loss": 0.6422,
      "step": 60
    },
    {
      "epoch": 0.244,
      "grad_norm": 0.3052292638265554,
      "learning_rate": 4.97433223091167e-06,
      "loss": 0.6355,
      "step": 61
    },
    {
      "epoch": 0.248,
      "grad_norm": 0.2959593176715387,
      "learning_rate": 4.971834187538963e-06,
      "loss": 0.6551,
      "step": 62
    },
    {
      "epoch": 0.252,
      "grad_norm": 0.29042322618949995,
      "learning_rate": 4.9692208514878445e-06,
      "loss": 0.6581,
      "step": 63
    },
    {
      "epoch": 0.256,
      "grad_norm": 0.3149031676109748,
      "learning_rate": 4.966492344651006e-06,
      "loss": 0.6594,
      "step": 64
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.2874450280533471,
      "learning_rate": 4.963648794292992e-06,
      "loss": 0.6488,
      "step": 65
    },
    {
      "epoch": 0.264,
      "grad_norm": 0.27325192501172824,
      "learning_rate": 4.960690333044279e-06,
      "loss": 0.6351,
      "step": 66
    },
    {
      "epoch": 0.268,
      "grad_norm": 0.2817944445002008,
      "learning_rate": 4.957617098895076e-06,
      "loss": 0.6265,
      "step": 67
    },
    {
      "epoch": 0.272,
      "grad_norm": 0.281980621240097,
      "learning_rate": 4.954429235188897e-06,
      "loss": 0.6432,
      "step": 68
    },
    {
      "epoch": 0.276,
      "grad_norm": 0.2962526841715817,
      "learning_rate": 4.951126890615871e-06,
      "loss": 0.6354,
      "step": 69
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.28005347365677186,
      "learning_rate": 4.947710219205808e-06,
      "loss": 0.6548,
      "step": 70
    },
    {
      "epoch": 0.284,
      "grad_norm": 0.26960325063378,
      "learning_rate": 4.944179380321015e-06,
      "loss": 0.6125,
      "step": 71
    },
    {
      "epoch": 0.288,
      "grad_norm": 0.2848666105192882,
      "learning_rate": 4.940534538648862e-06,
      "loss": 0.6078,
      "step": 72
    },
    {
      "epoch": 0.292,
      "grad_norm": 0.276353304899713,
      "learning_rate": 4.936775864194101e-06,
      "loss": 0.6524,
      "step": 73
    },
    {
      "epoch": 0.296,
      "grad_norm": 0.27790715839174,
      "learning_rate": 4.932903532270939e-06,
      "loss": 0.6275,
      "step": 74
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.2636499704034946,
      "learning_rate": 4.928917723494854e-06,
      "loss": 0.6422,
      "step": 75
    },
    {
      "epoch": 0.304,
      "grad_norm": 0.28133008746598603,
      "learning_rate": 4.924818623774178e-06,
      "loss": 0.6355,
      "step": 76
    },
    {
      "epoch": 0.308,
      "grad_norm": 0.2779410264901997,
      "learning_rate": 4.920606424301424e-06,
      "loss": 0.6408,
      "step": 77
    },
    {
      "epoch": 0.312,
      "grad_norm": 0.2897057665750394,
      "learning_rate": 4.916281321544362e-06,
      "loss": 0.6402,
      "step": 78
    },
    {
      "epoch": 0.316,
      "grad_norm": 0.2761996704216673,
      "learning_rate": 4.911843517236867e-06,
      "loss": 0.653,
      "step": 79
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.29413202875425243,
      "learning_rate": 4.907293218369499e-06,
      "loss": 0.6298,
      "step": 80
    },
    {
      "epoch": 0.324,
      "grad_norm": 0.2861680279307969,
      "learning_rate": 4.9026306371798526e-06,
      "loss": 0.6553,
      "step": 81
    },
    {
      "epoch": 0.328,
      "grad_norm": 0.2848851913581759,
      "learning_rate": 4.897855991142658e-06,
      "loss": 0.6076,
      "step": 82
    },
    {
      "epoch": 0.332,
      "grad_norm": 0.2687848093353044,
      "learning_rate": 4.892969502959639e-06,
      "loss": 0.6311,
      "step": 83
    },
    {
      "epoch": 0.336,
      "grad_norm": 0.27061397240144314,
      "learning_rate": 4.8879714005491205e-06,
      "loss": 0.6148,
      "step": 84
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.2663532374960906,
      "learning_rate": 4.882861917035403e-06,
      "loss": 0.6412,
      "step": 85
    },
    {
      "epoch": 0.344,
      "grad_norm": 0.27752950332016424,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 0.642,
      "step": 86
    },
    {
      "epoch": 0.348,
      "grad_norm": 0.2772792789989426,
      "learning_rate": 4.87230976515995e-06,
      "loss": 0.6184,
      "step": 87
    },
    {
      "epoch": 0.352,
      "grad_norm": 0.29699617925202587,
      "learning_rate": 4.8668675889776095e-06,
      "loss": 0.6275,
      "step": 88
    },
    {
      "epoch": 0.356,
      "grad_norm": 0.2736517750983177,
      "learning_rate": 4.861315016027902e-06,
      "loss": 0.6347,
      "step": 89
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.2871346395772671,
      "learning_rate": 4.855652305297052e-06,
      "loss": 0.6132,
      "step": 90
    },
    {
      "epoch": 0.364,
      "grad_norm": 0.27131438304958966,
      "learning_rate": 4.849879720908394e-06,
      "loss": 0.6026,
      "step": 91
    },
    {
      "epoch": 0.368,
      "grad_norm": 0.2701400539855458,
      "learning_rate": 4.843997532110051e-06,
      "loss": 0.6385,
      "step": 92
    },
    {
      "epoch": 0.372,
      "grad_norm": 0.2870430498497826,
      "learning_rate": 4.8380060132623776e-06,
      "loss": 0.624,
      "step": 93
    },
    {
      "epoch": 0.376,
      "grad_norm": 0.2889305346509075,
      "learning_rate": 4.83190544382516e-06,
      "loss": 0.6375,
      "step": 94
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.2832554152572974,
      "learning_rate": 4.825696108344583e-06,
      "loss": 0.6348,
      "step": 95
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.282963454209734,
      "learning_rate": 4.819378296439962e-06,
      "loss": 0.6425,
      "step": 96
    },
    {
      "epoch": 0.388,
      "grad_norm": 0.28136329883499284,
      "learning_rate": 4.812952302790226e-06,
      "loss": 0.6238,
      "step": 97
    },
    {
      "epoch": 0.392,
      "grad_norm": 0.2751896445755537,
      "learning_rate": 4.80641842712018e-06,
      "loss": 0.6453,
      "step": 98
    },
    {
      "epoch": 0.396,
      "grad_norm": 0.2802941094985609,
      "learning_rate": 4.799776974186523e-06,
      "loss": 0.6362,
      "step": 99
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.2816828378634338,
      "learning_rate": 4.793028253763633e-06,
      "loss": 0.6394,
      "step": 100
    },
    {
      "epoch": 0.404,
      "grad_norm": 0.28942806582562414,
      "learning_rate": 4.786172580629118e-06,
      "loss": 0.6106,
      "step": 101
    },
    {
      "epoch": 0.408,
      "grad_norm": 0.28396225609673553,
      "learning_rate": 4.7792102745491345e-06,
      "loss": 0.6302,
      "step": 102
    },
    {
      "epoch": 0.412,
      "grad_norm": 0.269131748983874,
      "learning_rate": 4.772141660263472e-06,
      "loss": 0.6247,
      "step": 103
    },
    {
      "epoch": 0.416,
      "grad_norm": 0.28932510717249166,
      "learning_rate": 4.764967067470409e-06,
      "loss": 0.6201,
      "step": 104
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.2734040302258933,
      "learning_rate": 4.757686830811332e-06,
      "loss": 0.6092,
      "step": 105
    },
    {
      "epoch": 0.424,
      "grad_norm": 0.28628481564556507,
      "learning_rate": 4.750301289855128e-06,
      "loss": 0.6284,
      "step": 106
    },
    {
      "epoch": 0.428,
      "grad_norm": 0.30932729909286755,
      "learning_rate": 4.742810789082345e-06,
      "loss": 0.6332,
      "step": 107
    },
    {
      "epoch": 0.432,
      "grad_norm": 0.2707515621211506,
      "learning_rate": 4.735215677869129e-06,
      "loss": 0.6107,
      "step": 108
    },
    {
      "epoch": 0.436,
      "grad_norm": 0.29859925040242785,
      "learning_rate": 4.72751631047092e-06,
      "loss": 0.6477,
      "step": 109
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.2974079849667466,
      "learning_rate": 4.7197130460059385e-06,
      "loss": 0.632,
      "step": 110
    },
    {
      "epoch": 0.444,
      "grad_norm": 0.28109309454092835,
      "learning_rate": 4.711806248438428e-06,
      "loss": 0.6308,
      "step": 111
    },
    {
      "epoch": 0.448,
      "grad_norm": 0.287627270908265,
      "learning_rate": 4.7037962865616795e-06,
      "loss": 0.6322,
      "step": 112
    },
    {
      "epoch": 0.452,
      "grad_norm": 0.2859506113795605,
      "learning_rate": 4.695683533980835e-06,
      "loss": 0.6196,
      "step": 113
    },
    {
      "epoch": 0.456,
      "grad_norm": 0.27768505471724575,
      "learning_rate": 4.687468369095457e-06,
      "loss": 0.6107,
      "step": 114
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.2985912133376052,
      "learning_rate": 4.679151175081879e-06,
      "loss": 0.6316,
      "step": 115
    },
    {
      "epoch": 0.464,
      "grad_norm": 0.27790254502289174,
      "learning_rate": 4.6707323398753346e-06,
      "loss": 0.6194,
      "step": 116
    },
    {
      "epoch": 0.468,
      "grad_norm": 0.2770496916475714,
      "learning_rate": 4.662212256151865e-06,
      "loss": 0.5938,
      "step": 117
    },
    {
      "epoch": 0.472,
      "grad_norm": 0.2750067208531331,
      "learning_rate": 4.6535913213100005e-06,
      "loss": 0.6125,
      "step": 118
    },
    {
      "epoch": 0.476,
      "grad_norm": 0.2640533408637943,
      "learning_rate": 4.644869937452224e-06,
      "loss": 0.6245,
      "step": 119
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.27591929390872805,
      "learning_rate": 4.636048511366222e-06,
      "loss": 0.6186,
      "step": 120
    },
    {
      "epoch": 0.484,
      "grad_norm": 0.29643778935625803,
      "learning_rate": 4.627127454505902e-06,
      "loss": 0.6086,
      "step": 121
    },
    {
      "epoch": 0.488,
      "grad_norm": 0.28076149524670235,
      "learning_rate": 4.618107182972209e-06,
      "loss": 0.6158,
      "step": 122
    },
    {
      "epoch": 0.492,
      "grad_norm": 0.28214439623978305,
      "learning_rate": 4.6089881174937146e-06,
      "loss": 0.6299,
      "step": 123
    },
    {
      "epoch": 0.496,
      "grad_norm": 0.2885430172419291,
      "learning_rate": 4.599770683406992e-06,
      "loss": 0.6367,
      "step": 124
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.28978271139767015,
      "learning_rate": 4.590455310636778e-06,
      "loss": 0.6248,
      "step": 125
    },
    {
      "epoch": 0.504,
      "grad_norm": 0.2763777911865909,
      "learning_rate": 4.58104243367592e-06,
      "loss": 0.6235,
      "step": 126
    },
    {
      "epoch": 0.508,
      "grad_norm": 0.2788581607213461,
      "learning_rate": 4.571532491565115e-06,
      "loss": 0.6201,
      "step": 127
    },
    {
      "epoch": 0.512,
      "grad_norm": 0.28522419436506885,
      "learning_rate": 4.561925927872421e-06,
      "loss": 0.6287,
      "step": 128
    },
    {
      "epoch": 0.516,
      "grad_norm": 0.2759422238312871,
      "learning_rate": 4.55222319067258e-06,
      "loss": 0.6105,
      "step": 129
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.271521828303117,
      "learning_rate": 4.542424732526105e-06,
      "loss": 0.6004,
      "step": 130
    },
    {
      "epoch": 0.524,
      "grad_norm": 0.2832768486502443,
      "learning_rate": 4.532531010458188e-06,
      "loss": 0.6438,
      "step": 131
    },
    {
      "epoch": 0.528,
      "grad_norm": 0.28545986352466657,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.6147,
      "step": 132
    },
    {
      "epoch": 0.532,
      "grad_norm": 0.2843650568512383,
      "learning_rate": 4.512459624854017e-06,
      "loss": 0.6347,
      "step": 133
    },
    {
      "epoch": 0.536,
      "grad_norm": 0.2758779923686556,
      "learning_rate": 4.5022828974986044e-06,
      "loss": 0.6111,
      "step": 134
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.28471240078326554,
      "learning_rate": 4.4920127785397615e-06,
      "loss": 0.6161,
      "step": 135
    },
    {
      "epoch": 0.544,
      "grad_norm": 0.27215538114487603,
      "learning_rate": 4.481649747002146e-06,
      "loss": 0.6019,
      "step": 136
    },
    {
      "epoch": 0.548,
      "grad_norm": 0.27161590017753495,
      "learning_rate": 4.471194286244094e-06,
      "loss": 0.6229,
      "step": 137
    },
    {
      "epoch": 0.552,
      "grad_norm": 0.2786884282741861,
      "learning_rate": 4.460646883935079e-06,
      "loss": 0.6217,
      "step": 138
    },
    {
      "epoch": 0.556,
      "grad_norm": 0.29095908793086706,
      "learning_rate": 4.4500080320329615e-06,
      "loss": 0.6212,
      "step": 139
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.2797512942233689,
      "learning_rate": 4.43927822676105e-06,
      "loss": 0.6183,
      "step": 140
    },
    {
      "epoch": 0.564,
      "grad_norm": 0.2701904530059608,
      "learning_rate": 4.428457968584945e-06,
      "loss": 0.6067,
      "step": 141
    },
    {
      "epoch": 0.568,
      "grad_norm": 0.2924071263588622,
      "learning_rate": 4.417547762189207e-06,
      "loss": 0.6167,
      "step": 142
    },
    {
      "epoch": 0.572,
      "grad_norm": 0.2684300131690406,
      "learning_rate": 4.40654811645381e-06,
      "loss": 0.6185,
      "step": 143
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.2774759359262972,
      "learning_rate": 4.395459544430407e-06,
      "loss": 0.602,
      "step": 144
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.2808643430953345,
      "learning_rate": 4.384282563318403e-06,
      "loss": 0.598,
      "step": 145
    },
    {
      "epoch": 0.584,
      "grad_norm": 0.27015365579319356,
      "learning_rate": 4.373017694440828e-06,
      "loss": 0.5857,
      "step": 146
    },
    {
      "epoch": 0.588,
      "grad_norm": 0.2856861787094523,
      "learning_rate": 4.361665463220023e-06,
      "loss": 0.6206,
      "step": 147
    },
    {
      "epoch": 0.592,
      "grad_norm": 0.28199517014381215,
      "learning_rate": 4.35022639915313e-06,
      "loss": 0.6094,
      "step": 148
    },
    {
      "epoch": 0.596,
      "grad_norm": 0.27010314199532126,
      "learning_rate": 4.338701035787403e-06,
      "loss": 0.5947,
      "step": 149
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2601892508343049,
      "learning_rate": 4.32708991069531e-06,
      "loss": 0.5871,
      "step": 150
    },
    {
      "epoch": 0.604,
      "grad_norm": 0.27825064672033506,
      "learning_rate": 4.315393565449472e-06,
      "loss": 0.6093,
      "step": 151
    },
    {
      "epoch": 0.608,
      "grad_norm": 0.27958608277896724,
      "learning_rate": 4.30361254559739e-06,
      "loss": 0.5951,
      "step": 152
    },
    {
      "epoch": 0.612,
      "grad_norm": 0.2758779818206466,
      "learning_rate": 4.291747400636009e-06,
      "loss": 0.6062,
      "step": 153
    },
    {
      "epoch": 0.616,
      "grad_norm": 0.29572303953208817,
      "learning_rate": 4.279798683986084e-06,
      "loss": 0.605,
      "step": 154
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.28194612739384267,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.6078,
      "step": 155
    },
    {
      "epoch": 0.624,
      "grad_norm": 0.28615376291544004,
      "learning_rate": 4.255652768767619e-06,
      "loss": 0.6319,
      "step": 156
    },
    {
      "epoch": 0.628,
      "grad_norm": 0.26959219285273633,
      "learning_rate": 4.243456696426415e-06,
      "loss": 0.5968,
      "step": 157
    },
    {
      "epoch": 0.632,
      "grad_norm": 0.27878753771339543,
      "learning_rate": 4.2311793047988145e-06,
      "loss": 0.6214,
      "step": 158
    },
    {
      "epoch": 0.636,
      "grad_norm": 0.2779844282953486,
      "learning_rate": 4.218821166533813e-06,
      "loss": 0.5964,
      "step": 159
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.2767287929857217,
      "learning_rate": 4.206382858046636e-06,
      "loss": 0.6187,
      "step": 160
    },
    {
      "epoch": 0.644,
      "grad_norm": 0.2652936251998452,
      "learning_rate": 4.193864959491853e-06,
      "loss": 0.5897,
      "step": 161
    },
    {
      "epoch": 0.648,
      "grad_norm": 0.26227694980471933,
      "learning_rate": 4.181268054736319e-06,
      "loss": 0.6107,
      "step": 162
    },
    {
      "epoch": 0.652,
      "grad_norm": 0.2690441499487734,
      "learning_rate": 4.16859273133194e-06,
      "loss": 0.6012,
      "step": 163
    },
    {
      "epoch": 0.656,
      "grad_norm": 0.26934906424793176,
      "learning_rate": 4.15583958048827e-06,
      "loss": 0.6086,
      "step": 164
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.2727839454931186,
      "learning_rate": 4.143009197044932e-06,
      "loss": 0.6156,
      "step": 165
    },
    {
      "epoch": 0.664,
      "grad_norm": 0.2767715664709689,
      "learning_rate": 4.130102179443877e-06,
      "loss": 0.607,
      "step": 166
    },
    {
      "epoch": 0.668,
      "grad_norm": 0.27462389864805775,
      "learning_rate": 4.117119129701468e-06,
      "loss": 0.598,
      "step": 167
    },
    {
      "epoch": 0.672,
      "grad_norm": 0.28124958800487015,
      "learning_rate": 4.104060653380403e-06,
      "loss": 0.6174,
      "step": 168
    },
    {
      "epoch": 0.676,
      "grad_norm": 0.26867080247614167,
      "learning_rate": 4.090927359561469e-06,
      "loss": 0.6222,
      "step": 169
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.27329020109654967,
      "learning_rate": 4.077719860815132e-06,
      "loss": 0.6174,
      "step": 170
    },
    {
      "epoch": 0.684,
      "grad_norm": 0.2598239429892548,
      "learning_rate": 4.064438773172966e-06,
      "loss": 0.5949,
      "step": 171
    },
    {
      "epoch": 0.688,
      "grad_norm": 0.26610910407219807,
      "learning_rate": 4.051084716098921e-06,
      "loss": 0.5876,
      "step": 172
    },
    {
      "epoch": 0.692,
      "grad_norm": 0.28832248653224085,
      "learning_rate": 4.037658312460424e-06,
      "loss": 0.6038,
      "step": 173
    },
    {
      "epoch": 0.696,
      "grad_norm": 0.27995126745782395,
      "learning_rate": 4.024160188499337e-06,
      "loss": 0.6024,
      "step": 174
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2708024464743442,
      "learning_rate": 4.010590973802737e-06,
      "loss": 0.6166,
      "step": 175
    },
    {
      "epoch": 0.704,
      "grad_norm": 0.27379773644645394,
      "learning_rate": 3.996951301273556e-06,
      "loss": 0.6172,
      "step": 176
    },
    {
      "epoch": 0.708,
      "grad_norm": 0.2704403624062539,
      "learning_rate": 3.983241807101064e-06,
      "loss": 0.5848,
      "step": 177
    },
    {
      "epoch": 0.712,
      "grad_norm": 0.26367325554187204,
      "learning_rate": 3.969463130731183e-06,
      "loss": 0.6084,
      "step": 178
    },
    {
      "epoch": 0.716,
      "grad_norm": 0.2714449492216179,
      "learning_rate": 3.955615914836678e-06,
      "loss": 0.6067,
      "step": 179
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.27396192782433526,
      "learning_rate": 3.941700805287169e-06,
      "loss": 0.6049,
      "step": 180
    },
    {
      "epoch": 0.724,
      "grad_norm": 0.2712108680127688,
      "learning_rate": 3.927718451119009e-06,
      "loss": 0.5981,
      "step": 181
    },
    {
      "epoch": 0.728,
      "grad_norm": 0.27016877733602884,
      "learning_rate": 3.913669504505015e-06,
      "loss": 0.6148,
      "step": 182
    },
    {
      "epoch": 0.732,
      "grad_norm": 0.2986908827790219,
      "learning_rate": 3.8995546207240455e-06,
      "loss": 0.6293,
      "step": 183
    },
    {
      "epoch": 0.736,
      "grad_norm": 0.27281610268420575,
      "learning_rate": 3.8853744581304376e-06,
      "loss": 0.5937,
      "step": 184
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.28387181164952147,
      "learning_rate": 3.871129678123297e-06,
      "loss": 0.6098,
      "step": 185
    },
    {
      "epoch": 0.744,
      "grad_norm": 0.2740312224605285,
      "learning_rate": 3.856820945115655e-06,
      "loss": 0.6078,
      "step": 186
    },
    {
      "epoch": 0.748,
      "grad_norm": 0.2662514099930545,
      "learning_rate": 3.84244892650347e-06,
      "loss": 0.6254,
      "step": 187
    },
    {
      "epoch": 0.752,
      "grad_norm": 0.26802564374459203,
      "learning_rate": 3.828014292634508e-06,
      "loss": 0.6121,
      "step": 188
    },
    {
      "epoch": 0.756,
      "grad_norm": 0.28248517647364846,
      "learning_rate": 3.813517716777069e-06,
      "loss": 0.6202,
      "step": 189
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.272622897496479,
      "learning_rate": 3.798959875088584e-06,
      "loss": 0.5901,
      "step": 190
    },
    {
      "epoch": 0.764,
      "grad_norm": 0.27197490333330376,
      "learning_rate": 3.7843414465840823e-06,
      "loss": 0.5856,
      "step": 191
    },
    {
      "epoch": 0.768,
      "grad_norm": 0.26663311683845875,
      "learning_rate": 3.769663113104516e-06,
      "loss": 0.5907,
      "step": 192
    },
    {
      "epoch": 0.772,
      "grad_norm": 0.2714585682015405,
      "learning_rate": 3.7549255592849575e-06,
      "loss": 0.6072,
      "step": 193
    },
    {
      "epoch": 0.776,
      "grad_norm": 0.2766267849608307,
      "learning_rate": 3.7401294725226707e-06,
      "loss": 0.6158,
      "step": 194
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.26291754258948374,
      "learning_rate": 3.7252755429450437e-06,
      "loss": 0.5921,
      "step": 195
    },
    {
      "epoch": 0.784,
      "grad_norm": 0.26530027759256725,
      "learning_rate": 3.7103644633774015e-06,
      "loss": 0.5841,
      "step": 196
    },
    {
      "epoch": 0.788,
      "grad_norm": 0.26634011298693916,
      "learning_rate": 3.695396929310693e-06,
      "loss": 0.6147,
      "step": 197
    },
    {
      "epoch": 0.792,
      "grad_norm": 0.26354297421036926,
      "learning_rate": 3.680373638869047e-06,
      "loss": 0.6061,
      "step": 198
    },
    {
      "epoch": 0.796,
      "grad_norm": 0.2738313781435172,
      "learning_rate": 3.665295292777214e-06,
      "loss": 0.5903,
      "step": 199
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.27041603557150606,
      "learning_rate": 3.650162594327881e-06,
      "loss": 0.6216,
      "step": 200
    },
    {
      "epoch": 0.804,
      "grad_norm": 0.29233209893761647,
      "learning_rate": 3.634976249348867e-06,
      "loss": 0.6221,
      "step": 201
    },
    {
      "epoch": 0.808,
      "grad_norm": 0.28236083977418097,
      "learning_rate": 3.6197369661702052e-06,
      "loss": 0.6048,
      "step": 202
    },
    {
      "epoch": 0.812,
      "grad_norm": 0.2610570975246162,
      "learning_rate": 3.604445455591099e-06,
      "loss": 0.586,
      "step": 203
    },
    {
      "epoch": 0.816,
      "grad_norm": 0.27792495585124566,
      "learning_rate": 3.589102430846773e-06,
      "loss": 0.6052,
      "step": 204
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.27390708264043134,
      "learning_rate": 3.5737086075752054e-06,
      "loss": 0.5968,
      "step": 205
    },
    {
      "epoch": 0.824,
      "grad_norm": 0.26341409551542055,
      "learning_rate": 3.5582647037837446e-06,
      "loss": 0.6128,
      "step": 206
    },
    {
      "epoch": 0.828,
      "grad_norm": 0.2659397773506794,
      "learning_rate": 3.5427714398156267e-06,
      "loss": 0.6171,
      "step": 207
    },
    {
      "epoch": 0.832,
      "grad_norm": 0.278302186061793,
      "learning_rate": 3.527229538316371e-06,
      "loss": 0.6001,
      "step": 208
    },
    {
      "epoch": 0.836,
      "grad_norm": 0.27935693059901906,
      "learning_rate": 3.5116397242000748e-06,
      "loss": 0.5915,
      "step": 209
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.2681762304000699,
      "learning_rate": 3.4960027246156043e-06,
      "loss": 0.5982,
      "step": 210
    },
    {
      "epoch": 0.844,
      "grad_norm": 0.26833511905783713,
      "learning_rate": 3.480319268912676e-06,
      "loss": 0.5823,
      "step": 211
    },
    {
      "epoch": 0.848,
      "grad_norm": 0.27282574698411466,
      "learning_rate": 3.4645900886078388e-06,
      "loss": 0.6098,
      "step": 212
    },
    {
      "epoch": 0.852,
      "grad_norm": 0.2841325964241835,
      "learning_rate": 3.448815917350355e-06,
      "loss": 0.6054,
      "step": 213
    },
    {
      "epoch": 0.856,
      "grad_norm": 0.27811416712297765,
      "learning_rate": 3.432997490887979e-06,
      "loss": 0.6071,
      "step": 214
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.26897184119138856,
      "learning_rate": 3.417135547032642e-06,
      "loss": 0.612,
      "step": 215
    },
    {
      "epoch": 0.864,
      "grad_norm": 0.2752083088143504,
      "learning_rate": 3.4012308256260366e-06,
      "loss": 0.6189,
      "step": 216
    },
    {
      "epoch": 0.868,
      "grad_norm": 0.27146348269262077,
      "learning_rate": 3.385284068505113e-06,
      "loss": 0.5914,
      "step": 217
    },
    {
      "epoch": 0.872,
      "grad_norm": 0.2616553689610195,
      "learning_rate": 3.369296019467473e-06,
      "loss": 0.5935,
      "step": 218
    },
    {
      "epoch": 0.876,
      "grad_norm": 0.27439799989957114,
      "learning_rate": 3.3532674242366764e-06,
      "loss": 0.5815,
      "step": 219
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.27322752130009204,
      "learning_rate": 3.3371990304274654e-06,
      "loss": 0.593,
      "step": 220
    },
    {
      "epoch": 0.884,
      "grad_norm": 0.2776915569429837,
      "learning_rate": 3.3210915875108895e-06,
      "loss": 0.636,
      "step": 221
    },
    {
      "epoch": 0.888,
      "grad_norm": 0.27021599999486623,
      "learning_rate": 3.304945846779346e-06,
      "loss": 0.618,
      "step": 222
    },
    {
      "epoch": 0.892,
      "grad_norm": 0.27748742860539916,
      "learning_rate": 3.2887625613115427e-06,
      "loss": 0.5937,
      "step": 223
    },
    {
      "epoch": 0.896,
      "grad_norm": 0.28302784990496294,
      "learning_rate": 3.272542485937369e-06,
      "loss": 0.6093,
      "step": 224
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.2759446162478654,
      "learning_rate": 3.25628637720269e-06,
      "loss": 0.6261,
      "step": 225
    },
    {
      "epoch": 0.904,
      "grad_norm": 0.2794223898275526,
      "learning_rate": 3.239994993334059e-06,
      "loss": 0.6098,
      "step": 226
    },
    {
      "epoch": 0.908,
      "grad_norm": 0.2863521541541822,
      "learning_rate": 3.2236690942033523e-06,
      "loss": 0.6122,
      "step": 227
    },
    {
      "epoch": 0.912,
      "grad_norm": 0.2810413939687701,
      "learning_rate": 3.207309441292325e-06,
      "loss": 0.6193,
      "step": 228
    },
    {
      "epoch": 0.916,
      "grad_norm": 0.2686724574589154,
      "learning_rate": 3.1909167976570977e-06,
      "loss": 0.5847,
      "step": 229
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.27668513115261734,
      "learning_rate": 3.174491927892561e-06,
      "loss": 0.6083,
      "step": 230
    },
    {
      "epoch": 0.924,
      "grad_norm": 0.2838278304951842,
      "learning_rate": 3.158035598096715e-06,
      "loss": 0.597,
      "step": 231
    },
    {
      "epoch": 0.928,
      "grad_norm": 0.2702238327135827,
      "learning_rate": 3.1415485758349344e-06,
      "loss": 0.5884,
      "step": 232
    },
    {
      "epoch": 0.932,
      "grad_norm": 0.2677368696260293,
      "learning_rate": 3.1250316301041727e-06,
      "loss": 0.5835,
      "step": 233
    },
    {
      "epoch": 0.936,
      "grad_norm": 0.2848905107931979,
      "learning_rate": 3.1084855312970897e-06,
      "loss": 0.6255,
      "step": 234
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.2788843417615313,
      "learning_rate": 3.091911051166117e-06,
      "loss": 0.6215,
      "step": 235
    },
    {
      "epoch": 0.944,
      "grad_norm": 0.27680715854768223,
      "learning_rate": 3.0753089627874668e-06,
      "loss": 0.6022,
      "step": 236
    },
    {
      "epoch": 0.948,
      "grad_norm": 0.26897413893634964,
      "learning_rate": 3.0586800405250677e-06,
      "loss": 0.6194,
      "step": 237
    },
    {
      "epoch": 0.952,
      "grad_norm": 0.2657249388294363,
      "learning_rate": 3.0420250599944525e-06,
      "loss": 0.5884,
      "step": 238
    },
    {
      "epoch": 0.956,
      "grad_norm": 0.26576882569871957,
      "learning_rate": 3.0253447980265754e-06,
      "loss": 0.5949,
      "step": 239
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2754408264706426,
      "learning_rate": 3.0086400326315853e-06,
      "loss": 0.5747,
      "step": 240
    },
    {
      "epoch": 0.964,
      "grad_norm": 0.26333215813151695,
      "learning_rate": 2.9919115429625295e-06,
      "loss": 0.6035,
      "step": 241
    },
    {
      "epoch": 0.968,
      "grad_norm": 0.2687016604382336,
      "learning_rate": 2.9751601092790185e-06,
      "loss": 0.6057,
      "step": 242
    },
    {
      "epoch": 0.972,
      "grad_norm": 0.28837866031127346,
      "learning_rate": 2.958386512910831e-06,
      "loss": 0.5737,
      "step": 243
    },
    {
      "epoch": 0.976,
      "grad_norm": 0.27805411098388116,
      "learning_rate": 2.941591536221469e-06,
      "loss": 0.6022,
      "step": 244
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.27696163335286905,
      "learning_rate": 2.924775962571667e-06,
      "loss": 0.6081,
      "step": 245
    },
    {
      "epoch": 0.984,
      "grad_norm": 0.28312029387331156,
      "learning_rate": 2.907940576282856e-06,
      "loss": 0.6178,
      "step": 246
    },
    {
      "epoch": 0.988,
      "grad_norm": 0.2516197979856304,
      "learning_rate": 2.8910861626005774e-06,
      "loss": 0.5812,
      "step": 247
    },
    {
      "epoch": 0.992,
      "grad_norm": 0.25765553700425475,
      "learning_rate": 2.8742135076578608e-06,
      "loss": 0.5868,
      "step": 248
    },
    {
      "epoch": 0.996,
      "grad_norm": 0.27540024093918797,
      "learning_rate": 2.857323398438554e-06,
      "loss": 0.5976,
      "step": 249
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.2637219036362386,
      "learning_rate": 2.840416622740617e-06,
      "loss": 0.6106,
      "step": 250
    }
  ],
  "logging_steps": 1,
  "max_steps": 500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 125,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.0702370831938355e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|
|