{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "grad_norm": 2.0647521018981934,
      "learning_rate": 4e-05,
      "loss": 1.1825,
      "step": 1
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.9963816404342651,
      "learning_rate": 8e-05,
      "loss": 1.1448,
      "step": 2
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.82085120677948,
      "learning_rate": 0.00012,
      "loss": 1.1246,
      "step": 3
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.3797991275787354,
      "learning_rate": 0.00016,
      "loss": 0.9428,
      "step": 4
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.0062527656555176,
      "learning_rate": 0.0002,
      "loss": 0.8782,
      "step": 5
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.6854935884475708,
      "learning_rate": 0.00019994532573409262,
      "loss": 0.8182,
      "step": 6
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.6088404059410095,
      "learning_rate": 0.00019978136272187747,
      "loss": 0.7337,
      "step": 7
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.5058557391166687,
      "learning_rate": 0.00019950829025450114,
      "loss": 0.7606,
      "step": 8
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.4383871853351593,
      "learning_rate": 0.00019912640693269752,
      "loss": 0.7934,
      "step": 9
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.3763003349304199,
      "learning_rate": 0.00019863613034027224,
      "loss": 0.7623,
      "step": 10
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.3510531187057495,
      "learning_rate": 0.00019803799658748094,
      "loss": 0.7043,
      "step": 11
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.3534976541996002,
      "learning_rate": 0.0001973326597248006,
      "loss": 0.7702,
      "step": 12
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.33237403631210327,
      "learning_rate": 0.00019652089102773488,
      "loss": 0.7074,
      "step": 13
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.3145230710506439,
      "learning_rate": 0.00019560357815343577,
      "loss": 0.6778,
      "step": 14
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.32996928691864014,
      "learning_rate": 0.00019458172417006347,
      "loss": 0.7032,
      "step": 15
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.32944002747535706,
      "learning_rate": 0.0001934564464599461,
      "loss": 0.7088,
      "step": 16
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.3067278265953064,
      "learning_rate": 0.00019222897549773848,
      "loss": 0.6803,
      "step": 17
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.3252066671848297,
      "learning_rate": 0.00019090065350491626,
      "loss": 0.6947,
      "step": 18
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.31279104948043823,
      "learning_rate": 0.00018947293298207635,
      "loss": 0.6402,
      "step": 19
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.30892765522003174,
      "learning_rate": 0.0001879473751206489,
      "loss": 0.6519,
      "step": 20
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.29156580567359924,
      "learning_rate": 0.00018632564809575742,
      "loss": 0.6747,
      "step": 21
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.2854658365249634,
      "learning_rate": 0.00018460952524209355,
      "loss": 0.6556,
      "step": 22
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.2657647430896759,
      "learning_rate": 0.00018280088311480201,
      "loss": 0.6128,
      "step": 23
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.29050764441490173,
      "learning_rate": 0.00018090169943749476,
      "loss": 0.6674,
      "step": 24
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.29195520281791687,
      "learning_rate": 0.00017891405093963938,
      "loss": 0.63,
      "step": 25
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.30334123969078064,
      "learning_rate": 0.00017684011108568592,
      "loss": 0.5533,
      "step": 26
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.30424660444259644,
      "learning_rate": 0.0001746821476984154,
      "loss": 0.5549,
      "step": 27
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.28369948267936707,
      "learning_rate": 0.00017244252047910892,
      "loss": 0.5415,
      "step": 28
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.2778087258338928,
      "learning_rate": 0.00017012367842724887,
      "loss": 0.5382,
      "step": 29
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.2843495011329651,
      "learning_rate": 0.00016772815716257412,
      "loss": 0.5388,
      "step": 30
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.28178870677948,
      "learning_rate": 0.00016525857615241687,
      "loss": 0.5126,
      "step": 31
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.28643131256103516,
      "learning_rate": 0.0001627176358473537,
      "loss": 0.5167,
      "step": 32
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.28843289613723755,
      "learning_rate": 0.00016010811472830252,
      "loss": 0.5099,
      "step": 33
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 0.28687819838523865,
      "learning_rate": 0.00015743286626829437,
      "loss": 0.5185,
      "step": 34
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.28858163952827454,
      "learning_rate": 0.00015469481581224272,
      "loss": 0.5279,
      "step": 35
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.2918362021446228,
      "learning_rate": 0.00015189695737812152,
      "loss": 0.4788,
      "step": 36
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.291130930185318,
      "learning_rate": 0.00014904235038305083,
      "loss": 0.4856,
      "step": 37
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.2943766415119171,
      "learning_rate": 0.0001461341162978688,
      "loss": 0.4615,
      "step": 38
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.297374427318573,
      "learning_rate": 0.00014317543523384928,
      "loss": 0.4583,
      "step": 39
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.30772677063941956,
      "learning_rate": 0.00014016954246529696,
      "loss": 0.4859,
      "step": 40
    },
    {
      "epoch": 1.6400000000000001,
      "grad_norm": 0.29820892214775085,
      "learning_rate": 0.00013711972489182208,
      "loss": 0.4768,
      "step": 41
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 0.29821571707725525,
      "learning_rate": 0.00013402931744416433,
      "loss": 0.4891,
      "step": 42
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.2966136634349823,
      "learning_rate": 0.00013090169943749476,
      "loss": 0.4794,
      "step": 43
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.3031865358352661,
      "learning_rate": 0.00012774029087618446,
      "loss": 0.4853,
      "step": 44
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.2918758690357208,
      "learning_rate": 0.00012454854871407994,
      "loss": 0.4523,
      "step": 45
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 0.3020419180393219,
      "learning_rate": 0.0001213299630743747,
      "loss": 0.4668,
      "step": 46
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.30048614740371704,
      "learning_rate": 0.000118088053433211,
      "loss": 0.4468,
      "step": 47
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.3022134304046631,
      "learning_rate": 0.0001148263647711842,
      "loss": 0.4636,
      "step": 48
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.3102932572364807,
      "learning_rate": 0.00011154846369695863,
      "loss": 0.4762,
      "step": 49
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.29303210973739624,
      "learning_rate": 0.00010825793454723325,
      "loss": 0.4109,
      "step": 50
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.29739847779273987,
      "learning_rate": 0.00010495837546732224,
      "loss": 0.3787,
      "step": 51
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.29793456196784973,
      "learning_rate": 0.00010165339447663587,
      "loss": 0.3708,
      "step": 52
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.31656354665756226,
      "learning_rate": 9.834660552336415e-05,
      "loss": 0.3855,
      "step": 53
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.30371928215026855,
      "learning_rate": 9.504162453267777e-05,
      "loss": 0.3873,
      "step": 54
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.31064578890800476,
      "learning_rate": 9.174206545276677e-05,
      "loss": 0.3595,
      "step": 55
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.29833751916885376,
      "learning_rate": 8.845153630304139e-05,
      "loss": 0.366,
      "step": 56
    },
    {
      "epoch": 2.2800000000000002,
      "grad_norm": 0.3072323501110077,
      "learning_rate": 8.517363522881579e-05,
      "loss": 0.3621,
      "step": 57
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.29597151279449463,
      "learning_rate": 8.191194656678904e-05,
      "loss": 0.3519,
      "step": 58
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.3137385845184326,
      "learning_rate": 7.867003692562534e-05,
      "loss": 0.3519,
      "step": 59
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.3161297142505646,
      "learning_rate": 7.54514512859201e-05,
      "loss": 0.3502,
      "step": 60
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.3286161720752716,
      "learning_rate": 7.225970912381556e-05,
      "loss": 0.3518,
      "step": 61
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.3324306905269623,
      "learning_rate": 6.909830056250527e-05,
      "loss": 0.357,
      "step": 62
    },
    {
      "epoch": 2.52,
      "grad_norm": 0.3510464131832123,
      "learning_rate": 6.59706825558357e-05,
      "loss": 0.379,
      "step": 63
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.3358787000179291,
      "learning_rate": 6.28802751081779e-05,
      "loss": 0.3399,
      "step": 64
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.330174058675766,
      "learning_rate": 5.983045753470308e-05,
      "loss": 0.3465,
      "step": 65
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.31485724449157715,
      "learning_rate": 5.6824564766150726e-05,
      "loss": 0.3249,
      "step": 66
    },
    {
      "epoch": 2.68,
      "grad_norm": 0.3314495384693146,
      "learning_rate": 5.386588370213124e-05,
      "loss": 0.3565,
      "step": 67
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 0.334344744682312,
      "learning_rate": 5.095764961694922e-05,
      "loss": 0.344,
      "step": 68
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.330770343542099,
      "learning_rate": 4.810304262187852e-05,
      "loss": 0.3424,
      "step": 69
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.32071828842163086,
      "learning_rate": 4.530518418775733e-05,
      "loss": 0.3396,
      "step": 70
    },
    {
      "epoch": 2.84,
      "grad_norm": 0.32281526923179626,
      "learning_rate": 4.256713373170564e-05,
      "loss": 0.343,
      "step": 71
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.3217308223247528,
      "learning_rate": 3.9891885271697496e-05,
      "loss": 0.3142,
      "step": 72
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.3167681097984314,
      "learning_rate": 3.7282364152646297e-05,
      "loss": 0.3351,
      "step": 73
    },
    {
      "epoch": 2.96,
      "grad_norm": 0.3280259072780609,
      "learning_rate": 3.4741423847583134e-05,
      "loss": 0.316,
      "step": 74
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.32448863983154297,
      "learning_rate": 3.227184283742591e-05,
      "loss": 0.3195,
      "step": 75
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.33512407541275024,
      "learning_rate": 2.9876321572751144e-05,
      "loss": 0.2814,
      "step": 76
    },
    {
      "epoch": 3.08,
      "grad_norm": 0.33603280782699585,
      "learning_rate": 2.7557479520891104e-05,
      "loss": 0.2981,
      "step": 77
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.33797961473464966,
      "learning_rate": 2.5317852301584643e-05,
      "loss": 0.293,
      "step": 78
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.3338729739189148,
      "learning_rate": 2.315988891431412e-05,
      "loss": 0.2988,
      "step": 79
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.3124498128890991,
      "learning_rate": 2.1085949060360654e-05,
      "loss": 0.2956,
      "step": 80
    },
    {
      "epoch": 3.24,
      "grad_norm": 0.30723869800567627,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 0.2762,
      "step": 81
    },
    {
      "epoch": 3.2800000000000002,
      "grad_norm": 0.30877774953842163,
      "learning_rate": 1.7199116885197995e-05,
      "loss": 0.2811,
      "step": 82
    },
    {
      "epoch": 3.32,
      "grad_norm": 0.3040075898170471,
      "learning_rate": 1.5390474757906446e-05,
      "loss": 0.2843,
      "step": 83
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.29099854826927185,
      "learning_rate": 1.3674351904242611e-05,
      "loss": 0.2823,
      "step": 84
    },
    {
      "epoch": 3.4,
      "grad_norm": 0.29945293068885803,
      "learning_rate": 1.2052624879351104e-05,
      "loss": 0.3007,
      "step": 85
    },
    {
      "epoch": 3.44,
      "grad_norm": 0.29691964387893677,
      "learning_rate": 1.0527067017923654e-05,
      "loss": 0.2866,
      "step": 86
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.29864245653152466,
      "learning_rate": 9.09934649508375e-06,
      "loss": 0.2946,
      "step": 87
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.29509297013282776,
      "learning_rate": 7.771024502261526e-06,
      "loss": 0.2717,
      "step": 88
    },
    {
      "epoch": 3.56,
      "grad_norm": 0.3103945851325989,
      "learning_rate": 6.543553540053926e-06,
      "loss": 0.287,
      "step": 89
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.2974102795124054,
      "learning_rate": 5.418275829936537e-06,
      "loss": 0.2815,
      "step": 90
    },
    {
      "epoch": 3.64,
      "grad_norm": 0.3041694760322571,
      "learning_rate": 4.3964218465642355e-06,
      "loss": 0.2905,
      "step": 91
    },
    {
      "epoch": 3.68,
      "grad_norm": 0.2940130829811096,
      "learning_rate": 3.4791089722651436e-06,
      "loss": 0.273,
      "step": 92
    },
    {
      "epoch": 3.7199999999999998,
      "grad_norm": 0.2969824969768524,
      "learning_rate": 2.667340275199426e-06,
      "loss": 0.2691,
      "step": 93
    },
    {
      "epoch": 3.76,
      "grad_norm": 0.30998706817626953,
      "learning_rate": 1.9620034125190644e-06,
      "loss": 0.2984,
      "step": 94
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.31238672137260437,
      "learning_rate": 1.3638696597277679e-06,
      "loss": 0.2856,
      "step": 95
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.29781845211982727,
      "learning_rate": 8.735930673024806e-07,
      "loss": 0.2911,
      "step": 96
    },
    {
      "epoch": 3.88,
      "grad_norm": 0.29917433857917786,
      "learning_rate": 4.917097454988584e-07,
      "loss": 0.278,
      "step": 97
    },
    {
      "epoch": 3.92,
      "grad_norm": 0.30273205041885376,
      "learning_rate": 2.1863727812254653e-07,
      "loss": 0.2811,
      "step": 98
    },
    {
      "epoch": 3.96,
      "grad_norm": 0.3171875774860382,
      "learning_rate": 5.467426590739511e-08,
      "loss": 0.2876,
      "step": 99
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.3021162450313568,
      "learning_rate": 0.0,
      "loss": 0.2926,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5000238750511923e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}