{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.518549993730757,
  "global_step": 109000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.993034174340685e-05,
      "loss": 2.3834,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.986068348681369e-05,
      "loss": 2.3564,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.979102523022054e-05,
      "loss": 2.3619,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.972136697362739e-05,
      "loss": 2.3887,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.965170871703423e-05,
      "loss": 2.3675,
      "step": 2500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.958205046044108e-05,
      "loss": 2.3616,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.951239220384793e-05,
      "loss": 2.3612,
      "step": 3500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.944273394725477e-05,
      "loss": 2.3387,
      "step": 4000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.937307569066162e-05,
      "loss": 2.3691,
      "step": 4500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9303417434068464e-05,
      "loss": 2.3528,
      "step": 5000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.923375917747531e-05,
      "loss": 2.3344,
      "step": 5500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.916410092088216e-05,
      "loss": 2.3454,
      "step": 6000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9094442664289e-05,
      "loss": 2.3597,
      "step": 6500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9024784407695844e-05,
      "loss": 2.3215,
      "step": 7000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8955126151102695e-05,
      "loss": 2.3358,
      "step": 7500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.888546789450954e-05,
      "loss": 2.3134,
      "step": 8000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.881580963791638e-05,
      "loss": 2.3409,
      "step": 8500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.874615138132323e-05,
      "loss": 2.3431,
      "step": 9000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8676493124730075e-05,
      "loss": 2.3337,
      "step": 9500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.8606834868136925e-05,
      "loss": 2.3242,
      "step": 10000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.853717661154377e-05,
      "loss": 2.3335,
      "step": 10500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.846751835495061e-05,
      "loss": 2.3331,
      "step": 11000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.839786009835746e-05,
      "loss": 2.3122,
      "step": 11500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.8328201841764306e-05,
      "loss": 2.3356,
      "step": 12000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.825854358517115e-05,
      "loss": 2.3029,
      "step": 12500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.8188885328578e-05,
      "loss": 2.2891,
      "step": 13000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.811922707198484e-05,
      "loss": 2.3112,
      "step": 13500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.804956881539169e-05,
      "loss": 2.3219,
      "step": 14000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.797991055879854e-05,
      "loss": 2.3077,
      "step": 14500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.791025230220538e-05,
      "loss": 2.3153,
      "step": 15000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.784059404561223e-05,
      "loss": 2.2975,
      "step": 15500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.7770935789019074e-05,
      "loss": 2.3129,
      "step": 16000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.770127753242592e-05,
      "loss": 2.3149,
      "step": 16500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.763161927583277e-05,
      "loss": 2.3233,
      "step": 17000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.756196101923962e-05,
      "loss": 2.2995,
      "step": 17500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.7492302762646454e-05,
      "loss": 2.3327,
      "step": 18000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.7422644506053305e-05,
      "loss": 2.2787,
      "step": 18500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.7352986249460155e-05,
      "loss": 2.2952,
      "step": 19000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.7283327992867e-05,
      "loss": 2.2961,
      "step": 19500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.721366973627384e-05,
      "loss": 2.2844,
      "step": 20000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.714401147968069e-05,
      "loss": 2.3003,
      "step": 20500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.7074353223087535e-05,
      "loss": 2.2918,
      "step": 21000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.700469496649438e-05,
      "loss": 2.2911,
      "step": 21500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.693503670990122e-05,
      "loss": 2.3298,
      "step": 22000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.686537845330807e-05,
      "loss": 2.288,
      "step": 22500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.679572019671492e-05,
      "loss": 2.3024,
      "step": 23000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.672606194012176e-05,
      "loss": 2.3144,
      "step": 23500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.665640368352861e-05,
      "loss": 2.2914,
      "step": 24000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.658674542693546e-05,
      "loss": 2.3032,
      "step": 24500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.65170871703423e-05,
      "loss": 2.285,
      "step": 25000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.644742891374915e-05,
      "loss": 2.3003,
      "step": 25500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.6377770657156e-05,
      "loss": 2.3186,
      "step": 26000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.630811240056284e-05,
      "loss": 2.3173,
      "step": 26500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.623845414396969e-05,
      "loss": 2.2682,
      "step": 27000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.6168795887376534e-05,
      "loss": 2.2887,
      "step": 27500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.609913763078338e-05,
      "loss": 2.3012,
      "step": 28000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.602947937419023e-05,
      "loss": 2.2775,
      "step": 28500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.595982111759707e-05,
      "loss": 2.2973,
      "step": 29000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.5890162861003915e-05,
      "loss": 2.2992,
      "step": 29500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.5820504604410765e-05,
      "loss": 2.2588,
      "step": 30000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.575084634781761e-05,
      "loss": 2.2797,
      "step": 30500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.568118809122445e-05,
      "loss": 2.2706,
      "step": 31000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.56115298346313e-05,
      "loss": 2.3081,
      "step": 31500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.5541871578038145e-05,
      "loss": 2.2698,
      "step": 32000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.5472213321444996e-05,
      "loss": 2.2789,
      "step": 32500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.540255506485184e-05,
      "loss": 2.3017,
      "step": 33000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.533289680825868e-05,
      "loss": 2.2938,
      "step": 33500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.526323855166553e-05,
      "loss": 2.3085,
      "step": 34000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.519358029507238e-05,
      "loss": 2.3055,
      "step": 34500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.512392203847922e-05,
      "loss": 2.2903,
      "step": 35000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.505426378188607e-05,
      "loss": 2.2923,
      "step": 35500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.498460552529291e-05,
      "loss": 2.2878,
      "step": 36000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.491494726869976e-05,
      "loss": 2.2875,
      "step": 36500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.484528901210661e-05,
      "loss": 2.2575,
      "step": 37000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.477563075551345e-05,
      "loss": 2.3053,
      "step": 37500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.47059724989203e-05,
      "loss": 2.2795,
      "step": 38000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.4636314242327144e-05,
      "loss": 2.2766,
      "step": 38500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.456665598573399e-05,
      "loss": 2.2476,
      "step": 39000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.449699772914084e-05,
      "loss": 2.3061,
      "step": 39500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.442733947254769e-05,
      "loss": 2.2821,
      "step": 40000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.4357681215954525e-05,
      "loss": 2.282,
      "step": 40500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.4288022959361375e-05,
      "loss": 2.276,
      "step": 41000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.4218364702768225e-05,
      "loss": 2.2973,
      "step": 41500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.414870644617507e-05,
      "loss": 2.2998,
      "step": 42000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.407904818958191e-05,
      "loss": 2.271,
      "step": 42500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.400938993298876e-05,
      "loss": 2.268,
      "step": 43000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.3939731676395606e-05,
      "loss": 2.2975,
      "step": 43500
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.387007341980245e-05,
      "loss": 2.2643,
      "step": 44000
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.38004151632093e-05,
      "loss": 2.3335,
      "step": 44500
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.373075690661614e-05,
      "loss": 2.3195,
      "step": 45000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.366109865002299e-05,
      "loss": 2.3385,
      "step": 45500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.3591440393429836e-05,
      "loss": 2.3292,
      "step": 46000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.352178213683668e-05,
      "loss": 2.329,
      "step": 46500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.345212388024353e-05,
      "loss": 2.331,
      "step": 47000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.3382465623650374e-05,
      "loss": 2.3483,
      "step": 47500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.331280736705722e-05,
      "loss": 2.3124,
      "step": 48000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.324314911046407e-05,
      "loss": 2.3102,
      "step": 48500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.317349085387091e-05,
      "loss": 2.3151,
      "step": 49000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.310383259727776e-05,
      "loss": 2.323,
      "step": 49500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.3034174340684604e-05,
      "loss": 2.3174,
      "step": 50000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.296451608409145e-05,
      "loss": 2.3188,
      "step": 50500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.28948578274983e-05,
      "loss": 2.3054,
      "step": 51000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.282519957090514e-05,
      "loss": 2.3133,
      "step": 51500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.2755541314311985e-05,
      "loss": 2.3224,
      "step": 52000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.2685883057718835e-05,
      "loss": 2.3354,
      "step": 52500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.261622480112568e-05,
      "loss": 2.3386,
      "step": 53000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.254656654453252e-05,
      "loss": 2.3276,
      "step": 53500
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.247690828793937e-05,
      "loss": 2.3057,
      "step": 54000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.2407250031346216e-05,
      "loss": 2.2921,
      "step": 54500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.2337591774753066e-05,
      "loss": 2.3185,
      "step": 55000
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.226793351815991e-05,
      "loss": 2.3046,
      "step": 55500
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.219827526156675e-05,
      "loss": 2.3018,
      "step": 56000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.21286170049736e-05,
      "loss": 2.2825,
      "step": 56500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.2058958748380446e-05,
      "loss": 2.3068,
      "step": 57000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.198930049178729e-05,
      "loss": 2.3115,
      "step": 57500
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.191964223519414e-05,
      "loss": 2.3247,
      "step": 58000
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.184998397860099e-05,
      "loss": 2.2919,
      "step": 58500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.178032572200783e-05,
      "loss": 2.345,
      "step": 59000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.171066746541468e-05,
      "loss": 2.298,
      "step": 59500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.164100920882153e-05,
      "loss": 2.3093,
      "step": 60000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.157135095222837e-05,
      "loss": 2.3259,
      "step": 60500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.1501692695635214e-05,
      "loss": 2.3174,
      "step": 61000
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.1432034439042064e-05,
      "loss": 2.3268,
      "step": 61500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.136237618244891e-05,
      "loss": 2.2997,
      "step": 62000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.129271792585576e-05,
      "loss": 2.3239,
      "step": 62500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.1223059669262595e-05,
      "loss": 2.3302,
      "step": 63000
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.1153401412669445e-05,
      "loss": 2.322,
      "step": 63500
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.1083743156076295e-05,
      "loss": 2.3216,
      "step": 64000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.101408489948313e-05,
      "loss": 2.319,
      "step": 64500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.094442664288998e-05,
      "loss": 2.304,
      "step": 65000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.087476838629683e-05,
      "loss": 2.2772,
      "step": 65500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.0805110129703676e-05,
      "loss": 2.3205,
      "step": 66000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.073545187311052e-05,
      "loss": 2.3053,
      "step": 66500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.066579361651737e-05,
      "loss": 2.3196,
      "step": 67000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.059613535992421e-05,
      "loss": 2.3245,
      "step": 67500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.052647710333106e-05,
      "loss": 2.2804,
      "step": 68000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.0456818846737907e-05,
      "loss": 2.2981,
      "step": 68500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.038716059014475e-05,
      "loss": 2.307,
      "step": 69000
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.03175023335516e-05,
      "loss": 2.3003,
      "step": 69500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.0247844076958444e-05,
      "loss": 2.2922,
      "step": 70000
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.017818582036529e-05,
      "loss": 2.351,
      "step": 70500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.010852756377214e-05,
      "loss": 2.3629,
      "step": 71000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.003886930717898e-05,
      "loss": 2.3658,
      "step": 71500
    },
    {
      "epoch": 1.0,
      "eval_gen_len": 19.0,
      "eval_loss": 2.0120694637298584,
      "eval_rouge1": 13.694,
      "eval_rouge2": 3.0324,
      "eval_rougeL": 13.5592,
      "eval_rougeLsum": 13.6473,
      "eval_runtime": 1519.7773,
      "eval_samples_per_second": 8.796,
      "eval_steps_per_second": 2.199,
      "step": 71779
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.9969211050585824e-05,
      "loss": 2.3626,
      "step": 72000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.9899552793992674e-05,
      "loss": 2.3326,
      "step": 72500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.982989453739952e-05,
      "loss": 2.33,
      "step": 73000
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.976023628080637e-05,
      "loss": 2.3426,
      "step": 73500
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.969057802421321e-05,
      "loss": 2.3469,
      "step": 74000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.9620919767620055e-05,
      "loss": 2.3075,
      "step": 74500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.9551261511026905e-05,
      "loss": 2.3124,
      "step": 75000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.9481603254433755e-05,
      "loss": 2.344,
      "step": 75500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.941194499784059e-05,
      "loss": 2.3329,
      "step": 76000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.934228674124744e-05,
      "loss": 2.3163,
      "step": 76500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.9272628484654286e-05,
      "loss": 2.3322,
      "step": 77000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.9202970228061136e-05,
      "loss": 2.3133,
      "step": 77500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.913331197146798e-05,
      "loss": 2.3388,
      "step": 78000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.906365371487482e-05,
      "loss": 2.289,
      "step": 78500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.899399545828167e-05,
      "loss": 2.2948,
      "step": 79000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.8924337201688517e-05,
      "loss": 2.3597,
      "step": 79500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.885467894509536e-05,
      "loss": 2.3136,
      "step": 80000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.878502068850221e-05,
      "loss": 2.3143,
      "step": 80500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.871536243190906e-05,
      "loss": 2.3085,
      "step": 81000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.86457041753159e-05,
      "loss": 2.3485,
      "step": 81500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.857604591872275e-05,
      "loss": 2.2972,
      "step": 82000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.85063876621296e-05,
      "loss": 2.3323,
      "step": 82500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.843672940553644e-05,
      "loss": 2.3263,
      "step": 83000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.8367071148943284e-05,
      "loss": 2.2894,
      "step": 83500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.8297412892350135e-05,
      "loss": 2.3156,
      "step": 84000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.822775463575698e-05,
      "loss": 2.3401,
      "step": 84500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.815809637916383e-05,
      "loss": 2.3034,
      "step": 85000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.808843812257067e-05,
      "loss": 2.3141,
      "step": 85500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.8018779865977515e-05,
      "loss": 2.3265,
      "step": 86000
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.7949121609384365e-05,
      "loss": 2.3211,
      "step": 86500
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.787946335279121e-05,
      "loss": 2.3137,
      "step": 87000
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.780980509619805e-05,
      "loss": 2.3216,
      "step": 87500
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.77401468396049e-05,
      "loss": 2.3043,
      "step": 88000
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.7670488583011746e-05,
      "loss": 2.3245,
      "step": 88500
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.760083032641859e-05,
      "loss": 2.3319,
      "step": 89000
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.753117206982544e-05,
      "loss": 2.3025,
      "step": 89500
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.746151381323228e-05,
      "loss": 2.2983,
      "step": 90000
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.739185555663913e-05,
      "loss": 2.3107,
      "step": 90500
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.732219730004598e-05,
      "loss": 2.3137,
      "step": 91000
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.725253904345282e-05,
      "loss": 2.3006,
      "step": 91500
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.718288078685967e-05,
      "loss": 2.3058,
      "step": 92000
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.7113222530266514e-05,
      "loss": 2.3175,
      "step": 92500
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.704356427367336e-05,
      "loss": 2.2991,
      "step": 93000
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.697390601708021e-05,
      "loss": 2.302,
      "step": 93500
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.690424776048705e-05,
      "loss": 2.3089,
      "step": 94000
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.6834589503893894e-05,
      "loss": 2.2957,
      "step": 94500
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.6764931247300745e-05,
      "loss": 2.331,
      "step": 95000
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.669527299070759e-05,
      "loss": 2.3044,
      "step": 95500
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.662561473411444e-05,
      "loss": 2.2887,
      "step": 96000
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.655595647752128e-05,
      "loss": 2.2816,
      "step": 96500
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.6486298220928125e-05,
      "loss": 2.3284,
      "step": 97000
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.6416639964334975e-05,
      "loss": 2.2985,
      "step": 97500
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.6346981707741826e-05,
      "loss": 2.3366,
      "step": 98000
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.627732345114866e-05,
      "loss": 2.3232,
      "step": 98500
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.620766519455551e-05,
      "loss": 2.2867,
      "step": 99000
    },
    {
      "epoch": 1.39,
      "learning_rate": 3.613800693796236e-05,
      "loss": 2.2818,
      "step": 99500
    },
    {
      "epoch": 1.39,
      "learning_rate": 3.60683486813692e-05,
      "loss": 2.3103,
      "step": 100000
    },
    {
      "epoch": 1.4,
      "learning_rate": 3.599869042477605e-05,
      "loss": 2.2815,
      "step": 100500
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.59290321681829e-05,
      "loss": 2.2924,
      "step": 101000
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.585937391158974e-05,
      "loss": 2.3227,
      "step": 101500
    },
    {
      "epoch": 1.42,
      "learning_rate": 3.578971565499659e-05,
      "loss": 2.3177,
      "step": 102000
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.572005739840344e-05,
      "loss": 2.3085,
      "step": 102500
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.565039914181028e-05,
      "loss": 2.2983,
      "step": 103000
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.558074088521713e-05,
      "loss": 2.2718,
      "step": 103500
    },
    {
      "epoch": 1.45,
      "learning_rate": 3.551108262862397e-05,
      "loss": 2.2958,
      "step": 104000
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.544142437203082e-05,
      "loss": 2.3368,
      "step": 104500
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.537176611543767e-05,
      "loss": 2.2821,
      "step": 105000
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.530210785884451e-05,
      "loss": 2.2903,
      "step": 105500
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.5232449602251355e-05,
      "loss": 2.2941,
      "step": 106000
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.5162791345658205e-05,
      "loss": 2.2957,
      "step": 106500
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.509313308906505e-05,
      "loss": 2.2894,
      "step": 107000
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.502347483247189e-05,
      "loss": 2.3138,
      "step": 107500
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.495381657587874e-05,
      "loss": 2.3015,
      "step": 108000
    },
    {
      "epoch": 1.51,
      "learning_rate": 3.4884158319285585e-05,
      "loss": 2.315,
      "step": 108500
    },
    {
      "epoch": 1.52,
      "learning_rate": 3.4814500062692436e-05,
      "loss": 2.2956,
      "step": 109000
    }
  ],
  "max_steps": 358895,
  "num_train_epochs": 5,
  "total_flos": 2.305332748437504e+17,
  "trial_name": null,
  "trial_params": null
}