{
  "best_metric": 0.7570347785949707,
  "best_model_checkpoint": "beto-mulanSDS-esp-mchoice/checkpoint-95184",
  "epoch": 2.0,
  "global_step": 95184,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.973735081526307e-05, "loss": 1.0962, "step": 500 },
    { "epoch": 0.02, "learning_rate": 4.947470163052614e-05, "loss": 0.9714, "step": 1000 },
    { "epoch": 0.03, "learning_rate": 4.921205244578921e-05, "loss": 0.9612, "step": 1500 },
    { "epoch": 0.04, "learning_rate": 4.894940326105228e-05, "loss": 0.9123, "step": 2000 },
    { "epoch": 0.05, "learning_rate": 4.868675407631535e-05, "loss": 0.8874, "step": 2500 },
    { "epoch": 0.06, "learning_rate": 4.842410489157842e-05, "loss": 0.8928, "step": 3000 },
    { "epoch": 0.07, "learning_rate": 4.816145570684149e-05, "loss": 0.8706, "step": 3500 },
    { "epoch": 0.08, "learning_rate": 4.7898806522104555e-05, "loss": 0.8714, "step": 4000 },
    { "epoch": 0.09, "learning_rate": 4.7636157337367624e-05, "loss": 0.886, "step": 4500 },
    { "epoch": 0.11, "learning_rate": 4.7373508152630694e-05, "loss": 0.8701, "step": 5000 },
    { "epoch": 0.12, "learning_rate": 4.711085896789377e-05, "loss": 0.841, "step": 5500 },
    { "epoch": 0.13, "learning_rate": 4.684820978315684e-05, "loss": 0.817, "step": 6000 },
    { "epoch": 0.14, "learning_rate": 4.65855605984199e-05, "loss": 0.8312, "step": 6500 },
    { "epoch": 0.15, "learning_rate": 4.632291141368297e-05, "loss": 0.8249, "step": 7000 },
    { "epoch": 0.16, "learning_rate": 4.6060262228946046e-05, "loss": 0.8095, "step": 7500 },
    { "epoch": 0.17, "learning_rate": 4.5797613044209115e-05, "loss": 0.8356, "step": 8000 },
    { "epoch": 0.18, "learning_rate": 4.5534963859472184e-05, "loss": 0.82, "step": 8500 },
    { "epoch": 0.19, "learning_rate": 4.527231467473525e-05, "loss": 0.8038, "step": 9000 },
    { "epoch": 0.2, "learning_rate": 4.500966548999832e-05, "loss": 0.8042, "step": 9500 },
    { "epoch": 0.21, "learning_rate": 4.474701630526139e-05, "loss": 0.7926, "step": 10000 },
    { "epoch": 0.22, "learning_rate": 4.448436712052446e-05, "loss": 0.8045, "step": 10500 },
    { "epoch": 0.23, "learning_rate": 4.422171793578753e-05, "loss": 0.7923, "step": 11000 },
    { "epoch": 0.24, "learning_rate": 4.39590687510506e-05, "loss": 0.8196, "step": 11500 },
    { "epoch": 0.25, "learning_rate": 4.369641956631367e-05, "loss": 0.8151, "step": 12000 },
    { "epoch": 0.26, "learning_rate": 4.343377038157674e-05, "loss": 0.7516, "step": 12500 },
    { "epoch": 0.27, "learning_rate": 4.3171121196839806e-05, "loss": 0.7586, "step": 13000 },
    { "epoch": 0.28, "learning_rate": 4.2908472012102875e-05, "loss": 0.7782, "step": 13500 },
    { "epoch": 0.29, "learning_rate": 4.2645822827365944e-05, "loss": 0.7728, "step": 14000 },
    { "epoch": 0.3, "learning_rate": 4.2383173642629014e-05, "loss": 0.782, "step": 14500 },
    { "epoch": 0.32, "learning_rate": 4.212052445789208e-05, "loss": 0.7804, "step": 15000 },
    { "epoch": 0.33, "learning_rate": 4.185787527315516e-05, "loss": 0.7705, "step": 15500 },
    { "epoch": 0.34, "learning_rate": 4.159522608841822e-05, "loss": 0.7687, "step": 16000 },
    { "epoch": 0.35, "learning_rate": 4.133257690368129e-05, "loss": 0.772, "step": 16500 },
    { "epoch": 0.36, "learning_rate": 4.106992771894436e-05, "loss": 0.7656, "step": 17000 },
    { "epoch": 0.37, "learning_rate": 4.0807278534207435e-05, "loss": 0.7603, "step": 17500 },
    { "epoch": 0.38, "learning_rate": 4.0544629349470504e-05, "loss": 0.7927, "step": 18000 },
    { "epoch": 0.39, "learning_rate": 4.0281980164733566e-05, "loss": 0.7649, "step": 18500 },
    { "epoch": 0.4, "learning_rate": 4.0019330979996636e-05, "loss": 0.7714, "step": 19000 },
    { "epoch": 0.41, "learning_rate": 3.975668179525971e-05, "loss": 0.7505, "step": 19500 },
    { "epoch": 0.42, "learning_rate": 3.949403261052278e-05, "loss": 0.7597, "step": 20000 },
    { "epoch": 0.43, "learning_rate": 3.923138342578585e-05, "loss": 0.752, "step": 20500 },
    { "epoch": 0.44, "learning_rate": 3.896873424104891e-05, "loss": 0.744, "step": 21000 },
    { "epoch": 0.45, "learning_rate": 3.870608505631199e-05, "loss": 0.7451, "step": 21500 },
    { "epoch": 0.46, "learning_rate": 3.844343587157506e-05, "loss": 0.7809, "step": 22000 },
    { "epoch": 0.47, "learning_rate": 3.8180786686838126e-05, "loss": 0.7694, "step": 22500 },
    { "epoch": 0.48, "learning_rate": 3.7918137502101195e-05, "loss": 0.7728, "step": 23000 },
    { "epoch": 0.49, "learning_rate": 3.7655488317364264e-05, "loss": 0.7607, "step": 23500 },
    { "epoch": 0.5, "learning_rate": 3.7392839132627334e-05, "loss": 0.7481, "step": 24000 },
    { "epoch": 0.51, "learning_rate": 3.71301899478904e-05, "loss": 0.7438, "step": 24500 },
    { "epoch": 0.53, "learning_rate": 3.686754076315347e-05, "loss": 0.7502, "step": 25000 },
    { "epoch": 0.54, "learning_rate": 3.660489157841655e-05, "loss": 0.7354, "step": 25500 },
    { "epoch": 0.55, "learning_rate": 3.634224239367961e-05, "loss": 0.7971, "step": 26000 },
    { "epoch": 0.56, "learning_rate": 3.607959320894268e-05, "loss": 0.7605, "step": 26500 },
    { "epoch": 0.57, "learning_rate": 3.581694402420575e-05, "loss": 0.7513, "step": 27000 },
    { "epoch": 0.58, "learning_rate": 3.5554294839468824e-05, "loss": 0.7515, "step": 27500 },
    { "epoch": 0.59, "learning_rate": 3.529164565473189e-05, "loss": 0.7228, "step": 28000 },
    { "epoch": 0.6, "learning_rate": 3.5028996469994956e-05, "loss": 0.7596, "step": 28500 },
    { "epoch": 0.61, "learning_rate": 3.4766347285258025e-05, "loss": 0.7645, "step": 29000 },
    { "epoch": 0.62, "learning_rate": 3.45036981005211e-05, "loss": 0.7634, "step": 29500 },
    { "epoch": 0.63, "learning_rate": 3.424104891578417e-05, "loss": 0.7581, "step": 30000 },
    { "epoch": 0.64, "learning_rate": 3.397839973104724e-05, "loss": 0.7504, "step": 30500 },
    { "epoch": 0.65, "learning_rate": 3.37157505463103e-05, "loss": 0.7309, "step": 31000 },
    { "epoch": 0.66, "learning_rate": 3.345310136157338e-05, "loss": 0.7705, "step": 31500 },
    { "epoch": 0.67, "learning_rate": 3.3190452176836446e-05, "loss": 0.777, "step": 32000 },
    { "epoch": 0.68, "learning_rate": 3.2927802992099515e-05, "loss": 0.753, "step": 32500 },
    { "epoch": 0.69, "learning_rate": 3.2665153807362584e-05, "loss": 0.7707, "step": 33000 },
    { "epoch": 0.7, "learning_rate": 3.2402504622625654e-05, "loss": 0.7327, "step": 33500 },
    { "epoch": 0.71, "learning_rate": 3.213985543788872e-05, "loss": 0.7693, "step": 34000 },
    { "epoch": 0.72, "learning_rate": 3.187720625315179e-05, "loss": 0.7579, "step": 34500 },
    { "epoch": 0.74, "learning_rate": 3.161455706841486e-05, "loss": 0.7581, "step": 35000 },
    { "epoch": 0.75, "learning_rate": 3.135190788367793e-05, "loss": 0.77, "step": 35500 },
    { "epoch": 0.76, "learning_rate": 3.1089258698941e-05, "loss": 0.7263, "step": 36000 },
    { "epoch": 0.77, "learning_rate": 3.082660951420407e-05, "loss": 0.7478, "step": 36500 },
    { "epoch": 0.78, "learning_rate": 3.056396032946714e-05, "loss": 0.7646, "step": 37000 },
    { "epoch": 0.79, "learning_rate": 3.030131114473021e-05, "loss": 0.7408, "step": 37500 },
    { "epoch": 0.8, "learning_rate": 3.0038661959993276e-05, "loss": 0.7101, "step": 38000 },
    { "epoch": 0.81, "learning_rate": 2.9776012775256345e-05, "loss": 0.7487, "step": 38500 },
    { "epoch": 0.82, "learning_rate": 2.9513363590519417e-05, "loss": 0.732, "step": 39000 },
    { "epoch": 0.83, "learning_rate": 2.9250714405782486e-05, "loss": 0.7522, "step": 39500 },
    { "epoch": 0.84, "learning_rate": 2.898806522104556e-05, "loss": 0.7193, "step": 40000 },
    { "epoch": 0.85, "learning_rate": 2.872541603630862e-05, "loss": 0.7343, "step": 40500 },
    { "epoch": 0.86, "learning_rate": 2.8462766851571694e-05, "loss": 0.7295, "step": 41000 },
    { "epoch": 0.87, "learning_rate": 2.8200117666834763e-05, "loss": 0.7265, "step": 41500 },
    { "epoch": 0.88, "learning_rate": 2.7937468482097835e-05, "loss": 0.7143, "step": 42000 },
    { "epoch": 0.89, "learning_rate": 2.7674819297360904e-05, "loss": 0.6877, "step": 42500 },
    { "epoch": 0.9, "learning_rate": 2.741217011262397e-05, "loss": 0.7043, "step": 43000 },
    { "epoch": 0.91, "learning_rate": 2.714952092788704e-05, "loss": 0.7297, "step": 43500 },
    { "epoch": 0.92, "learning_rate": 2.6886871743150112e-05, "loss": 0.7749, "step": 44000 },
    { "epoch": 0.94, "learning_rate": 2.662422255841318e-05, "loss": 0.7227, "step": 44500 },
    { "epoch": 0.95, "learning_rate": 2.636157337367625e-05, "loss": 0.709, "step": 45000 },
    { "epoch": 0.96, "learning_rate": 2.6098924188939316e-05, "loss": 0.7258, "step": 45500 },
    { "epoch": 0.97, "learning_rate": 2.5836275004202388e-05, "loss": 0.6954, "step": 46000 },
    { "epoch": 0.98, "learning_rate": 2.5573625819465457e-05, "loss": 0.7173, "step": 46500 },
    { "epoch": 0.99, "learning_rate": 2.531097663472853e-05, "loss": 0.713, "step": 47000 },
    { "epoch": 1.0, "learning_rate": 2.50483274499916e-05, "loss": 0.7, "step": 47500 },
    { "epoch": 1.0, "eval_accuracy": 0.7327029705047607, "eval_loss": 0.7685355544090271, "eval_runtime": 578.0716, "eval_samples_per_second": 68.607, "eval_steps_per_second": 11.435, "step": 47592 },
    { "epoch": 1.01, "learning_rate": 2.4785678265254668e-05, "loss": 0.6245, "step": 48000 },
    { "epoch": 1.02, "learning_rate": 2.4523029080517734e-05, "loss": 0.6892, "step": 48500 },
    { "epoch": 1.03, "learning_rate": 2.4260379895780806e-05, "loss": 0.628, "step": 49000 },
    { "epoch": 1.04, "learning_rate": 2.3997730711043875e-05, "loss": 0.6394, "step": 49500 },
    { "epoch": 1.05, "learning_rate": 2.3735081526306945e-05, "loss": 0.6367, "step": 50000 },
    { "epoch": 1.06, "learning_rate": 2.3472432341570014e-05, "loss": 0.6092, "step": 50500 },
    { "epoch": 1.07, "learning_rate": 2.3209783156833083e-05, "loss": 0.6286, "step": 51000 },
    { "epoch": 1.08, "learning_rate": 2.2947133972096152e-05, "loss": 0.6809, "step": 51500 },
    { "epoch": 1.09, "learning_rate": 2.268448478735922e-05, "loss": 0.6185, "step": 52000 },
    { "epoch": 1.1, "learning_rate": 2.242183560262229e-05, "loss": 0.578, "step": 52500 },
    { "epoch": 1.11, "learning_rate": 2.215918641788536e-05, "loss": 0.6271, "step": 53000 },
    { "epoch": 1.12, "learning_rate": 2.189653723314843e-05, "loss": 0.654, "step": 53500 },
    { "epoch": 1.13, "learning_rate": 2.16338880484115e-05, "loss": 0.6708, "step": 54000 },
    { "epoch": 1.15, "learning_rate": 2.1371238863674567e-05, "loss": 0.6218, "step": 54500 },
    { "epoch": 1.16, "learning_rate": 2.110858967893764e-05, "loss": 0.6413, "step": 55000 },
    { "epoch": 1.17, "learning_rate": 2.0845940494200708e-05, "loss": 0.6441, "step": 55500 },
    { "epoch": 1.18, "learning_rate": 2.0583291309463777e-05, "loss": 0.6444, "step": 56000 },
    { "epoch": 1.19, "learning_rate": 2.0320642124726846e-05, "loss": 0.6311, "step": 56500 },
    { "epoch": 1.2, "learning_rate": 2.0057992939989916e-05, "loss": 0.5755, "step": 57000 },
    { "epoch": 1.21, "learning_rate": 1.9795343755252985e-05, "loss": 0.6561, "step": 57500 },
    { "epoch": 1.22, "learning_rate": 1.9532694570516054e-05, "loss": 0.6098, "step": 58000 },
    { "epoch": 1.23, "learning_rate": 1.9270045385779123e-05, "loss": 0.6406, "step": 58500 },
    { "epoch": 1.24, "learning_rate": 1.9007396201042192e-05, "loss": 0.6184, "step": 59000 },
    { "epoch": 1.25, "learning_rate": 1.874474701630526e-05, "loss": 0.603, "step": 59500 },
    { "epoch": 1.26, "learning_rate": 1.848209783156833e-05, "loss": 0.5957, "step": 60000 },
    { "epoch": 1.27, "learning_rate": 1.8219448646831403e-05, "loss": 0.5994, "step": 60500 },
    { "epoch": 1.28, "learning_rate": 1.7956799462094472e-05, "loss": 0.6365, "step": 61000 },
    { "epoch": 1.29, "learning_rate": 1.769415027735754e-05, "loss": 0.6169, "step": 61500 },
    { "epoch": 1.3, "learning_rate": 1.743150109262061e-05, "loss": 0.63, "step": 62000 },
    { "epoch": 1.31, "learning_rate": 1.716885190788368e-05, "loss": 0.6202, "step": 62500 },
    { "epoch": 1.32, "learning_rate": 1.6906202723146748e-05, "loss": 0.6016, "step": 63000 },
    { "epoch": 1.33, "learning_rate": 1.6643553538409817e-05, "loss": 0.6026, "step": 63500 },
    { "epoch": 1.34, "learning_rate": 1.6380904353672887e-05, "loss": 0.6037, "step": 64000 },
    { "epoch": 1.36, "learning_rate": 1.6118255168935956e-05, "loss": 0.6282, "step": 64500 },
    { "epoch": 1.37, "learning_rate": 1.5855605984199025e-05, "loss": 0.6208, "step": 65000 },
    { "epoch": 1.38, "learning_rate": 1.5592956799462094e-05, "loss": 0.6094, "step": 65500 },
    { "epoch": 1.39, "learning_rate": 1.5330307614725163e-05, "loss": 0.6494, "step": 66000 },
    { "epoch": 1.4, "learning_rate": 1.5067658429988235e-05, "loss": 0.6306, "step": 66500 },
    { "epoch": 1.41, "learning_rate": 1.4805009245251303e-05, "loss": 0.6054, "step": 67000 },
    { "epoch": 1.42, "learning_rate": 1.4542360060514374e-05, "loss": 0.6126, "step": 67500 },
    { "epoch": 1.43, "learning_rate": 1.4279710875777441e-05, "loss": 0.6283, "step": 68000 },
    { "epoch": 1.44, "learning_rate": 1.4017061691040512e-05, "loss": 0.635, "step": 68500 },
    { "epoch": 1.45, "learning_rate": 1.3754412506303583e-05, "loss": 0.6037, "step": 69000 },
    { "epoch": 1.46, "learning_rate": 1.349176332156665e-05, "loss": 0.6232, "step": 69500 },
    { "epoch": 1.47, "learning_rate": 1.3229114136829721e-05, "loss": 0.6044, "step": 70000 },
    { "epoch": 1.48, "learning_rate": 1.2966464952092788e-05, "loss": 0.6094, "step": 70500 },
    { "epoch": 1.49, "learning_rate": 1.270381576735586e-05, "loss": 0.5856, "step": 71000 },
    { "epoch": 1.5, "learning_rate": 1.2441166582618928e-05, "loss": 0.6061, "step": 71500 },
    { "epoch": 1.51, "learning_rate": 1.2178517397881997e-05, "loss": 0.571, "step": 72000 },
    { "epoch": 1.52, "learning_rate": 1.1915868213145067e-05, "loss": 0.5986, "step": 72500 },
    { "epoch": 1.53, "learning_rate": 1.1653219028408137e-05, "loss": 0.6136, "step": 73000 },
    { "epoch": 1.54, "learning_rate": 1.1390569843671207e-05, "loss": 0.6133, "step": 73500 },
    { "epoch": 1.55, "learning_rate": 1.1127920658934276e-05, "loss": 0.6026, "step": 74000 },
    { "epoch": 1.57, "learning_rate": 1.0865271474197345e-05, "loss": 0.5867, "step": 74500 },
    { "epoch": 1.58, "learning_rate": 1.0602622289460414e-05, "loss": 0.6146, "step": 75000 },
    { "epoch": 1.59, "learning_rate": 1.0339973104723485e-05, "loss": 0.6087, "step": 75500 },
    { "epoch": 1.6, "learning_rate": 1.0077323919986554e-05, "loss": 0.6133, "step": 76000 },
    { "epoch": 1.61, "learning_rate": 9.814674735249623e-06, "loss": 0.5743, "step": 76500 },
    { "epoch": 1.62, "learning_rate": 9.552025550512692e-06, "loss": 0.606, "step": 77000 },
    { "epoch": 1.63, "learning_rate": 9.289376365775761e-06, "loss": 0.6097, "step": 77500 },
    { "epoch": 1.64, "learning_rate": 9.02672718103883e-06, "loss": 0.5951, "step": 78000 },
    { "epoch": 1.65, "learning_rate": 8.764077996301901e-06, "loss": 0.6024, "step": 78500 },
    { "epoch": 1.66, "learning_rate": 8.50142881156497e-06, "loss": 0.5819, "step": 79000 },
    { "epoch": 1.67, "learning_rate": 8.23877962682804e-06, "loss": 0.592, "step": 79500 },
    { "epoch": 1.68, "learning_rate": 7.976130442091108e-06, "loss": 0.5664, "step": 80000 },
    { "epoch": 1.69, "learning_rate": 7.713481257354178e-06, "loss": 0.6218, "step": 80500 },
    { "epoch": 1.7, "learning_rate": 7.4508320726172475e-06, "loss": 0.5798, "step": 81000 },
    { "epoch": 1.71, "learning_rate": 7.188182887880317e-06, "loss": 0.589, "step": 81500 },
    { "epoch": 1.72, "learning_rate": 6.925533703143386e-06, "loss": 0.5907, "step": 82000 },
    { "epoch": 1.73, "learning_rate": 6.662884518406455e-06, "loss": 0.593, "step": 82500 },
    { "epoch": 1.74, "learning_rate": 6.400235333669524e-06, "loss": 0.5453, "step": 83000 },
    { "epoch": 1.75, "learning_rate": 6.137586148932594e-06, "loss": 0.5838, "step": 83500 },
    { "epoch": 1.77, "learning_rate": 5.874936964195663e-06, "loss": 0.585, "step": 84000 },
    { "epoch": 1.78, "learning_rate": 5.612287779458733e-06, "loss": 0.6179, "step": 84500 },
    { "epoch": 1.79, "learning_rate": 5.349638594721802e-06, "loss": 0.6065, "step": 85000 },
    { "epoch": 1.8, "learning_rate": 5.086989409984871e-06, "loss": 0.55, "step": 85500 },
    { "epoch": 1.81, "learning_rate": 4.824340225247941e-06, "loss": 0.6015, "step": 86000 },
    { "epoch": 1.82, "learning_rate": 4.56169104051101e-06, "loss": 0.5786, "step": 86500 },
    { "epoch": 1.83, "learning_rate": 4.299041855774079e-06, "loss": 0.5576, "step": 87000 },
    { "epoch": 1.84, "learning_rate": 4.036392671037149e-06, "loss": 0.569, "step": 87500 },
    { "epoch": 1.85, "learning_rate": 3.7737434863002185e-06, "loss": 0.5692, "step": 88000 },
    { "epoch": 1.86, "learning_rate": 3.5110943015632885e-06, "loss": 0.5528, "step": 88500 },
    { "epoch": 1.87, "learning_rate": 3.2484451168263576e-06, "loss": 0.5592, "step": 89000 },
    { "epoch": 1.88, "learning_rate": 2.985795932089427e-06, "loss": 0.5756, "step": 89500 },
    { "epoch": 1.89, "learning_rate": 2.7231467473524962e-06, "loss": 0.5845, "step": 90000 },
    { "epoch": 1.9, "learning_rate": 2.4604975626155658e-06, "loss": 0.5619, "step": 90500 },
    { "epoch": 1.91, "learning_rate": 2.1978483778786353e-06, "loss": 0.5945, "step": 91000 },
    { "epoch": 1.92, "learning_rate": 1.935199193141705e-06, "loss": 0.5672, "step": 91500 },
    { "epoch": 1.93, "learning_rate": 1.672550008404774e-06, "loss": 0.5588, "step": 92000 },
    { "epoch": 1.94, "learning_rate": 1.4099008236678435e-06, "loss": 0.5619, "step": 92500 },
    { "epoch": 1.95, "learning_rate": 1.1472516389309128e-06, "loss": 0.5772, "step": 93000 },
    { "epoch": 1.96, "learning_rate": 8.846024541939823e-07, "loss": 0.5774, "step": 93500 },
    { "epoch": 1.98, "learning_rate": 6.219532694570517e-07, "loss": 0.5765, "step": 94000 },
    { "epoch": 1.99, "learning_rate": 3.5930408472012103e-07, "loss": 0.5612, "step": 94500 },
    { "epoch": 2.0, "learning_rate": 9.665489998319047e-08, "loss": 0.6085, "step": 95000 },
    { "epoch": 2.0, "eval_accuracy": 0.7570347785949707, "eval_loss": 0.6955135464668274, "eval_runtime": 589.2402, "eval_samples_per_second": 67.307, "eval_steps_per_second": 11.218, "step": 95184 }
  ],
  "max_steps": 95184,
  "num_train_epochs": 2,
  "total_flos": 1.2628160717770704e+17,
  "trial_name": null,
  "trial_params": null
}