|
{
"best_metric": 0.9301,
"best_model_checkpoint": "checkpoint/swin-base/checkpoint-13320",
"epoch": 40.0,
"eval_steps": 500,
"global_step": 13320,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.03,
"grad_norm": 9.57372760772705,
"learning_rate": 9.998998998999e-06,
"loss": 4.6311,
"step": 10
},
{
"epoch": 0.06,
"grad_norm": 5.215463161468506,
"learning_rate": 9.997997997998e-06,
"loss": 4.57,
"step": 20
},
{
"epoch": 0.09,
"grad_norm": 5.255615711212158,
"learning_rate": 9.996996996996998e-06,
"loss": 4.5316,
"step": 30
},
{
"epoch": 0.12,
"grad_norm": 4.566373825073242,
"learning_rate": 9.995995995995997e-06,
"loss": 4.4767,
"step": 40
},
{
"epoch": 0.15,
"grad_norm": 6.302833557128906,
"learning_rate": 9.994994994994995e-06,
"loss": 4.4207,
"step": 50
},
{
"epoch": 0.18,
"grad_norm": 5.3884124755859375,
"learning_rate": 9.993993993993994e-06,
"loss": 4.3579,
"step": 60
},
{
"epoch": 0.21,
"grad_norm": 10.630465507507324,
"learning_rate": 9.992992992992994e-06,
"loss": 4.2144,
"step": 70
},
{
"epoch": 0.24,
"grad_norm": 8.169319152832031,
"learning_rate": 9.991991991991993e-06,
"loss": 4.1091,
"step": 80
},
{
"epoch": 0.27,
"grad_norm": 10.23892879486084,
"learning_rate": 9.990990990990992e-06,
"loss": 3.9887,
"step": 90
},
{
"epoch": 0.3,
"grad_norm": 8.140935897827148,
"learning_rate": 9.989989989989992e-06,
"loss": 3.838,
"step": 100
},
{
"epoch": 0.33,
"grad_norm": 11.872363090515137,
"learning_rate": 9.98898898898899e-06,
"loss": 3.6626,
"step": 110
},
{
"epoch": 0.36,
"grad_norm": 11.113883018493652,
"learning_rate": 9.987987987987989e-06,
"loss": 3.5326,
"step": 120
},
{
"epoch": 0.39,
"grad_norm": 15.503419876098633,
"learning_rate": 9.986986986986988e-06,
"loss": 3.3042,
"step": 130
},
{
"epoch": 0.42,
"grad_norm": 14.594585418701172,
"learning_rate": 9.985985985985986e-06,
"loss": 3.111,
"step": 140
},
{
"epoch": 0.45,
"grad_norm": 31.187252044677734,
"learning_rate": 9.984984984984985e-06,
"loss": 2.9329,
"step": 150
},
{
"epoch": 0.48,
"grad_norm": 20.547489166259766,
"learning_rate": 9.983983983983985e-06,
"loss": 2.8472,
"step": 160
},
{
"epoch": 0.51,
"grad_norm": 20.63930892944336,
"learning_rate": 9.982982982982984e-06,
"loss": 2.6238,
"step": 170
},
{
"epoch": 0.54,
"grad_norm": 24.311126708984375,
"learning_rate": 9.981981981981982e-06,
"loss": 2.4911,
"step": 180
},
{
"epoch": 0.57,
"grad_norm": 13.25031566619873,
"learning_rate": 9.980980980980983e-06,
"loss": 2.4185,
"step": 190
},
{
"epoch": 0.6,
"grad_norm": 43.531578063964844,
"learning_rate": 9.979979979979981e-06,
"loss": 2.3114,
"step": 200
},
{
"epoch": 0.63,
"grad_norm": 12.955471992492676,
"learning_rate": 9.97897897897898e-06,
"loss": 2.2558,
"step": 210
},
{
"epoch": 0.66,
"grad_norm": 15.336274147033691,
"learning_rate": 9.977977977977978e-06,
"loss": 2.1056,
"step": 220
},
{
"epoch": 0.69,
"grad_norm": 12.100207328796387,
"learning_rate": 9.976976976976977e-06,
"loss": 2.0495,
"step": 230
},
{
"epoch": 0.72,
"grad_norm": 21.610525131225586,
"learning_rate": 9.975975975975977e-06,
"loss": 2.0121,
"step": 240
},
{
"epoch": 0.75,
"grad_norm": 12.644548416137695,
"learning_rate": 9.974974974974976e-06,
"loss": 1.8556,
"step": 250
},
{
"epoch": 0.78,
"grad_norm": 15.277923583984375,
"learning_rate": 9.973973973973974e-06,
"loss": 1.8089,
"step": 260
},
{
"epoch": 0.81,
"grad_norm": 12.686960220336914,
"learning_rate": 9.972972972972975e-06,
"loss": 1.7365,
"step": 270
},
{
"epoch": 0.84,
"grad_norm": 13.061480522155762,
"learning_rate": 9.971971971971973e-06,
"loss": 1.7073,
"step": 280
},
{
"epoch": 0.87,
"grad_norm": 14.452112197875977,
"learning_rate": 9.970970970970972e-06,
"loss": 1.6694,
"step": 290
},
{
"epoch": 0.9,
"grad_norm": 17.65357780456543,
"learning_rate": 9.96996996996997e-06,
"loss": 1.6142,
"step": 300
},
{
"epoch": 0.93,
"grad_norm": 17.804506301879883,
"learning_rate": 9.968968968968969e-06,
"loss": 1.5512,
"step": 310
},
{
"epoch": 0.96,
"grad_norm": 21.067514419555664,
"learning_rate": 9.96796796796797e-06,
"loss": 1.5347,
"step": 320
},
{
"epoch": 0.99,
"grad_norm": 22.17087173461914,
"learning_rate": 9.966966966966968e-06,
"loss": 1.4475,
"step": 330
},
{
"epoch": 1.0,
"eval_accuracy": 0.8024,
"eval_loss": 0.8144841194152832,
"eval_runtime": 32.2937,
"eval_samples_per_second": 309.658,
"eval_steps_per_second": 1.239,
"step": 333
},
{
"epoch": 1.02,
"grad_norm": 16.76087760925293,
"learning_rate": 9.965965965965967e-06,
"loss": 1.4671,
"step": 340
},
{
"epoch": 1.05,
"grad_norm": 13.050296783447266,
"learning_rate": 9.964964964964965e-06,
"loss": 1.3778,
"step": 350
},
{
"epoch": 1.08,
"grad_norm": 24.59625816345215,
"learning_rate": 9.963963963963965e-06,
"loss": 1.3997,
"step": 360
},
{
"epoch": 1.11,
"grad_norm": 18.178476333618164,
"learning_rate": 9.962962962962964e-06,
"loss": 1.3204,
"step": 370
},
{
"epoch": 1.14,
"grad_norm": 16.15305519104004,
"learning_rate": 9.961961961961963e-06,
"loss": 1.3254,
"step": 380
},
{
"epoch": 1.17,
"grad_norm": 13.282833099365234,
"learning_rate": 9.960960960960961e-06,
"loss": 1.2781,
"step": 390
},
{
"epoch": 1.2,
"grad_norm": 13.037567138671875,
"learning_rate": 9.95995995995996e-06,
"loss": 1.3349,
"step": 400
},
{
"epoch": 1.23,
"grad_norm": 16.423372268676758,
"learning_rate": 9.95895895895896e-06,
"loss": 1.2962,
"step": 410
},
{
"epoch": 1.26,
"grad_norm": 17.499189376831055,
"learning_rate": 9.957957957957959e-06,
"loss": 1.2996,
"step": 420
},
{
"epoch": 1.29,
"grad_norm": 13.364770889282227,
"learning_rate": 9.956956956956957e-06,
"loss": 1.2447,
"step": 430
},
{
"epoch": 1.32,
"grad_norm": 10.91217041015625,
"learning_rate": 9.955955955955958e-06,
"loss": 1.2044,
"step": 440
},
{
"epoch": 1.35,
"grad_norm": 21.4530029296875,
"learning_rate": 9.954954954954956e-06,
"loss": 1.2419,
"step": 450
},
{
"epoch": 1.38,
"grad_norm": 22.028942108154297,
"learning_rate": 9.953953953953955e-06,
"loss": 1.1525,
"step": 460
},
{
"epoch": 1.41,
"grad_norm": 15.714077949523926,
"learning_rate": 9.952952952952953e-06,
"loss": 1.1201,
"step": 470
},
{
"epoch": 1.44,
"grad_norm": 18.178543090820312,
"learning_rate": 9.951951951951952e-06,
"loss": 1.1787,
"step": 480
},
{
"epoch": 1.47,
"grad_norm": 12.53498363494873,
"learning_rate": 9.950950950950952e-06,
"loss": 1.1371,
"step": 490
},
{
"epoch": 1.5,
"grad_norm": 13.074543952941895,
"learning_rate": 9.949949949949951e-06,
"loss": 1.154,
"step": 500
},
{
"epoch": 1.53,
"grad_norm": 12.679483413696289,
"learning_rate": 9.94894894894895e-06,
"loss": 1.2112,
"step": 510
},
{
"epoch": 1.56,
"grad_norm": 20.075759887695312,
"learning_rate": 9.94794794794795e-06,
"loss": 1.2047,
"step": 520
},
{
"epoch": 1.59,
"grad_norm": 23.097604751586914,
"learning_rate": 9.946946946946948e-06,
"loss": 1.0866,
"step": 530
},
{
"epoch": 1.62,
"grad_norm": 11.857926368713379,
"learning_rate": 9.945945945945947e-06,
"loss": 1.1568,
"step": 540
},
{
"epoch": 1.65,
"grad_norm": 15.009241104125977,
"learning_rate": 9.944944944944946e-06,
"loss": 1.0899,
"step": 550
},
{
"epoch": 1.68,
"grad_norm": 21.914396286010742,
"learning_rate": 9.943943943943944e-06,
"loss": 0.9945,
"step": 560
},
{
"epoch": 1.71,
"grad_norm": 15.895747184753418,
"learning_rate": 9.942942942942944e-06,
"loss": 1.1402,
"step": 570
},
{
"epoch": 1.74,
"grad_norm": 22.441926956176758,
"learning_rate": 9.941941941941943e-06,
"loss": 1.0905,
"step": 580
},
{
"epoch": 1.77,
"grad_norm": 16.437023162841797,
"learning_rate": 9.940940940940942e-06,
"loss": 1.0368,
"step": 590
},
{
"epoch": 1.8,
"grad_norm": 12.452149391174316,
"learning_rate": 9.93993993993994e-06,
"loss": 1.1294,
"step": 600
},
{
"epoch": 1.83,
"grad_norm": 50.94220733642578,
"learning_rate": 9.93893893893894e-06,
"loss": 1.0648,
"step": 610
},
{
"epoch": 1.86,
"grad_norm": 22.72955322265625,
"learning_rate": 9.937937937937939e-06,
"loss": 1.0418,
"step": 620
},
{
"epoch": 1.89,
"grad_norm": 15.258744239807129,
"learning_rate": 9.936936936936938e-06,
"loss": 1.0291,
"step": 630
},
{
"epoch": 1.92,
"grad_norm": 13.402735710144043,
"learning_rate": 9.935935935935936e-06,
"loss": 1.0124,
"step": 640
},
{
"epoch": 1.95,
"grad_norm": 19.616920471191406,
"learning_rate": 9.934934934934935e-06,
"loss": 1.065,
"step": 650
},
{
"epoch": 1.98,
"grad_norm": 14.682768821716309,
"learning_rate": 9.933933933933935e-06,
"loss": 1.1153,
"step": 660
},
{
"epoch": 2.0,
"eval_accuracy": 0.8751,
"eval_loss": 0.4367116093635559,
"eval_runtime": 30.112,
"eval_samples_per_second": 332.093,
"eval_steps_per_second": 1.328,
"step": 666
},
{
"epoch": 2.01,
"grad_norm": 14.608451843261719,
"learning_rate": 9.932932932932934e-06,
"loss": 1.0625,
"step": 670
},
{
"epoch": 2.04,
"grad_norm": 18.75455093383789,
"learning_rate": 9.931931931931932e-06,
"loss": 1.0292,
"step": 680
},
{
"epoch": 2.07,
"grad_norm": 12.429720878601074,
"learning_rate": 9.930930930930933e-06,
"loss": 1.009,
"step": 690
},
{
"epoch": 2.1,
"grad_norm": 15.66808032989502,
"learning_rate": 9.929929929929931e-06,
"loss": 0.9994,
"step": 700
},
{
"epoch": 2.13,
"grad_norm": 20.903182983398438,
"learning_rate": 9.92892892892893e-06,
"loss": 0.9621,
"step": 710
},
{
"epoch": 2.16,
"grad_norm": 10.713549613952637,
"learning_rate": 9.927927927927928e-06,
"loss": 0.9526,
"step": 720
},
{
"epoch": 2.19,
"grad_norm": 19.999610900878906,
"learning_rate": 9.926926926926927e-06,
"loss": 1.0163,
"step": 730
},
{
"epoch": 2.22,
"grad_norm": 10.432254791259766,
"learning_rate": 9.925925925925927e-06,
"loss": 1.0134,
"step": 740
},
{
"epoch": 2.25,
"grad_norm": 9.605545043945312,
"learning_rate": 9.924924924924926e-06,
"loss": 0.9707,
"step": 750
},
{
"epoch": 2.28,
"grad_norm": 17.136886596679688,
"learning_rate": 9.923923923923925e-06,
"loss": 0.9925,
"step": 760
},
{
"epoch": 2.31,
"grad_norm": 10.676803588867188,
"learning_rate": 9.922922922922925e-06,
"loss": 0.9082,
"step": 770
},
{
"epoch": 2.34,
"grad_norm": 13.845260620117188,
"learning_rate": 9.921921921921923e-06,
"loss": 0.891,
"step": 780
},
{
"epoch": 2.37,
"grad_norm": 13.065056800842285,
"learning_rate": 9.920920920920922e-06,
"loss": 0.8955,
"step": 790
},
{
"epoch": 2.4,
"grad_norm": 10.908167839050293,
"learning_rate": 9.91991991991992e-06,
"loss": 0.9711,
"step": 800
},
{
"epoch": 2.43,
"grad_norm": 17.723506927490234,
"learning_rate": 9.91891891891892e-06,
"loss": 0.9859,
"step": 810
},
{
"epoch": 2.46,
"grad_norm": 12.879526138305664,
"learning_rate": 9.917917917917918e-06,
"loss": 0.9318,
"step": 820
},
{
"epoch": 2.49,
"grad_norm": 11.543569564819336,
"learning_rate": 9.916916916916918e-06,
"loss": 0.9445,
"step": 830
},
{
"epoch": 2.52,
"grad_norm": 13.50468635559082,
"learning_rate": 9.915915915915917e-06,
"loss": 0.9331,
"step": 840
},
{
"epoch": 2.55,
"grad_norm": 27.5030574798584,
"learning_rate": 9.914914914914915e-06,
"loss": 1.0073,
"step": 850
},
{
"epoch": 2.58,
"grad_norm": 17.51815414428711,
"learning_rate": 9.913913913913916e-06,
"loss": 0.8938,
"step": 860
},
{
"epoch": 2.61,
"grad_norm": 14.364579200744629,
"learning_rate": 9.912912912912914e-06,
"loss": 0.9455,
"step": 870
},
{
"epoch": 2.64,
"grad_norm": 15.077325820922852,
"learning_rate": 9.911911911911913e-06,
"loss": 0.9742,
"step": 880
},
{
"epoch": 2.67,
"grad_norm": 13.445960998535156,
"learning_rate": 9.910910910910911e-06,
"loss": 0.9341,
"step": 890
},
{
"epoch": 2.7,
"grad_norm": 12.205794334411621,
"learning_rate": 9.90990990990991e-06,
"loss": 0.9009,
"step": 900
},
{
"epoch": 2.73,
"grad_norm": 17.600419998168945,
"learning_rate": 9.90890890890891e-06,
"loss": 0.9779,
"step": 910
},
{
"epoch": 2.76,
"grad_norm": 13.027902603149414,
"learning_rate": 9.907907907907909e-06,
"loss": 0.9338,
"step": 920
},
{
"epoch": 2.79,
"grad_norm": 14.624979972839355,
"learning_rate": 9.906906906906907e-06,
"loss": 0.9067,
"step": 930
},
{
"epoch": 2.82,
"grad_norm": 14.892926216125488,
"learning_rate": 9.905905905905908e-06,
"loss": 0.8548,
"step": 940
},
{
"epoch": 2.85,
"grad_norm": 20.055511474609375,
"learning_rate": 9.904904904904906e-06,
"loss": 0.8416,
"step": 950
},
{
"epoch": 2.88,
"grad_norm": 15.868443489074707,
"learning_rate": 9.903903903903905e-06,
"loss": 0.9291,
"step": 960
},
{
"epoch": 2.91,
"grad_norm": 17.575693130493164,
"learning_rate": 9.902902902902903e-06,
"loss": 0.9228,
"step": 970
},
{
"epoch": 2.94,
"grad_norm": 12.875110626220703,
"learning_rate": 9.901901901901902e-06,
"loss": 0.8616,
"step": 980
},
{
"epoch": 2.97,
"grad_norm": 39.7818603515625,
"learning_rate": 9.900900900900902e-06,
"loss": 0.9219,
"step": 990
},
{
"epoch": 3.0,
"eval_accuracy": 0.8946,
"eval_loss": 0.35464411973953247,
"eval_runtime": 30.462,
"eval_samples_per_second": 328.278,
"eval_steps_per_second": 1.313,
"step": 999
},
{
"epoch": 3.0,
"grad_norm": 9.457967758178711,
"learning_rate": 9.899899899899901e-06,
"loss": 0.8197,
"step": 1000
},
{
"epoch": 3.03,
"grad_norm": 11.913254737854004,
"learning_rate": 9.8988988988989e-06,
"loss": 0.8704,
"step": 1010
},
{
"epoch": 3.06,
"grad_norm": 46.069637298583984,
"learning_rate": 9.8978978978979e-06,
"loss": 0.8304,
"step": 1020
},
{
"epoch": 3.09,
"grad_norm": 14.785714149475098,
"learning_rate": 9.896896896896898e-06,
"loss": 0.8993,
"step": 1030
},
{
"epoch": 3.12,
"grad_norm": 9.831999778747559,
"learning_rate": 9.895895895895895e-06,
"loss": 0.8093,
"step": 1040
},
{
"epoch": 3.15,
"grad_norm": 10.466429710388184,
"learning_rate": 9.894894894894896e-06,
"loss": 0.9325,
"step": 1050
},
{
"epoch": 3.18,
"grad_norm": 12.368927955627441,
"learning_rate": 9.893893893893894e-06,
"loss": 0.8663,
"step": 1060
},
{
"epoch": 3.21,
"grad_norm": 12.843910217285156,
"learning_rate": 9.892892892892893e-06,
"loss": 0.7992,
"step": 1070
},
{
"epoch": 3.24,
"grad_norm": 10.470433235168457,
"learning_rate": 9.891891891891893e-06,
"loss": 0.8336,
"step": 1080
},
{
"epoch": 3.27,
"grad_norm": 11.602510452270508,
"learning_rate": 9.890890890890892e-06,
"loss": 0.8936,
"step": 1090
},
{
"epoch": 3.3,
"grad_norm": 38.957767486572266,
"learning_rate": 9.88988988988989e-06,
"loss": 0.8651,
"step": 1100
},
{
"epoch": 3.33,
"grad_norm": 9.675531387329102,
"learning_rate": 9.88888888888889e-06,
"loss": 0.8244,
"step": 1110
},
{
"epoch": 3.36,
"grad_norm": 15.052042961120605,
"learning_rate": 9.88788788788789e-06,
"loss": 0.86,
"step": 1120
},
{
"epoch": 3.39,
"grad_norm": 13.488245964050293,
"learning_rate": 9.886886886886888e-06,
"loss": 0.8673,
"step": 1130
},
{
"epoch": 3.42,
"grad_norm": 14.295967102050781,
"learning_rate": 9.885885885885886e-06,
"loss": 0.9099,
"step": 1140
},
{
"epoch": 3.45,
"grad_norm": 12.72698974609375,
"learning_rate": 9.884884884884885e-06,
"loss": 0.8992,
"step": 1150
},
{
"epoch": 3.48,
"grad_norm": 19.24166488647461,
"learning_rate": 9.883883883883885e-06,
"loss": 0.9266,
"step": 1160
},
{
"epoch": 3.51,
"grad_norm": 15.552074432373047,
"learning_rate": 9.882882882882884e-06,
"loss": 0.8785,
"step": 1170
},
{
"epoch": 3.54,
"grad_norm": 89.45469665527344,
"learning_rate": 9.881881881881882e-06,
"loss": 0.8496,
"step": 1180
},
{
"epoch": 3.57,
"grad_norm": 36.776451110839844,
"learning_rate": 9.880880880880883e-06,
"loss": 0.7665,
"step": 1190
},
{
"epoch": 3.6,
"grad_norm": 12.820813179016113,
"learning_rate": 9.879879879879881e-06,
"loss": 0.7873,
"step": 1200
},
{
"epoch": 3.63,
"grad_norm": 32.781124114990234,
"learning_rate": 9.87887887887888e-06,
"loss": 0.8752,
"step": 1210
},
{
"epoch": 3.66,
"grad_norm": 11.789835929870605,
"learning_rate": 9.877877877877879e-06,
"loss": 0.844,
"step": 1220
},
{
"epoch": 3.69,
"grad_norm": 11.543076515197754,
"learning_rate": 9.876876876876877e-06,
"loss": 0.8588,
"step": 1230
},
{
"epoch": 3.72,
"grad_norm": 12.701692581176758,
"learning_rate": 9.875875875875877e-06,
"loss": 0.833,
"step": 1240
},
{
"epoch": 3.75,
"grad_norm": 14.432270050048828,
"learning_rate": 9.874874874874876e-06,
"loss": 0.821,
"step": 1250
},
{
"epoch": 3.78,
"grad_norm": 12.727407455444336,
"learning_rate": 9.873873873873875e-06,
"loss": 0.8628,
"step": 1260
},
{
"epoch": 3.81,
"grad_norm": 15.466862678527832,
"learning_rate": 9.872872872872873e-06,
"loss": 0.7927,
"step": 1270
},
{
"epoch": 3.84,
"grad_norm": 10.200054168701172,
"learning_rate": 9.871871871871873e-06,
"loss": 0.7891,
"step": 1280
},
{
"epoch": 3.87,
"grad_norm": 13.369035720825195,
"learning_rate": 9.87087087087087e-06,
"loss": 0.8204,
"step": 1290
},
{
"epoch": 3.9,
"grad_norm": 25.642301559448242,
"learning_rate": 9.86986986986987e-06,
"loss": 0.8128,
"step": 1300
},
{
"epoch": 3.93,
"grad_norm": 8.70489501953125,
"learning_rate": 9.86886886886887e-06,
"loss": 0.8449,
"step": 1310
},
{
"epoch": 3.96,
"grad_norm": 13.265392303466797,
"learning_rate": 9.867867867867868e-06,
"loss": 0.8596,
"step": 1320
},
{
"epoch": 3.99,
"grad_norm": 13.94210147857666,
"learning_rate": 9.866866866866868e-06,
"loss": 0.8802,
"step": 1330
},
{
"epoch": 4.0,
"eval_accuracy": 0.9027,
"eval_loss": 0.3215944468975067,
"eval_runtime": 30.2535,
"eval_samples_per_second": 330.54,
"eval_steps_per_second": 1.322,
"step": 1332
},
{
"epoch": 4.02,
"grad_norm": 11.947212219238281,
"learning_rate": 9.865865865865867e-06,
"loss": 0.863,
"step": 1340
},
{
"epoch": 4.05,
"grad_norm": 19.2293758392334,
"learning_rate": 9.864864864864865e-06,
"loss": 0.787,
"step": 1350
},
{
"epoch": 4.08,
"grad_norm": 9.49066162109375,
"learning_rate": 9.863863863863866e-06,
"loss": 0.799,
"step": 1360
},
{
"epoch": 4.11,
"grad_norm": 18.955677032470703,
"learning_rate": 9.862862862862864e-06,
"loss": 0.7656,
"step": 1370
},
{
"epoch": 4.14,
"grad_norm": 13.782297134399414,
"learning_rate": 9.861861861861863e-06,
"loss": 0.8222,
"step": 1380
},
{
"epoch": 4.17,
"grad_norm": 10.21755599975586,
"learning_rate": 9.860860860860861e-06,
"loss": 0.8259,
"step": 1390
},
{
"epoch": 4.2,
"grad_norm": 32.93952941894531,
"learning_rate": 9.85985985985986e-06,
"loss": 0.7729,
"step": 1400
},
{
"epoch": 4.23,
"grad_norm": 12.50378704071045,
"learning_rate": 9.85885885885886e-06,
"loss": 0.8319,
"step": 1410
},
{
"epoch": 4.26,
"grad_norm": 13.975879669189453,
"learning_rate": 9.857857857857859e-06,
"loss": 0.8158,
"step": 1420
},
{
"epoch": 4.29,
"grad_norm": 13.557897567749023,
"learning_rate": 9.856856856856857e-06,
"loss": 0.8521,
"step": 1430
},
{
"epoch": 4.32,
"grad_norm": 16.076936721801758,
"learning_rate": 9.855855855855858e-06,
"loss": 0.787,
"step": 1440
},
{
"epoch": 4.35,
"grad_norm": 13.304807662963867,
"learning_rate": 9.854854854854856e-06,
"loss": 0.7525,
"step": 1450
},
{
"epoch": 4.38,
"grad_norm": 9.973689079284668,
"learning_rate": 9.853853853853855e-06,
"loss": 0.8127,
"step": 1460
},
{
"epoch": 4.41,
"grad_norm": 11.573305130004883,
"learning_rate": 9.852852852852854e-06,
"loss": 0.7792,
"step": 1470
},
{
"epoch": 4.44,
"grad_norm": 10.37661361694336,
"learning_rate": 9.851851851851852e-06,
"loss": 0.7511,
"step": 1480
},
{
"epoch": 4.47,
"grad_norm": 14.020105361938477,
"learning_rate": 9.85085085085085e-06,
"loss": 0.7556,
"step": 1490
},
{
"epoch": 4.5,
"grad_norm": 13.901347160339355,
"learning_rate": 9.849849849849851e-06,
"loss": 0.788,
"step": 1500
},
{
"epoch": 4.53,
"grad_norm": 14.113587379455566,
"learning_rate": 9.84884884884885e-06,
"loss": 0.7675,
"step": 1510
},
{
"epoch": 4.56,
"grad_norm": 10.88308048248291,
"learning_rate": 9.847847847847848e-06,
"loss": 0.6993,
"step": 1520
},
{
"epoch": 4.59,
"grad_norm": 12.067638397216797,
"learning_rate": 9.846846846846849e-06,
"loss": 0.8427,
"step": 1530
},
{
"epoch": 4.62,
"grad_norm": 22.864439010620117,
"learning_rate": 9.845845845845845e-06,
"loss": 0.7892,
"step": 1540
},
{
"epoch": 4.65,
"grad_norm": 83.34579467773438,
"learning_rate": 9.844844844844846e-06,
"loss": 0.7976,
"step": 1550
},
{
"epoch": 4.68,
"grad_norm": 20.661500930786133,
"learning_rate": 9.843843843843844e-06,
"loss": 0.7091,
"step": 1560
},
{
"epoch": 4.71,
"grad_norm": 16.65585708618164,
"learning_rate": 9.842842842842843e-06,
"loss": 0.7744,
"step": 1570
},
{
"epoch": 4.74,
"grad_norm": 11.51008129119873,
"learning_rate": 9.841841841841843e-06,
"loss": 0.7727,
"step": 1580
},
{
"epoch": 4.77,
"grad_norm": 8.962791442871094,
"learning_rate": 9.840840840840842e-06,
"loss": 0.8176,
"step": 1590
},
{
"epoch": 4.8,
"grad_norm": 12.087862014770508,
"learning_rate": 9.83983983983984e-06,
"loss": 0.8206,
"step": 1600
},
{
"epoch": 4.83,
"grad_norm": 13.356203079223633,
"learning_rate": 9.83883883883884e-06,
"loss": 0.7516,
"step": 1610
},
{
"epoch": 4.86,
"grad_norm": 11.307415008544922,
"learning_rate": 9.83783783783784e-06,
"loss": 0.8181,
"step": 1620
},
{
"epoch": 4.89,
"grad_norm": 13.581600189208984,
"learning_rate": 9.836836836836838e-06,
"loss": 0.8334,
"step": 1630
},
{
"epoch": 4.92,
"grad_norm": 12.420446395874023,
"learning_rate": 9.835835835835836e-06,
"loss": 0.774,
"step": 1640
},
{
"epoch": 4.95,
"grad_norm": 20.350292205810547,
"learning_rate": 9.834834834834835e-06,
"loss": 0.8007,
"step": 1650
},
{
"epoch": 4.98,
"grad_norm": 11.960819244384766,
"learning_rate": 9.833833833833835e-06,
"loss": 0.7332,
"step": 1660
},
{
"epoch": 5.0,
"eval_accuracy": 0.9069,
"eval_loss": 0.3024939298629761,
"eval_runtime": 30.2033,
"eval_samples_per_second": 331.09,
"eval_steps_per_second": 1.324,
"step": 1665
},
{
"epoch": 5.02,
"grad_norm": 14.111451148986816,
"learning_rate": 9.832832832832834e-06,
"loss": 0.6782,
"step": 1670
},
{
"epoch": 5.05,
"grad_norm": 19.54442596435547,
"learning_rate": 9.831831831831833e-06,
"loss": 0.6969,
"step": 1680
},
{
"epoch": 5.08,
"grad_norm": 16.62184715270996,
"learning_rate": 9.830830830830833e-06,
"loss": 0.7776,
"step": 1690
},
{
"epoch": 5.11,
"grad_norm": 10.560216903686523,
"learning_rate": 9.829829829829831e-06,
"loss": 0.7622,
"step": 1700
},
{
"epoch": 5.14,
"grad_norm": 14.3679780960083,
"learning_rate": 9.82882882882883e-06,
"loss": 0.7208,
"step": 1710
},
{
"epoch": 5.17,
"grad_norm": 15.207982063293457,
"learning_rate": 9.827827827827829e-06,
"loss": 0.7025,
"step": 1720
},
{
"epoch": 5.2,
"grad_norm": 9.77590560913086,
"learning_rate": 9.826826826826827e-06,
"loss": 0.7776,
"step": 1730
},
{
"epoch": 5.23,
"grad_norm": 18.650480270385742,
"learning_rate": 9.825825825825826e-06,
"loss": 0.8204,
"step": 1740
},
{
"epoch": 5.26,
"grad_norm": 20.201152801513672,
"learning_rate": 9.824824824824826e-06,
"loss": 0.6941,
"step": 1750
},
{
"epoch": 5.29,
"grad_norm": 12.634096145629883,
"learning_rate": 9.823823823823825e-06,
"loss": 0.7537,
"step": 1760
},
{
"epoch": 5.32,
"grad_norm": 14.861948013305664,
"learning_rate": 9.822822822822823e-06,
"loss": 0.7318,
"step": 1770
},
{
"epoch": 5.35,
"grad_norm": 17.764514923095703,
"learning_rate": 9.821821821821824e-06,
"loss": 0.7009,
"step": 1780
},
{
"epoch": 5.38,
"grad_norm": 11.650490760803223,
"learning_rate": 9.82082082082082e-06,
"loss": 0.7511,
"step": 1790
},
{
"epoch": 5.41,
"grad_norm": 12.79879093170166,
"learning_rate": 9.81981981981982e-06,
"loss": 0.777,
"step": 1800
},
{
"epoch": 5.44,
"grad_norm": 12.410004615783691,
"learning_rate": 9.81881881881882e-06,
"loss": 0.7176,
"step": 1810
},
{
"epoch": 5.47,
"grad_norm": 19.437084197998047,
"learning_rate": 9.817817817817818e-06,
"loss": 0.8047,
"step": 1820
},
{
"epoch": 5.5,
"grad_norm": 17.08942413330078,
"learning_rate": 9.816816816816818e-06,
"loss": 0.7177,
"step": 1830
},
{
"epoch": 5.53,
"grad_norm": 15.615683555603027,
"learning_rate": 9.815815815815817e-06,
"loss": 0.6848,
"step": 1840
},
{
"epoch": 5.56,
"grad_norm": 13.829546928405762,
"learning_rate": 9.814814814814815e-06,
"loss": 0.7703,
"step": 1850
},
{
"epoch": 5.59,
"grad_norm": 10.06242847442627,
"learning_rate": 9.813813813813816e-06,
"loss": 0.6813,
"step": 1860
},
{
"epoch": 5.62,
"grad_norm": 11.726490020751953,
"learning_rate": 9.812812812812814e-06,
"loss": 0.749,
"step": 1870
},
{
"epoch": 5.65,
"grad_norm": 9.272887229919434,
"learning_rate": 9.811811811811813e-06,
"loss": 0.7128,
"step": 1880
},
{
"epoch": 5.68,
"grad_norm": 12.022890090942383,
"learning_rate": 9.810810810810811e-06,
"loss": 0.6505,
"step": 1890
},
{
"epoch": 5.71,
"grad_norm": 11.16840934753418,
"learning_rate": 9.80980980980981e-06,
"loss": 0.7784,
"step": 1900
},
{
"epoch": 5.74,
"grad_norm": 16.869796752929688,
"learning_rate": 9.80880880880881e-06,
"loss": 0.7459,
"step": 1910
},
{
"epoch": 5.77,
"grad_norm": 12.084444999694824,
"learning_rate": 9.807807807807809e-06,
"loss": 0.7068,
"step": 1920
},
{
"epoch": 5.8,
"grad_norm": 11.130463600158691,
"learning_rate": 9.806806806806808e-06,
"loss": 0.7896,
"step": 1930
},
{
"epoch": 5.83,
"grad_norm": 20.33501625061035,
"learning_rate": 9.805805805805808e-06,
"loss": 0.7212,
"step": 1940
},
{
"epoch": 5.86,
"grad_norm": 14.124403953552246,
"learning_rate": 9.804804804804806e-06,
"loss": 0.7743,
"step": 1950
},
{
"epoch": 5.89,
"grad_norm": 13.767190933227539,
"learning_rate": 9.803803803803803e-06,
"loss": 0.7661,
"step": 1960
},
{
"epoch": 5.92,
"grad_norm": 14.234041213989258,
"learning_rate": 9.802802802802804e-06,
"loss": 0.7038,
"step": 1970
},
{
"epoch": 5.95,
"grad_norm": 16.750995635986328,
"learning_rate": 9.801801801801802e-06,
"loss": 0.7367,
"step": 1980
},
{
"epoch": 5.98,
"grad_norm": 49.59062194824219,
"learning_rate": 9.8008008008008e-06,
"loss": 0.6719,
"step": 1990
},
{
"epoch": 6.0,
"eval_accuracy": 0.9098,
"eval_loss": 0.2940322756767273,
"eval_runtime": 30.4626,
"eval_samples_per_second": 328.271,
"eval_steps_per_second": 1.313,
"step": 1998
},
{
"epoch": 6.01,
"grad_norm": 12.728262901306152,
"learning_rate": 9.799799799799801e-06,
"loss": 0.9003,
"step": 2000
},
{
"epoch": 6.04,
"grad_norm": 13.666450500488281,
"learning_rate": 9.7987987987988e-06,
"loss": 0.7714,
"step": 2010
},
{
"epoch": 6.07,
"grad_norm": 9.260753631591797,
"learning_rate": 9.797797797797798e-06,
"loss": 0.6515,
"step": 2020
},
{
"epoch": 6.1,
"grad_norm": 15.711074829101562,
"learning_rate": 9.796796796796799e-06,
"loss": 0.6656,
"step": 2030
},
{
"epoch": 6.13,
"grad_norm": 39.45460891723633,
"learning_rate": 9.795795795795795e-06,
"loss": 0.6897,
"step": 2040
},
{
"epoch": 6.16,
"grad_norm": 12.101982116699219,
"learning_rate": 9.794794794794796e-06,
"loss": 0.7077,
"step": 2050
},
{
"epoch": 6.19,
"grad_norm": 11.528532028198242,
"learning_rate": 9.793793793793794e-06,
"loss": 0.7468,
"step": 2060
},
{
"epoch": 6.22,
"grad_norm": 11.694806098937988,
"learning_rate": 9.792792792792793e-06,
"loss": 0.7502,
"step": 2070
},
{
"epoch": 6.25,
"grad_norm": 8.30628490447998,
"learning_rate": 9.791791791791793e-06,
"loss": 0.6735,
"step": 2080
},
{
"epoch": 6.28,
"grad_norm": 9.299217224121094,
"learning_rate": 9.790790790790792e-06,
"loss": 0.704,
"step": 2090
},
{
"epoch": 6.31,
"grad_norm": 10.323074340820312,
"learning_rate": 9.78978978978979e-06,
"loss": 0.7196,
"step": 2100
},
{
"epoch": 6.34,
"grad_norm": 17.049901962280273,
"learning_rate": 9.78878878878879e-06,
"loss": 0.71,
"step": 2110
},
{
"epoch": 6.37,
"grad_norm": 13.586891174316406,
"learning_rate": 9.787787787787788e-06,
"loss": 0.7129,
"step": 2120
},
{
"epoch": 6.4,
"grad_norm": 18.899734497070312,
"learning_rate": 9.786786786786788e-06,
"loss": 0.7571,
"step": 2130
},
{
"epoch": 6.43,
"grad_norm": 10.381853103637695,
"learning_rate": 9.785785785785787e-06,
"loss": 0.6343,
"step": 2140
},
{
"epoch": 6.46,
"grad_norm": 14.553983688354492,
"learning_rate": 9.784784784784785e-06,
"loss": 0.7838,
"step": 2150
},
{
"epoch": 6.49,
"grad_norm": 15.92480182647705,
"learning_rate": 9.783783783783785e-06,
"loss": 0.7708,
"step": 2160
},
{
"epoch": 6.52,
"grad_norm": 11.899249076843262,
"learning_rate": 9.782782782782784e-06,
"loss": 0.7326,
"step": 2170
},
{
"epoch": 6.55,
"grad_norm": 14.936653137207031,
"learning_rate": 9.781781781781783e-06,
"loss": 0.7262,
"step": 2180
},
{
"epoch": 6.58,
"grad_norm": 15.01948070526123,
"learning_rate": 9.780780780780781e-06,
"loss": 0.6918,
"step": 2190
},
{
"epoch": 6.61,
"grad_norm": 12.893462181091309,
"learning_rate": 9.779779779779781e-06,
"loss": 0.7247,
"step": 2200
},
{
"epoch": 6.64,
"grad_norm": 22.485645294189453,
"learning_rate": 9.778778778778778e-06,
"loss": 0.7185,
"step": 2210
},
{
"epoch": 6.67,
"grad_norm": 10.820356369018555,
"learning_rate": 9.777777777777779e-06,
"loss": 0.7147,
"step": 2220
},
{
"epoch": 6.7,
"grad_norm": 11.032286643981934,
"learning_rate": 9.776776776776777e-06,
"loss": 0.7425,
"step": 2230
},
{
"epoch": 6.73,
"grad_norm": 11.431447982788086,
"learning_rate": 9.775775775775776e-06,
"loss": 0.7005,
"step": 2240
},
{
"epoch": 6.76,
"grad_norm": 20.35511016845703,
"learning_rate": 9.774774774774776e-06,
"loss": 0.5943,
"step": 2250
},
{
"epoch": 6.79,
"grad_norm": 13.538642883300781,
"learning_rate": 9.773773773773775e-06,
"loss": 0.6561,
"step": 2260
},
{
"epoch": 6.82,
"grad_norm": 10.554620742797852,
"learning_rate": 9.772772772772773e-06,
"loss": 0.6407,
"step": 2270
},
{
"epoch": 6.85,
"grad_norm": 13.718347549438477,
"learning_rate": 9.771771771771774e-06,
"loss": 0.7527,
"step": 2280
},
{
"epoch": 6.88,
"grad_norm": 12.707255363464355,
"learning_rate": 9.77077077077077e-06,
"loss": 0.6908,
"step": 2290
},
{
"epoch": 6.91,
"grad_norm": 14.473544120788574,
"learning_rate": 9.76976976976977e-06,
"loss": 0.6696,
"step": 2300
},
{
"epoch": 6.94,
"grad_norm": 13.07099437713623,
"learning_rate": 9.76876876876877e-06,
"loss": 0.7644,
"step": 2310
},
{
"epoch": 6.97,
"grad_norm": 41.19286346435547,
"learning_rate": 9.767767767767768e-06,
"loss": 0.7138,
"step": 2320
},
{
"epoch": 7.0,
"grad_norm": 9.618319511413574,
"learning_rate": 9.766766766766768e-06,
"loss": 0.6996,
"step": 2330
},
{
"epoch": 7.0,
"eval_accuracy": 0.9141,
"eval_loss": 0.27847668528556824,
"eval_runtime": 30.5293,
"eval_samples_per_second": 327.554,
"eval_steps_per_second": 1.31,
"step": 2331
},
{
"epoch": 7.03,
"grad_norm": 10.23026180267334,
"learning_rate": 9.765765765765767e-06,
"loss": 0.6614,
"step": 2340
},
{
"epoch": 7.06,
"grad_norm": 14.503907203674316,
"learning_rate": 9.764764764764765e-06,
"loss": 0.7116,
"step": 2350
},
{
"epoch": 7.09,
"grad_norm": 11.488687515258789,
"learning_rate": 9.763763763763766e-06,
"loss": 0.7243,
"step": 2360
},
{
"epoch": 7.12,
"grad_norm": 10.227832794189453,
"learning_rate": 9.762762762762763e-06,
"loss": 0.5943,
"step": 2370
},
{
"epoch": 7.15,
"grad_norm": 11.515847206115723,
"learning_rate": 9.761761761761763e-06,
"loss": 0.6871,
"step": 2380
},
{
"epoch": 7.18,
"grad_norm": 17.236072540283203,
"learning_rate": 9.760760760760762e-06,
"loss": 0.72,
"step": 2390
},
{
"epoch": 7.21,
"grad_norm": 13.397762298583984,
"learning_rate": 9.75975975975976e-06,
"loss": 0.7651,
"step": 2400
},
{
"epoch": 7.24,
"grad_norm": 12.64128303527832,
"learning_rate": 9.758758758758759e-06,
"loss": 0.632,
"step": 2410
},
{
"epoch": 7.27,
"grad_norm": 28.24812126159668,
"learning_rate": 9.757757757757759e-06,
"loss": 0.6373,
"step": 2420
},
{
"epoch": 7.3,
"grad_norm": 12.245796203613281,
"learning_rate": 9.756756756756758e-06,
"loss": 0.6566,
"step": 2430
},
{
"epoch": 7.33,
"grad_norm": 18.77831268310547,
"learning_rate": 9.755755755755756e-06,
"loss": 0.6115,
"step": 2440
},
{
"epoch": 7.36,
"grad_norm": 13.072959899902344,
"learning_rate": 9.754754754754756e-06,
"loss": 0.6627,
"step": 2450
},
{
"epoch": 7.39,
"grad_norm": 29.834144592285156,
"learning_rate": 9.753753753753753e-06,
"loss": 0.71,
"step": 2460
},
{
"epoch": 7.42,
"grad_norm": 10.122661590576172,
"learning_rate": 9.752752752752754e-06,
"loss": 0.6729,
"step": 2470
},
{
"epoch": 7.45,
"grad_norm": 11.016374588012695,
"learning_rate": 9.751751751751752e-06,
"loss": 0.7137,
"step": 2480
},
{
"epoch": 7.48,
"grad_norm": 11.94205093383789,
"learning_rate": 9.750750750750751e-06,
"loss": 0.6615,
"step": 2490
},
{
"epoch": 7.51,
"grad_norm": 27.569272994995117,
"learning_rate": 9.749749749749751e-06,
"loss": 0.6984,
"step": 2500
},
{
"epoch": 7.54,
"grad_norm": 10.764726638793945,
"learning_rate": 9.74874874874875e-06,
"loss": 0.697,
"step": 2510
},
{
"epoch": 7.57,
"grad_norm": 9.96368408203125,
"learning_rate": 9.747747747747748e-06,
"loss": 0.72,
"step": 2520
},
{
"epoch": 7.6,
"grad_norm": 8.127985000610352,
"learning_rate": 9.746746746746749e-06,
"loss": 0.634,
"step": 2530
},
{
"epoch": 7.63,
"grad_norm": 16.233617782592773,
"learning_rate": 9.745745745745746e-06,
"loss": 0.7103,
"step": 2540
},
{
"epoch": 7.66,
"grad_norm": 11.445056915283203,
"learning_rate": 9.744744744744746e-06,
"loss": 0.6502,
"step": 2550
},
{
"epoch": 7.69,
"grad_norm": 15.745110511779785,
"learning_rate": 9.743743743743744e-06,
"loss": 0.6258,
"step": 2560
},
{
"epoch": 7.72,
"grad_norm": 11.728997230529785,
"learning_rate": 9.742742742742743e-06,
"loss": 0.7321,
"step": 2570
},
{
"epoch": 7.75,
"grad_norm": 12.35846996307373,
"learning_rate": 9.741741741741743e-06,
"loss": 0.7286,
"step": 2580
},
{
"epoch": 7.78,
"grad_norm": 11.954543113708496,
"learning_rate": 9.740740740740742e-06,
"loss": 0.6831,
"step": 2590
},
{
"epoch": 7.81,
"grad_norm": 11.742406845092773,
"learning_rate": 9.73973973973974e-06,
"loss": 0.7106,
"step": 2600
},
{
"epoch": 7.84,
"grad_norm": 9.084891319274902,
"learning_rate": 9.73873873873874e-06,
"loss": 0.6903,
"step": 2610
},
{
"epoch": 7.87,
"grad_norm": 12.923911094665527,
"learning_rate": 9.737737737737738e-06,
"loss": 0.6308,
"step": 2620
},
{
"epoch": 7.9,
"grad_norm": 9.698966979980469,
"learning_rate": 9.736736736736738e-06,
"loss": 0.693,
"step": 2630
},
{
"epoch": 7.93,
"grad_norm": 11.315530776977539,
"learning_rate": 9.735735735735737e-06,
"loss": 0.6412,
"step": 2640
},
{
"epoch": 7.96,
"grad_norm": 17.644512176513672,
"learning_rate": 9.734734734734735e-06,
"loss": 0.6525,
"step": 2650
},
{
"epoch": 7.99,
"grad_norm": 32.29540252685547,
"learning_rate": 9.733733733733734e-06,
"loss": 0.7023,
"step": 2660
},
{
"epoch": 8.0,
"eval_accuracy": 0.9165,
"eval_loss": 0.27294430136680603,
"eval_runtime": 30.9418,
"eval_samples_per_second": 323.187,
"eval_steps_per_second": 1.293,
"step": 2664
},
{
"epoch": 8.02,
"grad_norm": 11.768852233886719,
"learning_rate": 9.732732732732734e-06,
"loss": 0.5853,
"step": 2670
},
{
"epoch": 8.05,
"grad_norm": 9.919044494628906,
"learning_rate": 9.731731731731733e-06,
"loss": 0.7103,
"step": 2680
},
{
"epoch": 8.08,
"grad_norm": 10.119452476501465,
"learning_rate": 9.730730730730731e-06,
"loss": 0.6712,
"step": 2690
},
{
"epoch": 8.11,
"grad_norm": 11.397403717041016,
"learning_rate": 9.729729729729732e-06,
"loss": 0.6385,
"step": 2700
},
{
"epoch": 8.14,
"grad_norm": 16.2565975189209,
"learning_rate": 9.728728728728728e-06,
"loss": 0.6542,
"step": 2710
},
{
"epoch": 8.17,
"grad_norm": 10.146173477172852,
"learning_rate": 9.727727727727729e-06,
"loss": 0.645,
"step": 2720
},
{
"epoch": 8.2,
"grad_norm": 10.870752334594727,
"learning_rate": 9.726726726726727e-06,
"loss": 0.6539,
"step": 2730
},
{
"epoch": 8.23,
"grad_norm": 11.892374992370605,
"learning_rate": 9.725725725725726e-06,
"loss": 0.6483,
"step": 2740
},
{
"epoch": 8.26,
"grad_norm": 12.019987106323242,
"learning_rate": 9.724724724724726e-06,
"loss": 0.6794,
"step": 2750
},
{
"epoch": 8.29,
"grad_norm": 10.963202476501465,
"learning_rate": 9.723723723723725e-06,
"loss": 0.6416,
"step": 2760
},
{
"epoch": 8.32,
"grad_norm": 14.188977241516113,
"learning_rate": 9.722722722722723e-06,
"loss": 0.6006,
"step": 2770
},
{
"epoch": 8.35,
"grad_norm": 11.994919776916504,
"learning_rate": 9.721721721721724e-06,
"loss": 0.6577,
"step": 2780
},
{
"epoch": 8.38,
"grad_norm": 12.545328140258789,
"learning_rate": 9.72072072072072e-06,
"loss": 0.6337,
"step": 2790
},
{
"epoch": 8.41,
"grad_norm": 16.774003982543945,
"learning_rate": 9.719719719719721e-06,
"loss": 0.6151,
"step": 2800
},
{
"epoch": 8.44,
"grad_norm": 22.333213806152344,
"learning_rate": 9.71871871871872e-06,
"loss": 0.6167,
"step": 2810
},
{
"epoch": 8.47,
"grad_norm": 13.755775451660156,
"learning_rate": 9.717717717717718e-06,
"loss": 0.6507,
"step": 2820
},
{
"epoch": 8.5,
"grad_norm": 16.17780113220215,
"learning_rate": 9.716716716716718e-06,
"loss": 0.6653,
"step": 2830
},
{
"epoch": 8.53,
"grad_norm": 11.606565475463867,
"learning_rate": 9.715715715715717e-06,
"loss": 0.637,
"step": 2840
},
{
"epoch": 8.56,
"grad_norm": 21.008371353149414,
"learning_rate": 9.714714714714716e-06,
"loss": 0.6559,
"step": 2850
},
{
"epoch": 8.59,
"grad_norm": 9.835844039916992,
"learning_rate": 9.713713713713714e-06,
"loss": 0.6633,
"step": 2860
},
{
"epoch": 8.62,
"grad_norm": 12.223276138305664,
"learning_rate": 9.712712712712713e-06,
"loss": 0.6244,
"step": 2870
},
{
"epoch": 8.65,
"grad_norm": 16.365243911743164,
"learning_rate": 9.711711711711711e-06,
"loss": 0.6177,
"step": 2880
},
{
"epoch": 8.68,
"grad_norm": 21.39632797241211,
"learning_rate": 9.710710710710712e-06,
"loss": 0.6716,
"step": 2890
},
{
"epoch": 8.71,
"grad_norm": 13.667511940002441,
"learning_rate": 9.70970970970971e-06,
"loss": 0.6434,
"step": 2900
},
{
"epoch": 8.74,
"grad_norm": 11.143122673034668,
"learning_rate": 9.708708708708709e-06,
"loss": 0.6564,
"step": 2910
},
{
"epoch": 8.77,
"grad_norm": 10.231430053710938,
"learning_rate": 9.707707707707709e-06,
"loss": 0.6212,
"step": 2920
},
{
"epoch": 8.8,
"grad_norm": 11.799844741821289,
"learning_rate": 9.706706706706708e-06,
"loss": 0.5904,
"step": 2930
},
{
"epoch": 8.83,
"grad_norm": 28.14351463317871,
"learning_rate": 9.705705705705706e-06,
"loss": 0.66,
"step": 2940
},
{
"epoch": 8.86,
"grad_norm": 14.178098678588867,
"learning_rate": 9.704704704704707e-06,
"loss": 0.5597,
"step": 2950
},
{
"epoch": 8.89,
"grad_norm": 12.042082786560059,
"learning_rate": 9.703703703703703e-06,
"loss": 0.6368,
"step": 2960
},
{
"epoch": 8.92,
"grad_norm": 11.72601318359375,
"learning_rate": 9.702702702702704e-06,
"loss": 0.6435,
"step": 2970
},
{
"epoch": 8.95,
"grad_norm": 16.047924041748047,
"learning_rate": 9.701701701701702e-06,
"loss": 0.6367,
"step": 2980
},
{
"epoch": 8.98,
"grad_norm": 11.935233116149902,
"learning_rate": 9.700700700700701e-06,
"loss": 0.6376,
"step": 2990
},
{
"epoch": 9.0,
"eval_accuracy": 0.9194,
"eval_loss": 0.265708327293396,
"eval_runtime": 30.5217,
"eval_samples_per_second": 327.636,
"eval_steps_per_second": 1.311,
"step": 2997
},
{
"epoch": 9.01,
"grad_norm": 42.095699310302734,
"learning_rate": 9.699699699699701e-06,
"loss": 0.8317,
"step": 3000
},
{
"epoch": 9.04,
"grad_norm": 13.209282875061035,
"learning_rate": 9.6986986986987e-06,
"loss": 0.6914,
"step": 3010
},
{
"epoch": 9.07,
"grad_norm": 9.086323738098145,
"learning_rate": 9.697697697697698e-06,
"loss": 0.6362,
"step": 3020
},
{
"epoch": 9.1,
"grad_norm": 9.95993423461914,
"learning_rate": 9.696696696696699e-06,
"loss": 0.6453,
"step": 3030
},
{
"epoch": 9.13,
"grad_norm": 13.244100570678711,
"learning_rate": 9.695695695695696e-06,
"loss": 0.5516,
"step": 3040
},
{
"epoch": 9.16,
"grad_norm": 11.685038566589355,
"learning_rate": 9.694694694694696e-06,
"loss": 0.6275,
"step": 3050
},
{
"epoch": 9.19,
"grad_norm": 12.687440872192383,
"learning_rate": 9.693693693693694e-06,
"loss": 0.7,
"step": 3060
},
{
"epoch": 9.22,
"grad_norm": 14.26815128326416,
"learning_rate": 9.692692692692693e-06,
"loss": 0.6488,
"step": 3070
},
{
"epoch": 9.25,
"grad_norm": 12.68408203125,
"learning_rate": 9.691691691691693e-06,
"loss": 0.6669,
"step": 3080
},
{
"epoch": 9.28,
"grad_norm": 19.84491729736328,
"learning_rate": 9.690690690690692e-06,
"loss": 0.5962,
"step": 3090
},
{
"epoch": 9.31,
"grad_norm": 14.905065536499023,
"learning_rate": 9.68968968968969e-06,
"loss": 0.6467,
"step": 3100
},
{
"epoch": 9.34,
"grad_norm": 18.167434692382812,
"learning_rate": 9.68868868868869e-06,
"loss": 0.6241,
"step": 3110
},
{
"epoch": 9.37,
"grad_norm": 12.980717658996582,
"learning_rate": 9.687687687687688e-06,
"loss": 0.7141,
"step": 3120
},
{
"epoch": 9.4,
"grad_norm": 13.490983009338379,
"learning_rate": 9.686686686686686e-06,
"loss": 0.6145,
"step": 3130
},
{
"epoch": 9.43,
"grad_norm": 10.717024803161621,
"learning_rate": 9.685685685685687e-06,
"loss": 0.6809,
"step": 3140
},
{
"epoch": 9.46,
"grad_norm": 11.203835487365723,
"learning_rate": 9.684684684684685e-06,
"loss": 0.6164,
"step": 3150
},
{
"epoch": 9.49,
"grad_norm": 9.084880828857422,
"learning_rate": 9.683683683683684e-06,
"loss": 0.5897,
"step": 3160
},
{
"epoch": 9.52,
"grad_norm": 15.867284774780273,
"learning_rate": 9.682682682682684e-06,
"loss": 0.5605,
"step": 3170
},
{
"epoch": 9.55,
"grad_norm": 14.339218139648438,
"learning_rate": 9.681681681681683e-06,
"loss": 0.7164,
"step": 3180
},
{
"epoch": 9.58,
"grad_norm": 11.700525283813477,
"learning_rate": 9.680680680680681e-06,
"loss": 0.6495,
"step": 3190
},
{
"epoch": 9.61,
"grad_norm": 8.924003601074219,
"learning_rate": 9.67967967967968e-06,
"loss": 0.5767,
"step": 3200
},
{
"epoch": 9.64,
"grad_norm": 12.401798248291016,
"learning_rate": 9.678678678678679e-06,
"loss": 0.5759,
"step": 3210
},
{
"epoch": 9.67,
"grad_norm": 11.777472496032715,
"learning_rate": 9.677677677677679e-06,
"loss": 0.6899,
"step": 3220
},
{
"epoch": 9.7,
"grad_norm": 11.41810417175293,
"learning_rate": 9.676676676676677e-06,
"loss": 0.6634,
"step": 3230
},
{
"epoch": 9.73,
"grad_norm": 12.742891311645508,
"learning_rate": 9.675675675675676e-06,
"loss": 0.6457,
"step": 3240
},
{
"epoch": 9.76,
"grad_norm": 14.691203117370605,
"learning_rate": 9.674674674674676e-06,
"loss": 0.6526,
"step": 3250
},
{
"epoch": 9.79,
"grad_norm": 13.14875602722168,
"learning_rate": 9.673673673673675e-06,
"loss": 0.5675,
"step": 3260
},
{
"epoch": 9.82,
"grad_norm": 14.89482593536377,
"learning_rate": 9.672672672672673e-06,
"loss": 0.6553,
"step": 3270
},
{
"epoch": 9.85,
"grad_norm": 7.1069416999816895,
"learning_rate": 9.671671671671674e-06,
"loss": 0.6353,
"step": 3280
},
{
"epoch": 9.88,
"grad_norm": 20.37803077697754,
"learning_rate": 9.67067067067067e-06,
"loss": 0.6192,
"step": 3290
},
{
"epoch": 9.91,
"grad_norm": 11.729090690612793,
"learning_rate": 9.669669669669671e-06,
"loss": 0.5869,
"step": 3300
},
{
"epoch": 9.94,
"grad_norm": 23.864059448242188,
"learning_rate": 9.66866866866867e-06,
"loss": 0.6176,
"step": 3310
},
{
"epoch": 9.97,
"grad_norm": 13.164339065551758,
"learning_rate": 9.667667667667668e-06,
"loss": 0.6299,
"step": 3320
},
{
"epoch": 10.0,
"grad_norm": 4.83162260055542,
"learning_rate": 9.666666666666667e-06,
"loss": 0.571,
"step": 3330
},
{
"epoch": 10.0,
"eval_accuracy": 0.9209,
"eval_loss": 0.26171091198921204,
"eval_runtime": 30.6144,
"eval_samples_per_second": 326.644,
"eval_steps_per_second": 1.307,
"step": 3330
},
{
"epoch": 10.03,
"grad_norm": 8.903618812561035,
"learning_rate": 9.665665665665667e-06,
"loss": 0.5832,
"step": 3340
},
{
"epoch": 10.06,
"grad_norm": 13.1465425491333,
"learning_rate": 9.664664664664666e-06,
"loss": 0.6152,
"step": 3350
},
{
"epoch": 10.09,
"grad_norm": 8.559700965881348,
"learning_rate": 9.663663663663664e-06,
"loss": 0.613,
"step": 3360
},
{
"epoch": 10.12,
"grad_norm": 12.810148239135742,
"learning_rate": 9.662662662662663e-06,
"loss": 0.5844,
"step": 3370
},
{
"epoch": 10.15,
"grad_norm": 26.87183380126953,
"learning_rate": 9.661661661661661e-06,
"loss": 0.6117,
"step": 3380
},
{
"epoch": 10.18,
"grad_norm": 9.096632957458496,
"learning_rate": 9.660660660660662e-06,
"loss": 0.6312,
"step": 3390
},
{
"epoch": 10.21,
"grad_norm": 11.064271926879883,
"learning_rate": 9.65965965965966e-06,
"loss": 0.6052,
"step": 3400
},
{
"epoch": 10.24,
"grad_norm": 10.786611557006836,
"learning_rate": 9.658658658658659e-06,
"loss": 0.6101,
"step": 3410
},
{
"epoch": 10.27,
"grad_norm": 14.586657524108887,
"learning_rate": 9.65765765765766e-06,
"loss": 0.5827,
"step": 3420
},
{
"epoch": 10.3,
"grad_norm": 9.581392288208008,
"learning_rate": 9.656656656656658e-06,
"loss": 0.6229,
"step": 3430
},
{
"epoch": 10.33,
"grad_norm": 15.541650772094727,
"learning_rate": 9.655655655655656e-06,
"loss": 0.6739,
"step": 3440
},
{
"epoch": 10.36,
"grad_norm": 11.475388526916504,
"learning_rate": 9.654654654654655e-06,
"loss": 0.6073,
"step": 3450
},
{
"epoch": 10.39,
"grad_norm": 10.81979751586914,
"learning_rate": 9.653653653653654e-06,
"loss": 0.5649,
"step": 3460
},
{
"epoch": 10.42,
"grad_norm": 17.0631160736084,
"learning_rate": 9.652652652652654e-06,
"loss": 0.6303,
"step": 3470
},
{
"epoch": 10.45,
"grad_norm": 12.085174560546875,
"learning_rate": 9.651651651651652e-06,
"loss": 0.6294,
"step": 3480
},
{
"epoch": 10.48,
"grad_norm": 9.632865905761719,
"learning_rate": 9.650650650650651e-06,
"loss": 0.6468,
"step": 3490
},
{
"epoch": 10.51,
"grad_norm": 14.820611000061035,
"learning_rate": 9.649649649649651e-06,
"loss": 0.6449,
"step": 3500
},
{
"epoch": 10.54,
"grad_norm": 12.00233268737793,
"learning_rate": 9.64864864864865e-06,
"loss": 0.5553,
"step": 3510
},
{
"epoch": 10.57,
"grad_norm": 13.7506742477417,
"learning_rate": 9.647647647647648e-06,
"loss": 0.576,
"step": 3520
},
{
"epoch": 10.6,
"grad_norm": 10.14376449584961,
"learning_rate": 9.646646646646649e-06,
"loss": 0.6363,
"step": 3530
},
{
"epoch": 10.63,
"grad_norm": 15.22951602935791,
"learning_rate": 9.645645645645646e-06,
"loss": 0.6088,
"step": 3540
},
{
"epoch": 10.66,
"grad_norm": 13.552799224853516,
"learning_rate": 9.644644644644644e-06,
"loss": 0.6076,
"step": 3550
},
{
"epoch": 10.69,
"grad_norm": 11.422019958496094,
"learning_rate": 9.643643643643645e-06,
"loss": 0.6158,
"step": 3560
},
{
"epoch": 10.72,
"grad_norm": 14.275238990783691,
"learning_rate": 9.642642642642643e-06,
"loss": 0.5884,
"step": 3570
},
{
"epoch": 10.75,
"grad_norm": 16.786712646484375,
"learning_rate": 9.641641641641642e-06,
"loss": 0.5642,
"step": 3580
},
{
"epoch": 10.78,
"grad_norm": 11.727968215942383,
"learning_rate": 9.640640640640642e-06,
"loss": 0.5777,
"step": 3590
},
{
"epoch": 10.81,
"grad_norm": 14.0637788772583,
"learning_rate": 9.63963963963964e-06,
"loss": 0.5905,
"step": 3600
},
{
"epoch": 10.84,
"grad_norm": 11.379030227661133,
"learning_rate": 9.63863863863864e-06,
"loss": 0.5427,
"step": 3610
},
{
"epoch": 10.87,
"grad_norm": 15.644015312194824,
"learning_rate": 9.637637637637638e-06,
"loss": 0.6216,
"step": 3620
},
{
"epoch": 10.9,
"grad_norm": 13.289280891418457,
"learning_rate": 9.636636636636636e-06,
"loss": 0.6093,
"step": 3630
},
{
"epoch": 10.93,
"grad_norm": 23.49856948852539,
"learning_rate": 9.635635635635637e-06,
"loss": 0.5797,
"step": 3640
},
{
"epoch": 10.96,
"grad_norm": 10.170689582824707,
"learning_rate": 9.634634634634635e-06,
"loss": 0.598,
"step": 3650
},
{
"epoch": 10.99,
"grad_norm": 32.763023376464844,
"learning_rate": 9.633633633633634e-06,
"loss": 0.6006,
"step": 3660
},
{
"epoch": 11.0,
"eval_accuracy": 0.9215,
"eval_loss": 0.26355910301208496,
"eval_runtime": 30.4205,
"eval_samples_per_second": 328.726,
"eval_steps_per_second": 1.315,
"step": 3663
},
{
"epoch": 11.02,
"grad_norm": 11.804524421691895,
"learning_rate": 9.632632632632634e-06,
"loss": 0.7672,
"step": 3670
},
{
"epoch": 11.05,
"grad_norm": 12.28615665435791,
"learning_rate": 9.631631631631633e-06,
"loss": 0.5228,
"step": 3680
},
{
"epoch": 11.08,
"grad_norm": 11.602808952331543,
"learning_rate": 9.630630630630631e-06,
"loss": 0.5835,
"step": 3690
},
{
"epoch": 11.11,
"grad_norm": 11.79984188079834,
"learning_rate": 9.62962962962963e-06,
"loss": 0.5576,
"step": 3700
},
{
"epoch": 11.14,
"grad_norm": 28.47237777709961,
"learning_rate": 9.628628628628629e-06,
"loss": 0.6201,
"step": 3710
},
{
"epoch": 11.17,
"grad_norm": 29.43509864807129,
"learning_rate": 9.627627627627629e-06,
"loss": 0.5559,
"step": 3720
},
{
"epoch": 11.2,
"grad_norm": 15.982295036315918,
"learning_rate": 9.626626626626627e-06,
"loss": 0.6419,
"step": 3730
},
{
"epoch": 11.23,
"grad_norm": 10.097960472106934,
"learning_rate": 9.625625625625626e-06,
"loss": 0.5709,
"step": 3740
},
{
"epoch": 11.26,
"grad_norm": 12.428840637207031,
"learning_rate": 9.624624624624626e-06,
"loss": 0.5881,
"step": 3750
},
{
"epoch": 11.29,
"grad_norm": 7.855467319488525,
"learning_rate": 9.623623623623625e-06,
"loss": 0.5935,
"step": 3760
},
{
"epoch": 11.32,
"grad_norm": 13.401034355163574,
"learning_rate": 9.622622622622624e-06,
"loss": 0.6379,
"step": 3770
},
{
"epoch": 11.35,
"grad_norm": 11.108011245727539,
"learning_rate": 9.621621621621622e-06,
"loss": 0.6269,
"step": 3780
},
{
"epoch": 11.38,
"grad_norm": 11.773721694946289,
"learning_rate": 9.62062062062062e-06,
"loss": 0.607,
"step": 3790
},
{
"epoch": 11.41,
"grad_norm": 17.96162223815918,
"learning_rate": 9.61961961961962e-06,
"loss": 0.6359,
"step": 3800
},
{
"epoch": 11.44,
"grad_norm": 13.181288719177246,
"learning_rate": 9.61861861861862e-06,
"loss": 0.5791,
"step": 3810
},
{
"epoch": 11.47,
"grad_norm": 10.062200546264648,
"learning_rate": 9.617617617617618e-06,
"loss": 0.5776,
"step": 3820
},
{
"epoch": 11.5,
"grad_norm": 11.909144401550293,
"learning_rate": 9.616616616616617e-06,
"loss": 0.5902,
"step": 3830
},
{
"epoch": 11.53,
"grad_norm": 11.405366897583008,
"learning_rate": 9.615615615615617e-06,
"loss": 0.6111,
"step": 3840
},
{
"epoch": 11.56,
"grad_norm": 9.566732406616211,
"learning_rate": 9.614614614614616e-06,
"loss": 0.6061,
"step": 3850
},
{
"epoch": 11.59,
"grad_norm": 12.742683410644531,
"learning_rate": 9.613613613613614e-06,
"loss": 0.6257,
"step": 3860
},
{
"epoch": 11.62,
"grad_norm": 9.52748966217041,
"learning_rate": 9.612612612612613e-06,
"loss": 0.6245,
"step": 3870
},
{
"epoch": 11.65,
"grad_norm": 9.65878963470459,
|
"learning_rate": 9.611611611611611e-06, |
|
"loss": 0.6549, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"grad_norm": 9.731181144714355, |
|
"learning_rate": 9.610610610610612e-06, |
|
"loss": 0.5947, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"grad_norm": 15.657157897949219, |
|
"learning_rate": 9.60960960960961e-06, |
|
"loss": 0.6265, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"grad_norm": 11.384998321533203, |
|
"learning_rate": 9.608608608608609e-06, |
|
"loss": 0.5735, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"grad_norm": 17.66204261779785, |
|
"learning_rate": 9.60760760760761e-06, |
|
"loss": 0.5889, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"grad_norm": 14.75931453704834, |
|
"learning_rate": 9.606606606606608e-06, |
|
"loss": 0.6254, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"grad_norm": 11.870536804199219, |
|
"learning_rate": 9.605605605605606e-06, |
|
"loss": 0.5641, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"grad_norm": 13.277334213256836, |
|
"learning_rate": 9.604604604604605e-06, |
|
"loss": 0.6206, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"grad_norm": 9.664814949035645, |
|
"learning_rate": 9.603603603603604e-06, |
|
"loss": 0.5821, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"grad_norm": 9.741429328918457, |
|
"learning_rate": 9.602602602602604e-06, |
|
"loss": 0.5563, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"grad_norm": 8.5345458984375, |
|
"learning_rate": 9.601601601601602e-06, |
|
"loss": 0.6133, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"grad_norm": 13.11860179901123, |
|
"learning_rate": 9.600600600600601e-06, |
|
"loss": 0.5471, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.9239, |
|
"eval_loss": 0.2516760230064392, |
|
"eval_runtime": 30.4731, |
|
"eval_samples_per_second": 328.159, |
|
"eval_steps_per_second": 1.313, |
|
"step": 3996 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"grad_norm": 14.547802925109863, |
|
"learning_rate": 9.5995995995996e-06, |
|
"loss": 0.955, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"grad_norm": 14.620524406433105, |
|
"learning_rate": 9.5985985985986e-06, |
|
"loss": 0.5806, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"grad_norm": 11.669858932495117, |
|
"learning_rate": 9.597597597597599e-06, |
|
"loss": 0.5529, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"grad_norm": 7.106392860412598, |
|
"learning_rate": 9.596596596596597e-06, |
|
"loss": 0.5495, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"grad_norm": 9.383758544921875, |
|
"learning_rate": 9.595595595595596e-06, |
|
"loss": 0.5426, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"grad_norm": 11.75998306274414, |
|
"learning_rate": 9.594594594594594e-06, |
|
"loss": 0.5571, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"grad_norm": 8.375412940979004, |
|
"learning_rate": 9.593593593593595e-06, |
|
"loss": 0.5685, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"grad_norm": 12.944570541381836, |
|
"learning_rate": 9.592592592592593e-06, |
|
"loss": 0.5316, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"grad_norm": 17.294076919555664, |
|
"learning_rate": 9.591591591591592e-06, |
|
"loss": 0.6194, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"grad_norm": 10.051231384277344, |
|
"learning_rate": 9.590590590590592e-06, |
|
"loss": 0.5876, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"grad_norm": 312.2090148925781, |
|
"learning_rate": 9.58958958958959e-06, |
|
"loss": 0.5972, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"grad_norm": 13.954002380371094, |
|
"learning_rate": 9.58858858858859e-06, |
|
"loss": 0.562, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"grad_norm": 11.686988830566406, |
|
"learning_rate": 9.587587587587588e-06, |
|
"loss": 0.5695, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"grad_norm": 12.769394874572754, |
|
"learning_rate": 9.586586586586586e-06, |
|
"loss": 0.6047, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"grad_norm": 8.87507438659668, |
|
"learning_rate": 9.585585585585587e-06, |
|
"loss": 0.6268, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"grad_norm": 15.076183319091797, |
|
"learning_rate": 9.584584584584585e-06, |
|
"loss": 0.5486, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"grad_norm": 10.28525447845459, |
|
"learning_rate": 9.583583583583584e-06, |
|
"loss": 0.6153, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"grad_norm": 10.834978103637695, |
|
"learning_rate": 9.582582582582584e-06, |
|
"loss": 0.5824, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"grad_norm": 26.89397621154785, |
|
"learning_rate": 9.581581581581583e-06, |
|
"loss": 0.5857, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"grad_norm": 10.6329984664917, |
|
"learning_rate": 9.580580580580581e-06, |
|
"loss": 0.5734, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"grad_norm": 11.606546401977539, |
|
"learning_rate": 9.57957957957958e-06, |
|
"loss": 0.5593, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"grad_norm": 10.23750114440918, |
|
"learning_rate": 9.578578578578579e-06, |
|
"loss": 0.5584, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"grad_norm": 13.51978588104248, |
|
"learning_rate": 9.577577577577579e-06, |
|
"loss": 0.5167, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"grad_norm": 11.312134742736816, |
|
"learning_rate": 9.576576576576578e-06, |
|
"loss": 0.5347, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"grad_norm": 10.276420593261719, |
|
"learning_rate": 9.575575575575576e-06, |
|
"loss": 0.5815, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"grad_norm": 9.689416885375977, |
|
"learning_rate": 9.574574574574575e-06, |
|
"loss": 0.5319, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"grad_norm": 9.421140670776367, |
|
"learning_rate": 9.573573573573575e-06, |
|
"loss": 0.5544, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"grad_norm": 9.061555862426758, |
|
"learning_rate": 9.572572572572574e-06, |
|
"loss": 0.6004, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"grad_norm": 9.207202911376953, |
|
"learning_rate": 9.571571571571572e-06, |
|
"loss": 0.5692, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"grad_norm": 21.15509796142578, |
|
"learning_rate": 9.57057057057057e-06, |
|
"loss": 0.5143, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"grad_norm": 12.711333274841309, |
|
"learning_rate": 9.56956956956957e-06, |
|
"loss": 0.6018, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"grad_norm": 8.004502296447754, |
|
"learning_rate": 9.56856856856857e-06, |
|
"loss": 0.5522, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"grad_norm": 17.612730026245117, |
|
"learning_rate": 9.567567567567568e-06, |
|
"loss": 0.6324, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.9226, |
|
"eval_loss": 0.2519969344139099, |
|
"eval_runtime": 30.5594, |
|
"eval_samples_per_second": 327.231, |
|
"eval_steps_per_second": 1.309, |
|
"step": 4329 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"grad_norm": 10.345263481140137, |
|
"learning_rate": 9.566566566566567e-06, |
|
"loss": 0.5334, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"grad_norm": 13.402093887329102, |
|
"learning_rate": 9.565565565565567e-06, |
|
"loss": 0.5308, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"grad_norm": 18.5904598236084, |
|
"learning_rate": 9.564564564564566e-06, |
|
"loss": 0.5594, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"grad_norm": 12.926309585571289, |
|
"learning_rate": 9.563563563563564e-06, |
|
"loss": 0.5892, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"grad_norm": 9.018284797668457, |
|
"learning_rate": 9.562562562562563e-06, |
|
"loss": 0.6252, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"grad_norm": 12.864182472229004, |
|
"learning_rate": 9.561561561561562e-06, |
|
"loss": 0.5282, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"grad_norm": 18.937196731567383, |
|
"learning_rate": 9.560560560560562e-06, |
|
"loss": 0.5713, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"grad_norm": 10.198113441467285, |
|
"learning_rate": 9.55955955955956e-06, |
|
"loss": 0.5767, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"grad_norm": 8.118650436401367, |
|
"learning_rate": 9.558558558558559e-06, |
|
"loss": 0.5426, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"grad_norm": 19.721181869506836, |
|
"learning_rate": 9.55755755755756e-06, |
|
"loss": 0.5961, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"grad_norm": 16.725208282470703, |
|
"learning_rate": 9.556556556556558e-06, |
|
"loss": 0.6371, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"grad_norm": 10.268779754638672, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.5646, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"grad_norm": 185.5620574951172, |
|
"learning_rate": 9.554554554554555e-06, |
|
"loss": 0.6055, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"grad_norm": 16.159719467163086, |
|
"learning_rate": 9.553553553553554e-06, |
|
"loss": 0.582, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"grad_norm": 13.551849365234375, |
|
"learning_rate": 9.552552552552552e-06, |
|
"loss": 0.5657, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 13.45, |
|
"grad_norm": 13.877328872680664, |
|
"learning_rate": 9.551551551551553e-06, |
|
"loss": 0.6025, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"grad_norm": 29.763002395629883, |
|
"learning_rate": 9.550550550550551e-06, |
|
"loss": 0.5481, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"grad_norm": 13.889739990234375, |
|
"learning_rate": 9.54954954954955e-06, |
|
"loss": 0.5733, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"grad_norm": 69.9765853881836, |
|
"learning_rate": 9.54854854854855e-06, |
|
"loss": 0.6338, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"grad_norm": 19.75606918334961, |
|
"learning_rate": 9.547547547547549e-06, |
|
"loss": 0.5628, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"grad_norm": 15.370271682739258, |
|
"learning_rate": 9.546546546546547e-06, |
|
"loss": 0.596, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"grad_norm": 10.773337364196777, |
|
"learning_rate": 9.545545545545546e-06, |
|
"loss": 0.6277, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"grad_norm": 7.928040504455566, |
|
"learning_rate": 9.544544544544544e-06, |
|
"loss": 0.5446, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"grad_norm": 15.335708618164062, |
|
"learning_rate": 9.543543543543545e-06, |
|
"loss": 0.587, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"grad_norm": 12.43051815032959, |
|
"learning_rate": 9.542542542542543e-06, |
|
"loss": 0.5645, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"grad_norm": 8.507277488708496, |
|
"learning_rate": 9.541541541541542e-06, |
|
"loss": 0.5238, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"grad_norm": 12.679448127746582, |
|
"learning_rate": 9.540540540540542e-06, |
|
"loss": 0.5287, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"grad_norm": 12.938888549804688, |
|
"learning_rate": 9.53953953953954e-06, |
|
"loss": 0.5759, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"grad_norm": 11.686616897583008, |
|
"learning_rate": 9.53853853853854e-06, |
|
"loss": 0.556, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"grad_norm": 10.387247085571289, |
|
"learning_rate": 9.537537537537538e-06, |
|
"loss": 0.5433, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"grad_norm": 82.08344268798828, |
|
"learning_rate": 9.536536536536537e-06, |
|
"loss": 0.6063, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"grad_norm": 10.989513397216797, |
|
"learning_rate": 9.535535535535537e-06, |
|
"loss": 0.5787, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"grad_norm": 14.753369331359863, |
|
"learning_rate": 9.534534534534535e-06, |
|
"loss": 0.5232, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"grad_norm": 7.610456943511963, |
|
"learning_rate": 9.533533533533534e-06, |
|
"loss": 0.56, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.924, |
|
"eval_loss": 0.25301432609558105, |
|
"eval_runtime": 30.0786, |
|
"eval_samples_per_second": 332.463, |
|
"eval_steps_per_second": 1.33, |
|
"step": 4662 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"grad_norm": 10.896953582763672, |
|
"learning_rate": 9.532532532532534e-06, |
|
"loss": 0.5122, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"grad_norm": 10.77942180633545, |
|
"learning_rate": 9.531531531531533e-06, |
|
"loss": 0.5062, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 14.08, |
|
"grad_norm": 8.38948917388916, |
|
"learning_rate": 9.530530530530532e-06, |
|
"loss": 0.4863, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"grad_norm": 11.789532661437988, |
|
"learning_rate": 9.52952952952953e-06, |
|
"loss": 0.5271, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"grad_norm": 12.290909767150879, |
|
"learning_rate": 9.528528528528529e-06, |
|
"loss": 0.5155, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"grad_norm": 8.020136833190918, |
|
"learning_rate": 9.527527527527527e-06, |
|
"loss": 0.5747, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"grad_norm": 29.248809814453125, |
|
"learning_rate": 9.526526526526528e-06, |
|
"loss": 0.6196, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"grad_norm": 19.838760375976562, |
|
"learning_rate": 9.525525525525526e-06, |
|
"loss": 0.5818, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"grad_norm": 22.35586929321289, |
|
"learning_rate": 9.524524524524525e-06, |
|
"loss": 0.5739, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"grad_norm": 10.237540245056152, |
|
"learning_rate": 9.523523523523525e-06, |
|
"loss": 0.5978, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"grad_norm": 8.147768020629883, |
|
"learning_rate": 9.522522522522524e-06, |
|
"loss": 0.5385, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"grad_norm": 10.893712997436523, |
|
"learning_rate": 9.521521521521522e-06, |
|
"loss": 0.5704, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"grad_norm": 17.572235107421875, |
|
"learning_rate": 9.520520520520521e-06, |
|
"loss": 0.5272, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"grad_norm": 12.654411315917969, |
|
"learning_rate": 9.51951951951952e-06, |
|
"loss": 0.5491, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"grad_norm": 14.064764976501465, |
|
"learning_rate": 9.51851851851852e-06, |
|
"loss": 0.5954, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"grad_norm": 15.357030868530273, |
|
"learning_rate": 9.517517517517518e-06, |
|
"loss": 0.566, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"grad_norm": 12.153959274291992, |
|
"learning_rate": 9.516516516516517e-06, |
|
"loss": 0.6124, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"grad_norm": 9.057621002197266, |
|
"learning_rate": 9.515515515515517e-06, |
|
"loss": 0.5356, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"grad_norm": 10.922284126281738, |
|
"learning_rate": 9.514514514514516e-06, |
|
"loss": 0.573, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"grad_norm": 13.01318359375, |
|
"learning_rate": 9.513513513513514e-06, |
|
"loss": 0.5135, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 14.62, |
|
"grad_norm": 10.830948829650879, |
|
"learning_rate": 9.512512512512513e-06, |
|
"loss": 0.5725, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 14.65, |
|
"grad_norm": 14.062458992004395, |
|
"learning_rate": 9.511511511511512e-06, |
|
"loss": 0.5882, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"grad_norm": 30.20458221435547, |
|
"learning_rate": 9.510510510510512e-06, |
|
"loss": 0.5735, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"grad_norm": 8.41458511352539, |
|
"learning_rate": 9.50950950950951e-06, |
|
"loss": 0.5331, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"grad_norm": 7.415578365325928, |
|
"learning_rate": 9.508508508508509e-06, |
|
"loss": 0.5463, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 14.77, |
|
"grad_norm": 16.323087692260742, |
|
"learning_rate": 9.507507507507508e-06, |
|
"loss": 0.5347, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"grad_norm": 19.459257125854492, |
|
"learning_rate": 9.506506506506508e-06, |
|
"loss": 0.5883, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"grad_norm": 10.431855201721191, |
|
"learning_rate": 9.505505505505507e-06, |
|
"loss": 0.5907, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"grad_norm": 10.564922332763672, |
|
"learning_rate": 9.504504504504505e-06, |
|
"loss": 0.5597, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"grad_norm": 9.982166290283203, |
|
"learning_rate": 9.503503503503504e-06, |
|
"loss": 0.5107, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"grad_norm": 16.064804077148438, |
|
"learning_rate": 9.502502502502502e-06, |
|
"loss": 0.5819, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"grad_norm": 12.23503589630127, |
|
"learning_rate": 9.501501501501503e-06, |
|
"loss": 0.6121, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 14.98, |
|
"grad_norm": 12.281106948852539, |
|
"learning_rate": 9.500500500500501e-06, |
|
"loss": 0.5821, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.9217, |
|
"eval_loss": 0.25685247778892517, |
|
"eval_runtime": 30.4112, |
|
"eval_samples_per_second": 328.827, |
|
"eval_steps_per_second": 1.315, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 15.02, |
|
"grad_norm": 13.23077392578125, |
|
"learning_rate": 9.4994994994995e-06, |
|
"loss": 0.4503, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"grad_norm": 12.722877502441406, |
|
"learning_rate": 9.4984984984985e-06, |
|
"loss": 0.5069, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 15.08, |
|
"grad_norm": 15.306122779846191, |
|
"learning_rate": 9.497497497497499e-06, |
|
"loss": 0.5421, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"grad_norm": 10.477108001708984, |
|
"learning_rate": 9.496496496496497e-06, |
|
"loss": 0.5861, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"grad_norm": 11.663418769836426, |
|
"learning_rate": 9.495495495495496e-06, |
|
"loss": 0.4983, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"grad_norm": 15.94667911529541, |
|
"learning_rate": 9.494494494494494e-06, |
|
"loss": 0.5003, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"grad_norm": 10.300756454467773, |
|
"learning_rate": 9.493493493493495e-06, |
|
"loss": 0.5396, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"grad_norm": 9.597443580627441, |
|
"learning_rate": 9.492492492492493e-06, |
|
"loss": 0.5719, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"grad_norm": 11.399624824523926, |
|
"learning_rate": 9.491491491491492e-06, |
|
"loss": 0.5314, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"grad_norm": 10.787353515625, |
|
"learning_rate": 9.490490490490492e-06, |
|
"loss": 0.5105, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"grad_norm": 9.648843765258789, |
|
"learning_rate": 9.489489489489491e-06, |
|
"loss": 0.5592, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 15.35, |
|
"grad_norm": 11.683751106262207, |
|
"learning_rate": 9.48848848848849e-06, |
|
"loss": 0.4935, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 15.38, |
|
"grad_norm": 9.463213920593262, |
|
"learning_rate": 9.487487487487488e-06, |
|
"loss": 0.5321, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"grad_norm": 15.175768852233887, |
|
"learning_rate": 9.486486486486487e-06, |
|
"loss": 0.5266, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"grad_norm": 12.34326171875, |
|
"learning_rate": 9.485485485485487e-06, |
|
"loss": 0.522, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"grad_norm": 9.944204330444336, |
|
"learning_rate": 9.484484484484486e-06, |
|
"loss": 0.5229, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"grad_norm": 11.730018615722656, |
|
"learning_rate": 9.483483483483484e-06, |
|
"loss": 0.5248, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"grad_norm": 11.413137435913086, |
|
"learning_rate": 9.482482482482483e-06, |
|
"loss": 0.5654, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"grad_norm": 14.541297912597656, |
|
"learning_rate": 9.481481481481483e-06, |
|
"loss": 0.5079, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"grad_norm": 16.22747230529785, |
|
"learning_rate": 9.480480480480482e-06, |
|
"loss": 0.5292, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"grad_norm": 10.435576438903809, |
|
"learning_rate": 9.47947947947948e-06, |
|
"loss": 0.488, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"grad_norm": 9.990010261535645, |
|
"learning_rate": 9.478478478478479e-06, |
|
"loss": 0.4933, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"grad_norm": 13.72536563873291, |
|
"learning_rate": 9.477477477477477e-06, |
|
"loss": 0.5082, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"grad_norm": 15.257143020629883, |
|
"learning_rate": 9.476476476476478e-06, |
|
"loss": 0.5054, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"grad_norm": 13.4158935546875, |
|
"learning_rate": 9.475475475475476e-06, |
|
"loss": 0.5062, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 15.77, |
|
"grad_norm": 9.417861938476562, |
|
"learning_rate": 9.474474474474475e-06, |
|
"loss": 0.5095, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"grad_norm": 15.251216888427734, |
|
"learning_rate": 9.473473473473475e-06, |
|
"loss": 0.601, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"grad_norm": 9.081033706665039, |
|
"learning_rate": 9.472472472472474e-06, |
|
"loss": 0.4405, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"grad_norm": 9.81359577178955, |
|
"learning_rate": 9.471471471471472e-06, |
|
"loss": 0.5421, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"grad_norm": 9.995782852172852, |
|
"learning_rate": 9.470470470470471e-06, |
|
"loss": 0.5325, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"grad_norm": 10.108242988586426, |
|
"learning_rate": 9.46946946946947e-06, |
|
"loss": 0.5547, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"grad_norm": 16.46483039855957, |
|
"learning_rate": 9.46846846846847e-06, |
|
"loss": 0.5079, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 15.98, |
|
"grad_norm": 9.164888381958008, |
|
"learning_rate": 9.467467467467468e-06, |
|
"loss": 0.5203, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy": 0.9278, |
|
"eval_loss": 0.24757839739322662, |
|
"eval_runtime": 29.8672, |
|
"eval_samples_per_second": 334.816, |
|
"eval_steps_per_second": 1.339, |
|
"step": 5328 |
|
}, |
|
{ |
|
"epoch": 16.01, |
|
"grad_norm": 10.40545654296875, |
|
"learning_rate": 9.466466466466467e-06, |
|
"loss": 0.6069, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"grad_norm": 11.244277954101562, |
|
"learning_rate": 9.465465465465467e-06, |
|
"loss": 0.5398, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"grad_norm": 9.848921775817871, |
|
"learning_rate": 9.464464464464466e-06, |
|
"loss": 0.522, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"grad_norm": 10.520909309387207, |
|
"learning_rate": 9.463463463463464e-06, |
|
"loss": 0.5363, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 16.13, |
|
"grad_norm": 15.065001487731934, |
|
"learning_rate": 9.462462462462463e-06, |
|
"loss": 0.6021, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 16.16, |
|
"grad_norm": 11.10487174987793, |
|
"learning_rate": 9.461461461461462e-06, |
|
"loss": 0.5189, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 16.19, |
|
"grad_norm": 10.829561233520508, |
|
"learning_rate": 9.46046046046046e-06, |
|
"loss": 0.5338, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 16.22, |
|
"grad_norm": 11.856501579284668, |
|
"learning_rate": 9.45945945945946e-06, |
|
"loss": 0.503, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"grad_norm": 17.04743003845215, |
|
"learning_rate": 9.458458458458459e-06, |
|
"loss": 0.5408, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 16.28, |
|
"grad_norm": 11.075129508972168, |
|
"learning_rate": 9.457457457457458e-06, |
|
"loss": 0.506, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"grad_norm": 9.650065422058105, |
|
"learning_rate": 9.456456456456458e-06, |
|
"loss": 0.5234, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"grad_norm": 13.577381134033203, |
|
"learning_rate": 9.455455455455457e-06, |
|
"loss": 0.5978, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"grad_norm": 21.916799545288086, |
|
"learning_rate": 9.454454454454455e-06, |
|
"loss": 0.4931, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"grad_norm": 12.489849090576172, |
|
"learning_rate": 9.453453453453454e-06, |
|
"loss": 0.5898, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 16.43, |
|
"grad_norm": 13.279026985168457, |
|
"learning_rate": 9.452452452452452e-06, |
|
"loss": 0.5541, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"grad_norm": 13.039314270019531, |
|
"learning_rate": 9.451451451451453e-06, |
|
"loss": 0.5193, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 16.49, |
|
"grad_norm": 7.700623989105225, |
|
"learning_rate": 9.450450450450451e-06, |
|
"loss": 0.5478, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"grad_norm": 9.966477394104004, |
|
"learning_rate": 9.44944944944945e-06, |
|
"loss": 0.5737, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 16.55, |
|
"grad_norm": 10.083463668823242, |
|
"learning_rate": 9.44844844844845e-06, |
|
"loss": 0.5538, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"grad_norm": 20.965614318847656, |
|
"learning_rate": 9.447447447447449e-06, |
|
"loss": 0.523, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"grad_norm": 24.368310928344727, |
|
"learning_rate": 9.446446446446447e-06, |
|
"loss": 0.5488, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"grad_norm": 11.053398132324219, |
|
"learning_rate": 9.445445445445446e-06, |
|
"loss": 0.5208, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"grad_norm": 12.406140327453613, |
|
"learning_rate": 9.444444444444445e-06, |
|
"loss": 0.5106, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"grad_norm": 9.5691556930542, |
|
"learning_rate": 9.443443443443445e-06, |
|
"loss": 0.5151, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 16.73, |
|
"grad_norm": 8.728398323059082, |
|
"learning_rate": 9.442442442442443e-06, |
|
"loss": 0.4747, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 16.76, |
|
"grad_norm": 8.877335548400879, |
|
"learning_rate": 9.441441441441442e-06, |
|
"loss": 0.5235, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"grad_norm": 12.02859878540039, |
|
"learning_rate": 9.440440440440442e-06, |
|
"loss": 0.5394, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 16.82, |
|
"grad_norm": 11.32858657836914, |
|
"learning_rate": 9.439439439439441e-06, |
|
"loss": 0.5502, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 16.85, |
|
"grad_norm": 11.6917085647583, |
|
"learning_rate": 9.43843843843844e-06, |
|
"loss": 0.5332, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 16.88, |
|
"grad_norm": 21.33887481689453, |
|
"learning_rate": 9.437437437437438e-06, |
|
"loss": 0.55, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 16.91, |
|
"grad_norm": 10.514164924621582, |
|
"learning_rate": 9.436436436436437e-06, |
|
"loss": 0.5489, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 16.94, |
|
"grad_norm": 12.23349666595459, |
|
"learning_rate": 9.435435435435435e-06, |
|
"loss": 0.4969, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 16.97, |
|
"grad_norm": 10.175886154174805, |
|
"learning_rate": 9.434434434434436e-06, |
|
"loss": 0.5815, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 23.207658767700195, |
|
"learning_rate": 9.433433433433434e-06, |
|
"loss": 0.5387, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_accuracy": 0.9264, |
|
"eval_loss": 0.25176796317100525, |
|
"eval_runtime": 30.4959, |
|
"eval_samples_per_second": 327.913, |
|
"eval_steps_per_second": 1.312, |
|
"step": 5661 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"grad_norm": 13.02208137512207, |
|
"learning_rate": 9.432432432432433e-06, |
|
"loss": 0.6268, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 17.06, |
|
"grad_norm": 12.880228996276855, |
|
"learning_rate": 9.431431431431433e-06, |
|
"loss": 0.5462, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"grad_norm": 12.443921089172363, |
|
"learning_rate": 9.430430430430432e-06, |
|
"loss": 0.5521, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 17.12, |
|
"grad_norm": 7.886068820953369, |
|
"learning_rate": 9.42942942942943e-06, |
|
"loss": 0.5117, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"grad_norm": 15.501462936401367, |
|
"learning_rate": 9.428428428428429e-06, |
|
"loss": 0.5302, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"grad_norm": 12.38558578491211, |
|
"learning_rate": 9.427427427427427e-06, |
|
"loss": 0.5136, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"grad_norm": 12.14132308959961, |
|
"learning_rate": 9.426426426426428e-06, |
|
"loss": 0.4861, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"grad_norm": 10.382802963256836, |
|
"learning_rate": 9.425425425425426e-06, |
|
"loss": 0.5109, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 17.27, |
|
"grad_norm": 15.48659610748291, |
|
"learning_rate": 9.424424424424425e-06, |
|
"loss": 0.4857, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"grad_norm": 8.58863639831543, |
|
"learning_rate": 9.423423423423425e-06, |
|
"loss": 0.5337, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"grad_norm": 17.04405403137207, |
|
"learning_rate": 9.422422422422424e-06, |
|
"loss": 0.5847, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"grad_norm": 12.838520050048828, |
|
"learning_rate": 9.421421421421422e-06, |
|
"loss": 0.5376, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"grad_norm": 7.937318325042725, |
|
"learning_rate": 9.420420420420421e-06, |
|
"loss": 0.5146, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 17.42, |
|
"grad_norm": 9.863258361816406, |
|
"learning_rate": 9.41941941941942e-06, |
|
"loss": 0.5409, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"grad_norm": 14.783023834228516, |
|
"learning_rate": 9.41841841841842e-06, |
|
"loss": 0.5522, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 17.48, |
|
"grad_norm": 9.500492095947266, |
|
"learning_rate": 9.417417417417418e-06, |
|
"loss": 0.4801, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"grad_norm": 9.685933113098145, |
|
"learning_rate": 9.416416416416417e-06, |
|
"loss": 0.53, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"grad_norm": 20.050275802612305, |
|
"learning_rate": 9.415415415415416e-06, |
|
"loss": 0.5096, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 17.57, |
|
"grad_norm": 10.203237533569336, |
|
"learning_rate": 9.414414414414416e-06, |
|
"loss": 0.5229, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"grad_norm": 8.337947845458984, |
|
"learning_rate": 9.413413413413413e-06, |
|
"loss": 0.4883, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"grad_norm": 10.492974281311035, |
|
"learning_rate": 9.412412412412413e-06, |
|
"loss": 0.552, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"grad_norm": 19.00461196899414, |
|
"learning_rate": 9.411411411411412e-06, |
|
"loss": 0.5478, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 17.69, |
|
"grad_norm": 8.795330047607422, |
|
"learning_rate": 9.41041041041041e-06, |
|
"loss": 0.4974, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"grad_norm": 14.406013488769531, |
|
"learning_rate": 9.40940940940941e-06, |
|
"loss": 0.5234, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 17.75, |
|
"grad_norm": 18.222362518310547, |
|
"learning_rate": 9.40840840840841e-06, |
|
"loss": 0.523, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 17.78, |
|
"grad_norm": 26.603675842285156, |
|
"learning_rate": 9.407407407407408e-06, |
|
"loss": 0.4944, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"grad_norm": 10.870711326599121, |
|
"learning_rate": 9.406406406406408e-06, |
|
"loss": 0.579, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 17.84, |
|
"grad_norm": 15.319726943969727, |
|
"learning_rate": 9.405405405405407e-06, |
|
"loss": 0.4967, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"grad_norm": 12.175629615783691, |
|
"learning_rate": 9.404404404404405e-06, |
|
"loss": 0.4831, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 17.9, |
|
"grad_norm": 12.597267150878906, |
|
"learning_rate": 9.403403403403404e-06, |
|
"loss": 0.4885, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"grad_norm": 18.843530654907227, |
|
"learning_rate": 9.402402402402402e-06, |
|
"loss": 0.5816, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"grad_norm": 27.022314071655273, |
|
"learning_rate": 9.401401401401403e-06, |
|
"loss": 0.4817, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 17.99, |
|
"grad_norm": 53.82378387451172, |
|
"learning_rate": 9.400400400400401e-06, |
|
"loss": 0.4921, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_accuracy": 0.9282, |
|
"eval_loss": 0.2474725842475891, |
|
"eval_runtime": 30.3563, |
|
"eval_samples_per_second": 329.421, |
|
"eval_steps_per_second": 1.318, |
|
"step": 5994 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"grad_norm": 12.790140151977539, |
|
"learning_rate": 9.3993993993994e-06, |
|
"loss": 0.4826, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 18.05, |
|
"grad_norm": 11.542041778564453, |
|
"learning_rate": 9.3983983983984e-06, |
|
"loss": 0.492, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 18.08, |
|
"grad_norm": 27.646615982055664, |
|
"learning_rate": 9.397397397397399e-06, |
|
"loss": 0.5432, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"grad_norm": 9.859136581420898, |
|
"learning_rate": 9.396396396396397e-06, |
|
"loss": 0.4926, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"grad_norm": 12.287638664245605, |
|
"learning_rate": 9.395395395395396e-06, |
|
"loss": 0.5048, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 18.17, |
|
"grad_norm": 12.397451400756836, |
|
"learning_rate": 9.394394394394395e-06, |
|
"loss": 0.4915, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 18.2, |
|
"grad_norm": 10.373201370239258, |
|
"learning_rate": 9.393393393393393e-06, |
|
"loss": 0.4925, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"grad_norm": 12.417383193969727, |
|
"learning_rate": 9.392392392392394e-06, |
|
"loss": 0.5435, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"grad_norm": 7.52992057800293, |
|
"learning_rate": 9.391391391391392e-06, |
|
"loss": 0.5586, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"grad_norm": 11.180517196655273, |
|
"learning_rate": 9.39039039039039e-06, |
|
"loss": 0.5261, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"grad_norm": 14.029952049255371, |
|
"learning_rate": 9.389389389389391e-06, |
|
"loss": 0.5059, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"grad_norm": 13.72758960723877, |
|
"learning_rate": 9.388388388388388e-06, |
|
"loss": 0.5025, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 18.38, |
|
"grad_norm": 9.351816177368164, |
|
"learning_rate": 9.387387387387388e-06, |
|
"loss": 0.4725, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"grad_norm": 12.995992660522461, |
|
"learning_rate": 9.386386386386387e-06, |
|
"loss": 0.5565, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"grad_norm": 9.428690910339355, |
|
"learning_rate": 9.385385385385385e-06, |
|
"loss": 0.4801, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"grad_norm": 15.22610092163086, |
|
"learning_rate": 9.384384384384386e-06, |
|
"loss": 0.4922, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"grad_norm": 9.8953857421875, |
|
"learning_rate": 9.383383383383384e-06, |
|
"loss": 0.5103, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"grad_norm": 15.755030632019043, |
|
"learning_rate": 9.382382382382383e-06, |
|
"loss": 0.5289, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"grad_norm": 8.752655029296875, |
|
"learning_rate": 9.381381381381383e-06, |
|
"loss": 0.5092, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 18.59, |
|
"grad_norm": 57.676734924316406, |
|
"learning_rate": 9.380380380380382e-06, |
|
"loss": 0.5321, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 18.62, |
|
"grad_norm": 12.530813217163086, |
|
"learning_rate": 9.37937937937938e-06, |
|
"loss": 0.4906, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"grad_norm": 10.715028762817383, |
|
"learning_rate": 9.378378378378379e-06, |
|
"loss": 0.4887, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 18.68, |
|
"grad_norm": 14.905635833740234, |
|
"learning_rate": 9.377377377377378e-06, |
|
"loss": 0.5427, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 18.71, |
|
"grad_norm": 12.105393409729004, |
|
"learning_rate": 9.376376376376378e-06, |
|
"loss": 0.4693, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 18.74, |
|
"grad_norm": 14.1998929977417, |
|
"learning_rate": 9.375375375375376e-06, |
|
"loss": 0.4998, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 18.77, |
|
"grad_norm": 11.141003608703613, |
|
"learning_rate": 9.374374374374375e-06, |
|
"loss": 0.5454, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 18.8, |
|
"grad_norm": 13.6138916015625, |
|
"learning_rate": 9.373373373373375e-06, |
|
"loss": 0.5684, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 18.83, |
|
"grad_norm": 9.682354927062988, |
|
"learning_rate": 9.372372372372374e-06, |
|
"loss": 0.5221, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"grad_norm": 10.344449043273926, |
|
"learning_rate": 9.371371371371372e-06, |
|
"loss": 0.4974, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"grad_norm": 10.217352867126465, |
|
"learning_rate": 9.370370370370371e-06, |
|
"loss": 0.429, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"grad_norm": 15.496273040771484, |
|
"learning_rate": 9.36936936936937e-06, |
|
"loss": 0.4606, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"grad_norm": 9.484021186828613, |
|
"learning_rate": 9.368368368368368e-06, |
|
"loss": 0.5258, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 18.98, |
|
"grad_norm": 11.742782592773438, |
|
"learning_rate": 9.367367367367369e-06, |
|
"loss": 0.413, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_accuracy": 0.9271, |
|
"eval_loss": 0.2516428232192993, |
|
"eval_runtime": 30.2548, |
|
"eval_samples_per_second": 330.526, |
|
"eval_steps_per_second": 1.322, |
|
"step": 6327 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"grad_norm": 12.082165718078613, |
|
"learning_rate": 9.366366366366367e-06, |
|
"loss": 0.5247, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"grad_norm": 32.512901306152344, |
|
"learning_rate": 9.365365365365366e-06, |
|
"loss": 0.5459, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"grad_norm": 19.557708740234375, |
|
"learning_rate": 9.364364364364366e-06, |
|
"loss": 0.4909, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"grad_norm": 14.893004417419434, |
|
"learning_rate": 9.363363363363363e-06, |
|
"loss": 0.4836, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"grad_norm": 16.80282974243164, |
|
"learning_rate": 9.362362362362363e-06, |
|
"loss": 0.5214, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"grad_norm": 9.32387638092041, |
|
"learning_rate": 9.361361361361362e-06, |
|
"loss": 0.4313, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 19.19, |
|
"grad_norm": 10.158374786376953, |
|
"learning_rate": 9.36036036036036e-06, |
|
"loss": 0.5065, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"grad_norm": 15.345830917358398, |
|
"learning_rate": 9.35935935935936e-06, |
|
"loss": 0.5007, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 19.25, |
|
"grad_norm": 21.002599716186523, |
|
"learning_rate": 9.35835835835836e-06, |
|
"loss": 0.5401, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 19.28, |
|
"grad_norm": 40.8951416015625, |
|
"learning_rate": 9.357357357357358e-06, |
|
"loss": 0.4987, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 19.31, |
|
"grad_norm": 6.890993595123291, |
|
"learning_rate": 9.356356356356358e-06, |
|
"loss": 0.5339, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"grad_norm": 10.289228439331055, |
|
"learning_rate": 9.355355355355357e-06, |
|
"loss": 0.4809, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"grad_norm": 13.26909351348877, |
|
"learning_rate": 9.354354354354355e-06, |
|
"loss": 0.5463, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"grad_norm": 14.539989471435547, |
|
"learning_rate": 9.353353353353354e-06, |
|
"loss": 0.5098, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"grad_norm": 10.325398445129395, |
|
"learning_rate": 9.352352352352353e-06, |
|
"loss": 0.4576, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 19.46, |
|
"grad_norm": 6.7183518409729, |
|
"learning_rate": 9.351351351351353e-06, |
|
"loss": 0.4559, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"grad_norm": 16.46378517150879, |
|
"learning_rate": 9.350350350350351e-06, |
|
"loss": 0.5428, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 19.52, |
|
"grad_norm": 93.92578887939453, |
|
"learning_rate": 9.34934934934935e-06, |
|
"loss": 0.484, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 19.55, |
|
"grad_norm": 8.785821914672852, |
|
"learning_rate": 9.34834834834835e-06, |
|
"loss": 0.5488, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"grad_norm": 11.448771476745605, |
|
"learning_rate": 9.347347347347349e-06, |
|
"loss": 0.4804, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 19.61, |
|
"grad_norm": 8.699326515197754, |
|
"learning_rate": 9.346346346346346e-06, |
|
"loss": 0.4397, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"grad_norm": 109.07538604736328, |
|
"learning_rate": 9.345345345345346e-06, |
|
"loss": 0.5142, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"grad_norm": 25.838956832885742, |
|
"learning_rate": 9.344344344344345e-06, |
|
"loss": 0.4915, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"grad_norm": 14.016207695007324, |
|
"learning_rate": 9.343343343343343e-06, |
|
"loss": 0.502, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 19.73, |
|
"grad_norm": 10.681465148925781, |
|
"learning_rate": 9.342342342342344e-06, |
|
"loss": 0.4819, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"grad_norm": 14.305381774902344, |
|
"learning_rate": 9.341341341341342e-06, |
|
"loss": 0.4675, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"grad_norm": 8.296285629272461, |
|
"learning_rate": 9.34034034034034e-06, |
|
"loss": 0.4814, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"grad_norm": 28.747377395629883, |
|
"learning_rate": 9.339339339339341e-06, |
|
"loss": 0.5212, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"grad_norm": 9.720871925354004, |
|
"learning_rate": 9.338338338338338e-06, |
|
"loss": 0.5133, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 19.88, |
|
"grad_norm": 12.930212020874023, |
|
"learning_rate": 9.337337337337338e-06, |
|
"loss": 0.5087, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"grad_norm": 12.70535945892334, |
|
"learning_rate": 9.336336336336337e-06, |
|
"loss": 0.497, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"grad_norm": 22.562301635742188, |
|
"learning_rate": 9.335335335335335e-06, |
|
"loss": 0.4719, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"grad_norm": 10.32685375213623, |
|
"learning_rate": 9.334334334334336e-06, |
|
"loss": 0.4814, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 26.123905181884766, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.4612, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_accuracy": 0.9242, |
|
"eval_loss": 0.2537807822227478, |
|
"eval_runtime": 30.8556, |
|
"eval_samples_per_second": 324.09, |
|
"eval_steps_per_second": 1.296, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 20.03, |
|
"grad_norm": 9.324190139770508, |
|
"learning_rate": 9.332332332332333e-06, |
|
"loss": 0.4782, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 20.06, |
|
"grad_norm": 8.56516170501709, |
|
"learning_rate": 9.331331331331333e-06, |
|
"loss": 0.4571, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 20.09, |
|
"grad_norm": 10.29270076751709, |
|
"learning_rate": 9.330330330330332e-06, |
|
"loss": 0.5438, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 20.12, |
|
"grad_norm": 8.01328182220459, |
|
"learning_rate": 9.32932932932933e-06, |
|
"loss": 0.4622, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 20.15, |
|
"grad_norm": 12.914417266845703, |
|
"learning_rate": 9.328328328328329e-06, |
|
"loss": 0.403, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 20.18, |
|
"grad_norm": 13.325395584106445, |
|
"learning_rate": 9.327327327327328e-06, |
|
"loss": 0.4534, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 20.21, |
|
"grad_norm": 11.514555931091309, |
|
"learning_rate": 9.326326326326328e-06, |
|
"loss": 0.4949, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 20.24, |
|
"grad_norm": 8.554108619689941, |
|
"learning_rate": 9.325325325325326e-06, |
|
"loss": 0.4979, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 20.27, |
|
"grad_norm": 6.8894362449646, |
|
"learning_rate": 9.324324324324325e-06, |
|
"loss": 0.5234, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 20.3, |
|
"grad_norm": 36.356204986572266, |
|
"learning_rate": 9.323323323323324e-06, |
|
"loss": 0.4435, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"grad_norm": 10.051351547241211, |
|
"learning_rate": 9.322322322322324e-06, |
|
"loss": 0.4812, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 20.36, |
|
"grad_norm": 7.804532527923584, |
|
"learning_rate": 9.321321321321321e-06, |
|
"loss": 0.5634, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 20.39, |
|
"grad_norm": 11.409031867980957, |
|
"learning_rate": 9.320320320320321e-06, |
|
"loss": 0.4961, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 20.42, |
|
"grad_norm": 12.909208297729492, |
|
"learning_rate": 9.31931931931932e-06, |
|
"loss": 0.5435, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 20.45, |
|
"grad_norm": 9.927260398864746, |
|
"learning_rate": 9.318318318318318e-06, |
|
"loss": 0.5077, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 20.48, |
|
"grad_norm": 16.0677490234375, |
|
"learning_rate": 9.317317317317319e-06, |
|
"loss": 0.5106, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 20.51, |
|
"grad_norm": 10.34533405303955, |
|
"learning_rate": 9.316316316316317e-06, |
|
"loss": 0.4648, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 20.54, |
|
"grad_norm": 8.697833061218262, |
|
"learning_rate": 9.315315315315316e-06, |
|
"loss": 0.4776, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 20.57, |
|
"grad_norm": 11.206692695617676, |
|
"learning_rate": 9.314314314314316e-06, |
|
"loss": 0.4511, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 20.6, |
|
"grad_norm": 18.051631927490234, |
|
"learning_rate": 9.313313313313313e-06, |
|
"loss": 0.5444, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"grad_norm": 11.191703796386719, |
|
"learning_rate": 9.312312312312313e-06, |
|
"loss": 0.3936, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 20.66, |
|
"grad_norm": 17.013591766357422, |
|
"learning_rate": 9.311311311311312e-06, |
|
"loss": 0.4638, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 20.69, |
|
"grad_norm": 9.712099075317383, |
|
"learning_rate": 9.31031031031031e-06, |
|
"loss": 0.5224, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 20.72, |
|
"grad_norm": 8.011286735534668, |
|
"learning_rate": 9.30930930930931e-06, |
|
"loss": 0.4889, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"grad_norm": 11.292800903320312, |
|
"learning_rate": 9.30830830830831e-06, |
|
"loss": 0.4924, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 20.78, |
|
"grad_norm": 12.472173690795898, |
|
"learning_rate": 9.307307307307308e-06, |
|
"loss": 0.4625, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 20.81, |
|
"grad_norm": 8.170212745666504, |
|
"learning_rate": 9.306306306306308e-06, |
|
"loss": 0.4415, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 20.84, |
|
"grad_norm": 20.078588485717773, |
|
"learning_rate": 9.305305305305305e-06, |
|
"loss": 0.4981, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 20.87, |
|
"grad_norm": 11.406576156616211, |
|
"learning_rate": 9.304304304304305e-06, |
|
"loss": 0.5696, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 20.9, |
|
"grad_norm": 10.49374008178711, |
|
"learning_rate": 9.303303303303304e-06, |
|
"loss": 0.5004, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 20.93, |
|
"grad_norm": 9.33711051940918, |
|
"learning_rate": 9.302302302302303e-06, |
|
"loss": 0.5085, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 20.96, |
|
"grad_norm": 13.773951530456543, |
|
"learning_rate": 9.301301301301301e-06, |
|
"loss": 0.5632, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 20.99, |
|
"grad_norm": 9.44569206237793, |
|
"learning_rate": 9.300300300300302e-06, |
|
"loss": 0.4903, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"eval_accuracy": 0.9262, |
|
"eval_loss": 0.2555881142616272, |
|
"eval_runtime": 30.7701, |
|
"eval_samples_per_second": 324.99, |
|
"eval_steps_per_second": 1.3, |
|
"step": 6993 |
|
}, |
|
{ |
|
"epoch": 21.02, |
|
"grad_norm": 17.463153839111328, |
|
"learning_rate": 9.2992992992993e-06, |
|
"loss": 0.5627, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 21.05, |
|
"grad_norm": 13.008602142333984, |
|
"learning_rate": 9.298298298298299e-06, |
|
"loss": 0.5099, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 21.08, |
|
"grad_norm": 9.015759468078613, |
|
"learning_rate": 9.297297297297299e-06, |
|
"loss": 0.4618, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 21.11, |
|
"grad_norm": 12.224336624145508, |
|
"learning_rate": 9.296296296296296e-06, |
|
"loss": 0.4893, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 21.14, |
|
"grad_norm": 14.461615562438965, |
|
"learning_rate": 9.295295295295296e-06, |
|
"loss": 0.4554, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 21.17, |
|
"grad_norm": 14.201723098754883, |
|
"learning_rate": 9.294294294294295e-06, |
|
"loss": 0.4771, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 21.2, |
|
"grad_norm": 7.477570533752441, |
|
"learning_rate": 9.293293293293293e-06, |
|
"loss": 0.4875, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 21.23, |
|
"grad_norm": 9.28654956817627, |
|
"learning_rate": 9.292292292292294e-06, |
|
"loss": 0.4739, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 21.26, |
|
"grad_norm": 9.934181213378906, |
|
"learning_rate": 9.291291291291292e-06, |
|
"loss": 0.5152, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 21.29, |
|
"grad_norm": 18.241933822631836, |
|
"learning_rate": 9.29029029029029e-06, |
|
"loss": 0.535, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 21.32, |
|
"grad_norm": 8.119966506958008, |
|
"learning_rate": 9.289289289289291e-06, |
|
"loss": 0.4178, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 21.35, |
|
"grad_norm": 20.503637313842773, |
|
"learning_rate": 9.288288288288288e-06, |
|
"loss": 0.5279, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 21.38, |
|
"grad_norm": 13.679118156433105, |
|
"learning_rate": 9.287287287287288e-06, |
|
"loss": 0.4412, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 21.41, |
|
"grad_norm": 10.256949424743652, |
|
"learning_rate": 9.286286286286287e-06, |
|
"loss": 0.5258, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 21.44, |
|
"grad_norm": 14.46385383605957, |
|
"learning_rate": 9.285285285285286e-06, |
|
"loss": 0.5184, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 21.47, |
|
"grad_norm": 8.546163558959961, |
|
"learning_rate": 9.284284284284286e-06, |
|
"loss": 0.4498, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 21.5, |
|
"grad_norm": 13.359642028808594, |
|
"learning_rate": 9.283283283283284e-06, |
|
"loss": 0.4551, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 21.53, |
|
"grad_norm": 8.469135284423828, |
|
"learning_rate": 9.282282282282283e-06, |
|
"loss": 0.5594, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 21.56, |
|
"grad_norm": 11.59361457824707, |
|
"learning_rate": 9.281281281281283e-06, |
|
"loss": 0.4551, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 21.59, |
|
"grad_norm": 10.340875625610352, |
|
"learning_rate": 9.28028028028028e-06, |
|
"loss": 0.5166, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 21.62, |
|
"grad_norm": 14.073348045349121, |
|
"learning_rate": 9.27927927927928e-06, |
|
"loss": 0.4156, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 21.65, |
|
"grad_norm": 17.552919387817383, |
|
"learning_rate": 9.278278278278279e-06, |
|
"loss": 0.5288, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"grad_norm": 8.071002006530762, |
|
"learning_rate": 9.277277277277278e-06, |
|
"loss": 0.4867, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 21.71, |
|
"grad_norm": 10.877429008483887, |
|
"learning_rate": 9.276276276276276e-06, |
|
"loss": 0.4382, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 21.74, |
|
"grad_norm": 16.8603458404541, |
|
"learning_rate": 9.275275275275277e-06, |
|
"loss": 0.4981, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 21.77, |
|
"grad_norm": 16.255592346191406, |
|
"learning_rate": 9.274274274274275e-06, |
|
"loss": 0.4993, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 21.8, |
|
"grad_norm": 12.949196815490723, |
|
"learning_rate": 9.273273273273274e-06, |
|
"loss": 0.5542, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 21.83, |
|
"grad_norm": 12.743133544921875, |
|
"learning_rate": 9.272272272272274e-06, |
|
"loss": 0.5043, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 21.86, |
|
"grad_norm": 13.237237930297852, |
|
"learning_rate": 9.271271271271271e-06, |
|
"loss": 0.505, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 21.89, |
|
"grad_norm": 9.304459571838379, |
|
"learning_rate": 9.270270270270271e-06, |
|
"loss": 0.4848, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 21.92, |
|
"grad_norm": 12.196815490722656, |
|
"learning_rate": 9.26926926926927e-06, |
|
"loss": 0.4643, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 21.95, |
|
"grad_norm": 6.767624378204346, |
|
"learning_rate": 9.268268268268268e-06, |
|
"loss": 0.541, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 21.98, |
|
"grad_norm": 12.607462882995605, |
|
"learning_rate": 9.267267267267269e-06, |
|
"loss": 0.4953, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"eval_accuracy": 0.9271, |
|
"eval_loss": 0.2500782608985901, |
|
"eval_runtime": 30.4081, |
|
"eval_samples_per_second": 328.859, |
|
"eval_steps_per_second": 1.315, |
|
"step": 7326 |
|
}, |
|
{ |
|
"epoch": 22.01, |
|
"grad_norm": 7.150274276733398, |
|
"learning_rate": 9.266266266266267e-06, |
|
"loss": 0.5962, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 22.04, |
|
"grad_norm": 13.817320823669434, |
|
"learning_rate": 9.265265265265266e-06, |
|
"loss": 0.4671, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 22.07, |
|
"grad_norm": 9.749372482299805, |
|
"learning_rate": 9.264264264264266e-06, |
|
"loss": 0.4525, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 22.1, |
|
"grad_norm": 17.435985565185547, |
|
"learning_rate": 9.263263263263263e-06, |
|
"loss": 0.4505, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 22.13, |
|
"grad_norm": 25.06301498413086, |
|
"learning_rate": 9.262262262262263e-06, |
|
"loss": 0.4748, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 22.16, |
|
"grad_norm": 7.567307949066162, |
|
"learning_rate": 9.261261261261262e-06, |
|
"loss": 0.4632, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 22.19, |
|
"grad_norm": 10.676952362060547, |
|
"learning_rate": 9.26026026026026e-06, |
|
"loss": 0.466, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 22.22, |
|
"grad_norm": 9.568814277648926, |
|
"learning_rate": 9.25925925925926e-06, |
|
"loss": 0.4918, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 22.25, |
|
"grad_norm": 11.789005279541016, |
|
"learning_rate": 9.25825825825826e-06, |
|
"loss": 0.5194, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 22.28, |
|
"grad_norm": 10.343085289001465, |
|
"learning_rate": 9.257257257257258e-06, |
|
"loss": 0.4521, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 22.31, |
|
"grad_norm": 8.934516906738281, |
|
"learning_rate": 9.256256256256257e-06, |
|
"loss": 0.4829, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 22.34, |
|
"grad_norm": 8.359495162963867, |
|
"learning_rate": 9.255255255255255e-06, |
|
"loss": 0.4337, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 22.37, |
|
"grad_norm": 10.556571006774902, |
|
"learning_rate": 9.254254254254254e-06, |
|
"loss": 0.483, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"grad_norm": 14.519379615783691, |
|
"learning_rate": 9.253253253253254e-06, |
|
"loss": 0.4496, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 22.43, |
|
"grad_norm": 18.63585662841797, |
|
"learning_rate": 9.252252252252253e-06, |
|
"loss": 0.5033, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 22.46, |
|
"grad_norm": 10.354231834411621, |
|
"learning_rate": 9.251251251251251e-06, |
|
"loss": 0.5256, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 22.49, |
|
"grad_norm": 10.034125328063965, |
|
"learning_rate": 9.250250250250252e-06, |
|
"loss": 0.4682, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"grad_norm": 10.081664085388184, |
|
"learning_rate": 9.24924924924925e-06, |
|
"loss": 0.4549, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 22.55, |
|
"grad_norm": 12.067422866821289, |
|
"learning_rate": 9.248248248248249e-06, |
|
"loss": 0.5043, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 22.58, |
|
"grad_norm": 8.950556755065918, |
|
"learning_rate": 9.247247247247249e-06, |
|
"loss": 0.4407, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 22.61, |
|
"grad_norm": 9.73210334777832, |
|
"learning_rate": 9.246246246246246e-06, |
|
"loss": 0.4622, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 22.64, |
|
"grad_norm": 9.844476699829102, |
|
"learning_rate": 9.245245245245246e-06, |
|
"loss": 0.4713, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"grad_norm": 10.3902587890625, |
|
"learning_rate": 9.244244244244245e-06, |
|
"loss": 0.4744, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"grad_norm": 9.012811660766602, |
|
"learning_rate": 9.243243243243243e-06, |
|
"loss": 0.5062, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"grad_norm": 7.681649684906006, |
|
"learning_rate": 9.242242242242244e-06, |
|
"loss": 0.4653, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 22.76, |
|
"grad_norm": 7.8287506103515625, |
|
"learning_rate": 9.241241241241242e-06, |
|
"loss": 0.5323, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 22.79, |
|
"grad_norm": 20.302045822143555, |
|
"learning_rate": 9.240240240240241e-06, |
|
"loss": 0.4863, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"grad_norm": 8.254586219787598, |
|
"learning_rate": 9.239239239239241e-06, |
|
"loss": 0.5281, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 22.85, |
|
"grad_norm": 18.055051803588867, |
|
"learning_rate": 9.238238238238238e-06, |
|
"loss": 0.4896, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 22.88, |
|
"grad_norm": 12.08217716217041, |
|
"learning_rate": 9.237237237237238e-06, |
|
"loss": 0.5, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 22.91, |
|
"grad_norm": 8.12684440612793, |
|
"learning_rate": 9.236236236236237e-06, |
|
"loss": 0.449, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 22.94, |
|
"grad_norm": 10.148077011108398, |
|
"learning_rate": 9.235235235235236e-06, |
|
"loss": 0.5071, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 22.97, |
|
"grad_norm": 8.919116973876953, |
|
"learning_rate": 9.234234234234236e-06, |
|
"loss": 0.4922, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"eval_accuracy": 0.9277, |
|
"eval_loss": 0.24855679273605347, |
|
"eval_runtime": 30.7483, |
|
"eval_samples_per_second": 325.221, |
|
"eval_steps_per_second": 1.301, |
|
"step": 7659 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"grad_norm": 7.698756217956543, |
|
"learning_rate": 9.233233233233234e-06, |
|
"loss": 0.4575, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 23.03, |
|
"grad_norm": 12.64100170135498, |
|
"learning_rate": 9.232232232232233e-06, |
|
"loss": 0.4228, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 23.06, |
|
"grad_norm": 14.073904991149902, |
|
"learning_rate": 9.231231231231232e-06, |
|
"loss": 0.5228, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 23.09, |
|
"grad_norm": 13.450045585632324, |
|
"learning_rate": 9.23023023023023e-06, |
|
"loss": 0.4917, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 23.12, |
|
"grad_norm": 9.442963600158691, |
|
"learning_rate": 9.229229229229229e-06, |
|
"loss": 0.4431, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 23.15, |
|
"grad_norm": 12.14179515838623, |
|
"learning_rate": 9.228228228228229e-06, |
|
"loss": 0.4929, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 23.18, |
|
"grad_norm": 15.105382919311523, |
|
"learning_rate": 9.227227227227228e-06, |
|
"loss": 0.4966, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 23.21, |
|
"grad_norm": 15.966352462768555, |
|
"learning_rate": 9.226226226226226e-06, |
|
"loss": 0.4768, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 23.24, |
|
"grad_norm": 17.863000869750977, |
|
"learning_rate": 9.225225225225227e-06, |
|
"loss": 0.4558, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 23.27, |
|
"grad_norm": 12.311907768249512, |
|
"learning_rate": 9.224224224224225e-06, |
|
"loss": 0.4995, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 23.3, |
|
"grad_norm": 17.10275650024414, |
|
"learning_rate": 9.223223223223224e-06, |
|
"loss": 0.4514, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 23.33, |
|
"grad_norm": 17.76338005065918, |
|
"learning_rate": 9.222222222222224e-06, |
|
"loss": 0.4349, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 23.36, |
|
"grad_norm": 16.42094612121582, |
|
"learning_rate": 9.221221221221221e-06, |
|
"loss": 0.4812, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 23.39, |
|
"grad_norm": 14.530420303344727, |
|
"learning_rate": 9.220220220220221e-06, |
|
"loss": 0.4498, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 23.42, |
|
"grad_norm": 14.333878517150879, |
|
"learning_rate": 9.21921921921922e-06, |
|
"loss": 0.4657, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 23.45, |
|
"grad_norm": 29.57023048400879, |
|
"learning_rate": 9.218218218218218e-06, |
|
"loss": 0.46, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 23.48, |
|
"grad_norm": 12.47533130645752, |
|
"learning_rate": 9.217217217217219e-06, |
|
"loss": 0.4458, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 23.51, |
|
"grad_norm": 17.979331970214844, |
|
"learning_rate": 9.216216216216217e-06, |
|
"loss": 0.4812, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 23.54, |
|
"grad_norm": 33.53706741333008, |
|
"learning_rate": 9.215215215215216e-06, |
|
"loss": 0.4412, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 23.57, |
|
"grad_norm": 9.694183349609375, |
|
"learning_rate": 9.214214214214216e-06, |
|
"loss": 0.5278, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 23.6, |
|
"grad_norm": 10.997142791748047, |
|
"learning_rate": 9.213213213213213e-06, |
|
"loss": 0.3873, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 23.63, |
|
"grad_norm": 14.332605361938477, |
|
"learning_rate": 9.212212212212213e-06, |
|
"loss": 0.4381, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 23.66, |
|
"grad_norm": 21.167810440063477, |
|
"learning_rate": 9.211211211211212e-06, |
|
"loss": 0.4479, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 23.69, |
|
"grad_norm": 11.316388130187988, |
|
"learning_rate": 9.21021021021021e-06, |
|
"loss": 0.4813, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 23.72, |
|
"grad_norm": 10.724557876586914, |
|
"learning_rate": 9.20920920920921e-06, |
|
"loss": 0.4825, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 23.75, |
|
"grad_norm": 8.695270538330078, |
|
"learning_rate": 9.20820820820821e-06, |
|
"loss": 0.4389, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 23.78, |
|
"grad_norm": 8.966339111328125, |
|
"learning_rate": 9.207207207207208e-06, |
|
"loss": 0.4852, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 23.81, |
|
"grad_norm": 9.002543449401855, |
|
"learning_rate": 9.206206206206207e-06, |
|
"loss": 0.4548, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 23.84, |
|
"grad_norm": 7.919975280761719, |
|
"learning_rate": 9.205205205205205e-06, |
|
"loss": 0.5072, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 23.87, |
|
"grad_norm": 10.650671005249023, |
|
"learning_rate": 9.204204204204204e-06, |
|
"loss": 0.4615, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 23.9, |
|
"grad_norm": 17.29688262939453, |
|
"learning_rate": 9.203203203203204e-06, |
|
"loss": 0.3928, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 23.93, |
|
"grad_norm": 9.760119438171387, |
|
"learning_rate": 9.202202202202203e-06, |
|
"loss": 0.5134, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 23.96, |
|
"grad_norm": 11.86888313293457, |
|
"learning_rate": 9.201201201201201e-06, |
|
"loss": 0.4764, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 23.99, |
|
"grad_norm": 47.17730712890625, |
|
"learning_rate": 9.200200200200202e-06, |
|
"loss": 0.4603, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_accuracy": 0.9234, |
|
"eval_loss": 0.25501587986946106, |
|
"eval_runtime": 30.8318, |
|
"eval_samples_per_second": 324.341, |
|
"eval_steps_per_second": 1.297, |
|
"step": 7992 |
|
}, |
|
{ |
|
"epoch": 24.02, |
|
"grad_norm": 10.768325805664062, |
|
"learning_rate": 9.1991991991992e-06, |
|
"loss": 0.4188, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 24.05, |
|
"grad_norm": 14.675819396972656, |
|
"learning_rate": 9.198198198198199e-06, |
|
"loss": 0.4144, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 24.08, |
|
"grad_norm": 8.436361312866211, |
|
"learning_rate": 9.197197197197199e-06, |
|
"loss": 0.4573, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 24.11, |
|
"grad_norm": 10.41925048828125, |
|
"learning_rate": 9.196196196196196e-06, |
|
"loss": 0.4338, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 24.14, |
|
"grad_norm": 15.963869094848633, |
|
"learning_rate": 9.195195195195196e-06, |
|
"loss": 0.4744, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 24.17, |
|
"grad_norm": 14.007654190063477, |
|
"learning_rate": 9.194194194194195e-06, |
|
"loss": 0.4865, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 24.2, |
|
"grad_norm": 11.407424926757812, |
|
"learning_rate": 9.193193193193194e-06, |
|
"loss": 0.4371, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 24.23, |
|
"grad_norm": 41.07179641723633, |
|
"learning_rate": 9.192192192192194e-06, |
|
"loss": 0.4536, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 24.26, |
|
"grad_norm": 8.165257453918457, |
|
"learning_rate": 9.191191191191192e-06, |
|
"loss": 0.4284, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 24.29, |
|
"grad_norm": 9.9638671875, |
|
"learning_rate": 9.190190190190191e-06, |
|
"loss": 0.4581, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 24.32, |
|
"grad_norm": 15.189397811889648, |
|
"learning_rate": 9.189189189189191e-06, |
|
"loss": 0.4933, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 24.35, |
|
"grad_norm": 12.482817649841309, |
|
"learning_rate": 9.188188188188188e-06, |
|
"loss": 0.4576, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 24.38, |
|
"grad_norm": 14.716872215270996, |
|
"learning_rate": 9.187187187187187e-06, |
|
"loss": 0.4888, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 24.41, |
|
"grad_norm": 17.546016693115234, |
|
"learning_rate": 9.186186186186187e-06, |
|
"loss": 0.4797, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 24.44, |
|
"grad_norm": 12.303563117980957, |
|
"learning_rate": 9.185185185185186e-06, |
|
"loss": 0.5145, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 24.47, |
|
"grad_norm": 8.901636123657227, |
|
"learning_rate": 9.184184184184184e-06, |
|
"loss": 0.4398, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 24.5, |
|
"grad_norm": 13.526920318603516, |
|
"learning_rate": 9.183183183183185e-06, |
|
"loss": 0.4182, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 24.53, |
|
"grad_norm": 11.501439094543457, |
|
"learning_rate": 9.182182182182183e-06, |
|
"loss": 0.4582, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 24.56, |
|
"grad_norm": 10.772521018981934, |
|
"learning_rate": 9.181181181181182e-06, |
|
"loss": 0.4541, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 24.59, |
|
"grad_norm": 11.049300193786621, |
|
"learning_rate": 9.18018018018018e-06, |
|
"loss": 0.4674, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 24.62, |
|
"grad_norm": 19.82034683227539, |
|
"learning_rate": 9.179179179179179e-06, |
|
"loss": 0.4205, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 24.65, |
|
"grad_norm": 8.384696006774902, |
|
"learning_rate": 9.17817817817818e-06, |
|
"loss": 0.4723, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 24.68, |
|
"grad_norm": 11.282562255859375, |
|
"learning_rate": 9.177177177177178e-06, |
|
"loss": 0.4711, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 24.71, |
|
"grad_norm": 13.955788612365723, |
|
"learning_rate": 9.176176176176176e-06, |
|
"loss": 0.4702, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 24.74, |
|
"grad_norm": 5.679299831390381, |
|
"learning_rate": 9.175175175175177e-06, |
|
"loss": 0.4309, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 24.77, |
|
"grad_norm": 10.345995903015137, |
|
"learning_rate": 9.174174174174175e-06, |
|
"loss": 0.4701, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 24.8, |
|
"grad_norm": 20.478515625, |
|
"learning_rate": 9.173173173173174e-06, |
|
"loss": 0.4685, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 24.83, |
|
"grad_norm": 14.467691421508789, |
|
"learning_rate": 9.172172172172172e-06, |
|
"loss": 0.5148, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 24.86, |
|
"grad_norm": 8.202590942382812, |
|
"learning_rate": 9.171171171171171e-06, |
|
"loss": 0.3606, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 24.89, |
|
"grad_norm": 8.882107734680176, |
|
"learning_rate": 9.170170170170171e-06, |
|
"loss": 0.4808, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 24.92, |
|
"grad_norm": 14.242910385131836, |
|
"learning_rate": 9.16916916916917e-06, |
|
"loss": 0.4177, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 24.95, |
|
"grad_norm": 9.910883903503418, |
|
"learning_rate": 9.168168168168169e-06, |
|
"loss": 0.4853, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 24.98, |
|
"grad_norm": 10.825551986694336, |
|
"learning_rate": 9.167167167167169e-06, |
|
"loss": 0.4405, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"eval_accuracy": 0.9285, |
|
"eval_loss": 0.2476295828819275, |
|
"eval_runtime": 30.5772, |
|
"eval_samples_per_second": 327.041, |
|
"eval_steps_per_second": 1.308, |
|
"step": 8325 |
|
}, |
|
{ |
|
"epoch": 25.02, |
|
"grad_norm": 16.443073272705078, |
|
"learning_rate": 9.166166166166167e-06, |
|
"loss": 0.4394, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 25.05, |
|
"grad_norm": 13.089273452758789, |
|
"learning_rate": 9.165165165165166e-06, |
|
"loss": 0.4354, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 25.08, |
|
"grad_norm": 17.05743408203125, |
|
"learning_rate": 9.164164164164165e-06, |
|
"loss": 0.4766, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 25.11, |
|
"grad_norm": 17.6065673828125, |
|
"learning_rate": 9.163163163163163e-06, |
|
"loss": 0.4629, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 25.14, |
|
"grad_norm": 12.650850296020508, |
|
"learning_rate": 9.162162162162162e-06, |
|
"loss": 0.472, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 25.17, |
|
"grad_norm": 33.77656936645508, |
|
"learning_rate": 9.161161161161162e-06, |
|
"loss": 0.4871, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 25.2, |
|
"grad_norm": 10.210184097290039, |
|
"learning_rate": 9.16016016016016e-06, |
|
"loss": 0.4343, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 25.23, |
|
"grad_norm": 13.365535736083984, |
|
"learning_rate": 9.15915915915916e-06, |
|
"loss": 0.3632, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 25.26, |
|
"grad_norm": 8.703694343566895, |
|
"learning_rate": 9.15815815815816e-06, |
|
"loss": 0.4402, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 25.29, |
|
"grad_norm": 12.968696594238281, |
|
"learning_rate": 9.157157157157158e-06, |
|
"loss": 0.4308, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 25.32, |
|
"grad_norm": 13.030708312988281, |
|
"learning_rate": 9.156156156156157e-06, |
|
"loss": 0.4876, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 25.35, |
|
"grad_norm": 11.491731643676758, |
|
"learning_rate": 9.155155155155155e-06, |
|
"loss": 0.4824, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 25.38, |
|
"grad_norm": 15.222594261169434, |
|
"learning_rate": 9.154154154154154e-06, |
|
"loss": 0.4647, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 25.41, |
|
"grad_norm": 10.160212516784668, |
|
"learning_rate": 9.153153153153154e-06, |
|
"loss": 0.5069, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 25.44, |
|
"grad_norm": 9.530731201171875, |
|
"learning_rate": 9.152152152152153e-06, |
|
"loss": 0.4706, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 25.47, |
|
"grad_norm": 9.125948905944824, |
|
"learning_rate": 9.151151151151151e-06, |
|
"loss": 0.4115, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 25.5, |
|
"grad_norm": 15.237028121948242, |
|
"learning_rate": 9.150150150150152e-06, |
|
"loss": 0.503, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 25.53, |
|
"grad_norm": 10.005708694458008, |
|
"learning_rate": 9.14914914914915e-06, |
|
"loss": 0.4717, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 25.56, |
|
"grad_norm": 14.051669120788574, |
|
"learning_rate": 9.148148148148149e-06, |
|
"loss": 0.492, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 25.59, |
|
"grad_norm": 13.112764358520508, |
|
"learning_rate": 9.147147147147147e-06, |
|
"loss": 0.4358, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 25.62, |
|
"grad_norm": 26.472837448120117, |
|
"learning_rate": 9.146146146146146e-06, |
|
"loss": 0.498, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 25.65, |
|
"grad_norm": 10.999613761901855, |
|
"learning_rate": 9.145145145145146e-06, |
|
"loss": 0.4739, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 25.68, |
|
"grad_norm": 7.995015621185303, |
|
"learning_rate": 9.144144144144145e-06, |
|
"loss": 0.5002, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 25.71, |
|
"grad_norm": 10.026015281677246, |
|
"learning_rate": 9.143143143143144e-06, |
|
"loss": 0.4528, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 25.74, |
|
"grad_norm": 10.452445030212402, |
|
"learning_rate": 9.142142142142144e-06, |
|
"loss": 0.4851, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 25.77, |
|
"grad_norm": 10.0867280960083, |
|
"learning_rate": 9.141141141141142e-06, |
|
"loss": 0.4482, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 25.8, |
|
"grad_norm": 12.90353012084961, |
|
"learning_rate": 9.140140140140141e-06, |
|
"loss": 0.4755, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 25.83, |
|
"grad_norm": 7.924851417541504, |
|
"learning_rate": 9.13913913913914e-06, |
|
"loss": 0.465, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 25.86, |
|
"grad_norm": 11.196653366088867, |
|
"learning_rate": 9.138138138138138e-06, |
|
"loss": 0.4396, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 25.89, |
|
"grad_norm": 7.144512176513672, |
|
"learning_rate": 9.137137137137137e-06, |
|
"loss": 0.4915, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 25.92, |
|
"grad_norm": 8.691585540771484, |
|
"learning_rate": 9.136136136136137e-06, |
|
"loss": 0.4835, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 25.95, |
|
"grad_norm": 9.7068452835083, |
|
"learning_rate": 9.135135135135136e-06, |
|
"loss": 0.4593, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 25.98, |
|
"grad_norm": 11.20572566986084, |
|
"learning_rate": 9.134134134134134e-06, |
|
"loss": 0.4867, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"eval_accuracy": 0.9295, |
|
"eval_loss": 0.24820935726165771, |
|
"eval_runtime": 30.6834, |
|
"eval_samples_per_second": 325.909, |
|
"eval_steps_per_second": 1.304, |
|
"step": 8658 |
|
}, |
|
{ |
|
"epoch": 26.01, |
|
"grad_norm": 10.692183494567871, |
|
"learning_rate": 9.133133133133135e-06, |
|
"loss": 0.4562, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 26.04, |
|
"grad_norm": 11.422379493713379, |
|
"learning_rate": 9.132132132132133e-06, |
|
"loss": 0.4686, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 26.07, |
|
"grad_norm": 21.960498809814453, |
|
"learning_rate": 9.131131131131132e-06, |
|
"loss": 0.4379, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 26.1, |
|
"grad_norm": 6.990413188934326, |
|
"learning_rate": 9.13013013013013e-06, |
|
"loss": 0.4402, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 26.13, |
|
"grad_norm": 11.235393524169922, |
|
"learning_rate": 9.129129129129129e-06, |
|
"loss": 0.4269, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 26.16, |
|
"grad_norm": 11.997522354125977, |
|
"learning_rate": 9.12812812812813e-06, |
|
"loss": 0.4442, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 26.19, |
|
"grad_norm": 10.500715255737305, |
|
"learning_rate": 9.127127127127128e-06, |
|
"loss": 0.4521, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 26.22, |
|
"grad_norm": 9.616368293762207, |
|
"learning_rate": 9.126126126126126e-06, |
|
"loss": 0.4274, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 26.25, |
|
"grad_norm": 11.904566764831543, |
|
"learning_rate": 9.125125125125127e-06, |
|
"loss": 0.4542, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 26.28, |
|
"grad_norm": 11.470881462097168, |
|
"learning_rate": 9.124124124124125e-06, |
|
"loss": 0.4126, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 26.31, |
|
"grad_norm": 10.796327590942383, |
|
"learning_rate": 9.123123123123124e-06, |
|
"loss": 0.5106, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 26.34, |
|
"grad_norm": 9.003201484680176, |
|
"learning_rate": 9.122122122122123e-06, |
|
"loss": 0.4288, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 26.37, |
|
"grad_norm": 12.855923652648926, |
|
"learning_rate": 9.121121121121121e-06, |
|
"loss": 0.5086, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 26.4, |
|
"grad_norm": 10.519691467285156, |
|
"learning_rate": 9.120120120120121e-06, |
|
"loss": 0.4857, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 26.43, |
|
"grad_norm": 10.088730812072754, |
|
"learning_rate": 9.11911911911912e-06, |
|
"loss": 0.5249, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 26.46, |
|
"grad_norm": 7.654215335845947, |
|
"learning_rate": 9.118118118118119e-06, |
|
"loss": 0.4909, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 26.49, |
|
"grad_norm": 10.246333122253418, |
|
"learning_rate": 9.117117117117117e-06, |
|
"loss": 0.5, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 26.52, |
|
"grad_norm": 9.856045722961426, |
|
"learning_rate": 9.116116116116117e-06, |
|
"loss": 0.4561, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 26.55, |
|
"grad_norm": 8.209404945373535, |
|
"learning_rate": 9.115115115115116e-06, |
|
"loss": 0.4903, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 26.58, |
|
"grad_norm": 9.775983810424805, |
|
"learning_rate": 9.114114114114115e-06, |
|
"loss": 0.4763, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 26.61, |
|
"grad_norm": 16.25369644165039, |
|
"learning_rate": 9.113113113113113e-06, |
|
"loss": 0.4846, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 26.64, |
|
"grad_norm": 10.342110633850098, |
|
"learning_rate": 9.112112112112112e-06, |
|
"loss": 0.4912, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 26.67, |
|
"grad_norm": 12.154577255249023, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.4941, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 26.7, |
|
"grad_norm": 8.336454391479492, |
|
"learning_rate": 9.11011011011011e-06, |
|
"loss": 0.4475, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 26.73, |
|
"grad_norm": 7.27547025680542, |
|
"learning_rate": 9.10910910910911e-06, |
|
"loss": 0.4518, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 26.76, |
|
"grad_norm": 8.158557891845703, |
|
"learning_rate": 9.10810810810811e-06, |
|
"loss": 0.3841, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 26.79, |
|
"grad_norm": 13.1236572265625, |
|
"learning_rate": 9.107107107107108e-06, |
|
"loss": 0.4412, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 26.82, |
|
"grad_norm": 9.89743423461914, |
|
"learning_rate": 9.106106106106107e-06, |
|
"loss": 0.4979, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 26.85, |
|
"grad_norm": 8.974017143249512, |
|
"learning_rate": 9.105105105105105e-06, |
|
"loss": 0.4194, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 26.88, |
|
"grad_norm": 12.197188377380371, |
|
"learning_rate": 9.104104104104104e-06, |
|
"loss": 0.4394, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 26.91, |
|
"grad_norm": 17.870830535888672, |
|
"learning_rate": 9.103103103103104e-06, |
|
"loss": 0.4176, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 26.94, |
|
"grad_norm": 11.099676132202148, |
|
"learning_rate": 9.102102102102103e-06, |
|
"loss": 0.4828, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 26.97, |
|
"grad_norm": 14.031774520874023, |
|
"learning_rate": 9.101101101101101e-06, |
|
"loss": 0.4873, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"grad_norm": 7.13455867767334, |
|
"learning_rate": 9.100100100100102e-06, |
|
"loss": 0.4414, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"eval_accuracy": 0.9267, |
|
"eval_loss": 0.25395047664642334, |
|
"eval_runtime": 30.0761, |
|
"eval_samples_per_second": 332.49, |
|
"eval_steps_per_second": 1.33, |
|
"step": 8991 |
|
}, |
|
{ |
|
"epoch": 27.03, |
|
"grad_norm": 8.033126831054688, |
|
"learning_rate": 9.0990990990991e-06, |
|
"loss": 0.3802, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 27.06, |
|
"grad_norm": 8.065628051757812, |
|
"learning_rate": 9.098098098098099e-06, |
|
"loss": 0.4256, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 27.09, |
|
"grad_norm": 14.366639137268066, |
|
"learning_rate": 9.097097097097098e-06, |
|
"loss": 0.4556, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 27.12, |
|
"grad_norm": 10.824115753173828, |
|
"learning_rate": 9.096096096096096e-06, |
|
"loss": 0.4359, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 27.15, |
|
"grad_norm": 9.604802131652832, |
|
"learning_rate": 9.095095095095095e-06, |
|
"loss": 0.4823, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 27.18, |
|
"grad_norm": 29.343629837036133, |
|
"learning_rate": 9.094094094094095e-06, |
|
"loss": 0.4764, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 27.21, |
|
"grad_norm": 7.678133487701416, |
|
"learning_rate": 9.093093093093094e-06, |
|
"loss": 0.4019, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 27.24, |
|
"grad_norm": 12.999184608459473, |
|
"learning_rate": 9.092092092092092e-06, |
|
"loss": 0.4373, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 27.27, |
|
"grad_norm": 27.29155921936035, |
|
"learning_rate": 9.091091091091093e-06, |
|
"loss": 0.4027, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"grad_norm": 9.286530494689941, |
|
"learning_rate": 9.090090090090091e-06, |
|
"loss": 0.441, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 27.33, |
|
"grad_norm": 8.044090270996094, |
|
"learning_rate": 9.08908908908909e-06, |
|
"loss": 0.3782, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 27.36, |
|
"grad_norm": 9.737653732299805, |
|
"learning_rate": 9.088088088088088e-06, |
|
"loss": 0.4717, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 27.39, |
|
"grad_norm": 11.821213722229004, |
|
"learning_rate": 9.087087087087087e-06, |
|
"loss": 0.4395, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 27.42, |
|
"grad_norm": 11.546897888183594, |
|
"learning_rate": 9.086086086086087e-06, |
|
"loss": 0.4485, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 27.45, |
|
"grad_norm": 11.010965347290039, |
|
"learning_rate": 9.085085085085086e-06, |
|
"loss": 0.459, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 27.48, |
|
"grad_norm": 9.444851875305176, |
|
"learning_rate": 9.084084084084084e-06, |
|
"loss": 0.4984, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 27.51, |
|
"grad_norm": 19.694604873657227, |
|
"learning_rate": 9.083083083083085e-06, |
|
"loss": 0.4741, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 27.54, |
|
"grad_norm": 7.38561487197876, |
|
"learning_rate": 9.082082082082083e-06, |
|
"loss": 0.435, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 27.57, |
|
"grad_norm": 11.60218620300293, |
|
"learning_rate": 9.081081081081082e-06, |
|
"loss": 0.4238, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 27.6, |
|
"grad_norm": 13.115228652954102, |
|
"learning_rate": 9.08008008008008e-06, |
|
"loss": 0.4175, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 27.63, |
|
"grad_norm": 22.14258575439453, |
|
"learning_rate": 9.079079079079079e-06, |
|
"loss": 0.4655, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 27.66, |
|
"grad_norm": 9.186527252197266, |
|
"learning_rate": 9.07807807807808e-06, |
|
"loss": 0.4148, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 27.69, |
|
"grad_norm": 8.470562934875488, |
|
"learning_rate": 9.077077077077078e-06, |
|
"loss": 0.4463, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 27.72, |
|
"grad_norm": 8.844724655151367, |
|
"learning_rate": 9.076076076076077e-06, |
|
"loss": 0.5129, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 27.75, |
|
"grad_norm": 14.874906539916992, |
|
"learning_rate": 9.075075075075077e-06, |
|
"loss": 0.508, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 27.78, |
|
"grad_norm": 9.115788459777832, |
|
"learning_rate": 9.074074074074075e-06, |
|
"loss": 0.4789, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 27.81, |
|
"grad_norm": 11.254831314086914, |
|
"learning_rate": 9.073073073073074e-06, |
|
"loss": 0.4359, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 27.84, |
|
"grad_norm": 9.67915153503418, |
|
"learning_rate": 9.072072072072073e-06, |
|
"loss": 0.4465, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 27.87, |
|
"grad_norm": 7.342519283294678, |
|
"learning_rate": 9.071071071071071e-06, |
|
"loss": 0.4468, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 27.9, |
|
"grad_norm": 9.372382164001465, |
|
"learning_rate": 9.07007007007007e-06, |
|
"loss": 0.4373, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 27.93, |
|
"grad_norm": 28.929168701171875, |
|
"learning_rate": 9.06906906906907e-06, |
|
"loss": 0.4206, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 27.96, |
|
"grad_norm": 88.10252380371094, |
|
"learning_rate": 9.068068068068069e-06, |
|
"loss": 0.4712, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 27.99, |
|
"grad_norm": 20.861848831176758, |
|
"learning_rate": 9.067067067067067e-06, |
|
"loss": 0.4574, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"eval_accuracy": 0.9287, |
|
"eval_loss": 0.24936652183532715, |
|
"eval_runtime": 30.6074, |
|
"eval_samples_per_second": 326.718, |
|
"eval_steps_per_second": 1.307, |
|
"step": 9324 |
|
}, |
|
{ |
|
"epoch": 28.02, |
|
"grad_norm": 18.218908309936523, |
|
"learning_rate": 9.066066066066068e-06, |
|
"loss": 0.4536, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 28.05, |
|
"grad_norm": 22.617238998413086, |
|
"learning_rate": 9.065065065065066e-06, |
|
"loss": 0.4148, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 28.08, |
|
"grad_norm": 13.542783737182617, |
|
"learning_rate": 9.064064064064065e-06, |
|
"loss": 0.4575, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 28.11, |
|
"grad_norm": 9.672589302062988, |
|
"learning_rate": 9.063063063063063e-06, |
|
"loss": 0.4472, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 28.14, |
|
"grad_norm": 10.828847885131836, |
|
"learning_rate": 9.062062062062062e-06, |
|
"loss": 0.4097, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 28.17, |
|
"grad_norm": 8.991840362548828, |
|
"learning_rate": 9.061061061061062e-06, |
|
"loss": 0.4342, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 28.2, |
|
"grad_norm": 10.406083106994629, |
|
"learning_rate": 9.06006006006006e-06, |
|
"loss": 0.4401, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 28.23, |
|
"grad_norm": 9.752996444702148, |
|
"learning_rate": 9.05905905905906e-06, |
|
"loss": 0.4453, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 28.26, |
|
"grad_norm": 14.64594841003418, |
|
"learning_rate": 9.05805805805806e-06, |
|
"loss": 0.4074, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 28.29, |
|
"grad_norm": 6.0390191078186035, |
|
"learning_rate": 9.057057057057058e-06, |
|
"loss": 0.4417, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 28.32, |
|
"grad_norm": 10.605696678161621, |
|
"learning_rate": 9.056056056056057e-06, |
|
"loss": 0.4227, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 28.35, |
|
"grad_norm": 9.552401542663574, |
|
"learning_rate": 9.055055055055055e-06, |
|
"loss": 0.4654, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 28.38, |
|
"grad_norm": 10.555407524108887, |
|
"learning_rate": 9.054054054054054e-06, |
|
"loss": 0.4677, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 28.41, |
|
"grad_norm": 10.286060333251953, |
|
"learning_rate": 9.053053053053054e-06, |
|
"loss": 0.3975, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 28.44, |
|
"grad_norm": 13.12886905670166, |
|
"learning_rate": 9.052052052052053e-06, |
|
"loss": 0.4472, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 28.47, |
|
"grad_norm": 9.690975189208984, |
|
"learning_rate": 9.051051051051052e-06, |
|
"loss": 0.4633, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 28.5, |
|
"grad_norm": 6.2562456130981445, |
|
"learning_rate": 9.05005005005005e-06, |
|
"loss": 0.4694, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 28.53, |
|
"grad_norm": 9.984062194824219, |
|
"learning_rate": 9.04904904904905e-06, |
|
"loss": 0.4463, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 28.56, |
|
"grad_norm": 11.556092262268066, |
|
"learning_rate": 9.048048048048049e-06, |
|
"loss": 0.4532, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 28.59, |
|
"grad_norm": 11.245503425598145, |
|
"learning_rate": 9.047047047047048e-06, |
|
"loss": 0.4504, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 28.62, |
|
"grad_norm": 10.833431243896484, |
|
"learning_rate": 9.046046046046046e-06, |
|
"loss": 0.5309, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 28.65, |
|
"grad_norm": 15.275016784667969, |
|
"learning_rate": 9.045045045045045e-06, |
|
"loss": 0.3924, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 28.68, |
|
"grad_norm": 8.553839683532715, |
|
"learning_rate": 9.044044044044045e-06, |
|
"loss": 0.4527, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 28.71, |
|
"grad_norm": 11.880951881408691, |
|
"learning_rate": 9.043043043043044e-06, |
|
"loss": 0.5098, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 28.74, |
|
"grad_norm": 12.77589225769043, |
|
"learning_rate": 9.042042042042042e-06, |
|
"loss": 0.4748, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 28.77, |
|
"grad_norm": 8.945269584655762, |
|
"learning_rate": 9.041041041041043e-06, |
|
"loss": 0.4274, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 28.8, |
|
"grad_norm": 9.76388931274414, |
|
"learning_rate": 9.040040040040041e-06, |
|
"loss": 0.5033, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 28.83, |
|
"grad_norm": 10.47138786315918, |
|
"learning_rate": 9.03903903903904e-06, |
|
"loss": 0.4589, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 28.86, |
|
"grad_norm": 11.299651145935059, |
|
"learning_rate": 9.038038038038038e-06, |
|
"loss": 0.4671, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 28.89, |
|
"grad_norm": 20.282169342041016, |
|
"learning_rate": 9.037037037037037e-06, |
|
"loss": 0.4182, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 28.92, |
|
"grad_norm": 13.877592086791992, |
|
"learning_rate": 9.036036036036037e-06, |
|
"loss": 0.4441, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 28.95, |
|
"grad_norm": 33.09040069580078, |
|
"learning_rate": 9.035035035035036e-06, |
|
"loss": 0.493, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 28.98, |
|
"grad_norm": 12.959691047668457, |
|
"learning_rate": 9.034034034034034e-06, |
|
"loss": 0.4109, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"eval_accuracy": 0.928, |
|
"eval_loss": 0.2532622218132019, |
|
"eval_runtime": 30.4138, |
|
"eval_samples_per_second": 328.798, |
|
"eval_steps_per_second": 1.315, |
|
"step": 9657 |
|
}, |
|
{ |
|
"epoch": 29.01, |
|
"grad_norm": 7.716038703918457, |
|
"learning_rate": 9.033033033033035e-06, |
|
"loss": 0.4287, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 29.04, |
|
"grad_norm": 14.750630378723145, |
|
"learning_rate": 9.032032032032033e-06, |
|
"loss": 0.4264, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 29.07, |
|
"grad_norm": 24.9686222076416, |
|
"learning_rate": 9.031031031031032e-06, |
|
"loss": 0.4061, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 29.1, |
|
"grad_norm": 11.106522560119629, |
|
"learning_rate": 9.03003003003003e-06, |
|
"loss": 0.4478, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 29.13, |
|
"grad_norm": 12.746867179870605, |
|
"learning_rate": 9.029029029029029e-06, |
|
"loss": 0.4483, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 29.16, |
|
"grad_norm": 10.57923698425293, |
|
"learning_rate": 9.02802802802803e-06, |
|
"loss": 0.4322, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 29.19, |
|
"grad_norm": 8.244423866271973, |
|
"learning_rate": 9.027027027027028e-06, |
|
"loss": 0.4101, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 29.22, |
|
"grad_norm": 13.49233341217041, |
|
"learning_rate": 9.026026026026027e-06, |
|
"loss": 0.4631, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 29.25, |
|
"grad_norm": 14.60818099975586, |
|
"learning_rate": 9.025025025025025e-06, |
|
"loss": 0.4404, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 29.28, |
|
"grad_norm": 12.197328567504883, |
|
"learning_rate": 9.024024024024025e-06, |
|
"loss": 0.4371, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 29.31, |
|
"grad_norm": 10.69261646270752, |
|
"learning_rate": 9.023023023023024e-06, |
|
"loss": 0.4042, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 29.34, |
|
"grad_norm": 8.400017738342285, |
|
"learning_rate": 9.022022022022023e-06, |
|
"loss": 0.4588, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 29.37, |
|
"grad_norm": 6.550203800201416, |
|
"learning_rate": 9.021021021021021e-06, |
|
"loss": 0.3832, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 29.4, |
|
"grad_norm": 10.774068832397461, |
|
"learning_rate": 9.02002002002002e-06, |
|
"loss": 0.4582, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 29.43, |
|
"grad_norm": 14.41346263885498, |
|
"learning_rate": 9.01901901901902e-06, |
|
"loss": 0.429, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 29.46, |
|
"grad_norm": 11.75338077545166, |
|
"learning_rate": 9.018018018018019e-06, |
|
"loss": 0.4302, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 29.49, |
|
"grad_norm": 9.722987174987793, |
|
"learning_rate": 9.017017017017017e-06, |
|
"loss": 0.4308, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 29.52, |
|
"grad_norm": 10.287395477294922, |
|
"learning_rate": 9.016016016016018e-06, |
|
"loss": 0.4354, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 29.55, |
|
"grad_norm": 24.349010467529297, |
|
"learning_rate": 9.015015015015016e-06, |
|
"loss": 0.442, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 29.58, |
|
"grad_norm": 10.922633171081543, |
|
"learning_rate": 9.014014014014015e-06, |
|
"loss": 0.4221, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 29.61, |
|
"grad_norm": 11.137175559997559, |
|
"learning_rate": 9.013013013013013e-06, |
|
"loss": 0.4693, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 29.64, |
|
"grad_norm": 13.711250305175781, |
|
"learning_rate": 9.012012012012012e-06, |
|
"loss": 0.4086, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 29.67, |
|
"grad_norm": 19.143190383911133, |
|
"learning_rate": 9.011011011011012e-06, |
|
"loss": 0.4192, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 29.7, |
|
"grad_norm": 12.561365127563477, |
|
"learning_rate": 9.010010010010011e-06, |
|
"loss": 0.471, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 29.73, |
|
"grad_norm": 12.069458961486816, |
|
"learning_rate": 9.00900900900901e-06, |
|
"loss": 0.4193, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 29.76, |
|
"grad_norm": 11.876630783081055, |
|
"learning_rate": 9.00800800800801e-06, |
|
"loss": 0.4067, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 29.79, |
|
"grad_norm": 11.242775917053223, |
|
"learning_rate": 9.007007007007008e-06, |
|
"loss": 0.4756, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 29.82, |
|
"grad_norm": 11.685412406921387, |
|
"learning_rate": 9.006006006006007e-06, |
|
"loss": 0.4608, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 29.85, |
|
"grad_norm": 8.70016098022461, |
|
"learning_rate": 9.005005005005006e-06, |
|
"loss": 0.5007, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 29.88, |
|
"grad_norm": 11.36416244506836, |
|
"learning_rate": 9.004004004004004e-06, |
|
"loss": 0.4296, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 29.91, |
|
"grad_norm": 14.544692039489746, |
|
"learning_rate": 9.003003003003003e-06, |
|
"loss": 0.4573, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 29.94, |
|
"grad_norm": 5.4239044189453125, |
|
"learning_rate": 9.002002002002003e-06, |
|
"loss": 0.4643, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 29.97, |
|
"grad_norm": 6.896058082580566, |
|
"learning_rate": 9.001001001001002e-06, |
|
"loss": 0.4539, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"grad_norm": 46.193382263183594, |
|
"learning_rate": 9e-06, |
|
"loss": 0.4433, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"eval_accuracy": 0.9258, |
|
"eval_loss": 0.257062703371048, |
|
"eval_runtime": 30.4684, |
|
"eval_samples_per_second": 328.209, |
|
"eval_steps_per_second": 1.313, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 30.03, |
|
"grad_norm": 11.51973819732666, |
|
"learning_rate": 8.998998998999e-06, |
|
"loss": 0.4466, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 30.06, |
|
"grad_norm": 9.196100234985352, |
|
"learning_rate": 8.997997997997999e-06, |
|
"loss": 0.4622, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 30.09, |
|
"grad_norm": 6.755195140838623, |
|
"learning_rate": 8.996996996996998e-06, |
|
"loss": 0.4014, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 30.12, |
|
"grad_norm": 10.63003158569336, |
|
"learning_rate": 8.995995995995996e-06, |
|
"loss": 0.4284, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 30.15, |
|
"grad_norm": 11.699910163879395, |
|
"learning_rate": 8.994994994994995e-06, |
|
"loss": 0.4425, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 30.18, |
|
"grad_norm": 10.2941255569458, |
|
"learning_rate": 8.993993993993995e-06, |
|
"loss": 0.3961, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 30.21, |
|
"grad_norm": 7.498856067657471, |
|
"learning_rate": 8.992992992992994e-06, |
|
"loss": 0.3904, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 30.24, |
|
"grad_norm": 10.58414077758789, |
|
"learning_rate": 8.991991991991992e-06, |
|
"loss": 0.401, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 30.27, |
|
"grad_norm": 9.456209182739258, |
|
"learning_rate": 8.990990990990993e-06, |
|
"loss": 0.4734, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 30.3, |
|
"grad_norm": 8.097994804382324, |
|
"learning_rate": 8.989989989989991e-06, |
|
"loss": 0.381, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 30.33, |
|
"grad_norm": 9.386022567749023, |
|
"learning_rate": 8.98898898898899e-06, |
|
"loss": 0.4055, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 30.36, |
|
"grad_norm": 10.85639476776123, |
|
"learning_rate": 8.987987987987988e-06, |
|
"loss": 0.3886, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 30.39, |
|
"grad_norm": 25.46282196044922, |
|
"learning_rate": 8.986986986986987e-06, |
|
"loss": 0.4192, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 30.42, |
|
"grad_norm": 17.969945907592773, |
|
"learning_rate": 8.985985985985987e-06, |
|
"loss": 0.4346, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 30.45, |
|
"grad_norm": 13.142504692077637, |
|
"learning_rate": 8.984984984984986e-06, |
|
"loss": 0.4449, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 30.48, |
|
"grad_norm": 10.24559211730957, |
|
"learning_rate": 8.983983983983985e-06, |
|
"loss": 0.4229, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 30.51, |
|
"grad_norm": 13.709941864013672, |
|
"learning_rate": 8.982982982982985e-06, |
|
"loss": 0.4483, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 30.54, |
|
"grad_norm": 8.802275657653809, |
|
"learning_rate": 8.981981981981983e-06, |
|
"loss": 0.4159, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 30.57, |
|
"grad_norm": 13.62450122833252, |
|
"learning_rate": 8.980980980980982e-06, |
|
"loss": 0.3761, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 30.6, |
|
"grad_norm": 8.44536018371582, |
|
"learning_rate": 8.97997997997998e-06, |
|
"loss": 0.4117, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 30.63, |
|
"grad_norm": 10.537995338439941, |
|
"learning_rate": 8.97897897897898e-06, |
|
"loss": 0.4116, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 30.66, |
|
"grad_norm": 8.89731502532959, |
|
"learning_rate": 8.977977977977978e-06, |
|
"loss": 0.3843, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 30.69, |
|
"grad_norm": 9.666942596435547, |
|
"learning_rate": 8.976976976976978e-06, |
|
"loss": 0.3763, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 30.72, |
|
"grad_norm": 7.942086696624756, |
|
"learning_rate": 8.975975975975977e-06, |
|
"loss": 0.4354, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 30.75, |
|
"grad_norm": 11.044801712036133, |
|
"learning_rate": 8.974974974974975e-06, |
|
"loss": 0.4023, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 30.78, |
|
"grad_norm": 20.663305282592773, |
|
"learning_rate": 8.973973973973976e-06, |
|
"loss": 0.402, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 30.81, |
|
"grad_norm": 8.409327507019043, |
|
"learning_rate": 8.972972972972974e-06, |
|
"loss": 0.4478, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 30.84, |
|
"grad_norm": 17.233417510986328, |
|
"learning_rate": 8.971971971971973e-06, |
|
"loss": 0.4025, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 30.87, |
|
"grad_norm": 11.380728721618652, |
|
"learning_rate": 8.970970970970971e-06, |
|
"loss": 0.3812, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 30.9, |
|
"grad_norm": 10.061477661132812, |
|
"learning_rate": 8.96996996996997e-06, |
|
"loss": 0.4453, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 30.93, |
|
"grad_norm": 32.968605041503906, |
|
"learning_rate": 8.96896896896897e-06, |
|
"loss": 0.4616, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 30.96, |
|
"grad_norm": 10.942723274230957, |
|
"learning_rate": 8.967967967967969e-06, |
|
"loss": 0.4368, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 30.99, |
|
"grad_norm": 9.55047607421875, |
|
"learning_rate": 8.966966966966967e-06, |
|
"loss": 0.4034, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 31.0, |
|
"eval_accuracy": 0.9265, |
|
"eval_loss": 0.25430214405059814, |
|
"eval_runtime": 30.1984, |
|
"eval_samples_per_second": 331.144, |
|
"eval_steps_per_second": 1.325, |
|
"step": 10323 |
|
}, |
|
{ |
|
"epoch": 31.02, |
|
"grad_norm": 10.171669006347656, |
|
"learning_rate": 8.965965965965968e-06, |
|
"loss": 0.406, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 31.05, |
|
"grad_norm": 18.30038833618164, |
|
"learning_rate": 8.964964964964966e-06, |
|
"loss": 0.506, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 31.08, |
|
"grad_norm": 10.151785850524902, |
|
"learning_rate": 8.963963963963965e-06, |
|
"loss": 0.4268, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 31.11, |
|
"grad_norm": 11.276358604431152, |
|
"learning_rate": 8.962962962962963e-06, |
|
"loss": 0.392, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 31.14, |
|
"grad_norm": 9.019916534423828, |
|
"learning_rate": 8.961961961961962e-06, |
|
"loss": 0.3692, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 31.17, |
|
"grad_norm": 13.906360626220703, |
|
"learning_rate": 8.960960960960962e-06, |
|
"loss": 0.3863, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 31.2, |
|
"grad_norm": 7.59787130355835, |
|
"learning_rate": 8.959959959959961e-06, |
|
"loss": 0.441, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 31.23, |
|
"grad_norm": 7.223018169403076, |
|
"learning_rate": 8.95895895895896e-06, |
|
"loss": 0.4255, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 31.26, |
|
"grad_norm": 8.906543731689453, |
|
"learning_rate": 8.957957957957958e-06, |
|
"loss": 0.4221, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 31.29, |
|
"grad_norm": 18.36624526977539, |
|
"learning_rate": 8.956956956956958e-06, |
|
"loss": 0.397, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 31.32, |
|
"grad_norm": 37.474876403808594, |
|
"learning_rate": 8.955955955955957e-06, |
|
"loss": 0.4041, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 31.35, |
|
"grad_norm": 12.822399139404297, |
|
"learning_rate": 8.954954954954956e-06, |
|
"loss": 0.4391, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 31.38, |
|
"grad_norm": 11.556109428405762, |
|
"learning_rate": 8.953953953953954e-06, |
|
"loss": 0.4278, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 31.41, |
|
"grad_norm": 9.80760669708252, |
|
"learning_rate": 8.952952952952953e-06, |
|
"loss": 0.3978, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 31.44, |
|
"grad_norm": 14.095552444458008, |
|
"learning_rate": 8.951951951951953e-06, |
|
"loss": 0.39, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 31.47, |
|
"grad_norm": 13.767873764038086, |
|
"learning_rate": 8.950950950950952e-06, |
|
"loss": 0.4704, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 31.5, |
|
"grad_norm": 9.213407516479492, |
|
"learning_rate": 8.94994994994995e-06, |
|
"loss": 0.4067, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 31.53, |
|
"grad_norm": 8.17852783203125, |
|
"learning_rate": 8.94894894894895e-06, |
|
"loss": 0.4305, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 31.56, |
|
"grad_norm": 11.813791275024414, |
|
"learning_rate": 8.94794794794795e-06, |
|
"loss": 0.4228, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 31.59, |
|
"grad_norm": 12.407458305358887, |
|
"learning_rate": 8.946946946946948e-06, |
|
"loss": 0.4218, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 31.62, |
|
"grad_norm": 11.357696533203125, |
|
"learning_rate": 8.945945945945946e-06, |
|
"loss": 0.4716, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 31.65, |
|
"grad_norm": 14.272128105163574, |
|
"learning_rate": 8.944944944944945e-06, |
|
"loss": 0.4015, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 31.68, |
|
"grad_norm": 9.179841995239258, |
|
"learning_rate": 8.943943943943945e-06, |
|
"loss": 0.4237, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 31.71, |
|
"grad_norm": 11.722990036010742, |
|
"learning_rate": 8.942942942942944e-06, |
|
"loss": 0.4525, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 31.74, |
|
"grad_norm": 9.312932968139648, |
|
"learning_rate": 8.941941941941942e-06, |
|
"loss": 0.4601, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 31.77, |
|
"grad_norm": 11.05932903289795, |
|
"learning_rate": 8.940940940940943e-06, |
|
"loss": 0.4365, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 31.8, |
|
"grad_norm": 12.362451553344727, |
|
"learning_rate": 8.939939939939941e-06, |
|
"loss": 0.4413, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 31.83, |
|
"grad_norm": 9.756061553955078, |
|
"learning_rate": 8.93893893893894e-06, |
|
"loss": 0.4036, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 31.86, |
|
"grad_norm": 12.465987205505371, |
|
"learning_rate": 8.937937937937939e-06, |
|
"loss": 0.4504, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 31.89, |
|
"grad_norm": 10.340137481689453, |
|
"learning_rate": 8.936936936936937e-06, |
|
"loss": 0.4414, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 31.92, |
|
"grad_norm": 7.671935558319092, |
|
"learning_rate": 8.935935935935937e-06, |
|
"loss": 0.4278, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 31.95, |
|
"grad_norm": 7.171747207641602, |
|
"learning_rate": 8.934934934934936e-06, |
|
"loss": 0.4192, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 31.98, |
|
"grad_norm": 14.753591537475586, |
|
"learning_rate": 8.933933933933935e-06, |
|
"loss": 0.4203, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"eval_accuracy": 0.9286, |
|
"eval_loss": 0.2586733400821686, |
|
"eval_runtime": 30.4616, |
|
"eval_samples_per_second": 328.282, |
|
"eval_steps_per_second": 1.313, |
|
"step": 10656 |
|
}, |
|
{ |
|
"epoch": 32.01, |
|
"grad_norm": 9.285674095153809, |
|
"learning_rate": 8.932932932932933e-06, |
|
"loss": 0.3513, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 32.04, |
|
"grad_norm": 13.126435279846191, |
|
"learning_rate": 8.931931931931933e-06, |
|
"loss": 0.4091, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 32.07, |
|
"grad_norm": 14.447212219238281, |
|
"learning_rate": 8.93093093093093e-06, |
|
"loss": 0.4669, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 32.1, |
|
"grad_norm": 8.851954460144043, |
|
"learning_rate": 8.92992992992993e-06, |
|
"loss": 0.4089, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 32.13, |
|
"grad_norm": 9.75550651550293, |
|
"learning_rate": 8.92892892892893e-06, |
|
"loss": 0.4309, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 32.16, |
|
"grad_norm": 8.048290252685547, |
|
"learning_rate": 8.927927927927928e-06, |
|
"loss": 0.4512, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 32.19, |
|
"grad_norm": 7.996254920959473, |
|
"learning_rate": 8.926926926926928e-06, |
|
"loss": 0.4154, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 32.22, |
|
"grad_norm": 11.290484428405762, |
|
"learning_rate": 8.925925925925927e-06, |
|
"loss": 0.3833, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 32.25, |
|
"grad_norm": 12.008049964904785, |
|
"learning_rate": 8.924924924924925e-06, |
|
"loss": 0.4744, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 32.28, |
|
"grad_norm": 9.068510055541992, |
|
"learning_rate": 8.923923923923926e-06, |
|
"loss": 0.4121, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 32.31, |
|
"grad_norm": 18.756370544433594, |
|
"learning_rate": 8.922922922922924e-06, |
|
"loss": 0.3766, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 32.34, |
|
"grad_norm": 12.292619705200195, |
|
"learning_rate": 8.921921921921923e-06, |
|
"loss": 0.3634, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 32.37, |
|
"grad_norm": 8.977395057678223, |
|
"learning_rate": 8.920920920920921e-06, |
|
"loss": 0.4379, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 32.4, |
|
"grad_norm": 13.57693862915039, |
|
"learning_rate": 8.91991991991992e-06, |
|
"loss": 0.4078, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 32.43, |
|
"grad_norm": 11.201655387878418, |
|
"learning_rate": 8.91891891891892e-06, |
|
"loss": 0.4462, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 32.46, |
|
"grad_norm": 14.462066650390625, |
|
"learning_rate": 8.917917917917919e-06, |
|
"loss": 0.4056, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 32.49, |
|
"grad_norm": 10.209726333618164, |
|
"learning_rate": 8.916916916916917e-06, |
|
"loss": 0.3494, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 32.52, |
|
"grad_norm": 12.745504379272461, |
|
"learning_rate": 8.915915915915918e-06, |
|
"loss": 0.4028, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 32.55, |
|
"grad_norm": 7.859507083892822, |
|
"learning_rate": 8.914914914914916e-06, |
|
"loss": 0.4311, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 32.58, |
|
"grad_norm": 7.138404369354248, |
|
"learning_rate": 8.913913913913915e-06, |
|
"loss": 0.4027, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 32.61, |
|
"grad_norm": 6.884012222290039, |
|
"learning_rate": 8.912912912912914e-06, |
|
"loss": 0.4139, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 32.64, |
|
"grad_norm": 9.922371864318848, |
|
"learning_rate": 8.911911911911912e-06, |
|
"loss": 0.4052, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 32.67, |
|
"grad_norm": 9.349055290222168, |
|
"learning_rate": 8.91091091091091e-06, |
|
"loss": 0.3701, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 32.7, |
|
"grad_norm": 11.731735229492188, |
|
"learning_rate": 8.909909909909911e-06, |
|
"loss": 0.405, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 32.73, |
|
"grad_norm": 9.601366996765137, |
|
"learning_rate": 8.90890890890891e-06, |
|
"loss": 0.4134, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 32.76, |
|
"grad_norm": 9.94797134399414, |
|
"learning_rate": 8.907907907907908e-06, |
|
"loss": 0.4073, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 32.79, |
|
"grad_norm": 10.514374732971191, |
|
"learning_rate": 8.906906906906909e-06, |
|
"loss": 0.4244, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 32.82, |
|
"grad_norm": 8.28952407836914, |
|
"learning_rate": 8.905905905905905e-06, |
|
"loss": 0.396, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 32.85, |
|
"grad_norm": 8.094382286071777, |
|
"learning_rate": 8.904904904904906e-06, |
|
"loss": 0.3911, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 32.88, |
|
"grad_norm": 10.968464851379395, |
|
"learning_rate": 8.903903903903904e-06, |
|
"loss": 0.4026, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 32.91, |
|
"grad_norm": 9.243600845336914, |
|
"learning_rate": 8.902902902902903e-06, |
|
"loss": 0.4211, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 32.94, |
|
"grad_norm": 17.827457427978516, |
|
"learning_rate": 8.901901901901903e-06, |
|
"loss": 0.4502, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 32.97, |
|
"grad_norm": 8.173558235168457, |
|
"learning_rate": 8.900900900900902e-06, |
|
"loss": 0.3942, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"eval_accuracy": 0.927, |
|
"eval_loss": 0.2555399537086487, |
|
"eval_runtime": 30.1335, |
|
"eval_samples_per_second": 331.857, |
|
"eval_steps_per_second": 1.327, |
|
"step": 10989 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"grad_norm": 10.555376052856445, |
|
"learning_rate": 8.8998998998999e-06, |
|
"loss": 0.3512, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 33.03, |
|
"grad_norm": 12.584593772888184, |
|
"learning_rate": 8.8988988988989e-06, |
|
"loss": 0.4775, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 33.06, |
|
"grad_norm": 19.95534896850586, |
|
"learning_rate": 8.8978978978979e-06, |
|
"loss": 0.4426, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 33.09, |
|
"grad_norm": 9.170835494995117, |
|
"learning_rate": 8.896896896896898e-06, |
|
"loss": 0.3763, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 33.12, |
|
"grad_norm": 9.26423168182373, |
|
"learning_rate": 8.895895895895896e-06, |
|
"loss": 0.4534, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 33.15, |
|
"grad_norm": 43.479000091552734, |
|
"learning_rate": 8.894894894894895e-06, |
|
"loss": 0.4026, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 33.18, |
|
"grad_norm": 18.981931686401367, |
|
"learning_rate": 8.893893893893895e-06, |
|
"loss": 0.3983, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 33.21, |
|
"grad_norm": 10.87066650390625, |
|
"learning_rate": 8.892892892892894e-06, |
|
"loss": 0.5128, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 33.24, |
|
"grad_norm": 15.586153984069824, |
|
"learning_rate": 8.891891891891893e-06, |
|
"loss": 0.4168, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 33.27, |
|
"grad_norm": 10.101288795471191, |
|
"learning_rate": 8.890890890890893e-06, |
|
"loss": 0.3545, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 33.3, |
|
"grad_norm": 13.756767272949219, |
|
"learning_rate": 8.889889889889891e-06, |
|
"loss": 0.4007, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 33.33, |
|
"grad_norm": 9.537035942077637, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.4091, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 33.36, |
|
"grad_norm": 14.72399616241455, |
|
"learning_rate": 8.887887887887889e-06, |
|
"loss": 0.4212, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 33.39, |
|
"grad_norm": 9.135162353515625, |
|
"learning_rate": 8.886886886886887e-06, |
|
"loss": 0.3875, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 33.42, |
|
"grad_norm": 68.5506820678711, |
|
"learning_rate": 8.885885885885886e-06, |
|
"loss": 0.4631, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 33.45, |
|
"grad_norm": 8.094832420349121, |
|
"learning_rate": 8.884884884884886e-06, |
|
"loss": 0.4356, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 33.48, |
|
"grad_norm": 39.31940460205078, |
|
"learning_rate": 8.883883883883885e-06, |
|
"loss": 0.3732, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 33.51, |
|
"grad_norm": 8.211509704589844, |
|
"learning_rate": 8.882882882882883e-06, |
|
"loss": 0.4348, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 33.54, |
|
"grad_norm": 14.594659805297852, |
|
"learning_rate": 8.881881881881884e-06, |
|
"loss": 0.3853, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 33.57, |
|
"grad_norm": 10.51009750366211, |
|
"learning_rate": 8.88088088088088e-06, |
|
"loss": 0.4139, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 33.6, |
|
"grad_norm": 8.209303855895996, |
|
"learning_rate": 8.87987987987988e-06, |
|
"loss": 0.3847, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 33.63, |
|
"grad_norm": 7.231290817260742, |
|
"learning_rate": 8.87887887887888e-06, |
|
"loss": 0.3514, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 33.66, |
|
"grad_norm": 7.874517917633057, |
|
"learning_rate": 8.877877877877878e-06, |
|
"loss": 0.4409, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 33.69, |
|
"grad_norm": 12.88748836517334, |
|
"learning_rate": 8.876876876876878e-06, |
|
"loss": 0.4702, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 33.72, |
|
"grad_norm": 7.494571208953857, |
|
"learning_rate": 8.875875875875877e-06, |
|
"loss": 0.3996, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 33.75, |
|
"grad_norm": 9.620100975036621, |
|
"learning_rate": 8.874874874874875e-06, |
|
"loss": 0.4272, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 33.78, |
|
"grad_norm": 15.190299034118652, |
|
"learning_rate": 8.873873873873876e-06, |
|
"loss": 0.4536, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 33.81, |
|
"grad_norm": 31.521875381469727, |
|
"learning_rate": 8.872872872872874e-06, |
|
"loss": 0.418, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 33.84, |
|
"grad_norm": 10.580955505371094, |
|
"learning_rate": 8.871871871871873e-06, |
|
"loss": 0.4782, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 33.87, |
|
"grad_norm": 10.014829635620117, |
|
"learning_rate": 8.870870870870871e-06, |
|
"loss": 0.3915, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 33.9, |
|
"grad_norm": 14.541464805603027, |
|
"learning_rate": 8.86986986986987e-06, |
|
"loss": 0.385, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 33.93, |
|
"grad_norm": 8.281698226928711, |
|
"learning_rate": 8.86886886886887e-06, |
|
"loss": 0.4109, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 33.96, |
|
"grad_norm": 15.637141227722168, |
|
"learning_rate": 8.867867867867869e-06, |
|
"loss": 0.4507, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 33.99, |
|
"grad_norm": 12.205958366394043, |
|
"learning_rate": 8.866866866866868e-06, |
|
"loss": 0.3991, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"eval_accuracy": 0.9271, |
|
"eval_loss": 0.25638455152511597, |
|
"eval_runtime": 30.4793, |
|
"eval_samples_per_second": 328.091, |
|
"eval_steps_per_second": 1.312, |
|
"step": 11322 |
|
}, |
|
{ |
|
"epoch": 34.02, |
|
"grad_norm": 19.83987045288086, |
|
"learning_rate": 8.865865865865866e-06, |
|
"loss": 0.4396, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 34.05, |
|
"grad_norm": 15.217132568359375, |
|
"learning_rate": 8.864864864864866e-06, |
|
"loss": 0.4039, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 34.08, |
|
"grad_norm": 11.251469612121582, |
|
"learning_rate": 8.863863863863863e-06, |
|
"loss": 0.4203, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 34.11, |
|
"grad_norm": 18.08286476135254, |
|
"learning_rate": 8.862862862862864e-06, |
|
"loss": 0.3677, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 34.14, |
|
"grad_norm": 10.37747859954834, |
|
"learning_rate": 8.861861861861862e-06, |
|
"loss": 0.3824, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 34.17, |
|
"grad_norm": 8.164061546325684, |
|
"learning_rate": 8.86086086086086e-06, |
|
"loss": 0.3733, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 34.2, |
|
"grad_norm": 10.746585845947266, |
|
"learning_rate": 8.859859859859861e-06, |
|
"loss": 0.4573, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 34.23, |
|
"grad_norm": 39.38847732543945, |
|
"learning_rate": 8.85885885885886e-06, |
|
"loss": 0.439, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 34.26, |
|
"grad_norm": 13.934308052062988, |
|
"learning_rate": 8.857857857857858e-06, |
|
"loss": 0.4049, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 34.29, |
|
"grad_norm": 16.80377960205078, |
|
"learning_rate": 8.856856856856859e-06, |
|
"loss": 0.4029, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 34.32, |
|
"grad_norm": 17.65918731689453, |
|
"learning_rate": 8.855855855855855e-06, |
|
"loss": 0.3925, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 34.35, |
|
"grad_norm": 8.79592227935791, |
|
"learning_rate": 8.854854854854856e-06, |
|
"loss": 0.3893, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 34.38, |
|
"grad_norm": 9.0423002243042, |
|
"learning_rate": 8.853853853853854e-06, |
|
"loss": 0.4194, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 34.41, |
|
"grad_norm": 9.881715774536133, |
|
"learning_rate": 8.852852852852853e-06, |
|
"loss": 0.4095, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 34.44, |
|
"grad_norm": 10.891952514648438, |
|
"learning_rate": 8.851851851851853e-06, |
|
"loss": 0.4182, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 34.47, |
|
"grad_norm": 5.583596706390381, |
|
"learning_rate": 8.850850850850852e-06, |
|
"loss": 0.4154, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 34.5, |
|
"grad_norm": 8.60441780090332, |
|
"learning_rate": 8.84984984984985e-06, |
|
"loss": 0.3467, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 34.53, |
|
"grad_norm": 9.848461151123047, |
|
"learning_rate": 8.84884884884885e-06, |
|
"loss": 0.363, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 34.56, |
|
"grad_norm": 22.63396453857422, |
|
"learning_rate": 8.84784784784785e-06, |
|
"loss": 0.4687, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 34.59, |
|
"grad_norm": 17.529346466064453, |
|
"learning_rate": 8.846846846846848e-06, |
|
"loss": 0.4313, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 34.62, |
|
"grad_norm": 14.798726081848145, |
|
"learning_rate": 8.845845845845847e-06, |
|
"loss": 0.4441, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 34.65, |
|
"grad_norm": 7.219150543212891, |
|
"learning_rate": 8.844844844844845e-06, |
|
"loss": 0.3626, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 34.68, |
|
"grad_norm": 7.736437797546387, |
|
"learning_rate": 8.843843843843844e-06, |
|
"loss": 0.3977, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 34.71, |
|
"grad_norm": 7.897708892822266, |
|
"learning_rate": 8.842842842842844e-06, |
|
"loss": 0.4214, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 34.74, |
|
"grad_norm": 10.178305625915527, |
|
"learning_rate": 8.841841841841843e-06, |
|
"loss": 0.4245, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 34.77, |
|
"grad_norm": 12.364108085632324, |
|
"learning_rate": 8.840840840840841e-06, |
|
"loss": 0.4341, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 34.8, |
|
"grad_norm": 11.002373695373535, |
|
"learning_rate": 8.839839839839841e-06, |
|
"loss": 0.4514, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 34.83, |
|
"grad_norm": 9.911009788513184, |
|
"learning_rate": 8.838838838838838e-06, |
|
"loss": 0.4079, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 34.86, |
|
"grad_norm": 9.020291328430176, |
|
"learning_rate": 8.837837837837839e-06, |
|
"loss": 0.3755, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 34.89, |
|
"grad_norm": 7.697541236877441, |
|
"learning_rate": 8.836836836836837e-06, |
|
"loss": 0.38, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 34.92, |
|
"grad_norm": 9.926669120788574, |
|
"learning_rate": 8.835835835835836e-06, |
|
"loss": 0.4466, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 34.95, |
|
"grad_norm": 8.414145469665527, |
|
"learning_rate": 8.834834834834836e-06, |
|
"loss": 0.4046, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 34.98, |
|
"grad_norm": 12.950366020202637, |
|
"learning_rate": 8.833833833833835e-06, |
|
"loss": 0.4252, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 35.0, |
|
"eval_accuracy": 0.925, |
|
"eval_loss": 0.2602948546409607, |
|
"eval_runtime": 30.4318, |
|
"eval_samples_per_second": 328.603, |
|
"eval_steps_per_second": 1.314, |
|
"step": 11655 |
|
}, |
|
{ |
|
"epoch": 35.02, |
|
"grad_norm": 8.139337539672852, |
|
"learning_rate": 8.832832832832833e-06, |
|
"loss": 0.3753, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 35.05, |
|
"grad_norm": 19.609962463378906, |
|
"learning_rate": 8.831831831831834e-06, |
|
"loss": 0.3776, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 35.08, |
|
"grad_norm": 13.31221866607666, |
|
"learning_rate": 8.83083083083083e-06, |
|
"loss": 0.4545, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 35.11, |
|
"grad_norm": 6.971845626831055, |
|
"learning_rate": 8.82982982982983e-06, |
|
"loss": 0.4126, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 35.14, |
|
"grad_norm": 8.801642417907715, |
|
"learning_rate": 8.82882882882883e-06, |
|
"loss": 0.4099, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 35.17, |
|
"grad_norm": 11.066483497619629, |
|
"learning_rate": 8.827827827827828e-06, |
|
"loss": 0.434, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 35.2, |
|
"grad_norm": 11.330470085144043, |
|
"learning_rate": 8.826826826826828e-06, |
|
"loss": 0.4099, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 35.23, |
|
"grad_norm": 15.0160493850708, |
|
"learning_rate": 8.825825825825827e-06, |
|
"loss": 0.411, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 35.26, |
|
"grad_norm": 8.729792594909668, |
|
"learning_rate": 8.824824824824825e-06, |
|
"loss": 0.4198, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 35.29, |
|
"grad_norm": 16.530698776245117, |
|
"learning_rate": 8.823823823823826e-06, |
|
"loss": 0.4032, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 35.32, |
|
"grad_norm": 11.7534761428833, |
|
"learning_rate": 8.822822822822824e-06, |
|
"loss": 0.4211, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 35.35, |
|
"grad_norm": 13.237783432006836, |
|
"learning_rate": 8.821821821821823e-06, |
|
"loss": 0.4198, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 35.38, |
|
"grad_norm": 15.49626636505127, |
|
"learning_rate": 8.820820820820822e-06, |
|
"loss": 0.4464, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 35.41, |
|
"grad_norm": 18.350927352905273, |
|
"learning_rate": 8.81981981981982e-06, |
|
"loss": 0.4756, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 35.44, |
|
"grad_norm": 10.492507934570312, |
|
"learning_rate": 8.818818818818819e-06, |
|
"loss": 0.427, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 35.47, |
|
"grad_norm": 9.446090698242188, |
|
"learning_rate": 8.817817817817819e-06, |
|
"loss": 0.4392, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 35.5, |
|
"grad_norm": 8.739493370056152, |
|
"learning_rate": 8.816816816816818e-06, |
|
"loss": 0.4848, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 35.53, |
|
"grad_norm": 9.116613388061523, |
|
"learning_rate": 8.815815815815816e-06, |
|
"loss": 0.4331, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 35.56, |
|
"grad_norm": 14.439440727233887, |
|
"learning_rate": 8.814814814814817e-06, |
|
"loss": 0.4283, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 35.59, |
|
"grad_norm": 11.005097389221191, |
|
"learning_rate": 8.813813813813813e-06, |
|
"loss": 0.4383, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 35.62, |
|
"grad_norm": 8.86245346069336, |
|
"learning_rate": 8.812812812812814e-06, |
|
"loss": 0.3952, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 35.65, |
|
"grad_norm": 10.16314697265625, |
|
"learning_rate": 8.811811811811812e-06, |
|
"loss": 0.4468, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 35.68, |
|
"grad_norm": 15.824507713317871, |
|
"learning_rate": 8.810810810810811e-06, |
|
"loss": 0.411, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 35.71, |
|
"grad_norm": 13.30593490600586, |
|
"learning_rate": 8.809809809809811e-06, |
|
"loss": 0.4557, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 35.74, |
|
"grad_norm": 8.765216827392578, |
|
"learning_rate": 8.80880880880881e-06, |
|
"loss": 0.4089, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 35.77, |
|
"grad_norm": 8.380579948425293, |
|
"learning_rate": 8.807807807807808e-06, |
|
"loss": 0.406, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 35.8, |
|
"grad_norm": 17.145652770996094, |
|
"learning_rate": 8.806806806806809e-06, |
|
"loss": 0.4429, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 35.83, |
|
"grad_norm": 9.908981323242188, |
|
"learning_rate": 8.805805805805806e-06, |
|
"loss": 0.3746, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 35.86, |
|
"grad_norm": 29.383146286010742, |
|
"learning_rate": 8.804804804804806e-06, |
|
"loss": 0.4198, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 35.89, |
|
"grad_norm": 12.33173942565918, |
|
"learning_rate": 8.803803803803804e-06, |
|
"loss": 0.4129, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 35.92, |
|
"grad_norm": 15.631083488464355, |
|
"learning_rate": 8.802802802802803e-06, |
|
"loss": 0.4468, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 35.95, |
|
"grad_norm": 9.646747589111328, |
|
"learning_rate": 8.801801801801803e-06, |
|
"loss": 0.3624, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 35.98, |
|
"grad_norm": 11.596949577331543, |
|
"learning_rate": 8.800800800800802e-06, |
|
"loss": 0.4393, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"eval_accuracy": 0.9288, |
|
"eval_loss": 0.25744786858558655, |
|
"eval_runtime": 30.2289, |
|
"eval_samples_per_second": 330.809, |
|
"eval_steps_per_second": 1.323, |
|
"step": 11988 |
|
}, |
|
{ |
|
"epoch": 36.01, |
|
"grad_norm": 15.940702438354492, |
|
"learning_rate": 8.7997997997998e-06, |
|
"loss": 0.3525, |
|
"step": 11990 |
|
}, |
|
{ |
|
"epoch": 36.04, |
|
"grad_norm": 18.273988723754883, |
|
"learning_rate": 8.798798798798799e-06, |
|
"loss": 0.3523, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 36.07, |
|
"grad_norm": 11.517987251281738, |
|
"learning_rate": 8.797797797797798e-06, |
|
"loss": 0.4488, |
|
"step": 12010 |
|
}, |
|
{ |
|
"epoch": 36.1, |
|
"grad_norm": 9.518025398254395, |
|
"learning_rate": 8.796796796796796e-06, |
|
"loss": 0.3793, |
|
"step": 12020 |
|
}, |
|
{ |
|
"epoch": 36.13, |
|
"grad_norm": 13.380279541015625, |
|
"learning_rate": 8.795795795795797e-06, |
|
"loss": 0.4446, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 36.16, |
|
"grad_norm": 8.703398704528809, |
|
"learning_rate": 8.794794794794795e-06, |
|
"loss": 0.4429, |
|
"step": 12040 |
|
}, |
|
{ |
|
"epoch": 36.19, |
|
"grad_norm": 7.529371738433838, |
|
"learning_rate": 8.793793793793794e-06, |
|
"loss": 0.3442, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 36.22, |
|
"grad_norm": 6.543487548828125, |
|
"learning_rate": 8.792792792792794e-06, |
|
"loss": 0.416, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 36.25, |
|
"grad_norm": 10.498993873596191, |
|
"learning_rate": 8.791791791791793e-06, |
|
"loss": 0.3902, |
|
"step": 12070 |
|
}, |
|
{ |
|
"epoch": 36.28, |
|
"grad_norm": 9.740803718566895, |
|
"learning_rate": 8.790790790790791e-06, |
|
"loss": 0.4194, |
|
"step": 12080 |
|
}, |
|
{ |
|
"epoch": 36.31, |
|
"grad_norm": 9.331558227539062, |
|
"learning_rate": 8.789789789789792e-06, |
|
"loss": 0.3863, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 36.34, |
|
"grad_norm": 11.235783576965332, |
|
"learning_rate": 8.788788788788788e-06, |
|
"loss": 0.3495, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 36.37, |
|
"grad_norm": 10.817569732666016, |
|
"learning_rate": 8.787787787787789e-06, |
|
"loss": 0.3825, |
|
"step": 12110 |
|
}, |
|
{ |
|
"epoch": 36.4, |
|
"grad_norm": 6.002957820892334, |
|
"learning_rate": 8.786786786786787e-06, |
|
"loss": 0.4623, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 36.43, |
|
"grad_norm": 12.816144943237305, |
|
"learning_rate": 8.785785785785786e-06, |
|
"loss": 0.4443, |
|
"step": 12130 |
|
}, |
|
{ |
|
"epoch": 36.46, |
|
"grad_norm": 16.187400817871094, |
|
"learning_rate": 8.784784784784786e-06, |
|
"loss": 0.3743, |
|
"step": 12140 |
|
}, |
|
{ |
|
"epoch": 36.49, |
|
"grad_norm": 10.328038215637207, |
|
"learning_rate": 8.783783783783785e-06, |
|
"loss": 0.3773, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 36.52, |
|
"grad_norm": 9.40977954864502, |
|
"learning_rate": 8.782782782782783e-06, |
|
"loss": 0.4189, |
|
"step": 12160 |
|
}, |
|
{ |
|
"epoch": 36.55, |
|
"grad_norm": 14.900984764099121, |
|
"learning_rate": 8.781781781781784e-06, |
|
"loss": 0.4194, |
|
"step": 12170 |
|
}, |
|
{ |
|
"epoch": 36.58, |
|
"grad_norm": 8.397542953491211, |
|
"learning_rate": 8.78078078078078e-06, |
|
"loss": 0.3905, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 36.61, |
|
"grad_norm": 13.33767318725586, |
|
"learning_rate": 8.779779779779781e-06, |
|
"loss": 0.4135, |
|
"step": 12190 |
|
}, |
|
{ |
|
"epoch": 36.64, |
|
"grad_norm": 20.183990478515625, |
|
"learning_rate": 8.77877877877878e-06, |
|
"loss": 0.3927, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 36.67, |
|
"grad_norm": 9.835906982421875, |
|
"learning_rate": 8.777777777777778e-06, |
|
"loss": 0.4258, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 36.7, |
|
"grad_norm": 8.524311065673828, |
|
"learning_rate": 8.776776776776778e-06, |
|
"loss": 0.3815, |
|
"step": 12220 |
|
}, |
|
{ |
|
"epoch": 36.73, |
|
"grad_norm": 7.198403835296631, |
|
"learning_rate": 8.775775775775777e-06, |
|
"loss": 0.3816, |
|
"step": 12230 |
|
}, |
|
{ |
|
"epoch": 36.76, |
|
"grad_norm": 13.140053749084473, |
|
"learning_rate": 8.774774774774776e-06, |
|
"loss": 0.3481, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 36.79, |
|
"grad_norm": 13.378637313842773, |
|
"learning_rate": 8.773773773773774e-06, |
|
"loss": 0.4309, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 36.82, |
|
"grad_norm": 13.015869140625, |
|
"learning_rate": 8.772772772772773e-06, |
|
"loss": 0.4183, |
|
"step": 12260 |
|
}, |
|
{ |
|
"epoch": 36.85, |
|
"grad_norm": 9.255596160888672, |
|
"learning_rate": 8.771771771771771e-06, |
|
"loss": 0.384, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 36.88, |
|
"grad_norm": 21.15357780456543, |
|
"learning_rate": 8.770770770770772e-06, |
|
"loss": 0.4241, |
|
"step": 12280 |
|
}, |
|
{ |
|
"epoch": 36.91, |
|
"grad_norm": 41.609375, |
|
"learning_rate": 8.76976976976977e-06, |
|
"loss": 0.4053, |
|
"step": 12290 |
|
}, |
|
{ |
|
"epoch": 36.94, |
|
"grad_norm": 10.68895435333252, |
|
"learning_rate": 8.768768768768769e-06, |
|
"loss": 0.3459, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 36.97, |
|
"grad_norm": 8.08588981628418, |
|
"learning_rate": 8.767767767767769e-06, |
|
"loss": 0.4098, |
|
"step": 12310 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"grad_norm": 10.934639930725098, |
|
"learning_rate": 8.766766766766768e-06, |
|
"loss": 0.3949, |
|
"step": 12320 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"eval_accuracy": 0.9279, |
|
"eval_loss": 0.26377877593040466, |
|
"eval_runtime": 30.2658, |
|
"eval_samples_per_second": 330.406, |
|
"eval_steps_per_second": 1.322, |
|
"step": 12321 |
|
}, |
|
{ |
|
"epoch": 37.03, |
|
"grad_norm": 10.90078067779541, |
|
"learning_rate": 8.765765765765766e-06, |
|
"loss": 0.3728, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 37.06, |
|
"grad_norm": 11.749211311340332, |
|
"learning_rate": 8.764764764764767e-06, |
|
"loss": 0.4014, |
|
"step": 12340 |
|
}, |
|
{ |
|
"epoch": 37.09, |
|
"grad_norm": 23.710084915161133, |
|
"learning_rate": 8.763763763763763e-06, |
|
"loss": 0.4004, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 37.12, |
|
"grad_norm": 11.619260787963867, |
|
"learning_rate": 8.762762762762764e-06, |
|
"loss": 0.4439, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 37.15, |
|
"grad_norm": 10.997756958007812, |
|
"learning_rate": 8.761761761761762e-06, |
|
"loss": 0.3705, |
|
"step": 12370 |
|
}, |
|
{ |
|
"epoch": 37.18, |
|
"grad_norm": 8.640237808227539, |
|
"learning_rate": 8.760760760760761e-06, |
|
"loss": 0.4049, |
|
"step": 12380 |
|
}, |
|
{ |
|
"epoch": 37.21, |
|
"grad_norm": 13.680813789367676, |
|
"learning_rate": 8.759759759759761e-06, |
|
"loss": 0.4087, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 37.24, |
|
"grad_norm": 14.672085762023926, |
|
"learning_rate": 8.75875875875876e-06, |
|
"loss": 0.4006, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 37.27, |
|
"grad_norm": 8.288801193237305, |
|
"learning_rate": 8.757757757757758e-06, |
|
"loss": 0.4078, |
|
"step": 12410 |
|
}, |
|
{ |
|
"epoch": 37.3, |
|
"grad_norm": 5.074464797973633, |
|
"learning_rate": 8.756756756756759e-06, |
|
"loss": 0.3809, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 37.33, |
|
"grad_norm": 15.01401138305664, |
|
"learning_rate": 8.755755755755756e-06, |
|
"loss": 0.4182, |
|
"step": 12430 |
|
}, |
|
{ |
|
"epoch": 37.36, |
|
"grad_norm": 8.366719245910645, |
|
"learning_rate": 8.754754754754756e-06, |
|
"loss": 0.4188, |
|
"step": 12440 |
|
}, |
|
{ |
|
"epoch": 37.39, |
|
"grad_norm": 11.154147148132324, |
|
"learning_rate": 8.753753753753755e-06, |
|
"loss": 0.3938, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 37.42, |
|
"grad_norm": 11.058762550354004, |
|
"learning_rate": 8.752752752752753e-06, |
|
"loss": 0.3139, |
|
"step": 12460 |
|
}, |
|
{ |
|
"epoch": 37.45, |
|
"grad_norm": 15.326699256896973, |
|
"learning_rate": 8.751751751751752e-06, |
|
"loss": 0.4496, |
|
"step": 12470 |
|
}, |
|
{ |
|
"epoch": 37.48, |
|
"grad_norm": 9.941515922546387, |
|
"learning_rate": 8.750750750750752e-06, |
|
"loss": 0.4487, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 37.51, |
|
"grad_norm": 11.2483491897583, |
|
"learning_rate": 8.74974974974975e-06, |
|
"loss": 0.3616, |
|
"step": 12490 |
|
}, |
|
{ |
|
"epoch": 37.54, |
|
"grad_norm": 11.386582374572754, |
|
"learning_rate": 8.74874874874875e-06, |
|
"loss": 0.3986, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 37.57, |
|
"grad_norm": 7.893359184265137, |
|
"learning_rate": 8.747747747747748e-06, |
|
"loss": 0.3932, |
|
"step": 12510 |
|
}, |
|
{ |
|
"epoch": 37.6, |
|
"grad_norm": 8.38857650756836, |
|
"learning_rate": 8.746746746746746e-06, |
|
"loss": 0.4209, |
|
"step": 12520 |
|
}, |
|
{ |
|
"epoch": 37.63, |
|
"grad_norm": 12.934131622314453, |
|
"learning_rate": 8.745745745745747e-06, |
|
"loss": 0.4506, |
|
"step": 12530 |
|
}, |
|
{ |
|
"epoch": 37.66, |
|
"grad_norm": 11.205546379089355, |
|
"learning_rate": 8.744744744744745e-06, |
|
"loss": 0.4014, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 37.69, |
|
"grad_norm": 51.77103042602539, |
|
"learning_rate": 8.743743743743744e-06, |
|
"loss": 0.3818, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 37.72, |
|
"grad_norm": 12.370538711547852, |
|
"learning_rate": 8.742742742742744e-06, |
|
"loss": 0.3402, |
|
"step": 12560 |
|
}, |
|
{ |
|
"epoch": 37.75, |
|
"grad_norm": 8.91227912902832, |
|
"learning_rate": 8.741741741741743e-06, |
|
"loss": 0.3446, |
|
"step": 12570 |
|
}, |
|
{ |
|
"epoch": 37.78, |
|
"grad_norm": 9.033913612365723, |
|
"learning_rate": 8.740740740740741e-06, |
|
"loss": 0.3339, |
|
"step": 12580 |
|
}, |
|
{ |
|
"epoch": 37.81, |
|
"grad_norm": 15.76467514038086, |
|
"learning_rate": 8.739739739739742e-06, |
|
"loss": 0.4333, |
|
"step": 12590 |
|
}, |
|
{ |
|
"epoch": 37.84, |
|
"grad_norm": 12.027488708496094, |
|
"learning_rate": 8.738738738738739e-06, |
|
"loss": 0.353, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 37.87, |
|
"grad_norm": 11.376276016235352, |
|
"learning_rate": 8.737737737737739e-06, |
|
"loss": 0.4121, |
|
"step": 12610 |
|
}, |
|
{ |
|
"epoch": 37.9, |
|
"grad_norm": 9.924150466918945, |
|
"learning_rate": 8.736736736736737e-06, |
|
"loss": 0.3352, |
|
"step": 12620 |
|
}, |
|
{ |
|
"epoch": 37.93, |
|
"grad_norm": 11.061473846435547, |
|
"learning_rate": 8.735735735735736e-06, |
|
"loss": 0.4188, |
|
"step": 12630 |
|
}, |
|
{ |
|
"epoch": 37.96, |
|
"grad_norm": 9.54149341583252, |
|
"learning_rate": 8.734734734734736e-06, |
|
"loss": 0.4167, |
|
"step": 12640 |
|
}, |
|
{ |
|
"epoch": 37.99, |
|
"grad_norm": 10.981230735778809, |
|
"learning_rate": 8.733733733733735e-06, |
|
"loss": 0.4458, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"eval_accuracy": 0.9281, |
|
"eval_loss": 0.2582415044307709, |
|
"eval_runtime": 30.5044, |
|
"eval_samples_per_second": 327.822, |
|
"eval_steps_per_second": 1.311, |
|
"step": 12654 |
|
}, |
|
{ |
|
"epoch": 38.02, |
|
"grad_norm": 21.35448455810547, |
|
"learning_rate": 8.732732732732733e-06, |
|
"loss": 0.3786, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 38.05, |
|
"grad_norm": 11.46558952331543, |
|
"learning_rate": 8.731731731731734e-06, |
|
"loss": 0.3863, |
|
"step": 12670 |
|
}, |
|
{ |
|
"epoch": 38.08, |
|
"grad_norm": 10.586058616638184, |
|
"learning_rate": 8.73073073073073e-06, |
|
"loss": 0.3799, |
|
"step": 12680 |
|
}, |
|
{ |
|
"epoch": 38.11, |
|
"grad_norm": 10.60461139678955, |
|
"learning_rate": 8.72972972972973e-06, |
|
"loss": 0.3601, |
|
"step": 12690 |
|
}, |
|
{ |
|
"epoch": 38.14, |
|
"grad_norm": 9.384730339050293, |
|
"learning_rate": 8.72872872872873e-06, |
|
"loss": 0.4018, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 38.17, |
|
"grad_norm": 8.258257865905762, |
|
"learning_rate": 8.727727727727728e-06, |
|
"loss": 0.3671, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 38.2, |
|
"grad_norm": 8.778036117553711, |
|
"learning_rate": 8.726726726726727e-06, |
|
"loss": 0.4141, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 38.23, |
|
"grad_norm": 23.05480194091797, |
|
"learning_rate": 8.725725725725727e-06, |
|
"loss": 0.3664, |
|
"step": 12730 |
|
}, |
|
{ |
|
"epoch": 38.26, |
|
"grad_norm": 14.267335891723633, |
|
"learning_rate": 8.724724724724726e-06, |
|
"loss": 0.3887, |
|
"step": 12740 |
|
}, |
|
{ |
|
"epoch": 38.29, |
|
"grad_norm": 10.668434143066406, |
|
"learning_rate": 8.723723723723724e-06, |
|
"loss": 0.4095, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 38.32, |
|
"grad_norm": 9.362272262573242, |
|
"learning_rate": 8.722722722722723e-06, |
|
"loss": 0.4313, |
|
"step": 12760 |
|
}, |
|
{ |
|
"epoch": 38.35, |
|
"grad_norm": 9.018074989318848, |
|
"learning_rate": 8.721721721721721e-06, |
|
"loss": 0.4027, |
|
"step": 12770 |
|
}, |
|
{ |
|
"epoch": 38.38, |
|
"grad_norm": 6.7046051025390625, |
|
"learning_rate": 8.720720720720722e-06, |
|
"loss": 0.3636, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 38.41, |
|
"grad_norm": 14.318246841430664, |
|
"learning_rate": 8.71971971971972e-06, |
|
"loss": 0.3956, |
|
"step": 12790 |
|
}, |
|
{ |
|
"epoch": 38.44, |
|
"grad_norm": 14.366408348083496, |
|
"learning_rate": 8.718718718718719e-06, |
|
"loss": 0.412, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 38.47, |
|
"grad_norm": 10.622831344604492, |
|
"learning_rate": 8.71771771771772e-06, |
|
"loss": 0.3799, |
|
"step": 12810 |
|
}, |
|
{ |
|
"epoch": 38.5, |
|
"grad_norm": 7.679351806640625, |
|
"learning_rate": 8.716716716716718e-06, |
|
"loss": 0.406, |
|
"step": 12820 |
|
}, |
|
{ |
|
"epoch": 38.53, |
|
"grad_norm": 10.854029655456543, |
|
"learning_rate": 8.715715715715716e-06, |
|
"loss": 0.3781, |
|
"step": 12830 |
|
}, |
|
{ |
|
"epoch": 38.56, |
|
"grad_norm": 10.242359161376953, |
|
"learning_rate": 8.714714714714717e-06, |
|
"loss": 0.3905, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 38.59, |
|
"grad_norm": 17.268033981323242, |
|
"learning_rate": 8.713713713713714e-06, |
|
"loss": 0.3843, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 38.62, |
|
"grad_norm": 14.737871170043945, |
|
"learning_rate": 8.712712712712714e-06, |
|
"loss": 0.435, |
|
"step": 12860 |
|
}, |
|
{ |
|
"epoch": 38.65, |
|
"grad_norm": 9.059165954589844, |
|
"learning_rate": 8.711711711711712e-06, |
|
"loss": 0.3855, |
|
"step": 12870 |
|
}, |
|
{ |
|
"epoch": 38.68, |
|
"grad_norm": 10.406525611877441, |
|
"learning_rate": 8.710710710710711e-06, |
|
"loss": 0.4175, |
|
"step": 12880 |
|
}, |
|
{ |
|
"epoch": 38.71, |
|
"grad_norm": 7.349973678588867, |
|
"learning_rate": 8.709709709709711e-06, |
|
"loss": 0.4328, |
|
"step": 12890 |
|
}, |
|
{ |
|
"epoch": 38.74, |
|
"grad_norm": 12.82719612121582, |
|
"learning_rate": 8.70870870870871e-06, |
|
"loss": 0.4047, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 38.77, |
|
"grad_norm": 7.317464828491211, |
|
"learning_rate": 8.707707707707708e-06, |
|
"loss": 0.3422, |
|
"step": 12910 |
|
}, |
|
{ |
|
"epoch": 38.8, |
|
"grad_norm": 8.046574592590332, |
|
"learning_rate": 8.706706706706707e-06, |
|
"loss": 0.3867, |
|
"step": 12920 |
|
}, |
|
{ |
|
"epoch": 38.83, |
|
"grad_norm": 7.7277913093566895, |
|
"learning_rate": 8.705705705705706e-06, |
|
"loss": 0.378, |
|
"step": 12930 |
|
}, |
|
{ |
|
"epoch": 38.86, |
|
"grad_norm": 8.408747673034668, |
|
"learning_rate": 8.704704704704704e-06, |
|
"loss": 0.3773, |
|
"step": 12940 |
|
}, |
|
{ |
|
"epoch": 38.89, |
|
"grad_norm": 32.56877899169922, |
|
"learning_rate": 8.703703703703705e-06, |
|
"loss": 0.4154, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 38.92, |
|
"grad_norm": 6.650607585906982, |
|
"learning_rate": 8.702702702702703e-06, |
|
"loss": 0.4141, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 38.95, |
|
"grad_norm": 76.25609588623047, |
|
"learning_rate": 8.701701701701702e-06, |
|
"loss": 0.4162, |
|
"step": 12970 |
|
}, |
|
{ |
|
"epoch": 38.98, |
|
"grad_norm": 15.734679222106934, |
|
"learning_rate": 8.700700700700702e-06, |
|
"loss": 0.3999, |
|
"step": 12980 |
|
}, |
|
{ |
|
"epoch": 39.0, |
|
"eval_accuracy": 0.928, |
|
"eval_loss": 0.2580805718898773, |
|
"eval_runtime": 30.6861, |
|
"eval_samples_per_second": 325.88, |
|
"eval_steps_per_second": 1.304, |
|
"step": 12987 |
|
}, |
|
{ |
|
"epoch": 39.01, |
|
"grad_norm": 13.652566909790039, |
|
"learning_rate": 8.6996996996997e-06, |
|
"loss": 0.3645, |
|
"step": 12990 |
|
}, |
|
{ |
|
"epoch": 39.04, |
|
"grad_norm": 9.84151840209961, |
|
"learning_rate": 8.6986986986987e-06, |
|
"loss": 0.3847, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 39.07, |
|
"grad_norm": 9.637248039245605, |
|
"learning_rate": 8.697697697697698e-06, |
|
"loss": 0.3613, |
|
"step": 13010 |
|
}, |
|
{ |
|
"epoch": 39.1, |
|
"grad_norm": 11.930158615112305, |
|
"learning_rate": 8.696696696696696e-06, |
|
"loss": 0.435, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 39.13, |
|
"grad_norm": 24.212404251098633, |
|
"learning_rate": 8.695695695695697e-06, |
|
"loss": 0.3543, |
|
"step": 13030 |
|
}, |
|
{ |
|
"epoch": 39.16, |
|
"grad_norm": 8.99075984954834, |
|
"learning_rate": 8.694694694694695e-06, |
|
"loss": 0.4157, |
|
"step": 13040 |
|
}, |
|
{ |
|
"epoch": 39.19, |
|
"grad_norm": 10.34455680847168, |
|
"learning_rate": 8.693693693693694e-06, |
|
"loss": 0.3976, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 39.22, |
|
"grad_norm": 11.143070220947266, |
|
"learning_rate": 8.692692692692694e-06, |
|
"loss": 0.3574, |
|
"step": 13060 |
|
}, |
|
{ |
|
"epoch": 39.25, |
|
"grad_norm": 11.767276763916016, |
|
"learning_rate": 8.691691691691693e-06, |
|
"loss": 0.3517, |
|
"step": 13070 |
|
}, |
|
{ |
|
"epoch": 39.28, |
|
"grad_norm": 6.911677837371826, |
|
"learning_rate": 8.690690690690691e-06, |
|
"loss": 0.3652, |
|
"step": 13080 |
|
}, |
|
{ |
|
"epoch": 39.31, |
|
"grad_norm": 16.10271453857422, |
|
"learning_rate": 8.689689689689692e-06, |
|
"loss": 0.4658, |
|
"step": 13090 |
|
}, |
|
{ |
|
"epoch": 39.34, |
|
"grad_norm": 11.662455558776855, |
|
"learning_rate": 8.688688688688689e-06, |
|
"loss": 0.4222, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 39.37, |
|
"grad_norm": 10.91153335571289, |
|
"learning_rate": 8.687687687687689e-06, |
|
"loss": 0.4105, |
|
"step": 13110 |
|
}, |
|
{ |
|
"epoch": 39.4, |
|
"grad_norm": 23.97760772705078, |
|
"learning_rate": 8.686686686686687e-06, |
|
"loss": 0.3721, |
|
"step": 13120 |
|
}, |
|
{ |
|
"epoch": 39.43, |
|
"grad_norm": 7.614724159240723, |
|
"learning_rate": 8.685685685685686e-06, |
|
"loss": 0.424, |
|
"step": 13130 |
|
}, |
|
{ |
|
"epoch": 39.46, |
|
"grad_norm": 12.252328872680664, |
|
"learning_rate": 8.684684684684686e-06, |
|
"loss": 0.3959, |
|
"step": 13140 |
|
}, |
|
{ |
|
"epoch": 39.49, |
|
"grad_norm": 10.605676651000977, |
|
"learning_rate": 8.683683683683685e-06, |
|
"loss": 0.3853, |
|
"step": 13150 |
|
}, |
|
{ |
|
"epoch": 39.52, |
|
"grad_norm": 9.702637672424316, |
|
"learning_rate": 8.682682682682684e-06, |
|
"loss": 0.3547, |
|
"step": 13160 |
|
}, |
|
{ |
|
"epoch": 39.55, |
|
"grad_norm": 8.899740219116211, |
|
"learning_rate": 8.681681681681682e-06, |
|
"loss": 0.4048, |
|
"step": 13170 |
|
}, |
|
{ |
|
"epoch": 39.58, |
|
"grad_norm": 11.565757751464844, |
|
"learning_rate": 8.68068068068068e-06, |
|
"loss": 0.4059, |
|
"step": 13180 |
|
}, |
|
{ |
|
"epoch": 39.61, |
|
"grad_norm": 10.45947551727295, |
|
"learning_rate": 8.67967967967968e-06, |
|
"loss": 0.388, |
|
"step": 13190 |
|
}, |
|
{ |
|
"epoch": 39.64, |
|
"grad_norm": 8.317366600036621, |
|
"learning_rate": 8.67867867867868e-06, |
|
"loss": 0.4108, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 39.67, |
|
"grad_norm": 6.8623433113098145, |
|
"learning_rate": 8.677677677677678e-06, |
|
"loss": 0.4123, |
|
"step": 13210 |
|
}, |
|
{ |
|
"epoch": 39.7, |
|
"grad_norm": 5.949448108673096, |
|
"learning_rate": 8.676676676676677e-06, |
|
"loss": 0.407, |
|
"step": 13220 |
|
}, |
|
{ |
|
"epoch": 39.73, |
|
"grad_norm": 9.582085609436035, |
|
"learning_rate": 8.675675675675677e-06, |
|
"loss": 0.4188, |
|
"step": 13230 |
|
}, |
|
{ |
|
"epoch": 39.76, |
|
"grad_norm": 10.572515487670898, |
|
"learning_rate": 8.674674674674676e-06, |
|
"loss": 0.4289, |
|
"step": 13240 |
|
}, |
|
{ |
|
"epoch": 39.79, |
|
"grad_norm": 17.77850341796875, |
|
"learning_rate": 8.673673673673674e-06, |
|
"loss": 0.4613, |
|
"step": 13250 |
|
}, |
|
{ |
|
"epoch": 39.82, |
|
"grad_norm": 17.842029571533203, |
|
"learning_rate": 8.672672672672673e-06, |
|
"loss": 0.4173, |
|
"step": 13260 |
|
}, |
|
{ |
|
"epoch": 39.85, |
|
"grad_norm": 11.029363632202148, |
|
"learning_rate": 8.671671671671671e-06, |
|
"loss": 0.4433, |
|
"step": 13270 |
|
}, |
|
{ |
|
"epoch": 39.88, |
|
"grad_norm": 11.918964385986328, |
|
"learning_rate": 8.670670670670672e-06, |
|
"loss": 0.3568, |
|
"step": 13280 |
|
}, |
|
{ |
|
"epoch": 39.91, |
|
"grad_norm": 13.341336250305176, |
|
"learning_rate": 8.66966966966967e-06, |
|
"loss": 0.3835, |
|
"step": 13290 |
|
}, |
|
{ |
|
"epoch": 39.94, |
|
"grad_norm": 6.750630855560303, |
|
"learning_rate": 8.668668668668669e-06, |
|
"loss": 0.3711, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 39.97, |
|
"grad_norm": 23.237773895263672, |
|
"learning_rate": 8.66766766766767e-06, |
|
"loss": 0.3866, |
|
"step": 13310 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"grad_norm": 6.4752349853515625, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.3887, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"eval_accuracy": 0.9301, |
|
"eval_loss": 0.25753843784332275, |
|
"eval_runtime": 31.0782, |
|
"eval_samples_per_second": 321.769, |
|
"eval_steps_per_second": 1.287, |
|
"step": 13320 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 99900, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 300, |
|
"save_steps": 500, |
|
"total_flos": 1.333416740511744e+20, |
|
"train_batch_size": 128, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |