{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.999921758860809,
"eval_steps": 500,
"global_step": 3195,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 0.64453125,
"learning_rate": 6.25e-07,
"loss": 1.385,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 0.52734375,
"learning_rate": 1.25e-06,
"loss": 1.4608,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 0.255859375,
"learning_rate": 1.8750000000000003e-06,
"loss": 1.4275,
"step": 30
},
{
"epoch": 0.01,
"grad_norm": 0.54296875,
"learning_rate": 2.5e-06,
"loss": 1.4101,
"step": 40
},
{
"epoch": 0.02,
"grad_norm": 0.59765625,
"learning_rate": 3.125e-06,
"loss": 1.4565,
"step": 50
},
{
"epoch": 0.02,
"grad_norm": 0.236328125,
"learning_rate": 3.7500000000000005e-06,
"loss": 1.4078,
"step": 60
},
{
"epoch": 0.02,
"grad_norm": 0.1884765625,
"learning_rate": 4.3750000000000005e-06,
"loss": 1.3982,
"step": 70
},
{
"epoch": 0.03,
"grad_norm": 0.294921875,
"learning_rate": 5e-06,
"loss": 1.4799,
"step": 80
},
{
"epoch": 0.03,
"grad_norm": 0.138671875,
"learning_rate": 5.625e-06,
"loss": 1.4549,
"step": 90
},
{
"epoch": 0.03,
"grad_norm": 1.625,
"learning_rate": 6.25e-06,
"loss": 1.4205,
"step": 100
},
{
"epoch": 0.03,
"grad_norm": 0.33984375,
"learning_rate": 6.875e-06,
"loss": 1.4777,
"step": 110
},
{
"epoch": 0.04,
"grad_norm": 0.8359375,
"learning_rate": 7.500000000000001e-06,
"loss": 1.4071,
"step": 120
},
{
"epoch": 0.04,
"grad_norm": 0.35546875,
"learning_rate": 8.125000000000001e-06,
"loss": 1.3978,
"step": 130
},
{
"epoch": 0.04,
"grad_norm": 0.267578125,
"learning_rate": 8.750000000000001e-06,
"loss": 1.3935,
"step": 140
},
{
"epoch": 0.05,
"grad_norm": 0.7578125,
"learning_rate": 9.375000000000001e-06,
"loss": 1.3514,
"step": 150
},
{
"epoch": 0.05,
"grad_norm": 0.279296875,
"learning_rate": 1e-05,
"loss": 1.3446,
"step": 160
},
{
"epoch": 0.05,
"grad_norm": 0.1201171875,
"learning_rate": 1.0625e-05,
"loss": 1.2796,
"step": 170
},
{
"epoch": 0.06,
"grad_norm": 0.103515625,
"learning_rate": 1.125e-05,
"loss": 1.3715,
"step": 180
},
{
"epoch": 0.06,
"grad_norm": 0.08740234375,
"learning_rate": 1.1875e-05,
"loss": 1.2741,
"step": 190
},
{
"epoch": 0.06,
"grad_norm": 0.1552734375,
"learning_rate": 1.25e-05,
"loss": 1.2679,
"step": 200
},
{
"epoch": 0.07,
"grad_norm": 0.1357421875,
"learning_rate": 1.3125e-05,
"loss": 1.2512,
"step": 210
},
{
"epoch": 0.07,
"grad_norm": 0.1484375,
"learning_rate": 1.375e-05,
"loss": 1.1982,
"step": 220
},
{
"epoch": 0.07,
"grad_norm": 0.0947265625,
"learning_rate": 1.4375e-05,
"loss": 1.2303,
"step": 230
},
{
"epoch": 0.08,
"grad_norm": 0.48828125,
"learning_rate": 1.5000000000000002e-05,
"loss": 1.2074,
"step": 240
},
{
"epoch": 0.08,
"grad_norm": 0.28515625,
"learning_rate": 1.5625e-05,
"loss": 1.2086,
"step": 250
},
{
"epoch": 0.08,
"grad_norm": 0.1884765625,
"learning_rate": 1.6250000000000002e-05,
"loss": 1.1753,
"step": 260
},
{
"epoch": 0.08,
"grad_norm": 0.0751953125,
"learning_rate": 1.6875e-05,
"loss": 1.1524,
"step": 270
},
{
"epoch": 0.09,
"grad_norm": 0.07470703125,
"learning_rate": 1.7500000000000002e-05,
"loss": 1.174,
"step": 280
},
{
"epoch": 0.09,
"grad_norm": 0.10400390625,
"learning_rate": 1.8125e-05,
"loss": 1.1385,
"step": 290
},
{
"epoch": 0.09,
"grad_norm": 0.068359375,
"learning_rate": 1.8750000000000002e-05,
"loss": 1.1546,
"step": 300
},
{
"epoch": 0.1,
"grad_norm": 0.103515625,
"learning_rate": 1.9375e-05,
"loss": 1.1263,
"step": 310
},
{
"epoch": 0.1,
"grad_norm": 0.0625,
"learning_rate": 2e-05,
"loss": 1.122,
"step": 320
},
{
"epoch": 0.1,
"grad_norm": 0.2080078125,
"learning_rate": 1.999940297883134e-05,
"loss": 1.0784,
"step": 330
},
{
"epoch": 0.11,
"grad_norm": 0.0634765625,
"learning_rate": 1.9997611986612203e-05,
"loss": 1.0684,
"step": 340
},
{
"epoch": 0.11,
"grad_norm": 0.10205078125,
"learning_rate": 1.9994627237194654e-05,
"loss": 1.0772,
"step": 350
},
{
"epoch": 0.11,
"grad_norm": 0.1298828125,
"learning_rate": 1.9990449086970404e-05,
"loss": 1.0837,
"step": 360
},
{
"epoch": 0.12,
"grad_norm": 0.255859375,
"learning_rate": 1.998507803482828e-05,
"loss": 1.0269,
"step": 370
},
{
"epoch": 0.12,
"grad_norm": 0.059814453125,
"learning_rate": 1.997851472209465e-05,
"loss": 1.0229,
"step": 380
},
{
"epoch": 0.12,
"grad_norm": 0.060791015625,
"learning_rate": 1.9970759932456836e-05,
"loss": 1.0335,
"step": 390
},
{
"epoch": 0.13,
"grad_norm": 0.06005859375,
"learning_rate": 1.9961814591869558e-05,
"loss": 1.031,
"step": 400
},
{
"epoch": 0.13,
"grad_norm": 0.058349609375,
"learning_rate": 1.9951679768444346e-05,
"loss": 1.0339,
"step": 410
},
{
"epoch": 0.13,
"grad_norm": 0.047119140625,
"learning_rate": 1.9940356672322037e-05,
"loss": 0.9754,
"step": 420
},
{
"epoch": 0.13,
"grad_norm": 0.07373046875,
"learning_rate": 1.992784665552824e-05,
"loss": 0.9955,
"step": 430
},
{
"epoch": 0.14,
"grad_norm": 0.051513671875,
"learning_rate": 1.9914151211811924e-05,
"loss": 1.0095,
"step": 440
},
{
"epoch": 0.14,
"grad_norm": 0.0625,
"learning_rate": 1.9899271976467058e-05,
"loss": 0.9758,
"step": 450
},
{
"epoch": 0.14,
"grad_norm": 0.08935546875,
"learning_rate": 1.9883210726137324e-05,
"loss": 0.9989,
"step": 460
},
{
"epoch": 0.15,
"grad_norm": 0.05029296875,
"learning_rate": 1.9865969378604023e-05,
"loss": 0.9932,
"step": 470
},
{
"epoch": 0.15,
"grad_norm": 0.13671875,
"learning_rate": 1.984754999255704e-05,
"loss": 0.9908,
"step": 480
},
{
"epoch": 0.15,
"grad_norm": 0.154296875,
"learning_rate": 1.982795476734905e-05,
"loss": 0.9849,
"step": 490
},
{
"epoch": 0.16,
"grad_norm": 0.058837890625,
"learning_rate": 1.9807186042732908e-05,
"loss": 0.9772,
"step": 500
},
{
"epoch": 0.16,
"grad_norm": 0.05908203125,
"learning_rate": 1.9785246298582262e-05,
"loss": 0.9763,
"step": 510
},
{
"epoch": 0.16,
"grad_norm": 0.0576171875,
"learning_rate": 1.9762138154595448e-05,
"loss": 0.975,
"step": 520
},
{
"epoch": 0.17,
"grad_norm": 0.057861328125,
"learning_rate": 1.9737864369982695e-05,
"loss": 0.9361,
"step": 530
},
{
"epoch": 0.17,
"grad_norm": 0.06396484375,
"learning_rate": 1.971242784313665e-05,
"loss": 0.9515,
"step": 540
},
{
"epoch": 0.17,
"grad_norm": 0.059814453125,
"learning_rate": 1.9685831611286312e-05,
"loss": 0.9349,
"step": 550
},
{
"epoch": 0.18,
"grad_norm": 0.087890625,
"learning_rate": 1.965807885013437e-05,
"loss": 1.0151,
"step": 560
},
{
"epoch": 0.18,
"grad_norm": 0.07080078125,
"learning_rate": 1.9629172873477995e-05,
"loss": 0.9818,
"step": 570
},
{
"epoch": 0.18,
"grad_norm": 0.08349609375,
"learning_rate": 1.9599117132813187e-05,
"loss": 0.9444,
"step": 580
},
{
"epoch": 0.18,
"grad_norm": 0.05908203125,
"learning_rate": 1.9567915216922624e-05,
"loss": 0.946,
"step": 590
},
{
"epoch": 0.19,
"grad_norm": 0.05078125,
"learning_rate": 1.9535570851447166e-05,
"loss": 0.9646,
"step": 600
},
{
"epoch": 0.19,
"grad_norm": 0.05078125,
"learning_rate": 1.9502087898440988e-05,
"loss": 1.0191,
"step": 610
},
{
"epoch": 0.19,
"grad_norm": 0.0673828125,
"learning_rate": 1.9467470355910438e-05,
"loss": 0.9789,
"step": 620
},
{
"epoch": 0.2,
"grad_norm": 0.058837890625,
"learning_rate": 1.9431722357336657e-05,
"loss": 0.9413,
"step": 630
},
{
"epoch": 0.2,
"grad_norm": 0.0986328125,
"learning_rate": 1.939484817118202e-05,
"loss": 0.974,
"step": 640
},
{
"epoch": 0.2,
"grad_norm": 0.181640625,
"learning_rate": 1.9356852200380466e-05,
"loss": 0.9416,
"step": 650
},
{
"epoch": 0.21,
"grad_norm": 0.04931640625,
"learning_rate": 1.9317738981811776e-05,
"loss": 0.9316,
"step": 660
},
{
"epoch": 0.21,
"grad_norm": 0.060791015625,
"learning_rate": 1.9277513185759847e-05,
"loss": 0.956,
"step": 670
},
{
"epoch": 0.21,
"grad_norm": 0.05078125,
"learning_rate": 1.9236179615355026e-05,
"loss": 0.9124,
"step": 680
},
{
"epoch": 0.22,
"grad_norm": 0.10302734375,
"learning_rate": 1.9193743206000618e-05,
"loss": 0.9405,
"step": 690
},
{
"epoch": 0.22,
"grad_norm": 0.048583984375,
"learning_rate": 1.9150209024783564e-05,
"loss": 0.9711,
"step": 700
},
{
"epoch": 0.22,
"grad_norm": 0.0732421875,
"learning_rate": 1.9105582269869413e-05,
"loss": 0.9114,
"step": 710
},
{
"epoch": 0.23,
"grad_norm": 0.1103515625,
"learning_rate": 1.9059868269881637e-05,
"loss": 0.9828,
"step": 720
},
{
"epoch": 0.23,
"grad_norm": 0.14453125,
"learning_rate": 1.9013072483265377e-05,
"loss": 0.9121,
"step": 730
},
{
"epoch": 0.23,
"grad_norm": 0.05615234375,
"learning_rate": 1.896520049763568e-05,
"loss": 0.9412,
"step": 740
},
{
"epoch": 0.23,
"grad_norm": 0.052490234375,
"learning_rate": 1.8916258029110305e-05,
"loss": 0.9624,
"step": 750
},
{
"epoch": 0.24,
"grad_norm": 0.06005859375,
"learning_rate": 1.88662509216272e-05,
"loss": 0.926,
"step": 760
},
{
"epoch": 0.24,
"grad_norm": 0.060791015625,
"learning_rate": 1.8815185146246718e-05,
"loss": 0.9164,
"step": 770
},
{
"epoch": 0.24,
"grad_norm": 0.072265625,
"learning_rate": 1.8763066800438638e-05,
"loss": 0.9532,
"step": 780
},
{
"epoch": 0.25,
"grad_norm": 0.06494140625,
"learning_rate": 1.87099021073541e-05,
"loss": 0.944,
"step": 790
},
{
"epoch": 0.25,
"grad_norm": 0.11181640625,
"learning_rate": 1.8655697415082556e-05,
"loss": 0.9284,
"step": 800
},
{
"epoch": 0.25,
"grad_norm": 0.3984375,
"learning_rate": 1.8600459195893737e-05,
"loss": 0.9235,
"step": 810
},
{
"epoch": 0.26,
"grad_norm": 0.189453125,
"learning_rate": 1.8544194045464888e-05,
"loss": 0.9442,
"step": 820
},
{
"epoch": 0.26,
"grad_norm": 0.072265625,
"learning_rate": 1.8486908682093175e-05,
"loss": 0.9653,
"step": 830
},
{
"epoch": 0.26,
"grad_norm": 0.04736328125,
"learning_rate": 1.842860994589352e-05,
"loss": 0.9122,
"step": 840
},
{
"epoch": 0.27,
"grad_norm": 0.271484375,
"learning_rate": 1.8369304797981843e-05,
"loss": 0.9183,
"step": 850
},
{
"epoch": 0.27,
"grad_norm": 0.058837890625,
"learning_rate": 1.8309000319643892e-05,
"loss": 0.9078,
"step": 860
},
{
"epoch": 0.27,
"grad_norm": 0.04931640625,
"learning_rate": 1.8247703711489684e-05,
"loss": 0.9404,
"step": 870
},
{
"epoch": 0.28,
"grad_norm": 0.060546875,
"learning_rate": 1.818542229259376e-05,
"loss": 0.9584,
"step": 880
},
{
"epoch": 0.28,
"grad_norm": 0.056640625,
"learning_rate": 1.8122163499621208e-05,
"loss": 0.9051,
"step": 890
},
{
"epoch": 0.28,
"grad_norm": 0.053955078125,
"learning_rate": 1.8057934885939734e-05,
"loss": 0.9525,
"step": 900
},
{
"epoch": 0.28,
"grad_norm": 0.10791015625,
"learning_rate": 1.7992744120717735e-05,
"loss": 0.9301,
"step": 910
},
{
"epoch": 0.29,
"grad_norm": 0.055419921875,
"learning_rate": 1.7926598988008584e-05,
"loss": 0.9319,
"step": 920
},
{
"epoch": 0.29,
"grad_norm": 0.051513671875,
"learning_rate": 1.7859507385821163e-05,
"loss": 0.9408,
"step": 930
},
{
"epoch": 0.29,
"grad_norm": 0.0771484375,
"learning_rate": 1.7791477325176824e-05,
"loss": 0.9646,
"step": 940
},
{
"epoch": 0.3,
"grad_norm": 0.0625,
"learning_rate": 1.7722516929152828e-05,
"loss": 0.9263,
"step": 950
},
{
"epoch": 0.3,
"grad_norm": 0.05419921875,
"learning_rate": 1.7652634431912417e-05,
"loss": 0.9605,
"step": 960
},
{
"epoch": 0.3,
"grad_norm": 0.05224609375,
"learning_rate": 1.758183817772163e-05,
"loss": 0.9097,
"step": 970
},
{
"epoch": 0.31,
"grad_norm": 0.072265625,
"learning_rate": 1.7510136619952947e-05,
"loss": 0.9178,
"step": 980
},
{
"epoch": 0.31,
"grad_norm": 0.0595703125,
"learning_rate": 1.743753832007593e-05,
"loss": 0.8983,
"step": 990
},
{
"epoch": 0.31,
"grad_norm": 0.142578125,
"learning_rate": 1.7364051946634953e-05,
"loss": 0.9637,
"step": 1000
},
{
"epoch": 0.32,
"grad_norm": 0.07275390625,
"learning_rate": 1.7289686274214116e-05,
"loss": 0.9142,
"step": 1010
},
{
"epoch": 0.32,
"grad_norm": 0.091796875,
"learning_rate": 1.721445018238956e-05,
"loss": 0.9663,
"step": 1020
},
{
"epoch": 0.32,
"grad_norm": 0.0732421875,
"learning_rate": 1.713835265466917e-05,
"loss": 0.9184,
"step": 1030
},
{
"epoch": 0.33,
"grad_norm": 0.087890625,
"learning_rate": 1.706140277741994e-05,
"loss": 0.9207,
"step": 1040
},
{
"epoch": 0.33,
"grad_norm": 0.07568359375,
"learning_rate": 1.6983609738782993e-05,
"loss": 0.917,
"step": 1050
},
{
"epoch": 0.33,
"grad_norm": 0.427734375,
"learning_rate": 1.6904982827576498e-05,
"loss": 0.9255,
"step": 1060
},
{
"epoch": 0.33,
"grad_norm": 0.08447265625,
"learning_rate": 1.6825531432186545e-05,
"loss": 0.9683,
"step": 1070
},
{
"epoch": 0.34,
"grad_norm": 0.054443359375,
"learning_rate": 1.674526503944611e-05,
"loss": 0.8988,
"step": 1080
},
{
"epoch": 0.34,
"grad_norm": 0.1357421875,
"learning_rate": 1.6664193233502314e-05,
"loss": 0.9114,
"step": 1090
},
{
"epoch": 0.34,
"grad_norm": 0.0556640625,
"learning_rate": 1.6582325694672032e-05,
"loss": 0.9341,
"step": 1100
},
{
"epoch": 0.35,
"grad_norm": 0.291015625,
"learning_rate": 1.6499672198285996e-05,
"loss": 0.9284,
"step": 1110
},
{
"epoch": 0.35,
"grad_norm": 0.076171875,
"learning_rate": 1.6416242613521612e-05,
"loss": 0.9461,
"step": 1120
},
{
"epoch": 0.35,
"grad_norm": 0.052978515625,
"learning_rate": 1.6332046902224518e-05,
"loss": 0.9011,
"step": 1130
},
{
"epoch": 0.36,
"grad_norm": 0.05322265625,
"learning_rate": 1.6247095117719106e-05,
"loss": 0.8884,
"step": 1140
},
{
"epoch": 0.36,
"grad_norm": 0.0498046875,
"learning_rate": 1.616139740360811e-05,
"loss": 0.911,
"step": 1150
},
{
"epoch": 0.36,
"grad_norm": 0.1767578125,
"learning_rate": 1.607496399256141e-05,
"loss": 0.9139,
"step": 1160
},
{
"epoch": 0.37,
"grad_norm": 0.0517578125,
"learning_rate": 1.5987805205094225e-05,
"loss": 0.9277,
"step": 1170
},
{
"epoch": 0.37,
"grad_norm": 0.0908203125,
"learning_rate": 1.5899931448334788e-05,
"loss": 0.8923,
"step": 1180
},
{
"epoch": 0.37,
"grad_norm": 1.5703125,
"learning_rate": 1.581135321478169e-05,
"loss": 0.9193,
"step": 1190
},
{
"epoch": 0.38,
"grad_norm": 0.052978515625,
"learning_rate": 1.5722081081051032e-05,
"loss": 0.8975,
"step": 1200
},
{
"epoch": 0.38,
"grad_norm": 0.0947265625,
"learning_rate": 1.5632125706613534e-05,
"loss": 0.9291,
"step": 1210
},
{
"epoch": 0.38,
"grad_norm": 0.054931640625,
"learning_rate": 1.554149783252175e-05,
"loss": 0.8887,
"step": 1220
},
{
"epoch": 0.38,
"grad_norm": 0.04736328125,
"learning_rate": 1.5450208280127543e-05,
"loss": 0.8927,
"step": 1230
},
{
"epoch": 0.39,
"grad_norm": 0.052978515625,
"learning_rate": 1.5358267949789968e-05,
"loss": 0.9219,
"step": 1240
},
{
"epoch": 0.39,
"grad_norm": 0.072265625,
"learning_rate": 1.526568781957371e-05,
"loss": 0.8972,
"step": 1250
},
{
"epoch": 0.39,
"grad_norm": 0.1259765625,
"learning_rate": 1.5172478943938288e-05,
"loss": 0.9107,
"step": 1260
},
{
"epoch": 0.4,
"grad_norm": 0.06005859375,
"learning_rate": 1.5078652452418063e-05,
"loss": 0.9162,
"step": 1270
},
{
"epoch": 0.4,
"grad_norm": 0.06494140625,
"learning_rate": 1.4984219548293361e-05,
"loss": 0.9022,
"step": 1280
},
{
"epoch": 0.4,
"grad_norm": 0.06201171875,
"learning_rate": 1.4889191507252743e-05,
"loss": 0.903,
"step": 1290
},
{
"epoch": 0.41,
"grad_norm": 0.05322265625,
"learning_rate": 1.479357967604663e-05,
"loss": 0.8873,
"step": 1300
},
{
"epoch": 0.41,
"grad_norm": 0.07421875,
"learning_rate": 1.469739547113246e-05,
"loss": 0.9103,
"step": 1310
},
{
"epoch": 0.41,
"grad_norm": 0.055908203125,
"learning_rate": 1.4600650377311523e-05,
"loss": 0.919,
"step": 1320
},
{
"epoch": 0.42,
"grad_norm": 0.08447265625,
"learning_rate": 1.450335594635761e-05,
"loss": 0.9349,
"step": 1330
},
{
"epoch": 0.42,
"grad_norm": 0.0908203125,
"learning_rate": 1.44055237956377e-05,
"loss": 0.8929,
"step": 1340
},
{
"epoch": 0.42,
"grad_norm": 0.09423828125,
"learning_rate": 1.4307165606724777e-05,
"loss": 0.9082,
"step": 1350
},
{
"epoch": 0.43,
"grad_norm": 0.052734375,
"learning_rate": 1.4208293124003028e-05,
"loss": 0.9079,
"step": 1360
},
{
"epoch": 0.43,
"grad_norm": 0.05517578125,
"learning_rate": 1.4108918153265485e-05,
"loss": 0.9074,
"step": 1370
},
{
"epoch": 0.43,
"grad_norm": 0.054931640625,
"learning_rate": 1.400905256030438e-05,
"loss": 0.8795,
"step": 1380
},
{
"epoch": 0.44,
"grad_norm": 0.072265625,
"learning_rate": 1.3908708269494318e-05,
"loss": 0.9201,
"step": 1390
},
{
"epoch": 0.44,
"grad_norm": 0.1171875,
"learning_rate": 1.3807897262368453e-05,
"loss": 0.9424,
"step": 1400
},
{
"epoch": 0.44,
"grad_norm": 0.056640625,
"learning_rate": 1.3706631576187842e-05,
"loss": 0.9165,
"step": 1410
},
{
"epoch": 0.44,
"grad_norm": 0.05224609375,
"learning_rate": 1.3604923302504146e-05,
"loss": 0.9164,
"step": 1420
},
{
"epoch": 0.45,
"grad_norm": 0.2021484375,
"learning_rate": 1.3502784585715853e-05,
"loss": 0.9122,
"step": 1430
},
{
"epoch": 0.45,
"grad_norm": 0.050537109375,
"learning_rate": 1.340022762161817e-05,
"loss": 0.939,
"step": 1440
},
{
"epoch": 0.45,
"grad_norm": 0.058837890625,
"learning_rate": 1.3297264655946816e-05,
"loss": 0.9257,
"step": 1450
},
{
"epoch": 0.46,
"grad_norm": 0.052734375,
"learning_rate": 1.31939079829158e-05,
"loss": 0.9182,
"step": 1460
},
{
"epoch": 0.46,
"grad_norm": 0.09130859375,
"learning_rate": 1.3090169943749475e-05,
"loss": 0.9062,
"step": 1470
},
{
"epoch": 0.46,
"grad_norm": 0.2041015625,
"learning_rate": 1.2986062925208913e-05,
"loss": 0.9107,
"step": 1480
},
{
"epoch": 0.47,
"grad_norm": 0.06689453125,
"learning_rate": 1.2881599358112888e-05,
"loss": 0.931,
"step": 1490
},
{
"epoch": 0.47,
"grad_norm": 0.08251953125,
"learning_rate": 1.2776791715853585e-05,
"loss": 0.9473,
"step": 1500
},
{
"epoch": 0.47,
"grad_norm": 0.10009765625,
"learning_rate": 1.2671652512907213e-05,
"loss": 0.917,
"step": 1510
},
{
"epoch": 0.48,
"grad_norm": 0.07080078125,
"learning_rate": 1.2566194303339738e-05,
"loss": 0.9566,
"step": 1520
},
{
"epoch": 0.48,
"grad_norm": 0.0615234375,
"learning_rate": 1.2460429679307863e-05,
"loss": 0.8839,
"step": 1530
},
{
"epoch": 0.48,
"grad_norm": 0.04931640625,
"learning_rate": 1.2354371269555478e-05,
"loss": 0.9068,
"step": 1540
},
{
"epoch": 0.49,
"grad_norm": 0.0947265625,
"learning_rate": 1.2248031737905732e-05,
"loss": 0.921,
"step": 1550
},
{
"epoch": 0.49,
"grad_norm": 0.10400390625,
"learning_rate": 1.2141423781748913e-05,
"loss": 0.9177,
"step": 1560
},
{
"epoch": 0.49,
"grad_norm": 0.259765625,
"learning_rate": 1.2034560130526341e-05,
"loss": 0.9099,
"step": 1570
},
{
"epoch": 0.49,
"grad_norm": 0.060546875,
"learning_rate": 1.1927453544210397e-05,
"loss": 0.9166,
"step": 1580
},
{
"epoch": 0.5,
"grad_norm": 0.10498046875,
"learning_rate": 1.182011681178095e-05,
"loss": 0.9022,
"step": 1590
},
{
"epoch": 0.5,
"grad_norm": 0.109375,
"learning_rate": 1.171256274969829e-05,
"loss": 0.8913,
"step": 1600
},
{
"epoch": 0.5,
"grad_norm": 0.054931640625,
"learning_rate": 1.1604804200372786e-05,
"loss": 0.9166,
"step": 1610
},
{
"epoch": 0.51,
"grad_norm": 0.06591796875,
"learning_rate": 1.1496854030631443e-05,
"loss": 0.9539,
"step": 1620
},
{
"epoch": 0.51,
"grad_norm": 0.0478515625,
"learning_rate": 1.1388725130181566e-05,
"loss": 0.8543,
"step": 1630
},
{
"epoch": 0.51,
"grad_norm": 0.10546875,
"learning_rate": 1.1280430410071652e-05,
"loss": 0.8803,
"step": 1640
},
{
"epoch": 0.52,
"grad_norm": 0.07470703125,
"learning_rate": 1.1171982801149774e-05,
"loss": 0.8952,
"step": 1650
},
{
"epoch": 0.52,
"grad_norm": 0.055419921875,
"learning_rate": 1.106339525251958e-05,
"loss": 0.8931,
"step": 1660
},
{
"epoch": 0.52,
"grad_norm": 0.1376953125,
"learning_rate": 1.0954680729994103e-05,
"loss": 0.9036,
"step": 1670
},
{
"epoch": 0.53,
"grad_norm": 0.059814453125,
"learning_rate": 1.08458522145476e-05,
"loss": 0.9455,
"step": 1680
},
{
"epoch": 0.53,
"grad_norm": 0.087890625,
"learning_rate": 1.073692270076557e-05,
"loss": 0.9097,
"step": 1690
},
{
"epoch": 0.53,
"grad_norm": 0.05029296875,
"learning_rate": 1.0627905195293135e-05,
"loss": 0.9132,
"step": 1700
},
{
"epoch": 0.54,
"grad_norm": 0.049560546875,
"learning_rate": 1.0518812715282001e-05,
"loss": 0.8866,
"step": 1710
},
{
"epoch": 0.54,
"grad_norm": 0.06494140625,
"learning_rate": 1.0409658286836144e-05,
"loss": 0.9258,
"step": 1720
},
{
"epoch": 0.54,
"grad_norm": 0.07177734375,
"learning_rate": 1.0300454943456457e-05,
"loss": 0.8964,
"step": 1730
},
{
"epoch": 0.54,
"grad_norm": 0.06005859375,
"learning_rate": 1.0191215724484476e-05,
"loss": 0.9028,
"step": 1740
},
{
"epoch": 0.55,
"grad_norm": 0.0712890625,
"learning_rate": 1.0081953673545432e-05,
"loss": 0.9022,
"step": 1750
},
{
"epoch": 0.55,
"grad_norm": 0.10888671875,
"learning_rate": 9.9726818369908e-06,
"loss": 0.9018,
"step": 1760
},
{
"epoch": 0.55,
"grad_norm": 0.050537109375,
"learning_rate": 9.863413262340491e-06,
"loss": 0.9033,
"step": 1770
},
{
"epoch": 0.56,
"grad_norm": 0.1884765625,
"learning_rate": 9.754160996724927e-06,
"loss": 0.8838,
"step": 1780
},
{
"epoch": 0.56,
"grad_norm": 0.062255859375,
"learning_rate": 9.644938085327174e-06,
"loss": 0.9077,
"step": 1790
},
{
"epoch": 0.56,
"grad_norm": 0.1064453125,
"learning_rate": 9.535757569825266e-06,
"loss": 0.9251,
"step": 1800
},
{
"epoch": 0.57,
"grad_norm": 0.33203125,
"learning_rate": 9.426632486834998e-06,
"loss": 0.908,
"step": 1810
},
{
"epoch": 0.57,
"grad_norm": 0.054443359375,
"learning_rate": 9.317575866353293e-06,
"loss": 0.9015,
"step": 1820
},
{
"epoch": 0.57,
"grad_norm": 0.2294921875,
"learning_rate": 9.20860073020234e-06,
"loss": 0.9201,
"step": 1830
},
{
"epoch": 0.58,
"grad_norm": 0.55078125,
"learning_rate": 9.099720090474779e-06,
"loss": 0.9151,
"step": 1840
},
{
"epoch": 0.58,
"grad_norm": 0.064453125,
"learning_rate": 8.990946947979955e-06,
"loss": 0.9088,
"step": 1850
},
{
"epoch": 0.58,
"grad_norm": 0.06787109375,
"learning_rate": 8.882294290691609e-06,
"loss": 0.9182,
"step": 1860
},
{
"epoch": 0.59,
"grad_norm": 0.234375,
"learning_rate": 8.773775092197018e-06,
"loss": 0.905,
"step": 1870
},
{
"epoch": 0.59,
"grad_norm": 0.17578125,
"learning_rate": 8.665402310147924e-06,
"loss": 0.8986,
"step": 1880
},
{
"epoch": 0.59,
"grad_norm": 0.06103515625,
"learning_rate": 8.557188884713334e-06,
"loss": 0.9175,
"step": 1890
},
{
"epoch": 0.59,
"grad_norm": 0.0517578125,
"learning_rate": 8.44914773703438e-06,
"loss": 0.926,
"step": 1900
},
{
"epoch": 0.6,
"grad_norm": 0.1337890625,
"learning_rate": 8.341291767681523e-06,
"loss": 0.9322,
"step": 1910
},
{
"epoch": 0.6,
"grad_norm": 0.177734375,
"learning_rate": 8.233633855114127e-06,
"loss": 0.9372,
"step": 1920
},
{
"epoch": 0.6,
"grad_norm": 0.051025390625,
"learning_rate": 8.126186854142752e-06,
"loss": 0.9285,
"step": 1930
},
{
"epoch": 0.61,
"grad_norm": 0.07177734375,
"learning_rate": 8.018963594394221e-06,
"loss": 0.916,
"step": 1940
},
{
"epoch": 0.61,
"grad_norm": 0.07373046875,
"learning_rate": 7.911976878779696e-06,
"loss": 0.9359,
"step": 1950
},
{
"epoch": 0.61,
"grad_norm": 0.0712890625,
"learning_rate": 7.805239481965976e-06,
"loss": 0.8861,
"step": 1960
},
{
"epoch": 0.62,
"grad_norm": 0.12890625,
"learning_rate": 7.698764148850138e-06,
"loss": 0.9203,
"step": 1970
},
{
"epoch": 0.62,
"grad_norm": 0.052001953125,
"learning_rate": 7.592563593037746e-06,
"loss": 0.9094,
"step": 1980
},
{
"epoch": 0.62,
"grad_norm": 0.08544921875,
"learning_rate": 7.486650495324783e-06,
"loss": 0.9085,
"step": 1990
},
{
"epoch": 0.63,
"grad_norm": 0.049560546875,
"learning_rate": 7.3810375021835275e-06,
"loss": 0.8981,
"step": 2000
},
{
"epoch": 0.63,
"grad_norm": 0.06005859375,
"learning_rate": 7.275737224252504e-06,
"loss": 0.9005,
"step": 2010
},
{
"epoch": 0.63,
"grad_norm": 0.11181640625,
"learning_rate": 7.1707622348307e-06,
"loss": 0.881,
"step": 2020
},
{
"epoch": 0.64,
"grad_norm": 0.05615234375,
"learning_rate": 7.066125068376297e-06,
"loss": 0.9161,
"step": 2030
},
{
"epoch": 0.64,
"grad_norm": 0.1630859375,
"learning_rate": 6.961838219009968e-06,
"loss": 0.9142,
"step": 2040
},
{
"epoch": 0.64,
"grad_norm": 0.051513671875,
"learning_rate": 6.857914139023058e-06,
"loss": 0.8743,
"step": 2050
},
{
"epoch": 0.64,
"grad_norm": 0.06298828125,
"learning_rate": 6.7543652373906966e-06,
"loss": 0.9011,
"step": 2060
},
{
"epoch": 0.65,
"grad_norm": 0.0556640625,
"learning_rate": 6.651203878290139e-06,
"loss": 0.8941,
"step": 2070
},
{
"epoch": 0.65,
"grad_norm": 0.060791015625,
"learning_rate": 6.548442379624425e-06,
"loss": 0.9323,
"step": 2080
},
{
"epoch": 0.65,
"grad_norm": 0.0634765625,
"learning_rate": 6.446093011551551e-06,
"loss": 0.9249,
"step": 2090
},
{
"epoch": 0.66,
"grad_norm": 0.09130859375,
"learning_rate": 6.344167995019395e-06,
"loss": 0.9195,
"step": 2100
},
{
"epoch": 0.66,
"grad_norm": 0.10595703125,
"learning_rate": 6.242679500306443e-06,
"loss": 0.9248,
"step": 2110
},
{
"epoch": 0.66,
"grad_norm": 0.12255859375,
"learning_rate": 6.141639645568646e-06,
"loss": 0.8919,
"step": 2120
},
{
"epoch": 0.67,
"grad_norm": 0.10400390625,
"learning_rate": 6.041060495392437e-06,
"loss": 0.9251,
"step": 2130
},
{
"epoch": 0.67,
"grad_norm": 0.09716796875,
"learning_rate": 5.940954059354165e-06,
"loss": 0.9018,
"step": 2140
},
{
"epoch": 0.67,
"grad_norm": 0.07568359375,
"learning_rate": 5.841332290586126e-06,
"loss": 0.9205,
"step": 2150
},
{
"epoch": 0.68,
"grad_norm": 0.0546875,
"learning_rate": 5.742207084349274e-06,
"loss": 0.9046,
"step": 2160
},
{
"epoch": 0.68,
"grad_norm": 0.06689453125,
"learning_rate": 5.643590276612909e-06,
"loss": 0.8988,
"step": 2170
},
{
"epoch": 0.68,
"grad_norm": 0.05126953125,
"learning_rate": 5.545493642641389e-06,
"loss": 0.8806,
"step": 2180
},
{
"epoch": 0.69,
"grad_norm": 0.0615234375,
"learning_rate": 5.447928895588128e-06,
"loss": 0.8878,
"step": 2190
},
{
"epoch": 0.69,
"grad_norm": 0.08544921875,
"learning_rate": 5.350907685096983e-06,
"loss": 0.8997,
"step": 2200
},
{
"epoch": 0.69,
"grad_norm": 0.05810546875,
"learning_rate": 5.254441595911255e-06,
"loss": 0.9127,
"step": 2210
},
{
"epoch": 0.69,
"grad_norm": 0.05419921875,
"learning_rate": 5.1585421464904e-06,
"loss": 0.8924,
"step": 2220
},
{
"epoch": 0.7,
"grad_norm": 0.0615234375,
"learning_rate": 5.063220787634686e-06,
"loss": 0.9403,
"step": 2230
},
{
"epoch": 0.7,
"grad_norm": 0.053955078125,
"learning_rate": 4.9684889011179335e-06,
"loss": 0.8869,
"step": 2240
},
{
"epoch": 0.7,
"grad_norm": 0.07275390625,
"learning_rate": 4.874357798328464e-06,
"loss": 0.9012,
"step": 2250
},
{
"epoch": 0.71,
"grad_norm": 0.050537109375,
"learning_rate": 4.780838718918467e-06,
"loss": 0.9102,
"step": 2260
},
{
"epoch": 0.71,
"grad_norm": 0.051025390625,
"learning_rate": 4.687942829461969e-06,
"loss": 0.8802,
"step": 2270
},
{
"epoch": 0.71,
"grad_norm": 0.05810546875,
"learning_rate": 4.595681222121458e-06,
"loss": 0.9363,
"step": 2280
},
{
"epoch": 0.72,
"grad_norm": 0.181640625,
"learning_rate": 4.504064913323472e-06,
"loss": 0.9008,
"step": 2290
},
{
"epoch": 0.72,
"grad_norm": 0.146484375,
"learning_rate": 4.413104842443149e-06,
"loss": 0.8748,
"step": 2300
},
{
"epoch": 0.72,
"grad_norm": 0.06689453125,
"learning_rate": 4.322811870498058e-06,
"loss": 0.8809,
"step": 2310
},
{
"epoch": 0.73,
"grad_norm": 0.06591796875,
"learning_rate": 4.2331967788513295e-06,
"loss": 0.9136,
"step": 2320
},
{
"epoch": 0.73,
"grad_norm": 0.115234375,
"learning_rate": 4.144270267924306e-06,
"loss": 0.9095,
"step": 2330
},
{
"epoch": 0.73,
"grad_norm": 0.1162109375,
"learning_rate": 4.05604295591889e-06,
"loss": 0.9077,
"step": 2340
},
{
"epoch": 0.74,
"grad_norm": 0.2177734375,
"learning_rate": 3.968525377549657e-06,
"loss": 0.9191,
"step": 2350
},
{
"epoch": 0.74,
"grad_norm": 0.06494140625,
"learning_rate": 3.881727982785999e-06,
"loss": 0.8953,
"step": 2360
},
{
"epoch": 0.74,
"grad_norm": 0.07568359375,
"learning_rate": 3.7956611356043196e-06,
"loss": 0.9204,
"step": 2370
},
{
"epoch": 0.74,
"grad_norm": 0.06884765625,
"learning_rate": 3.7103351127505616e-06,
"loss": 0.9004,
"step": 2380
},
{
"epoch": 0.75,
"grad_norm": 0.06884765625,
"learning_rate": 3.625760102513103e-06,
"loss": 0.9206,
"step": 2390
},
{
"epoch": 0.75,
"grad_norm": 0.259765625,
"learning_rate": 3.5419462035062313e-06,
"loss": 0.8932,
"step": 2400
},
{
"epoch": 0.75,
"grad_norm": 0.058837890625,
"learning_rate": 3.4589034234643315e-06,
"loss": 0.9136,
"step": 2410
},
{
"epoch": 0.76,
"grad_norm": 0.06494140625,
"learning_rate": 3.376641678046926e-06,
"loss": 0.8907,
"step": 2420
},
{
"epoch": 0.76,
"grad_norm": 0.05859375,
"learning_rate": 3.2951707896546858e-06,
"loss": 0.9091,
"step": 2430
},
{
"epoch": 0.76,
"grad_norm": 0.06689453125,
"learning_rate": 3.2145004862566185e-06,
"loss": 0.9153,
"step": 2440
},
{
"epoch": 0.77,
"grad_norm": 0.059326171875,
"learning_rate": 3.134640400228479e-06,
"loss": 0.9054,
"step": 2450
},
{
"epoch": 0.77,
"grad_norm": 0.06591796875,
"learning_rate": 3.055600067202652e-06,
"loss": 0.9191,
"step": 2460
},
{
"epoch": 0.77,
"grad_norm": 0.09326171875,
"learning_rate": 2.9773889249295294e-06,
"loss": 0.8951,
"step": 2470
},
{
"epoch": 0.78,
"grad_norm": 0.06298828125,
"learning_rate": 2.900016312150631e-06,
"loss": 0.9205,
"step": 2480
},
{
"epoch": 0.78,
"grad_norm": 0.0625,
"learning_rate": 2.8234914674834913e-06,
"loss": 0.9022,
"step": 2490
},
{
"epoch": 0.78,
"grad_norm": 0.15234375,
"learning_rate": 2.7478235283185504e-06,
"loss": 0.9253,
"step": 2500
},
{
"epoch": 0.79,
"grad_norm": 0.06591796875,
"learning_rate": 2.673021529728108e-06,
"loss": 0.905,
"step": 2510
},
{
"epoch": 0.79,
"grad_norm": 0.064453125,
"learning_rate": 2.599094403387481e-06,
"loss": 0.8965,
"step": 2520
},
{
"epoch": 0.79,
"grad_norm": 0.0517578125,
"learning_rate": 2.5260509765085474e-06,
"loss": 0.9029,
"step": 2530
},
{
"epoch": 0.79,
"grad_norm": 0.1513671875,
"learning_rate": 2.453899970785716e-06,
"loss": 0.8841,
"step": 2540
},
{
"epoch": 0.8,
"grad_norm": 0.0556640625,
"learning_rate": 2.382650001354543e-06,
"loss": 0.8932,
"step": 2550
},
{
"epoch": 0.8,
"grad_norm": 0.0771484375,
"learning_rate": 2.3123095757630344e-06,
"loss": 0.8846,
"step": 2560
},
{
"epoch": 0.8,
"grad_norm": 0.058349609375,
"learning_rate": 2.2428870929558012e-06,
"loss": 0.9301,
"step": 2570
},
{
"epoch": 0.81,
"grad_norm": 0.05712890625,
"learning_rate": 2.1743908422712135e-06,
"loss": 0.9335,
"step": 2580
},
{
"epoch": 0.81,
"grad_norm": 0.107421875,
"learning_rate": 2.1068290024515925e-06,
"loss": 0.9254,
"step": 2590
},
{
"epoch": 0.81,
"grad_norm": 0.1708984375,
"learning_rate": 2.0402096406666562e-06,
"loss": 0.9215,
"step": 2600
},
{
"epoch": 0.82,
"grad_norm": 0.053955078125,
"learning_rate": 1.974540711550248e-06,
"loss": 0.9133,
"step": 2610
},
{
"epoch": 0.82,
"grad_norm": 0.0986328125,
"learning_rate": 1.9098300562505266e-06,
"loss": 0.9071,
"step": 2620
},
{
"epoch": 0.82,
"grad_norm": 0.050537109375,
"learning_rate": 1.8460854014937068e-06,
"loss": 0.8938,
"step": 2630
},
{
"epoch": 0.83,
"grad_norm": 0.0634765625,
"learning_rate": 1.783314358661441e-06,
"loss": 0.9061,
"step": 2640
},
{
"epoch": 0.83,
"grad_norm": 0.0595703125,
"learning_rate": 1.7215244228820027e-06,
"loss": 0.8908,
"step": 2650
},
{
"epoch": 0.83,
"grad_norm": 0.09716796875,
"learning_rate": 1.6607229721353202e-06,
"loss": 0.9104,
"step": 2660
},
{
"epoch": 0.84,
"grad_norm": 0.06787109375,
"learning_rate": 1.6009172663720352e-06,
"loss": 0.9258,
"step": 2670
},
{
"epoch": 0.84,
"grad_norm": 0.08544921875,
"learning_rate": 1.5421144466466164e-06,
"loss": 0.8934,
"step": 2680
},
{
"epoch": 0.84,
"grad_norm": 0.09033203125,
"learning_rate": 1.4843215342646922e-06,
"loss": 0.9161,
"step": 2690
},
{
"epoch": 0.85,
"grad_norm": 0.138671875,
"learning_rate": 1.4275454299446834e-06,
"loss": 0.925,
"step": 2700
},
{
"epoch": 0.85,
"grad_norm": 0.0654296875,
"learning_rate": 1.3717929129938179e-06,
"loss": 0.9128,
"step": 2710
},
{
"epoch": 0.85,
"grad_norm": 0.056640625,
"learning_rate": 1.3170706404986645e-06,
"loss": 0.8888,
"step": 2720
},
{
"epoch": 0.85,
"grad_norm": 0.0751953125,
"learning_rate": 1.263385146530234e-06,
"loss": 0.9086,
"step": 2730
},
{
"epoch": 0.86,
"grad_norm": 0.05322265625,
"learning_rate": 1.2107428413637979e-06,
"loss": 0.9183,
"step": 2740
},
{
"epoch": 0.86,
"grad_norm": 0.072265625,
"learning_rate": 1.15915001071347e-06,
"loss": 0.8933,
"step": 2750
},
{
"epoch": 0.86,
"grad_norm": 0.05908203125,
"learning_rate": 1.1086128149816544e-06,
"loss": 0.91,
"step": 2760
},
{
"epoch": 0.87,
"grad_norm": 0.15625,
"learning_rate": 1.0591372885234885e-06,
"loss": 0.9167,
"step": 2770
},
{
"epoch": 0.87,
"grad_norm": 0.07373046875,
"learning_rate": 1.0107293389262918e-06,
"loss": 0.9172,
"step": 2780
},
{
"epoch": 0.87,
"grad_norm": 0.10498046875,
"learning_rate": 9.63394746304198e-07,
"loss": 0.89,
"step": 2790
},
{
"epoch": 0.88,
"grad_norm": 0.4140625,
"learning_rate": 9.171391626079629e-07,
"loss": 0.8683,
"step": 2800
},
{
"epoch": 0.88,
"grad_norm": 0.10205078125,
"learning_rate": 8.719681109501177e-07,
"loss": 0.9123,
"step": 2810
},
{
"epoch": 0.88,
"grad_norm": 0.0576171875,
"learning_rate": 8.278869849454718e-07,
"loss": 0.9419,
"step": 2820
},
{
"epoch": 0.89,
"grad_norm": 0.06005859375,
"learning_rate": 7.849010480670938e-07,
"loss": 0.9269,
"step": 2830
},
{
"epoch": 0.89,
"grad_norm": 0.12451171875,
"learning_rate": 7.43015433017844e-07,
"loss": 0.9286,
"step": 2840
},
{
"epoch": 0.89,
"grad_norm": 0.056884765625,
"learning_rate": 7.022351411174866e-07,
"loss": 0.9317,
"step": 2850
},
{
"epoch": 0.9,
"grad_norm": 0.06396484375,
"learning_rate": 6.625650417055296e-07,
"loss": 0.9374,
"step": 2860
},
{
"epoch": 0.9,
"grad_norm": 0.05419921875,
"learning_rate": 6.240098715597975e-07,
"loss": 0.9221,
"step": 2870
},
{
"epoch": 0.9,
"grad_norm": 0.0556640625,
"learning_rate": 5.865742343308345e-07,
"loss": 0.8889,
"step": 2880
},
{
"epoch": 0.9,
"grad_norm": 0.07666015625,
"learning_rate": 5.502625999922207e-07,
"loss": 0.9015,
"step": 2890
},
{
"epoch": 0.91,
"grad_norm": 0.0693359375,
"learning_rate": 5.150793043068269e-07,
"loss": 0.9012,
"step": 2900
},
{
"epoch": 0.91,
"grad_norm": 0.052001953125,
"learning_rate": 4.810285483091181e-07,
"loss": 0.8941,
"step": 2910
},
{
"epoch": 0.91,
"grad_norm": 0.083984375,
"learning_rate": 4.481143978035196e-07,
"loss": 0.9181,
"step": 2920
},
{
"epoch": 0.92,
"grad_norm": 0.060302734375,
"learning_rate": 4.163407828789523e-07,
"loss": 0.9184,
"step": 2930
},
{
"epoch": 0.92,
"grad_norm": 0.2578125,
"learning_rate": 3.857114974395604e-07,
"loss": 0.9085,
"step": 2940
},
{
"epoch": 0.92,
"grad_norm": 0.07470703125,
"learning_rate": 3.5623019875169916e-07,
"loss": 0.9153,
"step": 2950
},
{
"epoch": 0.93,
"grad_norm": 0.04736328125,
"learning_rate": 3.2790040700725114e-07,
"loss": 0.8967,
"step": 2960
},
{
"epoch": 0.93,
"grad_norm": 0.0810546875,
"learning_rate": 3.0072550490328754e-07,
"loss": 0.9105,
"step": 2970
},
{
"epoch": 0.93,
"grad_norm": 0.06298828125,
"learning_rate": 2.7470873723817405e-07,
"loss": 0.9336,
"step": 2980
},
{
"epoch": 0.94,
"grad_norm": 0.06640625,
"learning_rate": 2.498532105241158e-07,
"loss": 0.9182,
"step": 2990
},
{
"epoch": 0.94,
"grad_norm": 0.058349609375,
"learning_rate": 2.2616189261623568e-07,
"loss": 0.9152,
"step": 3000
},
{
"epoch": 0.94,
"grad_norm": 0.08935546875,
"learning_rate": 2.0363761235819402e-07,
"loss": 0.9392,
"step": 3010
},
{
"epoch": 0.95,
"grad_norm": 0.06591796875,
"learning_rate": 1.8228305924441469e-07,
"loss": 0.8772,
"step": 3020
},
{
"epoch": 0.95,
"grad_norm": 0.056396484375,
"learning_rate": 1.621007830989496e-07,
"loss": 0.9035,
"step": 3030
},
{
"epoch": 0.95,
"grad_norm": 0.0712890625,
"learning_rate": 1.430931937710156e-07,
"loss": 0.9264,
"step": 3040
},
{
"epoch": 0.95,
"grad_norm": 0.1015625,
"learning_rate": 1.2526256084725351e-07,
"loss": 0.9229,
"step": 3050
},
{
"epoch": 0.96,
"grad_norm": 0.07958984375,
"learning_rate": 1.0861101338072499e-07,
"loss": 0.8886,
"step": 3060
},
{
"epoch": 0.96,
"grad_norm": 0.05810546875,
"learning_rate": 9.314053963669245e-08,
"loss": 0.9022,
"step": 3070
},
{
"epoch": 0.96,
"grad_norm": 0.053466796875,
"learning_rate": 7.885298685522235e-08,
"loss": 0.902,
"step": 3080
},
{
"epoch": 0.97,
"grad_norm": 0.056884765625,
"learning_rate": 6.575006103060388e-08,
"loss": 0.9356,
"step": 3090
},
{
"epoch": 0.97,
"grad_norm": 0.058349609375,
"learning_rate": 5.3833326707656284e-08,
"loss": 0.9262,
"step": 3100
},
{
"epoch": 0.97,
"grad_norm": 0.078125,
"learning_rate": 4.310420679490945e-08,
"loss": 0.8869,
"step": 3110
},
{
"epoch": 0.98,
"grad_norm": 0.07080078125,
"learning_rate": 3.356398239470427e-08,
"loss": 0.9479,
"step": 3120
},
{
"epoch": 0.98,
"grad_norm": 0.050537109375,
"learning_rate": 2.5213792650227165e-08,
"loss": 0.8915,
"step": 3130
},
{
"epoch": 0.98,
"grad_norm": 0.0888671875,
"learning_rate": 1.8054634609484534e-08,
"loss": 0.9069,
"step": 3140
},
{
"epoch": 0.99,
"grad_norm": 0.0634765625,
"learning_rate": 1.20873631062568e-08,
"loss": 0.8806,
"step": 3150
},
{
"epoch": 0.99,
"grad_norm": 0.3671875,
"learning_rate": 7.312690658024535e-09,
"loss": 0.9192,
"step": 3160
},
{
"epoch": 0.99,
"grad_norm": 0.11865234375,
"learning_rate": 3.731187380893176e-09,
"loss": 0.8848,
"step": 3170
},
{
"epoch": 1.0,
"grad_norm": 0.1455078125,
"learning_rate": 1.343280921518586e-09,
"loss": 0.9189,
"step": 3180
},
{
"epoch": 1.0,
"grad_norm": 0.232421875,
"learning_rate": 1.4925640603902226e-10,
"loss": 0.9215,
"step": 3190
}
],
"logging_steps": 10,
"max_steps": 3195,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 9.706671003244954e+17,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}