gemma-7b-it-AskDocsEmpathy4k / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8,
"eval_steps": 500,
"global_step": 4000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 2.296875,
"learning_rate": 8.333333333333334e-06,
"loss": 6.02,
"step": 25
},
{
"epoch": 0.01,
"grad_norm": 0.63671875,
"learning_rate": 1.6666666666666667e-05,
"loss": 5.3902,
"step": 50
},
{
"epoch": 0.01,
"grad_norm": 1.2734375,
"learning_rate": 2.5e-05,
"loss": 4.6611,
"step": 75
},
{
"epoch": 0.02,
"grad_norm": 7.71875,
"learning_rate": 3.3333333333333335e-05,
"loss": 3.8627,
"step": 100
},
{
"epoch": 0.03,
"grad_norm": 0.5625,
"learning_rate": 4.166666666666667e-05,
"loss": 2.3529,
"step": 125
},
{
"epoch": 0.03,
"grad_norm": 0.53515625,
"learning_rate": 5e-05,
"loss": 1.4613,
"step": 150
},
{
"epoch": 0.04,
"grad_norm": 0.4453125,
"learning_rate": 4.999672209164081e-05,
"loss": 1.3771,
"step": 175
},
{
"epoch": 0.04,
"grad_norm": 0.5546875,
"learning_rate": 4.998688922613788e-05,
"loss": 1.3205,
"step": 200
},
{
"epoch": 0.04,
"grad_norm": 0.73046875,
"learning_rate": 4.997050398198977e-05,
"loss": 1.2926,
"step": 225
},
{
"epoch": 0.05,
"grad_norm": 0.50390625,
"learning_rate": 4.9947570655942796e-05,
"loss": 1.2837,
"step": 250
},
{
"epoch": 0.06,
"grad_norm": 0.5703125,
"learning_rate": 4.991809526186424e-05,
"loss": 1.3392,
"step": 275
},
{
"epoch": 0.06,
"grad_norm": 0.46484375,
"learning_rate": 4.988208552916535e-05,
"loss": 1.187,
"step": 300
},
{
"epoch": 0.07,
"grad_norm": 0.447265625,
"learning_rate": 4.983955090077444e-05,
"loss": 1.2047,
"step": 325
},
{
"epoch": 0.07,
"grad_norm": 0.494140625,
"learning_rate": 4.9790502530660635e-05,
"loss": 1.2062,
"step": 350
},
{
"epoch": 0.07,
"grad_norm": 0.55859375,
"learning_rate": 4.9734953280908904e-05,
"loss": 1.248,
"step": 375
},
{
"epoch": 0.08,
"grad_norm": 0.82421875,
"learning_rate": 4.967291771834727e-05,
"loss": 1.225,
"step": 400
},
{
"epoch": 0.09,
"grad_norm": 0.484375,
"learning_rate": 4.960441211072686e-05,
"loss": 1.2077,
"step": 425
},
{
"epoch": 0.09,
"grad_norm": 0.5078125,
"learning_rate": 4.9529454422455976e-05,
"loss": 1.2046,
"step": 450
},
{
"epoch": 0.1,
"grad_norm": 0.404296875,
"learning_rate": 4.944806430988927e-05,
"loss": 1.2055,
"step": 475
},
{
"epoch": 0.1,
"grad_norm": 0.546875,
"learning_rate": 4.936026311617316e-05,
"loss": 1.2438,
"step": 500
},
{
"epoch": 0.1,
"grad_norm": 0.462890625,
"learning_rate": 4.926607386564898e-05,
"loss": 1.2093,
"step": 525
},
{
"epoch": 0.11,
"grad_norm": 0.50390625,
"learning_rate": 4.916552125781528e-05,
"loss": 1.1535,
"step": 550
},
{
"epoch": 0.12,
"grad_norm": 0.53125,
"learning_rate": 4.9058631660850765e-05,
"loss": 1.2239,
"step": 575
},
{
"epoch": 0.12,
"grad_norm": 0.50390625,
"learning_rate": 4.894543310469968e-05,
"loss": 1.1454,
"step": 600
},
{
"epoch": 0.12,
"grad_norm": 0.61328125,
"learning_rate": 4.882595527372152e-05,
"loss": 1.1234,
"step": 625
},
{
"epoch": 0.13,
"grad_norm": 0.486328125,
"learning_rate": 4.870022949890676e-05,
"loss": 1.1888,
"step": 650
},
{
"epoch": 0.14,
"grad_norm": 0.57421875,
"learning_rate": 4.856828874966086e-05,
"loss": 1.1476,
"step": 675
},
{
"epoch": 0.14,
"grad_norm": 0.435546875,
"learning_rate": 4.8430167625158595e-05,
"loss": 1.2208,
"step": 700
},
{
"epoch": 0.14,
"grad_norm": 0.49609375,
"learning_rate": 4.828590234527106e-05,
"loss": 1.16,
"step": 725
},
{
"epoch": 0.15,
"grad_norm": 0.40625,
"learning_rate": 4.813553074106761e-05,
"loss": 1.1312,
"step": 750
},
{
"epoch": 0.15,
"grad_norm": 0.53125,
"learning_rate": 4.7979092244895305e-05,
"loss": 1.1732,
"step": 775
},
{
"epoch": 0.16,
"grad_norm": 0.498046875,
"learning_rate": 4.781662788003851e-05,
"loss": 1.1404,
"step": 800
},
{
"epoch": 0.17,
"grad_norm": 0.484375,
"learning_rate": 4.764818024996117e-05,
"loss": 1.1498,
"step": 825
},
{
"epoch": 0.17,
"grad_norm": 0.5859375,
"learning_rate": 4.747379352713489e-05,
"loss": 1.148,
"step": 850
},
{
"epoch": 0.17,
"grad_norm": 0.5234375,
"learning_rate": 4.7293513441455364e-05,
"loss": 1.1245,
"step": 875
},
{
"epoch": 0.18,
"grad_norm": 0.49609375,
"learning_rate": 4.710738726825059e-05,
"loss": 1.0975,
"step": 900
},
{
"epoch": 0.18,
"grad_norm": 0.5390625,
"learning_rate": 4.69154638158837e-05,
"loss": 1.1204,
"step": 925
},
{
"epoch": 0.19,
"grad_norm": 0.55078125,
"learning_rate": 4.671779341295378e-05,
"loss": 1.1291,
"step": 950
},
{
"epoch": 0.2,
"grad_norm": 0.46484375,
"learning_rate": 4.6514427895098134e-05,
"loss": 1.1382,
"step": 975
},
{
"epoch": 0.2,
"grad_norm": 0.65625,
"learning_rate": 4.630542059139924e-05,
"loss": 1.1861,
"step": 1000
},
{
"epoch": 0.2,
"grad_norm": 0.73046875,
"learning_rate": 4.6090826310400116e-05,
"loss": 1.1358,
"step": 1025
},
{
"epoch": 0.21,
"grad_norm": 0.57421875,
"learning_rate": 4.587070132573178e-05,
"loss": 1.115,
"step": 1050
},
{
"epoch": 0.21,
"grad_norm": 0.69921875,
"learning_rate": 4.5645103361356415e-05,
"loss": 1.0639,
"step": 1075
},
{
"epoch": 0.22,
"grad_norm": 0.51953125,
"learning_rate": 4.541409157643027e-05,
"loss": 1.1185,
"step": 1100
},
{
"epoch": 0.23,
"grad_norm": 0.65234375,
"learning_rate": 4.517772654979023e-05,
"loss": 1.1038,
"step": 1125
},
{
"epoch": 0.23,
"grad_norm": 0.498046875,
"learning_rate": 4.493607026406802e-05,
"loss": 1.0626,
"step": 1150
},
{
"epoch": 0.23,
"grad_norm": 0.46484375,
"learning_rate": 4.4689186089436366e-05,
"loss": 1.0829,
"step": 1175
},
{
"epoch": 0.24,
"grad_norm": 0.53515625,
"learning_rate": 4.443713876699124e-05,
"loss": 1.1554,
"step": 1200
},
{
"epoch": 0.24,
"grad_norm": 0.486328125,
"learning_rate": 4.417999439177466e-05,
"loss": 1.0713,
"step": 1225
},
{
"epoch": 0.25,
"grad_norm": 0.5078125,
"learning_rate": 4.391782039544238e-05,
"loss": 1.0986,
"step": 1250
},
{
"epoch": 0.26,
"grad_norm": 0.474609375,
"learning_rate": 4.365068552858115e-05,
"loss": 1.0185,
"step": 1275
},
{
"epoch": 0.26,
"grad_norm": 0.53515625,
"learning_rate": 4.337865984268001e-05,
"loss": 1.0955,
"step": 1300
},
{
"epoch": 0.27,
"grad_norm": 0.58984375,
"learning_rate": 4.3101814671760546e-05,
"loss": 1.0747,
"step": 1325
},
{
"epoch": 0.27,
"grad_norm": 0.53515625,
"learning_rate": 4.2820222613670736e-05,
"loss": 1.0395,
"step": 1350
},
{
"epoch": 0.28,
"grad_norm": 0.63671875,
"learning_rate": 4.253395751104748e-05,
"loss": 1.0798,
"step": 1375
},
{
"epoch": 0.28,
"grad_norm": 0.828125,
"learning_rate": 4.224309443195261e-05,
"loss": 1.154,
"step": 1400
},
{
"epoch": 0.28,
"grad_norm": 0.6015625,
"learning_rate": 4.194770965018758e-05,
"loss": 1.0445,
"step": 1425
},
{
"epoch": 0.29,
"grad_norm": 0.5859375,
"learning_rate": 4.164788062529203e-05,
"loss": 1.0651,
"step": 1450
},
{
"epoch": 0.29,
"grad_norm": 0.5,
"learning_rate": 4.134368598223132e-05,
"loss": 1.1505,
"step": 1475
},
{
"epoch": 0.3,
"grad_norm": 0.52734375,
"learning_rate": 4.10352054907785e-05,
"loss": 1.085,
"step": 1500
},
{
"epoch": 0.3,
"grad_norm": 0.53515625,
"learning_rate": 4.072252004459611e-05,
"loss": 1.095,
"step": 1525
},
{
"epoch": 0.31,
"grad_norm": 0.8046875,
"learning_rate": 4.0405711640023186e-05,
"loss": 1.0373,
"step": 1550
},
{
"epoch": 0.32,
"grad_norm": 0.6015625,
"learning_rate": 4.008486335457312e-05,
"loss": 1.0507,
"step": 1575
},
{
"epoch": 0.32,
"grad_norm": 0.578125,
"learning_rate": 3.976005932514807e-05,
"loss": 1.0545,
"step": 1600
},
{
"epoch": 0.33,
"grad_norm": 0.65625,
"learning_rate": 3.943138472597549e-05,
"loss": 1.0342,
"step": 1625
},
{
"epoch": 0.33,
"grad_norm": 0.609375,
"learning_rate": 3.909892574627266e-05,
"loss": 1.0948,
"step": 1650
},
{
"epoch": 0.34,
"grad_norm": 0.59765625,
"learning_rate": 3.876276956764509e-05,
"loss": 0.9903,
"step": 1675
},
{
"epoch": 0.34,
"grad_norm": 0.5625,
"learning_rate": 3.84230043412246e-05,
"loss": 1.058,
"step": 1700
},
{
"epoch": 0.34,
"grad_norm": 0.515625,
"learning_rate": 3.807971916455325e-05,
"loss": 0.9873,
"step": 1725
},
{
"epoch": 0.35,
"grad_norm": 0.55859375,
"learning_rate": 3.773300405821908e-05,
"loss": 1.0034,
"step": 1750
},
{
"epoch": 0.35,
"grad_norm": 0.60546875,
"learning_rate": 3.7382949942249694e-05,
"loss": 1.0152,
"step": 1775
},
{
"epoch": 0.36,
"grad_norm": 0.578125,
"learning_rate": 3.702964861227013e-05,
"loss": 1.0289,
"step": 1800
},
{
"epoch": 0.36,
"grad_norm": 0.65625,
"learning_rate": 3.6673192715431015e-05,
"loss": 0.9901,
"step": 1825
},
{
"epoch": 0.37,
"grad_norm": 0.63671875,
"learning_rate": 3.631367572611348e-05,
"loss": 0.9345,
"step": 1850
},
{
"epoch": 0.38,
"grad_norm": 0.671875,
"learning_rate": 3.595119192141706e-05,
"loss": 1.035,
"step": 1875
},
{
"epoch": 0.38,
"grad_norm": 0.6328125,
"learning_rate": 3.5585836356437264e-05,
"loss": 1.0689,
"step": 1900
},
{
"epoch": 0.39,
"grad_norm": 0.6015625,
"learning_rate": 3.521770483933891e-05,
"loss": 0.9883,
"step": 1925
},
{
"epoch": 0.39,
"grad_norm": 0.51953125,
"learning_rate": 3.484689390623218e-05,
"loss": 1.0279,
"step": 1950
},
{
"epoch": 0.4,
"grad_norm": 0.6484375,
"learning_rate": 3.447350079585767e-05,
"loss": 1.0602,
"step": 1975
},
{
"epoch": 0.4,
"grad_norm": 0.59375,
"learning_rate": 3.409762342408719e-05,
"loss": 1.0178,
"step": 2000
},
{
"epoch": 0.41,
"grad_norm": 0.6484375,
"learning_rate": 3.3719360358247054e-05,
"loss": 1.0243,
"step": 2025
},
{
"epoch": 0.41,
"grad_norm": 0.640625,
"learning_rate": 3.333881079127052e-05,
"loss": 1.0175,
"step": 2050
},
{
"epoch": 0.41,
"grad_norm": 0.69140625,
"learning_rate": 3.29560745156861e-05,
"loss": 0.921,
"step": 2075
},
{
"epoch": 0.42,
"grad_norm": 0.65234375,
"learning_rate": 3.2571251897448765e-05,
"loss": 0.9597,
"step": 2100
},
{
"epoch": 0.42,
"grad_norm": 0.51171875,
"learning_rate": 3.218444384962071e-05,
"loss": 0.9761,
"step": 2125
},
{
"epoch": 0.43,
"grad_norm": 0.640625,
"learning_rate": 3.1795751805908573e-05,
"loss": 0.9706,
"step": 2150
},
{
"epoch": 0.43,
"grad_norm": 0.6640625,
"learning_rate": 3.1405277694064305e-05,
"loss": 0.932,
"step": 2175
},
{
"epoch": 0.44,
"grad_norm": 0.6796875,
"learning_rate": 3.101312390915634e-05,
"loss": 0.9486,
"step": 2200
},
{
"epoch": 0.45,
"grad_norm": 0.69140625,
"learning_rate": 3.061939328671824e-05,
"loss": 1.011,
"step": 2225
},
{
"epoch": 0.45,
"grad_norm": 0.60546875,
"learning_rate": 3.0224189075781884e-05,
"loss": 0.9463,
"step": 2250
},
{
"epoch": 0.46,
"grad_norm": 0.6796875,
"learning_rate": 2.9827614911802203e-05,
"loss": 0.9989,
"step": 2275
},
{
"epoch": 0.46,
"grad_norm": 0.7109375,
"learning_rate": 2.9429774789480575e-05,
"loss": 0.9677,
"step": 2300
},
{
"epoch": 0.47,
"grad_norm": 0.62890625,
"learning_rate": 2.9030773035493997e-05,
"loss": 0.9739,
"step": 2325
},
{
"epoch": 0.47,
"grad_norm": 0.69140625,
"learning_rate": 2.863071428113726e-05,
"loss": 1.0254,
"step": 2350
},
{
"epoch": 0.47,
"grad_norm": 0.8203125,
"learning_rate": 2.8229703434885163e-05,
"loss": 0.9969,
"step": 2375
},
{
"epoch": 0.48,
"grad_norm": 0.56640625,
"learning_rate": 2.782784565488211e-05,
"loss": 0.9918,
"step": 2400
},
{
"epoch": 0.48,
"grad_norm": 0.58203125,
"learning_rate": 2.7425246321366203e-05,
"loss": 1.0169,
"step": 2425
},
{
"epoch": 0.49,
"grad_norm": 0.75,
"learning_rate": 2.7022011009035107e-05,
"loss": 0.9844,
"step": 2450
},
{
"epoch": 0.49,
"grad_norm": 0.7265625,
"learning_rate": 2.6618245459360897e-05,
"loss": 1.026,
"step": 2475
},
{
"epoch": 0.5,
"grad_norm": 0.671875,
"learning_rate": 2.621405555286121e-05,
"loss": 0.9333,
"step": 2500
},
{
"epoch": 0.51,
"grad_norm": 0.6875,
"learning_rate": 2.5809547281333902e-05,
"loss": 1.0336,
"step": 2525
},
{
"epoch": 0.51,
"grad_norm": 0.63671875,
"learning_rate": 2.540482672006254e-05,
"loss": 0.9375,
"step": 2550
},
{
"epoch": 0.52,
"grad_norm": 0.87109375,
"learning_rate": 2.5e-05,
"loss": 0.9441,
"step": 2575
},
{
"epoch": 0.52,
"grad_norm": 0.7265625,
"learning_rate": 2.4595173279937464e-05,
"loss": 0.9643,
"step": 2600
},
{
"epoch": 0.53,
"grad_norm": 0.67578125,
"learning_rate": 2.419045271866611e-05,
"loss": 0.9172,
"step": 2625
},
{
"epoch": 0.53,
"grad_norm": 0.87109375,
"learning_rate": 2.3785944447138802e-05,
"loss": 0.8813,
"step": 2650
},
{
"epoch": 0.54,
"grad_norm": 0.8046875,
"learning_rate": 2.338175454063911e-05,
"loss": 0.9638,
"step": 2675
},
{
"epoch": 0.54,
"grad_norm": 0.859375,
"learning_rate": 2.29779889909649e-05,
"loss": 0.9476,
"step": 2700
},
{
"epoch": 0.55,
"grad_norm": 0.6796875,
"learning_rate": 2.25747536786338e-05,
"loss": 0.9536,
"step": 2725
},
{
"epoch": 0.55,
"grad_norm": 0.77734375,
"learning_rate": 2.2172154345117894e-05,
"loss": 0.9308,
"step": 2750
},
{
"epoch": 0.56,
"grad_norm": 0.734375,
"learning_rate": 2.177029656511485e-05,
"loss": 0.9432,
"step": 2775
},
{
"epoch": 0.56,
"grad_norm": 0.66015625,
"learning_rate": 2.136928571886275e-05,
"loss": 0.9253,
"step": 2800
},
{
"epoch": 0.56,
"grad_norm": 0.69921875,
"learning_rate": 2.0969226964506006e-05,
"loss": 0.9301,
"step": 2825
},
{
"epoch": 0.57,
"grad_norm": 0.7578125,
"learning_rate": 2.0570225210519434e-05,
"loss": 0.9509,
"step": 2850
},
{
"epoch": 0.57,
"grad_norm": 0.7421875,
"learning_rate": 2.0172385088197803e-05,
"loss": 0.8995,
"step": 2875
},
{
"epoch": 0.58,
"grad_norm": 0.8125,
"learning_rate": 1.9775810924218125e-05,
"loss": 0.8682,
"step": 2900
},
{
"epoch": 0.58,
"grad_norm": 0.8203125,
"learning_rate": 1.9380606713281775e-05,
"loss": 0.9384,
"step": 2925
},
{
"epoch": 0.59,
"grad_norm": 0.76171875,
"learning_rate": 1.8986876090843667e-05,
"loss": 0.9545,
"step": 2950
},
{
"epoch": 0.59,
"grad_norm": 0.74609375,
"learning_rate": 1.859472230593569e-05,
"loss": 0.8595,
"step": 2975
},
{
"epoch": 0.6,
"grad_norm": 0.8125,
"learning_rate": 1.820424819409143e-05,
"loss": 0.9636,
"step": 3000
},
{
"epoch": 0.6,
"grad_norm": 0.75390625,
"learning_rate": 1.7815556150379298e-05,
"loss": 0.973,
"step": 3025
},
{
"epoch": 0.61,
"grad_norm": 0.73828125,
"learning_rate": 1.7428748102551237e-05,
"loss": 0.9182,
"step": 3050
},
{
"epoch": 0.61,
"grad_norm": 0.85546875,
"learning_rate": 1.704392548431391e-05,
"loss": 0.9419,
"step": 3075
},
{
"epoch": 0.62,
"grad_norm": 0.71484375,
"learning_rate": 1.666118920872949e-05,
"loss": 0.8826,
"step": 3100
},
{
"epoch": 0.62,
"grad_norm": 0.83203125,
"learning_rate": 1.6280639641752942e-05,
"loss": 0.8989,
"step": 3125
},
{
"epoch": 0.63,
"grad_norm": 0.8125,
"learning_rate": 1.5902376575912815e-05,
"loss": 0.8946,
"step": 3150
},
{
"epoch": 0.64,
"grad_norm": 0.7734375,
"learning_rate": 1.552649920414233e-05,
"loss": 0.8636,
"step": 3175
},
{
"epoch": 0.64,
"grad_norm": 0.91015625,
"learning_rate": 1.5153106093767827e-05,
"loss": 0.9318,
"step": 3200
},
{
"epoch": 0.65,
"grad_norm": 0.80078125,
"learning_rate": 1.4782295160661103e-05,
"loss": 0.9496,
"step": 3225
},
{
"epoch": 0.65,
"grad_norm": 0.734375,
"learning_rate": 1.4414163643562755e-05,
"loss": 0.8607,
"step": 3250
},
{
"epoch": 0.66,
"grad_norm": 0.7734375,
"learning_rate": 1.4048808078582942e-05,
"loss": 0.8367,
"step": 3275
},
{
"epoch": 0.66,
"grad_norm": 0.83203125,
"learning_rate": 1.368632427388653e-05,
"loss": 0.9584,
"step": 3300
},
{
"epoch": 0.67,
"grad_norm": 0.80859375,
"learning_rate": 1.3326807284568984e-05,
"loss": 0.8844,
"step": 3325
},
{
"epoch": 0.67,
"grad_norm": 0.87890625,
"learning_rate": 1.2970351387729873e-05,
"loss": 0.9125,
"step": 3350
},
{
"epoch": 0.68,
"grad_norm": 0.74609375,
"learning_rate": 1.2617050057750322e-05,
"loss": 0.9229,
"step": 3375
},
{
"epoch": 0.68,
"grad_norm": 0.80078125,
"learning_rate": 1.2266995941780934e-05,
"loss": 0.9541,
"step": 3400
},
{
"epoch": 0.69,
"grad_norm": 0.8359375,
"learning_rate": 1.1920280835446748e-05,
"loss": 0.8816,
"step": 3425
},
{
"epoch": 0.69,
"grad_norm": 0.82421875,
"learning_rate": 1.1576995658775405e-05,
"loss": 0.862,
"step": 3450
},
{
"epoch": 0.69,
"grad_norm": 0.80078125,
"learning_rate": 1.1237230432354912e-05,
"loss": 0.8541,
"step": 3475
},
{
"epoch": 0.7,
"grad_norm": 0.8203125,
"learning_rate": 1.0901074253727336e-05,
"loss": 0.8586,
"step": 3500
},
{
"epoch": 0.7,
"grad_norm": 0.7890625,
"learning_rate": 1.0568615274024522e-05,
"loss": 0.9033,
"step": 3525
},
{
"epoch": 0.71,
"grad_norm": 0.7890625,
"learning_rate": 1.0239940674851941e-05,
"loss": 0.8456,
"step": 3550
},
{
"epoch": 0.71,
"grad_norm": 0.91796875,
"learning_rate": 9.915136645426884e-06,
"loss": 0.9165,
"step": 3575
},
{
"epoch": 0.72,
"grad_norm": 0.80859375,
"learning_rate": 9.594288359976817e-06,
"loss": 0.8225,
"step": 3600
},
{
"epoch": 0.72,
"grad_norm": 0.85546875,
"learning_rate": 9.277479955403887e-06,
"loss": 0.815,
"step": 3625
},
{
"epoch": 0.73,
"grad_norm": 0.6640625,
"learning_rate": 8.964794509221508e-06,
"loss": 0.8517,
"step": 3650
},
{
"epoch": 0.73,
"grad_norm": 0.8125,
"learning_rate": 8.656314017768693e-06,
"loss": 0.9108,
"step": 3675
},
{
"epoch": 0.74,
"grad_norm": 0.8671875,
"learning_rate": 8.352119374707978e-06,
"loss": 0.8665,
"step": 3700
},
{
"epoch": 0.74,
"grad_norm": 0.8359375,
"learning_rate": 8.052290349812419e-06,
"loss": 0.875,
"step": 3725
},
{
"epoch": 0.75,
"grad_norm": 0.8359375,
"learning_rate": 7.756905568047393e-06,
"loss": 0.9181,
"step": 3750
},
{
"epoch": 0.76,
"grad_norm": 0.91015625,
"learning_rate": 7.466042488952521e-06,
"loss": 0.8715,
"step": 3775
},
{
"epoch": 0.76,
"grad_norm": 0.74609375,
"learning_rate": 7.179777386329276e-06,
"loss": 0.9051,
"step": 3800
},
{
"epoch": 0.77,
"grad_norm": 0.796875,
"learning_rate": 6.898185328239468e-06,
"loss": 0.871,
"step": 3825
},
{
"epoch": 0.77,
"grad_norm": 0.71875,
"learning_rate": 6.621340157319997e-06,
"loss": 0.8633,
"step": 3850
},
{
"epoch": 0.78,
"grad_norm": 0.98046875,
"learning_rate": 6.349314471418849e-06,
"loss": 0.8513,
"step": 3875
},
{
"epoch": 0.78,
"grad_norm": 0.81640625,
"learning_rate": 6.082179604557617e-06,
"loss": 0.8063,
"step": 3900
},
{
"epoch": 0.79,
"grad_norm": 0.80078125,
"learning_rate": 5.820005608225346e-06,
"loss": 0.8652,
"step": 3925
},
{
"epoch": 0.79,
"grad_norm": 0.81640625,
"learning_rate": 5.562861233008774e-06,
"loss": 0.872,
"step": 3950
},
{
"epoch": 0.8,
"grad_norm": 0.74609375,
"learning_rate": 5.310813910563644e-06,
"loss": 0.849,
"step": 3975
},
{
"epoch": 0.8,
"grad_norm": 0.79296875,
"learning_rate": 5.063929735931985e-06,
"loss": 0.8926,
"step": 4000
}
],
"logging_steps": 25,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 500,
"total_flos": 7.81641131753472e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
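
A minimal sketch of how the log_history above could be loaded and summarized with plain Python, assuming the checkpoint folder has been downloaded and this file sits locally as "trainer_state.json" (that path is an assumption, not something the checkpoint specifies).

# Sketch: read trainer_state.json and summarize the training log.
# Assumes the file path "trainer_state.json"; adjust to wherever the
# checkpoint folder was downloaded.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# Print every 10th logged point: step, learning rate, and training loss.
for entry in history[::10]:
    print(f'step {entry["step"]:>4}  lr {entry["learning_rate"]:.2e}  loss {entry["loss"]:.4f}')

# Rough check of overall progress: loss at the first and last logged steps.
print("first loss:", history[0]["loss"], "final loss:", history[-1]["loss"])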