{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 610,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "grad_norm": 5.009121417999268,
      "learning_rate": 4.9991711687857826e-05,
      "loss": 3.4699,
      "step": 5
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.122627019882202,
      "learning_rate": 4.9966852247120764e-05,
      "loss": 3.1968,
      "step": 10
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.5967676639556885,
      "learning_rate": 4.9925438161213164e-05,
      "loss": 3.0675,
      "step": 15
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.4438161849975586,
      "learning_rate": 4.9867496890364726e-05,
      "loss": 3.0402,
      "step": 20
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.6208345890045166,
      "learning_rate": 4.9793066853402536e-05,
      "loss": 3.0463,
      "step": 25
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.487919330596924,
      "learning_rate": 4.970219740227693e-05,
      "loss": 2.9922,
      "step": 30
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.4597654342651367,
      "learning_rate": 4.9594948789337914e-05,
      "loss": 2.9401,
      "step": 35
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.2669572830200195,
      "learning_rate": 4.947139212738395e-05,
      "loss": 2.913,
      "step": 40
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.3517422676086426,
      "learning_rate": 4.933160934250957e-05,
      "loss": 2.9039,
      "step": 45
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.067209243774414,
      "learning_rate": 4.9175693119783013e-05,
      "loss": 2.9188,
      "step": 50
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.5949714183807373,
      "learning_rate": 4.900374684179004e-05,
      "loss": 2.8791,
      "step": 55
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.3132359981536865,
      "learning_rate": 4.881588452008456e-05,
      "loss": 2.8602,
      "step": 60
    },
    {
      "epoch": 1.07,
      "grad_norm": 2.7244913578033447,
      "learning_rate": 4.861223071959153e-05,
      "loss": 2.6114,
      "step": 65
    },
    {
      "epoch": 1.15,
      "grad_norm": 2.426957607269287,
      "learning_rate": 4.839292047601234e-05,
      "loss": 2.5097,
      "step": 70
    },
    {
      "epoch": 1.23,
      "grad_norm": 2.518718957901001,
      "learning_rate": 4.815809920628738e-05,
      "loss": 2.497,
      "step": 75
    },
    {
      "epoch": 1.31,
      "grad_norm": 2.5183868408203125,
      "learning_rate": 4.790792261217512e-05,
      "loss": 2.4959,
      "step": 80
    },
    {
      "epoch": 1.39,
      "grad_norm": 2.5139663219451904,
      "learning_rate": 4.764255657701179e-05,
      "loss": 2.5037,
      "step": 85
    },
    {
      "epoch": 1.48,
      "grad_norm": 2.398991823196411,
      "learning_rate": 4.736217705571989e-05,
      "loss": 2.4973,
      "step": 90
    },
    {
      "epoch": 1.56,
      "grad_norm": 2.291883707046509,
      "learning_rate": 4.706696995813868e-05,
      "loss": 2.4948,
      "step": 95
    },
    {
      "epoch": 1.64,
      "grad_norm": 2.315034866333008,
      "learning_rate": 4.6757131025753886e-05,
      "loss": 2.5013,
      "step": 100
    },
    {
      "epoch": 1.72,
      "grad_norm": 2.419813394546509,
      "learning_rate": 4.643286570190831e-05,
      "loss": 2.4919,
      "step": 105
    },
    {
      "epoch": 1.8,
      "grad_norm": 2.408583402633667,
      "learning_rate": 4.609438899557964e-05,
      "loss": 2.4871,
      "step": 110
    },
    {
      "epoch": 1.89,
      "grad_norm": 2.269737958908081,
      "learning_rate": 4.5741925338815474e-05,
      "loss": 2.5035,
      "step": 115
    },
    {
      "epoch": 1.97,
      "grad_norm": 2.33317232131958,
      "learning_rate": 4.5375708437920284e-05,
      "loss": 2.4941,
      "step": 120
    },
    {
      "epoch": 2.05,
      "grad_norm": 2.799999475479126,
      "learning_rate": 4.499598111849299e-05,
      "loss": 2.2503,
      "step": 125
    },
    {
      "epoch": 2.13,
      "grad_norm": 2.7277634143829346,
      "learning_rate": 4.460299516441776e-05,
      "loss": 2.0722,
      "step": 130
    },
    {
      "epoch": 2.21,
      "grad_norm": 2.5570781230926514,
      "learning_rate": 4.4197011150915e-05,
      "loss": 2.0567,
      "step": 135
    },
    {
      "epoch": 2.3,
      "grad_norm": 2.5796124935150146,
      "learning_rate": 4.3778298271762995e-05,
      "loss": 2.0507,
      "step": 140
    },
    {
      "epoch": 2.38,
      "grad_norm": 2.4952049255371094,
      "learning_rate": 4.334713416080498e-05,
      "loss": 2.0694,
      "step": 145
    },
    {
      "epoch": 2.46,
      "grad_norm": 2.6838080883026123,
      "learning_rate": 4.2903804707859835e-05,
      "loss": 2.0711,
      "step": 150
    },
    {
      "epoch": 2.54,
      "grad_norm": 3.0429630279541016,
      "learning_rate": 4.2448603869158587e-05,
      "loss": 2.0782,
      "step": 155
    },
    {
      "epoch": 2.62,
      "grad_norm": 2.5743329524993896,
      "learning_rate": 4.198183347243233e-05,
      "loss": 2.0982,
      "step": 160
    },
    {
      "epoch": 2.7,
      "grad_norm": 2.6105706691741943,
      "learning_rate": 4.1503803016780796e-05,
      "loss": 2.1043,
      "step": 165
    },
    {
      "epoch": 2.79,
      "grad_norm": 2.4099245071411133,
      "learning_rate": 4.101482946745439e-05,
      "loss": 2.0925,
      "step": 170
    },
    {
      "epoch": 2.87,
      "grad_norm": 2.3642492294311523,
      "learning_rate": 4.051523704568557e-05,
      "loss": 2.1157,
      "step": 175
    },
    {
      "epoch": 2.95,
      "grad_norm": 2.626281499862671,
      "learning_rate": 4.000535701370921e-05,
      "loss": 2.1207,
      "step": 180
    },
    {
      "epoch": 3.03,
      "grad_norm": 4.121679306030273,
      "learning_rate": 3.948552745511409e-05,
      "loss": 1.9371,
      "step": 185
    },
    {
      "epoch": 3.11,
      "grad_norm": 4.904088020324707,
      "learning_rate": 3.895609305067162e-05,
      "loss": 1.6614,
      "step": 190
    },
    {
      "epoch": 3.2,
      "grad_norm": 3.118800401687622,
      "learning_rate": 3.841740484979002e-05,
      "loss": 1.6199,
      "step": 195
    },
    {
      "epoch": 3.28,
      "grad_norm": 3.1991629600524902,
      "learning_rate": 3.7869820037745776e-05,
      "loss": 1.6182,
      "step": 200
    },
    {
      "epoch": 3.36,
      "grad_norm": 2.901879072189331,
      "learning_rate": 3.731370169884662e-05,
      "loss": 1.613,
      "step": 205
    },
    {
      "epoch": 3.44,
      "grad_norm": 2.936681032180786,
      "learning_rate": 3.6749418575683e-05,
      "loss": 1.6146,
      "step": 210
    },
    {
      "epoch": 3.52,
      "grad_norm": 2.902355194091797,
      "learning_rate": 3.617734482462785e-05,
      "loss": 1.6147,
      "step": 215
    },
    {
      "epoch": 3.61,
      "grad_norm": 3.0887398719787598,
      "learning_rate": 3.5597859767746524e-05,
      "loss": 1.6139,
      "step": 220
    },
    {
      "epoch": 3.69,
      "grad_norm": 2.7664308547973633,
      "learning_rate": 3.501134764128167e-05,
      "loss": 1.6382,
      "step": 225
    },
    {
      "epoch": 3.77,
      "grad_norm": 3.130692720413208,
      "learning_rate": 3.4418197340879635e-05,
      "loss": 1.6468,
      "step": 230
    },
    {
      "epoch": 3.85,
      "grad_norm": 2.858790636062622,
      "learning_rate": 3.381880216372738e-05,
      "loss": 1.6518,
      "step": 235
    },
    {
      "epoch": 3.93,
      "grad_norm": 2.895685911178589,
      "learning_rate": 3.321355954777087e-05,
      "loss": 1.6574,
      "step": 240
    },
    {
      "epoch": 4.02,
      "grad_norm": 4.997915744781494,
      "learning_rate": 3.260287080818795e-05,
      "loss": 1.5766,
      "step": 245
    },
    {
      "epoch": 4.1,
      "grad_norm": 5.18801212310791,
      "learning_rate": 3.1987140871290236e-05,
      "loss": 1.2207,
      "step": 250
    },
    {
      "epoch": 4.18,
      "grad_norm": 3.488536834716797,
      "learning_rate": 3.136677800603072e-05,
      "loss": 1.202,
      "step": 255
    },
    {
      "epoch": 4.26,
      "grad_norm": 3.276165246963501,
      "learning_rate": 3.07421935532949e-05,
      "loss": 1.1862,
      "step": 260
    },
    {
      "epoch": 4.34,
      "grad_norm": 3.2313599586486816,
      "learning_rate": 3.0113801653155026e-05,
      "loss": 1.1969,
      "step": 265
    },
    {
      "epoch": 4.43,
      "grad_norm": 3.2551193237304688,
      "learning_rate": 2.9482018970268393e-05,
      "loss": 1.1731,
      "step": 270
    },
    {
      "epoch": 4.51,
      "grad_norm": 3.300238609313965,
      "learning_rate": 2.884726441760155e-05,
      "loss": 1.1747,
      "step": 275
    },
    {
      "epoch": 4.59,
      "grad_norm": 3.164844036102295,
      "learning_rate": 2.8209958878663778e-05,
      "loss": 1.1924,
      "step": 280
    },
    {
      "epoch": 4.67,
      "grad_norm": 3.359332799911499,
      "learning_rate": 2.757052492843401e-05,
      "loss": 1.1844,
      "step": 285
    },
    {
      "epoch": 4.75,
      "grad_norm": 3.1776442527770996,
      "learning_rate": 2.6929386553166164e-05,
      "loss": 1.1892,
      "step": 290
    },
    {
      "epoch": 4.84,
      "grad_norm": 3.2457070350646973,
      "learning_rate": 2.6286968869258665e-05,
      "loss": 1.2005,
      "step": 295
    },
    {
      "epoch": 4.92,
      "grad_norm": 3.3526594638824463,
      "learning_rate": 2.564369784137472e-05,
      "loss": 1.2167,
      "step": 300
    },
    {
      "epoch": 5.0,
      "grad_norm": 3.673542022705078,
      "learning_rate": 2.5e-05,
      "loss": 1.2292,
      "step": 305
    },
    {
      "epoch": 5.08,
      "grad_norm": 4.4025373458862305,
      "learning_rate": 2.4356302158625288e-05,
      "loss": 0.8431,
      "step": 310
    },
    {
      "epoch": 5.16,
      "grad_norm": 3.3656206130981445,
      "learning_rate": 2.3713031130741337e-05,
      "loss": 0.8032,
      "step": 315
    },
    {
      "epoch": 5.25,
      "grad_norm": 3.668022871017456,
      "learning_rate": 2.3070613446833842e-05,
      "loss": 0.8136,
      "step": 320
    },
    {
      "epoch": 5.33,
      "grad_norm": 3.4953699111938477,
      "learning_rate": 2.2429475071565987e-05,
      "loss": 0.813,
      "step": 325
    },
    {
      "epoch": 5.41,
      "grad_norm": 3.4234421253204346,
      "learning_rate": 2.1790041121336225e-05,
      "loss": 0.8105,
      "step": 330
    },
    {
      "epoch": 5.49,
      "grad_norm": 3.4309818744659424,
      "learning_rate": 2.1152735582398452e-05,
      "loss": 0.8095,
      "step": 335
    },
    {
      "epoch": 5.57,
      "grad_norm": 3.3163857460021973,
      "learning_rate": 2.0517981029731616e-05,
      "loss": 0.8083,
      "step": 340
    },
    {
      "epoch": 5.66,
      "grad_norm": 3.3125879764556885,
      "learning_rate": 1.9886198346844987e-05,
      "loss": 0.8329,
      "step": 345
    },
    {
      "epoch": 5.74,
      "grad_norm": 3.3822097778320312,
      "learning_rate": 1.9257806446705116e-05,
      "loss": 0.8162,
      "step": 350
    },
    {
      "epoch": 5.82,
      "grad_norm": 3.432570219039917,
      "learning_rate": 1.8633221993969285e-05,
      "loss": 0.8286,
      "step": 355
    },
    {
      "epoch": 5.9,
      "grad_norm": 3.3860833644866943,
      "learning_rate": 1.8012859128709766e-05,
      "loss": 0.8318,
      "step": 360
    },
    {
      "epoch": 5.98,
      "grad_norm": 3.3469362258911133,
      "learning_rate": 1.7397129191812057e-05,
      "loss": 0.8312,
      "step": 365
    },
    {
      "epoch": 6.07,
      "grad_norm": 3.2420990467071533,
      "learning_rate": 1.6786440452229134e-05,
      "loss": 0.6165,
      "step": 370
    },
    {
      "epoch": 6.15,
      "grad_norm": 3.6129636764526367,
      "learning_rate": 1.618119783627263e-05,
      "loss": 0.5368,
      "step": 375
    },
    {
      "epoch": 6.23,
      "grad_norm": 3.441241979598999,
      "learning_rate": 1.558180265912037e-05,
      "loss": 0.5347,
      "step": 380
    },
    {
      "epoch": 6.31,
      "grad_norm": 3.365767478942871,
      "learning_rate": 1.4988652358718336e-05,
      "loss": 0.5234,
      "step": 385
    },
    {
      "epoch": 6.39,
      "grad_norm": 3.1380422115325928,
      "learning_rate": 1.4402140232253486e-05,
      "loss": 0.514,
      "step": 390
    },
    {
      "epoch": 6.48,
      "grad_norm": 3.0910489559173584,
      "learning_rate": 1.3822655175372149e-05,
      "loss": 0.522,
      "step": 395
    },
    {
      "epoch": 6.56,
      "grad_norm": 3.047086477279663,
      "learning_rate": 1.325058142431701e-05,
      "loss": 0.5207,
      "step": 400
    },
    {
      "epoch": 6.64,
      "grad_norm": 3.230703115463257,
      "learning_rate": 1.2686298301153393e-05,
      "loss": 0.5399,
      "step": 405
    },
    {
      "epoch": 6.72,
      "grad_norm": 3.278348445892334,
      "learning_rate": 1.213017996225424e-05,
      "loss": 0.5304,
      "step": 410
    },
    {
      "epoch": 6.8,
      "grad_norm": 3.2289199829101562,
      "learning_rate": 1.158259515020999e-05,
      "loss": 0.534,
      "step": 415
    },
    {
      "epoch": 6.89,
      "grad_norm": 3.1904938220977783,
      "learning_rate": 1.1043906949328387e-05,
      "loss": 0.5255,
      "step": 420
    },
    {
      "epoch": 6.97,
      "grad_norm": 3.2226648330688477,
      "learning_rate": 1.051447254488591e-05,
      "loss": 0.5388,
      "step": 425
    },
    {
      "epoch": 7.05,
      "grad_norm": 3.1080589294433594,
      "learning_rate": 9.9946429862908e-06,
      "loss": 0.4218,
      "step": 430
    },
    {
      "epoch": 7.13,
      "grad_norm": 3.863534450531006,
      "learning_rate": 9.48476295431443e-06,
      "loss": 0.3403,
      "step": 435
    },
    {
      "epoch": 7.21,
      "grad_norm": 2.675412654876709,
      "learning_rate": 8.985170532545622e-06,
      "loss": 0.3332,
      "step": 440
    },
    {
      "epoch": 7.3,
      "grad_norm": 2.844601631164551,
      "learning_rate": 8.496196983219203e-06,
      "loss": 0.3341,
      "step": 445
    },
    {
      "epoch": 7.38,
      "grad_norm": 2.8453519344329834,
      "learning_rate": 8.018166527567672e-06,
      "loss": 0.3334,
      "step": 450
    },
    {
      "epoch": 7.46,
      "grad_norm": 2.785527229309082,
      "learning_rate": 7.5513961308414065e-06,
      "loss": 0.3329,
      "step": 455
    },
    {
      "epoch": 7.54,
      "grad_norm": 2.7892823219299316,
      "learning_rate": 7.096195292140173e-06,
      "loss": 0.3407,
      "step": 460
    },
    {
      "epoch": 7.62,
      "grad_norm": 2.7924044132232666,
      "learning_rate": 6.652865839195024e-06,
      "loss": 0.3329,
      "step": 465
    },
    {
      "epoch": 7.7,
      "grad_norm": 2.592303991317749,
      "learning_rate": 6.221701728237009e-06,
      "loss": 0.3314,
      "step": 470
    },
    {
      "epoch": 7.79,
      "grad_norm": 2.7689881324768066,
      "learning_rate": 5.8029888490850005e-06,
      "loss": 0.3388,
      "step": 475
    },
    {
      "epoch": 7.87,
      "grad_norm": 2.657968759536743,
      "learning_rate": 5.397004835582242e-06,
      "loss": 0.338,
      "step": 480
    },
    {
      "epoch": 7.95,
      "grad_norm": 2.7076845169067383,
      "learning_rate": 5.004018881507016e-06,
      "loss": 0.3344,
      "step": 485
    },
    {
      "epoch": 8.03,
      "grad_norm": 2.54681396484375,
      "learning_rate": 4.624291562079719e-06,
      "loss": 0.2856,
      "step": 490
    },
    {
      "epoch": 8.11,
      "grad_norm": 2.4071710109710693,
      "learning_rate": 4.258074661184527e-06,
      "loss": 0.2364,
      "step": 495
    },
    {
      "epoch": 8.2,
      "grad_norm": 2.4430058002471924,
      "learning_rate": 3.90561100442036e-06,
      "loss": 0.2325,
      "step": 500
    },
    {
      "epoch": 8.28,
      "grad_norm": 2.2299325466156006,
      "learning_rate": 3.56713429809169e-06,
      "loss": 0.2289,
      "step": 505
    },
    {
      "epoch": 8.36,
      "grad_norm": 2.1451282501220703,
      "learning_rate": 3.2428689742461188e-06,
      "loss": 0.2277,
      "step": 510
    },
    {
      "epoch": 8.44,
      "grad_norm": 2.182375431060791,
      "learning_rate": 2.933030041861312e-06,
      "loss": 0.2268,
      "step": 515
    },
    {
      "epoch": 8.52,
      "grad_norm": 2.242326259613037,
      "learning_rate": 2.637822944280116e-06,
      "loss": 0.2264,
      "step": 520
    },
    {
      "epoch": 8.61,
      "grad_norm": 2.2420380115509033,
      "learning_rate": 2.3574434229882145e-06,
      "loss": 0.2319,
      "step": 525
    },
    {
      "epoch": 8.69,
      "grad_norm": 2.200953960418701,
      "learning_rate": 2.092077387824884e-06,
      "loss": 0.2292,
      "step": 530
    },
    {
      "epoch": 8.77,
      "grad_norm": 2.225543737411499,
      "learning_rate": 1.8419007937126255e-06,
      "loss": 0.2261,
      "step": 535
    },
    {
      "epoch": 8.85,
      "grad_norm": 2.20648193359375,
      "learning_rate": 1.6070795239876618e-06,
      "loss": 0.2247,
      "step": 540
    },
    {
      "epoch": 8.93,
      "grad_norm": 2.1440372467041016,
      "learning_rate": 1.3877692804084685e-06,
      "loss": 0.2358,
      "step": 545
    },
    {
      "epoch": 9.02,
      "grad_norm": 2.046773672103882,
      "learning_rate": 1.1841154799154374e-06,
      "loss": 0.2207,
      "step": 550
    },
    {
      "epoch": 9.1,
      "grad_norm": 1.9511895179748535,
      "learning_rate": 9.96253158209956e-07,
      "loss": 0.1908,
      "step": 555
    },
    {
      "epoch": 9.18,
      "grad_norm": 1.9654351472854614,
      "learning_rate": 8.243068802169906e-07,
      "loss": 0.1906,
      "step": 560
    },
    {
      "epoch": 9.26,
      "grad_norm": 1.9583854675292969,
      "learning_rate": 6.683906574904364e-07,
      "loss": 0.1914,
      "step": 565
    },
    {
      "epoch": 9.34,
      "grad_norm": 1.9551448822021484,
      "learning_rate": 5.286078726160549e-07,
      "loss": 0.1897,
      "step": 570
    },
    {
      "epoch": 9.43,
      "grad_norm": 1.9811819791793823,
      "learning_rate": 4.050512106620913e-07,
      "loss": 0.1926,
      "step": 575
    },
    {
      "epoch": 9.51,
      "grad_norm": 1.927250623703003,
      "learning_rate": 2.978025977230736e-07,
      "loss": 0.186,
      "step": 580
    },
    {
      "epoch": 9.59,
      "grad_norm": 1.8517950773239136,
      "learning_rate": 2.0693314659746278e-07,
      "loss": 0.1863,
      "step": 585
    },
    {
      "epoch": 9.67,
      "grad_norm": 1.9849419593811035,
      "learning_rate": 1.3250310963527358e-07,
      "loss": 0.1887,
      "step": 590
    },
    {
      "epoch": 9.75,
      "grad_norm": 1.8776540756225586,
      "learning_rate": 7.456183878683243e-08,
      "loss": 0.1851,
      "step": 595
    },
    {
      "epoch": 9.84,
      "grad_norm": 2.024672746658325,
      "learning_rate": 3.314775287923677e-08,
      "loss": 0.1906,
      "step": 600
    },
    {
      "epoch": 9.92,
      "grad_norm": 1.9220367670059204,
      "learning_rate": 8.288312142173959e-09,
      "loss": 0.1836,
      "step": 605
    },
    {
      "epoch": 10.0,
      "grad_norm": 2.1801717281341553,
      "learning_rate": 0.0,
      "loss": 0.1852,
      "step": 610
    },
    {
      "epoch": 10.0,
      "step": 610,
      "total_flos": 3.693046753394688e+16,
      "train_loss": 1.2541689154554585,
      "train_runtime": 15808.2669,
      "train_samples_per_second": 1.233,
      "train_steps_per_second": 0.039
    }
  ],
  "logging_steps": 5,
  "max_steps": 610,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 100,
  "total_flos": 3.693046753394688e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}