|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.995079950799508,
  "eval_steps": 102,
  "global_step": 1015,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004920049200492005,
      "grad_norm": 2.296875,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.2408,
      "step": 1
    },
    {
      "epoch": 0.00984009840098401,
      "grad_norm": 2.8125,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.2626,
      "step": 2
    },
    {
      "epoch": 0.014760147601476014,
      "grad_norm": 2.484375,
      "learning_rate": 3e-06,
      "loss": 0.2438,
      "step": 3
    },
    {
      "epoch": 0.01968019680196802,
      "grad_norm": 2.375,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.2335,
      "step": 4
    },
    {
      "epoch": 0.024600246002460024,
      "grad_norm": 2.453125,
      "learning_rate": 5e-06,
      "loss": 0.229,
      "step": 5
    },
    {
      "epoch": 0.02952029520295203,
      "grad_norm": 1.78125,
      "learning_rate": 6e-06,
      "loss": 0.2125,
      "step": 6
    },
    {
      "epoch": 0.03444034440344403,
      "grad_norm": 1.4765625,
      "learning_rate": 7e-06,
      "loss": 0.1421,
      "step": 7
    },
    {
      "epoch": 0.03936039360393604,
      "grad_norm": 1.53125,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.1727,
      "step": 8
    },
    {
      "epoch": 0.04428044280442804,
      "grad_norm": 1.4296875,
      "learning_rate": 9e-06,
      "loss": 0.1599,
      "step": 9
    },
    {
      "epoch": 0.04920049200492005,
      "grad_norm": 1.7265625,
      "learning_rate": 1e-05,
      "loss": 0.2048,
      "step": 10
    },
    {
      "epoch": 0.05412054120541206,
      "grad_norm": 2.125,
      "learning_rate": 9.99997557091071e-06,
      "loss": 0.1458,
      "step": 11
    },
    {
      "epoch": 0.05904059040590406,
      "grad_norm": 1.6015625,
      "learning_rate": 9.99990228388155e-06,
      "loss": 0.1435,
      "step": 12
    },
    {
      "epoch": 0.06396063960639606,
      "grad_norm": 1.2578125,
      "learning_rate": 9.999780139628657e-06,
      "loss": 0.1791,
      "step": 13
    },
    {
      "epoch": 0.06888068880688807,
      "grad_norm": 1.375,
      "learning_rate": 9.999609139345578e-06,
      "loss": 0.1412,
      "step": 14
    },
    {
      "epoch": 0.07380073800738007,
      "grad_norm": 1.0859375,
      "learning_rate": 9.999389284703265e-06,
      "loss": 0.1416,
      "step": 15
    },
    {
      "epoch": 0.07872078720787208,
      "grad_norm": 1.21875,
      "learning_rate": 9.99912057785006e-06,
      "loss": 0.1255,
      "step": 16
    },
    {
      "epoch": 0.08364083640836409,
      "grad_norm": 0.9140625,
      "learning_rate": 9.998803021411668e-06,
      "loss": 0.1201,
      "step": 17
    },
    {
      "epoch": 0.08856088560885608,
      "grad_norm": 1.6875,
      "learning_rate": 9.99843661849113e-06,
      "loss": 0.2039,
      "step": 18
    },
    {
      "epoch": 0.09348093480934809,
      "grad_norm": 0.96875,
      "learning_rate": 9.998021372668809e-06,
      "loss": 0.1352,
      "step": 19
    },
    {
      "epoch": 0.0984009840098401,
      "grad_norm": 0.9296875,
      "learning_rate": 9.99755728800233e-06,
      "loss": 0.1448,
      "step": 20
    },
    {
      "epoch": 0.1033210332103321,
      "grad_norm": 1.109375,
      "learning_rate": 9.997044369026563e-06,
      "loss": 0.1537,
      "step": 21
    },
    {
      "epoch": 0.10824108241082411,
      "grad_norm": 1.0234375,
      "learning_rate": 9.996482620753565e-06,
      "loss": 0.1506,
      "step": 22
    },
    {
      "epoch": 0.11316113161131611,
      "grad_norm": 1.109375,
      "learning_rate": 9.995872048672535e-06,
      "loss": 0.1474,
      "step": 23
    },
    {
      "epoch": 0.11808118081180811,
      "grad_norm": 0.8671875,
      "learning_rate": 9.99521265874976e-06,
      "loss": 0.1215,
      "step": 24
    },
    {
      "epoch": 0.12300123001230012,
      "grad_norm": 1.0859375,
      "learning_rate": 9.994504457428557e-06,
      "loss": 0.126,
      "step": 25
    },
    {
      "epoch": 0.12792127921279212,
      "grad_norm": 1.015625,
      "learning_rate": 9.993747451629217e-06,
      "loss": 0.1417,
      "step": 26
    },
    {
      "epoch": 0.13284132841328414,
      "grad_norm": 1.15625,
      "learning_rate": 9.992941648748917e-06,
      "loss": 0.1163,
      "step": 27
    },
    {
      "epoch": 0.13776137761377613,
      "grad_norm": 1.046875,
      "learning_rate": 9.992087056661676e-06,
      "loss": 0.1332,
      "step": 28
    },
    {
      "epoch": 0.14268142681426815,
      "grad_norm": 0.8671875,
      "learning_rate": 9.991183683718253e-06,
      "loss": 0.1269,
      "step": 29
    },
    {
      "epoch": 0.14760147601476015,
      "grad_norm": 1.0,
      "learning_rate": 9.99023153874608e-06,
      "loss": 0.1071,
      "step": 30
    },
    {
      "epoch": 0.15252152521525214,
      "grad_norm": 1.078125,
      "learning_rate": 9.989230631049171e-06,
      "loss": 0.1415,
      "step": 31
    },
    {
      "epoch": 0.15744157441574416,
      "grad_norm": 1.1015625,
      "learning_rate": 9.988180970408032e-06,
      "loss": 0.1439,
      "step": 32
    },
    {
      "epoch": 0.16236162361623616,
      "grad_norm": 1.046875,
      "learning_rate": 9.987082567079563e-06,
      "loss": 0.1333,
      "step": 33
    },
    {
      "epoch": 0.16728167281672818,
      "grad_norm": 0.9296875,
      "learning_rate": 9.985935431796962e-06,
      "loss": 0.134,
      "step": 34
    },
    {
      "epoch": 0.17220172201722017,
      "grad_norm": 1.1015625,
      "learning_rate": 9.984739575769619e-06,
      "loss": 0.1315,
      "step": 35
    },
    {
      "epoch": 0.17712177121771217,
      "grad_norm": 1.0625,
      "learning_rate": 9.983495010683e-06,
      "loss": 0.1258,
      "step": 36
    },
    {
      "epoch": 0.1820418204182042,
      "grad_norm": 0.87890625,
      "learning_rate": 9.982201748698543e-06,
      "loss": 0.1141,
      "step": 37
    },
    {
      "epoch": 0.18696186961869618,
      "grad_norm": 1.0703125,
      "learning_rate": 9.980859802453533e-06,
      "loss": 0.1437,
      "step": 38
    },
    {
      "epoch": 0.1918819188191882,
      "grad_norm": 0.95703125,
      "learning_rate": 9.97946918506098e-06,
      "loss": 0.1326,
      "step": 39
    },
    {
      "epoch": 0.1968019680196802,
      "grad_norm": 0.87109375,
      "learning_rate": 9.978029910109491e-06,
      "loss": 0.0979,
      "step": 40
    },
    {
      "epoch": 0.2017220172201722,
      "grad_norm": 0.8203125,
      "learning_rate": 9.976541991663136e-06,
      "loss": 0.1086,
      "step": 41
    },
    {
      "epoch": 0.2066420664206642,
      "grad_norm": 1.2265625,
      "learning_rate": 9.975005444261312e-06,
      "loss": 0.1504,
      "step": 42
    },
    {
      "epoch": 0.2115621156211562,
      "grad_norm": 0.88671875,
      "learning_rate": 9.973420282918603e-06,
      "loss": 0.1282,
      "step": 43
    },
    {
      "epoch": 0.21648216482164823,
      "grad_norm": 0.75,
      "learning_rate": 9.971786523124623e-06,
      "loss": 0.0807,
      "step": 44
    },
    {
      "epoch": 0.22140221402214022,
      "grad_norm": 0.77734375,
      "learning_rate": 9.970104180843881e-06,
      "loss": 0.1084,
      "step": 45
    },
    {
      "epoch": 0.22632226322263221,
      "grad_norm": 1.046875,
      "learning_rate": 9.968373272515612e-06,
      "loss": 0.1385,
      "step": 46
    },
    {
      "epoch": 0.23124231242312424,
      "grad_norm": 1.0078125,
      "learning_rate": 9.966593815053623e-06,
      "loss": 0.1538,
      "step": 47
    },
    {
      "epoch": 0.23616236162361623,
      "grad_norm": 0.82421875,
      "learning_rate": 9.964765825846123e-06,
      "loss": 0.1035,
      "step": 48
    },
    {
      "epoch": 0.24108241082410825,
      "grad_norm": 0.96484375,
      "learning_rate": 9.962889322755556e-06,
      "loss": 0.1055,
      "step": 49
    },
    {
      "epoch": 0.24600246002460024,
      "grad_norm": 0.7734375,
      "learning_rate": 9.960964324118428e-06,
      "loss": 0.108,
      "step": 50
    },
    {
      "epoch": 0.25092250922509224,
      "grad_norm": 0.94921875,
      "learning_rate": 9.958990848745123e-06,
      "loss": 0.1407,
      "step": 51
    },
    {
      "epoch": 0.25584255842558423,
      "grad_norm": 0.890625,
      "learning_rate": 9.956968915919725e-06,
      "loss": 0.1281,
      "step": 52
    },
    {
      "epoch": 0.2607626076260763,
      "grad_norm": 1.2109375,
      "learning_rate": 9.954898545399822e-06,
      "loss": 0.1094,
      "step": 53
    },
    {
      "epoch": 0.2656826568265683,
      "grad_norm": 0.8984375,
      "learning_rate": 9.952779757416326e-06,
      "loss": 0.1341,
      "step": 54
    },
    {
      "epoch": 0.27060270602706027,
      "grad_norm": 1.1484375,
      "learning_rate": 9.950612572673255e-06,
      "loss": 0.1497,
      "step": 55
    },
    {
      "epoch": 0.27552275522755226,
      "grad_norm": 1.0859375,
      "learning_rate": 9.948397012347553e-06,
      "loss": 0.146,
      "step": 56
    },
    {
      "epoch": 0.28044280442804426,
      "grad_norm": 0.94140625,
      "learning_rate": 9.946133098088868e-06,
      "loss": 0.1294,
      "step": 57
    },
    {
      "epoch": 0.2853628536285363,
      "grad_norm": 1.046875,
      "learning_rate": 9.943820852019345e-06,
      "loss": 0.1228,
      "step": 58
    },
    {
      "epoch": 0.2902829028290283,
      "grad_norm": 0.890625,
      "learning_rate": 9.941460296733409e-06,
      "loss": 0.0948,
      "step": 59
    },
    {
      "epoch": 0.2952029520295203,
      "grad_norm": 1.09375,
      "learning_rate": 9.939051455297548e-06,
      "loss": 0.1337,
      "step": 60
    },
    {
      "epoch": 0.3001230012300123,
      "grad_norm": 0.90234375,
      "learning_rate": 9.936594351250082e-06,
      "loss": 0.1267,
      "step": 61
    },
    {
      "epoch": 0.3050430504305043,
      "grad_norm": 0.6640625,
      "learning_rate": 9.934089008600937e-06,
      "loss": 0.0852,
      "step": 62
    },
    {
      "epoch": 0.30996309963099633,
      "grad_norm": 0.89453125,
      "learning_rate": 9.931535451831408e-06,
      "loss": 0.0853,
      "step": 63
    },
    {
      "epoch": 0.3148831488314883,
      "grad_norm": 1.2578125,
      "learning_rate": 9.928933705893924e-06,
      "loss": 0.104,
      "step": 64
    },
    {
      "epoch": 0.3198031980319803,
      "grad_norm": 0.96484375,
      "learning_rate": 9.926283796211796e-06,
      "loss": 0.0999,
      "step": 65
    },
    {
      "epoch": 0.3247232472324723,
      "grad_norm": 0.83203125,
      "learning_rate": 9.923585748678976e-06,
      "loss": 0.1169,
      "step": 66
    },
    {
      "epoch": 0.3296432964329643,
      "grad_norm": 0.984375,
      "learning_rate": 9.920839589659803e-06,
      "loss": 0.1413,
      "step": 67
    },
    {
      "epoch": 0.33456334563345635,
      "grad_norm": 0.90625,
      "learning_rate": 9.918045345988743e-06,
      "loss": 0.0995,
      "step": 68
    },
    {
      "epoch": 0.33948339483394835,
      "grad_norm": 0.95703125,
      "learning_rate": 9.915203044970125e-06,
      "loss": 0.1242,
      "step": 69
    },
    {
      "epoch": 0.34440344403444034,
      "grad_norm": 1.078125,
      "learning_rate": 9.91231271437788e-06,
      "loss": 0.0816,
      "step": 70
    },
    {
      "epoch": 0.34932349323493234,
      "grad_norm": 0.96875,
      "learning_rate": 9.909374382455268e-06,
      "loss": 0.1365,
      "step": 71
    },
    {
      "epoch": 0.35424354243542433,
      "grad_norm": 0.91796875,
      "learning_rate": 9.906388077914598e-06,
      "loss": 0.1388,
      "step": 72
    },
    {
      "epoch": 0.3591635916359164,
      "grad_norm": 1.0546875,
      "learning_rate": 9.903353829936943e-06,
      "loss": 0.1291,
      "step": 73
    },
    {
      "epoch": 0.3640836408364084,
      "grad_norm": 0.8828125,
      "learning_rate": 9.900271668171879e-06,
      "loss": 0.1014,
      "step": 74
    },
    {
      "epoch": 0.36900369003690037,
      "grad_norm": 1.0625,
      "learning_rate": 9.89714162273716e-06,
      "loss": 0.1653,
      "step": 75
    },
    {
      "epoch": 0.37392373923739236,
      "grad_norm": 0.76953125,
      "learning_rate": 9.893963724218456e-06,
      "loss": 0.1097,
      "step": 76
    },
    {
      "epoch": 0.37884378843788435,
      "grad_norm": 1.09375,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.1509,
      "step": 77
    },
    {
      "epoch": 0.3837638376383764,
      "grad_norm": 1.15625,
      "learning_rate": 9.887464492609447e-06,
      "loss": 0.119,
      "step": 78
    },
    {
      "epoch": 0.3886838868388684,
      "grad_norm": 0.90234375,
      "learning_rate": 9.884143223027267e-06,
      "loss": 0.1354,
      "step": 79
    },
    {
      "epoch": 0.3936039360393604,
      "grad_norm": 0.84375,
      "learning_rate": 9.880774227376727e-06,
      "loss": 0.0938,
      "step": 80
    },
    {
      "epoch": 0.3985239852398524,
      "grad_norm": 0.890625,
      "learning_rate": 9.877357538578426e-06,
      "loss": 0.1072,
      "step": 81
    },
    {
      "epoch": 0.4034440344403444,
      "grad_norm": 0.8125,
      "learning_rate": 9.873893190018995e-06,
      "loss": 0.099,
      "step": 82
    },
    {
      "epoch": 0.40836408364083643,
      "grad_norm": 1.0703125,
      "learning_rate": 9.870381215550796e-06,
      "loss": 0.1036,
      "step": 83
    },
    {
      "epoch": 0.4132841328413284,
      "grad_norm": 0.7734375,
      "learning_rate": 9.866821649491558e-06,
      "loss": 0.0779,
      "step": 84
    },
    {
      "epoch": 0.4182041820418204,
      "grad_norm": 1.140625,
      "learning_rate": 9.863214526624065e-06,
      "loss": 0.1597,
      "step": 85
    },
    {
      "epoch": 0.4231242312423124,
      "grad_norm": 0.96875,
      "learning_rate": 9.85955988219581e-06,
      "loss": 0.1283,
      "step": 86
    },
    {
      "epoch": 0.4280442804428044,
      "grad_norm": 1.1171875,
      "learning_rate": 9.855857751918644e-06,
      "loss": 0.1327,
      "step": 87
    },
    {
      "epoch": 0.43296432964329645,
      "grad_norm": 0.80859375,
      "learning_rate": 9.852108171968438e-06,
      "loss": 0.1,
      "step": 88
    },
    {
      "epoch": 0.43788437884378845,
      "grad_norm": 1.0625,
      "learning_rate": 9.848311178984718e-06,
      "loss": 0.1653,
      "step": 89
    },
    {
      "epoch": 0.44280442804428044,
      "grad_norm": 0.69921875,
      "learning_rate": 9.844466810070319e-06,
      "loss": 0.0841,
      "step": 90
    },
    {
      "epoch": 0.44772447724477243,
      "grad_norm": 0.8828125,
      "learning_rate": 9.840575102791013e-06,
      "loss": 0.1193,
      "step": 91
    },
    {
      "epoch": 0.45264452644526443,
      "grad_norm": 0.9921875,
      "learning_rate": 9.836636095175148e-06,
      "loss": 0.1187,
      "step": 92
    },
    {
      "epoch": 0.4575645756457565,
      "grad_norm": 0.95703125,
      "learning_rate": 9.832649825713266e-06,
      "loss": 0.1198,
      "step": 93
    },
    {
      "epoch": 0.46248462484624847,
      "grad_norm": 0.96875,
      "learning_rate": 9.828616333357743e-06,
      "loss": 0.1148,
      "step": 94
    },
    {
      "epoch": 0.46740467404674046,
      "grad_norm": 0.79296875,
      "learning_rate": 9.8245356575224e-06,
      "loss": 0.1074,
      "step": 95
    },
    {
      "epoch": 0.47232472324723246,
      "grad_norm": 0.7109375,
      "learning_rate": 9.820407838082109e-06,
      "loss": 0.1193,
      "step": 96
    },
    {
      "epoch": 0.47724477244772445,
      "grad_norm": 0.66796875,
      "learning_rate": 9.816232915372423e-06,
      "loss": 0.0722,
      "step": 97
    },
    {
      "epoch": 0.4821648216482165,
      "grad_norm": 0.9765625,
      "learning_rate": 9.812010930189163e-06,
      "loss": 0.0929,
      "step": 98
    },
    {
      "epoch": 0.4870848708487085,
      "grad_norm": 0.89453125,
      "learning_rate": 9.80774192378803e-06,
      "loss": 0.0973,
      "step": 99
    },
    {
      "epoch": 0.4920049200492005,
      "grad_norm": 1.1328125,
      "learning_rate": 9.803425937884202e-06,
      "loss": 0.1335,
      "step": 100
    },
    {
      "epoch": 0.4969249692496925,
      "grad_norm": 0.77734375,
      "learning_rate": 9.799063014651918e-06,
      "loss": 0.1169,
      "step": 101
    },
    {
      "epoch": 0.5018450184501845,
      "grad_norm": 0.93359375,
      "learning_rate": 9.794653196724077e-06,
      "loss": 0.1159,
      "step": 102
    },
    {
      "epoch": 0.5018450184501845,
      "eval_loss": 0.12004748731851578,
      "eval_runtime": 203.4298,
      "eval_samples_per_second": 8.042,
      "eval_steps_per_second": 1.342,
      "step": 102
    },
    {
      "epoch": 0.5067650676506765,
      "grad_norm": 0.9375,
      "learning_rate": 9.790196527191811e-06,
      "loss": 0.1142,
      "step": 103
    },
    {
      "epoch": 0.5116851168511685,
      "grad_norm": 0.8203125,
      "learning_rate": 9.785693049604073e-06,
      "loss": 0.1113,
      "step": 104
    },
    {
      "epoch": 0.5166051660516605,
      "grad_norm": 0.80859375,
      "learning_rate": 9.781142807967205e-06,
      "loss": 0.1035,
      "step": 105
    },
    {
      "epoch": 0.5215252152521526,
      "grad_norm": 0.91796875,
      "learning_rate": 9.776545846744509e-06,
      "loss": 0.1238,
      "step": 106
    },
    {
      "epoch": 0.5264452644526445,
      "grad_norm": 1.4765625,
      "learning_rate": 9.771902210855819e-06,
      "loss": 0.0921,
      "step": 107
    },
    {
      "epoch": 0.5313653136531366,
      "grad_norm": 1.1015625,
      "learning_rate": 9.767211945677051e-06,
      "loss": 0.1459,
      "step": 108
    },
    {
      "epoch": 0.5362853628536285,
      "grad_norm": 1.0,
      "learning_rate": 9.762475097039767e-06,
      "loss": 0.101,
      "step": 109
    },
    {
      "epoch": 0.5412054120541205,
      "grad_norm": 1.078125,
      "learning_rate": 9.757691711230728e-06,
      "loss": 0.1259,
      "step": 110
    },
    {
      "epoch": 0.5461254612546126,
      "grad_norm": 0.8828125,
      "learning_rate": 9.752861834991436e-06,
      "loss": 0.1146,
      "step": 111
    },
    {
      "epoch": 0.5510455104551045,
      "grad_norm": 1.0078125,
      "learning_rate": 9.747985515517683e-06,
      "loss": 0.1609,
      "step": 112
    },
    {
      "epoch": 0.5559655596555966,
      "grad_norm": 0.94921875,
      "learning_rate": 9.743062800459089e-06,
      "loss": 0.1242,
      "step": 113
    },
    {
      "epoch": 0.5608856088560885,
      "grad_norm": 0.9765625,
      "learning_rate": 9.738093737918627e-06,
      "loss": 0.1095,
      "step": 114
    },
    {
      "epoch": 0.5658056580565806,
      "grad_norm": 1.0,
      "learning_rate": 9.733078376452172e-06,
      "loss": 0.1083,
      "step": 115
    },
    {
      "epoch": 0.5707257072570726,
      "grad_norm": 1.0390625,
      "learning_rate": 9.728016765068004e-06,
      "loss": 0.1428,
      "step": 116
    },
    {
      "epoch": 0.5756457564575646,
      "grad_norm": 0.859375,
      "learning_rate": 9.722908953226349e-06,
      "loss": 0.1073,
      "step": 117
    },
    {
      "epoch": 0.5805658056580566,
      "grad_norm": 0.8125,
      "learning_rate": 9.717754990838882e-06,
      "loss": 0.1049,
      "step": 118
    },
    {
      "epoch": 0.5854858548585485,
      "grad_norm": 1.140625,
      "learning_rate": 9.712554928268245e-06,
      "loss": 0.1479,
      "step": 119
    },
    {
      "epoch": 0.5904059040590406,
      "grad_norm": 0.90234375,
      "learning_rate": 9.707308816327557e-06,
      "loss": 0.1195,
      "step": 120
    },
    {
      "epoch": 0.5953259532595326,
      "grad_norm": 0.82421875,
      "learning_rate": 9.702016706279914e-06,
      "loss": 0.1048,
      "step": 121
    },
    {
      "epoch": 0.6002460024600246,
      "grad_norm": 0.984375,
      "learning_rate": 9.696678649837883e-06,
      "loss": 0.1073,
      "step": 122
    },
    {
      "epoch": 0.6051660516605166,
      "grad_norm": 1.0859375,
      "learning_rate": 9.691294699163013e-06,
      "loss": 0.1308,
      "step": 123
    },
    {
      "epoch": 0.6100861008610086,
      "grad_norm": 0.86328125,
      "learning_rate": 9.685864906865303e-06,
      "loss": 0.1248,
      "step": 124
    },
    {
      "epoch": 0.6150061500615006,
      "grad_norm": 0.8515625,
      "learning_rate": 9.680389326002708e-06,
      "loss": 0.1312,
      "step": 125
    },
    {
      "epoch": 0.6199261992619927,
      "grad_norm": 0.71875,
      "learning_rate": 9.67486801008061e-06,
      "loss": 0.0868,
      "step": 126
    },
    {
      "epoch": 0.6248462484624846,
      "grad_norm": 0.76953125,
      "learning_rate": 9.669301013051297e-06,
      "loss": 0.0859,
      "step": 127
    },
    {
      "epoch": 0.6297662976629766,
      "grad_norm": 0.734375,
      "learning_rate": 9.663688389313435e-06,
      "loss": 0.0878,
      "step": 128
    },
    {
      "epoch": 0.6346863468634686,
      "grad_norm": 1.0234375,
      "learning_rate": 9.658030193711538e-06,
      "loss": 0.1229,
      "step": 129
    },
    {
      "epoch": 0.6396063960639606,
      "grad_norm": 0.97265625,
      "learning_rate": 9.652326481535434e-06,
      "loss": 0.1439,
      "step": 130
    },
    {
      "epoch": 0.6445264452644527,
      "grad_norm": 0.703125,
      "learning_rate": 9.646577308519719e-06,
      "loss": 0.1063,
      "step": 131
    },
    {
      "epoch": 0.6494464944649446,
      "grad_norm": 0.9609375,
      "learning_rate": 9.640782730843219e-06,
      "loss": 0.1023,
      "step": 132
    },
    {
      "epoch": 0.6543665436654367,
      "grad_norm": 0.984375,
      "learning_rate": 9.634942805128433e-06,
      "loss": 0.1002,
      "step": 133
    },
    {
      "epoch": 0.6592865928659286,
      "grad_norm": 0.76953125,
      "learning_rate": 9.629057588440993e-06,
      "loss": 0.0978,
      "step": 134
    },
    {
      "epoch": 0.6642066420664207,
      "grad_norm": 0.8671875,
      "learning_rate": 9.623127138289087e-06,
      "loss": 0.1042,
      "step": 135
    },
    {
      "epoch": 0.6691266912669127,
      "grad_norm": 0.8515625,
      "learning_rate": 9.617151512622918e-06,
      "loss": 0.0873,
      "step": 136
    },
    {
      "epoch": 0.6740467404674046,
      "grad_norm": 0.91796875,
      "learning_rate": 9.611130769834121e-06,
      "loss": 0.1118,
      "step": 137
    },
    {
      "epoch": 0.6789667896678967,
      "grad_norm": 0.9375,
      "learning_rate": 9.6050649687552e-06,
      "loss": 0.13,
      "step": 138
    },
    {
      "epoch": 0.6838868388683886,
      "grad_norm": 0.828125,
      "learning_rate": 9.598954168658956e-06,
      "loss": 0.1096,
      "step": 139
    },
    {
      "epoch": 0.6888068880688807,
      "grad_norm": 0.86328125,
      "learning_rate": 9.592798429257899e-06,
      "loss": 0.1219,
      "step": 140
    },
    {
      "epoch": 0.6937269372693727,
      "grad_norm": 0.95703125,
      "learning_rate": 9.586597810703674e-06,
      "loss": 0.1002,
      "step": 141
    },
    {
      "epoch": 0.6986469864698647,
      "grad_norm": 0.765625,
      "learning_rate": 9.580352373586468e-06,
      "loss": 0.0765,
      "step": 142
    },
    {
      "epoch": 0.7035670356703567,
      "grad_norm": 0.90234375,
      "learning_rate": 9.574062178934414e-06,
      "loss": 0.1227,
      "step": 143
    },
    {
      "epoch": 0.7084870848708487,
      "grad_norm": 0.82421875,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.1052,
      "step": 144
    },
    {
      "epoch": 0.7134071340713407,
      "grad_norm": 0.87109375,
      "learning_rate": 9.561347763324484e-06,
      "loss": 0.0909,
      "step": 145
    },
    {
      "epoch": 0.7183271832718328,
      "grad_norm": 1.0546875,
      "learning_rate": 9.554923666607245e-06,
      "loss": 0.1219,
      "step": 146
    },
    {
      "epoch": 0.7232472324723247,
      "grad_norm": 0.98046875,
      "learning_rate": 9.548455060835223e-06,
      "loss": 0.1077,
      "step": 147
    },
    {
      "epoch": 0.7281672816728167,
      "grad_norm": 0.94921875,
      "learning_rate": 9.541942009217273e-06,
      "loss": 0.132,
      "step": 148
    },
    {
      "epoch": 0.7330873308733087,
      "grad_norm": 1.03125,
      "learning_rate": 9.535384575396565e-06,
      "loss": 0.1462,
      "step": 149
    },
    {
      "epoch": 0.7380073800738007,
      "grad_norm": 0.79296875,
      "learning_rate": 9.528782823449954e-06,
      "loss": 0.0951,
      "step": 150
    },
    {
      "epoch": 0.7429274292742928,
      "grad_norm": 0.99609375,
      "learning_rate": 9.522136817887353e-06,
      "loss": 0.1451,
      "step": 151
    },
    {
      "epoch": 0.7478474784747847,
      "grad_norm": 0.81640625,
      "learning_rate": 9.51544662365111e-06,
      "loss": 0.072,
      "step": 152
    },
    {
      "epoch": 0.7527675276752768,
      "grad_norm": 0.80078125,
      "learning_rate": 9.508712306115366e-06,
      "loss": 0.1202,
      "step": 153
    },
    {
      "epoch": 0.7576875768757687,
      "grad_norm": 0.890625,
      "learning_rate": 9.501933931085416e-06,
      "loss": 0.1068,
      "step": 154
    },
    {
      "epoch": 0.7626076260762608,
      "grad_norm": 0.8984375,
      "learning_rate": 9.495111564797073e-06,
      "loss": 0.1091,
      "step": 155
    },
    {
      "epoch": 0.7675276752767528,
      "grad_norm": 0.84375,
      "learning_rate": 9.488245273916016e-06,
      "loss": 0.0982,
      "step": 156
    },
    {
      "epoch": 0.7724477244772447,
      "grad_norm": 0.828125,
      "learning_rate": 9.481335125537138e-06,
      "loss": 0.1064,
      "step": 157
    },
    {
      "epoch": 0.7773677736777368,
      "grad_norm": 0.8828125,
      "learning_rate": 9.47438118718389e-06,
      "loss": 0.1027,
      "step": 158
    },
    {
      "epoch": 0.7822878228782287,
      "grad_norm": 0.765625,
      "learning_rate": 9.467383526807626e-06,
      "loss": 0.0917,
      "step": 159
    },
    {
      "epoch": 0.7872078720787208,
      "grad_norm": 0.875,
      "learning_rate": 9.460342212786933e-06,
      "loss": 0.0975,
      "step": 160
    },
    {
      "epoch": 0.7921279212792128,
      "grad_norm": 1.03125,
      "learning_rate": 9.453257313926969e-06,
      "loss": 0.0837,
      "step": 161
    },
    {
      "epoch": 0.7970479704797048,
      "grad_norm": 0.68359375,
      "learning_rate": 9.44612889945878e-06,
      "loss": 0.093,
      "step": 162
    },
    {
      "epoch": 0.8019680196801968,
      "grad_norm": 0.7578125,
      "learning_rate": 9.43895703903864e-06,
      "loss": 0.1094,
      "step": 163
    },
    {
      "epoch": 0.8068880688806888,
      "grad_norm": 1.015625,
      "learning_rate": 9.431741802747355e-06,
      "loss": 0.1081,
      "step": 164
    },
    {
      "epoch": 0.8118081180811808,
      "grad_norm": 0.96875,
      "learning_rate": 9.424483261089584e-06,
      "loss": 0.1207,
      "step": 165
    },
    {
      "epoch": 0.8167281672816729,
      "grad_norm": 0.67578125,
      "learning_rate": 9.417181484993154e-06,
      "loss": 0.0717,
      "step": 166
    },
    {
      "epoch": 0.8216482164821648,
      "grad_norm": 0.99609375,
      "learning_rate": 9.40983654580836e-06,
      "loss": 0.1013,
      "step": 167
    },
    {
      "epoch": 0.8265682656826568,
      "grad_norm": 0.87890625,
      "learning_rate": 9.402448515307274e-06,
      "loss": 0.1122,
      "step": 168
    },
    {
      "epoch": 0.8314883148831488,
      "grad_norm": 0.99609375,
      "learning_rate": 9.395017465683036e-06,
      "loss": 0.1234,
      "step": 169
    },
    {
      "epoch": 0.8364083640836408,
      "grad_norm": 0.85546875,
      "learning_rate": 9.387543469549156e-06,
      "loss": 0.1259,
      "step": 170
    },
    {
      "epoch": 0.8413284132841329,
      "grad_norm": 0.78515625,
      "learning_rate": 9.380026599938804e-06,
      "loss": 0.1007,
      "step": 171
    },
    {
      "epoch": 0.8462484624846248,
      "grad_norm": 0.95703125,
      "learning_rate": 9.372466930304091e-06,
      "loss": 0.1148,
      "step": 172
    },
    {
      "epoch": 0.8511685116851169,
      "grad_norm": 0.765625,
      "learning_rate": 9.364864534515353e-06,
      "loss": 0.11,
      "step": 173
    },
    {
      "epoch": 0.8560885608856088,
      "grad_norm": 1.0078125,
      "learning_rate": 9.357219486860434e-06,
      "loss": 0.1255,
      "step": 174
    },
    {
      "epoch": 0.8610086100861009,
      "grad_norm": 0.8671875,
      "learning_rate": 9.349531862043952e-06,
      "loss": 0.0973,
      "step": 175
    },
    {
      "epoch": 0.8659286592865929,
      "grad_norm": 0.8671875,
      "learning_rate": 9.34180173518658e-06,
      "loss": 0.0872,
      "step": 176
    },
    {
      "epoch": 0.8708487084870848,
      "grad_norm": 0.796875,
      "learning_rate": 9.3340291818243e-06,
      "loss": 0.089,
      "step": 177
    },
    {
      "epoch": 0.8757687576875769,
      "grad_norm": 1.0078125,
      "learning_rate": 9.32621427790767e-06,
      "loss": 0.1148,
      "step": 178
    },
    {
      "epoch": 0.8806888068880688,
      "grad_norm": 0.78515625,
      "learning_rate": 9.318357099801087e-06,
      "loss": 0.0896,
      "step": 179
    },
    {
      "epoch": 0.8856088560885609,
      "grad_norm": 0.9296875,
      "learning_rate": 9.310457724282034e-06,
      "loss": 0.1104,
      "step": 180
    },
    {
      "epoch": 0.8905289052890529,
      "grad_norm": 0.859375,
      "learning_rate": 9.302516228540328e-06,
      "loss": 0.1115,
      "step": 181
    },
    {
      "epoch": 0.8954489544895449,
      "grad_norm": 0.921875,
      "learning_rate": 9.294532690177373e-06,
      "loss": 0.12,
      "step": 182
    },
    {
      "epoch": 0.9003690036900369,
      "grad_norm": 1.0625,
      "learning_rate": 9.286507187205399e-06,
      "loss": 0.137,
      "step": 183
    },
    {
      "epoch": 0.9052890528905289,
      "grad_norm": 0.80859375,
      "learning_rate": 9.278439798046697e-06,
      "loss": 0.1273,
      "step": 184
    },
    {
      "epoch": 0.9102091020910209,
      "grad_norm": 0.96484375,
      "learning_rate": 9.270330601532855e-06,
      "loss": 0.1339,
      "step": 185
    },
    {
      "epoch": 0.915129151291513,
      "grad_norm": 0.90234375,
      "learning_rate": 9.262179676903986e-06,
      "loss": 0.1011,
      "step": 186
    },
    {
      "epoch": 0.9200492004920049,
      "grad_norm": 0.90234375,
      "learning_rate": 9.25398710380796e-06,
      "loss": 0.1096,
      "step": 187
    },
    {
      "epoch": 0.9249692496924969,
      "grad_norm": 0.8125,
      "learning_rate": 9.245752962299612e-06,
      "loss": 0.1316,
      "step": 188
    },
    {
      "epoch": 0.9298892988929889,
      "grad_norm": 0.93359375,
      "learning_rate": 9.237477332839975e-06,
      "loss": 0.0908,
      "step": 189
    },
    {
      "epoch": 0.9348093480934809,
      "grad_norm": 0.97265625,
      "learning_rate": 9.229160296295488e-06,
      "loss": 0.138,
      "step": 190
    },
    {
      "epoch": 0.939729397293973,
      "grad_norm": 0.81640625,
      "learning_rate": 9.220801933937199e-06,
      "loss": 0.1259,
      "step": 191
    },
    {
      "epoch": 0.9446494464944649,
      "grad_norm": 0.92578125,
      "learning_rate": 9.212402327439982e-06,
      "loss": 0.117,
      "step": 192
    },
    {
      "epoch": 0.949569495694957,
      "grad_norm": 1.0859375,
      "learning_rate": 9.20396155888173e-06,
      "loss": 0.1405,
      "step": 193
    },
    {
      "epoch": 0.9544895448954489,
      "grad_norm": 1.015625,
      "learning_rate": 9.19547971074256e-06,
      "loss": 0.1154,
      "step": 194
    },
    {
      "epoch": 0.959409594095941,
      "grad_norm": 0.8828125,
      "learning_rate": 9.186956865904004e-06,
      "loss": 0.1203,
      "step": 195
    },
    {
      "epoch": 0.964329643296433,
      "grad_norm": 0.71875,
      "learning_rate": 9.178393107648193e-06,
      "loss": 0.1009,
      "step": 196
    },
    {
      "epoch": 0.9692496924969249,
      "grad_norm": 0.859375,
      "learning_rate": 9.169788519657056e-06,
      "loss": 0.1023,
      "step": 197
    },
    {
      "epoch": 0.974169741697417,
      "grad_norm": 1.015625,
      "learning_rate": 9.161143186011492e-06,
      "loss": 0.1402,
      "step": 198
    },
    {
      "epoch": 0.9790897908979089,
      "grad_norm": 0.828125,
      "learning_rate": 9.15245719119055e-06,
      "loss": 0.1065,
      "step": 199
    },
    {
      "epoch": 0.984009840098401,
      "grad_norm": 0.8515625,
      "learning_rate": 9.143730620070609e-06,
      "loss": 0.097,
      "step": 200
    },
    {
      "epoch": 0.988929889298893,
      "grad_norm": 0.87109375,
      "learning_rate": 9.134963557924543e-06,
      "loss": 0.0979,
      "step": 201
    },
    {
      "epoch": 0.993849938499385,
      "grad_norm": 0.87109375,
      "learning_rate": 9.12615609042089e-06,
      "loss": 0.1012,
      "step": 202
    },
    {
      "epoch": 0.998769987699877,
      "grad_norm": 0.7578125,
      "learning_rate": 9.11730830362301e-06,
      "loss": 0.0764,
      "step": 203
    },
    {
      "epoch": 1.0030750307503076,
      "grad_norm": 1.9296875,
      "learning_rate": 9.10842028398826e-06,
      "loss": 0.1883,
      "step": 204
    },
    {
      "epoch": 1.0030750307503076,
      "eval_loss": 0.11158129572868347,
      "eval_runtime": 204.933,
      "eval_samples_per_second": 7.983,
      "eval_steps_per_second": 1.332,
      "step": 204
    },
    {
      "epoch": 1.0079950799507995,
      "grad_norm": 1.09375,
      "learning_rate": 9.099492118367123e-06,
      "loss": 0.1227,
      "step": 205
    },
    {
      "epoch": 1.0129151291512914,
      "grad_norm": 0.80078125,
      "learning_rate": 9.090523894002386e-06,
      "loss": 0.0759,
      "step": 206
    },
    {
      "epoch": 1.0178351783517836,
      "grad_norm": 1.234375,
      "learning_rate": 9.081515698528267e-06,
      "loss": 0.1026,
      "step": 207
    },
    {
      "epoch": 1.0227552275522755,
      "grad_norm": 1.296875,
      "learning_rate": 9.072467619969574e-06,
      "loss": 0.1144,
      "step": 208
    },
    {
      "epoch": 1.0276752767527675,
      "grad_norm": 1.2578125,
      "learning_rate": 9.06337974674083e-06,
      "loss": 0.1129,
      "step": 209
    },
    {
      "epoch": 1.0325953259532596,
      "grad_norm": 1.5078125,
      "learning_rate": 9.054252167645426e-06,
      "loss": 0.081,
      "step": 210
    },
    {
      "epoch": 1.0375153751537516,
      "grad_norm": 1.1640625,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.1211,
      "step": 211
    },
    {
      "epoch": 1.0424354243542435,
      "grad_norm": 1.0546875,
      "learning_rate": 9.035878249007264e-06,
      "loss": 0.0857,
      "step": 212
    },
    {
      "epoch": 1.0473554735547355,
      "grad_norm": 1.0703125,
      "learning_rate": 9.026632089007745e-06,
      "loss": 0.1407,
      "step": 213
    },
    {
      "epoch": 1.0522755227552276,
      "grad_norm": 1.1484375,
      "learning_rate": 9.017346582226289e-06,
      "loss": 0.0835,
      "step": 214
    },
    {
      "epoch": 1.0571955719557196,
      "grad_norm": 1.21875,
      "learning_rate": 9.008021819397488e-06,
      "loss": 0.1263,
      "step": 215
    },
    {
      "epoch": 1.0621156211562115,
      "grad_norm": 1.2109375,
      "learning_rate": 8.998657891639523e-06,
      "loss": 0.0747,
      "step": 216
    },
    {
      "epoch": 1.0670356703567037,
      "grad_norm": 0.89453125,
      "learning_rate": 8.989254890453289e-06,
      "loss": 0.09,
      "step": 217
    },
    {
      "epoch": 1.0719557195571956,
      "grad_norm": 1.0,
      "learning_rate": 8.979812907721485e-06,
      "loss": 0.0924,
      "step": 218
    },
    {
      "epoch": 1.0768757687576875,
      "grad_norm": 1.1171875,
      "learning_rate": 8.97033203570773e-06,
      "loss": 0.1017,
      "step": 219
    },
    {
      "epoch": 1.0817958179581795,
      "grad_norm": 0.9921875,
      "learning_rate": 8.960812367055646e-06,
      "loss": 0.1033,
      "step": 220
    },
    {
      "epoch": 1.0867158671586716,
      "grad_norm": 1.3515625,
      "learning_rate": 8.951253994787975e-06,
      "loss": 0.1425,
      "step": 221
    },
    {
      "epoch": 1.0916359163591636,
      "grad_norm": 1.140625,
      "learning_rate": 8.941657012305644e-06,
      "loss": 0.1076,
      "step": 222
    },
    {
      "epoch": 1.0965559655596555,
      "grad_norm": 0.9296875,
      "learning_rate": 8.93202151338687e-06,
      "loss": 0.081,
      "step": 223
    },
    {
      "epoch": 1.1014760147601477,
      "grad_norm": 0.90234375,
      "learning_rate": 8.922347592186237e-06,
      "loss": 0.096,
      "step": 224
    },
    {
      "epoch": 1.1063960639606396,
      "grad_norm": 0.79296875,
      "learning_rate": 8.912635343233784e-06,
      "loss": 0.0681,
      "step": 225
    },
    {
      "epoch": 1.1113161131611315,
      "grad_norm": 1.28125,
      "learning_rate": 8.902884861434066e-06,
      "loss": 0.117,
      "step": 226
    },
    {
      "epoch": 1.1162361623616237,
      "grad_norm": 1.2734375,
      "learning_rate": 8.89309624206524e-06,
      "loss": 0.1133,
      "step": 227
    },
    {
      "epoch": 1.1211562115621156,
      "grad_norm": 0.96484375,
      "learning_rate": 8.883269580778127e-06,
      "loss": 0.0933,
      "step": 228
    },
    {
      "epoch": 1.1260762607626076,
      "grad_norm": 1.2265625,
      "learning_rate": 8.873404973595284e-06,
      "loss": 0.1243,
      "step": 229
    },
    {
      "epoch": 1.1309963099630997,
      "grad_norm": 1.6796875,
      "learning_rate": 8.863502516910058e-06,
      "loss": 0.0959,
      "step": 230
    },
    {
      "epoch": 1.1359163591635917,
      "grad_norm": 1.0390625,
      "learning_rate": 8.853562307485649e-06,
      "loss": 0.1095,
      "step": 231
    },
    {
      "epoch": 1.1408364083640836,
      "grad_norm": 1.3984375,
      "learning_rate": 8.84358444245416e-06,
      "loss": 0.1128,
      "step": 232
    },
    {
      "epoch": 1.1457564575645756,
      "grad_norm": 1.046875,
      "learning_rate": 8.833569019315654e-06,
      "loss": 0.098,
      "step": 233
    },
    {
      "epoch": 1.1506765067650677,
      "grad_norm": 1.6328125,
      "learning_rate": 8.8235161359372e-06,
      "loss": 0.1001,
      "step": 234
    },
    {
      "epoch": 1.1555965559655597,
      "grad_norm": 1.1796875,
      "learning_rate": 8.81342589055191e-06,
      "loss": 0.1213,
      "step": 235
    },
    {
      "epoch": 1.1605166051660516,
      "grad_norm": 1.078125,
      "learning_rate": 8.803298381757987e-06,
      "loss": 0.0891,
      "step": 236
    },
    {
      "epoch": 1.1654366543665438,
      "grad_norm": 0.9375,
      "learning_rate": 8.793133708517759e-06,
      "loss": 0.0878,
      "step": 237
    },
    {
      "epoch": 1.1703567035670357,
      "grad_norm": 1.0625,
      "learning_rate": 8.782931970156708e-06,
      "loss": 0.0967,
      "step": 238
    },
    {
      "epoch": 1.1752767527675276,
      "grad_norm": 1.171875,
      "learning_rate": 8.772693266362504e-06,
      "loss": 0.0976,
      "step": 239
    },
    {
      "epoch": 1.1801968019680196,
      "grad_norm": 0.875,
      "learning_rate": 8.762417697184034e-06,
      "loss": 0.0737,
      "step": 240
    },
    {
      "epoch": 1.1851168511685117,
      "grad_norm": 1.109375,
      "learning_rate": 8.752105363030414e-06,
      "loss": 0.1093,
      "step": 241
    },
    {
      "epoch": 1.1900369003690037,
      "grad_norm": 1.0703125,
      "learning_rate": 8.741756364670018e-06,
      "loss": 0.0853,
      "step": 242
    },
    {
      "epoch": 1.1949569495694956,
      "grad_norm": 1.1640625,
      "learning_rate": 8.731370803229488e-06,
      "loss": 0.0941,
      "step": 243
    },
    {
      "epoch": 1.1998769987699878,
      "grad_norm": 1.125,
      "learning_rate": 8.720948780192747e-06,
      "loss": 0.0935,
      "step": 244
    },
    {
      "epoch": 1.2047970479704797,
      "grad_norm": 1.109375,
      "learning_rate": 8.710490397400007e-06,
      "loss": 0.1011,
      "step": 245
    },
    {
      "epoch": 1.2097170971709716,
      "grad_norm": 1.171875,
      "learning_rate": 8.699995757046773e-06,
      "loss": 0.1141,
      "step": 246
    },
    {
      "epoch": 1.2146371463714638,
      "grad_norm": 1.421875,
      "learning_rate": 8.689464961682853e-06,
      "loss": 0.1281,
      "step": 247
    },
    {
      "epoch": 1.2195571955719557,
      "grad_norm": 1.1953125,
      "learning_rate": 8.678898114211338e-06,
      "loss": 0.1005,
      "step": 248
    },
    {
      "epoch": 1.2244772447724477,
      "grad_norm": 1.1171875,
      "learning_rate": 8.668295317887615e-06,
      "loss": 0.1019,
      "step": 249
    },
    {
      "epoch": 1.2293972939729398,
      "grad_norm": 1.0703125,
      "learning_rate": 8.657656676318346e-06,
      "loss": 0.1101,
      "step": 250
    },
    {
      "epoch": 1.2343173431734318,
      "grad_norm": 1.2265625,
      "learning_rate": 8.646982293460461e-06,
      "loss": 0.1366,
      "step": 251
    },
    {
      "epoch": 1.2392373923739237,
      "grad_norm": 1.21875,
      "learning_rate": 8.63627227362014e-06,
      "loss": 0.0937,
      "step": 252
    },
    {
      "epoch": 1.2441574415744157,
      "grad_norm": 1.2265625,
      "learning_rate": 8.625526721451799e-06,
      "loss": 0.1273,
      "step": 253
    },
    {
      "epoch": 1.2490774907749078,
      "grad_norm": 1.125,
      "learning_rate": 8.614745741957054e-06,
      "loss": 0.097,
      "step": 254
    },
    {
      "epoch": 1.2539975399753998,
      "grad_norm": 1.4453125,
      "learning_rate": 8.603929440483714e-06,
      "loss": 0.1064,
      "step": 255
    },
    {
      "epoch": 1.2589175891758917,
      "grad_norm": 1.1953125,
      "learning_rate": 8.593077922724733e-06,
      "loss": 0.118,
      "step": 256
    },
    {
      "epoch": 1.2638376383763839,
      "grad_norm": 1.0625,
      "learning_rate": 8.582191294717192e-06,
      "loss": 0.0993,
      "step": 257
    },
    {
      "epoch": 1.2687576875768758,
      "grad_norm": 1.34375,
      "learning_rate": 8.571269662841253e-06,
      "loss": 0.0659,
      "step": 258
    },
    {
      "epoch": 1.2736777367773677,
      "grad_norm": 1.3828125,
      "learning_rate": 8.560313133819124e-06,
      "loss": 0.12,
      "step": 259
    },
    {
      "epoch": 1.2785977859778597,
      "grad_norm": 1.390625,
      "learning_rate": 8.549321814714018e-06,
      "loss": 0.0868,
      "step": 260
    },
    {
      "epoch": 1.2835178351783518,
      "grad_norm": 1.2734375,
      "learning_rate": 8.538295812929096e-06,
      "loss": 0.0902,
      "step": 261
    },
    {
      "epoch": 1.2884378843788438,
      "grad_norm": 1.09375,
      "learning_rate": 8.527235236206438e-06,
      "loss": 0.0966,
      "step": 262
    },
    {
      "epoch": 1.293357933579336,
      "grad_norm": 0.984375,
      "learning_rate": 8.516140192625962e-06,
      "loss": 0.0993,
      "step": 263
    },
    {
      "epoch": 1.2982779827798279,
      "grad_norm": 0.97265625,
      "learning_rate": 8.5050107906044e-06,
      "loss": 0.0865,
      "step": 264
    },
    {
      "epoch": 1.3031980319803198,
      "grad_norm": 1.1640625,
      "learning_rate": 8.49384713889421e-06,
      "loss": 0.089,
      "step": 265
    },
    {
      "epoch": 1.3081180811808117,
      "grad_norm": 1.046875,
      "learning_rate": 8.482649346582529e-06,
      "loss": 0.0956,
      "step": 266
    },
    {
      "epoch": 1.3130381303813037,
      "grad_norm": 1.1953125,
      "learning_rate": 8.471417523090109e-06,
      "loss": 0.0985,
      "step": 267
    },
    {
      "epoch": 1.3179581795817958,
      "grad_norm": 0.875,
      "learning_rate": 8.460151778170231e-06,
      "loss": 0.0789,
      "step": 268
    },
    {
      "epoch": 1.3228782287822878,
      "grad_norm": 1.3515625,
      "learning_rate": 8.448852221907657e-06,
      "loss": 0.0995,
      "step": 269
    },
    {
      "epoch": 1.32779827798278,
      "grad_norm": 1.1328125,
      "learning_rate": 8.43751896471753e-06,
      "loss": 0.1006,
      "step": 270
    },
    {
      "epoch": 1.3327183271832719,
      "grad_norm": 1.1328125,
      "learning_rate": 8.426152117344314e-06,
      "loss": 0.1119,
      "step": 271
    },
    {
      "epoch": 1.3376383763837638,
      "grad_norm": 1.3359375,
      "learning_rate": 8.414751790860697e-06,
      "loss": 0.1329,
      "step": 272
    },
    {
      "epoch": 1.3425584255842558,
      "grad_norm": 1.0234375,
      "learning_rate": 8.40331809666652e-06,
      "loss": 0.0863,
      "step": 273
    },
    {
      "epoch": 1.347478474784748,
      "grad_norm": 1.265625,
      "learning_rate": 8.391851146487675e-06,
      "loss": 0.1255,
      "step": 274
    },
    {
      "epoch": 1.3523985239852399,
      "grad_norm": 1.453125,
      "learning_rate": 8.380351052375023e-06,
      "loss": 0.1031,
      "step": 275
    },
    {
      "epoch": 1.3573185731857318,
      "grad_norm": 1.03125,
      "learning_rate": 8.368817926703296e-06,
      "loss": 0.0798,
      "step": 276
    },
    {
      "epoch": 1.362238622386224,
      "grad_norm": 1.28125,
      "learning_rate": 8.357251882169993e-06,
      "loss": 0.0923,
      "step": 277
    },
    {
      "epoch": 1.367158671586716,
      "grad_norm": 1.0859375,
      "learning_rate": 8.345653031794292e-06,
      "loss": 0.0831,
      "step": 278
    },
    {
      "epoch": 1.3720787207872078,
      "grad_norm": 1.09375,
      "learning_rate": 8.33402148891593e-06,
      "loss": 0.071,
      "step": 279
    },
    {
      "epoch": 1.3769987699876998,
      "grad_norm": 1.1015625,
      "learning_rate": 8.32235736719411e-06,
      "loss": 0.0697,
      "step": 280
    },
    {
      "epoch": 1.381918819188192,
      "grad_norm": 0.97265625,
      "learning_rate": 8.310660780606376e-06,
      "loss": 0.0704,
      "step": 281
    },
    {
      "epoch": 1.3868388683886839,
      "grad_norm": 1.34375,
      "learning_rate": 8.298931843447517e-06,
      "loss": 0.1128,
      "step": 282
    },
    {
      "epoch": 1.391758917589176,
      "grad_norm": 1.2421875,
      "learning_rate": 8.28717067032843e-06,
      "loss": 0.1112,
      "step": 283
    },
    {
      "epoch": 1.396678966789668,
      "grad_norm": 0.90625,
      "learning_rate": 8.275377376175015e-06,
      "loss": 0.0798,
      "step": 284
    },
    {
      "epoch": 1.40159901599016,
      "grad_norm": 1.1484375,
      "learning_rate": 8.263552076227048e-06,
      "loss": 0.1072,
      "step": 285
    },
    {
      "epoch": 1.4065190651906518,
      "grad_norm": 1.0390625,
      "learning_rate": 8.251694886037052e-06,
      "loss": 0.0953,
      "step": 286
    },
    {
      "epoch": 1.4114391143911438,
      "grad_norm": 1.21875,
      "learning_rate": 8.239805921469168e-06,
      "loss": 0.1145,
      "step": 287
    },
    {
      "epoch": 1.416359163591636,
      "grad_norm": 1.4453125,
      "learning_rate": 8.227885298698029e-06,
      "loss": 0.1047,
      "step": 288
    },
    {
      "epoch": 1.4212792127921279,
      "grad_norm": 1.046875,
      "learning_rate": 8.21593313420762e-06,
      "loss": 0.0905,
      "step": 289
    },
    {
      "epoch": 1.42619926199262,
      "grad_norm": 0.9609375,
      "learning_rate": 8.203949544790131e-06,
      "loss": 0.1013,
      "step": 290
    },
    {
      "epoch": 1.431119311193112,
      "grad_norm": 1.2109375,
      "learning_rate": 8.19193464754484e-06,
      "loss": 0.085,
      "step": 291
    },
    {
      "epoch": 1.436039360393604,
      "grad_norm": 1.2421875,
      "learning_rate": 8.179888559876943e-06,
      "loss": 0.1029,
      "step": 292
    },
    {
      "epoch": 1.4409594095940959,
      "grad_norm": 1.1796875,
      "learning_rate": 8.16781139949642e-06,
      "loss": 0.1262,
      "step": 293
    },
    {
      "epoch": 1.445879458794588,
      "grad_norm": 1.421875,
      "learning_rate": 8.155703284416884e-06,
      "loss": 0.125,
      "step": 294
    },
    {
      "epoch": 1.45079950799508,
      "grad_norm": 0.8984375,
      "learning_rate": 8.143564332954426e-06,
      "loss": 0.0796,
      "step": 295
    },
    {
      "epoch": 1.455719557195572,
      "grad_norm": 1.4453125,
      "learning_rate": 8.131394663726452e-06,
      "loss": 0.1024,
      "step": 296
    },
    {
      "epoch": 1.460639606396064,
      "grad_norm": 1.109375,
      "learning_rate": 8.119194395650545e-06,
      "loss": 0.0994,
      "step": 297
    },
    {
      "epoch": 1.465559655596556,
      "grad_norm": 1.15625,
      "learning_rate": 8.106963647943273e-06,
      "loss": 0.1104,
      "step": 298
    },
    {
      "epoch": 1.470479704797048,
      "grad_norm": 1.359375,
      "learning_rate": 8.09470254011905e-06,
      "loss": 0.1446,
      "step": 299
    },
    {
      "epoch": 1.4753997539975399,
      "grad_norm": 1.0078125,
      "learning_rate": 8.082411191988956e-06,
      "loss": 0.1061,
      "step": 300
    },
    {
      "epoch": 1.480319803198032,
      "grad_norm": 0.953125,
      "learning_rate": 8.070089723659567e-06,
      "loss": 0.074,
      "step": 301
    },
    {
      "epoch": 1.485239852398524,
      "grad_norm": 1.296875,
      "learning_rate": 8.057738255531781e-06,
      "loss": 0.0852,
      "step": 302
    },
    {
      "epoch": 1.4901599015990161,
      "grad_norm": 1.015625,
      "learning_rate": 8.045356908299647e-06,
      "loss": 0.091,
      "step": 303
    },
    {
      "epoch": 1.495079950799508,
      "grad_norm": 1.0,
      "learning_rate": 8.032945802949179e-06,
      "loss": 0.0849,
      "step": 304
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.98828125,
      "learning_rate": 8.02050506075718e-06,
      "loss": 0.1125,
      "step": 305
    },
    {
      "epoch": 1.504920049200492,
      "grad_norm": 0.8984375,
      "learning_rate": 8.008034803290045e-06,
      "loss": 0.086,
      "step": 306
    },
    {
      "epoch": 1.504920049200492,
      "eval_loss": 0.10929346084594727,
      "eval_runtime": 203.7138,
      "eval_samples_per_second": 8.031,
      "eval_steps_per_second": 1.34,
      "step": 306
    },
    {
      "epoch": 1.5098400984009839,
      "grad_norm": 1.0390625,
      "learning_rate": 7.995535152402592e-06,
      "loss": 0.1118,
      "step": 307
    },
    {
      "epoch": 1.514760147601476,
      "grad_norm": 1.203125,
      "learning_rate": 7.983006230236855e-06,
      "loss": 0.0959,
      "step": 308
    },
    {
      "epoch": 1.519680196801968,
      "grad_norm": 1.234375,
      "learning_rate": 7.970448159220897e-06,
      "loss": 0.0916,
      "step": 309
    },
    {
      "epoch": 1.5246002460024601,
      "grad_norm": 1.21875,
      "learning_rate": 7.957861062067614e-06,
      "loss": 0.1117,
      "step": 310
    },
    {
      "epoch": 1.529520295202952,
      "grad_norm": 0.87109375,
      "learning_rate": 7.945245061773531e-06,
      "loss": 0.073,
      "step": 311
    },
    {
      "epoch": 1.534440344403444,
      "grad_norm": 0.96875,
      "learning_rate": 7.93260028161761e-06,
      "loss": 0.072,
      "step": 312
    },
    {
      "epoch": 1.539360393603936,
      "grad_norm": 1.046875,
      "learning_rate": 7.919926845160037e-06,
      "loss": 0.1082,
      "step": 313
    },
    {
      "epoch": 1.5442804428044279,
      "grad_norm": 0.9375,
      "learning_rate": 7.907224876241015e-06,
      "loss": 0.0828,
      "step": 314
    },
    {
      "epoch": 1.54920049200492,
      "grad_norm": 0.9296875,
      "learning_rate": 7.894494498979558e-06,
      "loss": 0.0924,
      "step": 315
    },
    {
      "epoch": 1.5541205412054122,
      "grad_norm": 1.203125,
      "learning_rate": 7.881735837772274e-06,
      "loss": 0.1099,
      "step": 316
    },
    {
      "epoch": 1.5590405904059041,
      "grad_norm": 0.984375,
      "learning_rate": 7.868949017292153e-06,
      "loss": 0.0909,
      "step": 317
    },
    {
      "epoch": 1.563960639606396,
      "grad_norm": 1.09375,
      "learning_rate": 7.856134162487346e-06,
      "loss": 0.0986,
      "step": 318
    },
    {
      "epoch": 1.568880688806888,
      "grad_norm": 0.94140625,
      "learning_rate": 7.843291398579946e-06,
      "loss": 0.1045,
      "step": 319
    },
    {
      "epoch": 1.57380073800738,
      "grad_norm": 1.0234375,
      "learning_rate": 7.830420851064767e-06,
      "loss": 0.0971,
      "step": 320
    },
    {
      "epoch": 1.5787207872078721,
      "grad_norm": 1.03125,
      "learning_rate": 7.817522645708106e-06,
      "loss": 0.0974,
      "step": 321
    },
    {
      "epoch": 1.583640836408364,
      "grad_norm": 1.015625,
      "learning_rate": 7.804596908546529e-06,
      "loss": 0.1178,
      "step": 322
    },
    {
      "epoch": 1.5885608856088562,
      "grad_norm": 1.1015625,
      "learning_rate": 7.791643765885632e-06,
      "loss": 0.093,
      "step": 323
    },
    {
      "epoch": 1.5934809348093482,
      "grad_norm": 1.015625,
      "learning_rate": 7.778663344298804e-06,
      "loss": 0.0798,
      "step": 324
    },
    {
      "epoch": 1.59840098400984,
      "grad_norm": 0.8828125,
      "learning_rate": 7.765655770625997e-06,
      "loss": 0.0991,
      "step": 325
    },
    {
      "epoch": 1.603321033210332,
      "grad_norm": 0.93359375,
      "learning_rate": 7.752621171972486e-06,
      "loss": 0.0673,
      "step": 326
    },
    {
      "epoch": 1.608241082410824,
      "grad_norm": 0.94140625,
      "learning_rate": 7.739559675707615e-06,
      "loss": 0.094,
      "step": 327
    },
    {
      "epoch": 1.6131611316113161,
      "grad_norm": 1.15625,
      "learning_rate": 7.726471409463572e-06,
      "loss": 0.1286,
      "step": 328
    },
    {
      "epoch": 1.618081180811808,
      "grad_norm": 1.3125,
      "learning_rate": 7.713356501134124e-06,
      "loss": 0.0938,
      "step": 329
    },
    {
      "epoch": 1.6230012300123002,
      "grad_norm": 1.203125,
      "learning_rate": 7.70021507887338e-06,
      "loss": 0.1141,
      "step": 330
    },
    {
      "epoch": 1.6279212792127922,
      "grad_norm": 0.71875,
      "learning_rate": 7.687047271094528e-06,
      "loss": 0.0708,
      "step": 331
    },
    {
      "epoch": 1.632841328413284,
      "grad_norm": 0.96875,
      "learning_rate": 7.673853206468593e-06,
      "loss": 0.0816,
      "step": 332
    },
    {
      "epoch": 1.637761377613776,
      "grad_norm": 1.3515625,
      "learning_rate": 7.660633013923164e-06,
      "loss": 0.1224,
      "step": 333
    },
    {
      "epoch": 1.642681426814268,
      "grad_norm": 1.265625,
      "learning_rate": 7.64738682264115e-06,
      "loss": 0.1012,
      "step": 334
    },
    {
      "epoch": 1.6476014760147601,
      "grad_norm": 0.8984375,
      "learning_rate": 7.634114762059504e-06,
      "loss": 0.103,
      "step": 335
    },
    {
      "epoch": 1.6525215252152523,
      "grad_norm": 0.9375,
      "learning_rate": 7.62081696186797e-06,
      "loss": 0.0773,
      "step": 336
    },
    {
      "epoch": 1.6574415744157442,
      "grad_norm": 0.98828125,
      "learning_rate": 7.607493552007805e-06,
      "loss": 0.1176,
      "step": 337
    },
    {
      "epoch": 1.6623616236162362,
      "grad_norm": 0.9765625,
      "learning_rate": 7.5941446626705175e-06,
      "loss": 0.0996,
      "step": 338
    },
    {
      "epoch": 1.6672816728167281,
      "grad_norm": 0.96484375,
      "learning_rate": 7.580770424296591e-06,
      "loss": 0.1001,
      "step": 339
    },
    {
      "epoch": 1.67220172201722,
      "grad_norm": 0.8359375,
      "learning_rate": 7.56737096757421e-06,
      "loss": 0.1052,
      "step": 340
    },
    {
      "epoch": 1.6771217712177122,
      "grad_norm": 1.28125,
      "learning_rate": 7.553946423437988e-06,
      "loss": 0.1104,
      "step": 341
    },
    {
      "epoch": 1.6820418204182042,
      "grad_norm": 1.078125,
      "learning_rate": 7.540496923067675e-06,
      "loss": 0.1092,
      "step": 342
    },
    {
      "epoch": 1.6869618696186963,
      "grad_norm": 1.1640625,
      "learning_rate": 7.527022597886895e-06,
      "loss": 0.0994,
      "step": 343
    },
    {
      "epoch": 1.6918819188191883,
      "grad_norm": 1.484375,
      "learning_rate": 7.513523579561839e-06,
      "loss": 0.0787,
      "step": 344
    },
    {
      "epoch": 1.6968019680196802,
      "grad_norm": 1.125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.0986,
      "step": 345
    },
    {
      "epoch": 1.7017220172201721,
      "grad_norm": 1.0546875,
      "learning_rate": 7.486451991348872e-06,
      "loss": 0.0806,
      "step": 346
    },
    {
      "epoch": 1.706642066420664,
      "grad_norm": 1.2890625,
      "learning_rate": 7.472879685994658e-06,
      "loss": 0.1059,
      "step": 347
    },
    {
      "epoch": 1.7115621156211562,
      "grad_norm": 1.0234375,
      "learning_rate": 7.459283216560982e-06,
      "loss": 0.0947,
      "step": 348
    },
    {
      "epoch": 1.7164821648216482,
      "grad_norm": 1.015625,
      "learning_rate": 7.445662715907591e-06,
      "loss": 0.0897,
      "step": 349
    },
    {
      "epoch": 1.7214022140221403,
      "grad_norm": 1.046875,
      "learning_rate": 7.432018317129056e-06,
      "loss": 0.1083,
      "step": 350
    },
    {
      "epoch": 1.7263222632226323,
      "grad_norm": 0.8984375,
      "learning_rate": 7.418350153553471e-06,
      "loss": 0.07,
      "step": 351
    },
    {
      "epoch": 1.7312423124231242,
      "grad_norm": 1.0703125,
      "learning_rate": 7.40465835874115e-06,
      "loss": 0.0942,
      "step": 352
    },
    {
      "epoch": 1.7361623616236161,
      "grad_norm": 0.9765625,
      "learning_rate": 7.390943066483327e-06,
      "loss": 0.0881,
      "step": 353
    },
    {
      "epoch": 1.741082410824108,
      "grad_norm": 1.046875,
      "learning_rate": 7.377204410800839e-06,
      "loss": 0.1029,
      "step": 354
    },
    {
      "epoch": 1.7460024600246002,
      "grad_norm": 0.96484375,
      "learning_rate": 7.363442525942827e-06,
      "loss": 0.093,
      "step": 355
    },
    {
      "epoch": 1.7509225092250924,
      "grad_norm": 1.1328125,
      "learning_rate": 7.349657546385414e-06,
      "loss": 0.0857,
      "step": 356
    },
    {
      "epoch": 1.7558425584255843,
      "grad_norm": 0.9375,
      "learning_rate": 7.335849606830402e-06,
      "loss": 0.1016,
      "step": 357
    },
    {
      "epoch": 1.7607626076260763,
      "grad_norm": 1.015625,
      "learning_rate": 7.322018842203942e-06,
      "loss": 0.1107,
      "step": 358
    },
    {
      "epoch": 1.7656826568265682,
      "grad_norm": 1.15625,
      "learning_rate": 7.308165387655231e-06,
      "loss": 0.1411,
      "step": 359
    },
    {
      "epoch": 1.7706027060270602,
      "grad_norm": 0.89453125,
      "learning_rate": 7.294289378555179e-06,
      "loss": 0.1037,
      "step": 360
    },
    {
      "epoch": 1.7755227552275523,
      "grad_norm": 0.87109375,
      "learning_rate": 7.2803909504950935e-06,
      "loss": 0.0838,
      "step": 361
    },
    {
      "epoch": 1.7804428044280443,
      "grad_norm": 0.99609375,
      "learning_rate": 7.266470239285347e-06,
      "loss": 0.0947,
      "step": 362
    },
    {
      "epoch": 1.7853628536285364,
      "grad_norm": 1.0625,
      "learning_rate": 7.252527380954062e-06,
      "loss": 0.0904,
      "step": 363
    },
    {
      "epoch": 1.7902829028290284,
      "grad_norm": 1.1640625,
      "learning_rate": 7.238562511745768e-06,
      "loss": 0.0877,
      "step": 364
    },
    {
      "epoch": 1.7952029520295203,
      "grad_norm": 1.015625,
      "learning_rate": 7.2245757681200835e-06,
      "loss": 0.0871,
      "step": 365
    },
    {
      "epoch": 1.8001230012300122,
      "grad_norm": 1.09375,
      "learning_rate": 7.210567286750368e-06,
      "loss": 0.0898,
      "step": 366
    },
    {
      "epoch": 1.8050430504305042,
      "grad_norm": 1.140625,
      "learning_rate": 7.196537204522401e-06,
      "loss": 0.0826,
      "step": 367
    },
    {
      "epoch": 1.8099630996309963,
      "grad_norm": 1.109375,
      "learning_rate": 7.182485658533036e-06,
      "loss": 0.0991,
      "step": 368
    },
    {
      "epoch": 1.8148831488314883,
      "grad_norm": 1.0078125,
      "learning_rate": 7.168412786088857e-06,
      "loss": 0.0775,
      "step": 369
    },
    {
      "epoch": 1.8198031980319804,
      "grad_norm": 0.8984375,
      "learning_rate": 7.1543187247048525e-06,
      "loss": 0.0915,
      "step": 370
    },
    {
      "epoch": 1.8247232472324724,
      "grad_norm": 1.109375,
      "learning_rate": 7.140203612103052e-06,
      "loss": 0.0983,
      "step": 371
    },
    {
      "epoch": 1.8296432964329643,
      "grad_norm": 0.89453125,
      "learning_rate": 7.1260675862111986e-06,
      "loss": 0.086,
      "step": 372
    },
    {
      "epoch": 1.8345633456334562,
      "grad_norm": 1.34375,
      "learning_rate": 7.111910785161381e-06,
      "loss": 0.1281,
      "step": 373
    },
    {
      "epoch": 1.8394833948339482,
      "grad_norm": 1.3203125,
      "learning_rate": 7.0977333472887076e-06,
      "loss": 0.0902,
      "step": 374
    },
    {
      "epoch": 1.8444034440344403,
      "grad_norm": 1.234375,
      "learning_rate": 7.083535411129934e-06,
      "loss": 0.0996,
      "step": 375
    },
    {
      "epoch": 1.8493234932349325,
      "grad_norm": 1.09375,
      "learning_rate": 7.069317115422121e-06,
      "loss": 0.0887,
      "step": 376
    },
    {
      "epoch": 1.8542435424354244,
      "grad_norm": 1.15625,
      "learning_rate": 7.055078599101275e-06,
      "loss": 0.1046,
      "step": 377
    },
    {
      "epoch": 1.8591635916359164,
      "grad_norm": 1.09375,
      "learning_rate": 7.040820001300992e-06,
      "loss": 0.1243,
      "step": 378
    },
    {
      "epoch": 1.8640836408364083,
      "grad_norm": 1.1328125,
      "learning_rate": 7.026541461351092e-06,
      "loss": 0.0664,
      "step": 379
    },
    {
      "epoch": 1.8690036900369003,
      "grad_norm": 1.0859375,
      "learning_rate": 7.01224311877627e-06,
      "loss": 0.1314,
      "step": 380
    },
    {
      "epoch": 1.8739237392373924,
      "grad_norm": 0.82421875,
      "learning_rate": 6.997925113294717e-06,
      "loss": 0.0811,
      "step": 381
    },
    {
      "epoch": 1.8788437884378844,
      "grad_norm": 0.9921875,
      "learning_rate": 6.983587584816769e-06,
      "loss": 0.0776,
      "step": 382
    },
    {
      "epoch": 1.8837638376383765,
      "grad_norm": 1.359375,
      "learning_rate": 6.969230673443531e-06,
      "loss": 0.116,
      "step": 383
    },
    {
      "epoch": 1.8886838868388685,
      "grad_norm": 0.9765625,
      "learning_rate": 6.9548545194655115e-06,
      "loss": 0.0983,
      "step": 384
    },
    {
      "epoch": 1.8936039360393604,
      "grad_norm": 1.28125,
      "learning_rate": 6.9404592633612486e-06,
      "loss": 0.1323,
      "step": 385
    },
    {
      "epoch": 1.8985239852398523,
      "grad_norm": 1.328125,
      "learning_rate": 6.926045045795944e-06,
      "loss": 0.1106,
      "step": 386
    },
    {
      "epoch": 1.9034440344403443,
      "grad_norm": 1.3046875,
      "learning_rate": 6.911612007620077e-06,
      "loss": 0.0884,
      "step": 387
    },
    {
      "epoch": 1.9083640836408364,
      "grad_norm": 0.953125,
      "learning_rate": 6.897160289868042e-06,
      "loss": 0.101,
      "step": 388
    },
    {
      "epoch": 1.9132841328413284,
      "grad_norm": 0.81640625,
      "learning_rate": 6.8826900337567595e-06,
      "loss": 0.0907,
      "step": 389
    },
    {
      "epoch": 1.9182041820418205,
      "grad_norm": 1.0078125,
      "learning_rate": 6.8682013806842985e-06,
      "loss": 0.083,
      "step": 390
    },
    {
      "epoch": 1.9231242312423125,
      "grad_norm": 0.9765625,
|
"learning_rate": 6.853694472228504e-06, |
|
"loss": 0.0821, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.9280442804428044, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 6.839169450145595e-06, |
|
"loss": 0.0605, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.9329643296432963, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.824626456368802e-06, |
|
"loss": 0.1001, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.9378843788437883, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 6.810065633006956e-06, |
|
"loss": 0.0936, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.9428044280442804, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 6.795487122343124e-06, |
|
"loss": 0.0866, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.9477244772447726, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 6.7808910668331985e-06, |
|
"loss": 0.1078, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.9526445264452645, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 6.766277609104518e-06, |
|
"loss": 0.0854, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.9575645756457565, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.751646891954466e-06, |
|
"loss": 0.0848, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.9624846248462484, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.736999058349085e-06, |
|
"loss": 0.0861, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.9674046740467404, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.722334251421665e-06, |
|
"loss": 0.108, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.9723247232472325, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 6.707652614471359e-06, |
|
"loss": 0.0691, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.9772447724477245, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 6.692954290961774e-06, |
|
"loss": 0.1234, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.9821648216482166, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.678239424519575e-06, |
|
"loss": 0.0785, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.9870848708487086, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.6635081589330745e-06, |
|
"loss": 0.096, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.9920049200492005, |
|
"grad_norm": 1.25, |
|
"learning_rate": 6.648760638150833e-06, |
|
"loss": 0.1109, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.9969249692496924, |
|
"grad_norm": 1.125, |
|
"learning_rate": 6.6339970062802526e-06, |
|
"loss": 0.0892, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.0030750307503076, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 6.619217407586167e-06, |
|
"loss": 0.065, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.0079950799507995, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 6.604421986489428e-06, |
|
"loss": 0.1044, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.0079950799507995, |
|
"eval_loss": 0.10599377751350403, |
|
"eval_runtime": 203.102, |
|
"eval_samples_per_second": 8.055, |
|
"eval_steps_per_second": 1.344, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.0129151291512914, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.589610887565503e-06, |
|
"loss": 0.0784, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.0178351783517834, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.574784255543052e-06, |
|
"loss": 0.0788, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.0227552275522753, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 6.559942235302527e-06, |
|
"loss": 0.0876, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.0276752767527677, |
|
"grad_norm": 1.125, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 0.0702, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.0325953259532596, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 6.530212610439455e-06, |
|
"loss": 0.1014, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.0375153751537516, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 6.5153252963239736e-06, |
|
"loss": 0.0846, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.0424354243542435, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.500423175001705e-06, |
|
"loss": 0.1422, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.0473554735547355, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 6.485506392090749e-06, |
|
"loss": 0.1003, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.0522755227552274, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.470575093352477e-06, |
|
"loss": 0.1012, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.0571955719557193, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 6.4556294246900994e-06, |
|
"loss": 0.0778, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.0621156211562117, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 6.440669532147246e-06, |
|
"loss": 0.0975, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.0670356703567037, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.4256955619065375e-06, |
|
"loss": 0.0772, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.0719557195571956, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 6.410707660288154e-06, |
|
"loss": 0.1044, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.0768757687576875, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 6.395705973748414e-06, |
|
"loss": 0.0868, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.0817958179581795, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 6.3806906488783305e-06, |
|
"loss": 0.1051, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.0867158671586714, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 6.36566183240219e-06, |
|
"loss": 0.0572, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.091635916359164, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 6.350619671176111e-06, |
|
"loss": 0.1055, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.0965559655596557, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.335564312186615e-06, |
|
"loss": 0.0956, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.1014760147601477, |
|
"grad_norm": 1.75, |
|
"learning_rate": 6.3204959025491844e-06, |
|
"loss": 0.0727, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.1063960639606396, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 6.30541458950683e-06, |
|
"loss": 0.1014, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.1113161131611315, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 6.2903205204286474e-06, |
|
"loss": 0.0968, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.1162361623616235, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 6.275213842808383e-06, |
|
"loss": 0.0856, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.1211562115621154, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 6.260094704262986e-06, |
|
"loss": 0.0773, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.126076260762608, |
|
"grad_norm": 1.125, |
|
"learning_rate": 6.244963252531171e-06, |
|
"loss": 0.0802, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.1309963099630997, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 6.229819635471972e-06, |
|
"loss": 0.0809, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.1359163591635917, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 6.2146640010633e-06, |
|
"loss": 0.0833, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.1408364083640836, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.19949649740049e-06, |
|
"loss": 0.0784, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.1457564575645756, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.184317272694866e-06, |
|
"loss": 0.0885, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.1506765067650675, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 6.16912647527228e-06, |
|
"loss": 0.0763, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.15559655596556, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 6.1539242535716704e-06, |
|
"loss": 0.0805, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.160516605166052, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 6.138710756143613e-06, |
|
"loss": 0.1051, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.1654366543665438, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.123486131648859e-06, |
|
"loss": 0.0741, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.1703567035670357, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 6.108250528856895e-06, |
|
"loss": 0.0725, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.1752767527675276, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 6.0930040966444815e-06, |
|
"loss": 0.0957, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.1801968019680196, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.077746983994198e-06, |
|
"loss": 0.1151, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.1851168511685115, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.062479339992993e-06, |
|
"loss": 0.0939, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.190036900369004, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.047201313830724e-06, |
|
"loss": 0.0944, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.194956949569496, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 6.031913054798692e-06, |
|
"loss": 0.0825, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.1998769987699878, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.016614712288198e-06, |
|
"loss": 0.0732, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.2047970479704797, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 6.0013064357890715e-06, |
|
"loss": 0.0749, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.2097170971709716, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.985988374888216e-06, |
|
"loss": 0.0935, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.2146371463714636, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.970660679268139e-06, |
|
"loss": 0.0762, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.2195571955719555, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.955323498705501e-06, |
|
"loss": 0.0929, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.224477244772448, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.9399769830696404e-06, |
|
"loss": 0.1067, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.22939729397294, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.924621282321123e-06, |
|
"loss": 0.0736, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.234317343173432, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.909256546510257e-06, |
|
"loss": 0.0699, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.2392373923739237, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.893882925775648e-06, |
|
"loss": 0.0792, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.2441574415744157, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 5.878500570342714e-06, |
|
"loss": 0.0874, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.2490774907749076, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 5.86310963052223e-06, |
|
"loss": 0.1197, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.2539975399753995, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 5.847710256708854e-06, |
|
"loss": 0.1147, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.258917589175892, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.832302599379657e-06, |
|
"loss": 0.0647, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.263837638376384, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 5.816886809092651e-06, |
|
"loss": 0.0794, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.268757687576876, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 5.8014630364853275e-06, |
|
"loss": 0.0787, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.2736777367773677, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.7860314322731705e-06, |
|
"loss": 0.1395, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.2785977859778597, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 5.770592147248197e-06, |
|
"loss": 0.083, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.2835178351783516, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.755145332277472e-06, |
|
"loss": 0.1035, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.288437884378844, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 5.73969113830165e-06, |
|
"loss": 0.1229, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.293357933579336, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.72422971633348e-06, |
|
"loss": 0.0847, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.298277982779828, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 5.70876121745635e-06, |
|
"loss": 0.0944, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.30319803198032, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 5.69328579282279e-06, |
|
"loss": 0.0729, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.3081180811808117, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 5.677803593653018e-06, |
|
"loss": 0.076, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.3130381303813037, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.66231477123344e-06, |
|
"loss": 0.0779, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.3179581795817956, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 5.646819476915189e-06, |
|
"loss": 0.0596, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.322878228782288, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 5.631317862112636e-06, |
|
"loss": 0.1077, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.32779827798278, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 5.615810078301912e-06, |
|
"loss": 0.1024, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.332718327183272, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.600296277019434e-06, |
|
"loss": 0.1357, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.337638376383764, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 5.584776609860414e-06, |
|
"loss": 0.0891, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.3425584255842558, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 5.569251228477386e-06, |
|
"loss": 0.0744, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.3474784747847477, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.553720284578723e-06, |
|
"loss": 0.0669, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.35239852398524, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.538183929927152e-06, |
|
"loss": 0.0819, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.357318573185732, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 5.522642316338268e-06, |
|
"loss": 0.0794, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.362238622386224, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.507095595679059e-06, |
|
"loss": 0.0887, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.367158671586716, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.491543919866417e-06, |
|
"loss": 0.0863, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.372078720787208, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 5.47598744086565e-06, |
|
"loss": 0.0844, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.3769987699876998, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.460426310689006e-06, |
|
"loss": 0.0712, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.3819188191881917, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.4448606813941805e-06, |
|
"loss": 0.0768, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.3868388683886836, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 5.42929070508283e-06, |
|
"loss": 0.079, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.391758917589176, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.413716533899096e-06, |
|
"loss": 0.0853, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.396678966789668, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 5.3981383200281004e-06, |
|
"loss": 0.1153, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.40159901599016, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 5.382556215694478e-06, |
|
"loss": 0.0533, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.406519065190652, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 5.366970373160873e-06, |
|
"loss": 0.0905, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.411439114391144, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.351380944726465e-06, |
|
"loss": 0.0787, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.416359163591636, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 5.335788082725467e-06, |
|
"loss": 0.0781, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.421279212792128, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.3201919395256475e-06, |
|
"loss": 0.0689, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.42619926199262, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 5.304592667526835e-06, |
|
"loss": 0.0849, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.431119311193112, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 5.288990419159433e-06, |
|
"loss": 0.1141, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.436039360393604, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 5.2733853468829295e-06, |
|
"loss": 0.0842, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.440959409594096, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 5.257777603184408e-06, |
|
"loss": 0.0599, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.445879458794588, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 5.24216734057705e-06, |
|
"loss": 0.0906, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.4507995079950797, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 5.226554711598659e-06, |
|
"loss": 0.0934, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.455719557195572, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 5.210939868810156e-06, |
|
"loss": 0.0783, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.460639606396064, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 5.195322964794098e-06, |
|
"loss": 0.0944, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.465559655596556, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.1797041521531795e-06, |
|
"loss": 0.0831, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.470479704797048, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.16408358350875e-06, |
|
"loss": 0.1166, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.47539975399754, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.1484614114993156e-06, |
|
"loss": 0.089, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.480319803198032, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.132837788779049e-06, |
|
"loss": 0.1219, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.485239852398524, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.117212868016303e-06, |
|
"loss": 0.0802, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.490159901599016, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.101586801892109e-06, |
|
"loss": 0.0862, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.495079950799508, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.085959743098693e-06, |
|
"loss": 0.0834, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 1.125, |
|
"learning_rate": 5.07033184433798e-06, |
|
"loss": 0.0764, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.504920049200492, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 5.054703258320107e-06, |
|
"loss": 0.0789, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.509840098400984, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.03907413776192e-06, |
|
"loss": 0.0909, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.509840098400984, |
|
"eval_loss": 0.10507026314735413, |
|
"eval_runtime": 204.2755, |
|
"eval_samples_per_second": 8.009, |
|
"eval_steps_per_second": 1.336, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.514760147601476, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 5.0234446353854934e-06, |
|
"loss": 0.0751, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.5196801968019678, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 5.00781490391663e-06, |
|
"loss": 0.1011, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.52460024600246, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.992185096083372e-06, |
|
"loss": 0.1007, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.529520295202952, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 4.976555364614509e-06, |
|
"loss": 0.0879, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.534440344403444, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.96092586223808e-06, |
|
"loss": 0.0806, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.539360393603936, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 4.9452967416798945e-06, |
|
"loss": 0.0916, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.544280442804428, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.929668155662021e-06, |
|
"loss": 0.0825, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.5492004920049203, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.914040256901309e-06, |
|
"loss": 0.0761, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.554120541205412, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 4.898413198107892e-06, |
|
"loss": 0.0657, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.559040590405904, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.882787131983698e-06, |
|
"loss": 0.1164, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.563960639606396, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 4.867162211220952e-06, |
|
"loss": 0.0794, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.568880688806888, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 4.851538588500687e-06, |
|
"loss": 0.0943, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.57380073800738, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.835916416491251e-06, |
|
"loss": 0.0911, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.578720787207872, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 4.820295847846822e-06, |
|
"loss": 0.0579, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.583640836408364, |
|
"grad_norm": 1.125, |
|
"learning_rate": 4.804677035205903e-06, |
|
"loss": 0.0955, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.588560885608856, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.789060131189845e-06, |
|
"loss": 0.0879, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.593480934809348, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 4.773445288401343e-06, |
|
"loss": 0.089, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.59840098400984, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 4.75783265942295e-06, |
|
"loss": 0.0871, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.603321033210332, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 4.742222396815593e-06, |
|
"loss": 0.1021, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.608241082410824, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 4.726614653117071e-06, |
|
"loss": 0.0909, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.6131611316113164, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.711009580840569e-06, |
|
"loss": 0.1082, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.6180811808118083, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.695407332473166e-06, |
|
"loss": 0.083, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.6230012300123002, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 4.679808060474354e-06, |
|
"loss": 0.0745, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.627921279212792, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 4.6642119172745345e-06, |
|
"loss": 0.0776, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.632841328413284, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 4.6486190552735375e-06, |
|
"loss": 0.0676, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.637761377613776, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 4.633029626839128e-06, |
|
"loss": 0.0794, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.642681426814268, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 4.617443784305524e-06, |
|
"loss": 0.1136, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.64760147601476, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 4.601861679971901e-06, |
|
"loss": 0.0747, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.6525215252152523, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.5862834661009074e-06, |
|
"loss": 0.0982, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.6574415744157442, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 4.57070929491717e-06, |
|
"loss": 0.0788, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.662361623616236, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.555139318605821e-06, |
|
"loss": 0.0857, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.667281672816728, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 4.539573689310995e-06, |
|
"loss": 0.09, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.67220172201722, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.524012559134352e-06, |
|
"loss": 0.0934, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.6771217712177124, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 4.508456080133584e-06, |
|
"loss": 0.0742, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.6820418204182044, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 4.492904404320942e-06, |
|
"loss": 0.0677, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.6869618696186963, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 4.477357683661734e-06, |
|
"loss": 0.072, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.6918819188191883, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.461816070072851e-06, |
|
"loss": 0.0948, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.69680196801968, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 4.446279715421277e-06, |
|
"loss": 0.0846, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.701722017220172, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.430748771522615e-06, |
|
"loss": 0.0952, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.706642066420664, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 4.415223390139588e-06, |
|
"loss": 0.0797, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.711562115621156, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 4.399703722980569e-06, |
|
"loss": 0.0822, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.716482164821648, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.3841899216980895e-06, |
|
"loss": 0.1041, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.7214022140221403, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.368682137887365e-06, |
|
"loss": 0.0884, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.7263222632226323, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 4.3531805230848116e-06, |
|
"loss": 0.0799, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.731242312423124, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.337685228766561e-06, |
|
"loss": 0.1055, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.736162361623616, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 4.322196406346984e-06, |
|
"loss": 0.1206, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.741082410824108, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.30671420717721e-06, |
|
"loss": 0.085, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.7460024600246005, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.291238782543652e-06, |
|
"loss": 0.1146, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.7509225092250924, |
|
"grad_norm": 1.125, |
|
"learning_rate": 4.275770283666521e-06, |
|
"loss": 0.0742, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.7558425584255843, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.260308861698351e-06, |
|
"loss": 0.091, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.7607626076260763, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 4.244854667722527e-06, |
|
"loss": 0.0741, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.765682656826568, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 4.229407852751806e-06, |
|
"loss": 0.0738, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.77060270602706, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.21396856772683e-06, |
|
"loss": 0.0999, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.775522755227552, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 4.198536963514674e-06, |
|
"loss": 0.0619, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.780442804428044, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 4.183113190907349e-06, |
|
"loss": 0.0841, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.7853628536285364, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 4.1676974006203456e-06, |
|
"loss": 0.0738, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.7902829028290284, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.152289743291148e-06, |
|
"loss": 0.1018, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.7952029520295203, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 4.136890369477773e-06, |
|
"loss": 0.0664, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.8001230012300122, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 4.121499429657287e-06, |
|
"loss": 0.0769, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.805043050430504, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 4.106117074224354e-06, |
|
"loss": 0.0779, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.8099630996309966, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.090743453489744e-06, |
|
"loss": 0.0958, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.8148831488314885, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.07537871767888e-06, |
|
"loss": 0.1121, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.8198031980319804, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.060023016930359e-06, |
|
"loss": 0.1435, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.8247232472324724, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.044676501294501e-06, |
|
"loss": 0.1366, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.8296432964329643, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 4.029339320731862e-06, |
|
"loss": 0.0755, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.8345633456334562, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 4.0140116251117865e-06, |
|
"loss": 0.0837, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.839483394833948, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.998693564210929e-06, |
|
"loss": 0.1097, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.84440344403444, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 3.983385287711803e-06, |
|
"loss": 0.0652, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.8493234932349325, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 3.96808694520131e-06, |
|
"loss": 0.0785, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.8542435424354244, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.952798686169279e-06, |
|
"loss": 0.0947, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.8591635916359164, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.937520660007008e-06, |
|
"loss": 0.0803, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.8640836408364083, |
|
"grad_norm": 0.8203125, |
|
"learning_rate": 3.9222530160058025e-06, |
|
"loss": 0.0664, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.8690036900369003, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 3.90699590335552e-06, |
|
"loss": 0.0737, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.8739237392373926, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 3.891749471143106e-06, |
|
"loss": 0.1152, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.8788437884378846, |
|
"grad_norm": 1.0, |
|
"learning_rate": 3.876513868351142e-06, |
|
"loss": 0.0932, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.8837638376383765, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 3.861289243856388e-06, |
|
"loss": 0.1052, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.8886838868388685, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.84607574642833e-06, |
|
"loss": 0.1152, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.8936039360393604, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.830873524727722e-06, |
|
"loss": 0.083, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.8985239852398523, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.815682727305136e-06, |
|
"loss": 0.0824, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.9034440344403443, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 3.800503502599511e-06, |
|
"loss": 0.0933, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.908364083640836, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 3.7853359989367023e-06, |
|
"loss": 0.0831, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.913284132841328, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 3.7701803645280296e-06, |
|
"loss": 0.0954, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.9182041820418205, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 3.7550367474688315e-06, |
|
"loss": 0.0829, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.9231242312423125, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 3.739905295737015e-06, |
|
"loss": 0.0905, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.9280442804428044, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.7247861571916183e-06, |
|
"loss": 0.0838, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.9329643296432963, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.7096794795713542e-06, |
|
"loss": 0.1098, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.9378843788437883, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 3.6945854104931726e-06, |
|
"loss": 0.0776, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.9428044280442807, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.6795040974508164e-06, |
|
"loss": 0.0694, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.9477244772447726, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 3.6644356878133862e-06, |
|
"loss": 0.0996, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.9526445264452645, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.6493803288238894e-06, |
|
"loss": 0.0801, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.9575645756457565, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.634338167597812e-06, |
|
"loss": 0.0904, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.9624846248462484, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.61930935112167e-06, |
|
"loss": 0.0676, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.9674046740467404, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.6042940262515867e-06, |
|
"loss": 0.0985, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.9723247232472323, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 3.5892923397118473e-06, |
|
"loss": 0.0816, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.9772447724477242, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 3.5743044380934655e-06, |
|
"loss": 0.076, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.9821648216482166, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 3.5593304678527547e-06, |
|
"loss": 0.0932, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.9870848708487086, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 3.544370575309902e-06, |
|
"loss": 0.0982, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.9920049200492005, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 3.5294249066475245e-06, |
|
"loss": 0.0997, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.9969249692496924, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.5144936079092528e-06, |
|
"loss": 0.0907, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 3.002460024600246, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 3.4995768249982975e-06, |
|
"loss": 0.1999, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.007380073800738, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 3.4846747036760285e-06, |
|
"loss": 0.0815, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 3.01230012300123, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 3.4697873895605466e-06, |
|
"loss": 0.0754, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 3.01230012300123, |
|
"eval_loss": 0.1035788357257843, |
|
"eval_runtime": 204.6775, |
|
"eval_samples_per_second": 7.993, |
|
"eval_steps_per_second": 1.334, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 3.0172201722017222, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 0.1022, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 3.022140221402214, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 3.4400577646974766e-06, |
|
"loss": 0.074, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 3.027060270602706, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 3.4252157444569478e-06, |
|
"loss": 0.0619, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 3.031980319803198, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.410389112434499e-06, |
|
"loss": 0.0887, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 3.03690036900369, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 3.3955780135105736e-06, |
|
"loss": 0.0831, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 3.041820418204182, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 3.3807825924138356e-06, |
|
"loss": 0.1026, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 3.046740467404674, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.366002993719747e-06, |
|
"loss": 0.0814, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 3.0516605166051662, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 3.351239361849168e-06, |
|
"loss": 0.1059, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.056580565805658, |
|
"grad_norm": 0.82421875, |
|
"learning_rate": 3.336491841066928e-06, |
|
"loss": 0.083, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 3.06150061500615, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 3.3217605754804273e-06, |
|
"loss": 0.0612, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 3.066420664206642, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 3.307045709038226e-06, |
|
"loss": 0.0833, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 3.071340713407134, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 3.2923473855286426e-06, |
|
"loss": 0.0717, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 3.076260762607626, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 3.2776657485783357e-06, |
|
"loss": 0.0752, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.081180811808118, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 3.2630009416509167e-06, |
|
"loss": 0.0891, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 3.0861008610086103, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.2483531080455334e-06, |
|
"loss": 0.1059, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 3.091020910209102, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 3.2337223908954834e-06, |
|
"loss": 0.0874, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 3.095940959409594, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 3.2191089331668036e-06, |
|
"loss": 0.0814, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 3.100861008610086, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.2045128776568783e-06, |
|
"loss": 0.0982, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.105781057810578, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 3.1899343669930446e-06, |
|
"loss": 0.0647, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 3.11070110701107, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.1753735436312005e-06, |
|
"loss": 0.1091, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 3.1156211562115623, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 3.1608305498544056e-06, |
|
"loss": 0.0939, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 3.1205412054120543, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 3.146305527771499e-06, |
|
"loss": 0.074, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 3.125461254612546, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 3.1317986193157023e-06, |
|
"loss": 0.0734, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.130381303813038, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.1173099662432426e-06, |
|
"loss": 0.1068, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 3.13530135301353, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.1028397101319584e-06, |
|
"loss": 0.0917, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 3.140221402214022, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.0883879923799244e-06, |
|
"loss": 0.0743, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 3.145141451414514, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.0739549542040583e-06, |
|
"loss": 0.0971, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 3.1500615006150063, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 3.059540736638751e-06, |
|
"loss": 0.1033, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.1549815498154983, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 3.0451454805344893e-06, |
|
"loss": 0.0899, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 3.15990159901599, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 3.0307693265564708e-06, |
|
"loss": 0.0845, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 3.164821648216482, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 3.016412415183233e-06, |
|
"loss": 0.0914, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 3.169741697416974, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.002074886705284e-06, |
|
"loss": 0.0802, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 3.174661746617466, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 2.9877568812237325e-06, |
|
"loss": 0.0946, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.179581795817958, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.9734585386489095e-06, |
|
"loss": 0.0782, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 3.1845018450184504, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 2.9591799986990098e-06, |
|
"loss": 0.0594, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 3.1894218942189423, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.9449214008987253e-06, |
|
"loss": 0.0794, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 3.1943419434194342, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.93068288457788e-06, |
|
"loss": 0.0818, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 3.199261992619926, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.916464588870067e-06, |
|
"loss": 0.1144, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.204182041820418, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 2.9022666527112954e-06, |
|
"loss": 0.093, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 3.20910209102091, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 2.8880892148386198e-06, |
|
"loss": 0.0725, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 3.2140221402214024, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.873932413788805e-06, |
|
"loss": 0.068, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 3.2189421894218944, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 2.859796387896949e-06, |
|
"loss": 0.0583, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 3.2238622386223863, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 2.8456812752951483e-06, |
|
"loss": 0.0748, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.2287822878228782, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.831587213911142e-06, |
|
"loss": 0.1128, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 3.23370233702337, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.817514341466965e-06, |
|
"loss": 0.0812, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 3.238622386223862, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.8034627954775993e-06, |
|
"loss": 0.0995, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 3.243542435424354, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 2.7894327132496324e-06, |
|
"loss": 0.0865, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 3.2484624846248464, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.7754242318799174e-06, |
|
"loss": 0.0807, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.2533825338253384, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 2.761437488254232e-06, |
|
"loss": 0.1041, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 3.2583025830258303, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 2.74747261904594e-06, |
|
"loss": 0.074, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.2632226322263223, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.733529760714655e-06, |
|
"loss": 0.0852, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.268142681426814, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.719609049504911e-06, |
|
"loss": 0.1049, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.273062730627306, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.7057106214448216e-06, |
|
"loss": 0.0682, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.2779827798277985, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.6918346123447708e-06, |
|
"loss": 0.1297, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.2829028290282904, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.677981157796059e-06, |
|
"loss": 0.0748, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.2878228782287824, |
|
"grad_norm": 1.125, |
|
"learning_rate": 2.6641503931696e-06, |
|
"loss": 0.1069, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.2927429274292743, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 2.650342453614586e-06, |
|
"loss": 0.0802, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.2976629766297663, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.636557474057173e-06, |
|
"loss": 0.0908, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.302583025830258, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 2.6227955891991617e-06, |
|
"loss": 0.0663, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.30750307503075, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.609056933516675e-06, |
|
"loss": 0.0686, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.312423124231242, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 2.5953416412588504e-06, |
|
"loss": 0.0833, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.3173431734317345, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.58164984644653e-06, |
|
"loss": 0.0934, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.3222632226322264, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.567981682870946e-06, |
|
"loss": 0.0953, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.3271832718327183, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.554337284092411e-06, |
|
"loss": 0.1022, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.3321033210332103, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.540716783439019e-06, |
|
"loss": 0.0674, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.337023370233702, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 2.5271203140053436e-06, |
|
"loss": 0.1064, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.341943419434194, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.5135480086511306e-06, |
|
"loss": 0.0989, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.3468634686346865, |
|
"grad_norm": 1.25, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 0.0908, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.3517835178351785, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.4864764204381624e-06, |
|
"loss": 0.0834, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.3567035670356704, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.472977402113107e-06, |
|
"loss": 0.0902, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.3616236162361623, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.4595030769323246e-06, |
|
"loss": 0.0718, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.3665436654366543, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 2.4460535765620147e-06, |
|
"loss": 0.1142, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.3714637146371462, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.4326290324257896e-06, |
|
"loss": 0.0951, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.376383763837638, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.419229575703411e-06, |
|
"loss": 0.0985, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.3813038130381305, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 2.4058553373294846e-06, |
|
"loss": 0.0713, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.3862238622386225, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.3925064479921985e-06, |
|
"loss": 0.0882, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.3911439114391144, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.379183038132031e-06, |
|
"loss": 0.1156, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.3960639606396064, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.3658852379404973e-06, |
|
"loss": 0.0914, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.4009840098400983, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.352613177358852e-06, |
|
"loss": 0.0794, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.4059040590405902, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 2.3393669860768364e-06, |
|
"loss": 0.0941, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.4108241082410826, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 2.3261467935314097e-06, |
|
"loss": 0.0648, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.4157441574415746, |
|
"grad_norm": 1.25, |
|
"learning_rate": 2.3129527289054716e-06, |
|
"loss": 0.0912, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.4206642066420665, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 2.299784921126622e-06, |
|
"loss": 0.0801, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.4255842558425584, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 2.286643498865877e-06, |
|
"loss": 0.072, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.4305043050430504, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 2.2735285905364307e-06, |
|
"loss": 0.0599, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.4354243542435423, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 2.260440324292385e-06, |
|
"loss": 0.0748, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.4403444034440342, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 2.2473788280275164e-06, |
|
"loss": 0.0613, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.4452644526445266, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.234344229374003e-06, |
|
"loss": 0.107, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.4501845018450186, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 2.2213366557011988e-06, |
|
"loss": 0.083, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 3.4551045510455105, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 2.2083562341143695e-06, |
|
"loss": 0.0823, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 3.4600246002460024, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.195403091453473e-06, |
|
"loss": 0.1011, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 3.4649446494464944, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.1824773542918957e-06, |
|
"loss": 0.0832, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 3.4698646986469863, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 2.1695791489352346e-06, |
|
"loss": 0.1078, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.4747847478474787, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 2.156708601420053e-06, |
|
"loss": 0.0529, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 3.4797047970479706, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 2.1438658375126544e-06, |
|
"loss": 0.0788, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 3.4846248462484626, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 2.131050982707849e-06, |
|
"loss": 0.0749, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 3.4895448954489545, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 2.1182641622277273e-06, |
|
"loss": 0.0666, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 3.4944649446494465, |
|
"grad_norm": 1.0, |
|
"learning_rate": 2.1055055010204427e-06, |
|
"loss": 0.0892, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.4993849938499384, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.092775123758985e-06, |
|
"loss": 0.1143, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 3.5043050430504303, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 2.080073154839964e-06, |
|
"loss": 0.0703, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 3.5092250922509223, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 2.06739971838239e-06, |
|
"loss": 0.083, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 3.5141451414514147, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 2.05475493822647e-06, |
|
"loss": 0.0697, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 3.5141451414514147, |
|
"eval_loss": 0.10337568074464798, |
|
"eval_runtime": 203.8015, |
|
"eval_samples_per_second": 8.027, |
|
"eval_steps_per_second": 1.34, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 3.5190651906519066, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.042138937932388e-06, |
|
"loss": 0.0886, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.5239852398523985, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.0295518407791054e-06, |
|
"loss": 0.1, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 3.5289052890528905, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 2.016993769763147e-06, |
|
"loss": 0.0802, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 3.5338253382533824, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 2.0044648475974095e-06, |
|
"loss": 0.058, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 3.538745387453875, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.9919651967099556e-06, |
|
"loss": 0.0875, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 3.5436654366543667, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.979494939242822e-06, |
|
"loss": 0.0656, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.5485854858548587, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.9670541970508224e-06, |
|
"loss": 0.0676, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 3.5535055350553506, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.954643091700354e-06, |
|
"loss": 0.0672, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 3.5584255842558425, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.9422617444682196e-06, |
|
"loss": 0.0816, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 3.5633456334563345, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.9299102763404335e-06, |
|
"loss": 0.0769, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 3.5682656826568264, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 1.917588808011045e-06, |
|
"loss": 0.069, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.5731857318573184, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.9052974598809505e-06, |
|
"loss": 0.075, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 3.5781057810578107, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.893036352056728e-06, |
|
"loss": 0.1036, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 3.5830258302583027, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 1.880805604349456e-06, |
|
"loss": 0.0658, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 3.5879458794587946, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 1.8686053362735468e-06, |
|
"loss": 0.052, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 3.5928659286592866, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 1.856435667045577e-06, |
|
"loss": 0.0592, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.5977859778597785, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.844296715583117e-06, |
|
"loss": 0.0679, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 3.602706027060271, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.8321886005035812e-06, |
|
"loss": 0.0883, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 3.607626076260763, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.8201114401230586e-06, |
|
"loss": 0.0875, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 3.6125461254612548, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.8080653524551623e-06, |
|
"loss": 0.0662, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 3.6174661746617467, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.796050455209869e-06, |
|
"loss": 0.1162, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.6223862238622386, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.7840668657923838e-06, |
|
"loss": 0.0753, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 3.6273062730627306, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.772114701301972e-06, |
|
"loss": 0.0767, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 3.6322263222632225, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.7601940785308337e-06, |
|
"loss": 0.1026, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 3.6371463714637144, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.7483051139629482e-06, |
|
"loss": 0.0724, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 3.6420664206642064, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.7364479237729526e-06, |
|
"loss": 0.0915, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.6469864698646988, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.7246226238249853e-06, |
|
"loss": 0.057, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 3.6519065190651907, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.7128293296715704e-06, |
|
"loss": 0.0772, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 3.6568265682656826, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.7010681565524834e-06, |
|
"loss": 0.0597, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 3.6617466174661746, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.6893392193936231e-06, |
|
"loss": 0.1057, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 3.6666666666666665, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.677642632805892e-06, |
|
"loss": 0.0869, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.671586715867159, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.6659785110840704e-06, |
|
"loss": 0.0846, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 3.676506765067651, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.6543469682057105e-06, |
|
"loss": 0.0953, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 3.6814268142681428, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.6427481178300064e-06, |
|
"loss": 0.0984, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 3.6863468634686347, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.631182073296706e-06, |
|
"loss": 0.099, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.6912669126691267, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.6196489476249777e-06, |
|
"loss": 0.1055, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.6961869618696186, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.6081488535123274e-06, |
|
"loss": 0.1011, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 3.7011070110701105, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.5966819033334807e-06, |
|
"loss": 0.0697, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 3.7060270602706025, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.5852482091393045e-06, |
|
"loss": 0.0792, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 3.710947109471095, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.5738478826556885e-06, |
|
"loss": 0.0589, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 3.715867158671587, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.5624810352824709e-06, |
|
"loss": 0.1095, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.7207872078720787, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.551147778092344e-06, |
|
"loss": 0.0697, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 3.7257072570725707, |
|
"grad_norm": 0.7890625, |
|
"learning_rate": 1.539848221829769e-06, |
|
"loss": 0.0562, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 3.7306273062730626, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.5285824769098938e-06, |
|
"loss": 0.064, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 3.735547355473555, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.517350653417472e-06, |
|
"loss": 0.0805, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 3.740467404674047, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.5061528611057917e-06, |
|
"loss": 0.0751, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.745387453874539, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.4949892093956015e-06, |
|
"loss": 0.0958, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 3.750307503075031, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.4838598073740395e-06, |
|
"loss": 0.1045, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 3.7552275522755227, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.472764763793565e-06, |
|
"loss": 0.0532, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 3.7601476014760147, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.4617041870709042e-06, |
|
"loss": 0.0961, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 3.7650676506765066, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.4506781852859836e-06, |
|
"loss": 0.0696, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.7699876998769986, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.4396868661808777e-06, |
|
"loss": 0.076, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 3.774907749077491, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.428730337158749e-06, |
|
"loss": 0.0949, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 3.779827798277983, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.4178087052828098e-06, |
|
"loss": 0.0981, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 3.784747847478475, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.4069220772752685e-06, |
|
"loss": 0.1072, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 3.7896678966789668, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 1.3960705595162876e-06, |
|
"loss": 0.063, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.7945879458794587, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 1.385254258042948e-06, |
|
"loss": 0.0707, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 3.799507995079951, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.3744732785482035e-06, |
|
"loss": 0.0804, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 3.804428044280443, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.3637277263798603e-06, |
|
"loss": 0.1245, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 3.809348093480935, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.35301770653954e-06, |
|
"loss": 0.0788, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 3.814268142681427, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 1.3423433236816563e-06, |
|
"loss": 0.0559, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.819188191881919, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.3317046821123868e-06, |
|
"loss": 0.0958, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 3.8241082410824108, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.3211018857886632e-06, |
|
"loss": 0.0748, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 3.8290282902829027, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 1.3105350383171484e-06, |
|
"loss": 0.0673, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 3.8339483394833946, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.3000042429532267e-06, |
|
"loss": 0.0911, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 3.8388683886838866, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 1.289509602599996e-06, |
|
"loss": 0.0776, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.843788437884379, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.2790512198072558e-06, |
|
"loss": 0.0996, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 3.848708487084871, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.268629196770514e-06, |
|
"loss": 0.0685, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 3.853628536285363, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.2582436353299832e-06, |
|
"loss": 0.0924, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 3.8585485854858548, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.2478946369695882e-06, |
|
"loss": 0.0901, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 3.8634686346863467, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.2375823028159667e-06, |
|
"loss": 0.0971, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.868388683886839, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.2273067336374972e-06, |
|
"loss": 0.087, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 3.873308733087331, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.2170680298432934e-06, |
|
"loss": 0.106, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 3.878228782287823, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.2068662914822432e-06, |
|
"loss": 0.0634, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 3.883148831488315, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 1.1967016182420122e-06, |
|
"loss": 0.0983, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 3.888068880688807, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.186574109448091e-06, |
|
"loss": 0.0899, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.892988929889299, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.1764838640628011e-06, |
|
"loss": 0.0799, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 3.8979089790897907, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.1664309806843466e-06, |
|
"loss": 0.0942, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 3.9028290282902827, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.1564155575458414e-06, |
|
"loss": 0.0895, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 3.907749077490775, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 1.1464376925143528e-06, |
|
"loss": 0.0802, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 3.912669126691267, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.1364974830899438e-06, |
|
"loss": 0.0689, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.917589175891759, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.1265950264047171e-06, |
|
"loss": 0.0677, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 3.922509225092251, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.1167304192218737e-06, |
|
"loss": 0.1054, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 3.927429274292743, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.1069037579347613e-06, |
|
"loss": 0.1051, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 3.932349323493235, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.0971151385659357e-06, |
|
"loss": 0.0892, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 3.937269372693727, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.0873646567662165e-06, |
|
"loss": 0.0544, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.942189421894219, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.077652407813764e-06, |
|
"loss": 0.0906, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 3.947109471094711, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 1.067978486613131e-06, |
|
"loss": 0.056, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 3.952029520295203, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.0583429876943585e-06, |
|
"loss": 0.0857, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 3.956949569495695, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.0487460052120262e-06, |
|
"loss": 0.0857, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 3.961869618696187, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.0391876329443534e-06, |
|
"loss": 0.0975, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.9667896678966788, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.0296679642922718e-06, |
|
"loss": 0.0768, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 3.971709717097171, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.0201870922785156e-06, |
|
"loss": 0.1113, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 3.976629766297663, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.010745109546713e-06, |
|
"loss": 0.0812, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 3.981549815498155, |
|
"grad_norm": 0.82421875, |
|
"learning_rate": 1.0013421083604779e-06, |
|
"loss": 0.062, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 3.986469864698647, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 9.919781806025136e-07, |
|
"loss": 0.0666, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.991389913899139, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 9.826534177737106e-07, |
|
"loss": 0.0616, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 3.9963099630996313, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 9.733679109922567e-07, |
|
"loss": 0.0663, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 4.001230012300123, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 9.641217509927376e-07, |
|
"loss": 0.0926, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 4.006150061500615, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 0.0742, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 4.011070110701107, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 9.457478323545749e-07, |
|
"loss": 0.1039, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 4.015990159901599, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 9.366202532591717e-07, |
|
"loss": 0.0794, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 4.015990159901599, |
|
"eval_loss": 0.10322786867618561, |
|
"eval_runtime": 204.3584, |
|
"eval_samples_per_second": 8.006, |
|
"eval_steps_per_second": 1.336, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 4.020910209102091, |
|
"grad_norm": 0.78515625, |
|
"learning_rate": 9.275323800304287e-07, |
|
"loss": 0.0628, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 4.025830258302583, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 9.184843014717337e-07, |
|
"loss": 0.0652, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 4.030750307503075, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 9.094761059976154e-07, |
|
"loss": 0.0884, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 4.035670356703567, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 9.005078816328772e-07, |
|
"loss": 0.0858, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.040590405904059, |
|
"grad_norm": 1.125, |
|
"learning_rate": 8.915797160117423e-07, |
|
"loss": 0.1029, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 4.045510455104551, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 8.826916963769888e-07, |
|
"loss": 0.0981, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 4.0504305043050435, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 8.738439095791123e-07, |
|
"loss": 0.0688, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 4.055350553505535, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 8.650364420754581e-07, |
|
"loss": 0.0715, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 4.060270602706027, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 8.562693799293931e-07, |
|
"loss": 0.0624, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 4.065190651906519, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 8.475428088094517e-07, |
|
"loss": 0.0977, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 4.070110701107011, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 8.388568139885101e-07, |
|
"loss": 0.0827, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 4.075030750307503, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 8.30211480342945e-07, |
|
"loss": 0.0824, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 4.079950799507995, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 8.216068923518072e-07, |
|
"loss": 0.0713, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 4.084870848708487, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 8.130431340959982e-07, |
|
"loss": 0.0905, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.089790897908979, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 8.045202892574395e-07, |
|
"loss": 0.0692, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 4.094710947109471, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 7.960384411182709e-07, |
|
"loss": 0.0713, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 4.099630996309963, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 7.875976725600193e-07, |
|
"loss": 0.0824, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 4.104551045510455, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 7.791980660628029e-07, |
|
"loss": 0.0579, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 4.109471094710947, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 7.708397037045129e-07, |
|
"loss": 0.081, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.114391143911439, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 7.625226671600256e-07, |
|
"loss": 0.0884, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 4.1193111931119315, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 7.542470377003897e-07, |
|
"loss": 0.0809, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 4.124231242312423, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 7.460128961920432e-07, |
|
"loss": 0.0768, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 4.129151291512915, |
|
"grad_norm": 0.8203125, |
|
"learning_rate": 7.37820323096014e-07, |
|
"loss": 0.0809, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 4.134071340713407, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 7.296693984671465e-07, |
|
"loss": 0.0748, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.138991389913899, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 7.215602019533041e-07, |
|
"loss": 0.1046, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 4.143911439114391, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 7.134928127946017e-07, |
|
"loss": 0.0804, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 4.148831488314883, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 7.054673098226278e-07, |
|
"loss": 0.0735, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 4.153751537515375, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.974837714596732e-07, |
|
"loss": 0.0918, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 4.158671586715867, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.895422757179682e-07, |
|
"loss": 0.0926, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.163591635916359, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.816429001989133e-07, |
|
"loss": 0.0887, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 4.168511685116851, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 6.737857220923305e-07, |
|
"loss": 0.0546, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 4.173431734317343, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.659708181757013e-07, |
|
"loss": 0.0709, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 4.178351783517835, |
|
"grad_norm": 0.84375, |
|
"learning_rate": 6.581982648134217e-07, |
|
"loss": 0.0709, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 4.183271832718328, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.50468137956049e-07, |
|
"loss": 0.0639, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.1881918819188195, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.427805131395681e-07, |
|
"loss": 0.0931, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 4.1931119311193115, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 6.351354654846481e-07, |
|
"loss": 0.0811, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 4.198031980319803, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 6.275330696959109e-07, |
|
"loss": 0.0872, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 4.202952029520295, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.199734000611968e-07, |
|
"loss": 0.0826, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 4.207872078720787, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 6.12456530450844e-07, |
|
"loss": 0.0732, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.212792127921279, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.049825343169652e-07, |
|
"loss": 0.0697, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 4.217712177121771, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.975514846927271e-07, |
|
"loss": 0.0767, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 4.222632226322263, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.901634541916406e-07, |
|
"loss": 0.0632, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 4.227552275522755, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.828185150068472e-07, |
|
"loss": 0.0798, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 4.232472324723247, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 5.755167389104166e-07, |
|
"loss": 0.0685, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.237392373923739, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 5.682581972526463e-07, |
|
"loss": 0.0645, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 4.242312423124231, |
|
"grad_norm": 0.84375, |
|
"learning_rate": 5.610429609613615e-07, |
|
"loss": 0.0588, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 4.247232472324724, |
|
"grad_norm": 0.7890625, |
|
"learning_rate": 5.538711005412212e-07, |
|
"loss": 0.0581, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 4.252152521525216, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.467426860730334e-07, |
|
"loss": 0.082, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 4.2570725707257075, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 5.396577872130676e-07, |
|
"loss": 0.0584, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 4.2619926199261995, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.32616473192375e-07, |
|
"loss": 0.0822, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 4.266912669126691, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 5.256188128161116e-07, |
|
"loss": 0.0834, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 4.271832718327183, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 5.186648744628637e-07, |
|
"loss": 0.0703, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 4.276752767527675, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 5.117547260839845e-07, |
|
"loss": 0.1001, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 4.281672816728167, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.048884352029271e-07, |
|
"loss": 0.1088, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 4.286592865928659, |
|
"grad_norm": 0.80859375, |
|
"learning_rate": 4.980660689145855e-07, |
|
"loss": 0.0635, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 4.291512915129151, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 4.912876938846345e-07, |
|
"loss": 0.0876, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 4.296432964329643, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.845533763488902e-07, |
|
"loss": 0.0893, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 4.301353013530135, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.778631821126473e-07, |
|
"loss": 0.0946, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 4.306273062730627, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 4.712171765500484e-07, |
|
"loss": 0.0911, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 4.31119311193112, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 4.6461542460343565e-07, |
|
"loss": 0.1092, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 4.316113161131612, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 4.580579907827287e-07, |
|
"loss": 0.0921, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 4.321033210332104, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 4.515449391647786e-07, |
|
"loss": 0.0759, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 4.325953259532596, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 4.4507633339275494e-07, |
|
"loss": 0.0808, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 4.3308733087330875, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 4.386522366755169e-07, |
|
"loss": 0.0842, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.3357933579335795, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 4.322727117869951e-07, |
|
"loss": 0.1048, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 4.340713407134071, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 4.2593782106558676e-07, |
|
"loss": 0.0922, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 4.345633456334563, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 4.1964762641353297e-07, |
|
"loss": 0.0989, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 4.350553505535055, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 4.1340218929632636e-07, |
|
"loss": 0.0922, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 4.355473554735547, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 4.072015707421006e-07, |
|
"loss": 0.0831, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.360393603936039, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.0104583134104593e-07, |
|
"loss": 0.092, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 4.365313653136531, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 3.9493503124480135e-07, |
|
"loss": 0.0722, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 4.370233702337023, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.8886923016588195e-07, |
|
"loss": 0.0792, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 4.375153751537516, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.828484873770832e-07, |
|
"loss": 0.0541, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 4.380073800738008, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 3.7687286171091355e-07, |
|
"loss": 0.1026, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.3849938499385, |
|
"grad_norm": 1.0, |
|
"learning_rate": 3.709424115590088e-07, |
|
"loss": 0.0849, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 4.389913899138992, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 3.65057194871567e-07, |
|
"loss": 0.098, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 4.394833948339484, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 3.5921726915678247e-07, |
|
"loss": 0.0659, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 4.3997539975399755, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 3.534226914802813e-07, |
|
"loss": 0.0839, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 4.4046740467404675, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 3.4767351846456744e-07, |
|
"loss": 0.1014, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.409594095940959, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.4196980628846297e-07, |
|
"loss": 0.0843, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 4.414514145141451, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 3.3631161068656604e-07, |
|
"loss": 0.0727, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 4.419434194341943, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 3.3069898694870373e-07, |
|
"loss": 0.0771, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 4.424354243542435, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.2513198991939054e-07, |
|
"loss": 0.0975, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 4.429274292742927, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 3.196106739972926e-07, |
|
"loss": 0.0649, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.434194341943419, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 3.1413509313469816e-07, |
|
"loss": 0.0885, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 4.439114391143911, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 3.087053008369889e-07, |
|
"loss": 0.069, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 4.444034440344403, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 3.0332135016211794e-07, |
|
"loss": 0.0539, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 4.448954489544896, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 2.979832937200883e-07, |
|
"loss": 0.0869, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 4.453874538745388, |
|
"grad_norm": 1.0, |
|
"learning_rate": 2.9269118367244385e-07, |
|
"loss": 0.0936, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.45879458794588, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 2.8744507173175564e-07, |
|
"loss": 0.07, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 4.463714637146372, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 2.822450091611195e-07, |
|
"loss": 0.0865, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 4.468634686346864, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 2.770910467736532e-07, |
|
"loss": 0.0797, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 4.4735547355473555, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 2.719832349319973e-07, |
|
"loss": 0.0591, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 4.478474784747847, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 2.669216235478295e-07, |
|
"loss": 0.0716, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.483394833948339, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.619062620813728e-07, |
|
"loss": 0.0919, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 4.488314883148831, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.5693719954091257e-07, |
|
"loss": 0.073, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 4.493234932349323, |
|
"grad_norm": 1.0, |
|
"learning_rate": 2.520144844823169e-07, |
|
"loss": 0.0683, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 4.498154981549815, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.471381650085647e-07, |
|
"loss": 0.0885, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 4.503075030750307, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.4230828876927293e-07, |
|
"loss": 0.0676, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.507995079950799, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 2.375249029602339e-07, |
|
"loss": 0.0829, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 4.512915129151292, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.327880543229505e-07, |
|
"loss": 0.0787, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 4.517835178351784, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 2.2809778914418156e-07, |
|
"loss": 0.0839, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 4.517835178351784, |
|
"eval_loss": 0.10323299467563629, |
|
"eval_runtime": 203.6707, |
|
"eval_samples_per_second": 8.033, |
|
"eval_steps_per_second": 1.34, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 4.522755227552276, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.2345415325549125e-07, |
|
"loss": 0.1202, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 4.527675276752768, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 2.1885719203279587e-07, |
|
"loss": 0.0706, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.53259532595326, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 2.143069503959283e-07, |
|
"loss": 0.0835, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 4.537515375153752, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 2.0980347280818935e-07, |
|
"loss": 0.0678, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 4.5424354243542435, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 2.0534680327592426e-07, |
|
"loss": 0.0662, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 4.5473554735547355, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 2.009369853480825e-07, |
|
"loss": 0.0582, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 4.552275522755227, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.9657406211579966e-07, |
|
"loss": 0.1065, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.557195571955719, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 1.922580762119697e-07, |
|
"loss": 0.0513, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 4.562115621156211, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.8798906981083832e-07, |
|
"loss": 0.0734, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 4.567035670356703, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.8376708462757798e-07, |
|
"loss": 0.0901, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 4.571955719557195, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.7959216191789142e-07, |
|
"loss": 0.0806, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 4.576875768757688, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.7546434247760147e-07, |
|
"loss": 0.0858, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.58179581795818, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.713836666422569e-07, |
|
"loss": 0.0808, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 4.586715867158672, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.673501742867356e-07, |
|
"loss": 0.0951, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 4.591635916359164, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.633639048248542e-07, |
|
"loss": 0.0835, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 4.596555965559656, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 1.594248972089879e-07, |
|
"loss": 0.0686, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 4.601476014760148, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.555331899296808e-07, |
|
"loss": 0.1139, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.60639606396064, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.5168882101528282e-07, |
|
"loss": 0.0884, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 4.6113161131611315, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.4789182803156333e-07, |
|
"loss": 0.0855, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 4.6162361623616235, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.44142248081357e-07, |
|
"loss": 0.0854, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 4.621156211562115, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.4044011780419032e-07, |
|
"loss": 0.0947, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 4.626076260762607, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.3678547337593494e-07, |
|
"loss": 0.0637, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.630996309963099, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.3317835050844275e-07, |
|
"loss": 0.0743, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 4.635916359163591, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.296187844492053e-07, |
|
"loss": 0.0928, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 4.640836408364084, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.2610680998100476e-07, |
|
"loss": 0.0978, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 4.645756457564576, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.2264246142157656e-07, |
|
"loss": 0.0776, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 4.650676506765068, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 1.1922577262327374e-07, |
|
"loss": 0.0896, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.65559655596556, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.1585677697273312e-07, |
|
"loss": 0.0723, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 4.660516605166052, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.1253550739055374e-07, |
|
"loss": 0.0738, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 4.665436654366544, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.0926199633097156e-07, |
|
"loss": 0.0792, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 4.670356703567036, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.060362757815453e-07, |
|
"loss": 0.0992, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 4.675276752767528, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.0285837726283999e-07, |
|
"loss": 0.0638, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.68019680196802, |
|
"grad_norm": 1.0, |
|
"learning_rate": 9.972833182812225e-08, |
|
"loss": 0.0741, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 4.6851168511685115, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 9.664617006305665e-08, |
|
"loss": 0.1135, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 4.6900369003690034, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 9.361192208540427e-08, |
|
"loss": 0.0603, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 4.694956949569495, |
|
"grad_norm": 1.0, |
|
"learning_rate": 9.062561754473231e-08, |
|
"loss": 0.0811, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 4.699876998769987, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 8.768728562211948e-08, |
|
"loss": 0.1155, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.70479704797048, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 8.479695502987551e-08, |
|
"loss": 0.0756, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 4.709717097170972, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 8.195465401125812e-08, |
|
"loss": 0.1058, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 4.714637146371464, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 7.916041034019773e-08, |
|
"loss": 0.0538, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 4.719557195571956, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 7.64142513210242e-08, |
|
"loss": 0.0974, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 4.724477244772448, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 7.371620378820555e-08, |
|
"loss": 0.0975, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.72939729397294, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 7.10662941060769e-08, |
|
"loss": 0.0675, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 4.734317343173432, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 6.84645481685925e-08, |
|
"loss": 0.081, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 4.739237392373924, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.59109913990641e-08, |
|
"loss": 0.0836, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 4.744157441574416, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 6.340564874991906e-08, |
|
"loss": 0.0772, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 4.749077490774908, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 6.094854470245326e-08, |
|
"loss": 0.1065, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.7539975399753995, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 5.853970326659186e-08, |
|
"loss": 0.0749, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 4.7589175891758915, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.6179147980656154e-08, |
|
"loss": 0.0792, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 4.763837638376383, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.3866901911132086e-08, |
|
"loss": 0.0924, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 4.768757687576876, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.160298765244709e-08, |
|
"loss": 0.0824, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 4.773677736777367, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 4.9387427326745287e-08, |
|
"loss": 0.0622, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.77859778597786, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 4.722024258367597e-08, |
|
"loss": 0.0668, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 4.783517835178352, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.5101454600177676e-08, |
|
"loss": 0.1176, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 4.788437884378844, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.303108408027668e-08, |
|
"loss": 0.1058, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 4.793357933579336, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 4.1009151254878254e-08, |
|
"loss": 0.091, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 4.798277982779828, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 3.903567588157353e-08, |
|
"loss": 0.0783, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.80319803198032, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 3.711067724444517e-08, |
|
"loss": 0.0883, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 4.808118081180812, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.523417415387864e-08, |
|
"loss": 0.0801, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 4.813038130381304, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 3.340618494637793e-08, |
|
"loss": 0.0695, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 4.817958179581796, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 3.162672748438844e-08, |
|
"loss": 0.0959, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 4.822878228782288, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.989581915611994e-08, |
|
"loss": 0.0944, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.8277982779827795, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 2.821347687537834e-08, |
|
"loss": 0.0634, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 4.832718327183272, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 2.657971708139917e-08, |
|
"loss": 0.0954, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 4.837638376383763, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.4994555738688252e-08, |
|
"loss": 0.0935, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 4.842558425584256, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 2.3458008336864623e-08, |
|
"loss": 0.0965, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 4.847478474784748, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 2.1970089890509527e-08, |
|
"loss": 0.0944, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.85239852398524, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.0530814939020428e-08, |
|
"loss": 0.0842, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 4.857318573185732, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.9140197546467787e-08, |
|
"loss": 0.0857, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 4.862238622386224, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.7798251301458512e-08, |
|
"loss": 0.0715, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 4.867158671586716, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.6504989317001618e-08, |
|
"loss": 0.0868, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 4.872078720787208, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.5260424230382763e-08, |
|
"loss": 0.0842, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.8769987699877, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.4064568203037699e-08, |
|
"loss": 0.0964, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 4.881918819188192, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.2917432920437345e-08, |
|
"loss": 0.089, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 4.886838868388684, |
|
"grad_norm": 0.875, |
|
"learning_rate": 1.1819029591968456e-08, |
|
"loss": 0.0584, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 4.891758917589176, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.0769368950829252e-08, |
|
"loss": 0.0723, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 4.8966789667896675, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 9.768461253920614e-09, |
|
"loss": 0.1013, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.9015990159901595, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 8.816316281747839e-09, |
|
"loss": 0.0699, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 4.906519065190652, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 7.912943338324598e-09, |
|
"loss": 0.0806, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 4.911439114391144, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 7.058351251083007e-09, |
|
"loss": 0.0705, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 4.916359163591636, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.252548370784817e-09, |
|
"loss": 0.072, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 4.921279212792128, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.495542571443135e-09, |
|
"loss": 0.0715, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.92619926199262, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 4.787341250241384e-09, |
|
"loss": 0.0838, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 4.931119311193112, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 4.127951327466684e-09, |
|
"loss": 0.0672, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 4.936039360393604, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 3.5173792464360256e-09, |
|
"loss": 0.0753, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 4.940959409594096, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 2.955630973437429e-09, |
|
"loss": 0.0858, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 4.945879458794588, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 2.442711997670544e-09, |
|
"loss": 0.0674, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.95079950799508, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.978627331192806e-09, |
|
"loss": 0.0892, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 4.955719557195572, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.5633815088705872e-09, |
|
"loss": 0.074, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 4.960639606396064, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.196978588334785e-09, |
|
"loss": 0.0951, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 4.9655596555965555, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 8.794221499408562e-10, |
|
"loss": 0.0977, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 4.970479704797048, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 6.107152967349539e-10, |
|
"loss": 0.1068, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.97539975399754, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 3.908606544228422e-10, |
|
"loss": 0.0803, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 4.980319803198032, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 2.1986037134325012e-10, |
|
"loss": 0.0667, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 4.985239852398524, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 9.771611844955343e-11, |
|
"loss": 0.0616, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 4.990159901599016, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 2.4429089290345375e-11, |
|
"loss": 0.088, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 4.995079950799508, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 0.0, |
|
"loss": 0.1053, |
|
"step": 1015 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1015, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 51, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.1980182581321662e+19, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|