{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1419,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.021141649048625793,
      "grad_norm": 6.24355232060165,
      "learning_rate": 7.042253521126762e-07,
      "loss": 0.5912,
      "step": 10
    },
    {
      "epoch": 0.042283298097251586,
      "grad_norm": 2.216096271272777,
      "learning_rate": 1.4084507042253523e-06,
      "loss": 0.5024,
      "step": 20
    },
    {
      "epoch": 0.06342494714587738,
      "grad_norm": 1.449992996844573,
      "learning_rate": 2.1126760563380285e-06,
      "loss": 0.4488,
      "step": 30
    },
    {
      "epoch": 0.08456659619450317,
      "grad_norm": 1.5865270130811833,
      "learning_rate": 2.8169014084507046e-06,
      "loss": 0.431,
      "step": 40
    },
    {
      "epoch": 0.10570824524312897,
      "grad_norm": 1.6309436803659814,
      "learning_rate": 3.5211267605633804e-06,
      "loss": 0.4102,
      "step": 50
    },
    {
      "epoch": 0.12684989429175475,
      "grad_norm": 1.3774805832619317,
      "learning_rate": 4.225352112676057e-06,
      "loss": 0.3953,
      "step": 60
    },
    {
      "epoch": 0.14799154334038056,
      "grad_norm": 1.7489511540042915,
      "learning_rate": 4.929577464788733e-06,
      "loss": 0.392,
      "step": 70
    },
    {
      "epoch": 0.16913319238900634,
      "grad_norm": 1.4907503413861205,
      "learning_rate": 4.999505072506396e-06,
      "loss": 0.3845,
      "step": 80
    },
    {
      "epoch": 0.19027484143763213,
      "grad_norm": 1.714741595800788,
      "learning_rate": 4.997794491573208e-06,
      "loss": 0.3754,
      "step": 90
    },
    {
      "epoch": 0.21141649048625794,
      "grad_norm": 1.6723995412498653,
      "learning_rate": 4.994863075823568e-06,
      "loss": 0.3731,
      "step": 100
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 1.559234855788911,
      "learning_rate": 4.990712417384526e-06,
      "loss": 0.3676,
      "step": 110
    },
    {
      "epoch": 0.2536997885835095,
      "grad_norm": 1.57054985630799,
      "learning_rate": 4.985344770585093e-06,
      "loss": 0.3693,
      "step": 120
    },
    {
      "epoch": 0.2748414376321353,
      "grad_norm": 1.6051226014645448,
      "learning_rate": 4.9787630507318594e-06,
      "loss": 0.3627,
      "step": 130
    },
    {
      "epoch": 0.2959830866807611,
      "grad_norm": 1.4668663144082323,
      "learning_rate": 4.9709708325256166e-06,
      "loss": 0.3613,
      "step": 140
    },
    {
      "epoch": 0.3171247357293869,
      "grad_norm": 1.686344018429158,
      "learning_rate": 4.961972348119846e-06,
      "loss": 0.3602,
      "step": 150
    },
    {
      "epoch": 0.3382663847780127,
      "grad_norm": 1.7636035584330838,
      "learning_rate": 4.951772484822128e-06,
      "loss": 0.3568,
      "step": 160
    },
    {
      "epoch": 0.3594080338266385,
      "grad_norm": 2.5501698658025536,
      "learning_rate": 4.940376782439721e-06,
      "loss": 0.3596,
      "step": 170
    },
    {
      "epoch": 0.38054968287526425,
      "grad_norm": 1.3764768803172454,
      "learning_rate": 4.9277914302707466e-06,
      "loss": 0.3574,
      "step": 180
    },
    {
      "epoch": 0.40169133192389006,
      "grad_norm": 1.6197573568256791,
      "learning_rate": 4.914023263742626e-06,
      "loss": 0.3514,
      "step": 190
    },
    {
      "epoch": 0.42283298097251587,
      "grad_norm": 1.7518377059862333,
      "learning_rate": 4.8990797606995845e-06,
      "loss": 0.3468,
      "step": 200
    },
    {
      "epoch": 0.4439746300211416,
      "grad_norm": 1.507687286038259,
      "learning_rate": 4.882969037341239e-06,
      "loss": 0.3455,
      "step": 210
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 1.5970818330074303,
      "learning_rate": 4.865699843814485e-06,
      "loss": 0.3428,
      "step": 220
    },
    {
      "epoch": 0.48625792811839325,
      "grad_norm": 1.4210922325164967,
      "learning_rate": 4.847281559461071e-06,
      "loss": 0.3461,
      "step": 230
    },
    {
      "epoch": 0.507399577167019,
      "grad_norm": 1.3912747321946244,
      "learning_rate": 4.827724187723427e-06,
      "loss": 0.3429,
      "step": 240
    },
    {
      "epoch": 0.5285412262156448,
      "grad_norm": 1.3955734367464652,
      "learning_rate": 4.80703835071155e-06,
      "loss": 0.3437,
      "step": 250
    },
    {
      "epoch": 0.5496828752642706,
      "grad_norm": 1.350657312273654,
      "learning_rate": 4.785235283433857e-06,
      "loss": 0.3393,
      "step": 260
    },
    {
      "epoch": 0.5708245243128964,
      "grad_norm": 1.2717206677797315,
      "learning_rate": 4.762326827695163e-06,
      "loss": 0.3411,
      "step": 270
    },
    {
      "epoch": 0.5919661733615222,
      "grad_norm": 1.191569222191925,
      "learning_rate": 4.7383254256650964e-06,
      "loss": 0.3385,
      "step": 280
    },
    {
      "epoch": 0.6131078224101479,
      "grad_norm": 1.1928472569401323,
      "learning_rate": 4.713244113120443e-06,
      "loss": 0.34,
      "step": 290
    },
    {
      "epoch": 0.6342494714587738,
      "grad_norm": 1.3276992158951393,
      "learning_rate": 4.687096512365067e-06,
      "loss": 0.3421,
      "step": 300
    },
    {
      "epoch": 0.6553911205073996,
      "grad_norm": 1.1887821846438988,
      "learning_rate": 4.659896824831302e-06,
      "loss": 0.3356,
      "step": 310
    },
    {
      "epoch": 0.6765327695560254,
      "grad_norm": 1.2621729084813325,
      "learning_rate": 4.631659823366783e-06,
      "loss": 0.3403,
      "step": 320
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 1.4771395957284508,
      "learning_rate": 4.6024008442109335e-06,
      "loss": 0.3346,
      "step": 330
    },
    {
      "epoch": 0.718816067653277,
      "grad_norm": 1.0838005637440695,
      "learning_rate": 4.572135778665464e-06,
      "loss": 0.3361,
      "step": 340
    },
    {
      "epoch": 0.7399577167019028,
      "grad_norm": 1.1405549612151125,
      "learning_rate": 4.5408810644633956e-06,
      "loss": 0.3308,
      "step": 350
    },
    {
      "epoch": 0.7610993657505285,
      "grad_norm": 1.111444530105349,
      "learning_rate": 4.508653676841308e-06,
      "loss": 0.3337,
      "step": 360
    },
    {
      "epoch": 0.7822410147991543,
      "grad_norm": 1.1516361189573001,
      "learning_rate": 4.475471119319651e-06,
      "loss": 0.3279,
      "step": 370
    },
    {
      "epoch": 0.8033826638477801,
      "grad_norm": 1.1394562536949298,
      "learning_rate": 4.441351414196139e-06,
      "loss": 0.329,
      "step": 380
    },
    {
      "epoch": 0.8245243128964059,
      "grad_norm": 1.2624616377095017,
      "learning_rate": 4.406313092757369e-06,
      "loss": 0.3307,
      "step": 390
    },
    {
      "epoch": 0.8456659619450317,
      "grad_norm": 1.3399500811254008,
      "learning_rate": 4.370375185214014e-06,
      "loss": 0.3308,
      "step": 400
    },
    {
      "epoch": 0.8668076109936576,
      "grad_norm": 1.2917888536055562,
      "learning_rate": 4.333557210365023e-06,
      "loss": 0.3301,
      "step": 410
    },
    {
      "epoch": 0.8879492600422833,
      "grad_norm": 1.1769502964733598,
      "learning_rate": 4.295879164996462e-06,
      "loss": 0.3271,
      "step": 420
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 1.2781090385858154,
      "learning_rate": 4.257361513020745e-06,
      "loss": 0.3242,
      "step": 430
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 1.2394021660894758,
      "learning_rate": 4.218025174362161e-06,
      "loss": 0.3236,
      "step": 440
    },
    {
      "epoch": 0.9513742071881607,
      "grad_norm": 1.2387248179248829,
      "learning_rate": 4.177891513594724e-06,
      "loss": 0.3249,
      "step": 450
    },
    {
      "epoch": 0.9725158562367865,
      "grad_norm": 1.2158882947276164,
      "learning_rate": 4.136982328338531e-06,
      "loss": 0.3245,
      "step": 460
    },
    {
      "epoch": 0.9936575052854123,
      "grad_norm": 1.0357890278389636,
      "learning_rate": 4.0953198374209045e-06,
      "loss": 0.3209,
      "step": 470
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.3238590359687805,
      "eval_runtime": 42.8216,
      "eval_samples_per_second": 297.607,
      "eval_steps_per_second": 1.168,
      "step": 473
    },
    {
      "epoch": 1.014799154334038,
      "grad_norm": 1.7559135479092256,
      "learning_rate": 4.052926668808791e-06,
      "loss": 0.2656,
      "step": 480
    },
    {
      "epoch": 1.0359408033826638,
      "grad_norm": 1.5639568698805122,
      "learning_rate": 4.009825847318922e-06,
      "loss": 0.2439,
      "step": 490
    },
    {
      "epoch": 1.0570824524312896,
      "grad_norm": 1.1876279950755768,
      "learning_rate": 3.966040782112451e-06,
      "loss": 0.2422,
      "step": 500
    },
    {
      "epoch": 1.0782241014799154,
      "grad_norm": 1.1202067555602988,
      "learning_rate": 3.921595253980836e-06,
      "loss": 0.2425,
      "step": 510
    },
    {
      "epoch": 1.0993657505285412,
      "grad_norm": 1.1539051669533806,
      "learning_rate": 3.876513402429883e-06,
      "loss": 0.2431,
      "step": 520
    },
    {
      "epoch": 1.120507399577167,
      "grad_norm": 1.199763607338339,
      "learning_rate": 3.83081971256896e-06,
      "loss": 0.2413,
      "step": 530
    },
    {
      "epoch": 1.1416490486257929,
      "grad_norm": 1.1190658369259245,
      "learning_rate": 3.7845390018125065e-06,
      "loss": 0.2454,
      "step": 540
    },
    {
      "epoch": 1.1627906976744187,
      "grad_norm": 1.116921299899802,
      "learning_rate": 3.737696406401062e-06,
      "loss": 0.2394,
      "step": 550
    },
    {
      "epoch": 1.1839323467230445,
      "grad_norm": 1.1477296149656728,
      "learning_rate": 3.6903173677491266e-06,
      "loss": 0.2448,
      "step": 560
    },
    {
      "epoch": 1.20507399577167,
      "grad_norm": 1.1454465891047556,
      "learning_rate": 3.642427618627277e-06,
      "loss": 0.2437,
      "step": 570
    },
    {
      "epoch": 1.226215644820296,
      "grad_norm": 1.1642513264879382,
      "learning_rate": 3.5940531691860405e-06,
      "loss": 0.2428,
      "step": 580
    },
    {
      "epoch": 1.2473572938689217,
      "grad_norm": 1.3763131648109288,
      "learning_rate": 3.545220292829113e-06,
      "loss": 0.2403,
      "step": 590
    },
    {
      "epoch": 1.2684989429175475,
      "grad_norm": 1.291772297216049,
      "learning_rate": 3.4959555119436033e-06,
      "loss": 0.2425,
      "step": 600
    },
    {
      "epoch": 1.2896405919661733,
      "grad_norm": 1.4742209496360983,
      "learning_rate": 3.446285583495041e-06,
      "loss": 0.2419,
      "step": 610
    },
    {
      "epoch": 1.3107822410147991,
      "grad_norm": 1.2162630598081021,
      "learning_rate": 3.396237484494985e-06,
      "loss": 0.2434,
      "step": 620
    },
    {
      "epoch": 1.331923890063425,
      "grad_norm": 1.1668893819388124,
      "learning_rate": 3.345838397349115e-06,
      "loss": 0.2413,
      "step": 630
    },
    {
      "epoch": 1.3530655391120507,
      "grad_norm": 1.1883780283974819,
      "learning_rate": 3.2951156950937728e-06,
      "loss": 0.243,
      "step": 640
    },
    {
      "epoch": 1.3742071881606766,
      "grad_norm": 1.2805764821322394,
      "learning_rate": 3.2440969265289624e-06,
      "loss": 0.2421,
      "step": 650
    },
    {
      "epoch": 1.3953488372093024,
      "grad_norm": 1.0727847278011404,
      "learning_rate": 3.1928098012558895e-06,
      "loss": 0.244,
      "step": 660
    },
    {
      "epoch": 1.4164904862579282,
      "grad_norm": 1.1764000750965518,
      "learning_rate": 3.1412821746271693e-06,
      "loss": 0.2439,
      "step": 670
    },
    {
      "epoch": 1.437632135306554,
      "grad_norm": 1.1953048969350113,
      "learning_rate": 3.0895420326178705e-06,
      "loss": 0.2408,
      "step": 680
    },
    {
      "epoch": 1.4587737843551798,
      "grad_norm": 1.2778095821404496,
      "learning_rate": 3.03761747662561e-06,
      "loss": 0.2423,
      "step": 690
    },
    {
      "epoch": 1.4799154334038054,
      "grad_norm": 1.2694623156337455,
      "learning_rate": 2.985536708207971e-06,
      "loss": 0.2401,
      "step": 700
    },
    {
      "epoch": 1.5010570824524314,
      "grad_norm": 1.1185495126584732,
      "learning_rate": 2.933328013765505e-06,
      "loss": 0.2449,
      "step": 710
    },
    {
      "epoch": 1.522198731501057,
      "grad_norm": 1.168267493603954,
      "learning_rate": 2.8810197491786714e-06,
      "loss": 0.2431,
      "step": 720
    },
    {
      "epoch": 1.543340380549683,
      "grad_norm": 1.1355953180673453,
      "learning_rate": 2.8286403244070252e-06,
      "loss": 0.2414,
      "step": 730
    },
    {
      "epoch": 1.5644820295983086,
      "grad_norm": 1.0758782490829244,
      "learning_rate": 2.7762181880590442e-06,
      "loss": 0.2415,
      "step": 740
    },
    {
      "epoch": 1.5856236786469344,
      "grad_norm": 1.1428398992616515,
      "learning_rate": 2.723781811940956e-06,
      "loss": 0.2431,
      "step": 750
    },
    {
      "epoch": 1.6067653276955602,
      "grad_norm": 1.0783252394855087,
      "learning_rate": 2.6713596755929755e-06,
      "loss": 0.2417,
      "step": 760
    },
    {
      "epoch": 1.627906976744186,
      "grad_norm": 1.0764260279790863,
      "learning_rate": 2.6189802508213297e-06,
      "loss": 0.2391,
      "step": 770
    },
    {
      "epoch": 1.6490486257928119,
      "grad_norm": 1.0866425685585581,
      "learning_rate": 2.5666719862344957e-06,
      "loss": 0.2445,
      "step": 780
    },
    {
      "epoch": 1.6701902748414377,
      "grad_norm": 1.0529013290697624,
      "learning_rate": 2.51446329179203e-06,
      "loss": 0.2409,
      "step": 790
    },
    {
      "epoch": 1.6913319238900635,
      "grad_norm": 1.1356539893062763,
      "learning_rate": 2.4623825233743896e-06,
      "loss": 0.241,
      "step": 800
    },
    {
      "epoch": 1.712473572938689,
      "grad_norm": 1.1111512278149636,
      "learning_rate": 2.4104579673821297e-06,
      "loss": 0.2434,
      "step": 810
    },
    {
      "epoch": 1.733615221987315,
      "grad_norm": 1.102236029943368,
      "learning_rate": 2.358717825372831e-06,
      "loss": 0.2413,
      "step": 820
    },
    {
      "epoch": 1.7547568710359407,
      "grad_norm": 1.124459202450407,
      "learning_rate": 2.3071901987441116e-06,
      "loss": 0.2385,
      "step": 830
    },
    {
      "epoch": 1.7758985200845667,
      "grad_norm": 1.2179881959913421,
      "learning_rate": 2.2559030734710396e-06,
      "loss": 0.2386,
      "step": 840
    },
    {
      "epoch": 1.7970401691331923,
      "grad_norm": 1.0767260671283274,
      "learning_rate": 2.2048843049062275e-06,
      "loss": 0.2398,
      "step": 850
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 1.1119843325482643,
      "learning_rate": 2.1541616026508854e-06,
      "loss": 0.2413,
      "step": 860
    },
    {
      "epoch": 1.839323467230444,
      "grad_norm": 1.0955361377120234,
      "learning_rate": 2.103762515505016e-06,
      "loss": 0.2417,
      "step": 870
    },
    {
      "epoch": 1.8604651162790697,
      "grad_norm": 1.0754774699879763,
      "learning_rate": 2.0537144165049597e-06,
      "loss": 0.236,
      "step": 880
    },
    {
      "epoch": 1.8816067653276956,
      "grad_norm": 1.116290127791819,
      "learning_rate": 2.0040444880563974e-06,
      "loss": 0.2396,
      "step": 890
    },
    {
      "epoch": 1.9027484143763214,
      "grad_norm": 1.0587161825711324,
      "learning_rate": 1.9547797071708873e-06,
      "loss": 0.2402,
      "step": 900
    },
    {
      "epoch": 1.9238900634249472,
      "grad_norm": 1.0735022958430327,
      "learning_rate": 1.90594683081396e-06,
      "loss": 0.2454,
      "step": 910
    },
    {
      "epoch": 1.945031712473573,
      "grad_norm": 1.0625353794584775,
      "learning_rate": 1.8575723813727234e-06,
      "loss": 0.241,
      "step": 920
    },
    {
      "epoch": 1.9661733615221988,
      "grad_norm": 1.0540753292992686,
      "learning_rate": 1.8096826322508745e-06,
      "loss": 0.2377,
      "step": 930
    },
    {
      "epoch": 1.9873150105708244,
      "grad_norm": 1.1469855369418602,
      "learning_rate": 1.7623035935989388e-06,
      "loss": 0.2363,
      "step": 940
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.3176339268684387,
      "eval_runtime": 46.7927,
      "eval_samples_per_second": 272.35,
      "eval_steps_per_second": 1.069,
      "step": 946
    },
    {
      "epoch": 2.0084566596194504,
      "grad_norm": 1.442414950660575,
      "learning_rate": 1.7154609981874945e-06,
      "loss": 0.2124,
      "step": 950
    },
    {
      "epoch": 2.029598308668076,
      "grad_norm": 1.3377936728948667,
      "learning_rate": 1.6691802874310402e-06,
      "loss": 0.1655,
      "step": 960
    },
    {
      "epoch": 2.050739957716702,
      "grad_norm": 1.3199120792646895,
      "learning_rate": 1.6234865975701169e-06,
      "loss": 0.1695,
      "step": 970
    },
    {
      "epoch": 2.0718816067653276,
      "grad_norm": 1.1515710499363918,
      "learning_rate": 1.5784047460191638e-06,
      "loss": 0.1628,
      "step": 980
    },
    {
      "epoch": 2.0930232558139537,
      "grad_norm": 1.0930247346459778,
      "learning_rate": 1.5339592178875502e-06,
      "loss": 0.1629,
      "step": 990
    },
    {
      "epoch": 2.1141649048625792,
      "grad_norm": 1.1196935797630683,
      "learning_rate": 1.4901741526810793e-06,
      "loss": 0.1632,
      "step": 1000
    },
    {
      "epoch": 2.1353065539112053,
      "grad_norm": 1.266477815888195,
      "learning_rate": 1.4470733311912094e-06,
      "loss": 0.1648,
      "step": 1010
    },
    {
      "epoch": 2.156448202959831,
      "grad_norm": 1.2032366464614765,
      "learning_rate": 1.4046801625790954e-06,
      "loss": 0.1635,
      "step": 1020
    },
    {
      "epoch": 2.177589852008457,
      "grad_norm": 1.1118635376846853,
      "learning_rate": 1.3630176716614696e-06,
      "loss": 0.1645,
      "step": 1030
    },
    {
      "epoch": 2.1987315010570825,
      "grad_norm": 1.1762161662179742,
      "learning_rate": 1.3221084864052755e-06,
      "loss": 0.167,
      "step": 1040
    },
    {
      "epoch": 2.219873150105708,
      "grad_norm": 1.148476335813781,
      "learning_rate": 1.2819748256378395e-06,
      "loss": 0.1621,
      "step": 1050
    },
    {
      "epoch": 2.241014799154334,
      "grad_norm": 1.1550786530748482,
      "learning_rate": 1.2426384869792552e-06,
      "loss": 0.1626,
      "step": 1060
    },
    {
      "epoch": 2.2621564482029597,
      "grad_norm": 1.1566446795682568,
      "learning_rate": 1.2041208350035386e-06,
      "loss": 0.1642,
      "step": 1070
    },
    {
      "epoch": 2.2832980972515857,
      "grad_norm": 1.1410532150519583,
      "learning_rate": 1.1664427896349775e-06,
      "loss": 0.1642,
      "step": 1080
    },
    {
      "epoch": 2.3044397463002113,
      "grad_norm": 1.1400713733613699,
      "learning_rate": 1.129624814785987e-06,
      "loss": 0.1647,
      "step": 1090
    },
    {
      "epoch": 2.3255813953488373,
      "grad_norm": 1.0699436865402963,
      "learning_rate": 1.0936869072426324e-06,
      "loss": 0.163,
      "step": 1100
    },
    {
      "epoch": 2.346723044397463,
      "grad_norm": 1.1541436695499816,
      "learning_rate": 1.0586485858038623e-06,
      "loss": 0.1626,
      "step": 1110
    },
    {
      "epoch": 2.367864693446089,
      "grad_norm": 1.125483674986556,
      "learning_rate": 1.0245288806803492e-06,
      "loss": 0.1669,
      "step": 1120
    },
    {
      "epoch": 2.3890063424947146,
      "grad_norm": 1.1026248670403718,
      "learning_rate": 9.913463231586928e-07,
      "loss": 0.1629,
      "step": 1130
    },
    {
      "epoch": 2.41014799154334,
      "grad_norm": 1.159203695893513,
      "learning_rate": 9.591189355366054e-07,
      "loss": 0.1644,
      "step": 1140
    },
    {
      "epoch": 2.431289640591966,
      "grad_norm": 1.1077004937863641,
      "learning_rate": 9.278642213345369e-07,
      "loss": 0.167,
      "step": 1150
    },
    {
      "epoch": 2.452431289640592,
      "grad_norm": 1.11182862218811,
      "learning_rate": 8.975991557890668e-07,
      "loss": 0.1638,
      "step": 1160
    },
    {
      "epoch": 2.473572938689218,
      "grad_norm": 1.1755110000283455,
      "learning_rate": 8.683401766332171e-07,
      "loss": 0.1642,
      "step": 1170
    },
    {
      "epoch": 2.4947145877378434,
      "grad_norm": 1.1475624161633082,
      "learning_rate": 8.401031751686978e-07,
      "loss": 0.1676,
      "step": 1180
    },
    {
      "epoch": 2.5158562367864694,
      "grad_norm": 1.148787879127806,
      "learning_rate": 8.129034876349334e-07,
      "loss": 0.1644,
      "step": 1190
    },
    {
      "epoch": 2.536997885835095,
      "grad_norm": 1.190372203322852,
      "learning_rate": 7.867558868795578e-07,
      "loss": 0.165,
      "step": 1200
    },
    {
      "epoch": 2.558139534883721,
      "grad_norm": 1.1595867121561987,
      "learning_rate": 7.616745743349038e-07,
      "loss": 0.1638,
      "step": 1210
    },
    {
      "epoch": 2.5792811839323466,
      "grad_norm": 1.1432214797090672,
      "learning_rate": 7.376731723048383e-07,
      "loss": 0.1652,
      "step": 1220
    },
    {
      "epoch": 2.6004228329809727,
      "grad_norm": 1.1142453334609799,
      "learning_rate": 7.147647165661439e-07,
      "loss": 0.1636,
      "step": 1230
    },
    {
      "epoch": 2.6215644820295982,
      "grad_norm": 1.0990743293486216,
      "learning_rate": 6.929616492884497e-07,
      "loss": 0.1625,
      "step": 1240
    },
    {
      "epoch": 2.6427061310782243,
      "grad_norm": 1.0991366585298548,
      "learning_rate": 6.722758122765728e-07,
      "loss": 0.1638,
      "step": 1250
    },
    {
      "epoch": 2.66384778012685,
      "grad_norm": 1.1273149039763855,
      "learning_rate": 6.527184405389298e-07,
      "loss": 0.1611,
      "step": 1260
    },
    {
      "epoch": 2.6849894291754755,
      "grad_norm": 1.101167246100859,
      "learning_rate": 6.343001561855149e-07,
      "loss": 0.1605,
      "step": 1270
    },
    {
      "epoch": 2.7061310782241015,
      "grad_norm": 1.0867484989228724,
      "learning_rate": 6.17030962658762e-07,
      "loss": 0.1655,
      "step": 1280
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 1.154566269426987,
      "learning_rate": 6.009202393004164e-07,
      "loss": 0.1636,
      "step": 1290
    },
    {
      "epoch": 2.748414376321353,
      "grad_norm": 1.1187041716958608,
      "learning_rate": 5.859767362573741e-07,
      "loss": 0.1644,
      "step": 1300
    },
    {
      "epoch": 2.7695560253699787,
      "grad_norm": 1.1267150964681174,
      "learning_rate": 5.722085697292536e-07,
      "loss": 0.1659,
      "step": 1310
    },
    {
      "epoch": 2.7906976744186047,
      "grad_norm": 1.0795591650211436,
      "learning_rate": 5.596232175602791e-07,
      "loss": 0.1667,
      "step": 1320
    },
    {
      "epoch": 2.8118393234672303,
      "grad_norm": 1.060071714533849,
      "learning_rate": 5.482275151778719e-07,
      "loss": 0.164,
      "step": 1330
    },
    {
      "epoch": 2.8329809725158563,
      "grad_norm": 1.10306526157592,
      "learning_rate": 5.380276518801548e-07,
      "loss": 0.1632,
      "step": 1340
    },
    {
      "epoch": 2.854122621564482,
      "grad_norm": 1.0906207707452118,
      "learning_rate": 5.290291674743844e-07,
      "loss": 0.1642,
      "step": 1350
    },
    {
      "epoch": 2.875264270613108,
      "grad_norm": 1.0845536549774597,
      "learning_rate": 5.212369492681416e-07,
      "loss": 0.1641,
      "step": 1360
    },
    {
      "epoch": 2.8964059196617336,
      "grad_norm": 1.1569339067127147,
      "learning_rate": 5.146552294149075e-07,
      "loss": 0.1629,
      "step": 1370
    },
    {
      "epoch": 2.9175475687103596,
      "grad_norm": 1.064398221843151,
      "learning_rate": 5.092875826154744e-07,
      "loss": 0.1648,
      "step": 1380
    },
    {
      "epoch": 2.938689217758985,
      "grad_norm": 1.093531486342711,
      "learning_rate": 5.051369241764327e-07,
      "loss": 0.1621,
      "step": 1390
    },
    {
      "epoch": 2.9598308668076108,
      "grad_norm": 1.0796661303223472,
      "learning_rate": 5.022055084267933e-07,
      "loss": 0.1623,
      "step": 1400
    },
    {
      "epoch": 2.980972515856237,
      "grad_norm": 1.0990164115583458,
      "learning_rate": 5.004949274936044e-07,
      "loss": 0.1634,
      "step": 1410
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.3471969962120056,
      "eval_runtime": 49.2029,
      "eval_samples_per_second": 259.009,
      "eval_steps_per_second": 1.016,
      "step": 1419
    },
    {
      "epoch": 3.0,
      "step": 1419,
      "total_flos": 2376877851279360.0,
      "train_loss": 0.25528628041493884,
      "train_runtime": 8757.0427,
      "train_samples_per_second": 82.948,
      "train_steps_per_second": 0.162
    }
  ],
  "logging_steps": 10,
  "max_steps": 1419,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2376877851279360.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}