{"current_steps": 10, "total_steps": 1752, "loss": 2.6042, "learning_rate": 5.681818181818182e-07, "epoch": 0.06837606837606838, "percentage": 0.57, "elapsed_time": "0:00:16", "remaining_time": "0:46:48"}
{"current_steps": 20, "total_steps": 1752, "loss": 2.577, "learning_rate": 1.1363636363636364e-06, "epoch": 0.13675213675213677, "percentage": 1.14, "elapsed_time": "0:00:30", "remaining_time": "0:43:48"}
{"current_steps": 30, "total_steps": 1752, "loss": 2.4842, "learning_rate": 1.7045454545454546e-06, "epoch": 0.20512820512820512, "percentage": 1.71, "elapsed_time": "0:00:44", "remaining_time": "0:42:36"}
{"current_steps": 40, "total_steps": 1752, "loss": 2.4127, "learning_rate": 2.2727272727272728e-06, "epoch": 0.27350427350427353, "percentage": 2.28, "elapsed_time": "0:00:58", "remaining_time": "0:41:53"}
{"current_steps": 50, "total_steps": 1752, "loss": 2.2065, "learning_rate": 2.8409090909090916e-06, "epoch": 0.3418803418803419, "percentage": 2.85, "elapsed_time": "0:01:13", "remaining_time": "0:41:33"}
{"current_steps": 60, "total_steps": 1752, "loss": 2.2638, "learning_rate": 3.409090909090909e-06, "epoch": 0.41025641025641024, "percentage": 3.42, "elapsed_time": "0:01:27", "remaining_time": "0:41:02"}
{"current_steps": 70, "total_steps": 1752, "loss": 2.0591, "learning_rate": 3.9772727272727275e-06, "epoch": 0.47863247863247865, "percentage": 4.0, "elapsed_time": "0:01:41", "remaining_time": "0:40:32"}
{"current_steps": 80, "total_steps": 1752, "loss": 1.9693, "learning_rate": 4.5454545454545455e-06, "epoch": 0.5470085470085471, "percentage": 4.57, "elapsed_time": "0:01:55", "remaining_time": "0:40:08"}
{"current_steps": 90, "total_steps": 1752, "loss": 1.9559, "learning_rate": 5.113636363636364e-06, "epoch": 0.6153846153846154, "percentage": 5.14, "elapsed_time": "0:02:09", "remaining_time": "0:39:49"}
{"current_steps": 100, "total_steps": 1752, "loss": 1.8579, "learning_rate": 5.681818181818183e-06, "epoch": 0.6837606837606838, "percentage": 5.71, "elapsed_time": "0:02:23", "remaining_time": "0:39:34"}
{"current_steps": 110, "total_steps": 1752, "loss": 1.8601, "learning_rate": 6.25e-06, "epoch": 0.7521367521367521, "percentage": 6.28, "elapsed_time": "0:02:37", "remaining_time": "0:39:06"}
{"current_steps": 120, "total_steps": 1752, "loss": 1.7222, "learning_rate": 6.818181818181818e-06, "epoch": 0.8205128205128205, "percentage": 6.85, "elapsed_time": "0:02:51", "remaining_time": "0:38:45"}
{"current_steps": 130, "total_steps": 1752, "loss": 1.7724, "learning_rate": 7.386363636363637e-06, "epoch": 0.8888888888888888, "percentage": 7.42, "elapsed_time": "0:03:04", "remaining_time": "0:38:21"}
{"current_steps": 140, "total_steps": 1752, "loss": 1.8647, "learning_rate": 7.954545454545455e-06, "epoch": 0.9572649572649573, "percentage": 7.99, "elapsed_time": "0:03:18", "remaining_time": "0:38:02"}
{"current_steps": 150, "total_steps": 1752, "loss": 1.7017, "learning_rate": 8.522727272727273e-06, "epoch": 1.0256410256410255, "percentage": 8.56, "elapsed_time": "0:03:32", "remaining_time": "0:37:46"}
{"current_steps": 160, "total_steps": 1752, "loss": 1.6883, "learning_rate": 9.090909090909091e-06, "epoch": 1.0940170940170941, "percentage": 9.13, "elapsed_time": "0:03:45", "remaining_time": "0:37:22"}
{"current_steps": 170, "total_steps": 1752, "loss": 1.5485, "learning_rate": 9.65909090909091e-06, "epoch": 1.1623931623931625, "percentage": 9.7, "elapsed_time": "0:03:58", "remaining_time": "0:37:01"}
{"current_steps": 180, "total_steps": 1752, "loss": 1.5548, "learning_rate": 9.999841055681184e-06, "epoch": 1.2307692307692308, "percentage": 10.27, "elapsed_time": "0:04:12", "remaining_time": "0:36:42"}
{"current_steps": 190, "total_steps": 1752, "loss": 1.602, "learning_rate": 9.998053048145735e-06, "epoch": 1.2991452991452992, "percentage": 10.84, "elapsed_time": "0:04:25", "remaining_time": "0:36:23"}
{"current_steps": 200, "total_steps": 1752, "loss": 1.5151, "learning_rate": 9.994279065509094e-06, "epoch": 1.3675213675213675, "percentage": 11.42, "elapsed_time": "0:04:39", "remaining_time": "0:36:05"}
{"current_steps": 210, "total_steps": 1752, "loss": 1.4208, "learning_rate": 9.988520607362297e-06, "epoch": 1.435897435897436, "percentage": 11.99, "elapsed_time": "0:04:52", "remaining_time": "0:35:44"}
{"current_steps": 220, "total_steps": 1752, "loss": 1.4954, "learning_rate": 9.98077996182722e-06, "epoch": 1.5042735042735043, "percentage": 12.56, "elapsed_time": "0:05:05", "remaining_time": "0:35:25"}
{"current_steps": 230, "total_steps": 1752, "loss": 1.3399, "learning_rate": 9.971060204647384e-06, "epoch": 1.5726495726495726, "percentage": 13.13, "elapsed_time": "0:05:18", "remaining_time": "0:35:05"}
{"current_steps": 240, "total_steps": 1752, "loss": 1.2554, "learning_rate": 9.959365197965824e-06, "epoch": 1.641025641025641, "percentage": 13.7, "elapsed_time": "0:05:31", "remaining_time": "0:34:46"}
{"current_steps": 250, "total_steps": 1752, "loss": 1.3161, "learning_rate": 9.945699588790455e-06, "epoch": 1.7094017094017095, "percentage": 14.27, "elapsed_time": "0:05:44", "remaining_time": "0:34:30"}
{"current_steps": 260, "total_steps": 1752, "loss": 1.3603, "learning_rate": 9.930068807147585e-06, "epoch": 1.7777777777777777, "percentage": 14.84, "elapsed_time": "0:05:57", "remaining_time": "0:34:11"}
{"current_steps": 270, "total_steps": 1752, "loss": 1.2576, "learning_rate": 9.912479063924309e-06, "epoch": 1.8461538461538463, "percentage": 15.41, "elapsed_time": "0:06:10", "remaining_time": "0:33:51"}
{"current_steps": 280, "total_steps": 1752, "loss": 1.207, "learning_rate": 9.8929373484006e-06, "epoch": 1.9145299145299144, "percentage": 15.98, "elapsed_time": "0:06:22", "remaining_time": "0:33:32"}
{"current_steps": 290, "total_steps": 1752, "loss": 1.232, "learning_rate": 9.871451425472128e-06, "epoch": 1.982905982905983, "percentage": 16.55, "elapsed_time": "0:06:35", "remaining_time": "0:33:14"}
{"current_steps": 300, "total_steps": 1752, "loss": 1.0106, "learning_rate": 9.848029832564875e-06, "epoch": 2.051282051282051, "percentage": 17.12, "elapsed_time": "0:06:48", "remaining_time": "0:32:57"}
{"current_steps": 310, "total_steps": 1752, "loss": 0.9484, "learning_rate": 9.822681876242797e-06, "epoch": 2.1196581196581197, "percentage": 17.69, "elapsed_time": "0:07:01", "remaining_time": "0:32:38"}
{"current_steps": 320, "total_steps": 1752, "loss": 1.0094, "learning_rate": 9.795417628509857e-06, "epoch": 2.1880341880341883, "percentage": 18.26, "elapsed_time": "0:07:13", "remaining_time": "0:32:20"}
{"current_steps": 330, "total_steps": 1752, "loss": 0.9317, "learning_rate": 9.766247922807927e-06, "epoch": 2.2564102564102564, "percentage": 18.84, "elapsed_time": "0:07:26", "remaining_time": "0:32:02"}
{"current_steps": 340, "total_steps": 1752, "loss": 0.8936, "learning_rate": 9.73518434971211e-06, "epoch": 2.324786324786325, "percentage": 19.41, "elapsed_time": "0:07:38", "remaining_time": "0:31:44"}
{"current_steps": 350, "total_steps": 1752, "loss": 0.9004, "learning_rate": 9.702239252325237e-06, "epoch": 2.393162393162393, "percentage": 19.98, "elapsed_time": "0:07:51", "remaining_time": "0:31:28"}
{"current_steps": 360, "total_steps": 1752, "loss": 0.9335, "learning_rate": 9.667425721373333e-06, "epoch": 2.4615384615384617, "percentage": 20.55, "elapsed_time": "0:08:03", "remaining_time": "0:31:09"}
{"current_steps": 370, "total_steps": 1752, "loss": 0.8982, "learning_rate": 9.630757590004023e-06, "epoch": 2.52991452991453, "percentage": 21.12, "elapsed_time": "0:08:15", "remaining_time": "0:30:51"}
{"current_steps": 380, "total_steps": 1752, "loss": 0.8541, "learning_rate": 9.592249428289935e-06, "epoch": 2.5982905982905984, "percentage": 21.69, "elapsed_time": "0:08:27", "remaining_time": "0:30:33"}
{"current_steps": 390, "total_steps": 1752, "loss": 0.8105, "learning_rate": 9.551916537439282e-06, "epoch": 2.6666666666666665, "percentage": 22.26, "elapsed_time": "0:08:40", "remaining_time": "0:30:16"}
{"current_steps": 400, "total_steps": 1752, "loss": 0.8308, "learning_rate": 9.50977494371594e-06, "epoch": 2.735042735042735, "percentage": 22.83, "elapsed_time": "0:08:52", "remaining_time": "0:29:59"}
{"current_steps": 410, "total_steps": 1752, "loss": 0.8515, "learning_rate": 9.465841392071396e-06, "epoch": 2.8034188034188032, "percentage": 23.4, "elapsed_time": "0:09:04", "remaining_time": "0:29:41"}
{"current_steps": 420, "total_steps": 1752, "loss": 0.6671, "learning_rate": 9.420133339491171e-06, "epoch": 2.871794871794872, "percentage": 23.97, "elapsed_time": "0:09:16", "remaining_time": "0:29:23"}
{"current_steps": 430, "total_steps": 1752, "loss": 0.7728, "learning_rate": 9.372668948058276e-06, "epoch": 2.9401709401709404, "percentage": 24.54, "elapsed_time": "0:09:27", "remaining_time": "0:29:05"}
{"current_steps": 440, "total_steps": 1752, "loss": 0.6978, "learning_rate": 9.323467077736513e-06, "epoch": 3.0085470085470085, "percentage": 25.11, "elapsed_time": "0:09:39", "remaining_time": "0:28:48"}
{"current_steps": 450, "total_steps": 1752, "loss": 0.561, "learning_rate": 9.272547278876475e-06, "epoch": 3.076923076923077, "percentage": 25.68, "elapsed_time": "0:09:52", "remaining_time": "0:28:33"}
{"current_steps": 460, "total_steps": 1752, "loss": 0.5354, "learning_rate": 9.219929784447232e-06, "epoch": 3.1452991452991452, "percentage": 26.26, "elapsed_time": "0:10:03", "remaining_time": "0:28:16"}
{"current_steps": 470, "total_steps": 1752, "loss": 0.5613, "learning_rate": 9.16563550199674e-06, "epoch": 3.213675213675214, "percentage": 26.83, "elapsed_time": "0:10:15", "remaining_time": "0:27:59"}
{"current_steps": 480, "total_steps": 1752, "loss": 0.576, "learning_rate": 9.109686005344258e-06, "epoch": 3.282051282051282, "percentage": 27.4, "elapsed_time": "0:10:27", "remaining_time": "0:27:41"}
{"current_steps": 490, "total_steps": 1752, "loss": 0.492, "learning_rate": 9.052103526007976e-06, "epoch": 3.3504273504273505, "percentage": 27.97, "elapsed_time": "0:10:38", "remaining_time": "0:27:24"}
{"current_steps": 500, "total_steps": 1752, "loss": 0.5087, "learning_rate": 8.992910944371343e-06, "epoch": 3.4188034188034186, "percentage": 28.54, "elapsed_time": "0:10:50", "remaining_time": "0:27:08"}
{"current_steps": 510, "total_steps": 1752, "loss": 0.476, "learning_rate": 8.932131780591542e-06, "epoch": 3.4871794871794872, "percentage": 29.11, "elapsed_time": "0:11:01", "remaining_time": "0:26:51"}
{"current_steps": 520, "total_steps": 1752, "loss": 0.4111, "learning_rate": 8.869790185253766e-06, "epoch": 3.5555555555555554, "percentage": 29.68, "elapsed_time": "0:11:13", "remaining_time": "0:26:34"}
{"current_steps": 530, "total_steps": 1752, "loss": 0.4426, "learning_rate": 8.805910929774989e-06, "epoch": 3.623931623931624, "percentage": 30.25, "elapsed_time": "0:11:24", "remaining_time": "0:26:17"}
{"current_steps": 540, "total_steps": 1752, "loss": 0.4171, "learning_rate": 8.740519396561045e-06, "epoch": 3.6923076923076925, "percentage": 30.82, "elapsed_time": "0:11:35", "remaining_time": "0:26:01"}
{"current_steps": 550, "total_steps": 1752, "loss": 0.445, "learning_rate": 8.673641568920944e-06, "epoch": 3.7606837606837606, "percentage": 31.39, "elapsed_time": "0:11:47", "remaining_time": "0:25:45"}
{"current_steps": 560, "total_steps": 1752, "loss": 0.4653, "learning_rate": 8.60530402074241e-06, "epoch": 3.8290598290598292, "percentage": 31.96, "elapsed_time": "0:11:58", "remaining_time": "0:25:28"}
{"current_steps": 570, "total_steps": 1752, "loss": 0.4276, "learning_rate": 8.535533905932739e-06, "epoch": 3.8974358974358974, "percentage": 32.53, "elapsed_time": "0:12:08", "remaining_time": "0:25:11"}
{"current_steps": 580, "total_steps": 1752, "loss": 0.423, "learning_rate": 8.46435894762922e-06, "epoch": 3.965811965811966, "percentage": 33.11, "elapsed_time": "0:12:19", "remaining_time": "0:24:54"}
{"current_steps": 590, "total_steps": 1752, "loss": 0.3372, "learning_rate": 8.39180742718334e-06, "epoch": 4.034188034188034, "percentage": 33.68, "elapsed_time": "0:12:30", "remaining_time": "0:24:38"}
{"current_steps": 600, "total_steps": 1752, "loss": 0.29, "learning_rate": 8.317908172923207e-06, "epoch": 4.102564102564102, "percentage": 34.25, "elapsed_time": "0:12:42", "remaining_time": "0:24:23"}
{"current_steps": 610, "total_steps": 1752, "loss": 0.2464, "learning_rate": 8.242690548698611e-06, "epoch": 4.170940170940171, "percentage": 34.82, "elapsed_time": "0:12:52", "remaining_time": "0:24:06"}
{"current_steps": 620, "total_steps": 1752, "loss": 0.2754, "learning_rate": 8.166184442213314e-06, "epoch": 4.239316239316239, "percentage": 35.39, "elapsed_time": "0:13:03", "remaining_time": "0:23:50"}
{"current_steps": 630, "total_steps": 1752, "loss": 0.2699, "learning_rate": 8.088420253149173e-06, "epoch": 4.3076923076923075, "percentage": 35.96, "elapsed_time": "0:13:13", "remaining_time": "0:23:34"}
{"current_steps": 640, "total_steps": 1752, "loss": 0.2621, "learning_rate": 8.009428881086836e-06, "epoch": 4.3760683760683765, "percentage": 36.53, "elapsed_time": "0:13:24", "remaining_time": "0:23:17"}
{"current_steps": 650, "total_steps": 1752, "loss": 0.2688, "learning_rate": 7.9292417132278e-06, "epoch": 4.444444444444445, "percentage": 37.1, "elapsed_time": "0:13:35", "remaining_time": "0:23:02"}
{"current_steps": 660, "total_steps": 1752, "loss": 0.2871, "learning_rate": 7.847890611922721e-06, "epoch": 4.512820512820513, "percentage": 37.67, "elapsed_time": "0:13:46", "remaining_time": "0:22:46"}
{"current_steps": 670, "total_steps": 1752, "loss": 0.2439, "learning_rate": 7.76540790201091e-06, "epoch": 4.581196581196581, "percentage": 38.24, "elapsed_time": "0:13:56", "remaining_time": "0:22:30"}
{"current_steps": 680, "total_steps": 1752, "loss": 0.2478, "learning_rate": 7.68182635797606e-06, "epoch": 4.64957264957265, "percentage": 38.81, "elapsed_time": "0:14:06", "remaining_time": "0:22:14"}
{"current_steps": 690, "total_steps": 1752, "loss": 0.2385, "learning_rate": 7.597179190923343e-06, "epoch": 4.717948717948718, "percentage": 39.38, "elapsed_time": "0:14:16", "remaining_time": "0:21:58"}
{"current_steps": 700, "total_steps": 1752, "loss": 0.2525, "learning_rate": 7.511500035382943e-06, "epoch": 4.786324786324786, "percentage": 39.95, "elapsed_time": "0:14:27", "remaining_time": "0:21:44"}
{"current_steps": 710, "total_steps": 1752, "loss": 0.2448, "learning_rate": 7.424822935945416e-06, "epoch": 4.854700854700854, "percentage": 40.53, "elapsed_time": "0:14:37", "remaining_time": "0:21:28"}
{"current_steps": 720, "total_steps": 1752, "loss": 0.2173, "learning_rate": 7.33718233373407e-06, "epoch": 4.923076923076923, "percentage": 41.1, "elapsed_time": "0:14:47", "remaining_time": "0:21:12"}
{"current_steps": 730, "total_steps": 1752, "loss": 0.1926, "learning_rate": 7.248613052719793e-06, "epoch": 4.9914529914529915, "percentage": 41.67, "elapsed_time": "0:14:57", "remaining_time": "0:20:56"}
{"current_steps": 740, "total_steps": 1752, "loss": 0.1754, "learning_rate": 7.159150285883757e-06, "epoch": 5.05982905982906, "percentage": 42.24, "elapsed_time": "0:15:07", "remaining_time": "0:20:41"}
{"current_steps": 750, "total_steps": 1752, "loss": 0.1334, "learning_rate": 7.0688295812334995e-06, "epoch": 5.128205128205128, "percentage": 42.81, "elapsed_time": "0:15:18", "remaining_time": "0:20:26"}
{"current_steps": 760, "total_steps": 1752, "loss": 0.147, "learning_rate": 6.977686827677926e-06, "epoch": 5.196581196581197, "percentage": 43.38, "elapsed_time": "0:15:27", "remaining_time": "0:20:11"}
{"current_steps": 770, "total_steps": 1752, "loss": 0.1549, "learning_rate": 6.885758240766867e-06, "epoch": 5.264957264957265, "percentage": 43.95, "elapsed_time": "0:15:37", "remaining_time": "0:19:55"}
{"current_steps": 780, "total_steps": 1752, "loss": 0.1312, "learning_rate": 6.793080348300834e-06, "epoch": 5.333333333333333, "percentage": 44.52, "elapsed_time": "0:15:47", "remaining_time": "0:19:40"}
{"current_steps": 790, "total_steps": 1752, "loss": 0.1403, "learning_rate": 6.69968997581671e-06, "epoch": 5.401709401709402, "percentage": 45.09, "elapsed_time": "0:15:57", "remaining_time": "0:19:25"}
{"current_steps": 800, "total_steps": 1752, "loss": 0.1237, "learning_rate": 6.6056242319551315e-06, "epoch": 5.47008547008547, "percentage": 45.66, "elapsed_time": "0:16:07", "remaining_time": "0:19:11"}
{"current_steps": 810, "total_steps": 1752, "loss": 0.1233, "learning_rate": 6.510920493715381e-06, "epoch": 5.538461538461538, "percentage": 46.23, "elapsed_time": "0:16:16", "remaining_time": "0:18:55"}
{"current_steps": 820, "total_steps": 1752, "loss": 0.1667, "learning_rate": 6.415616391603639e-06, "epoch": 5.6068376068376065, "percentage": 46.8, "elapsed_time": "0:16:26", "remaining_time": "0:18:40"}
{"current_steps": 830, "total_steps": 1752, "loss": 0.1224, "learning_rate": 6.3197497946805205e-06, "epoch": 5.6752136752136755, "percentage": 47.37, "elapsed_time": "0:16:35", "remaining_time": "0:18:26"}
{"current_steps": 840, "total_steps": 1752, "loss": 0.118, "learning_rate": 6.223358795513812e-06, "epoch": 5.743589743589744, "percentage": 47.95, "elapsed_time": "0:16:45", "remaining_time": "0:18:11"}
{"current_steps": 850, "total_steps": 1752, "loss": 0.0897, "learning_rate": 6.126481695042392e-06, "epoch": 5.811965811965812, "percentage": 48.52, "elapsed_time": "0:16:55", "remaining_time": "0:17:57"}
{"current_steps": 860, "total_steps": 1752, "loss": 0.1142, "learning_rate": 6.029156987357373e-06, "epoch": 5.880341880341881, "percentage": 49.09, "elapsed_time": "0:17:04", "remaining_time": "0:17:42"}
{"current_steps": 870, "total_steps": 1752, "loss": 0.1542, "learning_rate": 5.931423344406478e-06, "epoch": 5.948717948717949, "percentage": 49.66, "elapsed_time": "0:17:13", "remaining_time": "0:17:27"}
{"current_steps": 880, "total_steps": 1752, "loss": 0.0937, "learning_rate": 5.8333196006277536e-06, "epoch": 6.017094017094017, "percentage": 50.23, "elapsed_time": "0:17:22", "remaining_time": "0:17:13"}
{"current_steps": 890, "total_steps": 1752, "loss": 0.0699, "learning_rate": 5.734884737518714e-06, "epoch": 6.085470085470085, "percentage": 50.8, "elapsed_time": "0:17:31", "remaining_time": "0:16:58"}
{"current_steps": 900, "total_steps": 1752, "loss": 0.0757, "learning_rate": 5.636157868147054e-06, "epoch": 6.153846153846154, "percentage": 51.37, "elapsed_time": "0:17:41", "remaining_time": "0:16:44"}
{"current_steps": 910, "total_steps": 1752, "loss": 0.0657, "learning_rate": 5.537178221609088e-06, "epoch": 6.222222222222222, "percentage": 51.94, "elapsed_time": "0:17:50", "remaining_time": "0:16:30"}
{"current_steps": 920, "total_steps": 1752, "loss": 0.0838, "learning_rate": 5.437985127442065e-06, "epoch": 6.2905982905982905, "percentage": 52.51, "elapsed_time": "0:17:59", "remaining_time": "0:16:16"}
{"current_steps": 930, "total_steps": 1752, "loss": 0.0876, "learning_rate": 5.338617999996603e-06, "epoch": 6.358974358974359, "percentage": 53.08, "elapsed_time": "0:18:08", "remaining_time": "0:16:01"}
{"current_steps": 940, "total_steps": 1752, "loss": 0.0652, "learning_rate": 5.239116322775392e-06, "epoch": 6.427350427350428, "percentage": 53.65, "elapsed_time": "0:18:17", "remaining_time": "0:15:47"}
{"current_steps": 950, "total_steps": 1752, "loss": 0.0843, "learning_rate": 5.139519632744443e-06, "epoch": 6.495726495726496, "percentage": 54.22, "elapsed_time": "0:18:26", "remaining_time": "0:15:33"}
{"current_steps": 960, "total_steps": 1752, "loss": 0.0677, "learning_rate": 5.039867504623084e-06, "epoch": 6.564102564102564, "percentage": 54.79, "elapsed_time": "0:18:34", "remaining_time": "0:15:19"}
{"current_steps": 970, "total_steps": 1752, "loss": 0.0764, "learning_rate": 4.940199535158954e-06, "epoch": 6.632478632478632, "percentage": 55.37, "elapsed_time": "0:18:43", "remaining_time": "0:15:05"}
{"current_steps": 980, "total_steps": 1752, "loss": 0.0642, "learning_rate": 4.8405553273942415e-06, "epoch": 6.700854700854701, "percentage": 55.94, "elapsed_time": "0:18:51", "remaining_time": "0:14:51"}
{"current_steps": 990, "total_steps": 1752, "loss": 0.0444, "learning_rate": 4.740974474929438e-06, "epoch": 6.769230769230769, "percentage": 56.51, "elapsed_time": "0:19:00", "remaining_time": "0:14:37"}
{"current_steps": 1000, "total_steps": 1752, "loss": 0.0713, "learning_rate": 4.641496546190813e-06, "epoch": 6.837606837606837, "percentage": 57.08, "elapsed_time": "0:19:09", "remaining_time": "0:14:24"}
{"current_steps": 1000, "total_steps": 1752, "eval_loss": 0.19593119621276855, "epoch": 6.837606837606837, "percentage": 57.08, "elapsed_time": "0:19:14", "remaining_time": "0:14:28"}
{"current_steps": 1010, "total_steps": 1752, "loss": 0.0494, "learning_rate": 4.542161068707927e-06, "epoch": 6.905982905982906, "percentage": 57.65, "elapsed_time": "0:19:22", "remaining_time": "0:14:14"}
{"current_steps": 1020, "total_steps": 1752, "loss": 0.0492, "learning_rate": 4.443007513407368e-06, "epoch": 6.9743589743589745, "percentage": 58.22, "elapsed_time": "0:19:30", "remaining_time": "0:14:00"}
{"current_steps": 1030, "total_steps": 1752, "loss": 0.1084, "learning_rate": 4.344075278928989e-06, "epoch": 7.042735042735043, "percentage": 58.79, "elapsed_time": "0:19:39", "remaining_time": "0:13:46"}
{"current_steps": 1040, "total_steps": 1752, "loss": 0.0605, "learning_rate": 4.245403675970877e-06, "epoch": 7.111111111111111, "percentage": 59.36, "elapsed_time": "0:19:47", "remaining_time": "0:13:33"}
{"current_steps": 1050, "total_steps": 1752, "loss": 0.0422, "learning_rate": 4.147031911669243e-06, "epoch": 7.17948717948718, "percentage": 59.93, "elapsed_time": "0:19:56", "remaining_time": "0:13:19"}
{"current_steps": 1060, "total_steps": 1752, "loss": 0.0388, "learning_rate": 4.048999074019493e-06, "epoch": 7.247863247863248, "percentage": 60.5, "elapsed_time": "0:20:04", "remaining_time": "0:13:06"}
{"current_steps": 1070, "total_steps": 1752, "loss": 0.0295, "learning_rate": 3.951344116344606e-06, "epoch": 7.316239316239316, "percentage": 61.07, "elapsed_time": "0:20:11", "remaining_time": "0:12:52"}
{"current_steps": 1080, "total_steps": 1752, "loss": 0.0545, "learning_rate": 3.854105841817056e-06, "epoch": 7.384615384615385, "percentage": 61.64, "elapsed_time": "0:20:19", "remaining_time": "0:12:39"}
{"current_steps": 1090, "total_steps": 1752, "loss": 0.0337, "learning_rate": 3.7573228880403734e-06, "epoch": 7.452991452991453, "percentage": 62.21, "elapsed_time": "0:20:27", "remaining_time": "0:12:25"}
{"current_steps": 1100, "total_steps": 1752, "loss": 0.0507, "learning_rate": 3.661033711696501e-06, "epoch": 7.521367521367521, "percentage": 62.79, "elapsed_time": "0:20:35", "remaining_time": "0:12:12"}
{"current_steps": 1110, "total_steps": 1752, "loss": 0.0419, "learning_rate": 3.5652765732650523e-06, "epoch": 7.589743589743589, "percentage": 63.36, "elapsed_time": "0:20:43", "remaining_time": "0:11:59"}
{"current_steps": 1120, "total_steps": 1752, "loss": 0.0423, "learning_rate": 3.4700895218205026e-06, "epoch": 7.6581196581196584, "percentage": 63.93, "elapsed_time": "0:20:51", "remaining_time": "0:11:46"}
{"current_steps": 1130, "total_steps": 1752, "loss": 0.0488, "learning_rate": 3.375510379913418e-06, "epoch": 7.726495726495727, "percentage": 64.5, "elapsed_time": "0:20:58", "remaining_time": "0:11:32"}
{"current_steps": 1140, "total_steps": 1752, "loss": 0.0388, "learning_rate": 3.2815767285416576e-06, "epoch": 7.794871794871795, "percentage": 65.07, "elapsed_time": "0:21:06", "remaining_time": "0:11:20"}
{"current_steps": 1150, "total_steps": 1752, "loss": 0.0197, "learning_rate": 3.188325892217587e-06, "epoch": 7.863247863247864, "percentage": 65.64, "elapsed_time": "0:21:14", "remaining_time": "0:11:07"}
{"current_steps": 1160, "total_steps": 1752, "loss": 0.0326, "learning_rate": 3.0957949241371845e-06, "epoch": 7.931623931623932, "percentage": 66.21, "elapsed_time": "0:21:21", "remaining_time": "0:10:54"}
{"current_steps": 1170, "total_steps": 1752, "loss": 0.0383, "learning_rate": 3.0040205914569664e-06, "epoch": 8.0, "percentage": 66.78, "elapsed_time": "0:21:29", "remaining_time": "0:10:41"}
{"current_steps": 1180, "total_steps": 1752, "loss": 0.0296, "learning_rate": 2.913039360684565e-06, "epoch": 8.068376068376068, "percentage": 67.35, "elapsed_time": "0:21:36", "remaining_time": "0:10:28"}
{"current_steps": 1190, "total_steps": 1752, "loss": 0.0564, "learning_rate": 2.822887383188775e-06, "epoch": 8.136752136752136, "percentage": 67.92, "elapsed_time": "0:21:44", "remaining_time": "0:10:15"}
{"current_steps": 1200, "total_steps": 1752, "loss": 0.0295, "learning_rate": 2.7336004808348094e-06, "epoch": 8.205128205128204, "percentage": 68.49, "elapsed_time": "0:21:51", "remaining_time": "0:10:03"}
{"current_steps": 1210, "total_steps": 1752, "loss": 0.0194, "learning_rate": 2.645214131750498e-06, "epoch": 8.273504273504274, "percentage": 69.06, "elapsed_time": "0:21:58", "remaining_time": "0:09:50"}
{"current_steps": 1220, "total_steps": 1752, "loss": 0.0261, "learning_rate": 2.5577634562290567e-06, "epoch": 8.341880341880342, "percentage": 69.63, "elapsed_time": "0:22:05", "remaining_time": "0:09:38"}
{"current_steps": 1230, "total_steps": 1752, "loss": 0.0237, "learning_rate": 2.4712832027740545e-06, "epoch": 8.41025641025641, "percentage": 70.21, "elapsed_time": "0:22:12", "remaining_time": "0:09:25"}
{"current_steps": 1240, "total_steps": 1752, "loss": 0.037, "learning_rate": 2.385807734292097e-06, "epoch": 8.478632478632479, "percentage": 70.78, "elapsed_time": "0:22:20", "remaining_time": "0:09:13"}
{"current_steps": 1250, "total_steps": 1752, "loss": 0.0241, "learning_rate": 2.3013710144387374e-06, "epoch": 8.547008547008547, "percentage": 71.35, "elapsed_time": "0:22:27", "remaining_time": "0:09:01"}
{"current_steps": 1260, "total_steps": 1752, "loss": 0.0258, "learning_rate": 2.218006594123028e-06, "epoch": 8.615384615384615, "percentage": 71.92, "elapsed_time": "0:22:34", "remaining_time": "0:08:48"}
{"current_steps": 1270, "total_steps": 1752, "loss": 0.0361, "learning_rate": 2.1357475981760704e-06, "epoch": 8.683760683760683, "percentage": 72.49, "elapsed_time": "0:22:41", "remaining_time": "0:08:36"}
{"current_steps": 1280, "total_steps": 1752, "loss": 0.0243, "learning_rate": 2.0546267121888863e-06, "epoch": 8.752136752136753, "percentage": 73.06, "elapsed_time": "0:22:47", "remaining_time": "0:08:24"}
{"current_steps": 1290, "total_steps": 1752, "loss": 0.0404, "learning_rate": 1.9746761695247803e-06, "epoch": 8.820512820512821, "percentage": 73.63, "elapsed_time": "0:22:54", "remaining_time": "0:08:12"}
{"current_steps": 1300, "total_steps": 1752, "loss": 0.0284, "learning_rate": 1.8959277385114516e-06, "epoch": 8.88888888888889, "percentage": 74.2, "elapsed_time": "0:23:01", "remaining_time": "0:08:00"}
{"current_steps": 1310, "total_steps": 1752, "loss": 0.028, "learning_rate": 1.8184127098178288e-06, "epoch": 8.957264957264957, "percentage": 74.77, "elapsed_time": "0:23:08", "remaining_time": "0:07:48"}
{"current_steps": 1320, "total_steps": 1752, "loss": 0.0174, "learning_rate": 1.7421618840207576e-06, "epoch": 9.025641025641026, "percentage": 75.34, "elapsed_time": "0:23:14", "remaining_time": "0:07:36"}
{"current_steps": 1330, "total_steps": 1752, "loss": 0.0293, "learning_rate": 1.667205559366372e-06, "epoch": 9.094017094017094, "percentage": 75.91, "elapsed_time": "0:23:21", "remaining_time": "0:07:24"}
{"current_steps": 1340, "total_steps": 1752, "loss": 0.0459, "learning_rate": 1.5935735197311204e-06, "epoch": 9.162393162393162, "percentage": 76.48, "elapsed_time": "0:23:28", "remaining_time": "0:07:12"}
{"current_steps": 1350, "total_steps": 1752, "loss": 0.0197, "learning_rate": 1.5212950227871292e-06, "epoch": 9.23076923076923, "percentage": 77.05, "elapsed_time": "0:23:34", "remaining_time": "0:07:01"}
{"current_steps": 1360, "total_steps": 1752, "loss": 0.0208, "learning_rate": 1.4503987883766857e-06, "epoch": 9.2991452991453, "percentage": 77.63, "elapsed_time": "0:23:40", "remaining_time": "0:06:49"}
{"current_steps": 1370, "total_steps": 1752, "loss": 0.0332, "learning_rate": 1.3809129871004113e-06, "epoch": 9.367521367521368, "percentage": 78.2, "elapsed_time": "0:23:47", "remaining_time": "0:06:37"}
{"current_steps": 1380, "total_steps": 1752, "loss": 0.0186, "learning_rate": 1.312865229123681e-06, "epoch": 9.435897435897436, "percentage": 78.77, "elapsed_time": "0:23:53", "remaining_time": "0:06:26"}
{"current_steps": 1390, "total_steps": 1752, "loss": 0.0365, "learning_rate": 1.2462825532057394e-06, "epoch": 9.504273504273504, "percentage": 79.34, "elapsed_time": "0:23:59", "remaining_time": "0:06:14"}
{"current_steps": 1400, "total_steps": 1752, "loss": 0.0172, "learning_rate": 1.1811914159558374e-06, "epoch": 9.572649572649572, "percentage": 79.91, "elapsed_time": "0:24:05", "remaining_time": "0:06:03"}
{"current_steps": 1410, "total_steps": 1752, "loss": 0.0307, "learning_rate": 1.117617681320729e-06, "epoch": 9.64102564102564, "percentage": 80.48, "elapsed_time": "0:24:11", "remaining_time": "0:05:52"}
{"current_steps": 1420, "total_steps": 1752, "loss": 0.0138, "learning_rate": 1.0555866103076212e-06, "epoch": 9.709401709401709, "percentage": 81.05, "elapsed_time": "0:24:17", "remaining_time": "0:05:40"}
{"current_steps": 1430, "total_steps": 1752, "loss": 0.0204, "learning_rate": 9.951228509467248e-07, "epoch": 9.777777777777779, "percentage": 81.62, "elapsed_time": "0:24:23", "remaining_time": "0:05:29"}
{"current_steps": 1440, "total_steps": 1752, "loss": 0.017, "learning_rate": 9.362504284973683e-07, "epoch": 9.846153846153847, "percentage": 82.19, "elapsed_time": "0:24:29", "remaining_time": "0:05:18"}
{"current_steps": 1450, "total_steps": 1752, "loss": 0.0236, "learning_rate": 8.789927359015643e-07, "epoch": 9.914529914529915, "percentage": 82.76, "elapsed_time": "0:24:35", "remaining_time": "0:05:07"}
{"current_steps": 1460, "total_steps": 1752, "loss": 0.0234, "learning_rate": 8.233725244888291e-07, "epoch": 9.982905982905983, "percentage": 83.33, "elapsed_time": "0:24:41", "remaining_time": "0:04:56"}
{"current_steps": 1470, "total_steps": 1752, "loss": 0.015, "learning_rate": 7.694118949359553e-07, "epoch": 10.051282051282051, "percentage": 83.9, "elapsed_time": "0:24:46", "remaining_time": "0:04:45"}
{"current_steps": 1480, "total_steps": 1752, "loss": 0.0209, "learning_rate": 7.171322884852988e-07, "epoch": 10.11965811965812, "percentage": 84.47, "elapsed_time": "0:24:52", "remaining_time": "0:04:34"}
{"current_steps": 1490, "total_steps": 1752, "loss": 0.0156, "learning_rate": 6.665544784251232e-07, "epoch": 10.188034188034187, "percentage": 85.05, "elapsed_time": "0:24:57", "remaining_time": "0:04:23"}
{"current_steps": 1500, "total_steps": 1752, "loss": 0.0245, "learning_rate": 6.176985618353282e-07, "epoch": 10.256410256410255, "percentage": 85.62, "elapsed_time": "0:25:03", "remaining_time": "0:04:12"}
{"current_steps": 1510, "total_steps": 1752, "loss": 0.0143, "learning_rate": 5.705839516018818e-07, "epoch": 10.324786324786325, "percentage": 86.19, "elapsed_time": "0:25:09", "remaining_time": "0:04:01"}
{"current_steps": 1520, "total_steps": 1752, "loss": 0.0279, "learning_rate": 5.252293687031196e-07, "epoch": 10.393162393162394, "percentage": 86.76, "elapsed_time": "0:25:14", "remaining_time": "0:03:51"}
{"current_steps": 1530, "total_steps": 1752, "loss": 0.0215, "learning_rate": 4.816528347709614e-07, "epoch": 10.461538461538462, "percentage": 87.33, "elapsed_time": "0:25:19", "remaining_time": "0:03:40"}
{"current_steps": 1540, "total_steps": 1752, "loss": 0.0145, "learning_rate": 4.398716649300311e-07, "epoch": 10.52991452991453, "percentage": 87.9, "elapsed_time": "0:25:24", "remaining_time": "0:03:29"}
{"current_steps": 1550, "total_steps": 1752, "loss": 0.0196, "learning_rate": 3.999024609174812e-07, "epoch": 10.598290598290598, "percentage": 88.47, "elapsed_time": "0:25:30", "remaining_time": "0:03:19"}
{"current_steps": 1560, "total_steps": 1752, "loss": 0.0271, "learning_rate": 3.61761104486314e-07, "epoch": 10.666666666666666, "percentage": 89.04, "elapsed_time": "0:25:35", "remaining_time": "0:03:08"}
{"current_steps": 1570, "total_steps": 1752, "loss": 0.0179, "learning_rate": 3.2546275109475554e-07, "epoch": 10.735042735042736, "percentage": 89.61, "elapsed_time": "0:25:40", "remaining_time": "0:02:58"}
{"current_steps": 1580, "total_steps": 1752, "loss": 0.0228, "learning_rate": 2.9102182388425106e-07, "epoch": 10.803418803418804, "percentage": 90.18, "elapsed_time": "0:25:45", "remaining_time": "0:02:48"}
{"current_steps": 1590, "total_steps": 1752, "loss": 0.0217, "learning_rate": 2.5845200794842154e-07, "epoch": 10.871794871794872, "percentage": 90.75, "elapsed_time": "0:25:50", "remaining_time": "0:02:37"}
{"current_steps": 1600, "total_steps": 1752, "loss": 0.0212, "learning_rate": 2.2776624489530664e-07, "epoch": 10.94017094017094, "percentage": 91.32, "elapsed_time": "0:25:56", "remaining_time": "0:02:27"}
{"current_steps": 1610, "total_steps": 1752, "loss": 0.0492, "learning_rate": 1.9897672770501198e-07, "epoch": 11.008547008547009, "percentage": 91.89, "elapsed_time": "0:26:11", "remaining_time": "0:02:18"}
{"current_steps": 1620, "total_steps": 1752, "loss": 0.0265, "learning_rate": 1.7209489588483396e-07, "epoch": 11.076923076923077, "percentage": 92.47, "elapsed_time": "0:26:25", "remaining_time": "0:02:09"}
{"current_steps": 1630, "total_steps": 1752, "loss": 0.0188, "learning_rate": 1.4713143092377534e-07, "epoch": 11.145299145299145, "percentage": 93.04, "elapsed_time": "0:26:40", "remaining_time": "0:01:59"}
{"current_steps": 1640, "total_steps": 1752, "loss": 0.025, "learning_rate": 1.2409625204825802e-07, "epoch": 11.213675213675213, "percentage": 93.61, "elapsed_time": "0:26:54", "remaining_time": "0:01:50"}
{"current_steps": 1650, "total_steps": 1752, "loss": 0.0186, "learning_rate": 1.0299851228072089e-07, "epoch": 11.282051282051283, "percentage": 94.18, "elapsed_time": "0:27:09", "remaining_time": "0:01:40"}
{"current_steps": 1660, "total_steps": 1752, "loss": 0.0257, "learning_rate": 8.384659480266733e-08, "epoch": 11.350427350427351, "percentage": 94.75, "elapsed_time": "0:27:23", "remaining_time": "0:01:31"}
{"current_steps": 1670, "total_steps": 1752, "loss": 0.0213, "learning_rate": 6.664810962361268e-08, "epoch": 11.418803418803419, "percentage": 95.32, "elapsed_time": "0:27:36", "remaining_time": "0:01:21"}
{"current_steps": 1680, "total_steps": 1752, "loss": 0.0137, "learning_rate": 5.1409890557246876e-08, "epoch": 11.487179487179487, "percentage": 95.89, "elapsed_time": "0:27:50", "remaining_time": "0:01:11"}
{"current_steps": 1690, "total_steps": 1752, "loss": 0.0164, "learning_rate": 3.813799250602046e-08, "epoch": 11.555555555555555, "percentage": 96.46, "elapsed_time": "0:28:04", "remaining_time": "0:01:01"}
{"current_steps": 1700, "total_steps": 1752, "loss": 0.0197, "learning_rate": 2.683768905523243e-08, "epoch": 11.623931623931623, "percentage": 97.03, "elapsed_time": "0:28:19", "remaining_time": "0:00:51"}
{"current_steps": 1710, "total_steps": 1752, "loss": 0.0246, "learning_rate": 1.7513470377570896e-08, "epoch": 11.692307692307692, "percentage": 97.6, "elapsed_time": "0:28:32", "remaining_time": "0:00:42"}
{"current_steps": 1720, "total_steps": 1752, "loss": 0.0295, "learning_rate": 1.016904144894304e-08, "epoch": 11.760683760683762, "percentage": 98.17, "elapsed_time": "0:28:46", "remaining_time": "0:00:32"}
{"current_steps": 1730, "total_steps": 1752, "loss": 0.029, "learning_rate": 4.807320576307728e-09, "epoch": 11.82905982905983, "percentage": 98.74, "elapsed_time": "0:28:59", "remaining_time": "0:00:22"}
{"current_steps": 1740, "total_steps": 1752, "loss": 0.0177, "learning_rate": 1.4304382380819771e-09, "epoch": 11.897435897435898, "percentage": 99.32, "elapsed_time": "0:29:13", "remaining_time": "0:00:12"}
{"current_steps": 1750, "total_steps": 1752, "loss": 0.0261, "learning_rate": 3.9736237600895846e-11, "epoch": 11.965811965811966, "percentage": 99.89, "elapsed_time": "0:29:27", "remaining_time": "0:00:02"}
{"current_steps": 1752, "total_steps": 1752, "epoch": 11.97948717948718, "percentage": 100.0, "elapsed_time": "0:30:38", "remaining_time": "0:00:00"}