{
    "train": [
        {
            "iteration": 66,
            "learning_rate": 0.09940979100610842,
            "alpha": 0.026222894604135148,
            "loss": 0.5450071606672171,
            "class_loss": 0.2719600424170494,
            "p_class_loss": 0.27158846322334174,
            "re_loss": 0.07284604478627443,
            "conf_loss": 0.0,
            "time": 17
        },
        {
            "iteration": 132,
            "learning_rate": 0.09881010321233788,
            "alpha": 0.0794755421079173,
            "loss": 0.3239706930789081,
            "class_loss": 0.15707427895430362,
            "p_class_loss": 0.1604394937554995,
            "re_loss": 0.07997607191403706,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 198,
            "learning_rate": 0.09821001074703892,
            "alpha": 0.13272818961169944,
            "loss": 0.26553548420920514,
            "class_loss": 0.12469609330097835,
            "p_class_loss": 0.1285738782449202,
            "re_loss": 0.0920192040503025,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 264,
            "learning_rate": 0.09760951058596966,
            "alpha": 0.1859808371154816,
            "loss": 0.2733975572116447,
            "class_loss": 0.1247994712356365,
            "p_class_loss": 0.12962091635122444,
            "re_loss": 0.10199228391954393,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 330,
            "learning_rate": 0.09700859966144519,
            "alpha": 0.23923348461926378,
            "loss": 0.2663032448653019,
            "class_loss": 0.1184216094287959,
            "p_class_loss": 0.12372892045161941,
            "re_loss": 0.10121869961872246,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 396,
            "learning_rate": 0.09640727486140988,
            "alpha": 0.29248613212304586,
            "loss": 0.26374987467671884,
            "class_loss": 0.11474183709783987,
            "p_class_loss": 0.11929779454614177,
            "re_loss": 0.10162909678889043,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 462,
            "learning_rate": 0.09580553302848344,
            "alpha": 0.3457387796268281,
            "loss": 0.24972676779284622,
            "class_loss": 0.10515463803753708,
            "p_class_loss": 0.10960515701409543,
            "re_loss": 0.10109633884646675,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 528,
            "learning_rate": 0.09520337095897968,
            "alpha": 0.3989914271306102,
            "loss": 0.24802840184984784,
            "class_loss": 0.10240552929992025,
            "p_class_loss": 0.1072990692813288,
            "re_loss": 0.0960450272329829,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 594,
            "learning_rate": 0.09460078540189698,
            "alpha": 0.45224407463439237,
            "loss": 0.24083833215814648,
            "class_loss": 0.09823239205235784,
            "p_class_loss": 0.10228147075483293,
            "re_loss": 0.0891773933262536,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 660,
            "learning_rate": 0.09399777305787926,
            "alpha": 0.5054967221381744,
            "loss": 0.23403261895432617,
            "class_loss": 0.09296581589362839,
            "p_class_loss": 0.09666790310857874,
            "re_loss": 0.08786474913358688,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 726,
            "learning_rate": 0.09339433057814689,
            "alpha": 0.5587493696419568,
            "loss": 0.22584524231426645,
            "class_loss": 0.08619872390320807,
            "p_class_loss": 0.09082961042947842,
            "re_loss": 0.08733003225290414,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 792,
            "learning_rate": 0.09279045456339578,
            "alpha": 0.6120020171457388,
            "loss": 0.2238871936093677,
            "class_loss": 0.08420200346771514,
            "p_class_loss": 0.0881081625367656,
            "re_loss": 0.08435557867315682,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 858,
            "learning_rate": 0.09218614156266403,
            "alpha": 0.6652546646495209,
            "loss": 0.25126359182776825,
            "class_loss": 0.09570740976116875,
            "p_class_loss": 0.09976435345456455,
            "re_loss": 0.08390260583749323,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 924,
            "learning_rate": 0.0915813880721646,
            "alpha": 0.718507312153303,
            "loss": 0.23258026903777412,
            "class_loss": 0.087085176597942,
            "p_class_loss": 0.09137819690460508,
            "re_loss": 0.07531174616605947,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 990,
            "learning_rate": 0.09097619053408289,
            "alpha": 0.7717599596570852,
            "loss": 0.24150935312112173,
            "class_loss": 0.08869045521273758,
            "p_class_loss": 0.09336557188494639,
            "re_loss": 0.07710366176836418,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1056,
            "learning_rate": 0.0903705453353378,
            "alpha": 0.8250126071608674,
            "loss": 0.2436313062454715,
            "class_loss": 0.08870799054927898,
            "p_class_loss": 0.09312053735960614,
            "re_loss": 0.07488705850008762,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1122,
            "learning_rate": 0.08976444880630503,
            "alpha": 0.8782652546646494,
            "loss": 0.2431275395281387,
            "class_loss": 0.08851562299285874,
            "p_class_loss": 0.09230115146122196,
            "re_loss": 0.07094635001637718,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1188,
            "learning_rate": 0.0891578972195009,
            "alpha": 0.9315179021684319,
            "loss": 0.23833946296662994,
            "class_loss": 0.08463501489975235,
            "p_class_loss": 0.08869763192805377,
            "re_loss": 0.06980998983437364,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1254,
            "learning_rate": 0.08855088678822559,
            "alpha": 0.9847705496722139,
            "loss": 0.25117628118305496,
            "class_loss": 0.09045374653104579,
            "p_class_loss": 0.09488564876444412,
            "re_loss": 0.0668236751560912,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1320,
            "learning_rate": 0.08794341366516371,
            "alpha": 1.038023197175996,
            "loss": 0.24357454298120557,
            "class_loss": 0.08597620096847866,
            "p_class_loss": 0.09024365472071098,
            "re_loss": 0.06491588569726005,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1386,
            "learning_rate": 0.08733547394094103,
            "alpha": 1.0912758446797781,
            "loss": 0.23244541906046146,
            "class_loss": 0.0796355206632253,
            "p_class_loss": 0.08425782045180147,
            "re_loss": 0.0628145280096567,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 1452,
            "learning_rate": 0.08672706364263531,
            "alpha": 1.1445284921835603,
            "loss": 0.2405217303471132,
            "class_loss": 0.08292913431245269,
            "p_class_loss": 0.0869243981486017,
            "re_loss": 0.06178495711223646,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1518,
            "learning_rate": 0.08611817873223943,
            "alpha": 1.1977811396873423,
            "loss": 0.2408737176747033,
            "class_loss": 0.08204288509759036,
            "p_class_loss": 0.08627836643294855,
            "re_loss": 0.06059345631211093,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1584,
            "learning_rate": 0.08550881510507505,
            "alpha": 1.2510337871911246,
            "loss": 0.23835166430834567,
            "class_loss": 0.08173276551745155,
            "p_class_loss": 0.08601413226940414,
            "re_loss": 0.05644771016456864,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1650,
            "learning_rate": 0.08489896858815486,
            "alpha": 1.304286434694907,
            "loss": 0.24540752917528152,
            "class_loss": 0.08316354662405723,
            "p_class_loss": 0.08685451988695246,
            "re_loss": 0.057813251221721824,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1716,
            "learning_rate": 0.08428863493849083,
            "alpha": 1.3575390821986888,
            "loss": 0.2451987544243986,
            "class_loss": 0.08187094438030865,
            "p_class_loss": 0.08626364069906148,
            "re_loss": 0.05675332694116867,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1782,
            "learning_rate": 0.08367780984134703,
            "alpha": 1.4107917297024708,
            "loss": 0.25886295261708175,
            "class_loss": 0.08759044223662579,
            "p_class_loss": 0.09151475345998099,
            "re_loss": 0.05651629552470915,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1848,
            "learning_rate": 0.08306648890843393,
            "alpha": 1.4640443772062532,
            "loss": 0.25099986430370447,
            "class_loss": 0.08509243268406752,
            "p_class_loss": 0.08909717354584824,
            "re_loss": 0.052473024944915916,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1914,
            "learning_rate": 0.08245466767604248,
            "alpha": 1.5172970247100355,
            "loss": 0.2478150702787168,
            "class_loss": 0.0831136822023175,
            "p_class_loss": 0.08717884315234242,
            "re_loss": 0.05110970314479235,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 1980,
            "learning_rate": 0.0818423416031146,
            "alpha": 1.5705496722138175,
            "loss": 0.2568096685590166,
            "class_loss": 0.08583537370643833,
            "p_class_loss": 0.08992152694951404,
            "re_loss": 0.051602302570686195,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2046,
            "learning_rate": 0.08122950606924835,
            "alpha": 1.6238023197175997,
            "loss": 0.24313921865188715,
            "class_loss": 0.0795568609327981,
            "p_class_loss": 0.08352077148403182,
            "re_loss": 0.04930350325550094,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 2112,
            "learning_rate": 0.0806161563726341,
            "alpha": 1.677054967221382,
            "loss": 0.25829118715994287,
            "class_loss": 0.08546251916524136,
            "p_class_loss": 0.08965610639389718,
            "re_loss": 0.04959405699011051,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2178,
            "learning_rate": 0.08000228772791923,
            "alpha": 1.7303076147251637,
            "loss": 0.24346150593324142,
            "class_loss": 0.07971308273122166,
            "p_class_loss": 0.08370428554939502,
            "re_loss": 0.04626408295536583,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2244,
            "learning_rate": 0.07938789526399825,
            "alpha": 1.783560262228946,
            "loss": 0.25505450051842316,
            "class_loss": 0.08342806740917942,
            "p_class_loss": 0.08767575945592287,
            "re_loss": 0.04707317347779418,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2310,
            "learning_rate": 0.07877297402172462,
            "alpha": 1.8368129097327284,
            "loss": 0.2539670237086036,
            "class_loss": 0.08332897248593243,
            "p_class_loss": 0.0879055294913776,
            "re_loss": 0.0450384822024992,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2376,
            "learning_rate": 0.07815751895154155,
            "alpha": 1.8900655572365106,
            "loss": 0.2583456201986833,
            "class_loss": 0.08406420785820845,
            "p_class_loss": 0.08900690349665555,
            "re_loss": 0.04511673491673939,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2442,
            "learning_rate": 0.07754152491102728,
            "alpha": 1.9433182047402924,
            "loss": 0.25982668544306897,
            "class_loss": 0.08582925977128925,
            "p_class_loss": 0.08980320749635046,
            "re_loss": 0.04332522602018082,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2508,
            "learning_rate": 0.0769249866623518,
            "alpha": 1.9965708522440746,
            "loss": 0.2634853637128165,
            "class_loss": 0.0871019008936304,
            "p_class_loss": 0.09112730616647186,
            "re_loss": 0.04271891459145329,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2574,
            "learning_rate": 0.07630789886964046,
            "alpha": 2.049823499747857,
            "loss": 0.2545958874803601,
            "class_loss": 0.08146914292239782,
            "p_class_loss": 0.08554070476781238,
            "re_loss": 0.04272544725487629,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2640,
            "learning_rate": 0.07569025609624035,
            "alpha": 2.1030761472516386,
            "loss": 0.25830947878685867,
            "class_loss": 0.08550456183200533,
            "p_class_loss": 0.089429453410434,
            "re_loss": 0.039643184366551315,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2706,
            "learning_rate": 0.07507205280188498,
            "alpha": 2.1563287947554204,
            "loss": 0.2610703661586299,
            "class_loss": 0.0842935936807683,
            "p_class_loss": 0.08812702983392007,
            "re_loss": 0.041114315701027714,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 2772,
            "learning_rate": 0.07445328333975257,
            "alpha": 2.2095814422592035,
            "loss": 0.25248731650186307,
            "class_loss": 0.08096548768155502,
            "p_class_loss": 0.08472800593484532,
            "re_loss": 0.03927994846846118,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2838,
            "learning_rate": 0.07383394195341275,
            "alpha": 2.2628340897629853,
            "loss": 0.25395778995571716,
            "class_loss": 0.08164638410689253,
            "p_class_loss": 0.08568226636359186,
            "re_loss": 0.03829048912633549,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2904,
            "learning_rate": 0.07321402277365652,
            "alpha": 2.3160867372667675,
            "loss": 0.26523306446545053,
            "class_loss": 0.08443768785306902,
            "p_class_loss": 0.08837210443435294,
            "re_loss": 0.03990701140102112,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 2970,
            "learning_rate": 0.07259351981520377,
            "alpha": 2.369339384770549,
            "loss": 0.2607404598684022,
            "class_loss": 0.08362882471445834,
            "p_class_loss": 0.08754278538805066,
            "re_loss": 0.03781399904101184,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3036,
            "learning_rate": 0.07197242697328256,
            "alpha": 2.4225920322743315,
            "loss": 0.2754637786384785,
            "class_loss": 0.08927830788448002,
            "p_class_loss": 0.0934642262079499,
            "re_loss": 0.03827794217927889,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3102,
            "learning_rate": 0.07135073802007359,
            "alpha": 2.4758446797781133,
            "loss": 0.26902585860454675,
            "class_loss": 0.08719981219055074,
            "p_class_loss": 0.09129711308262566,
            "re_loss": 0.036564326252449646,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3168,
            "learning_rate": 0.07072844660101366,
            "alpha": 2.5290973272818964,
            "loss": 0.2728153695211266,
            "class_loss": 0.09009763039648533,
            "p_class_loss": 0.09386470744555647,
            "re_loss": 0.03513314775332357,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3234,
            "learning_rate": 0.07010554623095056,
            "alpha": 2.5823499747856786,
            "loss": 0.2590617094979142,
            "class_loss": 0.08220596571989132,
            "p_class_loss": 0.0865419895924402,
            "re_loss": 0.03496952448040247,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3300,
            "learning_rate": 0.06948203029014233,
            "alpha": 2.6356026222894613,
            "loss": 0.2650026462294839,
            "class_loss": 0.08538902658178951,
            "p_class_loss": 0.08923049982298505,
            "re_loss": 0.03429930281797142,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3366,
            "learning_rate": 0.06885789202009267,
            "alpha": 2.6888552697932426,
            "loss": 0.2514105369195794,
            "class_loss": 0.07832020476008907,
            "p_class_loss": 0.08229877698150548,
            "re_loss": 0.03377311815027938,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 3432,
            "learning_rate": 0.06823312451921427,
            "alpha": 2.742107917297025,
            "loss": 0.2762194709344344,
            "class_loss": 0.08930381805156216,
            "p_class_loss": 0.09330784687490175,
            "re_loss": 0.03413160752053514,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3498,
            "learning_rate": 0.06760772073831112,
            "alpha": 2.7953605648008066,
            "loss": 0.2590884956898111,
            "class_loss": 0.08126910369504582,
            "p_class_loss": 0.08468272192685893,
            "re_loss": 0.03332559208413868,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3564,
            "learning_rate": 0.06698167347587,
            "alpha": 2.8486132123045893,
            "loss": 0.28580498176090646,
            "class_loss": 0.09190800915839094,
            "p_class_loss": 0.0961967549766555,
            "re_loss": 0.0343063265988321,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3630,
            "learning_rate": 0.06635497537315145,
            "alpha": 2.901865859808371,
            "loss": 0.26933442575461936,
            "class_loss": 0.08560791092388557,
            "p_class_loss": 0.09018072485923767,
            "re_loss": 0.03223752086474137,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3696,
            "learning_rate": 0.06572761890906934,
            "alpha": 2.9551185073121538,
            "loss": 0.2647725167599591,
            "class_loss": 0.08450695787641135,
            "p_class_loss": 0.08811029029840772,
            "re_loss": 0.031185046995453762,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3762,
            "learning_rate": 0.06509959639484725,
            "alpha": 3.008371154815936,
            "loss": 0.2703752718640096,
            "class_loss": 0.08654467377698782,
            "p_class_loss": 0.09070696501117764,
            "re_loss": 0.030954612701228172,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3828,
            "learning_rate": 0.0644708999684401,
            "alpha": 3.061623802319718,
            "loss": 0.27522876249118283,
            "class_loss": 0.08832836602673386,
            "p_class_loss": 0.09207907070716222,
            "re_loss": 0.030969640804511128,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3894,
            "learning_rate": 0.06384152158870743,
            "alpha": 3.1148764498234995,
            "loss": 0.27678272624810535,
            "class_loss": 0.08827377630002571,
            "p_class_loss": 0.09211757525124332,
            "re_loss": 0.03094666756012223,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 3960,
            "learning_rate": 0.06321145302932472,
            "alpha": 3.1681290973272813,
            "loss": 0.27723487918124057,
            "class_loss": 0.08937885324386033,
            "p_class_loss": 0.09317678653381088,
            "re_loss": 0.029887577208379906,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4026,
            "learning_rate": 0.06258068587241798,
            "alpha": 3.221381744831064,
            "loss": 0.2771874015981501,
            "class_loss": 0.08797728405757384,
            "p_class_loss": 0.09164950229001767,
            "re_loss": 0.030283973467620937,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 4092,
            "learning_rate": 0.061949211501905646,
            "alpha": 3.274634392334846,
            "loss": 0.2782447882222407,
            "class_loss": 0.08838838638004029,
            "p_class_loss": 0.09245423684743317,
            "re_loss": 0.02974452916532755,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4158,
            "learning_rate": 0.061317021096530956,
            "alpha": 3.3278870398386284,
            "loss": 0.2727491236997373,
            "class_loss": 0.08787473370179985,
            "p_class_loss": 0.09178147329525514,
            "re_loss": 0.027973051944916897,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4224,
            "learning_rate": 0.06068410562256659,
            "alpha": 3.381139687342411,
            "loss": 0.2738724262876944,
            "class_loss": 0.08723411923556616,
            "p_class_loss": 0.09128672347375841,
            "re_loss": 0.028201025120462433,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4290,
            "learning_rate": 0.06005045582617229,
            "alpha": 3.4343923348461924,
            "loss": 0.2812728545430935,
            "class_loss": 0.0891692392302282,
            "p_class_loss": 0.09315378151156685,
            "re_loss": 0.028813116534641296,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4356,
            "learning_rate": 0.05941606222538473,
            "alpha": 3.487644982349974,
            "loss": 0.2799885706468062,
            "class_loss": 0.09117461063645103,
            "p_class_loss": 0.09505547481504353,
            "re_loss": 0.026883359800911312,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4422,
            "learning_rate": 0.05878091510171705,
            "alpha": 3.5408976298537564,
            "loss": 0.2824030134714011,
            "class_loss": 0.09092485938559879,
            "p_class_loss": 0.09484707795535073,
            "re_loss": 0.027291545870177673,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4488,
            "learning_rate": 0.05814500449134452,
            "alpha": 3.594150277357539,
            "loss": 0.2726154167092208,
            "class_loss": 0.08773354384483713,
            "p_class_loss": 0.09125803580338304,
            "re_loss": 0.026050779912056343,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4554,
            "learning_rate": 0.05750832017585045,
            "alpha": 3.6474029248613213,
            "loss": 0.27235606245019217,
            "class_loss": 0.08726302353721677,
            "p_class_loss": 0.09131332960995761,
            "re_loss": 0.025711489756676285,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4620,
            "learning_rate": 0.05687085167250464,
            "alpha": 3.700655572365103,
            "loss": 0.2818877604423147,
            "class_loss": 0.09109943636664838,
            "p_class_loss": 0.09480206284559134,
            "re_loss": 0.025938751091333954,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4686,
            "learning_rate": 0.05623258822404481,
            "alpha": 3.753908219868886,
            "loss": 0.28668873034643405,
            "class_loss": 0.09219798400546565,
            "p_class_loss": 0.09604735085458467,
            "re_loss": 0.026226070145088615,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 4752,
            "learning_rate": 0.05559351878792893,
            "alpha": 3.807160867372668,
            "loss": 0.27499309317632153,
            "class_loss": 0.08802187036384236,
            "p_class_loss": 0.09179980759367798,
            "re_loss": 0.024996825126987515,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4818,
            "learning_rate": 0.054953632025023895,
            "alpha": 3.8604135148764493,
            "loss": 0.2699499676624934,
            "class_loss": 0.08544509124123689,
            "p_class_loss": 0.08885088117059434,
            "re_loss": 0.024776962698634827,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4884,
            "learning_rate": 0.05431291628769333,
            "alpha": 3.9136661623802316,
            "loss": 0.2863818799907511,
            "class_loss": 0.09150531594500397,
            "p_class_loss": 0.09508840999368465,
            "re_loss": 0.02549773556264964,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 4950,
            "learning_rate": 0.053671359607244264,
            "alpha": 3.966918809884014,
            "loss": 0.28270259115732077,
            "class_loss": 0.0912272125256784,
            "p_class_loss": 0.09491656133622835,
            "re_loss": 0.024342426973761933,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5016,
            "learning_rate": 0.0530289496806894,
            "alpha": 3.9996087959779336,
            "loss": 0.2899526475053845,
            "class_loss": 0.09317444010891697,
            "p_class_loss": 0.097012222603415,
            "re_loss": 0.02494419761227839,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5082,
            "learning_rate": 0.05238567385677741,
            "alpha": 4.0,
            "loss": 0.2809840032097065,
            "class_loss": 0.08963506917158763,
            "p_class_loss": 0.09351434298988545,
            "re_loss": 0.024458647366951813,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5148,
            "learning_rate": 0.051741519121240934,
            "alpha": 4.0,
            "loss": 0.28262406023162784,
            "class_loss": 0.0910856934885184,
            "p_class_loss": 0.09472302821549503,
            "re_loss": 0.02420383451901602,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5214,
            "learning_rate": 0.05109647208120638,
            "alpha": 4.0,
            "loss": 0.2820073569362814,
            "class_loss": 0.09092362953180616,
            "p_class_loss": 0.09484663289604765,
            "re_loss": 0.02405927400810249,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5280,
            "learning_rate": 0.05045051894870592,
            "alpha": 4.0,
            "loss": 0.28320911368637375,
            "class_loss": 0.09215585339927312,
            "p_class_loss": 0.09586411895173969,
            "re_loss": 0.02379728543261687,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5346,
            "learning_rate": 0.04980364552322607,
            "alpha": 4.0,
            "loss": 0.27445078550866153,
            "class_loss": 0.08776470973636165,
            "p_class_loss": 0.09164902127601883,
            "re_loss": 0.023759262904411917,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 5412,
            "learning_rate": 0.04915583717322224,
            "alpha": 4.0,
            "loss": 0.28672305968674744,
            "class_loss": 0.09333863233526547,
            "p_class_loss": 0.09710404818708246,
            "re_loss": 0.024070094875765568,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5478,
            "learning_rate": 0.04850707881652148,
            "alpha": 4.0,
            "loss": 0.28257156389229227,
            "class_loss": 0.09313247048042038,
            "p_class_loss": 0.09681258948914932,
            "re_loss": 0.023156625190467545,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5544,
            "learning_rate": 0.04785735489952932,
            "alpha": 4.0,
            "loss": 0.27076589022621966,
            "class_loss": 0.08652564093019023,
            "p_class_loss": 0.09054027176038786,
            "re_loss": 0.023424994496797972,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5610,
            "learning_rate": 0.04720664937514843,
            "alpha": 4.0,
            "loss": 0.27235389929829223,
            "class_loss": 0.08707706771339431,
            "p_class_loss": 0.09107179228555072,
            "re_loss": 0.02355125964139447,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5676,
            "learning_rate": 0.04655494567930828,
            "alpha": 4.0,
            "loss": 0.27449259410301846,
            "class_loss": 0.08705129338936372,
            "p_class_loss": 0.09104211284129908,
            "re_loss": 0.02409979644598383,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5742,
            "learning_rate": 0.045902226705995475,
            "alpha": 4.0,
            "loss": 0.27139446477998386,
            "class_loss": 0.08774811396318855,
            "p_class_loss": 0.09134821644560857,
            "re_loss": 0.023074533818571857,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5808,
            "learning_rate": 0.04524847478066371,
            "alpha": 4.0,
            "loss": 0.279672516114784,
            "class_loss": 0.09107140502469106,
            "p_class_loss": 0.09490651817935886,
            "re_loss": 0.023423648016019302,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5874,
            "learning_rate": 0.044593671631890416,
            "alpha": 4.0,
            "loss": 0.27692870931191876,
            "class_loss": 0.0913755978372964,
            "p_class_loss": 0.09487520367132896,
            "re_loss": 0.02266947672504819,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 5940,
            "learning_rate": 0.04393779836113402,
            "alpha": 4.0,
            "loss": 0.2749598342360872,
            "class_loss": 0.08886864696714011,
            "p_class_loss": 0.09274347860253218,
            "re_loss": 0.023336927039605198,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6006,
            "learning_rate": 0.043280835410430665,
            "alpha": 4.0,
            "loss": 0.27103203015797067,
            "class_loss": 0.0879701247833895,
            "p_class_loss": 0.09168451702730222,
            "re_loss": 0.022844346437716122,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 6072,
            "learning_rate": 0.04262276252785307,
            "alpha": 4.0,
            "loss": 0.26942289394862723,
            "class_loss": 0.08836323813055501,
            "p_class_loss": 0.09191939579040716,
            "re_loss": 0.022285064823473946,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6138,
            "learning_rate": 0.041963558730535026,
            "alpha": 4.0,
            "loss": 0.27769372815435583,
            "class_loss": 0.08951219747012312,
            "p_class_loss": 0.09330245107412338,
            "re_loss": 0.02371977007185871,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6204,
            "learning_rate": 0.041303202265044775,
            "alpha": 4.0,
            "loss": 0.26997587287967856,
            "class_loss": 0.08803907843927543,
            "p_class_loss": 0.09191509127391106,
            "re_loss": 0.02250542531185078,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6270,
            "learning_rate": 0.04064167056486619,
            "alpha": 4.0,
            "loss": 0.27488198244210443,
            "class_loss": 0.08943272636018017,
            "p_class_loss": 0.09293239930588187,
            "re_loss": 0.023129213671905523,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6336,
            "learning_rate": 0.039978940204720675,
            "alpha": 4.0,
            "loss": 0.2712762283556389,
            "class_loss": 0.08745149466575998,
            "p_class_loss": 0.09135077718758222,
            "re_loss": 0.023118488829244267,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6402,
            "learning_rate": 0.039314986851432414,
            "alpha": 4.0,
            "loss": 0.27013291666905087,
            "class_loss": 0.08711065780935866,
            "p_class_loss": 0.09058049456639723,
            "re_loss": 0.023110440974547106,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6468,
            "learning_rate": 0.03864978521100512,
            "alpha": 4.0,
            "loss": 0.26656877746184665,
            "class_loss": 0.08653509103213296,
            "p_class_loss": 0.0900559469380162,
            "re_loss": 0.022494434745925846,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6534,
            "learning_rate": 0.03798330897154045,
            "alpha": 4.0,
            "loss": 0.2677343559987617,
            "class_loss": 0.08488816717131571,
            "p_class_loss": 0.08870497165304242,
            "re_loss": 0.02353530413838047,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6600,
            "learning_rate": 0.037315530741583156,
            "alpha": 4.0,
            "loss": 0.27300241341193515,
            "class_loss": 0.09070577212806905,
            "p_class_loss": 0.09435731765221465,
            "re_loss": 0.021984830865580025,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6666,
            "learning_rate": 0.036646421983428504,
            "alpha": 4.0,
            "loss": 0.27142718389178766,
            "class_loss": 0.08775234611874277,
            "p_class_loss": 0.09169461764395237,
            "re_loss": 0.02299505460894469,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 6732,
            "learning_rate": 0.035975952940869374,
            "alpha": 4.0,
            "loss": 0.25844592623638385,
            "class_loss": 0.08317464663449561,
            "p_class_loss": 0.08665445932384694,
            "re_loss": 0.022154204956622736,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6798,
            "learning_rate": 0.035304092560794184,
            "alpha": 4.0,
            "loss": 0.26580964638428256,
            "class_loss": 0.08627154616018136,
            "p_class_loss": 0.090236339830991,
            "re_loss": 0.022325440324052717,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6864,
            "learning_rate": 0.034630808407971204,
            "alpha": 4.0,
            "loss": 0.2640183796033715,
            "class_loss": 0.08414980295029553,
            "p_class_loss": 0.08810644557304455,
            "re_loss": 0.02294053268534216,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6930,
            "learning_rate": 0.03395606657226626,
            "alpha": 4.0,
            "loss": 0.26370580449248804,
            "class_loss": 0.08541266129098156,
            "p_class_loss": 0.08895942613934026,
            "re_loss": 0.022333428701103636,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 6996,
            "learning_rate": 0.03327983156743993,
            "alpha": 4.0,
            "loss": 0.2720783483801466,
            "class_loss": 0.08773583614013412,
            "p_class_loss": 0.09173141769839055,
            "re_loss": 0.02315277329674273,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7062,
            "learning_rate": 0.03260206622055165,
            "alpha": 4.0,
            "loss": 0.25677314220052777,
            "class_loss": 0.08221176548889189,
            "p_class_loss": 0.08621381663463333,
            "re_loss": 0.022086890287358652,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7128,
            "learning_rate": 0.03192273155086139,
            "alpha": 4.0,
            "loss": 0.2660258331082084,
            "class_loss": 0.08682938394221393,
            "p_class_loss": 0.09030625980460283,
            "re_loss": 0.022222546888797573,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7194,
            "learning_rate": 0.03124178663695862,
            "alpha": 4.0,
            "loss": 0.2583186025872375,
            "class_loss": 0.08346859427789848,
            "p_class_loss": 0.08677470328455622,
            "re_loss": 0.022018826030420536,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7260,
            "learning_rate": 0.030559188470660144,
            "alpha": 4.0,
            "loss": 0.26884931980660465,
            "class_loss": 0.08691503384122343,
            "p_class_loss": 0.09063762618285237,
            "re_loss": 0.022824164451749035,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7326,
            "learning_rate": 0.029874891795997234,
            "alpha": 4.0,
            "loss": 0.266261718489907,
            "class_loss": 0.08646944283761761,
            "p_class_loss": 0.08997903019189835,
            "re_loss": 0.022453311266321125,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 7392,
            "learning_rate": 0.029188848931350236,
            "alpha": 4.0,
            "loss": 0.26708939942446625,
            "class_loss": 0.08629941968529513,
            "p_class_loss": 0.09006970150001122,
            "re_loss": 0.022680069404569538,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7458,
            "learning_rate": 0.02850100957247924,
            "alpha": 4.0,
            "loss": 0.26053636521101,
            "class_loss": 0.08387578617442738,
            "p_class_loss": 0.08771627520521481,
            "re_loss": 0.02223607561917919,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7524,
            "learning_rate": 0.027811320573829224,
            "alpha": 4.0,
            "loss": 0.2510256977243857,
            "class_loss": 0.08105461869501707,
            "p_class_loss": 0.08464242771945217,
            "re_loss": 0.02133216286981196,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7590,
            "learning_rate": 0.02711972570504625,
            "alpha": 4.0,
            "loss": 0.2630213571317268,
            "class_loss": 0.0848130105119763,
            "p_class_loss": 0.08842846976988243,
            "re_loss": 0.022444968789138577,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7656,
            "learning_rate": 0.026426165379109703,
            "alpha": 4.0,
            "loss": 0.25552796414404205,
            "class_loss": 0.08139325751725471,
            "p_class_loss": 0.08499028003125479,
            "re_loss": 0.022286106578328392,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7722,
            "learning_rate": 0.025730576347843644,
            "alpha": 4.0,
            "loss": 0.25198384825930453,
            "class_loss": 0.08032257629163338,
            "p_class_loss": 0.08415062832787182,
            "re_loss": 0.021877660712396555,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7788,
            "learning_rate": 0.025032891359791087,
            "alpha": 4.0,
            "loss": 0.25168039988387714,
            "class_loss": 0.08004032758375008,
            "p_class_loss": 0.08366928712436647,
            "re_loss": 0.02199269657615911,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7854,
            "learning_rate": 0.024333038774483465,
            "alpha": 4.0,
            "loss": 0.26368618688800116,
            "class_loss": 0.084438663935571,
            "p_class_loss": 0.08827974280398904,
            "re_loss": 0.022741944726669426,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7920,
            "learning_rate": 0.02363094212596905,
            "alpha": 4.0,
            "loss": 0.26637161229595996,
            "class_loss": 0.0849287613210353,
            "p_class_loss": 0.08843515327934062,
            "re_loss": 0.02325192428278652,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 7986,
            "learning_rate": 0.022926519627021413,
            "alpha": 4.0,
            "loss": 0.2546160878105597,
            "class_loss": 0.0811132758742932,
            "p_class_loss": 0.08461448922753334,
            "re_loss": 0.022222079661195025,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 8052,
            "learning_rate": 0.022219683603653076,
            "alpha": 4.0,
            "loss": 0.2614448217279983,
            "class_loss": 0.08399147890282399,
            "p_class_loss": 0.08777457740948055,
            "re_loss": 0.02241969122692491,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8118,
            "learning_rate": 0.021510339847313098,
            "alpha": 4.0,
            "loss": 0.2554654569336862,
            "class_loss": 0.08182670373582479,
            "p_class_loss": 0.08551882354147507,
            "re_loss": 0.02202998266809366,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8184,
            "learning_rate": 0.02079838686931096,
            "alpha": 4.0,
            "loss": 0.24650358815084805,
            "class_loss": 0.0785150443288413,
            "p_class_loss": 0.08229628470585201,
            "re_loss": 0.021423064764927734,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8250,
            "learning_rate": 0.020083715038404844,
            "alpha": 4.0,
            "loss": 0.26195508602893713,
            "class_loss": 0.08452466502785683,
            "p_class_loss": 0.0882384530409719,
            "re_loss": 0.02229799186302857,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8316,
            "learning_rate": 0.019366205577868813,
            "alpha": 4.0,
            "loss": 0.2562436761729645,
            "class_loss": 0.08322792308348598,
            "p_class_loss": 0.08679867152011755,
            "re_loss": 0.021554270970889113,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8382,
            "learning_rate": 0.018645729392371076,
            "alpha": 4.0,
            "loss": 0.2568132674152201,
            "class_loss": 0.08262469094585288,
            "p_class_loss": 0.08608808012848551,
            "re_loss": 0.02202512360544819,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8448,
            "learning_rate": 0.01792214568717831,
            "alpha": 4.0,
            "loss": 0.2609473155303435,
            "class_loss": 0.08313413240918607,
            "p_class_loss": 0.08711776142996369,
            "re_loss": 0.022673854928915247,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8514,
            "learning_rate": 0.017195300331875923,
            "alpha": 4.0,
            "loss": 0.25722048634832556,
            "class_loss": 0.08223536159052994,
            "p_class_loss": 0.0861912699359836,
            "re_loss": 0.022198463917117228,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8580,
            "learning_rate": 0.016465023907002885,
            "alpha": 4.0,
            "loss": 0.252747813409025,
            "class_loss": 0.08024968364925096,
            "p_class_loss": 0.08433184806596149,
            "re_loss": 0.022041570169456078,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8646,
            "learning_rate": 0.015731129353344985,
            "alpha": 4.0,
            "loss": 0.2528942780512752,
            "class_loss": 0.08167180994694884,
            "p_class_loss": 0.08539805770146125,
            "re_loss": 0.021456103009933777,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 8712,
            "learning_rate": 0.014993409118054087,
            "alpha": 4.0,
            "loss": 0.25171273450056714,
            "class_loss": 0.08011508236328761,
            "p_class_loss": 0.08391360323311704,
            "re_loss": 0.021921012113153032,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8778,
            "learning_rate": 0.014251631656171146,
            "alpha": 4.0,
            "loss": 0.24869335375048898,
            "class_loss": 0.07969781073431174,
            "p_class_loss": 0.08358355493030765,
            "re_loss": 0.021352996668693693,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8844,
            "learning_rate": 0.01350553709578782,
            "alpha": 4.0,
            "loss": 0.2572281157428568,
            "class_loss": 0.08294896154918453,
            "p_class_loss": 0.0869410118709008,
            "re_loss": 0.021834536385016912,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8910,
            "learning_rate": 0.012754831802576104,
            "alpha": 4.0,
            "loss": 0.24979764494028958,
            "class_loss": 0.07943106911173373,
            "p_class_loss": 0.08287825463621905,
            "re_loss": 0.021872080072309032,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 8976,
            "learning_rate": 0.011999181472868131,
            "alpha": 4.0,
            "loss": 0.24369859740589606,
            "class_loss": 0.07692616805434227,
            "p_class_loss": 0.08070433038202199,
            "re_loss": 0.021517024883492428,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9042,
            "learning_rate": 0.011238202224342653,
            "alpha": 4.0,
            "loss": 0.24896227545810468,
            "class_loss": 0.07891058351731661,
            "p_class_loss": 0.08250137322554082,
            "re_loss": 0.02188757983403224,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9108,
            "learning_rate": 0.01047144890655713,
            "alpha": 4.0,
            "loss": 0.2651791220361536,
            "class_loss": 0.08543208162441399,
            "p_class_loss": 0.08925815491062222,
            "re_loss": 0.022622221163615133,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9174,
            "learning_rate": 0.00969839946202967,
            "alpha": 4.0,
            "loss": 0.2582064426758073,
            "class_loss": 0.08363607108141437,
            "p_class_loss": 0.0871530185368928,
            "re_loss": 0.021854338236153126,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9240,
            "learning_rate": 0.00891843352667202,
            "alpha": 4.0,
            "loss": 0.24548901617527008,
            "class_loss": 0.07783742011948065,
            "p_class_loss": 0.0813087872370626,
            "re_loss": 0.02158570203535033,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9306,
            "learning_rate": 0.008130802365067418,
            "alpha": 4.0,
            "loss": 0.241554471579465,
            "class_loss": 0.07608497018615405,
            "p_class_loss": 0.079719364078659,
            "re_loss": 0.021437533933556442,
            "conf_loss": 0.0,
            "time": 34
        },
        {
            "iteration": 9372,
            "learning_rate": 0.0073345852886601455,
            "alpha": 4.0,
            "loss": 0.24608669294552368,
            "class_loss": 0.07805259187113155,
            "p_class_loss": 0.08190038111625296,
            "re_loss": 0.021533429552095407,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9438,
            "learning_rate": 0.00652862404598149,
            "alpha": 4.0,
            "loss": 0.250537108065504,
            "class_loss": 0.07887401951081825,
            "p_class_loss": 0.08247577946520213,
            "re_loss": 0.022296827314703754,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9504,
            "learning_rate": 0.005711419339067195,
            "alpha": 4.0,
            "loss": 0.2517375090357029,
            "class_loss": 0.07957503047179092,
            "p_class_loss": 0.08372594083123135,
            "re_loss": 0.022109133572402326,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9570,
            "learning_rate": 0.004880957680822683,
            "alpha": 4.0,
            "loss": 0.25499846199245163,
            "class_loss": 0.08113476255852164,
            "p_class_loss": 0.0849764087434971,
            "re_loss": 0.02222182255972064,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9636,
            "learning_rate": 0.004034398387053385,
            "alpha": 4.0,
            "loss": 0.25415949658914044,
            "class_loss": 0.08022765595127236,
            "p_class_loss": 0.08434248619684667,
            "re_loss": 0.022397338638477253,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9702,
            "learning_rate": 0.003167444028983347,
            "alpha": 4.0,
            "loss": 0.24872907951022638,
            "class_loss": 0.07845463699689417,
            "p_class_loss": 0.08232282593168995,
            "re_loss": 0.02198790418774341,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9768,
            "learning_rate": 0.0022728569206237922,
            "alpha": 4.0,
            "loss": 0.24348128428964905,
            "class_loss": 0.07746653913548499,
            "p_class_loss": 0.08072477364630411,
            "re_loss": 0.021322492312527065,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9834,
            "learning_rate": 0.0013358848371585364,
            "alpha": 4.0,
            "loss": 0.2415681393309073,
            "class_loss": 0.07638258650673158,
            "p_class_loss": 0.07974133284931834,
            "re_loss": 0.02136105476793918,
            "conf_loss": 0.0,
            "time": 15
        },
        {
            "iteration": 9900,
            "learning_rate": 0.0003069336738416699,
            "alpha": 4.0,
            "loss": 0.25945339193849853,
            "class_loss": 0.0842481419782747,
            "p_class_loss": 0.08819409437251813,
            "re_loss": 0.021752788445376085,
            "conf_loss": 0.0,
            "time": 15
        }
    ],
"validation": [ | |
{ | |
"iteration": 661, | |
"threshold": 0.1, | |
"train_mIoU": 40.431105085264676, | |
"best_train_mIoU": 40.431105085264676, | |
"time": 18 | |
}, | |
{ | |
"iteration": 1322, | |
"threshold": 0.1, | |
"train_mIoU": 43.46100840234902, | |
"best_train_mIoU": 43.46100840234902, | |
"time": 18 | |
}, | |
{ | |
"iteration": 1983, | |
"threshold": 0.1, | |
"train_mIoU": 43.177254158210296, | |
"best_train_mIoU": 43.46100840234902, | |
"time": 17 | |
}, | |
{ | |
"iteration": 2644, | |
"threshold": 0.1, | |
"train_mIoU": 44.51425697996, | |
"best_train_mIoU": 44.51425697996, | |
"time": 18 | |
}, | |
{ | |
"iteration": 3305, | |
"threshold": 0.1, | |
"train_mIoU": 45.100629296056006, | |
"best_train_mIoU": 45.100629296056006, | |
"time": 18 | |
}, | |
{ | |
"iteration": 3966, | |
"threshold": 0.1, | |
"train_mIoU": 46.224907220002024, | |
"best_train_mIoU": 46.224907220002024, | |
"time": 18 | |
}, | |
{ | |
"iteration": 4627, | |
"threshold": 0.1, | |
"train_mIoU": 45.987615324246725, | |
"best_train_mIoU": 46.224907220002024, | |
"time": 17 | |
}, | |
{ | |
"iteration": 5288, | |
"threshold": 0.1, | |
"train_mIoU": 45.93900520100114, | |
"best_train_mIoU": 46.224907220002024, | |
"time": 18 | |
}, | |
{ | |
"iteration": 5949, | |
"threshold": 0.1, | |
"train_mIoU": 46.19157269786109, | |
"best_train_mIoU": 46.224907220002024, | |
"time": 17 | |
}, | |
{ | |
"iteration": 6610, | |
"threshold": 0.1, | |
"train_mIoU": 45.6996113024023, | |
"best_train_mIoU": 46.224907220002024, | |
"time": 18 | |
}, | |
{ | |
"iteration": 7271, | |
"threshold": 0.1, | |
"train_mIoU": 45.848207089703266, | |
"best_train_mIoU": 46.224907220002024, | |
"time": 18 | |
}, | |
{ | |
"iteration": 7932, | |
"threshold": 0.1, | |
"train_mIoU": 46.26744171781166, | |
"best_train_mIoU": 46.26744171781166, | |
"time": 18 | |
}, | |
{ | |
"iteration": 8593, | |
"threshold": 0.1, | |
"train_mIoU": 46.422600225610935, | |
"best_train_mIoU": 46.422600225610935, | |
"time": 18 | |
}, | |
{ | |
"iteration": 9254, | |
"threshold": 0.1, | |
"train_mIoU": 45.940160699367595, | |
"best_train_mIoU": 46.422600225610935, | |
"time": 17 | |
}, | |
{ | |
"iteration": 9915, | |
"threshold": 0.1, | |
"train_mIoU": 46.11233098931772, | |
"best_train_mIoU": 46.422600225610935, | |
"time": 18 | |
} | |
] | |
} |