{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "3fffcf73", "metadata": { "_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19", "_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5", "execution": { "iopub.execute_input": "2023-07-20T14:05:12.412085Z", "iopub.status.busy": "2023-07-20T14:05:12.411655Z", "iopub.status.idle": "2023-07-20T14:05:12.424517Z", "shell.execute_reply": "2023-07-20T14:05:12.423275Z" }, "papermill": { "duration": 0.018751, "end_time": "2023-07-20T14:05:12.426607", "exception": false, "start_time": "2023-07-20T14:05:12.407856", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "/kaggle/input/icr-identify-age-related-conditions/sample_submission.csv\n", "/kaggle/input/icr-identify-age-related-conditions/greeks.csv\n", "/kaggle/input/icr-identify-age-related-conditions/train.csv\n", "/kaggle/input/icr-identify-age-related-conditions/test.csv\n" ] } ], "source": [ "# This Python 3 environment comes with many helpful analytics libraries installed\n", "# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python\n", "# For example, here's several helpful packages to load\n", "\n", "import numpy as np # linear algebra\n", "import pandas as pd # data processing, CSV file I/O (e.g. 
pd.read_csv)\n", "\n", "# Input data files are available in the read-only \"../input/\" directory\n", "# For example, running this (by clicking run or pressing Shift+Enter) will list all files under the input directory\n", "\n", "import os\n", "for dirname, _, filenames in os.walk('/kaggle/input'):\n", " for filename in filenames:\n", " print(os.path.join(dirname, filename))\n", "\n", "# You can write up to 20GB to the current directory (/kaggle/working/) that gets preserved as output when you create a version using \"Save & Run All\" \n", "# You can also write temporary files to /kaggle/temp/, but they won't be saved outside of the current session" ] }, { "cell_type": "code", "execution_count": 2, "id": "74c4e89f", "metadata": { "execution": { "iopub.execute_input": "2023-07-20T14:05:12.431472Z", "iopub.status.busy": "2023-07-20T14:05:12.431168Z", "iopub.status.idle": "2023-07-20T14:10:21.303658Z", "shell.execute_reply": "2023-07-20T14:10:21.302504Z" }, "papermill": { "duration": 308.877761, "end_time": "2023-07-20T14:10:21.306166", "exception": false, "start_time": "2023-07-20T14:05:12.428405", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/opt/conda/lib/python3.10/site-packages/scipy/__init__.py:146: UserWarning: A NumPy version >=1.16.5 and <1.23.0 is required for this version of SciPy (detected version 1.23.5\n", " warnings.warn(f\"A NumPy version >={np_minversion} and <{np_maxversion}\"\n", "/opt/conda/lib/python3.10/site-packages/tensorflow_io/python/ops/__init__.py:98: UserWarning: unable to load libtensorflow_io_plugins.so: unable to open file: libtensorflow_io_plugins.so, from paths: ['/opt/conda/lib/python3.10/site-packages/tensorflow_io/python/ops/libtensorflow_io_plugins.so']\n", "caused by: ['/opt/conda/lib/python3.10/site-packages/tensorflow_io/python/ops/libtensorflow_io_plugins.so: undefined symbol: 
_ZN3tsl6StatusC1EN10tensorflow5error4CodeESt17basic_string_viewIcSt11char_traitsIcEENS_14SourceLocationE']\n", " warnings.warn(f\"unable to load libtensorflow_io_plugins.so: {e}\")\n", "/opt/conda/lib/python3.10/site-packages/tensorflow_io/python/ops/__init__.py:104: UserWarning: file system plugins are not loaded: unable to open file: libtensorflow_io.so, from paths: ['/opt/conda/lib/python3.10/site-packages/tensorflow_io/python/ops/libtensorflow_io.so']\n", "caused by: ['/opt/conda/lib/python3.10/site-packages/tensorflow_io/python/ops/libtensorflow_io.so: undefined symbol: _ZTVN10tensorflow13GcsFileSystemE']\n", " warnings.warn(f\"file system plugins are not loaded: {e}\")\n", "/tmp/ipykernel_21/3725071267.py:23: FutureWarning: The default value of numeric_only in DataFrame.mean is deprecated. In a future version, it will default to False. In addition, specifying 'numeric_only=None' is deprecated. Select only valid columns or specify the value of numeric_only to silence this warning.\n", " train_df.fillna(train_df.mean(), inplace=True)\n", "/tmp/ipykernel_21/3725071267.py:24: FutureWarning: The default value of numeric_only in DataFrame.mean is deprecated. In a future version, it will default to False. In addition, specifying 'numeric_only=None' is deprecated. Select only valid columns or specify the value of numeric_only to silence this warning.\n", " test_df.fillna(test_df.mean(), inplace=True)\n", "[I 2023-07-20 14:05:21,858] A new study created in memory with name: no-name-e7eb7b47-c741-4931-a5ee-4fb946ece2e5\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:24,195] Trial 0 finished with value: 0.37612669809311017 and parameters: {'num_units_layer1': 79, 'num_units_layer2': 50, 'learning_rate': 0.008012238952084676, 'dropout_rate': 0.4514525795507893}. 
Best is trial 0 with value: 0.37612669809311017.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:27,299] Trial 1 finished with value: 0.27934503086750023 and parameters: {'num_units_layer1': 97, 'num_units_layer2': 22, 'learning_rate': 0.009374399732980473, 'dropout_rate': 0.3313571774571368}. Best is trial 1 with value: 0.27934503086750023.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:29,197] Trial 2 finished with value: 0.28023319853065315 and parameters: {'num_units_layer1': 117, 'num_units_layer2': 33, 'learning_rate': 0.005335238480094896, 'dropout_rate': 0.12349899648941381}. Best is trial 1 with value: 0.27934503086750023.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:32,706] Trial 3 finished with value: 0.39982310724099 and parameters: {'num_units_layer1': 119, 'num_units_layer2': 48, 'learning_rate': 0.001922305501928269, 'dropout_rate': 0.3086496823751493}. Best is trial 1 with value: 0.27934503086750023.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:34,429] Trial 4 finished with value: 0.28851756949264895 and parameters: {'num_units_layer1': 127, 'num_units_layer2': 23, 'learning_rate': 0.007559568843057046, 'dropout_rate': 0.27387438757133353}. 
Best is trial 1 with value: 0.27934503086750023.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:37,127] Trial 5 finished with value: 0.30041905694264415 and parameters: {'num_units_layer1': 74, 'num_units_layer2': 61, 'learning_rate': 0.0018574154309762482, 'dropout_rate': 0.3278984300969777}. Best is trial 1 with value: 0.27934503086750023.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:39,718] Trial 6 finished with value: 0.31180198967996764 and parameters: {'num_units_layer1': 105, 'num_units_layer2': 17, 'learning_rate': 0.008644785209292752, 'dropout_rate': 0.24306004935091285}. Best is trial 1 with value: 0.27934503086750023.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:41,722] Trial 7 finished with value: 0.2606252274634733 and parameters: {'num_units_layer1': 53, 'num_units_layer2': 49, 'learning_rate': 0.0022056734755697195, 'dropout_rate': 0.16996154533603383}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:44,149] Trial 8 finished with value: 0.29825548417897874 and parameters: {'num_units_layer1': 57, 'num_units_layer2': 54, 'learning_rate': 0.009744070009904357, 'dropout_rate': 0.4163832059675847}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:46,773] Trial 9 finished with value: 0.3511917885647268 and parameters: {'num_units_layer1': 106, 'num_units_layer2': 29, 'learning_rate': 0.0027303168056022373, 'dropout_rate': 0.44886918763443007}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:52,342] Trial 10 finished with value: 0.3673723202219523 and parameters: {'num_units_layer1': 39, 'num_units_layer2': 41, 'learning_rate': 0.0001704039494836388, 'dropout_rate': 0.10589218064234204}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:54,559] Trial 11 finished with value: 0.31307462976375766 and parameters: {'num_units_layer1': 32, 'num_units_layer2': 41, 'learning_rate': 0.005695366751250268, 'dropout_rate': 0.20667104946486764}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:56,825] Trial 12 finished with value: 0.31523922943558214 and parameters: {'num_units_layer1': 59, 'num_units_layer2': 33, 'learning_rate': 0.0037284582580576022, 'dropout_rate': 0.36589849118051987}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:05:59,074] Trial 13 finished with value: 0.28580858382900787 and parameters: {'num_units_layer1': 92, 'num_units_layer2': 16, 'learning_rate': 0.006216821709086415, 'dropout_rate': 0.21192460188564918}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:01,566] Trial 14 finished with value: 0.27593882253429614 and parameters: {'num_units_layer1': 64, 'num_units_layer2': 64, 'learning_rate': 0.004321365944115111, 'dropout_rate': 0.3624593391828106}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:04,182] Trial 15 finished with value: 0.29297335109213535 and parameters: {'num_units_layer1': 53, 'num_units_layer2': 63, 'learning_rate': 0.003821792838115894, 'dropout_rate': 0.4888307490310298}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:06,068] Trial 16 finished with value: 0.33642095176352105 and parameters: {'num_units_layer1': 69, 'num_units_layer2': 57, 'learning_rate': 0.004210979716940852, 'dropout_rate': 0.36944331991872886}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:08,850] Trial 17 finished with value: 0.3460568693546701 and parameters: {'num_units_layer1': 48, 'num_units_layer2': 47, 'learning_rate': 0.006583834136688789, 'dropout_rate': 0.17471824787549986}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:10,898] Trial 18 finished with value: 0.41890818137745167 and parameters: {'num_units_layer1': 65, 'num_units_layer2': 57, 'learning_rate': 0.004678531762649018, 'dropout_rate': 0.2662980646907444}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:14,431] Trial 19 finished with value: 0.2913661391746397 and parameters: {'num_units_layer1': 44, 'num_units_layer2': 52, 'learning_rate': 0.0030321432560700694, 'dropout_rate': 0.1534275366531895}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:16,554] Trial 20 finished with value: 0.34978634643955864 and parameters: {'num_units_layer1': 87, 'num_units_layer2': 64, 'learning_rate': 0.005115393613074398, 'dropout_rate': 0.22947170190505378}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:19,630] Trial 21 finished with value: 0.29755621839735336 and parameters: {'num_units_layer1': 90, 'num_units_layer2': 44, 'learning_rate': 0.009995685751024844, 'dropout_rate': 0.33474853297826634}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:22,126] Trial 22 finished with value: 0.3331839960390171 and parameters: {'num_units_layer1': 98, 'num_units_layer2': 24, 'learning_rate': 0.006677906674047979, 'dropout_rate': 0.289565855415167}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:24,590] Trial 23 finished with value: 0.3135332877969219 and parameters: {'num_units_layer1': 63, 'num_units_layer2': 58, 'learning_rate': 0.004979051051311715, 'dropout_rate': 0.37065589496530976}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:26,827] Trial 24 finished with value: 0.30739817133585373 and parameters: {'num_units_layer1': 80, 'num_units_layer2': 36, 'learning_rate': 0.007113952402639697, 'dropout_rate': 0.29967649961339476}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:29,097] Trial 25 finished with value: 0.30979090378028495 and parameters: {'num_units_layer1': 71, 'num_units_layer2': 25, 'learning_rate': 0.005784825152318517, 'dropout_rate': 0.3408204886378802}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:31,203] Trial 26 finished with value: 0.3451856656588138 and parameters: {'num_units_layer1': 47, 'num_units_layer2': 44, 'learning_rate': 0.008421095344809273, 'dropout_rate': 0.2572000755727851}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:33,336] Trial 27 finished with value: 0.29525307321776145 and parameters: {'num_units_layer1': 84, 'num_units_layer2': 38, 'learning_rate': 0.004347598116072113, 'dropout_rate': 0.39543210773172643}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:35,079] Trial 28 finished with value: 0.33697038127624745 and parameters: {'num_units_layer1': 99, 'num_units_layer2': 54, 'learning_rate': 0.009279809295954967, 'dropout_rate': 0.30757356317532414}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:38,632] Trial 29 finished with value: 0.4124748273920907 and parameters: {'num_units_layer1': 76, 'num_units_layer2': 50, 'learning_rate': 0.00772419616095247, 'dropout_rate': 0.2772825595715494}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:40,386] Trial 30 finished with value: 0.30427276261702857 and parameters: {'num_units_layer1': 55, 'num_units_layer2': 20, 'learning_rate': 0.0090463767723505, 'dropout_rate': 0.18081459074591025}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:42,198] Trial 31 finished with value: 0.33501982726980695 and parameters: {'num_units_layer1': 114, 'num_units_layer2': 28, 'learning_rate': 0.008278174319061629, 'dropout_rate': 0.11733158603631458}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:45,518] Trial 32 finished with value: 0.3223232791289136 and parameters: {'num_units_layer1': 116, 'num_units_layer2': 33, 'learning_rate': 0.007509863469999774, 'dropout_rate': 0.15295945712567505}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:47,730] Trial 33 finished with value: 0.31101254017149926 and parameters: {'num_units_layer1': 127, 'num_units_layer2': 30, 'learning_rate': 0.0056919637625596625, 'dropout_rate': 0.1261411338393785}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:49,977] Trial 34 finished with value: 0.30227130576992994 and parameters: {'num_units_layer1': 122, 'num_units_layer2': 20, 'learning_rate': 0.007232284245142569, 'dropout_rate': 0.13971986399720845}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:53,502] Trial 35 finished with value: 0.3445439884551183 and parameters: {'num_units_layer1': 110, 'num_units_layer2': 45, 'learning_rate': 0.008086341692878349, 'dropout_rate': 0.10303447684976101}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:57,063] Trial 36 finished with value: 0.4095255081612573 and parameters: {'num_units_layer1': 96, 'num_units_layer2': 60, 'learning_rate': 0.0015425772047091605, 'dropout_rate': 0.322880170755142}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:06:59,310] Trial 37 finished with value: 0.2852131973917719 and parameters: {'num_units_layer1': 122, 'num_units_layer2': 36, 'learning_rate': 0.0029244059864094816, 'dropout_rate': 0.2470968397472011}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:01,265] Trial 38 finished with value: 0.3218293408001637 and parameters: {'num_units_layer1': 102, 'num_units_layer2': 27, 'learning_rate': 0.005271176142207982, 'dropout_rate': 0.28421584195758515}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 3ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:03,242] Trial 39 finished with value: 0.34002779896557245 and parameters: {'num_units_layer1': 61, 'num_units_layer2': 20, 'learning_rate': 0.002296289609382053, 'dropout_rate': 0.1281080789953684}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:05,249] Trial 40 finished with value: 0.29988896294507184 and parameters: {'num_units_layer1': 109, 'num_units_layer2': 31, 'learning_rate': 0.0035302830008299988, 'dropout_rate': 0.18773021122343028}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:07,504] Trial 41 finished with value: 0.3696038976416016 and parameters: {'num_units_layer1': 118, 'num_units_layer2': 37, 'learning_rate': 0.0030385577432395063, 'dropout_rate': 0.23154390903377836}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:11,192] Trial 42 finished with value: 0.31505614495386486 and parameters: {'num_units_layer1': 112, 'num_units_layer2': 34, 'learning_rate': 0.0012934253126366145, 'dropout_rate': 0.25014044068532826}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:13,360] Trial 43 finished with value: 0.3937344823795298 and parameters: {'num_units_layer1': 123, 'num_units_layer2': 41, 'learning_rate': 0.002383275390183585, 'dropout_rate': 0.1984112161171195}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:15,710] Trial 44 finished with value: 0.36541215604025806 and parameters: {'num_units_layer1': 124, 'num_units_layer2': 39, 'learning_rate': 0.003325552758964294, 'dropout_rate': 0.16219534209388065}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:17,611] Trial 45 finished with value: 0.30033904910847736 and parameters: {'num_units_layer1': 104, 'num_units_layer2': 35, 'learning_rate': 0.0042276734514743826, 'dropout_rate': 0.2133532269711762}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:21,151] Trial 46 finished with value: 0.3006095305990212 and parameters: {'num_units_layer1': 40, 'num_units_layer2': 31, 'learning_rate': 0.0037462478427525417, 'dropout_rate': 0.1718263348301251}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:24,698] Trial 47 finished with value: 0.3167763759382619 and parameters: {'num_units_layer1': 128, 'num_units_layer2': 48, 'learning_rate': 0.004533188174421255, 'dropout_rate': 0.14218531407738216}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:26,956] Trial 48 finished with value: 0.2943769711188189 and parameters: {'num_units_layer1': 50, 'num_units_layer2': 26, 'learning_rate': 0.003944004909125305, 'dropout_rate': 0.26747772240469425}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:30,483] Trial 49 finished with value: 0.440997884727711 and parameters: {'num_units_layer1': 66, 'num_units_layer2': 55, 'learning_rate': 0.002712311567480808, 'dropout_rate': 0.23619979068661792}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:32,744] Trial 50 finished with value: 0.4251551888938864 and parameters: {'num_units_layer1': 84, 'num_units_layer2': 61, 'learning_rate': 0.0033424195993594827, 'dropout_rate': 0.31964940556372223}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:34,983] Trial 51 finished with value: 0.33017757402756914 and parameters: {'num_units_layer1': 89, 'num_units_layer2': 16, 'learning_rate': 0.006238322323755751, 'dropout_rate': 0.2200540295923449}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:38,266] Trial 52 finished with value: 0.3368275847061491 and parameters: {'num_units_layer1': 95, 'num_units_layer2': 22, 'learning_rate': 0.004959542647738189, 'dropout_rate': 0.20796422025270803}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:40,147] Trial 53 finished with value: 0.29430190847801124 and parameters: {'num_units_layer1': 93, 'num_units_layer2': 18, 'learning_rate': 0.0046885346525678195, 'dropout_rate': 0.1950822741576108}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:41,966] Trial 54 finished with value: 0.3302132641573082 and parameters: {'num_units_layer1': 107, 'num_units_layer2': 22, 'learning_rate': 0.008747879130314433, 'dropout_rate': 0.2557158951406305}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:44,916] Trial 55 finished with value: 0.3307704011864173 and parameters: {'num_units_layer1': 80, 'num_units_layer2': 41, 'learning_rate': 0.004068079831939459, 'dropout_rate': 0.17055193760503104}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 3ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:48,545] Trial 56 finished with value: 0.3834081378888624 and parameters: {'num_units_layer1': 74, 'num_units_layer2': 19, 'learning_rate': 0.005480509344317555, 'dropout_rate': 0.35175311589439107}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:52,130] Trial 57 finished with value: 0.31808314082874733 and parameters: {'num_units_layer1': 120, 'num_units_layer2': 17, 'learning_rate': 0.006297258856683062, 'dropout_rate': 0.2951347972849815}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:53,895] Trial 58 finished with value: 0.3441539494641959 and parameters: {'num_units_layer1': 59, 'num_units_layer2': 43, 'learning_rate': 0.009833046422967735, 'dropout_rate': 0.18911907565924213}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:55,849] Trial 59 finished with value: 0.3396048545068776 and parameters: {'num_units_layer1': 70, 'num_units_layer2': 22, 'learning_rate': 0.005898105952580197, 'dropout_rate': 0.2438027000390262}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:57,608] Trial 60 finished with value: 0.3191284344030587 and parameters: {'num_units_layer1': 52, 'num_units_layer2': 52, 'learning_rate': 0.00948404508344911, 'dropout_rate': 0.31065682014764506}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:07:59,435] Trial 61 finished with value: 0.3406258744740516 and parameters: {'num_units_layer1': 117, 'num_units_layer2': 32, 'learning_rate': 0.006900267896584518, 'dropout_rate': 0.22319936226746576}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:01,595] Trial 62 finished with value: 0.30641436717695475 and parameters: {'num_units_layer1': 125, 'num_units_layer2': 23, 'learning_rate': 0.007962818071822786, 'dropout_rate': 0.2747813588180468}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:03,862] Trial 63 finished with value: 0.28179794592252505 and parameters: {'num_units_layer1': 114, 'num_units_layer2': 25, 'learning_rate': 0.007569653463919914, 'dropout_rate': 0.2876879217769269}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:06,088] Trial 64 finished with value: 0.3318268035964361 and parameters: {'num_units_layer1': 102, 'num_units_layer2': 25, 'learning_rate': 0.008730462178815844, 'dropout_rate': 0.34386162182281643}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:07,962] Trial 65 finished with value: 0.31226813550132804 and parameters: {'num_units_layer1': 85, 'num_units_layer2': 28, 'learning_rate': 0.006447234819207373, 'dropout_rate': 0.28988133805481586}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:10,027] Trial 66 finished with value: 0.3160016327763679 and parameters: {'num_units_layer1': 113, 'num_units_layer2': 16, 'learning_rate': 0.006047683105668464, 'dropout_rate': 0.3305617796209146}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:12,133] Trial 67 finished with value: 0.3339074616752304 and parameters: {'num_units_layer1': 120, 'num_units_layer2': 36, 'learning_rate': 0.006855380385314587, 'dropout_rate': 0.26143695073604256}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:14,958] Trial 68 finished with value: 0.30806032700154884 and parameters: {'num_units_layer1': 92, 'num_units_layer2': 29, 'learning_rate': 0.005448139875936953, 'dropout_rate': 0.3046998712725471}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:17,019] Trial 69 finished with value: 0.34323685614767785 and parameters: {'num_units_layer1': 115, 'num_units_layer2': 24, 'learning_rate': 0.007380085878446954, 'dropout_rate': 0.23923334158191983}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:19,206] Trial 70 finished with value: 0.33801278417260566 and parameters: {'num_units_layer1': 32, 'num_units_layer2': 46, 'learning_rate': 0.007697605400591505, 'dropout_rate': 0.38053910434780575}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:21,278] Trial 71 finished with value: 0.3400086136911512 and parameters: {'num_units_layer1': 45, 'num_units_layer2': 26, 'learning_rate': 0.006579564168592536, 'dropout_rate': 0.27229644423274985}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:24,569] Trial 72 finished with value: 0.3559453795047457 and parameters: {'num_units_layer1': 126, 'num_units_layer2': 64, 'learning_rate': 0.007046488011085729, 'dropout_rate': 0.2841370938548874}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:26,428] Trial 73 finished with value: 0.3451203322246218 and parameters: {'num_units_layer1': 121, 'num_units_layer2': 19, 'learning_rate': 0.0075766754858042976, 'dropout_rate': 0.25594175334649005}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:29,949] Trial 74 finished with value: 0.2988246257378646 and parameters: {'num_units_layer1': 99, 'num_units_layer2': 33, 'learning_rate': 0.00719047329875842, 'dropout_rate': 0.20426386324176127}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:31,929] Trial 75 finished with value: 0.31883317254466814 and parameters: {'num_units_layer1': 110, 'num_units_layer2': 39, 'learning_rate': 0.008524247018926042, 'dropout_rate': 0.31458532066465805}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:34,393] Trial 76 finished with value: 0.35333788864244525 and parameters: {'num_units_layer1': 37, 'num_units_layer2': 59, 'learning_rate': 0.009003008095091405, 'dropout_rate': 0.2237089464153391}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:36,152] Trial 77 finished with value: 0.2946403262161131 and parameters: {'num_units_layer1': 118, 'num_units_layer2': 21, 'learning_rate': 0.008048144362171855, 'dropout_rate': 0.29658398682482506}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:37,992] Trial 78 finished with value: 0.3186774993825529 and parameters: {'num_units_layer1': 57, 'num_units_layer2': 49, 'learning_rate': 0.006702360433259403, 'dropout_rate': 0.11148157401707896}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:39,936] Trial 79 finished with value: 0.32706644181867517 and parameters: {'num_units_layer1': 77, 'num_units_layer2': 24, 'learning_rate': 0.008272036320580869, 'dropout_rate': 0.33002958613190414}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:41,945] Trial 80 finished with value: 0.3539015181222879 and parameters: {'num_units_layer1': 106, 'num_units_layer2': 29, 'learning_rate': 0.006043252725700554, 'dropout_rate': 0.24360144418154975}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:44,741] Trial 81 finished with value: 0.33275888654400904 and parameters: {'num_units_layer1': 44, 'num_units_layer2': 52, 'learning_rate': 0.005124294325173998, 'dropout_rate': 0.16292843402492038}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:46,857] Trial 82 finished with value: 0.3148395586044235 and parameters: {'num_units_layer1': 36, 'num_units_layer2': 43, 'learning_rate': 0.009497738809590834, 'dropout_rate': 0.15135489779793546}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:49,159] Trial 83 finished with value: 0.31633203039957586 and parameters: {'num_units_layer1': 41, 'num_units_layer2': 52, 'learning_rate': 0.004445769558310712, 'dropout_rate': 0.18104000236554074}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:51,454] Trial 84 finished with value: 0.31882329593663966 and parameters: {'num_units_layer1': 49, 'num_units_layer2': 56, 'learning_rate': 0.004823231158328794, 'dropout_rate': 0.1324695431768389}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:53,512] Trial 85 finished with value: 0.38889594213955125 and parameters: {'num_units_layer1': 123, 'num_units_layer2': 35, 'learning_rate': 0.005669902075978545, 'dropout_rate': 0.14781713921715928}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:55,785] Trial 86 finished with value: 0.35697610153859777 and parameters: {'num_units_layer1': 55, 'num_units_layer2': 62, 'learning_rate': 0.0030059694779333923, 'dropout_rate': 0.10137018293326602}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:57,836] Trial 87 finished with value: 0.347621671054218 and parameters: {'num_units_layer1': 127, 'num_units_layer2': 38, 'learning_rate': 0.003943626003352976, 'dropout_rate': 0.15922825433269255}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:08:59,594] Trial 88 finished with value: 0.2990032138543389 and parameters: {'num_units_layer1': 115, 'num_units_layer2': 31, 'learning_rate': 0.007326071121200572, 'dropout_rate': 0.13284769054813195}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:01,888] Trial 89 finished with value: 0.42955945972245807 and parameters: {'num_units_layer1': 68, 'num_units_layer2': 57, 'learning_rate': 0.003705639777940834, 'dropout_rate': 0.11675813280628525}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:03,700] Trial 90 finished with value: 0.3565296067644221 and parameters: {'num_units_layer1': 46, 'num_units_layer2': 18, 'learning_rate': 0.007801470654329167, 'dropout_rate': 0.14331500504189257}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:05,922] Trial 91 finished with value: 0.3517807694724378 and parameters: {'num_units_layer1': 43, 'num_units_layer2': 62, 'learning_rate': 0.004289405481864196, 'dropout_rate': 0.2634647576995469}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:08,272] Trial 92 finished with value: 0.26911828616381633 and parameters: {'num_units_layer1': 53, 'num_units_layer2': 64, 'learning_rate': 0.004582212672141839, 'dropout_rate': 0.49590582182180176}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:10,433] Trial 93 finished with value: 0.29186629334445774 and parameters: {'num_units_layer1': 63, 'num_units_layer2': 63, 'learning_rate': 0.005245162758801556, 'dropout_rate': 0.4417241795177358}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:13,312] Trial 94 finished with value: 0.33801032602976966 and parameters: {'num_units_layer1': 51, 'num_units_layer2': 58, 'learning_rate': 0.004638294673099432, 'dropout_rate': 0.31866197250059936}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:15,814] Trial 95 finished with value: 0.28393733693880613 and parameters: {'num_units_layer1': 57, 'num_units_layer2': 60, 'learning_rate': 0.004460413910337653, 'dropout_rate': 0.4990832686578031}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:19,408] Trial 96 finished with value: 0.3089972494008311 and parameters: {'num_units_layer1': 55, 'num_units_layer2': 60, 'learning_rate': 0.0049992805287375226, 'dropout_rate': 0.47225679427339756}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 1ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:21,965] Trial 97 finished with value: 0.3434066445114366 and parameters: {'num_units_layer1': 59, 'num_units_layer2': 64, 'learning_rate': 0.004157608060349069, 'dropout_rate': 0.4124959998239309}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:24,256] Trial 98 finished with value: 0.375243140522661 and parameters: {'num_units_layer1': 62, 'num_units_layer2': 21, 'learning_rate': 0.004621505364863438, 'dropout_rate': 0.498137247477919}. Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 0s 2ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2023-07-20 14:09:27,811] Trial 99 finished with value: 0.33788637293996476 and parameters: {'num_units_layer1': 53, 'num_units_layer2': 61, 'learning_rate': 0.0053571649426193465, 'dropout_rate': 0.4707768872501595}. 
Best is trial 7 with value: 0.2606252274634733.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.8827\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4672\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2867\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.1272\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.0231\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9274\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8300\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7598\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7013\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6306\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.6220\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5420\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5026\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4542\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.4489\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4192\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4102\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3587\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3300\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3335\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - 
loss: 0.3186\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2921\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2748\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2728\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2401\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2382\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2475\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2246\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2062\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2381\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2066\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2018\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1924\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2062\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1783\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2363\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1982\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2060\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1725\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2048\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2020\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2057\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step 
- loss: 0.2028\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1996\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1788\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1719\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1591\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1698\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1489\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1481\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1569\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1329\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1580\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1439\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1354\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1398\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1193\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1386\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1282\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1417\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1469\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1532\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1534\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1369\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 
2ms/step - loss: 0.1454\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1307\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1245\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1197\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1279\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1124\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1136\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1301\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1027\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1049\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0998\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1254\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1099\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1038\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1360\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1271\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1107\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0956\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0997\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0988\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0988\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1124\n", "Epoch 87/100\n", "16/16 [==============================] - 
0s 2ms/step - loss: 0.0980\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0959\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1226\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1117\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1147\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1797\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1425\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1357\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1305\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1253\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1211\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1116\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1218\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1095\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.8326\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4683\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2700\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.1394\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.0014\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9198\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8325\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7560\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 
2ms/step - loss: 0.6665\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6092\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.5647\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5194\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4989\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4463\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4475\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3882\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3780\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3283\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3130\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3103\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2983\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3122\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2811\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2598\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2581\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2310\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2293\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2198\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2202\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1952\n", "Epoch 31/100\n", "16/16 [==============================] - 
0s 2ms/step - loss: 0.1828\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1927\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1881\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1667\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1796\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2129\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1712\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1805\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2089\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1816\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1916\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1738\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1509\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1338\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1326\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1331\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1500\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1537\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1409\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1606\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1701\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1573\n", "Epoch 53/100\n", "16/16 [==============================] 
- 0s 3ms/step - loss: 0.1474\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1544\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1398\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1215\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1276\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1221\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1480\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1391\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1234\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1195\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1270\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1394\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1210\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1138\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1296\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1596\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1488\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1198\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1172\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1030\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1096\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1325\n", "Epoch 75/100\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1379\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1154\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1318\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1134\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1243\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1307\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1240\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1083\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1100\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1372\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1303\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1141\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1215\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1092\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1126\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1178\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1278\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1053\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0975\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1129\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1681\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1396\n", "Epoch 97/100\n", 
"16/16 [==============================] - 0s 2ms/step - loss: 0.1387\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1117\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1134\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1096\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.8215\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4539\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2721\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.1569\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9946\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9005\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8471\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7497\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6761\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6221\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5684\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5199\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4821\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4355\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4234\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3606\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3529\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3308\n", "Epoch 19/100\n", 
"16/16 [==============================] - 0s 2ms/step - loss: 0.3086\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2852\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2620\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2637\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2520\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2355\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2250\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2241\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2162\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2043\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1896\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1845\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1937\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1969\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1799\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1757\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1814\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1760\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1980\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1766\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1769\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1558\n", "Epoch 
41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1552\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1496\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1611\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1476\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1381\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1525\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1398\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1403\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1403\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1599\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1629\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1375\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1469\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1302\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1329\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1334\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1118\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1049\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1079\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1000\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0963\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1211\n", 
"Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0976\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0933\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1383\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1347\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1088\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1559\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1497\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1510\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1303\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1429\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1177\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1151\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1367\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1210\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1345\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1274\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1348\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1325\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1267\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1121\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1010\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 
0.1285\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1007\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1044\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1141\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1095\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0939\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1219\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1290\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1428\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1224\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1176\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1210\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0962\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0965\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0860\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0814\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1051\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.7062\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4050\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 1.2071\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.0647\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.9177\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 
0.8038\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7062\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6734\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5864\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5276\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5014\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4505\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4385\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3946\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3608\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3520\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3073\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2829\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2925\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2857\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2520\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2225\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2307\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2625\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2164\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2221\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1947\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 
0.2210\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2142\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1872\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1826\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1728\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1724\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1617\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1514\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1534\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1572\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1588\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1468\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1528\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1515\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1782\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1617\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1519\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1344\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1335\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1286\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1051\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1171\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - 
loss: 0.1188\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1182\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1394\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1617\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1696\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1509\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1564\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1441\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1369\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1297\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1269\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1475\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1283\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1397\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1414\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1508\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1413\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1298\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1273\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1331\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1112\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1068\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 3ms/step 
- loss: 0.1001\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1015\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0991\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1026\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1095\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1597\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1766\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1588\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1418\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1374\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1500\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1554\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1552\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1465\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1587\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1331\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1193\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1141\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1147\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1448\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1171\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1442\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 
2ms/step - loss: 0.1212\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1303\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1348\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1168\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1281\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1170\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1354\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.6644\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 1.3654\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.1702\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.0575\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9304\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8172\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7273\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6558\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6140\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5668\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4982\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4647\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4084\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3992\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3566\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 
2ms/step - loss: 0.3534\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3400\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3036\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2746\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2585\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2702\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2404\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2371\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2418\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2365\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2164\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2284\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2009\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1860\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1912\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2029\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1831\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1751\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1898\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1787\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2016\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1551\n", "Epoch 38/100\n", "16/16 [==============================] - 
0s 2ms/step - loss: 0.1552\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1405\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1418\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1552\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1949\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1553\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1416\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1424\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1415\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1312\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1826\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1643\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1462\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1473\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1753\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1425\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1487\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1674\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1459\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1734\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1940\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1801\n", "Epoch 60/100\n", "16/16 [==============================] 
- 0s 2ms/step - loss: 0.1385\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1324\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1417\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1334\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1242\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1311\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1389\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1276\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1369\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1380\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1471\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1318\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1338\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1381\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1504\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1317\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1242\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1200\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1251\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1124\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1091\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1073\n", "Epoch 82/100\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1000\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1030\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1024\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1360\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1068\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1168\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1328\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1244\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1134\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1357\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1629\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1640\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1303\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1232\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1381\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1035\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1307\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1235\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1040\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.7262\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4274\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2307\n", "Epoch 4/100\n", "16/16 
[==============================] - 0s 2ms/step - loss: 1.0635\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9249\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8296\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7520\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6692\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6008\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5317\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5116\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4531\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4168\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3742\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3533\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3304\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2891\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2938\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2864\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2733\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2479\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2204\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2206\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2257\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2078\n", "Epoch 26/100\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1982\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1897\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1995\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2062\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1844\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1736\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1606\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1684\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1866\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1948\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1825\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1601\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1592\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1726\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1590\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1813\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1617\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1505\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1529\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1538\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1274\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1504\n", "Epoch 48/100\n", 
"16/16 [==============================] - 0s 2ms/step - loss: 0.1399\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1305\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1390\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1426\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1347\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1384\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1528\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1257\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1357\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1142\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1053\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1191\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0970\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0942\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1049\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1251\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1485\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1787\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2097\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1615\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1608\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1479\n", "Epoch 
70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1258\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1401\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1340\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1318\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1134\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1173\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1195\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1324\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1340\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1588\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1529\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1476\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1652\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1347\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1477\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1296\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1203\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1196\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1157\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1176\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1120\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1343\n", 
"Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1100\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1054\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1118\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1094\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1224\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1047\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1013\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1116\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1325\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.7454\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4561\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2481\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.1019\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9952\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8887\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8392\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7472\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6694\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6048\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5749\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4927\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4656\n", 
"Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4218\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4307\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3641\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3436\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3124\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3114\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2979\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3102\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2919\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2603\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2449\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2613\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2207\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2011\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1986\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1789\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1728\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1746\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1749\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1782\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1708\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 
0.1791\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1623\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1867\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2001\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1604\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1907\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1575\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1396\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1430\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1521\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1348\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1276\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1313\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1504\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1708\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1570\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1955\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1795\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1573\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1760\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1603\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1770\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - 
loss: 0.1510\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1341\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1368\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1397\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1228\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1200\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1187\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1194\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1348\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1413\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1830\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1496\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1485\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1585\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1307\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1305\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1244\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1181\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1056\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1110\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1058\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0968\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step 
- loss: 0.1095\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0914\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1520\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1156\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1446\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1459\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1678\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1566\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1469\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1241\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1274\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1075\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1206\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1412\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1368\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1188\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1041\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1165\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1182\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1443\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1352\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1377\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 
2ms/step - loss: 1.7409\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.3748\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2077\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.0881\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9603\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8296\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7470\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6603\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.6062\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5441\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4902\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4511\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.4199\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3946\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3675\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3254\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3164\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2868\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2758\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2559\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2510\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2392\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 
3ms/step - loss: 0.2385\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2240\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2290\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2281\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2235\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2265\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2116\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1853\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1994\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1845\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1846\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1806\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1797\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1908\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1468\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1461\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1618\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1338\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1349\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1327\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1274\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1430\n", "Epoch 45/100\n", "16/16 [==============================] - 
0s 2ms/step - loss: 0.1284\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1334\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1520\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1771\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1741\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1612\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1790\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1620\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1429\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1287\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1340\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1326\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1215\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1379\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1119\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1255\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1493\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1201\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1598\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1925\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1724\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1526\n", "Epoch 67/100\n", "16/16 [==============================] 
- 0s 2ms/step - loss: 0.1686\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1264\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1168\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1285\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1093\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1233\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1149\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1060\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1138\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1238\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1298\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1138\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1183\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1143\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1433\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1335\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1470\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1563\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1474\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1204\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1255\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1127\n", "Epoch 89/100\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1010\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1298\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1165\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1117\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1081\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1127\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1224\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1248\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1226\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1137\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1150\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1266\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.7501\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4304\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2349\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.0926\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9849\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8803\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7478\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6969\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6307\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5573\n", "Epoch 11/100\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.5038\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4635\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4442\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4109\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3711\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3286\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3325\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3149\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2913\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2807\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2754\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2670\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2278\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2267\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2216\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2002\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2005\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1819\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1778\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1617\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1821\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1548\n", "Epoch 33/100\n", 
"16/16 [==============================] - 0s 2ms/step - loss: 0.1613\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1615\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1390\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1487\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1773\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1936\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2150\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1938\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2106\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1645\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1420\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1441\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1459\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1338\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1423\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1378\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1328\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1302\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1438\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1295\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1184\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1135\n", "Epoch 
55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1165\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1554\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1188\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1221\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1401\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1308\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1341\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1502\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1336\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1866\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1577\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1550\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1292\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1168\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1189\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1342\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1371\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1251\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1649\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1251\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1231\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1303\n", 
"Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1312\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1059\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1116\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1157\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1191\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1092\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1300\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0956\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1038\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1074\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1147\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1175\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1069\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1040\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1081\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0911\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1002\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1037\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1278\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1240\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1121\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 
0.1168\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1087\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1157\n", "Epoch 1/100\n", "16/16 [==============================] - 1s 2ms/step - loss: 1.8165\n", "Epoch 2/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.4686\n", "Epoch 3/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.2700\n", "Epoch 4/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 1.1161\n", "Epoch 5/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.9954\n", "Epoch 6/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8993\n", "Epoch 7/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.8062\n", "Epoch 8/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.7090\n", "Epoch 9/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6582\n", "Epoch 10/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5872\n", "Epoch 11/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.5474\n", "Epoch 12/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5226\n", "Epoch 13/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4623\n", "Epoch 14/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4430\n", "Epoch 15/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4066\n", "Epoch 16/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3838\n", "Epoch 17/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3752\n", "Epoch 18/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3153\n", "Epoch 19/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2894\n", "Epoch 20/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 
0.2873\n", "Epoch 21/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2580\n", "Epoch 22/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2745\n", "Epoch 23/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2598\n", "Epoch 24/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2481\n", "Epoch 25/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2412\n", "Epoch 26/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2604\n", "Epoch 27/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2115\n", "Epoch 28/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2126\n", "Epoch 29/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2075\n", "Epoch 30/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1932\n", "Epoch 31/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1977\n", "Epoch 32/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1820\n", "Epoch 33/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1771\n", "Epoch 34/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1679\n", "Epoch 35/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1779\n", "Epoch 36/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1707\n", "Epoch 37/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1968\n", "Epoch 38/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1799\n", "Epoch 39/100\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1844\n", "Epoch 40/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1789\n", "Epoch 41/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1800\n", "Epoch 42/100\n", "16/16 [==============================] - 0s 2ms/step - 
loss: 0.1790\n", "Epoch 43/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1710\n", "Epoch 44/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1540\n", "Epoch 45/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1477\n", "Epoch 46/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1486\n", "Epoch 47/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1484\n", "Epoch 48/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1647\n", "Epoch 49/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1476\n", "Epoch 50/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1536\n", "Epoch 51/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1667\n", "Epoch 52/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1438\n", "Epoch 53/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1410\n", "Epoch 54/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1754\n", "Epoch 55/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1402\n", "Epoch 56/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1395\n", "Epoch 57/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1625\n", "Epoch 58/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1491\n", "Epoch 59/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1299\n", "Epoch 60/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1246\n", "Epoch 61/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1142\n", "Epoch 62/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1112\n", "Epoch 63/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1336\n", "Epoch 64/100\n", "16/16 [==============================] - 0s 2ms/step 
- loss: 0.1189\n", "Epoch 65/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1142\n", "Epoch 66/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1150\n", "Epoch 67/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1307\n", "Epoch 68/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1191\n", "Epoch 69/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1463\n", "Epoch 70/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1329\n", "Epoch 71/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1219\n", "Epoch 72/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1469\n", "Epoch 73/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1342\n", "Epoch 74/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1371\n", "Epoch 75/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1263\n", "Epoch 76/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1679\n", "Epoch 77/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1332\n", "Epoch 78/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1443\n", "Epoch 79/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1203\n", "Epoch 80/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1150\n", "Epoch 81/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1306\n", "Epoch 82/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1147\n", "Epoch 83/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1174\n", "Epoch 84/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1363\n", "Epoch 85/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1376\n", "Epoch 86/100\n", "16/16 [==============================] - 0s 
2ms/step - loss: 0.1322\n", "Epoch 87/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1352\n", "Epoch 88/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1406\n", "Epoch 89/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1376\n", "Epoch 90/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1115\n", "Epoch 91/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1180\n", "Epoch 92/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1188\n", "Epoch 93/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1001\n", "Epoch 94/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1165\n", "Epoch 95/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1272\n", "Epoch 96/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1215\n", "Epoch 97/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1140\n", "Epoch 98/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1199\n", "Epoch 99/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1104\n", "Epoch 100/100\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1113\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 1ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 1ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "4/4 [==============================] - 0s 2ms/step\n", "1/1 [==============================] - 0s 18ms/step\n", "1/1 [==============================] - 0s 18ms/step\n", "1/1 
# ICR — Identifying Age-Related Conditions.
# Pipeline: load data -> encode/impute -> Optuna hyperparameter search over a
# small Keras MLP -> train a 10-model ensemble -> validation-loss-weighted
# average of predictions -> write submission.csv.

import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import log_loss
from sklearn.preprocessing import StandardScaler
from keras.models import Sequential
from keras.layers import Dense, Dropout, BatchNormalization
from keras.optimizers import Adam
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
import optuna

# ---------------------------------------------------------------- load data
train_df = pd.read_csv('/kaggle/input/icr-identify-age-related-conditions/train.csv')
test_df = pd.read_csv('/kaggle/input/icr-identify-age-related-conditions/test.csv')
greeks_df = pd.read_csv('/kaggle/input/icr-identify-age-related-conditions/greeks.csv')

# EJ is the only categorical feature; map it to {0, 1}.
train_df["EJ"] = train_df["EJ"].replace({"A": 0, "B": 1})
test_df["EJ"] = test_df["EJ"].replace({"A": 0, "B": 1})

# Mean-impute missing values.
# FIX: numeric_only=True is required because both frames still contain the
# string "Id" column — DataFrame.mean() over mixed dtypes raises a TypeError
# on pandas >= 2.0 (and only warned on 1.x).
train_df.fillna(train_df.mean(numeric_only=True), inplace=True)
test_df.fillna(test_df.mean(numeric_only=True), inplace=True)

# FIX: renamed `train_values`/`test_values` -> `train_features`/`train_labels`;
# the original `test_values` actually held the TRAINING target column, which
# the old name actively misrepresented.
train_features = train_df.drop(["Class", "Id"], axis=1)
train_labels = train_df["Class"]

# Hold out 20% for validation (fixed seed for reproducibility).
X_train, X_val, y_train, y_val = train_test_split(
    train_features, train_labels, test_size=0.2, random_state=73)

# Scale with statistics from the training split only (no leakage into val).
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_val = scaler.transform(X_val)


def build_model(num_units_layer1, num_units_layer2, learning_rate, dropout_rate):
    """Build a two-hidden-layer MLP for binary classification.

    Each hidden layer uses ReLU + L2 kernel regularization, followed by
    batch normalization and dropout; the output is a single sigmoid unit,
    so model.predict(...) yields P(Class == 1).
    """
    model = Sequential()
    model.add(Dense(units=num_units_layer1, activation='relu',
                    input_shape=(train_features.shape[1],),
                    kernel_regularizer='l2'))
    model.add(BatchNormalization())
    model.add(Dropout(dropout_rate))
    model.add(Dense(units=num_units_layer2, activation='relu',
                    kernel_regularizer='l2'))
    model.add(BatchNormalization())
    model.add(Dropout(dropout_rate))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy',
                  optimizer=Adam(learning_rate=learning_rate))
    return model


def objective(trial):
    """Optuna objective: validation log-loss of a freshly trained model."""
    num_units_layer1 = trial.suggest_int("num_units_layer1", 32, 128)
    num_units_layer2 = trial.suggest_int("num_units_layer2", 16, 64)
    # FIX: sample the learning rate log-uniformly — a linear scale over
    # [1e-4, 1e-2] puts ~90% of trials in the top decade alone.
    learning_rate = trial.suggest_float("learning_rate", 1e-4, 1e-2, log=True)
    dropout_rate = trial.suggest_float("dropout_rate", 0.1, 0.5)

    model = build_model(num_units_layer1, num_units_layer2,
                        learning_rate, dropout_rate)

    # Stop on plateauing validation loss and keep the best weights.
    early_stopping = EarlyStopping(monitor='val_loss', patience=5,
                                   restore_best_weights=True)
    # Shrink the LR 10x if val_loss stalls for 3 epochs.
    lr_scheduler = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3)

    model.fit(X_train, y_train, batch_size=32, epochs=100,
              validation_data=(X_val, y_val),
              callbacks=[early_stopping, lr_scheduler], verbose=0)

    return log_loss(y_val, model.predict(X_val))


# Hyperparameter search.
study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=100)
best_params = study.best_params

# Build an ensemble of identically configured models; they differ only in
# their random weight initializations.
num_models = 10
models = [build_model(best_params["num_units_layer1"],
                      best_params["num_units_layer2"],
                      best_params["learning_rate"],
                      best_params["dropout_rate"])
          for _ in range(num_models)]

# NOTE(review): members are trained on the training split for a fixed 100
# epochs, without the early stopping used during the search — kept as-is to
# preserve the original training protocol.
for model in models:
    model.fit(X_train, y_train, batch_size=32, epochs=100, verbose=1)

# Weight each member by the inverse of its validation log-loss.
val_preds = [model.predict(X_val) for model in models]
val_losses = [log_loss(y_val, pred) for pred in val_preds]
weights = [1.0 / loss for loss in val_losses]
weights_sum = sum(weights)

final_pred_ensemble = np.zeros_like(val_preds[0])
for i in range(num_models):
    final_pred_ensemble += val_preds[i] * (weights[i] / weights_sum)

# Weighted-average prediction on the (scaled) test features.
test_values_scaled = scaler.transform(test_df.drop("Id", axis=1))
test_preds = [model.predict(test_values_scaled) for model in models]
final_pred_test = np.zeros_like(test_preds[0])
for i in range(num_models):
    final_pred_test += test_preds[i] * (weights[i] / weights_sum)

# -------------------------------------------------------------- submission
# FIX: the original comments had the classes swapped. The sigmoid output is
# P(Class == 1), so class_1 gets the raw prediction and class_0 its
# complement — which is exactly what the code already did.
sample_submission_df = pd.read_csv(
    '/kaggle/input/icr-identify-age-related-conditions/sample_submission.csv')
sample_submission_df['Id'] = test_df.reset_index()['Id']
sample_submission_df["class_0"] = 1 - final_pred_test.flatten()  # P(Class == 0)
sample_submission_df["class_1"] = final_pred_test.flatten()      # P(Class == 1)
sample_submission_df.set_index('Id').to_csv('submission.csv')