{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import pandas as pd \n", "from datetime import datetime \n", "from datetime import date\n", "import matplotlib.pyplot as plt\n", "import seaborn as sns\n", "import numpy as np\n", "from keras.models import Sequential\n", "from keras.layers import LSTM, Dense\n", "from sklearn.model_selection import train_test_split\n", "from sklearn.preprocessing import MinMaxScaler,StandardScaler\n", "from keras.callbacks import ModelCheckpoint\n" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/html": [ "<div>
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
datezone_047_hw_valvertu_004_sat_sp_tnzone_047_tempzone_047_fan_spdrtu_004_fltrd_sa_flow_tnrtu_004_sa_temprtu_004_pa_static_stpt_tnrtu_004_oa_flow_tnrtu_004_oadmpr_pct...zone_047_heating_spUnnamed: 47_yhvac_Shp_hws_temparu_001_cwr_temparu_001_cws_fr_gpmaru_001_cws_temparu_001_hwr_temparu_001_hws_fr_gpmaru_001_hws_temp
02018-01-01 00:00:00100.069.067.520.09265.60466.10.060.00000028.0...NaNNaNNaN75.3NaNNaNNaNNaNNaNNaN
12018-01-01 00:01:00100.069.067.520.09265.60466.00.066572.09916228.0...NaNNaNNaN75.3NaNNaNNaNNaNNaNNaN
22018-01-01 00:02:00100.069.067.520.09708.24066.10.067628.83254228.0...NaNNaNNaN75.3NaNNaNNaNNaNNaNNaN
32018-01-01 00:03:00100.069.067.520.09611.63866.10.067710.29461728.0...NaNNaNNaN75.3NaNNaNNaNNaNNaNNaN
42018-01-01 00:04:00100.069.067.520.09215.11066.00.067139.18409028.0...NaNNaNNaN75.3NaNNaNNaNNaNNaNNaN
..................................................................
20721492020-12-31 23:58:00100.068.063.220.018884.83464.40.062938.32000023.4...71.069.023.145000123.856.2554.7156.4123.4261.6122.36
20721502020-12-31 23:58:00100.068.063.220.018884.83464.40.062938.32000023.4...71.069.023.145000123.856.2554.7156.4123.4261.6122.36
20721512020-12-31 23:59:00100.068.063.220.019345.50864.30.063154.39000023.4...71.069.023.145000123.856.2554.7156.4123.4261.6122.36
20721522020-12-31 23:59:00100.068.063.220.019345.50864.30.063154.39000023.4...71.069.023.145000123.856.2554.7156.4123.4261.6122.36
20721532021-01-01 00:00:00100.068.063.220.018650.23264.10.063076.27000022.9...71.069.023.788947123.856.2554.7156.4123.4261.6122.36
\n", "

2072154 rows × 30 columns

\n", "
" ], "text/plain": [ " date zone_047_hw_valve rtu_004_sat_sp_tn \\\n", "0 2018-01-01 00:00:00 100.0 69.0 \n", "1 2018-01-01 00:01:00 100.0 69.0 \n", "2 2018-01-01 00:02:00 100.0 69.0 \n", "3 2018-01-01 00:03:00 100.0 69.0 \n", "4 2018-01-01 00:04:00 100.0 69.0 \n", "... ... ... ... \n", "2072149 2020-12-31 23:58:00 100.0 68.0 \n", "2072150 2020-12-31 23:58:00 100.0 68.0 \n", "2072151 2020-12-31 23:59:00 100.0 68.0 \n", "2072152 2020-12-31 23:59:00 100.0 68.0 \n", "2072153 2021-01-01 00:00:00 100.0 68.0 \n", "\n", " zone_047_temp zone_047_fan_spd rtu_004_fltrd_sa_flow_tn \\\n", "0 67.5 20.0 9265.604 \n", "1 67.5 20.0 9265.604 \n", "2 67.5 20.0 9708.240 \n", "3 67.5 20.0 9611.638 \n", "4 67.5 20.0 9215.110 \n", "... ... ... ... \n", "2072149 63.2 20.0 18884.834 \n", "2072150 63.2 20.0 18884.834 \n", "2072151 63.2 20.0 19345.508 \n", "2072152 63.2 20.0 19345.508 \n", "2072153 63.2 20.0 18650.232 \n", "\n", " rtu_004_sa_temp rtu_004_pa_static_stpt_tn rtu_004_oa_flow_tn \\\n", "0 66.1 0.06 0.000000 \n", "1 66.0 0.06 6572.099162 \n", "2 66.1 0.06 7628.832542 \n", "3 66.1 0.06 7710.294617 \n", "4 66.0 0.06 7139.184090 \n", "... ... ... ... \n", "2072149 64.4 0.06 2938.320000 \n", "2072150 64.4 0.06 2938.320000 \n", "2072151 64.3 0.06 3154.390000 \n", "2072152 64.3 0.06 3154.390000 \n", "2072153 64.1 0.06 3076.270000 \n", "\n", " rtu_004_oadmpr_pct ... zone_047_heating_sp Unnamed: 47_y \\\n", "0 28.0 ... NaN NaN \n", "1 28.0 ... NaN NaN \n", "2 28.0 ... NaN NaN \n", "3 28.0 ... NaN NaN \n", "4 28.0 ... NaN NaN \n", "... ... ... ... ... \n", "2072149 23.4 ... 71.0 69.0 \n", "2072150 23.4 ... 71.0 69.0 \n", "2072151 23.4 ... 71.0 69.0 \n", "2072152 23.4 ... 71.0 69.0 \n", "2072153 22.9 ... 71.0 69.0 \n", "\n", " hvac_S hp_hws_temp aru_001_cwr_temp aru_001_cws_fr_gpm \\\n", "0 NaN 75.3 NaN NaN \n", "1 NaN 75.3 NaN NaN \n", "2 NaN 75.3 NaN NaN \n", "3 NaN 75.3 NaN NaN \n", "4 NaN 75.3 NaN NaN \n", "... ... ... ... ... 
\n", "2072149 23.145000 123.8 56.25 54.71 \n", "2072150 23.145000 123.8 56.25 54.71 \n", "2072151 23.145000 123.8 56.25 54.71 \n", "2072152 23.145000 123.8 56.25 54.71 \n", "2072153 23.788947 123.8 56.25 54.71 \n", "\n", " aru_001_cws_temp aru_001_hwr_temp aru_001_hws_fr_gpm \\\n", "0 NaN NaN NaN \n", "1 NaN NaN NaN \n", "2 NaN NaN NaN \n", "3 NaN NaN NaN \n", "4 NaN NaN NaN \n", "... ... ... ... \n", "2072149 56.4 123.42 61.6 \n", "2072150 56.4 123.42 61.6 \n", "2072151 56.4 123.42 61.6 \n", "2072152 56.4 123.42 61.6 \n", "2072153 56.4 123.42 61.6 \n", "\n", " aru_001_hws_temp \n", "0 NaN \n", "1 NaN \n", "2 NaN \n", "3 NaN \n", "4 NaN \n", "... ... \n", "2072149 122.36 \n", "2072150 122.36 \n", "2072151 122.36 \n", "2072152 122.36 \n", "2072153 122.36 \n", "\n", "[2072154 rows x 30 columns]" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "merged = pd.read_csv(r'C:\\Users\\jerin\\Downloads\\lbnlbldg59\\lbnlbldg59\\lbnlbldg59.processed\\LBNLBLDG59\\clean_Bldg59_2018to2020\\clean data\\long_merge.csv')\n", "\n", "zone = \"47\"\n", "\n", "if zone in [\"36\", \"37\", \"38\", \"39\", \"40\", \"41\", \"42\", \"64\", \"65\", \"66\", \"67\", \"68\", \"69\", \"70\"]:\n", " rtu = \"rtu_001\"\n", " wing = \"hvac_N\"\n", "elif zone in [\"18\", \"25\", \"26\", \"45\", \"48\", \"55\", \"56\", \"61\"]:\n", " rtu = \"rtu_003\"\n", " wing = \"hvac_S\"\n", "elif zone in [\"16\", \"17\", \"21\", \"22\", \"23\", \"24\", \"46\", \"47\", \"51\", \"52\", \"53\", \"54\"]:\n", " rtu = \"rtu_004\"\n", " wing = \"hvac_S\"\n", "else:\n", " rtu = \"rtu_002\"\n", " wing = \"hvac_N\"\n", "#merged is the dataframe\n", "sorted = merged[[\"date\"]+[col for col in merged.columns if zone in col or rtu in col or wing in col]+[\"hp_hws_temp\", \"aru_001_cwr_temp\" , \"aru_001_cws_fr_gpm\" ,\"aru_001_cws_temp\",\"aru_001_hwr_temp\" ,\"aru_001_hws_fr_gpm\" ,\"aru_001_hws_temp\"]]\n", "sorted" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, 
"outputs": [ { "data": { "text/plain": [ "date 0\n", "zone_047_hw_valve 0\n", "rtu_004_sat_sp_tn 0\n", "zone_047_temp 0\n", "zone_047_fan_spd 0\n", "rtu_004_fltrd_sa_flow_tn 0\n", "rtu_004_sa_temp 0\n", "rtu_004_pa_static_stpt_tn 0\n", "rtu_004_oa_flow_tn 0\n", "rtu_004_oadmpr_pct 0\n", "rtu_004_econ_stpt_tn 0\n", "rtu_004_ra_temp 0\n", "rtu_004_oa_temp 0\n", "rtu_004_ma_temp 0\n", "rtu_004_sf_vfd_spd_fbk_tn 0\n", "rtu_004_rf_vfd_spd_fbk_tn 0\n", "rtu_004_fltrd_gnd_lvl_plenum_press_tn 0\n", "rtu_004_fltrd_lvl2_plenum_press_tn 0\n", "zone_047_cooling_sp 0\n", "Unnamed: 47_x 394570\n", "zone_047_heating_sp 0\n", "Unnamed: 47_y 394570\n", "hvac_S 13035\n", "hp_hws_temp 0\n", "aru_001_cwr_temp 524350\n", "aru_001_cws_fr_gpm 524350\n", "aru_001_cws_temp 524350\n", "aru_001_hwr_temp 299165\n", "aru_001_hws_fr_gpm 299165\n", "aru_001_hws_temp 299165\n", "dtype: int64" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "final_df = sorted.copy()\n", "final_df['date'] = pd.to_datetime(final_df['date'], format = \"%Y-%m-%d %H:%M:%S\")\n", "final_df = final_df[ (final_df.date.dt.date >date(2019, 4, 1)) & (final_df.date.dt.date< date(2020, 2, 15))]\n", "final_df.isna().sum()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "testdataset_df = final_df[(final_df.date.dt.date <date(2019, 11, 8))]\n", "traindataset_df = final_df[(final_df.date.dt.date >date(2019, 11, 8))]\n", "\n", "testdataset = testdataset_df[['rtu_004_oa_temp','rtu_004_ra_temp','hp_hws_temp','rtu_004_oa_flow_tn','rtu_004_oadmpr_pct',\n", " 'rtu_004_sat_sp_tn','rtu_004_rf_vfd_spd_fbk_tn','rtu_004_ma_temp','rtu_004_sa_temp','rtu_004_fltrd_sa_flow_tn',\n", " 'rtu_004_sf_vfd_spd_fbk_tn']].values\n", "\n", "\n", "traindataset = traindataset_df[['rtu_004_oa_temp','rtu_004_ra_temp','hp_hws_temp','rtu_004_oa_flow_tn','rtu_004_oadmpr_pct',\n", " 'rtu_004_sat_sp_tn','rtu_004_rf_vfd_spd_fbk_tn','rtu_004_ma_temp','rtu_004_sa_temp','rtu_004_fltrd_sa_flow_tn',\n", " 'rtu_004_sf_vfd_spd_fbk_tn']].values" ] }, { 
"cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "traindataset = traindataset.astype('float32')\n", "testdataset = testdataset.astype('float32')\n", "\n", "\n", "scaler = MinMaxScaler(feature_range=(0, 1))\n", "traindataset = scaler.fit_transform(traindataset)\n", "testdataset = scaler.transform(testdataset)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/10\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "c:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\keras\\src\\layers\\rnn\\rnn.py:205: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n", " super().__init__(**kwargs)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001b[1m2745/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━━━\u001b[0m \u001b[1m6s\u001b[0m 14ms/step - loss: 0.0100" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "Cell \u001b[1;32mIn[6], line 38\u001b[0m\n\u001b[0;32m 36\u001b[0m checkpoint_path \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlstm2.keras\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 37\u001b[0m checkpoint_callback \u001b[38;5;241m=\u001b[39m ModelCheckpoint(filepath\u001b[38;5;241m=\u001b[39mcheckpoint_path, monitor\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mval_loss\u001b[39m\u001b[38;5;124m'\u001b[39m, verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m, save_best_only\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, 
mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmin\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m---> 38\u001b[0m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX_train\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_train\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalidation_data\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mX_test\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_test\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mepochs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m10\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m64\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mverbose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\u001b[43mcheckpoint_callback\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\keras\\src\\utils\\traceback_utils.py:118\u001b[0m, in \u001b[0;36mfilter_traceback..error_handler\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 116\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 117\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 118\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 119\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m 
\u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 120\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m _process_traceback_frames(e\u001b[38;5;241m.\u001b[39m__traceback__)\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\keras\\src\\backend\\tensorflow\\trainer.py:323\u001b[0m, in \u001b[0;36mTensorFlowTrainer.fit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq)\u001b[0m\n\u001b[0;32m 321\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m step, iterator \u001b[38;5;129;01min\u001b[39;00m epoch_iterator\u001b[38;5;241m.\u001b[39menumerate_epoch():\n\u001b[0;32m 322\u001b[0m callbacks\u001b[38;5;241m.\u001b[39mon_train_batch_begin(step)\n\u001b[1;32m--> 323\u001b[0m logs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrain_function\u001b[49m\u001b[43m(\u001b[49m\u001b[43miterator\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 324\u001b[0m callbacks\u001b[38;5;241m.\u001b[39mon_train_batch_end(\n\u001b[0;32m 325\u001b[0m step, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pythonify_logs(logs)\n\u001b[0;32m 326\u001b[0m )\n\u001b[0;32m 327\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstop_training:\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\util\\traceback_utils.py:150\u001b[0m, in \u001b[0;36mfilter_traceback..error_handler\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 148\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 149\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 150\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 151\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 152\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m _process_traceback_frames(e\u001b[38;5;241m.\u001b[39m__traceback__)\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\polymorphic_function.py:833\u001b[0m, in \u001b[0;36mFunction.__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 830\u001b[0m compiler \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mxla\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_jit_compile \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnonXla\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 832\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m OptionalXlaContext(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_jit_compile):\n\u001b[1;32m--> 833\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwds\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 835\u001b[0m new_tracing_count \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mexperimental_get_tracing_count()\n\u001b[0;32m 836\u001b[0m without_tracing \u001b[38;5;241m=\u001b[39m (tracing_count \u001b[38;5;241m==\u001b[39m 
new_tracing_count)\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\polymorphic_function.py:878\u001b[0m, in \u001b[0;36mFunction._call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 875\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_lock\u001b[38;5;241m.\u001b[39mrelease()\n\u001b[0;32m 876\u001b[0m \u001b[38;5;66;03m# In this case we have not created variables on the first call. So we can\u001b[39;00m\n\u001b[0;32m 877\u001b[0m \u001b[38;5;66;03m# run the first trace but we should fail if variables are created.\u001b[39;00m\n\u001b[1;32m--> 878\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[43mtracing_compilation\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcall_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwds\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_variable_creation_config\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_created_variables:\n\u001b[0;32m 882\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCreating variables on a non-first call to a function\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 883\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m decorated with tf.function.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\tracing_compilation.py:139\u001b[0m, in \u001b[0;36mcall_function\u001b[1;34m(args, kwargs, tracing_options)\u001b[0m\n\u001b[0;32m 
137\u001b[0m bound_args \u001b[38;5;241m=\u001b[39m function\u001b[38;5;241m.\u001b[39mfunction_type\u001b[38;5;241m.\u001b[39mbind(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 138\u001b[0m flat_inputs \u001b[38;5;241m=\u001b[39m function\u001b[38;5;241m.\u001b[39mfunction_type\u001b[38;5;241m.\u001b[39munpack_inputs(bound_args)\n\u001b[1;32m--> 139\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunction\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_flat\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# pylint: disable=protected-access\u001b[39;49;00m\n\u001b[0;32m 140\u001b[0m \u001b[43m \u001b[49m\u001b[43mflat_inputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcaptured_inputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfunction\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcaptured_inputs\u001b[49m\n\u001b[0;32m 141\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\concrete_function.py:1322\u001b[0m, in \u001b[0;36mConcreteFunction._call_flat\u001b[1;34m(self, tensor_inputs, captured_inputs)\u001b[0m\n\u001b[0;32m 1318\u001b[0m possible_gradient_type \u001b[38;5;241m=\u001b[39m gradients_util\u001b[38;5;241m.\u001b[39mPossibleTapeGradientTypes(args)\n\u001b[0;32m 1319\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (possible_gradient_type \u001b[38;5;241m==\u001b[39m gradients_util\u001b[38;5;241m.\u001b[39mPOSSIBLE_GRADIENT_TYPES_NONE\n\u001b[0;32m 1320\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m executing_eagerly):\n\u001b[0;32m 1321\u001b[0m \u001b[38;5;66;03m# No tape is watching; skip to running the function.\u001b[39;00m\n\u001b[1;32m-> 1322\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_inference_function\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcall_preflattened\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1323\u001b[0m forward_backward \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_select_forward_and_backward_functions(\n\u001b[0;32m 1324\u001b[0m args,\n\u001b[0;32m 1325\u001b[0m possible_gradient_type,\n\u001b[0;32m 1326\u001b[0m executing_eagerly)\n\u001b[0;32m 1327\u001b[0m forward_function, args_with_tangents \u001b[38;5;241m=\u001b[39m forward_backward\u001b[38;5;241m.\u001b[39mforward()\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\atomic_function.py:216\u001b[0m, in \u001b[0;36mAtomicFunction.call_preflattened\u001b[1;34m(self, args)\u001b[0m\n\u001b[0;32m 214\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcall_preflattened\u001b[39m(\u001b[38;5;28mself\u001b[39m, args: Sequence[core\u001b[38;5;241m.\u001b[39mTensor]) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Any:\n\u001b[0;32m 215\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Calls with flattened tensor inputs and returns the structured output.\"\"\"\u001b[39;00m\n\u001b[1;32m--> 216\u001b[0m flat_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcall_flat\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 217\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfunction_type\u001b[38;5;241m.\u001b[39mpack_output(flat_outputs)\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\atomic_function.py:251\u001b[0m, in 
\u001b[0;36mAtomicFunction.call_flat\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 249\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m record\u001b[38;5;241m.\u001b[39mstop_recording():\n\u001b[0;32m 250\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_bound_context\u001b[38;5;241m.\u001b[39mexecuting_eagerly():\n\u001b[1;32m--> 251\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_bound_context\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcall_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mlist\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mlen\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfunction_type\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mflat_outputs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 257\u001b[0m outputs \u001b[38;5;241m=\u001b[39m make_call_op_in_graph(\n\u001b[0;32m 258\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m 259\u001b[0m \u001b[38;5;28mlist\u001b[39m(args),\n\u001b[0;32m 260\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_bound_context\u001b[38;5;241m.\u001b[39mfunction_call_options\u001b[38;5;241m.\u001b[39mas_attrs(),\n\u001b[0;32m 261\u001b[0m )\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\context.py:1500\u001b[0m, in 
\u001b[0;36mContext.call_function\u001b[1;34m(self, name, tensor_inputs, num_outputs)\u001b[0m\n\u001b[0;32m 1498\u001b[0m cancellation_context \u001b[38;5;241m=\u001b[39m cancellation\u001b[38;5;241m.\u001b[39mcontext()\n\u001b[0;32m 1499\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m cancellation_context \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m-> 1500\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[43mexecute\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexecute\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 1501\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdecode\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mutf-8\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1502\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_outputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mnum_outputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1503\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtensor_inputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1504\u001b[0m \u001b[43m \u001b[49m\u001b[43mattrs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1505\u001b[0m \u001b[43m \u001b[49m\u001b[43mctx\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1506\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1507\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 1508\u001b[0m outputs \u001b[38;5;241m=\u001b[39m execute\u001b[38;5;241m.\u001b[39mexecute_with_cancellation(\n\u001b[0;32m 1509\u001b[0m name\u001b[38;5;241m.\u001b[39mdecode(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mutf-8\u001b[39m\u001b[38;5;124m\"\u001b[39m),\n\u001b[0;32m 1510\u001b[0m 
num_outputs\u001b[38;5;241m=\u001b[39mnum_outputs,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1514\u001b[0m cancellation_manager\u001b[38;5;241m=\u001b[39mcancellation_context,\n\u001b[0;32m 1515\u001b[0m )\n", "File \u001b[1;32mc:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\tensorflow\\python\\eager\\execute.py:53\u001b[0m, in \u001b[0;36mquick_execute\u001b[1;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 52\u001b[0m ctx\u001b[38;5;241m.\u001b[39mensure_initialized()\n\u001b[1;32m---> 53\u001b[0m tensors \u001b[38;5;241m=\u001b[39m \u001b[43mpywrap_tfe\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mTFE_Py_Execute\u001b[49m\u001b[43m(\u001b[49m\u001b[43mctx\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_handle\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mop_name\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 54\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mattrs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_outputs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 55\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m core\u001b[38;5;241m.\u001b[39m_NotOkStatusException \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 56\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m name \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "train,test = traindataset,testdataset\n", "\n", "def create_dataset(dataset,time_step):\n", " x1,x2,x3,x4,x5,x6,x7,x8,x9,Y = [],[],[],[],[],[],[],[],[],[]\n", " for i in range(len(dataset)-time_step-1):\n", " x1.append(dataset[i:(i+time_step), 0])\n", " x2.append(dataset[i:(i+time_step), 1])\n", " x3.append(dataset[i:(i+time_step), 2])\n", " 
x4.append(dataset[i:(i+time_step), 3])\n", " x5.append(dataset[i:(i+time_step), 4])\n", " x6.append(dataset[i:(i+time_step), 5])\n", " x7.append(dataset[i:(i+time_step), 6])\n", " x8.append(dataset[i:(i+time_step), 7])\n", " # x9.append(dataset[i:(i+time_step), 8])\n", " Y.append([dataset[i + time_step, 7]])\n", " x1,x2,x3,x4,x5,x6,x7,x8 = np.array(x1),np.array(x2),np.array(x3), np.array(x4),np.array(x5),np.array(x6),np.array(x7),np.array(x8)#,np.array(x9)\n", " Y = np.reshape(Y,(len(Y),1))\n", " return np.stack([x1,x2,x3,x4,x5,x6,x7,x8],axis=2),Y\n", "\n", "\n", "\n", "\n", "time_step = 30\n", "X_train, y_train = create_dataset(train, time_step)\n", "X_test, y_test = create_dataset(test, time_step)\n", "\n", "\n", "model = Sequential()\n", "model.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))\n", "model.add(LSTM(units=50, return_sequences=True))\n", "model.add(LSTM(units=30))\n", "model.add(Dense(units=1))\n", "\n", "model.compile(optimizer='adam', loss='mean_squared_error')\n", "\n", "checkpoint_path = \"lstm2.keras\"\n", "checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=10, batch_size=64, verbose=1, callbacks=[checkpoint_callback])\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/5\n", "\u001b[1m3218/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 1.8977e-04\n", "Epoch 1: val_loss improved from inf to 0.01131, saving model to lstm2.keras\n", "\u001b[1m3220/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m94s\u001b[0m 29ms/step - loss: 1.8977e-04 - val_loss: 0.0113\n", "Epoch 2/5\n", "\u001b[1m3219/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m 
\u001b[1m0s\u001b[0m 18ms/step - loss: 1.7357e-04\n", "Epoch 2: val_loss did not improve from 0.01131\n", "\u001b[1m3220/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m91s\u001b[0m 28ms/step - loss: 1.7358e-04 - val_loss: 0.0123\n", "Epoch 3/5\n", "\u001b[1m3219/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 18ms/step - loss: 1.6701e-04\n", "Epoch 3: val_loss did not improve from 0.01131\n", "\u001b[1m3220/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m92s\u001b[0m 28ms/step - loss: 1.6701e-04 - val_loss: 0.0127\n", "Epoch 4/5\n", "\u001b[1m3220/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step - loss: 1.7043e-04\n", "Epoch 4: val_loss did not improve from 0.01131\n", "\u001b[1m3220/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m91s\u001b[0m 28ms/step - loss: 1.7043e-04 - val_loss: 0.0131\n", "Epoch 5/5\n", "\u001b[1m3218/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 1.6319e-04\n", "Epoch 5: val_loss did not improve from 0.01131\n", "\u001b[1m3220/3220\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m104s\u001b[0m 32ms/step - loss: 1.6319e-04 - val_loss: 0.0134\n" ] }, { "data": { "text/plain": [ "" ] }, "execution_count": 45, "metadata": {}, "output_type": "execute_result" } ], "source": [ "checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=5, batch_size=64, verbose=1, callbacks=[checkpoint_callback])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[1m9900/9900\u001b[0m 
\u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m34s\u001b[0m 3ms/step\n" ] } ], "source": [ "# train_predict = model.predict(X_train)\n", "test_predict = model.predict(X_test)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "%matplotlib qt\n", "#'rtu_004_ma_temp','rtu_004_sa_temp'\n", "var = 0\n", "plt.plot(testdataset_df['date'][31:],y_test, label='Original Testing Data', color='blue')\n", "plt.plot(testdataset_df['date'][31:],test_predict, label='Predicted Testing Data', color='red',alpha=0.8)\n", "# anomalies = np.where(abs(test_predict[:,var] - y_test[:,var]) > 0.38)[0]\n", "# plt.scatter(anomalies,test_predict[anomalies,var], color='black',marker =\"o\",s=100 )\n", "\n", "\n", "plt.title('Testing Data - Predicted vs Actual')\n", "plt.xlabel('Time')\n", "plt.ylabel('Value')\n", "plt.legend()\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`. \n" ] } ], "source": [ "from tensorflow.keras.models import load_model\n", "# model.save(\"MA_temp_model.h5\") \n", "# loaded_model = load_model(\"MA_temp_model.h5\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "ENERGY DATA" ] }, { "cell_type": "code", "execution_count": 267, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
dateair_temp_set_1air_temp_set_2dew_point_temperature_set_1drelative_humidity_set_1solar_radiation_set_1wifi_third_southwifi_fourth_southhvac_Nhvac_S
02018-01-01 00:00:0011.6411.518.179.0786.7NaNNaNNaNNaN
12018-01-01 00:01:0011.6411.518.179.0786.7NaNNaNNaNNaN
\n", "
" ], "text/plain": [ " date air_temp_set_1 air_temp_set_2 \\\n", "0 2018-01-01 00:00:00 11.64 11.51 \n", "1 2018-01-01 00:01:00 11.64 11.51 \n", "\n", " dew_point_temperature_set_1d relative_humidity_set_1 \\\n", "0 8.1 79.07 \n", "1 8.1 79.07 \n", "\n", " solar_radiation_set_1 wifi_third_south wifi_fourth_south hvac_N hvac_S \n", "0 86.7 NaN NaN NaN NaN \n", "1 86.7 NaN NaN NaN NaN " ] }, "execution_count": 267, "metadata": {}, "output_type": "execute_result" } ], "source": [ "zone = [\"18\", \"25\", \"26\", \"45\", \"48\", \"55\", \"56\", \"61\",\"16\", \"17\", \"21\", \"23\", \"24\", \"46\", \"47\", \"51\", \"52\", \"53\", \"54\"]\n", "rtu = [\"rtu_001\",\"rtu_002\",\"rtu_003\",\"rtu_004\"]\n", "wing = [\"hvac_N\",\"hvac_S\"]\n", "env = [\"air_temp_set_1\",\"air_temp_set_2\",\"dew_point_temperature_set_1d\",\"relative_humidity_set_1\",\"solar_radiation_set_1\"]\n", "wifi=[\"wifi_third_south\",\"wifi_fourth_south\"]\n", "# any(sub in col for sub in zone) or\n", "energy_data = merged[[\"date\"]+[col for col in merged.columns if \n", " any(sub in col for sub in env) or any(sub in col for sub in wifi)]+wing]\n", "df_filtered = energy_data[[col for col in energy_data.columns if 'Unnamed' not in col]]\n", "df_filtered = df_filtered[[col for col in df_filtered.columns if 'co2' not in col]]\n", "df_filtered = df_filtered[[col for col in df_filtered.columns if 'templogger' not in col]]\n", "# df_filtered = df_filtered.dropna()\n", "df_filtered.head(2)" ] }, { "cell_type": "code", "execution_count": 268, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "There are NA values in the DataFrame columns.\n" ] } ], "source": [ "df_filtered['date'] = pd.to_datetime(df_filtered['date'], format = \"%Y-%m-%d %H:%M:%S\")\n", "df_filtered = df_filtered[ (df_filtered.date.dt.date >date(2019, 4, 1)) & (df_filtered.date.dt.date< date(2020, 2, 15))]\n", "# df_filtered.isna().sum()\n", "if df_filtered.isna().any().any():\n", " print(\"There are NA values 
in the DataFrame columns.\")" ] }, { "cell_type": "code", "execution_count": 269, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 269, "metadata": {}, "output_type": "execute_result" } ], "source": [ "testdataset_df = df_filtered[(df_filtered.date.dt.date date(2019, 11, 8))]\n", "\n", "testdataset = testdataset_df.drop(columns=[\"date\"]).values\n", "\n", "traindataset = traindataset_df.drop(columns=[\"date\"]).values\n", "\n", "columns_with_na = traindataset_df.columns[traindataset_df.isna().any()].tolist()\n", "columns_with_na" ] }, { "cell_type": "code", "execution_count": 270, "metadata": {}, "outputs": [], "source": [ "traindataset = traindataset.astype('float32')\n", "testdataset = testdataset.astype('float32')\n", "\n", "scaler = MinMaxScaler(feature_range=(0, 1))\n", "traindataset = scaler.fit_transform(traindataset)\n", "testdataset = scaler.transform(testdataset)" ] }, { "cell_type": "code", "execution_count": 271, "metadata": {}, "outputs": [], "source": [ "train,test = traindataset,testdataset\n", "\n", "def create_dataset(dataset,time_step):\n", " x = [[] for _ in range(9)] \n", " Y = []\n", " for i in range(len(dataset) - time_step - 1):\n", " for j in range(9):\n", " x[j].append(dataset[i:(i + time_step), j])\n", " Y.append([dataset[i + time_step, 7],dataset[i + time_step, 8]])\n", " x= [np.array(feature_list) for feature_list in x]\n", " Y = np.reshape(Y,(len(Y),2))\n", " return np.stack(x,axis=2),Y\n", "\n", "time_step = 30\n", "X_train, y_train = create_dataset(train, time_step)\n", "X_test, y_test = create_dataset(test, time_step)\n", "\n", "\n", "model = Sequential()\n", "model.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))\n", "model.add(LSTM(units=50, return_sequences=True))\n", "model.add(LSTM(units=30))\n", "model.add(Dense(units=2))\n", "\n", "model.compile(optimizer='adam', loss='mean_squared_error')\n", "\n", "checkpoint_path = \"lstm_energy_01.keras\"\n", 
"checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3, batch_size=64, verbose=1, callbacks=[checkpoint_callback])\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "# model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=10, batch_size=64, verbose=1, callbacks=[checkpoint_callback])" ] }, { "cell_type": "code", "execution_count": 272, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[1m6345/6345\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m42s\u001b[0m 6ms/step\n" ] } ], "source": [ "from tensorflow.keras.models import load_model\n", "checkpoint_path = \"lstm_energy_01.keras\"\n", "model = load_model(checkpoint_path)\n", "test_predict1 = model.predict(X_test)\n", "# train_predict1 = model.predict(X_train)" ] }, { "cell_type": "code", "execution_count": 273, "metadata": {}, "outputs": [], "source": [ "%matplotlib qt\n", "var = 1\n", "plt.plot(testdataset_df['date'][31:],y_test[:,1], label='Original Testing Data', color='blue')\n", "plt.plot(testdataset_df['date'][31:],test_predict1[:,1], label='Predicted Testing Data', color='red',alpha=0.8)\n", "# anomalies = np.where(abs(test_predict[:,var] - y_test[:,var]) > 0.38)[0]\n", "# plt.scatter(anomalies,test_predict[anomalies,var], color='black',marker =\"o\",s=100 )\n", "\n", "\n", "plt.title('Testing Data - Predicted vs Actual')\n", "plt.xlabel('Time')\n", "plt.ylabel('Value')\n", "plt.legend()\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "WARNING:absl:You are saving your model as an HDF5 file 
via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`. \n" ] } ], "source": [ "# from tensorflow.keras.models import load_model\n", "# model.save(\"energy_model_01.h5\") " ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "%matplotlib qt\n", "plt.plot(df_filtered['date'],df_filtered['hvac_S'])\n", "plt.plot(df_filtered['date'],df_filtered['rtu_003_sf_vfd_spd_fbk_tn'])\n", "plt.plot(df_filtered['date'],df_filtered['zone_025_temp'])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 22, "metadata": {}, "output_type": "execute_result" } ], "source": [ "plt.plot(merged['hvac_S'])\n", "plt.plot(testdataset_df['hvac_S'])\n", "plt.plot(traindataset_df['hvac_S'],'r')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ "plt.plot(merged['hvac_N'])\n", "plt.plot(testdataset_df['hvac_N'])\n", "plt.plot(traindataset_df['hvac_N'],'r')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# merged.columns.to_list()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2.16.1\n" ] } ], "source": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "LSTM 2.0" ] }, { "cell_type": "code", "execution_count": 325, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
datehp_hws_temprtu_001_sat_sp_tnrtu_002_sat_sp_tnrtu_003_sat_sp_tnrtu_004_sat_sp_tnrtu_001_fltrd_sa_flow_tnrtu_002_fltrd_sa_flow_tnrtu_003_fltrd_sa_flow_tnrtu_004_fltrd_sa_flow_tn...rtu_002_fltrd_lvl2_plenum_press_tnrtu_003_fltrd_lvl2_plenum_press_tnrtu_004_fltrd_lvl2_plenum_press_tnwifi_third_southwifi_fourth_southair_temp_set_1air_temp_set_2dew_point_temperature_set_1drelative_humidity_set_1solar_radiation_set_1
02018-01-01 00:00:0075.368.070.065.069.014131.44913998.75713558.5399265.604...0.050.050.050NaNNaN11.6411.518.179.0786.7
12018-01-01 00:01:0075.368.070.065.069.014164.42914065.25913592.9099265.604...0.050.040.046NaNNaN11.6411.518.179.0786.7
\n", "

2 rows × 65 columns

\n", "
" ], "text/plain": [ " date hp_hws_temp rtu_001_sat_sp_tn rtu_002_sat_sp_tn \\\n", "0 2018-01-01 00:00:00 75.3 68.0 70.0 \n", "1 2018-01-01 00:01:00 75.3 68.0 70.0 \n", "\n", " rtu_003_sat_sp_tn rtu_004_sat_sp_tn rtu_001_fltrd_sa_flow_tn \\\n", "0 65.0 69.0 14131.449 \n", "1 65.0 69.0 14164.429 \n", "\n", " rtu_002_fltrd_sa_flow_tn rtu_003_fltrd_sa_flow_tn \\\n", "0 13998.757 13558.539 \n", "1 14065.259 13592.909 \n", "\n", " rtu_004_fltrd_sa_flow_tn ... rtu_002_fltrd_lvl2_plenum_press_tn \\\n", "0 9265.604 ... 0.05 \n", "1 9265.604 ... 0.05 \n", "\n", " rtu_003_fltrd_lvl2_plenum_press_tn rtu_004_fltrd_lvl2_plenum_press_tn \\\n", "0 0.05 0.050 \n", "1 0.04 0.046 \n", "\n", " wifi_third_south wifi_fourth_south air_temp_set_1 air_temp_set_2 \\\n", "0 NaN NaN 11.64 11.51 \n", "1 NaN NaN 11.64 11.51 \n", "\n", " dew_point_temperature_set_1d relative_humidity_set_1 \\\n", "0 8.1 79.07 \n", "1 8.1 79.07 \n", "\n", " solar_radiation_set_1 \n", "0 86.7 \n", "1 86.7 \n", "\n", "[2 rows x 65 columns]" ] }, "execution_count": 325, "metadata": {}, "output_type": "execute_result" } ], "source": [ "rtu = [\"rtu_003\",\"rtu_004\",\"rtu_001\",\"rtu_002\"]\n", "# wing = [\"hvac_N\",\"hvac_S\"]\n", "env = [\"air_temp_set_1\",\"air_temp_set_2\",\"dew_point_temperature_set_1d\",\"relative_humidity_set_1\",\"solar_radiation_set_1\"]\n", "wifi=[\"wifi_third_south\",\"wifi_fourth_south\"]\n", "# any(sub in col for sub in zone) or\n", "energy_data = merged[[\"date\",\"hp_hws_temp\"]+[col for col in merged.columns if \n", " any(sub in col for sub in rtu) or any(sub in col for sub in wifi)]+env]\n", "df_filtered = energy_data[[col for col in energy_data.columns if 'Unnamed' not in col]]\n", "df_filtered = df_filtered[[col for col in df_filtered.columns if 'co2' not in col]]\n", "df_filtered = df_filtered[[col for col in df_filtered.columns if 'templogger' not in col]]\n", "# df_filtered = df_filtered.dropna()\n", "df_filtered.head(2)" ] }, { "cell_type": "code", "execution_count": 363, 
"metadata": {}, "outputs": [], "source": [ "df_filtered['date'] = pd.to_datetime(df_filtered['date'], format = \"%Y-%m-%d %H:%M:%S\")\n", "df_filtered = df_filtered[ (df_filtered.date.dt.date >date(2018, 5, 1)) & (df_filtered.date.dt.date< date(2020, 5, 1))] #(2018, 5, 1)\n", "# df_filtered.isna().sum()\n", "if df_filtered.isna().any().any():\n", " print(\"There are NA values in the DataFrame columns.\",df_filtered.isna().sum().tolist())" ] }, { "cell_type": "code", "execution_count": 364, "metadata": {}, "outputs": [], "source": [ "# df_filtered = df_filtered.loc[:,['date','hp_hws_temp',\n", "# 'rtu_003_sa_temp',\n", "# 'rtu_003_oadmpr_pct',\n", "# 'rtu_003_ra_temp',\n", "# 'rtu_003_oa_temp',\n", "# 'rtu_003_ma_temp',\n", "# 'rtu_003_sf_vfd_spd_fbk_tn',\n", "# 'rtu_003_rf_vfd_spd_fbk_tn',\n", "# 'air_temp_set_1',\n", "# 'air_temp_set_2',\n", "# 'dew_point_temperature_set_1d',\n", "# 'relative_humidity_set_1',\n", "# 'solar_radiation_set_1']]\n", "\n", "df_filtered = df_filtered.loc[:,['date','hp_hws_temp',\n", " 'rtu_003_sa_temp',\n", " 'rtu_003_oadmpr_pct',\n", " 'rtu_003_ra_temp',\n", " 'rtu_003_oa_temp',\n", " 'rtu_003_ma_temp',\n", " 'rtu_003_sf_vfd_spd_fbk_tn',\n", " 'rtu_003_rf_vfd_spd_fbk_tn',\n", " 'rtu_004_sa_temp',\n", " 'rtu_004_oadmpr_pct',\n", " 'rtu_004_ra_temp',\n", " 'rtu_004_oa_temp',\n", " 'rtu_004_ma_temp',\n", " 'rtu_004_sf_vfd_spd_fbk_tn',\n", " 'rtu_004_rf_vfd_spd_fbk_tn',\n", " 'rtu_001_sa_temp',\n", " 'rtu_001_oadmpr_pct',\n", " 'rtu_001_ra_temp',\n", " 'rtu_001_oa_temp',\n", " 'rtu_001_ma_temp',\n", " 'rtu_001_sf_vfd_spd_fbk_tn',\n", " 'rtu_001_rf_vfd_spd_fbk_tn',\n", " 'rtu_002_sa_temp',\n", " 'rtu_002_oadmpr_pct',\n", " 'rtu_002_ra_temp',\n", " 'rtu_002_oa_temp',\n", " 'rtu_002_ma_temp',\n", " 'rtu_002_sf_vfd_spd_fbk_tn',\n", " 'rtu_002_rf_vfd_spd_fbk_tn',\n", " 'air_temp_set_1',\n", " 'air_temp_set_2',\n", " 'dew_point_temperature_set_1d',\n", " 'relative_humidity_set_1',\n", " 'solar_radiation_set_1']]" ] }, { "cell_type": "code", 
"execution_count": 365, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[]\n", "[]\n" ] } ], "source": [ "df_filtered = df_filtered.dropna()\n", "# df_filtered.to_csv(\"sample_data.csv\",index=False)\n", "\n", "#----------------------------------------------------------------------------------\n", "# testdataset_df = df_filtered[(df_filtered.date.dt.date >date(2019, 7, 21))]\n", "\n", "# traindataset_df = df_filtered[(df_filtered.date.dt.date date(2020, 1, 1))]\n", "\n", "traindataset_df = df_filtered[(df_filtered.date.dt.date " ] }, "execution_count": 201, "metadata": {}, "output_type": "execute_result" } ], "source": [ "#2 rtu model\n", "\n", "# train,test = traindataset,testdataset\n", "\n", "# def create_dataset(dataset,time_step):\n", "# x = [[] for _ in range(20)] \n", "# Y = []\n", "# for i in range(len(dataset) - time_step - 1):\n", "# for j in range(20):\n", "# x[j].append(dataset[i:(i + time_step), j])\n", "# Y.append([dataset[i + time_step, 0],dataset[i + time_step, 1],dataset[i + time_step, 2],dataset[i + time_step, 3],\n", "# dataset[i + time_step, 4],dataset[i + time_step, 5],\n", "# dataset[i + time_step, 6],dataset[i + time_step, 7],\n", "# dataset[i + time_step, 8],dataset[i + time_step, 9],dataset[i + time_step, 10],\n", "# dataset[i + time_step, 11],dataset[i + time_step, 12],\n", "# dataset[i + time_step, 13],dataset[i + time_step, 14]])\n", "# x= [np.array(feature_list) for feature_list in x]\n", "# Y = np.reshape(Y,(len(Y),15))\n", "# return np.stack(x,axis=2),Y\n", "\n", "# time_step = 30\n", "# X_train, y_train = create_dataset(train, time_step)\n", "# X_test, y_test = create_dataset(test, time_step)\n", "\n", "\n", "# model = Sequential()\n", "# model.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))\n", "# model.add(LSTM(units=50, return_sequences=True))\n", "# model.add(LSTM(units=30))\n", "# model.add(Dense(units=15))\n", "\n", "# 
model.compile(optimizer='adam', loss='mean_squared_error')\n", "\n", "# checkpoint_path = \"lstm_2rtu_smooth_02.keras\"\n", "# checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "# model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=5, batch_size=64, verbose=1, callbacks=[checkpoint_callback])" ] }, { "cell_type": "code", "execution_count": 340, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "c:\\Users\\jerin\\anaconda3\\envs\\smartbuilding\\Lib\\site-packages\\keras\\src\\layers\\rnn\\rnn.py:205: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n", " super().__init__(**kwargs)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/5\n", "\u001b[1m7132/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 25ms/step - loss: 0.0395\n", "Epoch 1: val_loss improved from inf to 0.06411, saving model to lstm_4rtu_smooth_04.keras\n", "\u001b[1m7133/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m247s\u001b[0m 34ms/step - loss: 0.0395 - val_loss: 0.0641\n", "Epoch 2/5\n", "\u001b[1m7131/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 25ms/step - loss: 0.0012\n", "Epoch 2: val_loss improved from 0.06411 to 0.04068, saving model to lstm_4rtu_smooth_04.keras\n", "\u001b[1m7133/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m248s\u001b[0m 35ms/step - loss: 0.0012 - val_loss: 0.0407\n", "Epoch 3/5\n", "\u001b[1m7133/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 25ms/step - loss: 7.2848e-04\n", "Epoch 3: val_loss improved from 0.04068 to 0.03509, saving model to lstm_4rtu_smooth_04.keras\n", 
"\u001b[1m7133/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m245s\u001b[0m 34ms/step - loss: 7.2848e-04 - val_loss: 0.0351\n", "Epoch 4/5\n", "\u001b[1m7132/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 25ms/step - loss: 5.7519e-04\n", "Epoch 4: val_loss did not improve from 0.03509\n", "\u001b[1m7133/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m245s\u001b[0m 34ms/step - loss: 5.7520e-04 - val_loss: 0.0400\n", "Epoch 5/5\n", "\u001b[1m7132/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 25ms/step - loss: 5.3138e-04\n", "Epoch 5: val_loss did not improve from 0.03509\n", "\u001b[1m7133/7133\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m244s\u001b[0m 34ms/step - loss: 5.3136e-04 - val_loss: 0.0357\n" ] }, { "data": { "text/plain": [ "" ] }, "execution_count": 340, "metadata": {}, "output_type": "execute_result" } ], "source": [ "train,test = traindataset,testdataset\n", "\n", "def create_dataset(dataset,time_step):\n", " x = [[] for _ in range(34)] \n", " Y = []\n", " for i in range(len(dataset) - time_step - 1):\n", " for j in range(34):\n", " x[j].append(dataset[i:(i + time_step), j])\n", " Y.append([dataset[i + time_step, 0],dataset[i + time_step, 1],dataset[i + time_step, 2],dataset[i + time_step, 3],\n", " dataset[i + time_step, 4],dataset[i + time_step, 5],\n", " dataset[i + time_step, 6],dataset[i + time_step, 7],\n", " dataset[i + time_step, 8],dataset[i + time_step, 9],dataset[i + time_step, 10],\n", " dataset[i + time_step, 11],dataset[i + time_step, 12],\n", " dataset[i + time_step, 13],dataset[i + time_step, 14],\n", " dataset[i + time_step, 15],dataset[i + time_step, 16],dataset[i + time_step, 17],\n", " dataset[i + time_step, 18],dataset[i + time_step, 19],\n", " dataset[i + time_step, 20],dataset[i + time_step, 21],\n", " dataset[i + time_step, 22],dataset[i 
+ time_step, 23],dataset[i + time_step, 24],\n", " dataset[i + time_step, 25],dataset[i + time_step, 26],\n", " dataset[i + time_step, 27],dataset[i + time_step, 28]])\n", " x= [np.array(feature_list) for feature_list in x]\n", " Y = np.reshape(Y,(len(Y),29))\n", " return np.stack(x,axis=2),Y\n", "\n", "time_step = 30\n", "X_train, y_train = create_dataset(train, time_step)\n", "X_test, y_test = create_dataset(test, time_step)\n", "\n", "\n", "model = Sequential()\n", "model.add(LSTM(units=80, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))\n", "model.add(LSTM(units=60, return_sequences=True))\n", "model.add(LSTM(units=60))\n", "model.add(Dense(units=29))\n", "\n", "model.compile(optimizer='adam', loss='mean_squared_error')\n", "\n", "checkpoint_path = \"lstm_4rtu_smooth_04.keras\"\n", "checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=5, batch_size=64, verbose=1, callbacks=[checkpoint_callback])" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/5\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 8.9528e-04\n", "Epoch 1: val_loss did not improve from 0.19006\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m239s\u001b[0m 30ms/step - loss: 8.9529e-04 - val_loss: 0.2020\n", "Epoch 2/5\n", "\u001b[1m8060/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 8.0121e-04\n", "Epoch 2: val_loss improved from 0.19006 to 0.18934, saving model to lstm_4rtu_smooth_03.keras\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m234s\u001b[0m 29ms/step - loss: 8.0122e-04 - 
val_loss: 0.1893\n", "Epoch 3/5\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 9.8454e-04\n", "Epoch 3: val_loss did not improve from 0.18934\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m231s\u001b[0m 29ms/step - loss: 9.8453e-04 - val_loss: 0.2004\n", "Epoch 4/5\n", "\u001b[1m8060/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 7.8113e-04\n", "Epoch 4: val_loss did not improve from 0.18934\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m232s\u001b[0m 29ms/step - loss: 7.8114e-04 - val_loss: 0.2031\n", "Epoch 5/5\n", "\u001b[1m8059/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m━\u001b[0m \u001b[1m0s\u001b[0m 19ms/step - loss: 8.8365e-04\n", "Epoch 5: val_loss did not improve from 0.18934\n", "\u001b[1m8061/8061\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m234s\u001b[0m 29ms/step - loss: 8.8360e-04 - val_loss: 0.1915\n" ] }, { "data": { "text/plain": [ "" ] }, "execution_count": 19, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=5, batch_size=64, verbose=1, callbacks=[checkpoint_callback])" ] }, { "cell_type": "code", "execution_count": 284, "metadata": {}, "outputs": [], "source": [ "from tensorflow.keras.models import load_model\n", "checkpoint_path = \"lstm_4rtu_smooth_02.keras\"\n", "model = load_model(checkpoint_path)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 341, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[1m11765/11765\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m84s\u001b[0m 7ms/step\n" ] } ], 
"source": [ "test_predict1 = model.predict(X_test)\n", "# train_predict1 = model.predict(X_train)" ] }, { "cell_type": "code", "execution_count": 362, "metadata": {}, "outputs": [], "source": [ "%matplotlib qt\n", "var = 13\n", "plt.plot(y_test[:,var], label='Original Testing Data', color='blue')\n", "plt.plot(test_predict2[:,var], label='Predicted Testing Data', color='red',alpha=0.8)\n", "# anomalies = np.where(abs(test_predict1[:,var] - y_test[:,var]) > 0.38)\n", "# plt.scatter(anomalies,test_predict1[anomalies,var], color='black',marker =\"o\",s=100 )\n", "\n", "\n", "plt.title('Testing Data - Predicted vs Actual')\n", "plt.xlabel('Time')\n", "plt.ylabel('Value')\n", "plt.legend()\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": 82, "metadata": {}, "outputs": [], "source": [ "params = [\n", " 'rtu_003_sa_temp',\n", " 'rtu_003_oadmpr_pct',\n", " 'rtu_003_ra_temp',\n", " 'rtu_003_oa_temp',\n", " 'rtu_003_ma_temp',\n", " 'rtu_003_sf_vfd_spd_fbk_tn',\n", " 'rtu_003_rf_vfd_spd_fbk_tn']\n", "\n", "idx_2_params = {}\n", "for i, param in enumerate(params):\n", " idx_2_params[i] = param" ] }, { "cell_type": "code", "execution_count": 83, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{0: 'rtu_003_sa_temp',\n", " 1: 'rtu_003_oadmpr_pct',\n", " 2: 'rtu_003_ra_temp',\n", " 3: 'rtu_003_oa_temp',\n", " 4: 'rtu_003_ma_temp',\n", " 5: 'rtu_003_sf_vfd_spd_fbk_tn',\n", " 6: 'rtu_003_rf_vfd_spd_fbk_tn'}" ] }, "execution_count": 83, "metadata": {}, "output_type": "execute_result" } ], "source": [ "idx_2_params" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "KMEANS" ] }, { "cell_type": "code", "execution_count": 195, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
KMeans(n_clusters=1, random_state=10)
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" ], "text/plain": [ "KMeans(n_clusters=1, random_state=10)" ] }, "execution_count": 195, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from sklearn.cluster import KMeans\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from sklearn.decomposition import PCA\n", "\n", "np.random.seed(0)\n", "X1 = train_predict1[:,1:8] - y_train[:,1:8]\n", "X2 = train_predict1[:,8:15] - y_train[:,8:15]\n", "X3 = train_predict1[:,15:22] - y_train[:,15:22]\n", "X4 = train_predict1[:,22:29] - y_train[:,22:29]\n", "\n", "k = 1\n", "\n", "kmeans1 = KMeans(n_clusters=k, random_state=10) #2 for without smoothing\n", "kmeans1.fit(X1)\n", "kmeans2 = KMeans(n_clusters=k, random_state=10) #2 for without smoothing\n", "kmeans2.fit(X2)\n", "kmeans3 = KMeans(n_clusters=k, random_state=10) #2 for without smoothing\n", "kmeans3.fit(X3)\n", "kmeans4 = KMeans(n_clusters=k, random_state=10) #2 for without smoothing\n", "kmeans4.fit(X4)\n", "\n", "\n", "\n", "# pca = PCA(n_components=2)\n", "# X = pca.fit_transform(X2)\n", "\n", "\n", "\n", "\n", "# # Getting the cluster centers and labels\n", "# centroids = pca.transform(kmeans.cluster_centers_)\n", "# labels = kmeans.labels_\n", "# print(kmeans.cluster_centers_)\n", "# Plotting the data points and cluster centers\n", "# plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "# plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', c='red', s=200, linewidths=2)\n", "# plt.title('KMeans Clustering')\n", "# plt.xlabel('Feature 1')\n", "# plt.ylabel('Feature 2')\n", "# plt.show()\n" ] }, { "cell_type": "code", "execution_count": 249, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['kmeans_model4.pkl']" ] }, "execution_count": 249, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import joblib\n", "# joblib.dump(kmeans4, 'kmeans_model4.pkl')\n", "# joblib.dump(pca, 'pca_model.pkl')\n" ] }, { "cell_type": "code", "execution_count": 196, "metadata": {}, "outputs": [ { "data": 
{ "text/plain": [ "['rtu_003_oadmpr_pct',\n", " 'rtu_003_oa_temp',\n", " 'rtu_003_ra_temp',\n", " 'rtu_003_rf_vfd_spd_fbk_tn',\n", " 'rtu_003_sf_vfd_spd_fbk_tn',\n", " 'rtu_003_ma_temp',\n", " 'rtu_003_sa_temp']" ] }, "execution_count": 196, "metadata": {}, "output_type": "execute_result" } ], "source": [ "[ params[i] for i in np.flip(np.argsort(abs(kmeans1.cluster_centers_[0])))]" ] }, { "cell_type": "code", "execution_count": 323, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 323, "metadata": {}, "output_type": "execute_result" } ], "source": [ "%matplotlib qt\n", "\n", "distance1 = np.linalg.norm((test_predict1[:,1:8]-y_test[:,1:8])-kmeans1.cluster_centers_[0], ord=2, axis = 1)\n", "distance2 = np.linalg.norm((test_predict1[:,8:15]-y_test[:,8:15])-kmeans2.cluster_centers_[0], ord=2, axis = 1)\n", "distance3 = np.linalg.norm((test_predict1[:,15:22]-y_test[:,15:22])-kmeans3.cluster_centers_[0], ord=2, axis = 1)\n", "distance4 = np.linalg.norm((test_predict1[:,22:29]-y_test[:,22:29])-kmeans4.cluster_centers_[0], ord=2, axis = 1)\n", "# distance_a = np.linalg.norm(test_predict1[:,8:]-a, ord=2, axis = 1)\n", "# plt.plot(y_test[:,23],alpha=0.6)\n", "plt.plot(y_test[:,3],alpha=0.6)\n", "# plt.plot(y_test[:,6],alpha=0.6)\n", "# plt.plot(y_test[:,7],alpha=0.6)\n", "plt.plot(test_predict1[:,3],alpha=0.6)\n", "plt.plot(abs(distance1)>2)\n", "# plt.plot(abs(distance2)>3.5)\n", "# plt.plot(abs(distance3)>5)\n", "# plt.plot(abs(distance4)>5)\n", "# plt.plot(distance_a>8,c='g')\n", "\n", " " ] }, { "cell_type": "code", "execution_count": 277, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 277, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# fault = []\n", "# for i in range(60,len(distance4)):\n", "# if np.mean((abs(distance4)>5)[i-60:i])>0.60 :\n", "# fault.append(1)\n", "# else:\n", "# fault.append(0)\n", "plt.plot(fault,c='r')\n", "plt.plot(abs(distance4)>5,c='b')\n" ] }, { 
"cell_type": "code", "execution_count": 126, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 126, "metadata": {}, "output_type": "execute_result" } ], "source": [ "test[:,23]\n", "arr = test[:, 23]\n", "condition = (arr < 5) & (np.arange(len(arr)) < 35000) & (np.arange(len(arr)) > 30000)\n", "arr[condition] = 2\n", "plt.plot(test[:,23])\n" ] }, { "cell_type": "code", "execution_count": 77, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([[ 5.8607887e-02, -2.4713947e-01, 2.4978706e-01, -7.8289807e-01,\n", " -2.0218764e-01, -2.8860569e-01, 2.7817219e-01, 2.4209845e-01],\n", " [-2.6845999e-02, 1.2596852e-01, 9.6294099e-01, 2.0099232e-01,\n", " 3.3391420e-02, 7.7613303e-04, -7.1204931e-02, -9.7836025e-02]],\n", " dtype=float32)" ] }, "execution_count": 77, "metadata": {}, "output_type": "execute_result" } ], "source": [ "pca.components_" ] }, { "cell_type": "code", "execution_count": 84, "metadata": {}, "outputs": [], "source": [ "k = 60\n", "X= test_predict1 - y_test\n", "processed_data = []\n", "feat_df = pd.DataFrame(columns=[\"mean\",\"std\",])\n", "for i in range(0,len(X), 40 ):\n", " mean = X[i:i+k].mean(axis = 0)\n", " std = X[i:i+k].std(axis = 0)\n", " max = X[i:i+k].max(axis = 0)\n", " min = X[i:i+k].min(axis = 0)\n", " iqr = np.percentile(X[i:i+k], 75, axis=0) - np.percentile(X[i:i+k], 25,axis=0)\n", " data = np.concatenate([mean, std, max, min, iqr])\n", " processed_data.append([data])\n", "processed_data = np.concatenate(processed_data,axis=0) " ] }, { "cell_type": "code", "execution_count": 38, "metadata": {}, "outputs": [ { "ename": "NameError", "evalue": "name 'processed_data' is not defined", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", "Cell \u001b[1;32mIn[38], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m X \u001b[38;5;241m=\u001b[39m 
\u001b[43mprocessed_data\u001b[49m\n\u001b[0;32m 4\u001b[0m k \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 6\u001b[0m kmeans \u001b[38;5;241m=\u001b[39m KMeans(n_clusters\u001b[38;5;241m=\u001b[39mk,random_state\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m4\u001b[39m)\n", "\u001b[1;31mNameError\u001b[0m: name 'processed_data' is not defined" ] } ], "source": [ "X = processed_data\n", "\n", "\n", "k = 2\n", "\n", "kmeans = KMeans(n_clusters=k,random_state=4)\n", "\n", "kmeans.fit(X)\n", "\n", "pca = PCA(n_components=2)\n", "X = pca.fit_transform(X)\n", "\n", "centroids = pca.transform(kmeans.cluster_centers_)\n", "labels = kmeans.labels_\n", "\n", "# Plotting the data points and cluster centers\n", "plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', c='red', s=200, linewidths=2)\n", "plt.title('KMeans Clustering')\n", "plt.xlabel('Feature 1')\n", "plt.ylabel('Feature 2')\n", "plt.show()\n" ] }, { "cell_type": "code", "execution_count": 88, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([[-1.76482953e-02, -4.84286715e-03],\n", " [ 2.35918490e+01, 6.47383719e+00]])" ] }, "execution_count": 88, "metadata": {}, "output_type": "execute_result" } ], "source": [ "kmeans.cluster_centers_" ] }, { "cell_type": "code", "execution_count": 123, "metadata": {}, "outputs": [ { "ename": "ValueError", "evalue": "operands could not be broadcast together with shapes (481579,2) (15,) ", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)", "Cell \u001b[1;32mIn[123], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m distance \u001b[38;5;241m=\u001b[39m 
np\u001b[38;5;241m.\u001b[39mlinalg\u001b[38;5;241m.\u001b[39mnorm(\u001b[43mpca\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtest_predict1\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[43mkmeans\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcluster_centers_\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m]\u001b[49m, \u001b[38;5;28mord\u001b[39m\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m2\u001b[39m, axis \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m 2\u001b[0m plt\u001b[38;5;241m.\u001b[39mplot(y_test[:,\u001b[38;5;241m6\u001b[39m])\n\u001b[0;32m 3\u001b[0m plt\u001b[38;5;241m.\u001b[39mplot(distance\u001b[38;5;241m>\u001b[39m\u001b[38;5;241m50\u001b[39m,c\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124m'\u001b[39m)\n", "\u001b[1;31mValueError\u001b[0m: operands could not be broadcast together with shapes (481579,2) (15,) " ] } ], "source": [ "distance = np.linalg.norm(pca.transform(test_predict1)-kmeans.cluster_centers_[1], ord=2, axis = 1)\n", "plt.plot(y_test[:,6])\n", "plt.plot(distance>50,c='r')" ] }, { "cell_type": "code", "execution_count": 86, "metadata": {}, "outputs": [], "source": [ "# from sklearn.mixture import GaussianMixture\n", "# import numpy as np\n", "# import matplotlib.pyplot as plt\n", "\n", "# X = processed_data\n", "\n", "\n", "# gmm = GaussianMixture(n_components=2,random_state=10)\n", "# gmm.fit(X)\n", "# labels = gmm.predict(X)\n", "\n", "\n", "# plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "# plt.title('GMM Clustering')\n", "# plt.xlabel('Feature 1')\n", "# plt.ylabel('Feature 2')\n", "# plt.show()\n" ] }, { "cell_type": "code", "execution_count": 297, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 297, "metadata": {}, "output_type": "execute_result" } ], "source": [ "merged['date'] = 
pd.to_datetime(merged['date'], format = \"%Y-%m-%d %H:%M:%S\")\n", "plt.plot(merged['date'],merged['wifi_fourth_south'])\n", "# plt.plot(merged['date'],merged['hp_hws_temp'])\n", "# plt.plot(df_filtered['date'],df_filtered['hp_hws_temp'])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 234, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "False" ] }, "execution_count": 234, "metadata": {}, "output_type": "execute_result" } ], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "smartbuilding", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.8" } }, "nbformat": 4, "nbformat_minor": 2 }