{ "cells": [ { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "import pandas as pd \n", "from datetime import datetime \n", "from datetime import date\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import pandas as pd\n", "from keras.models import Sequential\n", "from keras.layers import LSTM, Dense\n", "from sklearn.model_selection import train_test_split\n", "from sklearn.preprocessing import MinMaxScaler,StandardScaler\n", "from keras.callbacks import ModelCheckpoint\n", "import tensorflow as tf\n", "import joblib" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "merged = pd.read_csv(r'../data/long_merge.csv')" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "zones = [69, 68, 67, 66, 65, 64, 42, 41, 40, 39, 38, 37, 36]\n", "rtu = 1\n", "cols = []\n", "\n", "for zone in zones:\n", " for column in merged.columns:\n", " if (\n", " f\"zone_0{zone}\" in column\n", " and \"co2\" not in column\n", " and \"hw_valve\" not in column\n", " and \"cooling_sp\" not in column\n", " and \"heating_sp\" not in column\n", " ):\n", " cols.append(column)\n", "\n", "cols = (\n", " [\"date\"]\n", " + cols\n", " + [\n", " f\"rtu_00{rtu}_fltrd_sa_flow_tn\",\n", " f\"rtu_00{rtu}_sa_temp\", \n", " \"air_temp_set_1\",\n", " \"air_temp_set_2\",\n", " \"dew_point_temperature_set_1d\",\n", " \"relative_humidity_set_1\",\n", " \"solar_radiation_set_1\",\n", " ]\n", ")\n", "\n", "for zone in zones:\n", " for column in merged.columns:\n", " if f\"zone_0{zone}\" in column:\n", " if \"cooling_sp\" in column or \"heating_sp\" in column:\n", " cols.append(column)\n", " \n", "input_dataset = merged[cols]" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\arbal\\AppData\\Local\\Temp\\ipykernel_368\\4293840618.py:1: SettingWithCopyWarning: \n", "A value is trying to be set on a copy of a slice from a DataFrame.\n", "Try using .loc[row_indexer,col_indexer] = value instead\n", "\n", "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", " input_dataset['date'] = pd.to_datetime(input_dataset['date'], format = \"%Y-%m-%d %H:%M:%S\")\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "There are NA values in the DataFrame columns.\n" ] } ], "source": [ "input_dataset['date'] = pd.to_datetime(input_dataset['date'], format = \"%Y-%m-%d %H:%M:%S\")\n", "df_filtered = input_dataset[ (input_dataset.date.dt.date >date(2019, 1, 1)) & (input_dataset.date.dt.date< date(2021, 1, 1))]\n", "\n", "if df_filtered.isna().any().any():\n", " print(\"There are NA values in the DataFrame columns.\")" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ "testdataset_df = df_filtered[(df_filtered.date.dt.date >date(2020, 3, 1)) & (df_filtered.date.dt.date date(2019, 11, 8))]\n", "\n", "traindataset_df = df_filtered[(df_filtered.date.dt.date >date(2019, 3, 1)) & (df_filtered.date.dt.date date(2020, 7, 1)) & (df_filtered.date.dt.date 11\u001b[0m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX_train\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "KeyboardInterrupt: training was interrupted manually during model.fit(); the full traceback is omitted here." ] } ], "source": [ "model = Sequential()\n", "model.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))\n", "model.add(LSTM(units=50, return_sequences=True))\n", "model.add(LSTM(units=30))\n", "model.add(Dense(units=y_train.shape[1]))\n", "\n", "model.compile(optimizer='adam', loss='mean_squared_error')\n", "\n", "# Keep only the weights with the lowest validation loss seen during training\n", "checkpoint_path = \"lstm_vav_01.keras\"\n", "checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n", "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3, batch_size=128, verbose=1, callbacks=[checkpoint_callback])" ] },
{ "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "import keras\n", "checkpoint_path = \"lstm_vav_01.keras\"\n", "\n", "# Reload the best checkpointed model from disk\n", "model = keras.models.load_model(checkpoint_path)" ] },
{ "cell_type": "code", "execution_count": 55, "metadata": {}, "outputs": [], "source": [ "model.load_weights(checkpoint_path)" ] },
{ "cell_type": "code", "execution_count": 56, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[1m12244/12244\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m110s\u001b[0m 9ms/step\n" ] } ], "source": [ "# Predict every output column for each test window\n", "test_predict1 = model.predict(X_test)" ] },
{ "cell_type": "code", "execution_count": 60, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{0: 'zone_069_temp',\n", " 1: 'zone_069_fan_spd',\n", " 2: 'zone_068_temp',\n", " 3: 'zone_068_fan_spd',\n", " 4: 'zone_067_temp',\n", " 5: 'zone_067_fan_spd',\n", " 6: 'zone_066_temp',\n", " 7: 'zone_066_fan_spd',\n", " 8: 'zone_065_temp',\n", " 9: 'zone_065_fan_spd',\n", " 10: 'zone_064_temp',\n", " 11: 'zone_064_fan_spd',\n", " 12: 'zone_042_temp',\n", " 13: 'zone_042_fan_spd',\n", " 14: 'zone_041_temp',\n", " 15: 'zone_041_fan_spd',\n", " 16: 'zone_040_temp',\n", " 17: 'zone_040_fan_spd',\n", " 18: 'zone_039_temp',\n", " 19: 'zone_039_fan_spd',\n", " 20: 'zone_038_temp',\n", " 21: 'zone_038_fan_spd',\n", " 22: 'zone_037_temp',\n", " 23: 'zone_037_fan_spd',\n", " 24: 'zone_036_temp',\n", " 25: 'zone_036_fan_spd',\n", " 26: 'rtu_001_fltrd_sa_flow_tn',\n", " 27: 'rtu_001_sa_temp',\n", " 28: 'air_temp_set_1',\n", " 29: 'air_temp_set_2',\n", " 30: 'dew_point_temperature_set_1d',\n", " 31: 'relative_humidity_set_1',\n", " 32: 'solar_radiation_set_1',\n", " 33: 'zone_069_cooling_sp',\n", " 34: 'zone_069_heating_sp',\n", " 35: 'zone_067_cooling_sp',\n", " 36: 'zone_067_heating_sp',\n", " 37: 'zone_066_cooling_sp',\n", " 38: 'zone_066_heating_sp',\n", " 39: 'zone_065_cooling_sp',\n", " 40: 'zone_065_heating_sp',\n", " 41: 'zone_064_cooling_sp',\n", " 42: 'zone_064_heating_sp',\n", " 43: 'zone_042_cooling_sp',\n", " 44: 'zone_042_heating_sp',\n", " 45: 'zone_041_cooling_sp',\n", " 46: 'zone_041_heating_sp',\n", " 47: 'zone_039_cooling_sp',\n", " 48: 'zone_039_heating_sp',\n", " 49: 'zone_038_cooling_sp',\n", " 50: 'zone_038_heating_sp',\n", " 51: 'zone_037_cooling_sp',\n", " 52: 'zone_037_heating_sp',\n", " 53: 'zone_036_cooling_sp',\n", " 54: 'zone_036_heating_sp'}" ] }, "execution_count": 60, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Map each output column index back to its column name (order matches the y_train / test_predict1 columns)\n", "idx_to_col = {i: col for i, col in enumerate(traindataset_df.drop(columns=['date']).columns)}\n", "idx_to_col" ] },
{ "cell_type": "code", "execution_count": 84, "metadata": {}, "outputs": [], "source": [ "%matplotlib qt\n", "plt.figure()\n", "var = 10\n", "plt.plot(y_test[:, var], label='Original Testing Data', color='blue')\n", "plt.plot(test_predict1[:, var], label='Predicted Testing Data', color='red', alpha=0.8)\n", "\n", "# Flag time steps where the prediction error for this variable exceeds 0.5\n", "anomalies = np.where(abs(test_predict1[:, var] - y_test[:, var]) > 0.5)[0]\n", "plt.scatter(anomalies, test_predict1[anomalies, var], color='black', marker='o', s=100)\n", "\n", "plt.title('Testing Data - Predicted vs Actual')\n", "plt.xlabel('Time')\n", "plt.ylabel('Value')\n", "plt.legend()\n", "plt.show()" ] },
{ "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [], "source": [ "from sklearn.mixture import GaussianMixture\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from sklearn.decomposition import PCA\n", "\n", "# Cluster the prediction residuals (predicted minus actual) with a 2-component GMM\n", "np.random.seed(0)\n", "X = test_predict1 - y_test\n", "\n", "# Project the residuals onto 2 principal components before fitting and plotting\n", "pca = PCA(n_components=2)\n", "X = pca.fit_transform(X)\n", "\n", "gmm = GaussianMixture(n_components=2)\n", "gmm.fit(X)\n", "\n", "# Cluster labels for each test sample\n", "labels = gmm.predict(X)\n", "\n", "plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "plt.title('GMM Clustering')\n", "plt.xlabel('Feature 1')\n", "plt.ylabel('Feature 2')\n", "plt.show()" ] },
{ "cell_type": "code", "execution_count": 86, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['kmeans_vav_1.pkl']" ] }, "execution_count": 86, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from sklearn.cluster import KMeans\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from sklearn.decomposition import PCA\n", "\n", "# Cluster the raw (unprojected) prediction residuals with k-means\n", "np.random.seed(0)\n", "X = test_predict1 - y_test\n", "\n", "k = 2\n", "kmeans = KMeans(n_clusters=k)\n", "kmeans.fit(X)\n", "\n", "# PCA is only used to project the points and centroids down to 2-D for plotting\n", "pca = PCA(n_components=2)\n", "X = pca.fit_transform(X)\n", "\n", "centroids = pca.transform(kmeans.cluster_centers_)\n", "labels = kmeans.labels_\n", "\n", "plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', c='red', s=200, linewidths=2)\n", "plt.title('KMeans Clustering')\n", "plt.xlabel('Feature 1')\n", "plt.ylabel('Feature 2')\n", "plt.show()\n", "\n", "# Persist the fitted k-means model for reuse\n", "joblib.dump(kmeans, 'kmeans_vav_1.pkl')" ] },
{ "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [], "source": [ "# Summarise the residuals in non-overlapping windows of k samples: mean, std, max, min\n", "# and IQR per output column, concatenated into one feature vector per window.\n", "k = 60\n", "X = test_predict1 - y_test\n", "processed_data = []\n", "for i in range(0, len(X), k):\n", "    window = X[i:i + k]\n", "    w_mean = window.mean(axis=0)\n", "    w_std = window.std(axis=0)\n", "    w_max = window.max(axis=0)\n", "    w_min = window.min(axis=0)\n", "    w_iqr = np.percentile(window, 75, axis=0) - np.percentile(window, 25, axis=0)\n", "    processed_data.append(np.concatenate([w_mean, w_std, w_max, w_min, w_iqr]))\n", "processed_data = np.array(processed_data)" ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Cluster the windowed residual features into 3 groups\n", "X = processed_data\n", "\n", "kmeans = KMeans(n_clusters=3, algorithm='elkan', max_iter=1000, n_init=5)\n", "kmeans.fit(X)\n", "\n", "# 2-D PCA projection for plotting only\n", "pca = PCA(n_components=2)\n", "X = pca.fit_transform(X)\n", "\n", "centroids = pca.transform(kmeans.cluster_centers_)\n", "labels = kmeans.labels_\n", "\n", "plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', c='red', s=200, linewidths=2)\n", "plt.title('KMeans Clustering')\n", "plt.xlabel('Feature 1')\n", "plt.ylabel('Feature 2')\n", "plt.show()" ] },
{ "cell_type": "code", "execution_count": 21, "metadata": {}, "outputs": [], "source": [ "from sklearn.mixture import GaussianMixture\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from sklearn.decomposition import PCA\n", "\n", "# Fit a 2-component GMM to the windowed residual features\n", "np.random.seed(0)\n", "X = processed_data\n", "\n", "gmm = GaussianMixture(n_components=2, init_params='k-means++')\n", "gmm.fit(X)\n", "labels = gmm.predict(X)\n", "\n", "# 2-D PCA projection for plotting only\n", "pca = PCA(n_components=2)\n", "X = pca.fit_transform(X)\n", "\n", "plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "plt.title('GMM Clustering')\n", "plt.xlabel('Feature 1')\n", "plt.ylabel('Feature 2')\n", "plt.show()" ] },
{ "cell_type": "code", "execution_count": 38, "metadata": {}, "outputs": [], "source": [ "from sklearn.cluster import KMeans\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "\n", "# Two-cluster k-means on the per-sample residuals; the two clusters are then\n", "# annotated as 'Normal' and 'Anomaly' on the plot.\n", "np.random.seed(0)\n", "X = test_predict1 - y_test\n", "\n", "kmeans = KMeans(n_clusters=2)\n", "kmeans.fit(X)\n", "\n", "# 2-D PCA projection for plotting only\n", "pca = PCA(n_components=2)\n", "X = pca.fit_transform(X)\n", "\n", "centroids = pca.transform(kmeans.cluster_centers_)\n", "labels = kmeans.labels_\n", "\n", "plt.figure()\n", "plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis', alpha=0.5)\n", "plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', c='red', s=200, linewidths=2)\n", "plt.text(centroids[0, 0], centroids[0, 1], 'Normal', fontsize=12, color='red')\n", "plt.text(centroids[1, 0], centroids[1, 1], 'Anomaly', fontsize=12, color='red')\n", "plt.title('KMeans Clustering')\n", "plt.xlabel('Feature 1')\n", "plt.ylabel('Feature 2')\n", "plt.show()" ] },
{ "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "329810" ] }, "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Number of test samples assigned to cluster 0\n", "sum(labels == 0)" ] } ], "metadata": { "kernelspec": { "display_name": "tensorflow", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.8" } },
"nbformat": 4, "nbformat_minor": 2 }