levimohle committed on
Commit b98571e · 1 Parent(s): ee469ba

cleaned up notebooks

EnergyLSTM/EDA_lstm_energy.ipynb CHANGED
@@ -2,13 +2,14 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 60,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
9
  "import pandas as pd \n",
10
  "from datetime import datetime \n",
11
  "from datetime import timedelta\n",
 
12
  "import matplotlib.pyplot as plt\n",
13
  "# import seaborn as sns\n",
14
  "import numpy as np\n",
@@ -31,7 +32,7 @@
31
  },
32
  {
33
  "cell_type": "code",
34
- "execution_count": 61,
35
  "metadata": {},
36
  "outputs": [],
37
  "source": [
@@ -50,7 +51,7 @@
50
  },
51
  {
52
  "cell_type": "code",
53
- "execution_count": 62,
54
  "metadata": {
55
  "vscode": {
56
  "languageId": "ruby"
@@ -101,26 +102,6 @@
101
  "plt.show()"
102
  ]
103
  },
104
- {
105
- "cell_type": "code",
106
- "execution_count": 63,
107
- "metadata": {},
108
- "outputs": [
109
- {
110
- "data": {
111
- "text/plain": [
112
- "[<matplotlib.lines.Line2D at 0x1d98c8cdfd0>]"
113
- ]
114
- },
115
- "execution_count": 63,
116
- "metadata": {},
117
- "output_type": "execute_result"
118
- }
119
- ],
120
- "source": [
121
- "plt.plot(eed_1h['hvac_N'])"
122
- ]
123
- },
124
  {
125
  "cell_type": "markdown",
126
  "metadata": {},
@@ -140,7 +121,7 @@
140
  " one = timedelta(hours=1)\n",
141
  " secondTSr = secondTS[::-1].copy()\n",
142
  " firstTSr = firstTS[::-1].copy()\n",
143
- " indexr = pd.date_range(start=firstTS.index[0], end=secondTS.index[-1], freq='h')\n",
144
  " firstTSr.index = indexr[-len(firstTSr):]\n",
145
  " secondTSr.index = indexr[:len(secondTSr)]\n",
146
  " \n",
@@ -161,7 +142,7 @@
161
  },
162
  {
163
  "cell_type": "code",
164
- "execution_count": 67,
165
  "metadata": {},
166
  "outputs": [],
167
  "source": [
@@ -195,7 +176,7 @@
195
  },
196
  {
197
  "cell_type": "code",
198
- "execution_count": 68,
199
  "metadata": {},
200
  "outputs": [],
201
  "source": [
@@ -209,7 +190,7 @@
209
  " for ii in range(len(dfs)-1):\n",
210
  " seasonal_periods = max(min([len(dfs[ii]), len(dfs[ii+1])]) // 2 - 10, 2)\n",
211
  " \n",
212
- " if seasonal_periods > 2*24*7 + 10: # Using more than 1 week of seasonal patterns is not necessary\n",
213
  " seasonal_periods = 24*7\n",
214
  " interpolation = fillgap(dfs[ii][col], dfs[ii+1][col], seasonal_periods)\n",
215
  " else:\n",
@@ -225,148 +206,19 @@
225
  },
226
  {
227
  "cell_type": "code",
228
- "execution_count": 69,
229
  "metadata": {},
230
- "outputs": [
231
- {
232
- "name": "stderr",
233
- "output_type": "stream",
234
- "text": [
235
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
236
- " warnings.warn(\n",
237
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
238
- " warnings.warn(\n",
239
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
240
- " warnings.warn(\n",
241
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
242
- " warnings.warn(\n",
243
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
244
- " warnings.warn(\n",
245
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
246
- " warnings.warn(\n",
247
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
248
- " warnings.warn(\n",
249
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
250
- " warnings.warn(\n",
251
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
252
- " warnings.warn(\n",
253
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
254
- " warnings.warn(\n",
255
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
256
- " warnings.warn(\n",
257
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
258
- " warnings.warn(\n",
259
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
260
- " warnings.warn(\n",
261
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
262
- " warnings.warn(\n",
263
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
264
- " warnings.warn(\n",
265
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
266
- " warnings.warn(\n",
267
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
268
- " warnings.warn(\n",
269
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
270
- " warnings.warn(\n",
271
- "c:\\Users\\levim\\anaconda3\\envs\\experiments\\lib\\site-packages\\statsmodels\\tsa\\holtwinters\\model.py:917: ConvergenceWarning: Optimization failed to converge. Check mle_retvals.\n",
272
- " warnings.warn(\n"
273
- ]
274
- },
275
- {
276
- "data": {
277
- "text/html": [
278
- "<div>\n",
279
- "<style scoped>\n",
280
- " .dataframe tbody tr th:only-of-type {\n",
281
- " vertical-align: middle;\n",
282
- " }\n",
283
- "\n",
284
- " .dataframe tbody tr th {\n",
285
- " vertical-align: top;\n",
286
- " }\n",
287
- "\n",
288
- " .dataframe thead th {\n",
289
- " text-align: right;\n",
290
- " }\n",
291
- "</style>\n",
292
- "<table border=\"1\" class=\"dataframe\">\n",
293
- " <thead>\n",
294
- " <tr style=\"text-align: right;\">\n",
295
- " <th></th>\n",
296
- " <th>hvac_N</th>\n",
297
- " <th>hvac_S</th>\n",
298
- " <th>air_temp_set_1</th>\n",
299
- " <th>solar_radiation_set_1</th>\n",
300
- " </tr>\n",
301
- " </thead>\n",
302
- " <tbody>\n",
303
- " <tr>\n",
304
- " <th>2018-01-01 01:00:00</th>\n",
305
- " <td>37.525001</td>\n",
306
- " <td>19.395</td>\n",
307
- " <td>10.8900</td>\n",
308
- " <td>2.125</td>\n",
309
- " </tr>\n",
310
- " <tr>\n",
311
- " <th>2018-01-01 02:00:00</th>\n",
312
- " <td>37.750001</td>\n",
313
- " <td>22.775</td>\n",
314
- " <td>10.7550</td>\n",
315
- " <td>0.000</td>\n",
316
- " </tr>\n",
317
- " <tr>\n",
318
- " <th>2018-01-01 03:00:00</th>\n",
319
- " <td>37.550001</td>\n",
320
- " <td>18.920</td>\n",
321
- " <td>10.4775</td>\n",
322
- " <td>0.000</td>\n",
323
- " </tr>\n",
324
- " <tr>\n",
325
- " <th>2018-01-01 04:00:00</th>\n",
326
- " <td>36.675001</td>\n",
327
- " <td>21.600</td>\n",
328
- " <td>9.9925</td>\n",
329
- " <td>0.000</td>\n",
330
- " </tr>\n",
331
- " <tr>\n",
332
- " <th>2018-01-01 05:00:00</th>\n",
333
- " <td>37.272500</td>\n",
334
- " <td>19.000</td>\n",
335
- " <td>9.8050</td>\n",
336
- " <td>0.000</td>\n",
337
- " </tr>\n",
338
- " </tbody>\n",
339
- "</table>\n",
340
- "</div>"
341
- ],
342
- "text/plain": [
343
- " hvac_N hvac_S air_temp_set_1 solar_radiation_set_1\n",
344
- "2018-01-01 01:00:00 37.525001 19.395 10.8900 2.125\n",
345
- "2018-01-01 02:00:00 37.750001 22.775 10.7550 0.000\n",
346
- "2018-01-01 03:00:00 37.550001 18.920 10.4775 0.000\n",
347
- "2018-01-01 04:00:00 36.675001 21.600 9.9925 0.000\n",
348
- "2018-01-01 05:00:00 37.272500 19.000 9.8050 0.000"
349
- ]
350
- },
351
- "execution_count": 69,
352
- "metadata": {},
353
- "output_type": "execute_result"
354
- }
355
- ],
356
  "source": [
357
  "# interpolation of the whole data set\n",
358
  "\n",
359
  "ip_eed_1h = pd.DataFrame()\n",
360
  "for ii in eed_1h.columns:\n",
361
- " ip_df = interpolate_gaps(eed_1h, ii)\n",
362
  " ip_eed_1h = pd.concat([ip_eed_1h, ip_df[0]], axis=1) # axis=1 for horizontal concat\n",
363
  "ip_eed_1h.columns = list(eed_1h.columns)\n",
364
  "\n",
365
- "# Reset the index and rename the columns\n",
366
- "ip_eed_1h = ip_eed_1h.reset_index()\n",
367
- "ip_eed_1h = ip_eed_1h.rename(columns={'index': 'date'})\n",
368
- "ip_eed_1h.head()\n",
369
- "\n",
370
  "ip_eed_1h.to_csv(dataPATH + r\"\\interpolated_energy_data.csv\")\n",
371
  "\n",
372
  "ip_eed_1h.head()"
@@ -384,26 +236,6 @@
384
  "\n",
385
  "plt.show()"
386
  ]
387
- },
388
- {
389
- "cell_type": "code",
390
- "execution_count": 73,
391
- "metadata": {},
392
- "outputs": [],
393
- "source": [
394
- "# Reset the index and rename the columns\n",
395
- "# ip_eed_1h = ip_eed_1h.set_index('date')\n",
396
- "ip_eed_1h.head()\n",
397
- "\n",
398
- "ip_eed_1h.to_csv(dataPATH + r\"\\interpolated_energy_data.csv\")"
399
- ]
400
- },
401
- {
402
- "cell_type": "code",
403
- "execution_count": null,
404
- "metadata": {},
405
- "outputs": [],
406
- "source": []
407
  }
408
  ],
409
  "metadata": {
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": null,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
9
  "import pandas as pd \n",
10
  "from datetime import datetime \n",
11
  "from datetime import timedelta\n",
12
+ "from datetime import date\n",
13
  "import matplotlib.pyplot as plt\n",
14
  "# import seaborn as sns\n",
15
  "import numpy as np\n",
 
32
  },
33
  {
34
  "cell_type": "code",
35
+ "execution_count": null,
36
  "metadata": {},
37
  "outputs": [],
38
  "source": [
 
51
  },
52
  {
53
  "cell_type": "code",
54
+ "execution_count": null,
55
  "metadata": {
56
  "vscode": {
57
  "languageId": "ruby"
 
102
  "plt.show()"
103
  ]
104
  },
105
  {
106
  "cell_type": "markdown",
107
  "metadata": {},
 
121
  " one = timedelta(hours=1)\n",
122
  " secondTSr = secondTS[::-1].copy()\n",
123
  " firstTSr = firstTS[::-1].copy()\n",
124
+ " indexr = pd.date_range(start=firstTS.index[0], end=secondTS.index[-1], freq='H')\n",
125
  " firstTSr.index = indexr[-len(firstTSr):]\n",
126
  " secondTSr.index = indexr[:len(secondTSr)]\n",
127
  " \n",
 
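A note on the hunk above, which adjusts the hourly frequency alias passed to pd.date_range when rebuilding the index across a gap: the sketch below only illustrates that re-indexing step with made-up series names (nothing beyond the date_range call is taken from the notebook). Recent pandas versions prefer the lowercase alias 'h'; older releases also accept 'H'.

# Minimal sketch (illustrative names): build one hourly index spanning two disjoint
# segments so the gap between them can be re-indexed and then filled.
import pandas as pd

first = pd.Series([1.0, 2.0], index=pd.to_datetime(["2018-01-01 00:00", "2018-01-01 01:00"]))
second = pd.Series([5.0, 6.0], index=pd.to_datetime(["2018-01-01 05:00", "2018-01-01 06:00"]))

full_index = pd.date_range(start=first.index[0], end=second.index[-1], freq="h")  # hourly alias
combined = pd.concat([first, second]).reindex(full_index)
print(combined)  # NaN rows mark the 02:00-04:00 gap left to interpolate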
142
  },
143
  {
144
  "cell_type": "code",
145
+ "execution_count": null,
146
  "metadata": {},
147
  "outputs": [],
148
  "source": [
 
176
  },
177
  {
178
  "cell_type": "code",
179
+ "execution_count": null,
180
  "metadata": {},
181
  "outputs": [],
182
  "source": [
 
190
  " for ii in range(len(dfs)-1):\n",
191
  " seasonal_periods = max(min([len(dfs[ii]), len(dfs[ii+1])]) // 2 - 10, 2)\n",
192
  " \n",
193
+ " if seasonal_periods > 24*7: # Using more than 1 week of seasonal patterns is not necessary\n",
194
  " seasonal_periods = 24*7\n",
195
  " interpolation = fillgap(dfs[ii][col], dfs[ii+1][col], seasonal_periods)\n",
196
  " else:\n",
 
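The hunk above caps seasonal_periods at one week of hourly samples (24*7) before calling fillgap. fillgap's body is not part of this diff; the removed cell output shows statsmodels Holt-Winters ConvergenceWarnings, so the sketch below assumes exponential smoothing is the underlying tool and only demonstrates that general idea on synthetic data. It is not the notebook's implementation.

# Hedged sketch: forecasting across a gap with Holt-Winters exponential smoothing
# (an assumption based on the statsmodels warnings in the removed output).
import numpy as np
import pandas as pd
from statsmodels.tsa.holtwinters import ExponentialSmoothing

# Two weeks of synthetic hourly data with a daily cycle.
idx = pd.date_range("2018-01-01", periods=24 * 14, freq="h")
series = pd.Series(20 + 5 * np.sin(2 * np.pi * np.arange(len(idx)) / 24), index=idx)

# The notebook derives seasonal_periods from the segment lengths and caps it at 24*7;
# a plain daily period keeps this example small. The fitted series needs at least two
# full seasonal cycles for the default initialization.
fit = ExponentialSmoothing(series, seasonal="add", seasonal_periods=24).fit()
gap_fill = fit.forecast(6)  # forecast 6 hours into a hypothetical gap
print(gap_fill)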
206
  },
207
  {
208
  "cell_type": "code",
209
+ "execution_count": null,
210
  "metadata": {},
211
+ "outputs": [],
212
  "source": [
213
  "# interpolation of the whole data set\n",
214
  "\n",
215
  "ip_eed_1h = pd.DataFrame()\n",
216
  "for ii in eed_1h.columns:\n",
217
+ " ip_df = interpolate_gaps(eed_1h['2018-1-2':], ii)\n",
218
  " ip_eed_1h = pd.concat([ip_eed_1h, ip_df[0]], axis=1) # axis=1 for horizontal concat\n",
219
  "ip_eed_1h.columns = list(eed_1h.columns)\n",
220
  "\n",
221
+ "ip_eed_1h = ip_eed_1h.set_axis('date', axis=0)\n",
222
  "ip_eed_1h.to_csv(dataPATH + r\"\\interpolated_energy_data.csv\")\n",
223
  "\n",
224
  "ip_eed_1h.head()"
 
236
  "\n",
237
  "plt.show()"
238
  ]
239
  }
240
  ],
241
  "metadata": {
EnergyLSTM/lstm_energy.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 61,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
@@ -58,7 +58,7 @@
58
  },
59
  {
60
  "cell_type": "code",
61
- "execution_count": 62,
62
  "metadata": {},
63
  "outputs": [
64
  {
@@ -92,7 +92,7 @@
92
  " </thead>\n",
93
  " <tbody>\n",
94
  " <tr>\n",
95
- " <th>23</th>\n",
96
  " <td>2018-01-02 00:00:00</td>\n",
97
  " <td>38.225000</td>\n",
98
  " <td>26.4000</td>\n",
@@ -101,7 +101,7 @@
101
  " <td>87.4450</td>\n",
102
  " </tr>\n",
103
  " <tr>\n",
104
- " <th>24</th>\n",
105
  " <td>2018-01-02 01:00:00</td>\n",
106
  " <td>38.297501</td>\n",
107
  " <td>21.1750</td>\n",
@@ -110,7 +110,7 @@
110
  " <td>2.8675</td>\n",
111
  " </tr>\n",
112
  " <tr>\n",
113
- " <th>25</th>\n",
114
  " <td>2018-01-02 02:00:00</td>\n",
115
  " <td>38.072500</td>\n",
116
  " <td>21.7225</td>\n",
@@ -119,7 +119,7 @@
119
  " <td>0.0925</td>\n",
120
  " </tr>\n",
121
  " <tr>\n",
122
- " <th>26</th>\n",
123
  " <td>2018-01-02 03:00:00</td>\n",
124
  " <td>39.147500</td>\n",
125
  " <td>21.7000</td>\n",
@@ -128,7 +128,7 @@
128
  " <td>0.1175</td>\n",
129
  " </tr>\n",
130
  " <tr>\n",
131
- " <th>27</th>\n",
132
  " <td>2018-01-02 04:00:00</td>\n",
133
  " <td>38.172500</td>\n",
134
  " <td>21.6250</td>\n",
@@ -141,22 +141,22 @@
141
  "</div>"
142
  ],
143
  "text/plain": [
144
- " date hvac_N hvac_S day_of_week air_temp_set_1 \\\n",
145
- "23 2018-01-02 00:00:00 38.225000 26.4000 1 14.9550 \n",
146
- "24 2018-01-02 01:00:00 38.297501 21.1750 1 14.2125 \n",
147
- "25 2018-01-02 02:00:00 38.072500 21.7225 1 14.2700 \n",
148
- "26 2018-01-02 03:00:00 39.147500 21.7000 1 14.1375 \n",
149
- "27 2018-01-02 04:00:00 38.172500 21.6250 1 13.9850 \n",
150
  "\n",
151
- " solar_radiation_set_1 \n",
152
- "23 87.4450 \n",
153
- "24 2.8675 \n",
154
- "25 0.0925 \n",
155
- "26 0.1175 \n",
156
- "27 0.0725 "
157
  ]
158
  },
159
- "execution_count": 62,
160
  "metadata": {},
161
  "output_type": "execute_result"
162
  }
@@ -184,7 +184,7 @@
184
  },
185
  {
186
  "cell_type": "code",
187
- "execution_count": 70,
188
  "metadata": {},
189
  "outputs": [
190
  {
@@ -193,7 +193,7 @@
193
  "[]"
194
  ]
195
  },
196
- "execution_count": 70,
197
  "metadata": {},
198
  "output_type": "execute_result"
199
  }
@@ -213,7 +213,7 @@
213
  },
214
  {
215
  "cell_type": "code",
216
- "execution_count": 71,
217
  "metadata": {},
218
  "outputs": [],
219
  "source": [
@@ -230,7 +230,7 @@
230
  },
231
  {
232
  "cell_type": "code",
233
- "execution_count": null,
234
  "metadata": {},
235
  "outputs": [],
236
  "source": [
@@ -255,13 +255,50 @@
255
  },
256
  {
257
  "cell_type": "code",
258
- "execution_count": null,
259
  "metadata": {},
260
- "outputs": [],
261
  "source": [
262
  "train,test = traindataset,testdataset\n",
263
- "steps_in_past = 3 \n",
264
- "time_step = 4*6\n",
265
  "no_inputs = 5\n",
266
  "no_outputs = 2\n",
267
  "def create_dataset(dataset,time_step):\n",
@@ -291,9 +328,19 @@
291
  },
292
  {
293
  "cell_type": "code",
294
- "execution_count": null,
295
  "metadata": {},
296
- "outputs": [],
  "source": [
298
  "loss = model.evaluate(X_test, y_test)\n",
299
  "test_predict1 = model.predict(X_test)\n",
@@ -306,7 +353,7 @@
306
  },
307
  {
308
  "cell_type": "code",
309
- "execution_count": null,
310
  "metadata": {},
311
  "outputs": [],
312
  "source": [
@@ -315,12 +362,12 @@
315
  "# Create a 3x3 grid of subplots\n",
316
  "fig, axes = plt.subplots(3, 3, figsize=(10, 10))\n",
317
  "\n",
318
- "var = 100\n",
319
  "# Loop over the value index\n",
320
  "for i, ax in enumerate(axes.flat):\n",
321
  " # Plot your data or perform any other operations\n",
322
- " ax.plot(y_test[var+i,0:time_step], label='Original Testing Data', color='blue')\n",
323
- " ax.plot(test_predict1[var+i,0:time_step], label='Predicted Testing Data', color='red',alpha=0.8)\n",
324
  " # ax.set_title(f'Plot {i+1}')\n",
325
  " ax.set_title('Testing Data - Predicted vs Actual')\n",
326
  " ax.set_xlabel('Time [hours]')\n",
@@ -369,7 +416,7 @@
369
  },
370
  {
371
  "cell_type": "code",
372
- "execution_count": 79,
373
  "metadata": {},
374
  "outputs": [
375
  {
@@ -377,94 +424,94 @@
377
  "output_type": "stream",
378
  "text": [
379
  "Epoch 1/20\n",
380
- "16/16 [==============================] - ETA: 0s - loss: 0.1003\n",
381
- "Epoch 1: val_loss improved from inf to 0.04277, saving model to lstm_energy_01.keras\n",
382
- "16/16 [==============================] - 6s 89ms/step - loss: 0.1003 - val_loss: 0.0428\n",
383
  "Epoch 2/20\n",
384
- "16/16 [==============================] - ETA: 0s - loss: 0.0340\n",
385
- "Epoch 2: val_loss improved from 0.04277 to 0.03142, saving model to lstm_energy_01.keras\n",
386
- "16/16 [==============================] - 0s 17ms/step - loss: 0.0340 - val_loss: 0.0314\n",
387
  "Epoch 3/20\n",
388
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0270\n",
389
- "Epoch 3: val_loss improved from 0.03142 to 0.02204, saving model to lstm_energy_01.keras\n",
390
- "16/16 [==============================] - 0s 17ms/step - loss: 0.0268 - val_loss: 0.0220\n",
391
  "Epoch 4/20\n",
392
- "15/16 [===========================>..] - ETA: 0s - loss: 0.0220\n",
393
- "Epoch 4: val_loss improved from 0.02204 to 0.01482, saving model to lstm_energy_01.keras\n",
394
- "16/16 [==============================] - 0s 15ms/step - loss: 0.0220 - val_loss: 0.0148\n",
395
  "Epoch 5/20\n",
396
- "13/16 [=======================>......] - ETA: 0s - loss: 0.0197\n",
397
- "Epoch 5: val_loss improved from 0.01482 to 0.01388, saving model to lstm_energy_01.keras\n",
398
- "16/16 [==============================] - 0s 18ms/step - loss: 0.0192 - val_loss: 0.0139\n",
399
  "Epoch 6/20\n",
400
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0176\n",
401
- "Epoch 6: val_loss did not improve from 0.01388\n",
402
- "16/16 [==============================] - 0s 15ms/step - loss: 0.0177 - val_loss: 0.0156\n",
403
  "Epoch 7/20\n",
404
- "15/16 [===========================>..] - ETA: 0s - loss: 0.0177\n",
405
- "Epoch 7: val_loss improved from 0.01388 to 0.01233, saving model to lstm_energy_01.keras\n",
406
- "16/16 [==============================] - 0s 17ms/step - loss: 0.0177 - val_loss: 0.0123\n",
407
  "Epoch 8/20\n",
408
- "13/16 [=======================>......] - ETA: 0s - loss: 0.0172\n",
409
- "Epoch 8: val_loss improved from 0.01233 to 0.01210, saving model to lstm_energy_01.keras\n",
410
- "16/16 [==============================] - 0s 14ms/step - loss: 0.0171 - val_loss: 0.0121\n",
411
  "Epoch 9/20\n",
412
- "13/16 [=======================>......] - ETA: 0s - loss: 0.0174\n",
413
- "Epoch 9: val_loss did not improve from 0.01210\n",
414
- "16/16 [==============================] - 0s 15ms/step - loss: 0.0174 - val_loss: 0.0126\n",
415
  "Epoch 10/20\n",
416
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0162\n",
417
- "Epoch 10: val_loss did not improve from 0.01210\n",
418
- "16/16 [==============================] - 0s 16ms/step - loss: 0.0165 - val_loss: 0.0138\n",
419
  "Epoch 11/20\n",
420
  "16/16 [==============================] - ETA: 0s - loss: 0.0164\n",
421
- "Epoch 11: val_loss did not improve from 0.01210\n",
422
- "16/16 [==============================] - 0s 13ms/step - loss: 0.0164 - val_loss: 0.0141\n",
423
  "Epoch 12/20\n",
424
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0167\n",
425
- "Epoch 12: val_loss did not improve from 0.01210\n",
426
- "16/16 [==============================] - 0s 17ms/step - loss: 0.0166 - val_loss: 0.0139\n",
427
  "Epoch 13/20\n",
428
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0165\n",
429
- "Epoch 13: val_loss did not improve from 0.01210\n",
430
- "16/16 [==============================] - 0s 17ms/step - loss: 0.0162 - val_loss: 0.0137\n",
431
  "Epoch 14/20\n",
432
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0158\n",
433
- "Epoch 14: val_loss did not improve from 0.01210\n",
434
- "16/16 [==============================] - 0s 16ms/step - loss: 0.0156 - val_loss: 0.0122\n",
435
  "Epoch 15/20\n",
436
- "14/16 [=========================>....] - ETA: 0s - loss: 0.0150\n",
437
- "Epoch 15: val_loss improved from 0.01210 to 0.01155, saving model to lstm_energy_01.keras\n",
438
- "16/16 [==============================] - 0s 17ms/step - loss: 0.0153 - val_loss: 0.0116\n",
439
  "Epoch 16/20\n",
440
- "12/16 [=====================>........] - ETA: 0s - loss: 0.0157\n",
441
- "Epoch 16: val_loss did not improve from 0.01155\n",
442
- "16/16 [==============================] - 0s 12ms/step - loss: 0.0158 - val_loss: 0.0116\n",
443
- "Epoch 17/20\n",
444
  "16/16 [==============================] - ETA: 0s - loss: 0.0149\n",
445
- "Epoch 17: val_loss did not improve from 0.01155\n",
446
- "16/16 [==============================] - 0s 14ms/step - loss: 0.0149 - val_loss: 0.0118\n",
447
  "Epoch 18/20\n",
448
- "15/16 [===========================>..] - ETA: 0s - loss: 0.0142\n",
449
- "Epoch 18: val_loss did not improve from 0.01155\n",
450
- "16/16 [==============================] - 0s 15ms/step - loss: 0.0144 - val_loss: 0.0118\n",
451
  "Epoch 19/20\n",
452
- "16/16 [==============================] - ETA: 0s - loss: 0.0142\n",
453
- "Epoch 19: val_loss improved from 0.01155 to 0.01153, saving model to lstm_energy_01.keras\n",
454
- "16/16 [==============================] - 0s 15ms/step - loss: 0.0142 - val_loss: 0.0115\n",
455
  "Epoch 20/20\n",
456
- "12/16 [=====================>........] - ETA: 0s - loss: 0.0147\n",
457
- "Epoch 20: val_loss did not improve from 0.01153\n",
458
- "16/16 [==============================] - 0s 12ms/step - loss: 0.0142 - val_loss: 0.0125\n"
459
  ]
460
  },
461
  {
462
  "data": {
463
  "text/plain": [
464
- "<keras.callbacks.History at 0x1da5016dcd0>"
465
  ]
466
  },
467
- "execution_count": 79,
468
  "metadata": {},
469
  "output_type": "execute_result"
470
  }
@@ -494,30 +541,30 @@
494
  "X_train, y_train = create_dataset(train, time_step)\n",
495
  "X_test, y_test = create_dataset(test, time_step)\n",
496
  "\n",
497
- "model = create_model(X_train, time_step, no_outputs)\n",
498
  "checkpoint_path = \"lstm_energy_01.keras\"\n",
499
  "checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n",
500
- "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=20, batch_size=64, verbose=1, callbacks=[checkpoint_callback])"
501
  ]
502
  },
503
  {
504
  "cell_type": "code",
505
- "execution_count": 80,
506
  "metadata": {},
507
  "outputs": [
508
  {
509
  "name": "stdout",
510
  "output_type": "stream",
511
  "text": [
512
- "3/3 [==============================] - 0s 3ms/step - loss: 0.0125\n",
513
- "3/3 [==============================] - 1s 4ms/step\n",
514
- "Loss: 0.012460779398679733\n"
515
  ]
516
  }
517
  ],
518
  "source": [
519
- "loss = model.evaluate(X_test, y_test)\n",
520
- "test_predict1 = model.predict(X_test)\n",
521
  "print(\"Loss: \", loss)\n",
522
  "# Converting values back to the original scale\n",
523
  "scalerBack = MinMaxScaler(feature_range=(mintest, maxtest))\n",
@@ -527,7 +574,7 @@
527
  },
528
  {
529
  "cell_type": "code",
530
- "execution_count": 81,
531
  "metadata": {},
532
  "outputs": [],
533
  "source": [
@@ -556,10 +603,15 @@
556
  ]
557
  },
558
  {
559
- "cell_type": "code",
560
- "execution_count": null,
 
 
 
 
 
 
561
  "metadata": {},
562
- "outputs": [],
563
  "source": []
564
  }
565
  ],
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 85,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
 
58
  },
59
  {
60
  "cell_type": "code",
61
+ "execution_count": 86,
62
  "metadata": {},
63
  "outputs": [
64
  {
 
92
  " </thead>\n",
93
  " <tbody>\n",
94
  " <tr>\n",
95
+ " <th>0</th>\n",
96
  " <td>2018-01-02 00:00:00</td>\n",
97
  " <td>38.225000</td>\n",
98
  " <td>26.4000</td>\n",
 
101
  " <td>87.4450</td>\n",
102
  " </tr>\n",
103
  " <tr>\n",
104
+ " <th>1</th>\n",
105
  " <td>2018-01-02 01:00:00</td>\n",
106
  " <td>38.297501</td>\n",
107
  " <td>21.1750</td>\n",
 
110
  " <td>2.8675</td>\n",
111
  " </tr>\n",
112
  " <tr>\n",
113
+ " <th>2</th>\n",
114
  " <td>2018-01-02 02:00:00</td>\n",
115
  " <td>38.072500</td>\n",
116
  " <td>21.7225</td>\n",
 
119
  " <td>0.0925</td>\n",
120
  " </tr>\n",
121
  " <tr>\n",
122
+ " <th>3</th>\n",
123
  " <td>2018-01-02 03:00:00</td>\n",
124
  " <td>39.147500</td>\n",
125
  " <td>21.7000</td>\n",
 
128
  " <td>0.1175</td>\n",
129
  " </tr>\n",
130
  " <tr>\n",
131
+ " <th>4</th>\n",
132
  " <td>2018-01-02 04:00:00</td>\n",
133
  " <td>38.172500</td>\n",
134
  " <td>21.6250</td>\n",
 
141
  "</div>"
142
  ],
143
  "text/plain": [
144
+ " date hvac_N hvac_S day_of_week air_temp_set_1 \\\n",
145
+ "0 2018-01-02 00:00:00 38.225000 26.4000 1 14.9550 \n",
146
+ "1 2018-01-02 01:00:00 38.297501 21.1750 1 14.2125 \n",
147
+ "2 2018-01-02 02:00:00 38.072500 21.7225 1 14.2700 \n",
148
+ "3 2018-01-02 03:00:00 39.147500 21.7000 1 14.1375 \n",
149
+ "4 2018-01-02 04:00:00 38.172500 21.6250 1 13.9850 \n",
150
  "\n",
151
+ " solar_radiation_set_1 \n",
152
+ "0 87.4450 \n",
153
+ "1 2.8675 \n",
154
+ "2 0.0925 \n",
155
+ "3 0.1175 \n",
156
+ "4 0.0725 "
157
  ]
158
  },
159
+ "execution_count": 86,
160
  "metadata": {},
161
  "output_type": "execute_result"
162
  }
 
184
  },
185
  {
186
  "cell_type": "code",
187
+ "execution_count": 88,
188
  "metadata": {},
189
  "outputs": [
190
  {
 
193
  "[]"
194
  ]
195
  },
196
+ "execution_count": 88,
197
  "metadata": {},
198
  "output_type": "execute_result"
199
  }
 
213
  },
214
  {
215
  "cell_type": "code",
216
+ "execution_count": 89,
217
  "metadata": {},
218
  "outputs": [],
219
  "source": [
 
230
  },
231
  {
232
  "cell_type": "code",
233
+ "execution_count": 104,
234
  "metadata": {},
235
  "outputs": [],
236
  "source": [
 
255
  },
256
  {
257
  "cell_type": "code",
258
+ "execution_count": 94,
259
  "metadata": {},
260
+ "outputs": [
261
+ {
262
+ "name": "stdout",
263
+ "output_type": "stream",
264
+ "text": [
265
+ "Epoch 1/5\n",
266
+ "370/371 [============================>.] - ETA: 0s - loss: 0.0224\n",
267
+ "Epoch 1: val_loss improved from inf to 0.01162, saving model to lstm_energy_01.keras\n",
268
+ "371/371 [==============================] - 11s 15ms/step - loss: 0.0224 - val_loss: 0.0116\n",
269
+ "Epoch 2/5\n",
270
+ "368/371 [============================>.] - ETA: 0s - loss: 0.0139\n",
271
+ "Epoch 2: val_loss improved from 0.01162 to 0.01146, saving model to lstm_energy_01.keras\n",
272
+ "371/371 [==============================] - 5s 12ms/step - loss: 0.0139 - val_loss: 0.0115\n",
273
+ "Epoch 3/5\n",
274
+ "370/371 [============================>.] - ETA: 0s - loss: 0.0125\n",
275
+ "Epoch 3: val_loss improved from 0.01146 to 0.01132, saving model to lstm_energy_01.keras\n",
276
+ "371/371 [==============================] - 5s 13ms/step - loss: 0.0125 - val_loss: 0.0113\n",
277
+ "Epoch 4/5\n",
278
+ "367/371 [============================>.] - ETA: 0s - loss: 0.0119\n",
279
+ "Epoch 4: val_loss improved from 0.01132 to 0.01007, saving model to lstm_energy_01.keras\n",
280
+ "371/371 [==============================] - 5s 13ms/step - loss: 0.0119 - val_loss: 0.0101\n",
281
+ "Epoch 5/5\n",
282
+ "371/371 [==============================] - ETA: 0s - loss: 0.0117\n",
283
+ "Epoch 5: val_loss did not improve from 0.01007\n",
284
+ "371/371 [==============================] - 5s 13ms/step - loss: 0.0117 - val_loss: 0.0101\n"
285
+ ]
286
+ },
287
+ {
288
+ "data": {
289
+ "text/plain": [
290
+ "<keras.callbacks.History at 0x1da353bd790>"
291
+ ]
292
+ },
293
+ "execution_count": 94,
294
+ "metadata": {},
295
+ "output_type": "execute_result"
296
+ }
297
+ ],
298
  "source": [
299
  "train,test = traindataset,testdataset\n",
300
+ "steps_in_past = 7 \n",
301
+ "time_step = 24\n",
302
  "no_inputs = 5\n",
303
  "no_outputs = 2\n",
304
  "def create_dataset(dataset,time_step):\n",
 
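The hunk above changes the windowing parameters (steps_in_past, time_step) that feed create_dataset. Since create_dataset's full body sits outside this diff, the helper below is only a generic sliding-window sketch under assumed shapes: X takes time_step rows of the first no_inputs features, y the following time_step rows of the first no_outputs features.

# Hedged sketch of a windowing helper in the spirit of create_dataset; the exact
# slicing in the notebook may differ (e.g. y may be flattened before training).
import numpy as np

def make_windows(data: np.ndarray, time_step: int, no_inputs: int, no_outputs: int):
    """Slice a (samples, features) array into non-overlapping LSTM windows."""
    X, y = [], []
    for start in range(0, len(data) - 2 * time_step + 1, time_step):
        X.append(data[start:start + time_step, :no_inputs])                    # past window
        y.append(data[start + time_step:start + 2 * time_step, :no_outputs])   # next window
    return np.array(X), np.array(y)

# Example with random data shaped like the 5-feature hourly frame in the notebook.
rng = np.random.default_rng(0)
X, y = make_windows(rng.normal(size=(24 * 30, 5)), time_step=24, no_inputs=5, no_outputs=2)
print(X.shape, y.shape)  # (29, 24, 5) and (29, 24, 2)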
328
  },
329
  {
330
  "cell_type": "code",
331
+ "execution_count": 95,
332
  "metadata": {},
333
+ "outputs": [
334
+ {
335
+ "name": "stdout",
336
+ "output_type": "stream",
337
+ "text": [
338
+ "60/60 [==============================] - 0s 4ms/step - loss: 0.0101\n",
339
+ "60/60 [==============================] - 1s 3ms/step\n",
340
+ "Loss: 0.010141444392502308\n"
341
+ ]
342
+ }
343
+ ],
344
  "source": [
345
  "loss = model.evaluate(X_test, y_test)\n",
346
  "test_predict1 = model.predict(X_test)\n",
 
353
  },
354
  {
355
  "cell_type": "code",
356
+ "execution_count": 100,
357
  "metadata": {},
358
  "outputs": [],
359
  "source": [
 
362
  "# Create a 3x3 grid of subplots\n",
363
  "fig, axes = plt.subplots(3, 3, figsize=(10, 10))\n",
364
  "\n",
365
+ "var = 15\n",
366
  "# Loop over the value index\n",
367
  "for i, ax in enumerate(axes.flat):\n",
368
  " # Plot your data or perform any other operations\n",
369
+ " ax.plot(y_test1[var+i*9,0:time_step], label='Original Testing Data', color='blue')\n",
370
+ " ax.plot(test_predict2[var+i*9,0:time_step], label='Predicted Testing Data', color='red',alpha=0.8)\n",
371
  " # ax.set_title(f'Plot {i+1}')\n",
372
  " ax.set_title('Testing Data - Predicted vs Actual')\n",
373
  " ax.set_xlabel('Time [hours]')\n",
 
416
  },
417
  {
418
  "cell_type": "code",
419
+ "execution_count": 105,
420
  "metadata": {},
421
  "outputs": [
422
  {
 
424
  "output_type": "stream",
425
  "text": [
426
  "Epoch 1/20\n",
427
+ "13/16 [=======================>......] - ETA: 0s - loss: 0.0893\n",
428
+ "Epoch 1: val_loss improved from inf to 0.02898, saving model to lstm_energy_01.keras\n",
429
+ "16/16 [==============================] - 6s 100ms/step - loss: 0.0820 - val_loss: 0.0290\n",
430
  "Epoch 2/20\n",
431
+ "13/16 [=======================>......] - ETA: 0s - loss: 0.0316\n",
432
+ "Epoch 2: val_loss improved from 0.02898 to 0.02435, saving model to lstm_energy_01.keras\n",
433
+ "16/16 [==============================] - 0s 20ms/step - loss: 0.0310 - val_loss: 0.0243\n",
434
  "Epoch 3/20\n",
435
+ "16/16 [==============================] - ETA: 0s - loss: 0.0242\n",
436
+ "Epoch 3: val_loss improved from 0.02435 to 0.01740, saving model to lstm_energy_01.keras\n",
437
+ "16/16 [==============================] - 0s 24ms/step - loss: 0.0242 - val_loss: 0.0174\n",
438
  "Epoch 4/20\n",
439
+ "16/16 [==============================] - ETA: 0s - loss: 0.0213\n",
440
+ "Epoch 4: val_loss improved from 0.01740 to 0.01566, saving model to lstm_energy_01.keras\n",
441
+ "16/16 [==============================] - 0s 25ms/step - loss: 0.0213 - val_loss: 0.0157\n",
442
  "Epoch 5/20\n",
443
+ "16/16 [==============================] - ETA: 0s - loss: 0.0189\n",
444
+ "Epoch 5: val_loss improved from 0.01566 to 0.01483, saving model to lstm_energy_01.keras\n",
445
+ "16/16 [==============================] - 0s 25ms/step - loss: 0.0189 - val_loss: 0.0148\n",
446
  "Epoch 6/20\n",
447
+ "13/16 [=======================>......] - ETA: 0s - loss: 0.0184\n",
448
+ "Epoch 6: val_loss improved from 0.01483 to 0.01359, saving model to lstm_energy_01.keras\n",
449
+ "16/16 [==============================] - 0s 25ms/step - loss: 0.0182 - val_loss: 0.0136\n",
450
  "Epoch 7/20\n",
451
+ "14/16 [=========================>....] - ETA: 0s - loss: 0.0177\n",
452
+ "Epoch 7: val_loss improved from 0.01359 to 0.01285, saving model to lstm_energy_01.keras\n",
453
+ "16/16 [==============================] - 0s 22ms/step - loss: 0.0175 - val_loss: 0.0128\n",
454
  "Epoch 8/20\n",
455
+ "13/16 [=======================>......] - ETA: 0s - loss: 0.0168\n",
456
+ "Epoch 8: val_loss did not improve from 0.01285\n",
457
+ "16/16 [==============================] - 0s 20ms/step - loss: 0.0171 - val_loss: 0.0148\n",
458
  "Epoch 9/20\n",
459
+ "14/16 [=========================>....] - ETA: 0s - loss: 0.0178\n",
460
+ "Epoch 9: val_loss did not improve from 0.01285\n",
461
+ "16/16 [==============================] - 0s 20ms/step - loss: 0.0175 - val_loss: 0.0143\n",
462
  "Epoch 10/20\n",
463
+ "15/16 [===========================>..] - ETA: 0s - loss: 0.0165\n",
464
+ "Epoch 10: val_loss improved from 0.01285 to 0.01277, saving model to lstm_energy_01.keras\n",
465
+ "16/16 [==============================] - 0s 22ms/step - loss: 0.0166 - val_loss: 0.0128\n",
466
  "Epoch 11/20\n",
467
  "16/16 [==============================] - ETA: 0s - loss: 0.0164\n",
468
+ "Epoch 11: val_loss did not improve from 0.01277\n",
469
+ "16/16 [==============================] - 0s 23ms/step - loss: 0.0164 - val_loss: 0.0139\n",
470
  "Epoch 12/20\n",
471
+ "15/16 [===========================>..] - ETA: 0s - loss: 0.0162\n",
472
+ "Epoch 12: val_loss improved from 0.01277 to 0.01235, saving model to lstm_energy_01.keras\n",
473
+ "16/16 [==============================] - 1s 33ms/step - loss: 0.0162 - val_loss: 0.0124\n",
474
  "Epoch 13/20\n",
475
+ "15/16 [===========================>..] - ETA: 0s - loss: 0.0154\n",
476
+ "Epoch 13: val_loss did not improve from 0.01235\n",
477
+ "16/16 [==============================] - 0s 20ms/step - loss: 0.0153 - val_loss: 0.0131\n",
478
  "Epoch 14/20\n",
479
+ "13/16 [=======================>......] - ETA: 0s - loss: 0.0156\n",
480
+ "Epoch 14: val_loss did not improve from 0.01235\n",
481
+ "16/16 [==============================] - 0s 21ms/step - loss: 0.0160 - val_loss: 0.0136\n",
482
  "Epoch 15/20\n",
483
+ "13/16 [=======================>......] - ETA: 0s - loss: 0.0167\n",
484
+ "Epoch 15: val_loss did not improve from 0.01235\n",
485
+ "16/16 [==============================] - 0s 20ms/step - loss: 0.0164 - val_loss: 0.0125\n",
486
  "Epoch 16/20\n",
 
 
 
 
487
  "16/16 [==============================] - ETA: 0s - loss: 0.0149\n",
488
+ "Epoch 16: val_loss improved from 0.01235 to 0.01134, saving model to lstm_energy_01.keras\n",
489
+ "16/16 [==============================] - 0s 25ms/step - loss: 0.0149 - val_loss: 0.0113\n",
490
+ "Epoch 17/20\n",
491
+ "16/16 [==============================] - ETA: 0s - loss: 0.0147\n",
492
+ "Epoch 17: val_loss did not improve from 0.01134\n",
493
+ "16/16 [==============================] - 0s 21ms/step - loss: 0.0147 - val_loss: 0.0125\n",
494
  "Epoch 18/20\n",
495
+ "15/16 [===========================>..] - ETA: 0s - loss: 0.0143\n",
496
+ "Epoch 18: val_loss did not improve from 0.01134\n",
497
+ "16/16 [==============================] - 0s 23ms/step - loss: 0.0143 - val_loss: 0.0116\n",
498
  "Epoch 19/20\n",
499
+ "15/16 [===========================>..] - ETA: 0s - loss: 0.0138\n",
500
+ "Epoch 19: val_loss improved from 0.01134 to 0.01108, saving model to lstm_energy_01.keras\n",
501
+ "16/16 [==============================] - 0s 23ms/step - loss: 0.0138 - val_loss: 0.0111\n",
502
  "Epoch 20/20\n",
503
+ "16/16 [==============================] - ETA: 0s - loss: 0.0137\n",
504
+ "Epoch 20: val_loss improved from 0.01108 to 0.01093, saving model to lstm_energy_01.keras\n",
505
+ "16/16 [==============================] - 0s 25ms/step - loss: 0.0137 - val_loss: 0.0109\n"
506
  ]
507
  },
508
  {
509
  "data": {
510
  "text/plain": [
511
+ "<keras.callbacks.History at 0x1da50f44760>"
512
  ]
513
  },
514
+ "execution_count": 105,
515
  "metadata": {},
516
  "output_type": "execute_result"
517
  }
 
541
  "X_train, y_train = create_dataset(train, time_step)\n",
542
  "X_test, y_test = create_dataset(test, time_step)\n",
543
  "\n",
544
+ "model2 = create_model(X_train, time_step, no_outputs)\n",
545
  "checkpoint_path = \"lstm_energy_01.keras\"\n",
546
  "checkpoint_callback = ModelCheckpoint(filepath=checkpoint_path, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n",
547
+ "model2.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=20, batch_size=64, verbose=1, callbacks=[checkpoint_callback])"
548
  ]
549
  },
550
  {
551
  "cell_type": "code",
552
+ "execution_count": 106,
553
  "metadata": {},
554
  "outputs": [
555
  {
556
  "name": "stdout",
557
  "output_type": "stream",
558
  "text": [
559
+ "3/3 [==============================] - 0s 5ms/step - loss: 0.0109\n",
560
+ "3/3 [==============================] - 1s 5ms/step\n",
561
+ "Loss: 0.010930849239230156\n"
562
  ]
563
  }
564
  ],
565
  "source": [
566
+ "loss = model2.evaluate(X_test, y_test)\n",
567
+ "test_predict1 = model2.predict(X_test)\n",
568
  "print(\"Loss: \", loss)\n",
569
  "# Converting values back to the original scale\n",
570
  "scalerBack = MinMaxScaler(feature_range=(mintest, maxtest))\n",
 
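The hunk above rebuilds the network as model2 and re-runs training with the same ModelCheckpoint callback. create_model() is not shown in this diff, so the small architecture below is purely an assumption; only the checkpoint-and-fit pattern mirrors the cell.

# Hedged sketch of the train-with-checkpoint pattern; the layer sizes and the flattened
# multi-step output head are assumptions, not the notebook's create_model().
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Input, LSTM, Dense
from tensorflow.keras.callbacks import ModelCheckpoint

time_step, no_inputs, no_outputs = 24, 5, 2
model = Sequential([
    Input(shape=(time_step, no_inputs)),
    LSTM(32),
    Dense(time_step * no_outputs),  # one flat vector of predictions per window (assumption)
])
model.compile(optimizer="adam", loss="mse")

checkpoint = ModelCheckpoint(filepath="lstm_energy_01.keras", monitor="val_loss",
                             verbose=1, save_best_only=True, mode="min")

# Random stand-in data with the shapes produced by the windowing step.
rng = np.random.default_rng(0)
X = rng.normal(size=(64, time_step, no_inputs))
y = rng.normal(size=(64, time_step * no_outputs))
model.fit(X, y, validation_split=0.2, epochs=2, batch_size=16, callbacks=[checkpoint])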
574
  },
575
  {
576
  "cell_type": "code",
577
+ "execution_count": 107,
578
  "metadata": {},
579
  "outputs": [],
580
  "source": [
 
603
  ]
604
  },
605
  {
606
+ "cell_type": "markdown",
607
+ "metadata": {},
608
+ "source": [
609
+ "### Model 3 predicting based on past Mondays"
610
+ ]
611
+ },
612
+ {
613
+ "cell_type": "markdown",
614
  "metadata": {},
 
615
  "source": []
616
  }
617
  ],