akshayballal committed on
Commit
816790f
1 Parent(s): dbd7ac0

Update publisher

mqttpublisher.ipynb CHANGED
@@ -2,14 +2,14 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 4,
6
  "metadata": {},
7
  "outputs": [
8
  {
9
  "name": "stderr",
10
  "output_type": "stream",
11
  "text": [
12
- "C:\\Users\\arbal\\AppData\\Local\\Temp\\ipykernel_20872\\3033510885.py:13: DeprecationWarning: Callback API version 1 is deprecated, update to latest version\n",
13
  " client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION1, clientId)\n"
14
  ]
15
  },
@@ -171,8 +171,485 @@
171
  "published!\n",
172
  "published!\n",
173
  "published!\n",
174
  "published!\n"
175
  ]
176
  }
177
  ],
178
  "source": [
@@ -193,85 +670,14 @@
193
  "topic = \"sensor_data\"\n",
194
  "\n",
195
  "def publish_sensor_data(): \n",
196
- " for index, row in df.iterrows():\n",
197
- " hp_hws_temp = row['hp_hws_temp']\n",
198
- " rtu_003_sa_temp = row['rtu_003_sa_temp']\n",
199
- " rtu_003_oadmpr_pct = row['rtu_003_oadmpr_pct']\n",
200
- " rtu_003_ra_temp = row['rtu_003_ra_temp']\n",
201
- " rtu_003_oa_temp = row['rtu_003_oa_temp']\n",
202
- " rtu_003_ma_temp = row['rtu_003_ma_temp']\n",
203
- " rtu_003_sf_vfd_spd_fbk_tn = row['rtu_003_sf_vfd_spd_fbk_tn']\n",
204
- " rtu_003_rf_vfd_spd_fbk_tn =row['rtu_003_rf_vfd_spd_fbk_tn']\n",
205
- " rtu_004_sa_temp = row['rtu_004_sa_temp']\n",
206
- " rtu_004_oadmpr_pct = row['rtu_004_oadmpr_pct']\n",
207
- " rtu_004_ra_temp = row['rtu_004_ra_temp']\n",
208
- " rtu_004_oa_temp = row['rtu_004_oa_temp']\n",
209
- " rtu_004_ma_temp = row['rtu_004_ma_temp']\n",
210
- " rtu_004_sf_vfd_spd_fbk_tn = row['rtu_004_sf_vfd_spd_fbk_tn']\n",
211
- " rtu_004_rf_vfd_spd_fbk_tn = row['rtu_004_rf_vfd_spd_fbk_tn']\n",
212
- " rtu_001_sa_temp = row['rtu_001_sa_temp']\n",
213
- " rtu_001_oadmpr_pct = row['rtu_001_oadmpr_pct']\n",
214
- " rtu_001_ra_temp = row['rtu_001_ra_temp']\n",
215
- " rtu_001_oa_temp = row['rtu_001_oa_temp']\n",
216
- " rtu_001_ma_temp = row['rtu_001_ma_temp']\n",
217
- " rtu_001_sf_vfd_spd_fbk_tn = row['rtu_001_sf_vfd_spd_fbk_tn']\n",
218
- " rtu_001_rf_vfd_spd_fbk_tn =row['rtu_001_rf_vfd_spd_fbk_tn']\n",
219
- " rtu_002_sa_temp = row['rtu_002_sa_temp']\n",
220
- " rtu_002_oadmpr_pct = row['rtu_002_oadmpr_pct']\n",
221
- " rtu_002_ra_temp = row['rtu_002_ra_temp']\n",
222
- " rtu_002_oa_temp = row['rtu_002_oa_temp']\n",
223
- " rtu_002_ma_temp = row['rtu_002_ma_temp']\n",
224
- " rtu_002_sf_vfd_spd_fbk_tn = row['rtu_002_sf_vfd_spd_fbk_tn']\n",
225
- " rtu_002_rf_vfd_spd_fbk_tn = row['rtu_002_rf_vfd_spd_fbk_tn']\n",
226
- " # rtu_004_sat_sp_tn = row['rtu_004_sat_sp_tn']\n",
227
- " # rtu_003_sat_sp_tn = row['rtu_003_sat_sp_tn']\n",
228
- " # rtu_001_sat_sp_tn = row['rtu_001_sat_sp_tn']\n",
229
- " # rtu_002_sat_sp_tn = row['rtu_002_sat_sp_tn']\n",
230
- " air_temp_set_1 = row['air_temp_set_1']\n",
231
- " air_temp_set_2 = row['air_temp_set_2']\n",
232
- " dew_point_temperature_set_1d = row['dew_point_temperature_set_1d']\n",
233
- " relative_humidity_set_1 = row['relative_humidity_set_1']\n",
234
- " solar_radiation_set_1 = row['solar_radiation_set_1']\n",
235
- " \n",
236
  " \n",
237
- " client.publish(topic, payload=json.dumps({'hp_hws_temp':hp_hws_temp,\n",
238
- " 'rtu_003_sa_temp':rtu_003_sa_temp,\n",
239
- " 'rtu_003_oadmpr_pct': rtu_003_oadmpr_pct,\n",
240
- " 'rtu_003_ra_temp':rtu_003_ra_temp,\n",
241
- " 'rtu_003_oa_temp': rtu_003_oa_temp,\n",
242
- " 'rtu_003_ma_temp': rtu_003_ma_temp,\n",
243
- " 'rtu_003_sf_vfd_spd_fbk_tn': rtu_003_sf_vfd_spd_fbk_tn,\n",
244
- " 'rtu_003_rf_vfd_spd_fbk_tn':rtu_003_rf_vfd_spd_fbk_tn,\n",
245
- " 'rtu_004_sa_temp':rtu_004_sa_temp,\n",
246
- " 'rtu_004_oadmpr_pct':rtu_004_oadmpr_pct,\n",
247
- " 'rtu_004_ra_temp':rtu_004_ra_temp,\n",
248
- " 'rtu_004_oa_temp':rtu_004_oa_temp,\n",
249
- " 'rtu_004_ma_temp':rtu_004_ma_temp,\n",
250
- " 'rtu_004_sf_vfd_spd_fbk_tn':rtu_004_sf_vfd_spd_fbk_tn,\n",
251
- " 'rtu_004_rf_vfd_spd_fbk_tn':rtu_004_rf_vfd_spd_fbk_tn,\n",
252
- " 'rtu_001_sa_temp':rtu_001_sa_temp,\n",
253
- " 'rtu_001_oadmpr_pct': rtu_001_oadmpr_pct,\n",
254
- " 'rtu_001_ra_temp':rtu_001_ra_temp,\n",
255
- " 'rtu_001_oa_temp': rtu_001_oa_temp,\n",
256
- " 'rtu_001_ma_temp': rtu_001_ma_temp,\n",
257
- " 'rtu_001_sf_vfd_spd_fbk_tn': rtu_001_sf_vfd_spd_fbk_tn,\n",
258
- " 'rtu_001_rf_vfd_spd_fbk_tn':rtu_001_rf_vfd_spd_fbk_tn,\n",
259
- " 'rtu_002_sa_temp':rtu_002_sa_temp,\n",
260
- " 'rtu_002_oadmpr_pct':rtu_002_oadmpr_pct,\n",
261
- " 'rtu_002_ra_temp':rtu_002_ra_temp,\n",
262
- " 'rtu_002_oa_temp':rtu_002_oa_temp,\n",
263
- " 'rtu_002_ma_temp':rtu_002_ma_temp,\n",
264
- " 'rtu_002_sf_vfd_spd_fbk_tn':rtu_002_sf_vfd_spd_fbk_tn,\n",
265
- " 'rtu_002_rf_vfd_spd_fbk_tn':rtu_002_rf_vfd_spd_fbk_tn,\n",
266
- " # 'rtu_004_sat_sp_tn':rtu_004_sat_sp_tn,\n",
267
- " # 'rtu_003_sat_sp_tn' :rtu_003_sat_sp_tn,\n",
268
- " # 'rtu_001_sat_sp_tn':rtu_001_sat_sp_tn,\n",
269
- " # 'rtu_002_sat_sp_tn':rtu_002_sat_sp_tn,\n",
270
- " 'air_temp_set_1':air_temp_set_1,\n",
271
- " 'air_temp_set_2':air_temp_set_2,\n",
272
- " 'dew_point_temperature_set_1d':dew_point_temperature_set_1d,\n",
273
- " 'relative_humidity_set_1':relative_humidity_set_1,\n",
274
- " 'solar_radiation_set_1':solar_radiation_set_1}))\n",
275
  " print(\"published!\")\n",
276
  " time.sleep(0.2)\n",
277
  "\n",
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 2,
6
  "metadata": {},
7
  "outputs": [
8
  {
9
  "name": "stderr",
10
  "output_type": "stream",
11
  "text": [
12
+ "C:\\Users\\arbal\\AppData\\Local\\Temp\\ipykernel_1472\\1157986887.py:13: DeprecationWarning: Callback API version 1 is deprecated, update to latest version\n",
13
  " client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION1, clientId)\n"
14
  ]
15
  },
 
171
  "published!\n",
172
  "published!\n",
173
  "published!\n",
174
+ "published!\n",
 ⋮ (this added "published!\n" output line repeats identically for lines 175–638, 465 lines in total)
639
  "published!\n"
640
  ]
641
+ },
642
+ {
643
+ "ename": "KeyboardInterrupt",
644
+ "evalue": "",
645
+ "output_type": "error",
646
+ "traceback": [
647
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
648
+ "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
649
+ "Cell \u001b[1;32mIn[2], line 102\u001b[0m\n\u001b[0;32m 98\u001b[0m time\u001b[38;5;241m.\u001b[39msleep(\u001b[38;5;241m0.2\u001b[39m)\n\u001b[0;32m 101\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[1;32m--> 102\u001b[0m \u001b[43mpublish_sensor_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 103\u001b[0m \u001b[38;5;66;03m# time.sleep(0.1)\u001b[39;00m\n\u001b[0;32m 104\u001b[0m client\u001b[38;5;241m.\u001b[39mdisconnect()\n",
650
+ "Cell \u001b[1;32mIn[2], line 98\u001b[0m, in \u001b[0;36mpublish_sensor_data\u001b[1;34m()\u001b[0m\n\u001b[0;32m 59\u001b[0m client\u001b[38;5;241m.\u001b[39mpublish(topic, payload\u001b[38;5;241m=\u001b[39mjson\u001b[38;5;241m.\u001b[39mdumps({\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mhp_hws_temp\u001b[39m\u001b[38;5;124m'\u001b[39m:hp_hws_temp,\n\u001b[0;32m 60\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mrtu_003_sa_temp\u001b[39m\u001b[38;5;124m'\u001b[39m:rtu_003_sa_temp,\n\u001b[0;32m 61\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mrtu_003_oadmpr_pct\u001b[39m\u001b[38;5;124m'\u001b[39m: rtu_003_oadmpr_pct,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 95\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mrelative_humidity_set_1\u001b[39m\u001b[38;5;124m'\u001b[39m:relative_humidity_set_1,\n\u001b[0;32m 96\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124msolar_radiation_set_1\u001b[39m\u001b[38;5;124m'\u001b[39m:solar_radiation_set_1}))\n\u001b[0;32m 97\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpublished!\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m---> 98\u001b[0m time\u001b[38;5;241m.\u001b[39msleep(\u001b[38;5;241m0.2\u001b[39m)\n",
651
+ "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
652
+ ]
653
  }
654
  ],
655
  "source": [
 
670
  "topic = \"sensor_data\"\n",
671
  "\n",
672
  "def publish_sensor_data(): \n",
673
+ " for i in range(len(df)):\n",
674
+ "\n",
675
+ " data = {}\n",
676
+ " for col in df.columns:\n",
677
+ " data[col] = df[col][i]\n",
678
+ "\n",
679
  " \n",
680
+ " client.publish(topic, payload=json.dumps(data))\n",
681
  " print(\"published!\")\n",
682
  " time.sleep(0.2)\n",
683
  "\n",
physLSTM/lstm_vav_rtu1.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 35,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
@@ -23,7 +23,7 @@
23
  },
24
  {
25
  "cell_type": "code",
26
- "execution_count": 2,
27
  "metadata": {},
28
  "outputs": [],
29
  "source": [
@@ -32,7 +32,7 @@
32
  },
33
  {
34
  "cell_type": "code",
35
- "execution_count": 31,
36
  "metadata": {},
37
  "outputs": [],
38
  "source": [
@@ -51,12 +51,6 @@
51
  " ):\n",
52
  " cols.append(column)\n",
53
  "\n",
54
- "\n",
55
- "# for rtu in rtus:\n",
56
- "# for column in merged.columns:\n",
57
- "# if f\"rtu_00{rtu}_fltrd_sa\" or f\"rtu_00{rtu}_sa_temp\" in column:\n",
58
- "# cols.append(column)\n",
59
- "\n",
60
  "cols = (\n",
61
  " [\"date\"]\n",
62
  " + cols\n",
@@ -82,14 +76,14 @@
82
  },
83
  {
84
  "cell_type": "code",
85
- "execution_count": 32,
86
  "metadata": {},
87
  "outputs": [
88
  {
89
  "name": "stderr",
90
  "output_type": "stream",
91
  "text": [
92
- "C:\\Users\\arbal\\AppData\\Local\\Temp\\ipykernel_29192\\4293840618.py:1: SettingWithCopyWarning: \n",
93
  "A value is trying to be set on a copy of a slice from a DataFrame.\n",
94
  "Try using .loc[row_indexer,col_indexer] = value instead\n",
95
  "\n",
@@ -115,7 +109,7 @@
115
  },
116
  {
117
  "cell_type": "code",
118
- "execution_count": 36,
119
  "metadata": {},
120
  "outputs": [
121
  {
@@ -124,7 +118,7 @@
124
  "[]"
125
  ]
126
  },
127
- "execution_count": 36,
128
  "metadata": {},
129
  "output_type": "execute_result"
130
  }
@@ -144,7 +138,46 @@
144
  },
145
  {
146
  "cell_type": "code",
147
- "execution_count": 37,
148
  "metadata": {},
149
  "outputs": [
150
  {
@@ -161,7 +194,7 @@
161
  },
162
  {
163
  "cell_type": "code",
164
- "execution_count": 38,
165
  "metadata": {},
166
  "outputs": [
167
  {
@@ -170,7 +203,7 @@
170
  "(1073512, 391818)"
171
  ]
172
  },
173
- "execution_count": 38,
174
  "metadata": {},
175
  "output_type": "execute_result"
176
  }
@@ -181,7 +214,7 @@
181
  },
182
  {
183
  "cell_type": "code",
184
- "execution_count": 39,
185
  "metadata": {},
186
  "outputs": [
187
  {
@@ -190,7 +223,7 @@
190
  "['scaler_vav_1.pkl']"
191
  ]
192
  },
193
- "execution_count": 39,
194
  "metadata": {},
195
  "output_type": "execute_result"
196
  }
@@ -208,7 +241,7 @@
208
  },
209
  {
210
  "cell_type": "code",
211
- "execution_count": 51,
212
  "metadata": {},
213
  "outputs": [],
214
  "source": [
@@ -231,7 +264,7 @@
231
  },
232
  {
233
  "cell_type": "code",
234
- "execution_count": 52,
235
  "metadata": {},
236
  "outputs": [
237
  {
@@ -251,7 +284,7 @@
251
  },
252
  {
253
  "cell_type": "code",
254
- "execution_count": 54,
255
  "metadata": {},
256
  "outputs": [
257
  {
@@ -304,6 +337,18 @@
304
  "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3, batch_size=128, verbose=1, callbacks=[checkpoint_callback])"
305
  ]
306
  },
307
  {
308
  "cell_type": "code",
309
  "execution_count": 55,
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 9,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
 
23
  },
24
  {
25
  "cell_type": "code",
26
+ "execution_count": 10,
27
  "metadata": {},
28
  "outputs": [],
29
  "source": [
 
32
  },
33
  {
34
  "cell_type": "code",
35
+ "execution_count": 11,
36
  "metadata": {},
37
  "outputs": [],
38
  "source": [
 
51
  " ):\n",
52
  " cols.append(column)\n",
53
  "\n",
54
  "cols = (\n",
55
  " [\"date\"]\n",
56
  " + cols\n",
 
76
  },
77
  {
78
  "cell_type": "code",
79
+ "execution_count": 12,
80
  "metadata": {},
81
  "outputs": [
82
  {
83
  "name": "stderr",
84
  "output_type": "stream",
85
  "text": [
86
+ "C:\\Users\\arbal\\AppData\\Local\\Temp\\ipykernel_368\\4293840618.py:1: SettingWithCopyWarning: \n",
87
  "A value is trying to be set on a copy of a slice from a DataFrame.\n",
88
  "Try using .loc[row_indexer,col_indexer] = value instead\n",
89
  "\n",
 
109
  },
110
  {
111
  "cell_type": "code",
112
+ "execution_count": 13,
113
  "metadata": {},
114
  "outputs": [
115
  {
 
118
  "[]"
119
  ]
120
  },
121
+ "execution_count": 13,
122
  "metadata": {},
123
  "output_type": "execute_result"
124
  }
 
138
  },
139
  {
140
  "cell_type": "code",
141
+ "execution_count": 14,
142
+ "metadata": {},
143
+ "outputs": [
144
+ {
145
+ "data": {
146
+ "text/plain": [
147
+ "Index(['date', 'zone_069_temp', 'zone_069_fan_spd', 'zone_068_temp',\n",
148
+ " 'zone_068_fan_spd', 'zone_067_temp', 'zone_067_fan_spd',\n",
149
+ " 'zone_066_temp', 'zone_066_fan_spd', 'zone_065_temp',\n",
150
+ " 'zone_065_fan_spd', 'zone_064_temp', 'zone_064_fan_spd',\n",
151
+ " 'zone_042_temp', 'zone_042_fan_spd', 'zone_041_temp',\n",
152
+ " 'zone_041_fan_spd', 'zone_040_temp', 'zone_040_fan_spd',\n",
153
+ " 'zone_039_temp', 'zone_039_fan_spd', 'zone_038_temp',\n",
154
+ " 'zone_038_fan_spd', 'zone_037_temp', 'zone_037_fan_spd',\n",
155
+ " 'zone_036_temp', 'zone_036_fan_spd', 'rtu_001_fltrd_sa_flow_tn',\n",
156
+ " 'rtu_001_sa_temp', 'air_temp_set_1', 'air_temp_set_2',\n",
157
+ " 'dew_point_temperature_set_1d', 'relative_humidity_set_1',\n",
158
+ " 'solar_radiation_set_1', 'zone_069_cooling_sp', 'zone_069_heating_sp',\n",
159
+ " 'zone_067_cooling_sp', 'zone_067_heating_sp', 'zone_066_cooling_sp',\n",
160
+ " 'zone_066_heating_sp', 'zone_065_cooling_sp', 'zone_065_heating_sp',\n",
161
+ " 'zone_064_cooling_sp', 'zone_064_heating_sp', 'zone_042_cooling_sp',\n",
162
+ " 'zone_042_heating_sp', 'zone_041_cooling_sp', 'zone_041_heating_sp',\n",
163
+ " 'zone_039_cooling_sp', 'zone_039_heating_sp', 'zone_038_cooling_sp',\n",
164
+ " 'zone_038_heating_sp', 'zone_037_cooling_sp', 'zone_037_heating_sp',\n",
165
+ " 'zone_036_cooling_sp', 'zone_036_heating_sp'],\n",
166
+ " dtype='object')"
167
+ ]
168
+ },
169
+ "execution_count": 14,
170
+ "metadata": {},
171
+ "output_type": "execute_result"
172
+ }
173
+ ],
174
+ "source": [
175
+ "traindataset_df.columns"
176
+ ]
177
+ },
178
+ {
179
+ "cell_type": "code",
180
+ "execution_count": 15,
181
  "metadata": {},
182
  "outputs": [
183
  {
 
194
  },
195
  {
196
  "cell_type": "code",
197
+ "execution_count": 16,
198
  "metadata": {},
199
  "outputs": [
200
  {
 
203
  "(1073512, 391818)"
204
  ]
205
  },
206
+ "execution_count": 16,
207
  "metadata": {},
208
  "output_type": "execute_result"
209
  }
 
214
  },
215
  {
216
  "cell_type": "code",
217
+ "execution_count": 18,
218
  "metadata": {},
219
  "outputs": [
220
  {
 
223
  "['scaler_vav_1.pkl']"
224
  ]
225
  },
226
+ "execution_count": 18,
227
  "metadata": {},
228
  "output_type": "execute_result"
229
  }
 
241
  },
242
  {
243
  "cell_type": "code",
244
+ "execution_count": 10,
245
  "metadata": {},
246
  "outputs": [],
247
  "source": [
 
264
  },
265
  {
266
  "cell_type": "code",
267
+ "execution_count": null,
268
  "metadata": {},
269
  "outputs": [
270
  {
 
284
  },
285
  {
286
  "cell_type": "code",
287
+ "execution_count": null,
288
  "metadata": {},
289
  "outputs": [
290
  {
 
337
  "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3, batch_size=128, verbose=1, callbacks=[checkpoint_callback])"
338
  ]
339
  },
340
+ {
341
+ "cell_type": "code",
342
+ "execution_count": 13,
343
+ "metadata": {},
344
+ "outputs": [],
345
+ "source": [
346
+ "import keras\n",
347
+ "checkpoint_path = \"lstm_vav_01.keras\"\n",
348
+ "\n",
349
+ "model = keras.models.load_model(checkpoint_path)"
350
+ ]
351
+ },
352
  {
353
  "cell_type": "code",
354
  "execution_count": 55,
src/main.py CHANGED
@@ -22,21 +22,34 @@ def main():
22
 
23
  vav_pipeline = VAVPipeline(rtu_id=1, scaler_path="src/vav/models/scaler_vav_1.pkl")
24
 
25
- vav_anomalizer = VAVAnomalizer(prediction_model_path="src/vav/models/lstm__vav_01")
26
  # print(vav_pipeline.input_col_names)
27
 
28
  # print(len(vav_pipeline.output_col_names))
29
 
30
  def on_message(client, userdata, message):
31
- # print(json.loads(message.payload.decode()))
32
- df_new, df_trans = rtu_data_pipeline.fit(message)
33
- if not df_new is None and not df_trans is None:
34
- out = rtu_anomalizer.pipeline(df_new, df_trans, rtu_data_pipeline.scaler)
35
 
36
  broker_address = "localhost"
37
  broker_port = 1883
38
  topic = "sensor_data"
39
  client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION1)
 
40
  client.on_message = on_message
41
  client.connect(broker_address, broker_port)
42
  client.subscribe(topic)
 
22
 
23
  vav_pipeline = VAVPipeline(rtu_id=1, scaler_path="src/vav/models/scaler_vav_1.pkl")
24
 
25
+ vav_anomalizer = VAVAnomalizer(
26
+ rtu_id=1,
27
+ prediction_model_path="src/vav/models/lstm_vav_01.keras",
28
+ clustering_model_path="src/vav/models/kmeans_vav_1.pkl",
29
+ num_inputs=vav_pipeline.num_inputs,
30
+ num_outputs=vav_pipeline.num_outputs,
31
+ )
32
  # print(vav_pipeline.input_col_names)
33
 
34
  # print(len(vav_pipeline.output_col_names))
35
 
36
  def on_message(client, userdata, message):
37
+ # df_new, df_trans = rtu_data_pipeline.fit(message)
38
+ df_new_vav, df_trans_vav = vav_pipeline.fit(message)
39
+ vav_anomalizer.num_inputs = vav_pipeline.num_inputs
40
+ vav_anomalizer.num_outputs = vav_pipeline.num_outputs
41
+ # if not df_new is None and not df_trans is None:
42
+ # out = rtu_anomalizer.pipeline(df_new, df_trans, rtu_data_pipeline.scaler)
43
+ if not df_new_vav is None and not df_trans_vav is None:
44
+ out_vav = vav_anomalizer.pipeline(
45
+ df_new_vav, df_trans_vav, vav_pipeline.scaler
46
+ )
47
 
48
  broker_address = "localhost"
49
  broker_port = 1883
50
  topic = "sensor_data"
51
  client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION1)
52
+ print("Connecting to broker")
53
  client.on_message = on_message
54
  client.connect(broker_address, broker_port)
55
  client.subscribe(topic)
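
Both the notebook and src/main.py still build the client with CallbackAPIVersion.VERSION1, which is what triggers the DeprecationWarning captured in the notebook output. A sketch of the equivalent subscriber wiring on the version-2 callback API (not part of this commit; on_message keeps the same three-argument signature in paho-mqtt 2.x):

import paho.mqtt.client as mqtt

def on_message(client, userdata, message):
    # Same signature as before; only connect-style callbacks change under VERSION2.
    print(message.topic, len(message.payload))

client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
client.on_message = on_message
client.connect("localhost", 1883)
client.subscribe("sensor_data")
client.loop_forever()
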
src/vav/VAVPipeline.py CHANGED
@@ -1,15 +1,41 @@
1
  import json
2
  from sklearn.preprocessing import StandardScaler
3
- from pickle import load
4
  import numpy as np
5
 
6
 
7
  class VAVPipeline:
8
 
9
  def __init__(self, rtu_id, scaler_path=None, window_size=30):
10
-
11
  self.window_size = window_size
12
-
13
  if rtu_id == 1:
14
  self.zones = [69, 68, 67, 66, 65, 64, 42, 41, 40, 39, 38, 37, 36]
15
  if rtu_id == 2:
@@ -36,8 +62,6 @@ class VAVPipeline:
36
  28,
37
  ]
38
 
39
- outputs = ["temp", "fan_speed"]
40
- inputs = ["cooling_sp", "heating_sp"]
41
  self.output_col_names = []
42
  self.input_col_names = [
43
  f"rtu_00{rtu_id}_fltrd_sa_flow_tn",
@@ -48,21 +72,37 @@ class VAVPipeline:
48
  "relative_humidity_set_1",
49
  "solar_radiation_set_1",
50
  ]
51
- for zone in self.zones:
52
- for output in outputs:
53
- self.output_col_names.append(f"zone_0{zone}_{output}")
54
- for input in inputs:
55
- self.input_col_names.append(f"zone_0{zone}_{input}")
56
 
57
  self.column_names = self.output_col_names + self.input_col_names
58
 
59
  if scaler_path:
60
  self.scaler = self.get_scaler(scaler_path)
61
 
62
  def get_scaler(self, scaler_path):
63
- return load(scaler_path)
64
 
65
  def get_window(self, df):
66
  len_df = len(df)
67
  if len_df > self.window_size:
68
  return df[len_df - (self.window_size + 1) : len_df].astype("float32")
@@ -70,26 +110,99 @@ class VAVPipeline:
70
  return None
71
 
72
  def transform_window(self, df_window):
73
- return self.scaler.transform(df_window)
74
 
75
  def prepare_input(self, df_trans):
76
  return df_trans[: self.window_size, :].reshape(
77
  (1, self.window_size, len(self.column_names))
78
  )
79
 
80
  def extract_data_from_message(self, message):
81
- payload = json.loads(message.payload.decode())
 
82
 
83
- len_df = len(self.df)
84
 
85
- k = {}
86
- for col in self.column_names:
87
- k[col] = payload[col]
88
- self.df.loc[len_df] = k
89
  return self.df
90
 
91
  def fit(self, message):
92
  df = self.extract_data_from_message(message)
 
93
  df_window = self.get_window(df)
94
  if df_window is not None:
95
  df_trans = self.transform_window(df_window)
 
1
  import json
2
+ import joblib
3
+ import pandas as pd
4
  from sklearn.preprocessing import StandardScaler
 
5
  import numpy as np
6
 
7
 
8
  class VAVPipeline:
9
+ """
10
+ A class representing a Variable Air Volume (VAV) pipeline.
11
+
12
+ Attributes:
13
+ rtu_id (int): The ID of the RTU (Roof Top Unit).
14
+ scaler_path (str): The path to the scaler file.
15
+ window_size (int): The size of the sliding window.
16
+
17
+ Methods:
18
+ get_scaler(scaler_path): Loads the scaler from the given path.
19
+ get_window(df): Returns the sliding window of the given dataframe.
20
+ transform_window(df_window): Transforms the values of the dataframe using the scaler.
21
+ prepare_input(df_trans): Prepares the input for the model.
22
+ get_input_output(df): Extracts the input and output column names from the dataframe.
23
+ extract_data_from_message(message): Extracts data from the message payload and returns a dataframe.
24
+ fit(message): Fits the model with the extracted data and returns the prepared input and transformed data.
25
+ """
26
 
27
  def __init__(self, rtu_id, scaler_path=None, window_size=30):
28
+ """
29
+ Initializes a VAVPipeline object.
30
+
31
+ Args:
32
+ rtu_id (int): The ID of the RTU (Roof Top Unit).
33
+ scaler_path (str, optional): The path to the scaler file. Defaults to None.
34
+ window_size (int, optional): The size of the sliding window. Defaults to 30.
35
+ """
36
+ self.get_cols = True
37
  self.window_size = window_size
38
+ self.rtu_id = rtu_id
39
  if rtu_id == 1:
40
  self.zones = [69, 68, 67, 66, 65, 64, 42, 41, 40, 39, 38, 37, 36]
41
  if rtu_id == 2:
 
62
  28,
63
  ]
64
 
65
  self.output_col_names = []
66
  self.input_col_names = [
67
  f"rtu_00{rtu_id}_fltrd_sa_flow_tn",
 
72
  "relative_humidity_set_1",
73
  "solar_radiation_set_1",
74
  ]
75
 
76
  self.column_names = self.output_col_names + self.input_col_names
77
 
78
+ self.num_inputs = len(self.input_col_names)
79
+ self.num_outputs = len(self.output_col_names)
80
+
81
  if scaler_path:
82
  self.scaler = self.get_scaler(scaler_path)
83
 
84
  def get_scaler(self, scaler_path):
85
+ """
86
+ Loads the scaler from the given path.
87
+
88
+ Args:
89
+ scaler_path (str): The path to the scaler file.
90
+
91
+ Returns:
92
+ StandardScaler: The loaded scaler object.
93
+ """
94
+ return joblib.load(scaler_path)
95
 
96
  def get_window(self, df):
97
+ """
98
+ Returns the sliding window of the given dataframe.
99
+
100
+ Args:
101
+ df (pd.DataFrame): The dataframe.
102
+
103
+ Returns:
104
+ pd.DataFrame: The sliding window dataframe.
105
+ """
106
  len_df = len(df)
107
  if len_df > self.window_size:
108
  return df[len_df - (self.window_size + 1) : len_df].astype("float32")
 
110
  return None
111
 
112
  def transform_window(self, df_window):
113
+ """
114
+ Transforms the values of the dataframe using the scaler.
115
+
116
+ Args:
117
+ df_window (pd.DataFrame): The dataframe.
118
+
119
+ Returns:
120
+ np.ndarray: The transformed values.
121
+ """
122
+ return self.scaler.transform(df_window.values)
123
 
124
  def prepare_input(self, df_trans):
125
+ """
126
+ Prepares the input for the model.
127
+
128
+ Args:
129
+ df_trans (np.ndarray): The transformed values.
130
+
131
+ Returns:
132
+ np.ndarray: The prepared input.
133
+ """
134
  return df_trans[: self.window_size, :].reshape(
135
  (1, self.window_size, len(self.column_names))
136
  )
137
 
138
+ def get_input_output(self, df: pd.DataFrame):
139
+ """
140
+ Extracts the input and output column names from the dataframe.
141
+
142
+ Args:
143
+ df (pd.DataFrame): The dataframe.
144
+ """
145
+ for zone in self.zones:
146
+ for column in df.columns:
147
+ if (
148
+ f"zone_0{zone}" in column
149
+ and "co2" not in column
150
+ and "hw_valve" not in column
151
+ and "cooling_sp" not in column
152
+ and "heating_sp" not in column
153
+ ):
154
+ self.output_col_names.append(column)
155
+ self.input_col_names = [
156
+ f"rtu_00{self.rtu_id}_fltrd_sa_flow_tn",
157
+ f"rtu_00{self.rtu_id}_sa_temp",
158
+ "air_temp_set_1",
159
+ "air_temp_set_2",
160
+ "dew_point_temperature_set_1d",
161
+ "relative_humidity_set_1",
162
+ "solar_radiation_set_1",
163
+ ]
164
+ for zone in self.zones:
165
+ for column in df.columns:
166
+ if f"zone_0{zone}" in column:
167
+ if "cooling_sp" in column or "heating_sp" in column:
168
+ self.input_col_names.append(column)
169
+ self.column_names = self.output_col_names + self.input_col_names
170
+ self.num_inputs = len(self.input_col_names)
171
+ self.num_outputs = len(self.output_col_names)
172
+ self.df = pd.DataFrame(columns=self.column_names)
173
+
174
  def extract_data_from_message(self, message):
175
+ """
176
+ Extracts data from the message payload and returns a dataframe.
177
 
178
+ Args:
179
+ message: The message containing the payload.
180
+
181
+ Returns:
182
+ pd.DataFrame: The extracted data as a dataframe.
183
+ """
184
+ payload = json.loads(message.payload.decode())
185
+ df = pd.DataFrame.from_dict(payload, orient="index").T
186
+ if self.get_cols == True:
187
+ self.get_input_output(df)
188
+ self.get_cols = False
189
+ df = df[self.column_names]
190
+ self.df.loc[len(self.df)] = df.values[0]
191
 
192
  return self.df
193
 
194
  def fit(self, message):
195
+ """
196
+ Fits the model with the extracted data and returns the prepared input and transformed data.
197
+
198
+ Args:
199
+ message: The message containing the data.
200
+
201
+ Returns:
202
+ tuple: A tuple containing the prepared input and transformed data.
203
+ """
204
  df = self.extract_data_from_message(message)
205
+
206
  df_window = self.get_window(df)
207
  if df_window is not None:
208
  df_trans = self.transform_window(df_window)
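
A rough usage sketch of the refactored VAVPipeline, mirroring how src/main.py wires it up (import path assumed from the file layout); the message below is fabricated for illustration, the scaler path assumes the repo root as working directory, and per the docstrings fit only yields a prepared window once more than window_size rows have accumulated:

import json
from types import SimpleNamespace

from src.vav.VAVPipeline import VAVPipeline

pipeline = VAVPipeline(rtu_id=1, scaler_path="src/vav/models/scaler_vav_1.pkl")

# Fabricated stand-in for a paho-mqtt message on the "sensor_data" topic;
# a real payload carries every RTU, zone, and weather column.
payload = {
    "rtu_001_fltrd_sa_flow_tn": 0.0, "rtu_001_sa_temp": 68.0,
    "air_temp_set_1": 70.0, "air_temp_set_2": 69.5,
    "dew_point_temperature_set_1d": 50.0, "relative_humidity_set_1": 40.0,
    "solar_radiation_set_1": 120.0,
    "zone_069_temp": 71.0, "zone_069_fan_spd": 0.3,
    "zone_069_cooling_sp": 74.0, "zone_069_heating_sp": 68.0,
}
message = SimpleNamespace(payload=json.dumps(payload).encode())

result = pipeline.fit(message)  # returns nothing useful until > window_size rows arrive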