sycod commited on
Commit
38dbb38
·
1 Parent(s): a03ee84

first train ok, oversampling begun

Browse files
Files changed (5) hide show
  1. .gitignore +1 -0
  2. EDA.ipynb +562 -266
  3. README.md +3 -2
  4. config.yaml +15 -8
  5. src/load_data.py +16 -2
.gitignore CHANGED
@@ -13,5 +13,6 @@ eval-results/
13
  eval-queue-bk/
14
  eval-results-bk/
15
  logs/
 
16
  pyro-sdis/
17
  venv/
 
13
  eval-queue-bk/
14
  eval-results-bk/
15
  logs/
16
+ models/
17
  pyro-sdis/
18
  venv/
EDA.ipynb CHANGED
@@ -5,15 +5,20 @@
5
  "metadata": {},
6
  "source": [
7
  "**Table of contents**<a id='toc0_'></a> \n",
8
- "- [🚧 Info](#toc1_) \n",
9
  "- [Load and import](#toc2_) \n",
10
  " - [Import libraries](#toc2_1_) \n",
11
  " - [Load configuration](#toc2_2_) \n",
12
- " - [Load data](#toc2_3_) \n",
13
  "- [EDA](#toc3_) \n",
14
- " - [🚧 Image exploration](#toc3_1_) \n",
15
- " - [🚧 fin Code JL](#toc3_2_) \n",
16
- " - [Random Baseline](#toc3_3_) \n",
 
 
 
 
 
17
  "\n",
18
  "<!-- vscode-jupyter-toc-config\n",
19
  "\tnumbering=false\n",
@@ -29,7 +34,7 @@
29
  "cell_type": "markdown",
30
  "metadata": {},
31
  "source": [
32
- "# <a id='toc1_'></a>[🚧 Info](#toc0_)\n",
33
  "\n",
34
  "https://huggingface.co/datasets/pyronear/pyro-sdis\n",
35
  "\n",
@@ -61,25 +66,25 @@
61
  "text": [
62
  "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
63
  " from .autonotebook import tqdm as notebook_tqdm\n",
64
- "[codecarbon WARNING @ 00:06:12] Multiple instances of codecarbon are allowed to run at the same time.\n",
65
- "[codecarbon INFO @ 00:06:12] [setup] RAM Tracking...\n",
66
- "[codecarbon INFO @ 00:06:12] [setup] CPU Tracking...\n",
67
- "[codecarbon WARNING @ 00:06:12] No CPU tracking mode found. Falling back on CPU constant mode. \n",
68
  " Mac OS and ARM processor detected: Please enable PowerMetrics sudo to measure CPU\n",
69
  "\n",
70
- "[codecarbon INFO @ 00:06:12] CPU Model on constant consumption mode: Apple M1\n",
71
- "[codecarbon INFO @ 00:06:12] [setup] GPU Tracking...\n",
72
- "[codecarbon INFO @ 00:06:12] No GPU found.\n",
73
- "[codecarbon INFO @ 00:06:12] >>> Tracker's metadata:\n",
74
- "[codecarbon INFO @ 00:06:12] Platform system: macOS-15.2-arm64-arm-64bit\n",
75
- "[codecarbon INFO @ 00:06:12] Python version: 3.12.7\n",
76
- "[codecarbon INFO @ 00:06:12] CodeCarbon version: 2.8.3\n",
77
- "[codecarbon INFO @ 00:06:12] Available RAM : 16.000 GB\n",
78
- "[codecarbon INFO @ 00:06:12] CPU count: 8\n",
79
- "[codecarbon INFO @ 00:06:12] CPU model: Apple M1\n",
80
- "[codecarbon INFO @ 00:06:12] GPU count: None\n",
81
- "[codecarbon INFO @ 00:06:12] GPU model: None\n",
82
- "[codecarbon INFO @ 00:06:15] Saving emissions data to file /Users/julmat/Documents/hugging_face/frugal_cviz/emissions.csv\n"
83
  ]
84
  }
85
  ],
@@ -88,6 +93,7 @@
88
  "from fastapi import APIRouter\n",
89
  "import logging\n",
90
  "import matplotlib.pyplot as plt\n",
 
91
  "from PIL import Image, ImageOps, ImageEnhance, ImageFilter\n",
92
  "import plotly.express as px\n",
93
  "import random\n",
@@ -96,9 +102,10 @@
96
  "# ML\n",
97
  "from keras import Model\n",
98
  "from keras.applications import EfficientNetB0\n",
99
- "from keras.utils import image_dataset_from_directory\n",
100
  "from keras.layers import Flatten, Dense\n",
 
101
  "from keras.optimizers import AdamW\n",
 
102
  "import numpy as np\n",
103
  "import pandas as pd\n",
104
  "from scipy.ndimage import median_filter\n",
@@ -113,6 +120,7 @@
113
  "from tasks.utils.emissions import tracker, clean_emissions_data, get_space_info\n",
114
  "from tasks.image import parse_boxes, compute_iou, compute_max_iou\n",
115
  "\n",
 
116
  "# Logging configuration (see all outputs, even DEBUG or INFO)\n",
117
  "logger = logging.getLogger()\n",
118
  "logger.setLevel(logging.INFO)"
@@ -134,11 +142,17 @@
134
  "# local config\n",
135
  "with open(\"config.yaml\", \"r\") as f:\n",
136
  " cfg = yaml.safe_load(f)\n",
 
137
  "OUTPUT_DIR = cfg[\"data_root_dir\"]\n",
138
  "DB_INFO_URI = cfg[\"db_info_uri\"]\n",
139
  "REPO_ID = cfg[\"repo_id\"]\n",
140
  "SPLIT_SIZE = cfg[\"split_size\"]\n",
141
  "RDM_SEED = cfg[\"rdm_seed\"]\n",
 
 
 
 
 
142
  "\n",
143
  "request = ImageEvaluationRequest()"
144
  ]
@@ -771,7 +785,7 @@
771
  "cell_type": "markdown",
772
  "metadata": {},
773
  "source": [
774
- "## Classes distribution"
775
  ]
776
  },
777
  {
@@ -1742,20 +1756,20 @@
1742
  "cell_type": "markdown",
1743
  "metadata": {},
1744
  "source": [
1745
- "## 🚧 Constats :\n",
1746
  "\n",
1747
  "- 1 seul définition : 1280x720\n",
1748
  "- 1 seul format : JPG\n",
1749
  "- 1 seul mode : RGB\n",
1750
  "\n",
1751
- "- voir répartitino des caméras et partenaires selon les splits"
1752
  ]
1753
  },
1754
  {
1755
  "cell_type": "markdown",
1756
  "metadata": {},
1757
  "source": [
1758
- "## <a id='toc3_1_'></a>[🚧 Image exploration](#toc0_)"
1759
  ]
1760
  },
1761
  {
@@ -1906,7 +1920,7 @@
1906
  "cell_type": "markdown",
1907
  "metadata": {},
1908
  "source": [
1909
- "# Data preprocessing"
1910
  ]
1911
  },
1912
  {
@@ -1941,28 +1955,7 @@
1941
  "name": "stderr",
1942
  "output_type": "stream",
1943
  "text": [
1944
- "/Users/julmat/Documents/hugging_face/frugal_cviz/src/load_data.py:189: SettingWithCopyWarning:\n",
1945
- "\n",
1946
- "\n",
1947
- "A value is trying to be set on a copy of a slice from a DataFrame.\n",
1948
- "Try using .loc[row_indexer,col_indexer] = value instead\n",
1949
- "\n",
1950
- "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
1951
- "\n",
1952
- "/Users/julmat/Documents/hugging_face/frugal_cviz/src/load_data.py:195: SettingWithCopyWarning:\n",
1953
- "\n",
1954
- "\n",
1955
- "A value is trying to be set on a copy of a slice from a DataFrame\n",
1956
- "\n",
1957
- "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
1958
- "\n",
1959
- "/Users/julmat/Documents/hugging_face/frugal_cviz/src/load_data.py:196: SettingWithCopyWarning:\n",
1960
- "\n",
1961
- "\n",
1962
- "A value is trying to be set on a copy of a slice from a DataFrame\n",
1963
- "\n",
1964
- "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
1965
- "\n"
1966
  ]
1967
  }
1968
  ],
@@ -1974,7 +1967,251 @@
1974
  "cell_type": "markdown",
1975
  "metadata": {},
1976
  "source": [
1977
- "# 🦄🦄 CHECKPOINT 🦄🦄"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1978
  ]
1979
  },
1980
  {
@@ -2002,83 +2239,122 @@
2002
  "cell_type": "markdown",
2003
  "metadata": {},
2004
  "source": [
2005
- "# Model Training"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2006
  ]
2007
  },
2008
  {
2009
  "cell_type": "code",
2010
- "execution_count": 12,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2011
  "metadata": {},
2012
  "outputs": [
2013
  {
2014
  "name": "stdout",
2015
  "output_type": "stream",
2016
  "text": [
2017
- "cellule non exécutée\n"
 
 
 
 
 
 
 
2018
  ]
2019
  }
2020
  ],
2021
  "source": [
2022
- "%%script echo \"cellule non exécutée\"\n",
2023
- "\n",
2024
- "from keras import Model\n",
2025
- "from keras.applications import EfficientNetB0\n",
2026
- "from keras.layers import Flatten, Dense\n",
2027
- "from keras.optimizers import AdamW\n",
2028
- "from keras.utils import image_dataset_from_directory\n",
2029
- "from sklearn.model_selection import train_test_split\n",
2030
- "import tensorflow as tf\n",
2031
- "from src.models import eval_pretrained_model, ConditionalAugmentation\n",
2032
- "\n",
2033
- "\n",
2034
- "# *****************************************************************************\n",
2035
- "# CONFIGURATION\n",
2036
- "# *****************************************************************************\n",
2037
- "# load config\n",
2038
- "MODEL_PREV = cfg[\"previous\"][\"model\"]\n",
2039
- "CLASSES_10 = cfg[\"previous\"][\"classes_10\"]\n",
2040
- "TRAIN_DIR_10 = os.path.join(DATA_DIR, cfg[\"previous\"][\"train_dir\"]) + \"_10\"\n",
2041
- "TEST_DIR_10 = os.path.join(DATA_DIR, cfg[\"previous\"][\"test_dir\"]) + \"_10\"\n",
2042
- "CHKPT_DIR = os.path.join(DATA_DIR, cfg[\"previous\"][\"checkpoint_dir\"])\n",
2043
- "LOG_DIR = cfg[\"previous\"][\"log_dir\"]\n",
2044
  "\n",
2045
- "# config model\n",
2046
- "model_name = \"EfficientNetB0\"\n",
2047
- "input_size = (224, 224)\n",
2048
- "batch_size = 48\n",
2049
- "n_epochs = 100\n",
2050
- "optimizer = AdamW(learning_rate=0.0002, weight_decay=0.05)\n",
2051
- "loss = \"sparse_categorical_crossentropy\"\n",
2052
- "metrics = [\"accuracy\"]\n",
2053
- "\n",
2054
- "# *****************************************************************************\n",
2055
- "# DATA PREPARATION\n",
2056
- "# *****************************************************************************\n",
2057
- "y = img_df[\"class_label\"]\n",
2058
- "X = img_df[\"img_uri\"]\n",
2059
- "# train / test split\n",
2060
- "X_train, X_test, y_train, y_test = train_test_split(\n",
2061
- " X, y, test_size=0.1, stratify=y, random_state=42\n",
2062
  ")\n",
2063
- "# create samples directories\n",
2064
- "if not os.path.exists(TRAIN_DIR_10):\n",
2065
- " os.makedirs(TRAIN_DIR_10)\n",
2066
- "if not os.path.exists(TEST_DIR_10):\n",
2067
- " os.makedirs(TEST_DIR_10)\n",
2068
- "\n",
2069
- "print(\"Train set: \")\n",
2070
- "copy_images_prev(X_train, TRAIN_DIR_10)\n",
2071
- "print(\"Test set: \")\n",
2072
- "copy_images_prev(X_test, TEST_DIR_10)\n",
2073
  "\n",
2074
- "print(\"Train / val datasets:\")\n",
2075
- "train_ds, val_ds = image_dataset_from_directory(\n",
2076
- " TRAIN_DIR_10,\n",
2077
  " labels=\"inferred\", # class names upon folders structure\n",
2078
  " label_mode=\"int\", # integer encoding\n",
2079
- " validation_split=0.2, # train / val split\n",
2080
- " subset=\"both\", # returns both train and val datasets\n",
2081
- " shuffle=True, # shuffles images\n",
2082
  " seed=42, # random seed\n",
2083
  " image_size=input_size, # automatic resizing\n",
2084
  " batch_size=batch_size, # tensor shape[0]\n",
@@ -2086,59 +2362,13 @@
2086
  "\n",
2087
  "print(\"\\nTest dataset:\")\n",
2088
  "test_ds = image_dataset_from_directory(\n",
2089
- " TEST_DIR_10,\n",
2090
  " labels=\"inferred\", # class names upon folders structure\n",
2091
  " label_mode=\"int\", # integer encoding\n",
2092
- " shuffle=False, # do not shuffles images\n",
2093
  " seed=42, # random seed\n",
2094
  " image_size=input_size, # automatic resizing\n",
2095
  " batch_size=batch_size, # tensor shape[0]\n",
2096
- ")\n",
2097
- "\n",
2098
- "# *****************************************************************************\n",
2099
- "# MODEL\n",
2100
- "# *****************************************************************************\n",
2101
- "# load pre-trained model without top layers\n",
2102
- "model = EfficientNetB0(\n",
2103
- " weights=\"imagenet\", # pre-trained weights\n",
2104
- " include_top=False, # no dense layer\n",
2105
- " input_shape=(input_size[0], input_size[1], 3), # input shape\n",
2106
- ")\n",
2107
- "# create explicit input layer\n",
2108
- "inputs = tf.keras.Input(shape=(input_size[0], input_size[1], 3))\n",
2109
- "# add data augmentation\n",
2110
- "augmented = ConditionalAugmentation(rate=0.4)(inputs)\n",
2111
- "x = model(augmented)\n",
2112
- "\n",
2113
- "# flatten output\n",
2114
- "x = Flatten()(x)\n",
2115
- "# for feature extraction only\n",
2116
- "for layer in model.layers:\n",
2117
- " layer.trainable = False\n",
2118
- "# new FC layer for 3 classes classification\n",
2119
- "predictions = Dense(10, activation=\"softmax\")(x)\n",
2120
- "# define new model\n",
2121
- "effnetB0_10 = Model(inputs=inputs, outputs=predictions)\n",
2122
- "# display model summary\n",
2123
- "effnetB0_10.summary()\n",
2124
- "\n",
2125
- "# *****************************************************************************\n",
2126
- "# TRAINING\n",
2127
- "# *****************************************************************************\n",
2128
- "model_trained, history = eval_pretrained_model(\n",
2129
- " model=effnetB0_10,\n",
2130
- " train_ds=train_ds,\n",
2131
- " val_ds=val_ds,\n",
2132
- " test_ds=test_ds,\n",
2133
- " LOG_DIR=LOG_DIR,\n",
2134
- " CHKPT_DIR=CHKPT_DIR,\n",
2135
- " model_name=model_name,\n",
2136
- " input_size=input_size,\n",
2137
- " batch_size=batch_size,\n",
2138
- " n_epochs=n_epochs,\n",
2139
- " optimizer=optimizer,\n",
2140
- " loss=loss,\n",
2141
- " metrics=metrics,\n",
2142
  ")"
2143
  ]
2144
  },
@@ -2146,51 +2376,32 @@
2146
  "cell_type": "markdown",
2147
  "metadata": {},
2148
  "source": [
2149
- "Common config"
2150
- ]
2151
- },
2152
- {
2153
- "cell_type": "code",
2154
- "execution_count": 21,
2155
- "metadata": {},
2156
- "outputs": [],
2157
- "source": [
2158
- "input_size = (224, 224)\n",
2159
- "batch_size = 48\n",
2160
- "n_epochs = 100\n",
2161
- "optimizer = AdamW(learning_rate=0.0002, weight_decay=0.05)\n",
2162
- "loss = \"sparse_categorical_crossentropy\"\n",
2163
- "metrics = [\"accuracy\"]"
2164
- ]
2165
- },
2166
- {
2167
- "cell_type": "markdown",
2168
- "metadata": {},
2169
- "source": [
2170
- "Train EfficientNet"
2171
  ]
2172
  },
2173
  {
2174
  "cell_type": "code",
2175
- "execution_count": 22,
2176
  "metadata": {},
2177
  "outputs": [
2178
  {
2179
  "name": "stderr",
2180
  "output_type": "stream",
2181
  "text": [
2182
- "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/keras/src/layers/layer.py:393: UserWarning: `build()` was called on layer 'conditional_augmentation_1', however the layer does not have a `build()` method implemented and it looks like it has unbuilt state. This will cause the layer to be marked as built, despite not being actually built, which may cause failures down the line. Make sure to implement a proper `build()` method.\n",
2183
- " warnings.warn(\n"
 
 
2184
  ]
2185
  },
2186
  {
2187
  "data": {
2188
  "text/html": [
2189
- "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\">Model: \"functional\"</span>\n",
2190
  "</pre>\n"
2191
  ],
2192
  "text/plain": [
2193
- "\u001b[1mModel: \"functional\"\u001b[0m\n"
2194
  ]
2195
  },
2196
  "metadata": {},
@@ -2202,16 +2413,16 @@
2202
  "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\">┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
2203
  "┃<span style=\"font-weight: bold\"> Layer (type) </span>┃<span style=\"font-weight: bold\"> Output Shape </span>┃<span style=\"font-weight: bold\"> Param # </span>┃\n",
2204
  "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
2205
- "│ input_layer_3 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">InputLayer</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">3</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
2206
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2207
- "│ conditional_augmentation_1 │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">3</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
2208
  "│ (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">ConditionalAugmentation</span>) │ │ │\n",
2209
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2210
  "│ efficientnetb0 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Functional</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">7</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">7</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">1280</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">4,049,571</span> │\n",
2211
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2212
- "│ flatten_1 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Flatten</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">62720</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
2213
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2214
- "│ dense_1 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">3</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">188,163</span> │\n",
2215
  "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
2216
  "</pre>\n"
2217
  ],
@@ -2219,16 +2430,16 @@
2219
  "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
2220
  "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n",
2221
  "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
2222
- "│ input_layer_3 (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
2223
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2224
- "│ conditional_augmentation_1 │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
2225
  "│ (\u001b[38;5;33mConditionalAugmentation\u001b[0m) │ │ │\n",
2226
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2227
  "│ efficientnetb0 (\u001b[38;5;33mFunctional\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m7\u001b[0m, \u001b[38;5;34m7\u001b[0m, \u001b[38;5;34m1280\u001b[0m) │ \u001b[38;5;34m4,049,571\u001b[0m │\n",
2228
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2229
- "│ flatten_1 (\u001b[38;5;33mFlatten\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m62720\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
2230
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2231
- "│ dense_1 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m188,163\u001b[0m │\n",
2232
  "└─────────────────────────────────┴────────────────────────┴───────────────┘\n"
2233
  ]
2234
  },
@@ -2238,11 +2449,11 @@
2238
  {
2239
  "data": {
2240
  "text/html": [
2241
- "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Total params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">4,237,734</span> (16.17 MB)\n",
2242
  "</pre>\n"
2243
  ],
2244
  "text/plain": [
2245
- "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m4,237,734\u001b[0m (16.17 MB)\n"
2246
  ]
2247
  },
2248
  "metadata": {},
@@ -2251,11 +2462,11 @@
2251
  {
2252
  "data": {
2253
  "text/html": [
2254
- "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Trainable params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">188,163</span> (735.01 KB)\n",
2255
  "</pre>\n"
2256
  ],
2257
  "text/plain": [
2258
- "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m188,163\u001b[0m (735.01 KB)\n"
2259
  ]
2260
  },
2261
  "metadata": {},
@@ -2276,30 +2487,28 @@
2276
  }
2277
  ],
2278
  "source": [
2279
- "model_name = \"EfficientNetB0\"\n",
2280
- "\n",
2281
- "# load pre-trained model without top layers\n",
2282
  "model = EfficientNetB0(\n",
2283
  " weights=\"imagenet\", # pre-trained weights\n",
2284
  " include_top=False, # no dense layer\n",
2285
  " input_shape=(input_size[0], input_size[1], 3), # input shape\n",
2286
  ")\n",
2287
- "# create explicit input layer\n",
2288
  "inputs = tf.keras.Input(shape=(input_size[0], input_size[1], 3))\n",
2289
  "# add data augmentation\n",
2290
  "augmented = ConditionalAugmentation(rate=0.4)(inputs)\n",
2291
  "x = model(augmented)\n",
2292
  "\n",
2293
- "# flatten output\n",
2294
  "x = Flatten()(x)\n",
2295
- "# for feature extraction only\n",
2296
  "for layer in model.layers:\n",
2297
  " layer.trainable = False\n",
2298
- "# new FC layer for 3 classes classification\n",
2299
- "predictions = Dense(3, activation=\"softmax\")(x)\n",
2300
- "# define new model\n",
2301
  "effnetB0 = Model(inputs=inputs, outputs=predictions)\n",
2302
- "# display model summary\n",
2303
  "effnetB0.summary()"
2304
  ]
2305
  },
@@ -2307,71 +2516,165 @@
2307
  "cell_type": "markdown",
2308
  "metadata": {},
2309
  "source": [
2310
- "Create datasets from local images and labels"
2311
- ]
2312
- },
2313
- {
2314
- "cell_type": "code",
2315
- "execution_count": null,
2316
- "metadata": {},
2317
- "outputs": [],
2318
- "source": [
2319
- "train_ds = image_dataset_from_directory(\n",
2320
- " TRAIN_DIR,\n",
2321
- " labels=\"inferred\", # class names upon folders structure\n",
2322
- " label_mode=\"int\", # integer encoding\n",
2323
- " # validation_split=0.2, # train / val split\n",
2324
- " # subset=\"both\", # returns both train and val datasets\n",
2325
- " shuffle=True, # shuffles images\n",
2326
- " seed=42, # random seed\n",
2327
- " image_size=input_size, # automatic resizing\n",
2328
- " batch_size=batch_size, # tensor shape[0]\n",
2329
- ")"
2330
  ]
2331
  },
2332
  {
2333
- "cell_type": "code",
2334
- "execution_count": null,
2335
  "metadata": {},
2336
- "outputs": [],
2337
  "source": [
2338
- "# test_ds = image_dataset_from_directory(\n",
2339
- "# TEST_DIR,\n",
2340
- "# labels=\"inferred\", # class names upon folders structure\n",
2341
- "# label_mode=\"int\", # integer encoding\n",
2342
- "# shuffle=False, # do not shuffles images\n",
2343
- "# seed=42, # random seed\n",
2344
- "# image_size=input_size, # automatic resizing\n",
2345
- "# batch_size=batch_size, # tensor shape[0]\n",
2346
- "# )"
2347
  ]
2348
  },
2349
  {
2350
  "cell_type": "code",
2351
- "execution_count": 14,
2352
  "metadata": {},
2353
- "outputs": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2354
  "source": [
2355
- "# model_trained, history = eval_pretrained_model(\n",
2356
- "# model=effnetB0,\n",
2357
- "# train_ds=insert_train_ds_here,\n",
2358
- "# val_ds=insert_val_ds_here,\n",
2359
- "# test_ds=insert_test_ds_here,\n",
2360
- "# LOG_DIR=LOG_DIR,\n",
2361
- "# CHKPT_DIR=CHKPT_DIR,\n",
2362
- "# model_name=model_name,\n",
2363
- "# input_size=input_size,\n",
2364
- "# batch_size=batch_size,\n",
2365
- "# n_epochs=n_epochs,\n",
2366
- "# optimizer=optimizer,\n",
2367
- "# loss=loss,\n",
2368
- "# metrics=metrics,\n",
2369
- "# )"
2370
  ]
2371
  },
2372
  {
2373
  "cell_type": "code",
2374
- "execution_count": 15,
2375
  "metadata": {},
2376
  "outputs": [
2377
  {
@@ -2381,7 +2684,7 @@
2381
  "traceback": [
2382
  "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
2383
  "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
2384
- "Cell \u001b[0;32mIn[15], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mstop\u001b[49m\n",
2385
  "\u001b[0;31mNameError\u001b[0m: name 'stop' is not defined"
2386
  ]
2387
  }
@@ -2434,14 +2737,7 @@
2434
  "cell_type": "markdown",
2435
  "metadata": {},
2436
  "source": [
2437
- "# <a id='toc3_2_'></a>[🚧 fin Code JL](#toc0_)"
2438
- ]
2439
- },
2440
- {
2441
- "cell_type": "markdown",
2442
- "metadata": {},
2443
- "source": [
2444
- "## <a id='toc3_3_'></a>[Random Baseline](#toc0_)"
2445
  ]
2446
  },
2447
  {
 
5
  "metadata": {},
6
  "source": [
7
  "**Table of contents**<a id='toc0_'></a> \n",
8
+ "- [Sources](#toc1_) \n",
9
  "- [Load and import](#toc2_) \n",
10
  " - [Import libraries](#toc2_1_) \n",
11
  " - [Load configuration](#toc2_2_) \n",
12
+ " - [Load, split, export data](#toc2_3_) \n",
13
  "- [EDA](#toc3_) \n",
14
+ " - [Classes distribution](#toc3_1_) \n",
15
+ " - [🚧 Constats :](#toc3_2_) \n",
16
+ " - [🚧 Image exploration](#toc3_3_) \n",
17
+ "- [Data preprocessing](#toc4_) \n",
18
+ "- [🦄🦄 CHECKPOINT 🦄🦄](#toc5_) \n",
19
+ "- [Model Training](#toc6_) \n",
20
+ "- [🚧 fin Code JL](#toc7_) \n",
21
+ " - [Random Baseline](#toc7_1_) \n",
22
  "\n",
23
  "<!-- vscode-jupyter-toc-config\n",
24
  "\tnumbering=false\n",
 
34
  "cell_type": "markdown",
35
  "metadata": {},
36
  "source": [
37
+ "# <a id='toc1_'></a>[Sources](#toc0_)\n",
38
  "\n",
39
  "https://huggingface.co/datasets/pyronear/pyro-sdis\n",
40
  "\n",
 
66
  "text": [
67
  "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
68
  " from .autonotebook import tqdm as notebook_tqdm\n",
69
+ "[codecarbon WARNING @ 10:42:32] Multiple instances of codecarbon are allowed to run at the same time.\n",
70
+ "[codecarbon INFO @ 10:42:32] [setup] RAM Tracking...\n",
71
+ "[codecarbon INFO @ 10:42:32] [setup] CPU Tracking...\n",
72
+ "[codecarbon WARNING @ 10:42:33] No CPU tracking mode found. Falling back on CPU constant mode. \n",
73
  " Mac OS and ARM processor detected: Please enable PowerMetrics sudo to measure CPU\n",
74
  "\n",
75
+ "[codecarbon INFO @ 10:42:33] CPU Model on constant consumption mode: Apple M1\n",
76
+ "[codecarbon INFO @ 10:42:33] [setup] GPU Tracking...\n",
77
+ "[codecarbon INFO @ 10:42:33] No GPU found.\n",
78
+ "[codecarbon INFO @ 10:42:33] >>> Tracker's metadata:\n",
79
+ "[codecarbon INFO @ 10:42:33] Platform system: macOS-15.2-arm64-arm-64bit\n",
80
+ "[codecarbon INFO @ 10:42:33] Python version: 3.12.7\n",
81
+ "[codecarbon INFO @ 10:42:33] CodeCarbon version: 2.8.3\n",
82
+ "[codecarbon INFO @ 10:42:33] Available RAM : 16.000 GB\n",
83
+ "[codecarbon INFO @ 10:42:33] CPU count: 8\n",
84
+ "[codecarbon INFO @ 10:42:33] CPU model: Apple M1\n",
85
+ "[codecarbon INFO @ 10:42:33] GPU count: None\n",
86
+ "[codecarbon INFO @ 10:42:33] GPU model: None\n",
87
+ "[codecarbon INFO @ 10:42:34] Saving emissions data to file /Users/julmat/Documents/hugging_face/frugal_cviz/emissions.csv\n"
88
  ]
89
  }
90
  ],
 
93
  "from fastapi import APIRouter\n",
94
  "import logging\n",
95
  "import matplotlib.pyplot as plt\n",
96
+ "import os\n",
97
  "from PIL import Image, ImageOps, ImageEnhance, ImageFilter\n",
98
  "import plotly.express as px\n",
99
  "import random\n",
 
102
  "# ML\n",
103
  "from keras import Model\n",
104
  "from keras.applications import EfficientNetB0\n",
 
105
  "from keras.layers import Flatten, Dense\n",
106
+ "from keras.metrics import Precision, Recall\n",
107
  "from keras.optimizers import AdamW\n",
108
+ "from keras.utils import image_dataset_from_directory\n",
109
  "import numpy as np\n",
110
  "import pandas as pd\n",
111
  "from scipy.ndimage import median_filter\n",
 
120
  "from tasks.utils.emissions import tracker, clean_emissions_data, get_space_info\n",
121
  "from tasks.image import parse_boxes, compute_iou, compute_max_iou\n",
122
  "\n",
123
+ "\n",
124
  "# Logging configuration (see all outputs, even DEBUG or INFO)\n",
125
  "logger = logging.getLogger()\n",
126
  "logger.setLevel(logging.INFO)"
 
142
  "# local config\n",
143
  "with open(\"config.yaml\", \"r\") as f:\n",
144
  " cfg = yaml.safe_load(f)\n",
145
+ "# Data\n",
146
  "OUTPUT_DIR = cfg[\"data_root_dir\"]\n",
147
  "DB_INFO_URI = cfg[\"db_info_uri\"]\n",
148
  "REPO_ID = cfg[\"repo_id\"]\n",
149
  "SPLIT_SIZE = cfg[\"split_size\"]\n",
150
  "RDM_SEED = cfg[\"rdm_seed\"]\n",
151
+ "# Model (common)\n",
152
+ "MODELS_ROOT_DIR = cfg[\"models_common\"][\"models_root_dir\"]\n",
153
+ "CLASSES = cfg[\"models_common\"][\"classes\"]\n",
154
+ "LOG_DIR = cfg[\"models_common\"][\"log_dir\"]\n",
155
+ "CHKPT_DIR = cfg[\"models_common\"][\"chkpts_dir\"]\n",
156
  "\n",
157
  "request = ImageEvaluationRequest()"
158
  ]
 
785
  "cell_type": "markdown",
786
  "metadata": {},
787
  "source": [
788
+ "## <a id='toc3_1_'></a>[Classes distribution](#toc0_)"
789
  ]
790
  },
791
  {
 
1756
  "cell_type": "markdown",
1757
  "metadata": {},
1758
  "source": [
1759
+ "## <a id='toc3_2_'></a>[🚧 Constats :](#toc0_)\n",
1760
  "\n",
1761
  "- 1 seul définition : 1280x720\n",
1762
  "- 1 seul format : JPG\n",
1763
  "- 1 seul mode : RGB\n",
1764
  "\n",
1765
+ "- voir répartition des caméras et partenaires selon les splits"
1766
  ]
1767
  },
1768
  {
1769
  "cell_type": "markdown",
1770
  "metadata": {},
1771
  "source": [
1772
+ "## <a id='toc3_3_'></a>[🚧 Image exploration](#toc0_)"
1773
  ]
1774
  },
1775
  {
 
1920
  "cell_type": "markdown",
1921
  "metadata": {},
1922
  "source": [
1923
+ "# <a id='toc4_'></a>[Data preprocessing](#toc0_)"
1924
  ]
1925
  },
1926
  {
 
1955
  "name": "stderr",
1956
  "output_type": "stream",
1957
  "text": [
1958
+ "INFO:root:data/keras already exists: data already formatted\n"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1959
  ]
1960
  }
1961
  ],
 
1967
  "cell_type": "markdown",
1968
  "metadata": {},
1969
  "source": [
1970
+ "Balance classes by oversampling \"no_smoke\" label"
1971
+ ]
1972
+ },
1973
+ {
1974
+ "cell_type": "markdown",
1975
+ "metadata": {},
1976
+ "source": [
1977
+ "# <a id='toc5_'></a>[🦄🦄 CHECKPOINT 🦄🦄](#toc0_)"
1978
+ ]
1979
+ },
1980
+ {
1981
+ "cell_type": "markdown",
1982
+ "metadata": {},
1983
+ "source": [
1984
+ "# 🚧 OVERSAMPLING"
1985
+ ]
1986
+ },
1987
+ {
1988
+ "cell_type": "code",
1989
+ "execution_count": 72,
1990
+ "metadata": {},
1991
+ "outputs": [
1992
+ {
1993
+ "data": {
1994
+ "text/html": [
1995
+ "<div>\n",
1996
+ "<style scoped>\n",
1997
+ " .dataframe tbody tr th:only-of-type {\n",
1998
+ " vertical-align: middle;\n",
1999
+ " }\n",
2000
+ "\n",
2001
+ " .dataframe tbody tr th {\n",
2002
+ " vertical-align: top;\n",
2003
+ " }\n",
2004
+ "\n",
2005
+ " .dataframe thead th {\n",
2006
+ " text-align: right;\n",
2007
+ " }\n",
2008
+ "</style>\n",
2009
+ "<table border=\"1\" class=\"dataframe\">\n",
2010
+ " <thead>\n",
2011
+ " <tr style=\"text-align: right;\">\n",
2012
+ " <th></th>\n",
2013
+ " <th>split</th>\n",
2014
+ " <th>label</th>\n",
2015
+ " <th>count</th>\n",
2016
+ " </tr>\n",
2017
+ " </thead>\n",
2018
+ " <tbody>\n",
2019
+ " <tr>\n",
2020
+ " <th>0</th>\n",
2021
+ " <td>test</td>\n",
2022
+ " <td>no_smoke</td>\n",
2023
+ " <td>967</td>\n",
2024
+ " </tr>\n",
2025
+ " <tr>\n",
2026
+ " <th>1</th>\n",
2027
+ " <td>test</td>\n",
2028
+ " <td>smoke</td>\n",
2029
+ " <td>4941</td>\n",
2030
+ " </tr>\n",
2031
+ " <tr>\n",
2032
+ " <th>2</th>\n",
2033
+ " <td>train</td>\n",
2034
+ " <td>no_smoke</td>\n",
2035
+ " <td>3802</td>\n",
2036
+ " </tr>\n",
2037
+ " <tr>\n",
2038
+ " <th>3</th>\n",
2039
+ " <td>train</td>\n",
2040
+ " <td>smoke</td>\n",
2041
+ " <td>19827</td>\n",
2042
+ " </tr>\n",
2043
+ " </tbody>\n",
2044
+ "</table>\n",
2045
+ "</div>"
2046
+ ],
2047
+ "text/plain": [
2048
+ " split label count\n",
2049
+ "0 test no_smoke 967\n",
2050
+ "1 test smoke 4941\n",
2051
+ "2 train no_smoke 3802\n",
2052
+ "3 train smoke 19827"
2053
+ ]
2054
+ },
2055
+ "metadata": {},
2056
+ "output_type": "display_data"
2057
+ },
2058
+ {
2059
+ "data": {
2060
+ "text/html": [
2061
+ "<div>\n",
2062
+ "<style scoped>\n",
2063
+ " .dataframe tbody tr th:only-of-type {\n",
2064
+ " vertical-align: middle;\n",
2065
+ " }\n",
2066
+ "\n",
2067
+ " .dataframe tbody tr th {\n",
2068
+ " vertical-align: top;\n",
2069
+ " }\n",
2070
+ "\n",
2071
+ " .dataframe thead th {\n",
2072
+ " text-align: right;\n",
2073
+ " }\n",
2074
+ "</style>\n",
2075
+ "<table border=\"1\" class=\"dataframe\">\n",
2076
+ " <thead>\n",
2077
+ " <tr style=\"text-align: right;\">\n",
2078
+ " <th></th>\n",
2079
+ " <th>name</th>\n",
2080
+ " <th>label</th>\n",
2081
+ " <th>split</th>\n",
2082
+ " <th>uri</th>\n",
2083
+ " </tr>\n",
2084
+ " </thead>\n",
2085
+ " <tbody>\n",
2086
+ " <tr>\n",
2087
+ " <th>0</th>\n",
2088
+ " <td>sdis-07_marguerite-282_2024-01-30T17-01-57</td>\n",
2089
+ " <td>no_smoke</td>\n",
2090
+ " <td>train</td>\n",
2091
+ " <td>data/raw/images/train/sdis-07_marguerite-282_2...</td>\n",
2092
+ " </tr>\n",
2093
+ " <tr>\n",
2094
+ " <th>1</th>\n",
2095
+ " <td>force-06_courmettes-212_2024-01-04T12-06-07</td>\n",
2096
+ " <td>smoke</td>\n",
2097
+ " <td>train</td>\n",
2098
+ " <td>data/raw/images/train/force-06_courmettes-212_...</td>\n",
2099
+ " </tr>\n",
2100
+ " <tr>\n",
2101
+ " <th>2</th>\n",
2102
+ " <td>force-06_courmettes-212_2024-01-16T09-53-11</td>\n",
2103
+ " <td>no_smoke</td>\n",
2104
+ " <td>train</td>\n",
2105
+ " <td>data/raw/images/train/force-06_courmettes-212_...</td>\n",
2106
+ " </tr>\n",
2107
+ " <tr>\n",
2108
+ " <th>3</th>\n",
2109
+ " <td>force-06_courmettes-160_2024-04-26T09-19-42</td>\n",
2110
+ " <td>no_smoke</td>\n",
2111
+ " <td>train</td>\n",
2112
+ " <td>data/raw/images/train/force-06_courmettes-160_...</td>\n",
2113
+ " </tr>\n",
2114
+ " <tr>\n",
2115
+ " <th>4</th>\n",
2116
+ " <td>force-06_courmettes-212_2024-01-12T12-47-33</td>\n",
2117
+ " <td>no_smoke</td>\n",
2118
+ " <td>train</td>\n",
2119
+ " <td>data/raw/images/train/force-06_courmettes-212_...</td>\n",
2120
+ " </tr>\n",
2121
+ " <tr>\n",
2122
+ " <th>...</th>\n",
2123
+ " <td>...</td>\n",
2124
+ " <td>...</td>\n",
2125
+ " <td>...</td>\n",
2126
+ " <td>...</td>\n",
2127
+ " </tr>\n",
2128
+ " <tr>\n",
2129
+ " <th>33631</th>\n",
2130
+ " <td>sdis-07_brison-200_2024-01-04T12-49-23</td>\n",
2131
+ " <td>smoke</td>\n",
2132
+ " <td>test</td>\n",
2133
+ " <td>data/raw/images/test/sdis-07_brison-200_2024-0...</td>\n",
2134
+ " </tr>\n",
2135
+ " <tr>\n",
2136
+ " <th>33632</th>\n",
2137
+ " <td>sdis-07_brison-110_2024-03-08T11-23-33</td>\n",
2138
+ " <td>smoke</td>\n",
2139
+ " <td>test</td>\n",
2140
+ " <td>data/raw/images/test/sdis-07_brison-110_2024-0...</td>\n",
2141
+ " </tr>\n",
2142
+ " <tr>\n",
2143
+ " <th>33633</th>\n",
2144
+ " <td>sdis-07_marguerite-29_2024-01-31T12-18-29</td>\n",
2145
+ " <td>smoke</td>\n",
2146
+ " <td>test</td>\n",
2147
+ " <td>data/raw/images/test/sdis-07_marguerite-29_202...</td>\n",
2148
+ " </tr>\n",
2149
+ " <tr>\n",
2150
+ " <th>33634</th>\n",
2151
+ " <td>sdis-07_marguerite-29_2024-01-31T09-19-45</td>\n",
2152
+ " <td>smoke</td>\n",
2153
+ " <td>test</td>\n",
2154
+ " <td>data/raw/images/test/sdis-07_marguerite-29_202...</td>\n",
2155
+ " </tr>\n",
2156
+ " <tr>\n",
2157
+ " <th>33635</th>\n",
2158
+ " <td>sdis-07_marguerite-29_2024-03-25T10-11-12</td>\n",
2159
+ " <td>smoke</td>\n",
2160
+ " <td>test</td>\n",
2161
+ " <td>data/raw/images/test/sdis-07_marguerite-29_202...</td>\n",
2162
+ " </tr>\n",
2163
+ " </tbody>\n",
2164
+ "</table>\n",
2165
+ "<p>33636 rows × 4 columns</p>\n",
2166
+ "</div>"
2167
+ ],
2168
+ "text/plain": [
2169
+ " name label split \\\n",
2170
+ "0 sdis-07_marguerite-282_2024-01-30T17-01-57 no_smoke train \n",
2171
+ "1 force-06_courmettes-212_2024-01-04T12-06-07 smoke train \n",
2172
+ "2 force-06_courmettes-212_2024-01-16T09-53-11 no_smoke train \n",
2173
+ "3 force-06_courmettes-160_2024-04-26T09-19-42 no_smoke train \n",
2174
+ "4 force-06_courmettes-212_2024-01-12T12-47-33 no_smoke train \n",
2175
+ "... ... ... ... \n",
2176
+ "33631 sdis-07_brison-200_2024-01-04T12-49-23 smoke test \n",
2177
+ "33632 sdis-07_brison-110_2024-03-08T11-23-33 smoke test \n",
2178
+ "33633 sdis-07_marguerite-29_2024-01-31T12-18-29 smoke test \n",
2179
+ "33634 sdis-07_marguerite-29_2024-01-31T09-19-45 smoke test \n",
2180
+ "33635 sdis-07_marguerite-29_2024-03-25T10-11-12 smoke test \n",
2181
+ "\n",
2182
+ " uri \n",
2183
+ "0 data/raw/images/train/sdis-07_marguerite-282_2... \n",
2184
+ "1 data/raw/images/train/force-06_courmettes-212_... \n",
2185
+ "2 data/raw/images/train/force-06_courmettes-212_... \n",
2186
+ "3 data/raw/images/train/force-06_courmettes-160_... \n",
2187
+ "4 data/raw/images/train/force-06_courmettes-212_... \n",
2188
+ "... ... \n",
2189
+ "33631 data/raw/images/test/sdis-07_brison-200_2024-0... \n",
2190
+ "33632 data/raw/images/test/sdis-07_brison-110_2024-0... \n",
2191
+ "33633 data/raw/images/test/sdis-07_marguerite-29_202... \n",
2192
+ "33634 data/raw/images/test/sdis-07_marguerite-29_202... \n",
2193
+ "33635 data/raw/images/test/sdis-07_marguerite-29_202... \n",
2194
+ "\n",
2195
+ "[33636 rows x 4 columns]"
2196
+ ]
2197
+ },
2198
+ "metadata": {},
2199
+ "output_type": "display_data"
2200
+ }
2201
+ ],
2202
+ "source": [
2203
+ "count_df_ = df.groupby([\"split\", \"label\"]).size().reset_index(name=\"count\")\n",
2204
+ "count_df_ = count_df_.loc[count_df_[\"split\"] != \"val\"]\n",
2205
+ "display(count_df_)\n",
2206
+ "display(df)\n",
2207
+ "\n",
2208
+ "\n",
2209
+ "# for split in [\"train\", \"test\"]\n",
2210
+ " # quel label pour le min count ?\n",
2211
+ " # calculer l'écart min / max\n",
2212
+ " # échantillonner le DF pour ce split et ce label\n",
2213
+ " # boucler sur les lignes jusqu'à complétion de l'écart\n",
2214
+ " # avec une technique de data aug différente pour chaque époque => fonction"
2215
  ]
2216
  },
2217
  {
 
2239
  "cell_type": "markdown",
2240
  "metadata": {},
2241
  "source": [
2242
+ "# <a id='toc6_'></a>[Model Training](#toc0_)"
2243
+ ]
2244
+ },
2245
+ {
2246
+ "cell_type": "markdown",
2247
+ "metadata": {},
2248
+ "source": [
2249
+ "## Load configuration"
2250
+ ]
2251
+ },
2252
+ {
2253
+ "cell_type": "markdown",
2254
+ "metadata": {},
2255
+ "source": [
2256
+ "# 🚧 MODEL CHOICE\n",
2257
+ "\n",
2258
+ "- https://paperswithcode.com/sota/image-classification-on-imagenet\n",
2259
+ "- https://keras.io/api/applications/efficientnet_v2/#efficientnetv2m-function"
2260
  ]
2261
  },
2262
  {
2263
  "cell_type": "code",
2264
+ "execution_count": null,
2265
+ "metadata": {},
2266
+ "outputs": [],
2267
+ "source": [
2268
+ "# from keras.applications import EfficientNetV2M\n",
2269
+ "\n",
2270
+ "# model = EfficientNetV2M("
2271
+ ]
2272
+ },
2273
+ {
2274
+ "cell_type": "code",
2275
+ "execution_count": 29,
2276
+ "metadata": {},
2277
+ "outputs": [],
2278
+ "source": [
2279
+ "# Model config\n",
2280
+ "model_name = \"EfficientNetB0\"\n",
2281
+ "input_size = (224, 224)\n",
2282
+ "batch_size = 48\n",
2283
+ "n_epochs = 100\n",
2284
+ "optimizer = AdamW(learning_rate=0.0002, weight_decay=0.05)\n",
2285
+ "loss = \"binary_crossentropy\"\n",
2286
+ "metrics = [\"accuracy\", Precision(), Recall()]\n",
2287
+ "# metrics = [\"accuracy\", Precision(), Recall(), \"f1_score\"]\n",
2288
+ "\n",
2289
+ "# Input paths\n",
2290
+ "data_dir = os.path.join(OUTPUT_DIR, \"keras\")\n",
2291
+ "train_dir = os.path.join(data_dir, \"train\")\n",
2292
+ "val_dir = os.path.join(data_dir, \"val\")\n",
2293
+ "test_dir = os.path.join(data_dir, \"test\")\n",
2294
+ "# Output paths\n",
2295
+ "MODEL_PATH = os.path.join(MODELS_ROOT_DIR, model_name)\n",
2296
+ "LOG_PATH = os.path.join(MODEL_PATH, LOG_DIR)\n",
2297
+ "CHKPT_PATH = os.path.join(MODEL_PATH, CHKPT_DIR)"
2298
+ ]
2299
+ },
2300
+ {
2301
+ "cell_type": "markdown",
2302
+ "metadata": {},
2303
+ "source": [
2304
+ "## Prepare data"
2305
+ ]
2306
+ },
2307
+ {
2308
+ "cell_type": "markdown",
2309
+ "metadata": {},
2310
+ "source": [
2311
+ "Create datasets from local images and labels"
2312
+ ]
2313
+ },
2314
+ {
2315
+ "cell_type": "code",
2316
+ "execution_count": 30,
2317
  "metadata": {},
2318
  "outputs": [
2319
  {
2320
  "name": "stdout",
2321
  "output_type": "stream",
2322
  "text": [
2323
+ "Train dataset:\n",
2324
+ "Found 23629 files belonging to 2 classes.\n",
2325
+ "\n",
2326
+ "Val dataset:\n",
2327
+ "Found 4099 files belonging to 2 classes.\n",
2328
+ "\n",
2329
+ "Test dataset:\n",
2330
+ "Found 5908 files belonging to 2 classes.\n"
2331
  ]
2332
  }
2333
  ],
2334
  "source": [
2335
+ "# Prepare for outputs\n",
2336
+ "os.makedirs(MODELS_ROOT_DIR, exist_ok=True)\n",
2337
+ "y = df[\"label\"]\n",
2338
+ "X = df[\"uri\"]\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2339
  "\n",
2340
+ "# Create datasets\n",
2341
+ "print(\"Train dataset:\")\n",
2342
+ "train_ds = image_dataset_from_directory(\n",
2343
+ " train_dir,\n",
2344
+ " labels=\"inferred\", # class names based on folder structure\n",
2345
+ " label_mode=\"int\", # integer encoding\n",
2346
+ " shuffle=True, # shuffle images\n",
2347
+ " seed=42, # random seed\n",
2348
+ " image_size=input_size, # automatic resizing\n",
2349
+ " batch_size=batch_size, # tensor shape[0]\n",
 
 
 
 
 
 
 
2350
  ")\n",
 
 
 
 
 
 
 
 
 
 
2351
  "\n",
2352
+ "print(\"\\nVal dataset:\")\n",
2353
+ "val_ds = image_dataset_from_directory(\n",
2354
+ " val_dir,\n",
2355
  " labels=\"inferred\", # class names based on folder structure\n",
2356
  " label_mode=\"int\", # integer encoding\n",
2357
+ " shuffle=True, # shuffle images\n",
 
 
2358
  " seed=42, # random seed\n",
2359
  " image_size=input_size, # automatic resizing\n",
2360
  " batch_size=batch_size, # tensor shape[0]\n",
 
2362
  "\n",
2363
  "print(\"\\nTest dataset:\")\n",
2364
  "test_ds = image_dataset_from_directory(\n",
2365
+ " test_dir,\n",
2366
  " labels=\"inferred\", # class names based on folder structure\n",
2367
  " label_mode=\"int\", # integer encoding\n",
2368
+ " shuffle=False, # do not shuffle images\n",
2369
  " seed=42, # random seed\n",
2370
  " image_size=input_size, # automatic resizing\n",
2371
  " batch_size=batch_size, # tensor shape[0]\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2372
  ")"
2373
  ]
2374
  },
 
2376
  "cell_type": "markdown",
2377
  "metadata": {},
2378
  "source": [
2379
+ "## Prepare model"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2380
  ]
2381
  },
2382
  {
2383
  "cell_type": "code",
2384
+ "execution_count": 31,
2385
  "metadata": {},
2386
  "outputs": [
2387
  {
2388
  "name": "stderr",
2389
  "output_type": "stream",
2390
  "text": [
2391
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/keras/src/layers/layer.py:393: UserWarning:\n",
2392
+ "\n",
2393
+ "`build()` was called on layer 'conditional_augmentation_3', however the layer does not have a `build()` method implemented and it looks like it has unbuilt state. This will cause the layer to be marked as built, despite not being actually built, which may cause failures down the line. Make sure to implement a proper `build()` method.\n",
2394
+ "\n"
2395
  ]
2396
  },
2397
  {
2398
  "data": {
2399
  "text/html": [
2400
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\">Model: \"functional_3\"</span>\n",
2401
  "</pre>\n"
2402
  ],
2403
  "text/plain": [
2404
+ "\u001b[1mModel: \"functional_3\"\u001b[0m\n"
2405
  ]
2406
  },
2407
  "metadata": {},
 
2413
  "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\">┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
2414
  "┃<span style=\"font-weight: bold\"> Layer (type) </span>┃<span style=\"font-weight: bold\"> Output Shape </span>┃<span style=\"font-weight: bold\"> Param # </span>┃\n",
2415
  "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
2416
+ "│ input_layer_7 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">InputLayer</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">3</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
2417
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2418
+ "│ conditional_augmentation_3 │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">224</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">3</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
2419
  "│ (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">ConditionalAugmentation</span>) │ │ │\n",
2420
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2421
  "│ efficientnetb0 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Functional</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">7</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">7</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">1280</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">4,049,571</span> │\n",
2422
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2423
+ "│ flatten_3 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Flatten</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">62720</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
2424
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2425
+ "│ dense_3 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">1</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">62,721</span> │\n",
2426
  "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
2427
  "</pre>\n"
2428
  ],
 
2430
  "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
2431
  "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n",
2432
  "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
2433
+ "│ input_layer_7 (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
2434
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2435
+ "│ conditional_augmentation_3 │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m224\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
2436
  "│ (\u001b[38;5;33mConditionalAugmentation\u001b[0m) │ │ │\n",
2437
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2438
  "│ efficientnetb0 (\u001b[38;5;33mFunctional\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m7\u001b[0m, \u001b[38;5;34m7\u001b[0m, \u001b[38;5;34m1280\u001b[0m) │ \u001b[38;5;34m4,049,571\u001b[0m │\n",
2439
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2440
+ "│ flatten_3 (\u001b[38;5;33mFlatten\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m62720\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
2441
  "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
2442
+ "│ dense_3 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m62,721\u001b[0m │\n",
2443
  "└─────────────────────────────────┴────────────────────────┴───────────────┘\n"
2444
  ]
2445
  },
 
2449
  {
2450
  "data": {
2451
  "text/html": [
2452
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Total params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">4,112,292</span> (15.69 MB)\n",
2453
  "</pre>\n"
2454
  ],
2455
  "text/plain": [
2456
+ "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m4,112,292\u001b[0m (15.69 MB)\n"
2457
  ]
2458
  },
2459
  "metadata": {},
 
2462
  {
2463
  "data": {
2464
  "text/html": [
2465
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Trainable params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">62,721</span> (245.00 KB)\n",
2466
  "</pre>\n"
2467
  ],
2468
  "text/plain": [
2469
+ "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m62,721\u001b[0m (245.00 KB)\n"
2470
  ]
2471
  },
2472
  "metadata": {},
 
2487
  }
2488
  ],
2489
  "source": [
2490
+ "# Load pre-trained model without top layers\n",
 
 
2491
  "model = EfficientNetB0(\n",
2492
  " weights=\"imagenet\", # pre-trained weights\n",
2493
  " include_top=False, # no dense layer\n",
2494
  " input_shape=(input_size[0], input_size[1], 3), # input shape\n",
2495
  ")\n",
2496
+ "# Create explicit input layer\n",
2497
  "inputs = tf.keras.Input(shape=(input_size[0], input_size[1], 3))\n",
2498
  "# add data augmentation\n",
2499
  "augmented = ConditionalAugmentation(rate=0.4)(inputs)\n",
2500
  "x = model(augmented)\n",
2501
  "\n",
2502
+ "# Flatten output\n",
2503
  "x = Flatten()(x)\n",
2504
+ "# For feature extraction only\n",
2505
  "for layer in model.layers:\n",
2506
  " layer.trainable = False\n",
2507
+ "# New FC layer for binary classification\n",
2508
+ "predictions = Dense(1, activation=\"sigmoid\")(x)\n",
2509
+ "# Define new model\n",
2510
  "effnetB0 = Model(inputs=inputs, outputs=predictions)\n",
2511
+ "# Display model summary\n",
2512
  "effnetB0.summary()"
2513
  ]
2514
  },
 
2516
  "cell_type": "markdown",
2517
  "metadata": {},
2518
  "source": [
2519
+ "## Training"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2520
  ]
2521
  },
2522
  {
2523
+ "cell_type": "markdown",
 
2524
  "metadata": {},
 
2525
  "source": [
2526
+ "Follow with : `tensorboard --logdir models/EfficientNetB0/runs`"
 
 
 
 
 
 
 
 
2527
  ]
2528
  },
2529
  {
2530
  "cell_type": "code",
2531
+ "execution_count": 32,
2532
  "metadata": {},
2533
+ "outputs": [
2534
+ {
2535
+ "name": "stderr",
2536
+ "output_type": "stream",
2537
+ "text": [
2538
+ "INFO:root:⚙️ compiling\n",
2539
+ "INFO:root:🛎️ declaring callbacks\n",
2540
+ "INFO:root:💪 starting training\n"
2541
+ ]
2542
+ },
2543
+ {
2544
+ "name": "stdout",
2545
+ "output_type": "stream",
2546
+ "text": [
2547
+ "Epoch 1/100\n",
2548
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m406s\u001b[0m 815ms/step - accuracy: 0.8178 - loss: 0.4710 - precision_3: 0.8521 - recall_3: 0.9467 - val_accuracy: 0.7821 - val_loss: 0.5143 - val_precision_3: 0.8256 - val_recall_3: 0.9283\n",
2549
+ "Epoch 2/100\n",
2550
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m398s\u001b[0m 808ms/step - accuracy: 0.8334 - loss: 0.4258 - precision_3: 0.8697 - recall_3: 0.9422 - val_accuracy: 0.8046 - val_loss: 0.5463 - val_precision_3: 0.8188 - val_recall_3: 0.9757\n",
2551
+ "Epoch 3/100\n",
2552
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m394s\u001b[0m 799ms/step - accuracy: 0.8461 - loss: 0.4060 - precision_3: 0.8763 - recall_3: 0.9506 - val_accuracy: 0.7590 - val_loss: 0.6189 - val_precision_3: 0.8438 - val_recall_3: 0.8636\n",
2553
+ "Epoch 4/100\n",
2554
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m383s\u001b[0m 776ms/step - accuracy: 0.8472 - loss: 0.3938 - precision_3: 0.8793 - recall_3: 0.9476 - val_accuracy: 0.7785 - val_loss: 0.5448 - val_precision_3: 0.8263 - val_recall_3: 0.9214\n",
2555
+ "Epoch 5/100\n",
2556
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m361s\u001b[0m 733ms/step - accuracy: 0.8579 - loss: 0.3752 - precision_3: 0.8865 - recall_3: 0.9524 - val_accuracy: 0.7856 - val_loss: 0.6732 - val_precision_3: 0.8133 - val_recall_3: 0.9559\n",
2557
+ "Epoch 6/100\n",
2558
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m368s\u001b[0m 747ms/step - accuracy: 0.8504 - loss: 0.3840 - precision_3: 0.8866 - recall_3: 0.9417 - val_accuracy: 0.8104 - val_loss: 0.5844 - val_precision_3: 0.8233 - val_recall_3: 0.9766\n",
2559
+ "Epoch 7/100\n",
2560
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m366s\u001b[0m 743ms/step - accuracy: 0.8543 - loss: 0.3848 - precision_3: 0.8870 - recall_3: 0.9468 - val_accuracy: 0.7868 - val_loss: 0.6400 - val_precision_3: 0.8209 - val_recall_3: 0.9439\n",
2561
+ "Epoch 8/100\n",
2562
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m359s\u001b[0m 728ms/step - accuracy: 0.8572 - loss: 0.3700 - precision_3: 0.8884 - recall_3: 0.9486 - val_accuracy: 0.7965 - val_loss: 0.5693 - val_precision_3: 0.8252 - val_recall_3: 0.9514\n",
2563
+ "Epoch 9/100\n",
2564
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m363s\u001b[0m 737ms/step - accuracy: 0.8564 - loss: 0.3684 - precision_3: 0.8890 - recall_3: 0.9467 - val_accuracy: 0.7939 - val_loss: 0.6147 - val_precision_3: 0.8341 - val_recall_3: 0.9319\n",
2565
+ "Epoch 10/100\n",
2566
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m369s\u001b[0m 749ms/step - accuracy: 0.8603 - loss: 0.3629 - precision_3: 0.8904 - recall_3: 0.9500 - val_accuracy: 0.8026 - val_loss: 0.5798 - val_precision_3: 0.8311 - val_recall_3: 0.9505\n",
2567
+ "Epoch 11/100\n",
2568
+ "\u001b[1m493/493\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m368s\u001b[0m 746ms/step - accuracy: 0.8634 - loss: 0.3589 - precision_3: 0.8931 - recall_3: 0.9507 - val_accuracy: 0.7573 - val_loss: 0.6606 - val_precision_3: 0.8164 - val_recall_3: 0.9052\n"
2569
+ ]
2570
+ },
2571
+ {
2572
+ "name": "stderr",
2573
+ "output_type": "stream",
2574
+ "text": [
2575
+ "INFO:root:🧐 evaluating model\n"
2576
+ ]
2577
+ },
2578
+ {
2579
+ "name": "stdout",
2580
+ "output_type": "stream",
2581
+ "text": [
2582
+ "\u001b[1m124/124\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m72s\u001b[0m 579ms/step - accuracy: 0.6824 - loss: 0.8199 - precision_3: 0.6028 - recall_3: 0.7630\n",
2583
+ "\u001b[1m124/124\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m74s\u001b[0m 586ms/step\n"
2584
+ ]
2585
+ },
2586
+ {
2587
+ "name": "stderr",
2588
+ "output_type": "stream",
2589
+ "text": [
2590
+ "INFO:root:📈 plotting results\n"
2591
+ ]
2592
+ },
2593
+ {
2594
+ "name": "stdout",
2595
+ "output_type": "stream",
2596
+ "text": [
2597
+ " precision recall f1-score support\n",
2598
+ "\n",
2599
+ " no_smoke 0.16 1.00 0.28 967\n",
2600
+ " smoke 0.00 0.00 0.00 4941\n",
2601
+ "\n",
2602
+ " accuracy 0.16 5908\n",
2603
+ " macro avg 0.08 0.50 0.14 5908\n",
2604
+ "weighted avg 0.03 0.16 0.05 5908\n",
2605
+ "\n"
2606
+ ]
2607
+ },
2608
+ {
2609
+ "name": "stderr",
2610
+ "output_type": "stream",
2611
+ "text": [
2612
+ "2025-01-27 12:32:42.266301: W tensorflow/core/framework/local_rendezvous.cc:404] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n",
2613
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/sklearn/metrics/_classification.py:1565: UndefinedMetricWarning:\n",
2614
+ "\n",
2615
+ "Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
2616
+ "\n",
2617
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/sklearn/metrics/_classification.py:1565: UndefinedMetricWarning:\n",
2618
+ "\n",
2619
+ "Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
2620
+ "\n",
2621
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/sklearn/metrics/_classification.py:1565: UndefinedMetricWarning:\n",
2622
+ "\n",
2623
+ "Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
2624
+ "\n",
2625
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/sklearn/metrics/_classification.py:1565: UndefinedMetricWarning:\n",
2626
+ "\n",
2627
+ "Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
2628
+ "\n",
2629
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/sklearn/metrics/_classification.py:1565: UndefinedMetricWarning:\n",
2630
+ "\n",
2631
+ "Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
2632
+ "\n",
2633
+ "/Users/julmat/Documents/hugging_face/frugal_cviz/.venv/lib/python3.12/site-packages/sklearn/metrics/_classification.py:1565: UndefinedMetricWarning:\n",
2634
+ "\n",
2635
+ "Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
2636
+ "\n"
2637
+ ]
2638
+ },
2639
+ {
2640
+ "data": {
2641
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgYAAAGbCAYAAAC/L1igAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAUxxJREFUeJzt3Qd4U9X7B/C3LbRltuy9994b2YIMZcqQDSIgU5RRlamAP/gDsocIRWQvQTYyZUPZG2TK3ptCaf7P98R7mzQpNG3apMn343Mlubm5OblJc99zznvO9TAYDAYhIiIiEhFPRxeAiIiInAcDAyIiItIxMCAiIiIdAwMiIiLSMTAgIiIiHQMDIiIi0jEwICIiIh0DAyIiItIxMCAiIiIdAwNyez//LJI3r4iPj4iHh0jRomGPbdwoUqaMSJIkxsewPHokUqVK2P3Ll217vW3bwp7brp3d3w7FYfb4bvD7RdHFwIBcypAhYT+K1hZ/f/PtFy4U+eorkbNnRV6/Nn8MJ/z69UX27xd59kziNAQzODZYAgMtH8c60+M0ZUrEx3XAgOgFYVo5wsPxDv95eXoag7JChUT69xd58MDyecHBIiNGiOTPL+LrK5IihUiDBiKHDkW9nETuLJ6jC0DkSKtXh90eNEjkww9FEic23v/rL5FXr4y3caLp3VvEy8t4opo4UeTxY+Nj6dLZ9prFion8/bfxdpo0EmuBwdChxtuVK7+/JjlqlEinTiLx49u3HAgMrlwx3rYWHISHK7kgKDtxwrhs2CBy8KBIvP9+uUJCROrWFdm82TxQWLlSZP16kTVrRKpXt+97IHJ1DAzIZdWuLfLtt+brtBOK5saNsNs4WWbLZv2xTz4xnlA1qMFGlZ+fSMWK4tRw8p47V6RDB8eVAcETAoPDh0X69BF5+1bk6FGRnTuNXTmAlg0tKChY0Bj8YPsffzQGCPhML1wwdhMRUeSwK4FcVurUxhOw6VK2rHk/7NatYdtnzx7WL4t/Bw8OewwnSKzLmtV4/105Bqip1qkjkiqViLe3SIYMIk2ahNWU39UHfPeu8SSYK5fxZJYsmbFGvHev+Xbh94GadKlSxqb0zJlFJkyIOODZvj3sudoJ1pqffjKejN8nMmXWuiq0YwCmXQbW4PP64AORnj2NrSyaW7fCbk+bFnb7l19EGjUS+eEHkVq1jOv+/de8Vcga024UtGJMnmz8nBMlMn6O164ZW4569RJJmdLYYtSsmfVujS1bjO8d2+Gzz5TJePzPn7fc9sgR4/FPkEAkY0ZjUIMWkIhE9rtBFG247DKRqxg8GHVM49K2bcTbbd0atl34Bc+L6LEsWYzPr1w5bN2lS2H7HTo04ufiNcO/tmkZr1wxGDJmtP7c+PENhpUrrZcfZfL0tHzOpk3Gbd/1fvA+YPbssHXFioXtb948y+Pav7/tZTbdv7UFcBzDrwsNNRiOHDEYkiYNW3/smPGx+/fNXyskxPrn0KvXu78zpmXLkcOybIULGwwNGliub9nSfD+TJxsMHh7W31+SJAbD/v1h254/bzD4+Vl/LXt+N971N0AUEbYYkMuaM8cymU2roWv9/KYjEJYsMa777jvjv+3bhz2GLgmsW7o04tdD37dpK0PHjiJ//imyYIHIp58aE+ne5csvjTVcaNPG2PIwdaox5+HNG2OrxfPnls9DLfzjj42v1bx52Prp043/4v3gvWnwnvFesCBXIrzcuY3lBST14RQT3TKj5o3XS5s27LlaGbR8i/C05EOU98kT4/2AgLBuHNOWGiQcIv/DtLVIc+mSRNo//4j062fMUUBLDxw7Zmx1+L//E5k/31jD1xJXtTwTtCogiRXHCmX+/ntjfoN2HJ8+NX73tGM5cGDYc/Fd/OMP42eBbg97fjeIooI5BuSWtH5+/KspWTKsqwDNtUg+1OD++/ICfv897HaLFiIzZ4bdNz1hW4Nm6bVrjbdx8kTin9ZvjoTIFStE7t83
nhAaNzZ/Lk6CixYZm5fRnYATFmgnGZTdNIkwMjkOCCYWLxY5eVJk+XL7lBnlNO3rtzXPAvkhGDmCkyuCBNMTIZrtTZnet+WEWb68yP/+FzZUFd0K2uf59dfG2/PmGU/66GZBcFKkiDFg1Ea1NGxo7M4AHAcEPuj+OHXKmCNRuLB59wa+NxhRAdhu+HD7fTeIooKBAblV8mFMjgI4dy7sdr16tj0XJ3GtNomTA/rWrTl92nId8ia0Ey5qzqYjEaIKtXIM1URNFicqJF/as8yRobUkoDaOGvjFiyJjxhj77dHfjxwADRINTZkOPTXd7n1Klw67nTy5edCoQf5A+GNs+tlj3gsNAjK0CKxbF7YdTu7a8FeUTQsKwr9+bB1novAYGJDLJx+6Emu1XyShWRt18a4ugMjAyRiBAbL8w4/msEVUm7hNPzs0z2utLmgdQWCgte4AasxI3NPKaZqgaJp4+T6mLUimXT9Jk1rfPjLHOKLkyuhuGx67EshemGNAZCfom9egqdkWOXOGnRRy5DCe5MKnmaEWPGxY1MpmepILDY3cc0qUEPnoI+PtAwfsU+aolCP8CVgbDYAafb58xtt4bdMy7tkTdjuiGnZMffaYEEuD/n8EVqbbIWDVWjFwMjet6e/bF/vfDaLwGBiQy7pzxzjmPfwSvtnZXlq2DLuNJLUvvjAGCOirx2M7dkT8XJzk0PWhJcCh6R59+5s2GXMVunUzDkO8fj1qZTNtVTh+3NgSgGNx9er7Ww3sWWbTciDZDmVAeazRPi+0ECBZz9pJuEuXsNvoe8fro8zIDwAMA7S1WycqMBxVy+NAGZCEiu6D1q1Fbt40rkeXAfIREByZlgnbINkRczJgAqjY/m4QWYhwvAJRHGQ6rC6ixXR4YUTDDsPvC0PaTEX0vEGDYma4YvjXeteQNNNhjKZKlLDcH95j+CF7zZqZP69KFfPnRHa4orXj8/XXEQ+ZDD9c0dri42Mw7N4dtr83bwyG6tUj3vavvwzvZfretePxrs/fdPin9pnaOlzx3DnzIZjakitXzH43iCKDLQZEdoRJatBKgCZ4JAKiFpk+vXHinff1daPWh2bnvn2NF3XCZEWYTAe3MURt1Spj4l1UYdgkymVaa48M09p6dMuMmjRaUnBMItufjmOIoYNNm4rs2iVSrlzYY8gpwPFGgqR2ISzUsFGr3r07dqdDxpBC1OJRu0cZUDa8TxyHoCDjiBENRopgcq1KlYxlRkIirgVhbfhobHw3iEx5IDowW0NERERuiy0GREREpGNgQERERDoGBkRERKRjYEBEREQ6BgZERESkY2BAREREOgYG5HDt2rWTBg0aOLoYFIEhQ4ZIUdPrU8ewwMBA8ff3j5F9Z82aVX62Nr0gEekYGLigBw8eSMuWLSVp0qTqB7Zjx47yTLucWwRmzJghVapUUc/x8PCQRxFcmm/NmjVSpkwZSZAggSRLlswlTuhLliyRvHnziq+vrxQqVEjWate4jYRdu3ZJvHjxLE6cO3bskI8//ljSp0+vjucfmIPYDido7OtdiyMCsG3btr3zO2OrZs2ayTnTyxXaMbg4cOCAfIEZlpzEnj17pFq1apIoUSL1t1epUiV5+fKl/jiOQ/369SVlypTq8YoVK8pWzIz0n8uXL5t97tr98MvevXvNXhefVbdu3SRdunTi4+MjuXPntul7T66NgYELQlBw8uRJ2bRpk6xevVqdpN73Y/jixQv56KOP5Nvw1yk2sWzZMmndurW0b99ejh49qk6Kn332mcRlu3fvlhYtWqjg6fDhw+rEieXEiRPvfS5+XNu0aSPVrUyv9/z5cylSpIhMnjzZbmX95ptv5ObNm/qSMWNGGTZsmNk6Z/ba9FrI74CgMzWuNBQDUqVKJQkTJhRnCQrwN1ezZk3Zv3+/Clq6d+8uniZXmqpXr56EhITIli1bJCgoSH2nsO6W6eUjrfjrr7/MvhclcEUsk8/hww8/VEHE0qVL5ezZs/LLL79I
BkwvSQSRmjiZIm3dunWGChUqGPz8/AzJkyc31K1b13DhwgWzba5du2Zo3ry5IVmyZIaECRMaSpQoYdi7d6/++KpVqwwlS5Y0+Pj4GFKkSGFo0KBBpF//1KlTmMnScODAAbMyeXh4GK5fv/7e52/dulU9/+HDh2br37x5Y8iQIYNh5syZBntr27atoX79+vr9V69eGXr06GFIlSqVOgY4nvtNJpp/8OCB4bPPPjOkTJnS4Ovra8iZM6dh1qxZ6rHg4GBDt27dDGnTplXPzZw5s2HEiBERvnbTpk3VZ2SqTJkyhs6dO7+33M2aNTN8//33hsGDBxuKFCkS4XY4nitWrDDYW5YsWQzjxo3T7x87dsxQtWpVdUzw3evUqZPh6dOn6jGUEeUwXfBZQ79+/Qy5cuUyJEiQwJAtWzb1nl6/fq3v913v79KlSxb7xecJlStXVp9Fr1691Pe4Ci66YDAYxowZYyhYsKD67mfMmNHQtWtXvZwwe/Zs9fcT/vV/++039Z6TJk2qjv2TJ0/e+R02XbAPa8cMj02bNk19B/D+8+bNa9i9e7fh/PnzqvwoY7ly5Sz+hv/44w9DsWLF1HcMx2zIkCHqb8QW+J7hWEfk7t27qnw7duzQ1+E9Y92mTZvMjr9Gu3/48OEI9zt16lRD9uzZzT7j8JYsWaI+I+27VL16dcOzZ89sen8Ud7HFwM5QU+zTp48cPHhQNm/erKL/hg0bSuh/15hFk37lypXl+vXrsmrVKlXz7tevn/44muqxfZ06dVQNFvsoXbq0TbUQNKGWLFlSX1ejRg1Vjn3WrukaSYcOHVJlxn6KFSummiBr164dqZq1rXA80DoxZ84c9bo5c+aUWrVqqS4SGDhwoJw6dUrWrVsnp0+flqlTp6qmVpgwYYI6rosXL1Y1oXnz5ql+5XcdLxwfU3gtrH+X2bNny8WLF2UwJv93ku8dyo3uHdQ80T2CWiNqoFprQ9OmTVUNVatFli9fXj2WJEkS1fSOYzp+/HhVexw3blykXjdTpkzqswIcb+wX+9DgM/T29latS9OmTVPr8B3C54RWLTyO2jA+83f5559/VHcMWsCwbN++XX766Ser2+J9IY8ATe/ae8X7j8gPP/ygWn6OHDmiupTQCta5c2cJCAhQf8eIH7TjCH///bfavlevXuqYTZ8+XR2/4bhgg0m3DbrmInLnzh3194iWEZQ3TZo06ndhJy4n+Z8UKVJInjx55LffflOfL1oO8Fp4jmkLgDWffPKJ2g5dD/h7MIX75cqVU10JeN2CBQvKiBEj5O3bt+pxHC+0onXo0EH9faGrqFGjRuo4kJtwdGTi6rSo//jx4+r+9OnTDUmSJDHcv3/f6vaonbRs2TLKrzd8+HBD7ty5Ldaj9j1lypQotxgsWLBArUcNfOnSpYaDBw8aWrRooWqCEb2XqLQYoFYSP358w7x58/THUbNJnz69YdSoUer+xx9/bGjfvr3VfaGloVq1aobQ0NBIvTZea/78+WbrJk+ebEidOnWEzzl37px6/OzZs+q+M7QYzJgxQ7VAmdbq1qxZY/D09DTcunXLastMREaPHq1asTTve38RfWdQ40at+n1QO8X36F0tBqi5m7YQ9O3bV9W4IxJ+HxprLQamtfY9e/aodb/++qvZdx81Zw1qz+FboebOnWtIly6dfn/AgAGG1q1bR1g+7XVQG0dr16FDhwy9e/c2eHt7q++XaesiPgu0+Hl5eanXwLbv+r1BiwxaINHK1r9/f/XclStX6tvkyZNHtXR06NBB/R0vXLhQlQOtHhAUFKTKdvny5Qhfh1wbWwzs7Pz58yrazp49u6qxaLXVq/9d+B61EtS4k+Pya1bgcWt91o6mtWh899130rhxY1VjQa0ZiU2onVqDWkjixIn1RTsG76sZvnnzRipUqKCvix8/vmo1Qe0FunbtKgsXLlQJf6hpIk/AtKaGY4iaVs+ePWXjxo1iT6hVoUY5dOhQlbBlLzg2pscKx84W
ODbof0YSmwbHEJ8bavLvsmjRIrVt2rRp1Wt///33kfqsIsNazRYtGfiOo08brRXIW7l//77Kc4kI/o6wrQYtVqh120PhwoX126hBA5JQTde9evVKnjx5ou6jlQ+5HaafV6dOnVRNW3sPI0eOVDX99/09oWUCOTv4TUArDb63s2bNUo8hbkGtHjV/tFIgDwH5L0hqjSifBC1naLFEgnCpUqVUq0qrVq1k9OjRZq+NfSLhGJ8Pkj3xd6216OB7hM8Hx+DTTz9VLUgPHz6M1jGmuIWBgZ3hjxZN3vhjQlOh1nyvJV4hsepd3vf4++DHPfwPJpogUSY8FlX4IYb8+fPr65DNjAAoopNIly5d1ElaW5Chbw/owrhy5Yp89dVXcuPGDfUjpjUVFy9eXC5duqSah5HdjebzJk2aRLgvHJPbt2+brcP9iI7V06dPVfMympYxGgELThI4WeA2msWjAsfG9Fjh2MUGdJkgWRVdV2iiR/cVThKRTRR8H9NABZDwhuQ5nIzRBYGEOi1B812vieDQFAJS7eQaXab71jL8ra0z7Q5EYGj6eR0/flxVCjCyJap/T5AvXz797wnfJXwmCIIRuOG7PWXKFPUbgS6YyEKQcOHCBbPXRlDr5eVl9rpIaMRngPVIXEZXHco3ceJEFbDg74rcAwMDO0KtB7Uz1LhwssIfW/hIGz+I+CHR+svDw+PIK4gq9B0iWx4/uBr8wOBHDT8QUYWaBQIB09onavb4oc+SJYvV56BVBPkB2oIT5/vkyJFD75M2fR30m5v+iCK7vG3btvL777+r/mTUfjRoqUEtCMEZasM4AUV0vHG8wh9v/ChivTXYN04C4U/i+OHE7ageYxwb02MVUYtSRPBdQ3CCvmgNjiH681E2wHHV+pE1aG3B54dgAHkpuXLlUkGXLbBfCL9va/C9xHdxzJgxUrZsWXWCQnBnb9beq73gBI2/A9PPS1tMRxS8C1pAEAyGb83B8ETt70lrfQi/T9y3JSjC91ILRABBBgIF033gdbGN9lkiGMJ2CIAQLGL9ihUrIv2aFLe9/5eaIg2JX0gYwkkKf2SI/AcMGGC2DboZ0EyMJkE0N2I7/OHhRwInIySzIajACbJ58+aqto/xxf3791fPR0IUkgAjaqbECQIJZmjaRNMgTqqo3WJfWo0dz8drYB9aYiNqC1i0mgVOfmi6zZw5szpJ4YSIEyDKh4Qz/HhpzZNobrQX1DDRVdC3b1/1unj9UaNGqR9JDCmEQYMGqUClQIECEhwcrGpVeN8wduxYdUzRNIsfUHRzoPYf0YQ5SCBD0hdOVHXr1lW1M7QImAYapscc+0Sylik0y6KmaLoetUrTWhpqW/iB1t6TvaHWj88GwRLmO7h796706NFDNdNrzeM4GW3YsEGdjPA99fPzU4EAvqd432h6RvKrrScAfBdwIsHngJYH1GjRvG4NTp74TqIWitY106REe8J7xWeAoA9N4xiiaK9hivj+odUDnyNao/CdQFCGRNwff/wxUn+nOF74juMzQ/nQLYZWgDNnzqghhIDfA/ym4DPFa+K4ItjFdwnfVWu0ZE98/2H58uWqa2LmzJn6Nvj7mjRpkvru4zuClg78JqHrDdDKieOGYZT4buM+vk/a3xi5AUcnObgaDCPKly+fSu4pXLiwYdu2bRbJZ0jqady4sRp2haQqDE3ct2+f/viyZcsMRYsWVYlIGJLXqFEj/TEkkCGp612QDIjEwMSJE6vXQKKe6XAwbUiTNlwtouFsWJDEZZoE+PXXX6vEOyRQ1qhRw3DixIloH7PwSXEvX75USYR479aGK/7www/qGGN4GZKm8NyLFy/qSXg4dokSJVLvHYli70rWgsWLF6uETRzvAgUKqKS98OV71zG3lpxnbcic6VC+2B6uCHfu3DF8+OGH6nth+vkjkQ/Jf1iPYYDYp7Xhgu8ybNgwNUQUiW6mwxUxVDG8sWPHqiQ6fH61atVSwxBNkxcjGq5oCmXE+3+XLl26
qPf1vuGKpn+b1ob7WUuuXL9+vaF8+fLqPeB7Vrp0afXds+XvFEaOHKmGbGrDIv/++2+zxzHsuGbNmurzxN9c2bJlDWvXro1wf4GBgepvA/vTyoXkzvAwJBPJm/j7wtBFJC2HhIToQ57xuWjDhfG3MXHixPe+F3IdHvifo4MTIiIicg7MMSAiIiIdAwMiIiLSMTAgIiIiHQMDIiIi0jEwICIiIh0DAyIiItIxMCAiIiLXnvnw6LWnji4CUYzLky7sokJErso3hs9SCYqFXVLbVi8PTxJX5JKBARERUaR4sOE8PAYGRETkvv67eiaFYWBARETuiy0GFhgYEBGR+2KLgQWGSkRERKRjiwEREbkvdiVYYGBARETui10JFhgYEBGR+2KLgQUGBkRE5L7YYmCBgQEREbkvthhY4BEhIiIiHVsMiIjIfbErwQIDAyIicl/sSrDAwICIiNwXWwwsMDAgIiL3xRYDCwwMiIjIfTEwsMAjQkRERDq2GBARkfvyZI5BeAwMiIjIfbErwQIDAyIicl8clWCBgQEREbkvthhYYGBARETuiy0GFhgqERERkY4tBkRE5L7YlWCBgQEREbkvdiVYYGBARETuiy0GFhgYEBGR+2KLgQUGBkRE5L7YYmCBR4SIiIh0bDEgIiL3xa4ECwwMiIjIfbErwQIDAyIicl8MDCwwMCAiIvfFrgQLDAyIiMh9scXAAo8IERER6dhiQERE7otdCRYYGBARkftiV4IFBgZEROS+2GJggYEBERG5LQ8GBhYYGBARkdtiYGCJnStERESkY4sBERG5LzYYWGBgQEREbotdCZYYGBARkdtiYGCJgQEREbktBgZOnHz4999/S6tWraRcuXJy/fp1tW7u3Lmyc+dORxeNiIjIbThFYLBs2TKpVauWJEiQQA4fPizBwcFq/ePHj2XEiBGOLh4REblwi0FUF1flFIHBjz/+KNOmTZNffvlF4sePr6+vUKGCHDp0yKFlIyIiF+YRjcVFOUWOwdmzZ6VSpUoW6/38/OTRo0cOKRMREbk+V675x+kWg7Rp08qFCxcs1iO/IHv27A4pExERuT52JThpYNCpUyfp1auX7Nu3Tx3sGzduyLx58+Sbb76Rrl27Orp4RETkohgYOGlXwoABAyQ0NFSqV68uL168UN0KPj4+KjDo0aOHo4tHRETkNjwMBoPB0YV48+aNSjp8/fq16lJ49uyZ5M+fXxInTiz37t2TlClT2rS/o9eexlhZiZxFnnRJHF0EohjnG8PV1xRtFkT5ufd/ayGuyCm6Epo3by6IT7y9vVVAULp0aRUU3L59W6pUqeLo4hERkatywKiEn376SXVF9O7dW1/36tUr6datm6RIkUKd/xo3bqzOgaauXr0qdevWlYQJE0rq1Kmlb9++EhISYrbNtm3bpHjx4qrVPWfOnBIYGBg3AwO82c8//9xs3c2bN1VQkDdvXoeVi4iIXFts5xgcOHBApk+fLoULFzZb/9VXX8mff/4pS5Yske3bt6tcu0aNGumPv337VgUFaFnfvXu3zJkzR530Bw0apG9z6dIltU3VqlXlyJEjKvDAuXXDhg1xryvh7t27Kq+gdu3aMnbsWHVA8MaKFCkiCxcuFE9P2+IXdiWQO2BXArmDmO5KSNV+UZSfe3d2M5u2Rzc5avNTpkxR8/cULVpUfv75ZzWZX6pUqWT+/PnSpEkTte2ZM2ckX758smfPHilbtqysW7dO6tWrp86PadKkUdtg/p/+/furcyha3HF7zZo1cuLECbMWeQz7X79+fdxqMcAB2bhxo5oBsU+fPqqloFixYrJgwQKbgwIiIqLYaDEIDg6WJ0+emC3azL3WoKsANfoaNWqYrQ8KClK5dqbr0VqeOXNmFRgA/i1UqJAeFABmDMZrnjx5Ut8m/L6xjbaPyHKas26mTJlk06ZNapgicgwQFHh5eTm6WERERFaNHDlSTcRnumCdNWj9xky+1h6/deuWqvH7+/ubrUcQgMe0
bUyDAu1x7bF3bYPg4eXLl+L0wxWTJUtmtY8GwxXRz4IEDM2DBw9iuXREROQWopFEGBAQoFq5TSHpL7xr166puXpQ+fX19RVn57DAAP0qREREjhSdiYp8fHysBgLhoavgzp07Kr/ANJlwx44dMmnSJJUciKRC5AKYthpgVAJmBgb8u3//frP9aqMWTLcJP5IB95MmTaouUuj0gUHbtm0d9dJERERKbMxgWL16dTl+/LjZuvbt26s8AiQMoisdc/ls3rxZDVPUriGEEXvlypVT9/Hv8OHDVYCBoYqAFgic9DHMX9tm7dq1Zq+DbbR9xKmZD7Xo6Y8//pDTp0+r+wUKFJBPPvmEeQZERBSnA4MkSZJIwYIFzdYlSpRIdZlr6zt27Ki6JZInT65O9pj1Fyd0jEiAmjVrqgCgdevWMmrUKJVP8P3336uERq3VokuXLqoFol+/ftKhQwfZsmWLLF68WI1UiHOBAWY7rFOnjly/fl3y5Mmj1iFBA1EU3lCOHDkcXUQiInJBznLNg3HjxqlReGgxwMgGjCbAsEYNKsmrV69W1w9CwIDAAi3vw4YN07fJli2bOmdiToTx48dLxowZZebMmWpfcW4eAwQFKAZGJCBagvv370urVq3UgbI12uE8BuQOOI8BuYOYnscgfeflUX7ujelhExC5EqdoMcAsT3v37tWDAkATC6aNrFChgkPLRkRELsw5GgycilMEBugfefr0qdVZojC2k4iIyJW7EpyJU0xwhGkev/jiC9m3b5/qUsCCFgQkUiABkYiIyBWulRAXOEVgMGHCBJVgiIQKTP6ABV0IuDIUEiiIiIhiAgMDJ+1KwIQOK1eulPPnz6sLRwAuHoHAgIiIKMa47vk9bgcGmly5cqmFiIiI3DgwQE7B0qVLZevWrWpWp9DQULPHly+P+nASirqXL57LosBpsn/nVnn86KFky5lH2n35teTMW0Df5t8rl2TezAly6ughCQ19KxkzZ5evB4+SlGnSyp1bN6R7K+s5Il8N/EnKVTa/ChiRM1s4f57Mmf2r3Lt3V3LnySsDvh0ohQoXdnSxKJpcuUsgTgcGvXv3lunTp0vVqlXVlaD4QTmHaWN+lGuX/5HuA4ZJ8hSpZMdfa+WHfl/KuFlLJHnK1HLrxr8yqPfnUq32J9K0TWdJkCix/Hv5H4n/30iSlKnSyIzF5tcA/2vNClm1eK4UK13eQe+KyHbr162V/xs1Ur4fPFQKFSoi8+bOka6dO8rK1evNLvhGcQ/PN04aGMydO1e1CmCiI3IOr4Nfyb6/t0i/YWMkf2HjhT+atu0sQXv/lo2rlkrzDl/KwlmTpViZ8tLqi17689Kmz6jf9vTyEv/kKc32i9YHtBT4JkgYi++GKHrmzpktjZo0lQYNjfPYI0DYsWOb/LF8mXTs9IWji0fRwMDASUcl4BrW2bNnd3QxKNy1K9A1oNX+Nd7ePnLmxBHV3XNo3y5JlzGLDO/fXT5v8qF8272t7N+1LcJ9Xjx3Wi7/c06q1a4fC++AyD7evH4tp0+dlLLlwlq5MCNr2bLl5djRww4tG0UfRyU4aWAwZMgQGTp0qLx8+dLRRaH/JEiYSHLnLyzLfp8pD+7dlVBcIvSvtXLu9HF5+OCePHn0QF69fCErFwZKkVLl5PufJknpClVlzJC+cupokNV9blm3UjJkziZ5ChSJ9fdDFFUPHz1UgXL4LgPcv3fvnsPKRXbiEY3FRTlFV0LTpk1lwYIF6lKSWbNmVZefNHXo0KEIn4uLTWAx9Tr4tXhH4hrZ9G7ILZj6f8OkS/Pa4unpJdly5ZEKVWvJpfOnJTTUeImNkuUqS70mLdXtrDnzyNlTR2Xj6mWSv0gJi66JnVvWS+NWnzvkvRARURwKDHCFqKCgIHXRJFuTD3EVRrQ2mOrce4B07fNtDJTUvSBfYOjYGfLq5Us1QiFZipQy7ocASZ02gyT181dX+8qYJZvZc9AicPbEEYt97d2xWYKDX0nlD+vG4jsg
ir5k/snUdx0XdjOF+ylTmufQUNzjyl0CcTowwNUTN2zYIBUrVrT5uQEBAeoa1qbO3nltx9KRb4IEann29IkcPbhHWnXqKfHix5cceQrIjX+vmG1789+rkjJ1OqvdCCXLVZKk/sliseRE0Yc8m3z5C8i+vXukWnXjEFvk2Ozbt0eat2jl6OJRNDEwcNLAIFOmTJI0adIoX4AJiynvx7zssj0cObAHk0xI+kxZ5NaNazJ3xgTJkCmrVPnIODfBJ01by7gfAyRfoeJSsGhJOXJgtwTt+VuGjJlutp9b16/J6eOHJWA4p7emuKl12/Yy8Nv+UqBAQSlYqLD8PneOyolq0NA1L7vrThgXOGlgMGbMGOnXr59MmzZN5RiQc3jx/Jks+HWS3L93RxInSSplPqgmLdp3k3jxjF+b0hWrSqdeAfLHwkCZPfn/VADx9eD/Sd5CRc32s2X9KjXvQeGSZR30Toii56PadeThgwcyZdIENcFRnrz5ZMr0mZKCXQlxHlsMLHkYMO2ggyVLlkxevHghISEhkjBhQovkwwcPHti0v6PX2GJAri9PuiSOLgJRjPON4epr7n7mk7DZ4tyoj8QVOUWLwc8//+zoIhAREZEzjUogIiKKbexKcNIJjjBPwfHjx/X7uARzgwYN5Ntvv5XXrznCgIiIYgbigqgursopAoPOnTvLuXPn1O2LFy9Ks2bNVK7BkiVLVFIiERFRTPD09Ijy4qqcIjBAUFC0qDGTHcFA5cqVZf78+RIYGCjLli1zdPGIiMhFscXASXMMMDACE4bAX3/9JfXq1dPnN+Bc5EREFFOYY+CkLQYlS5aUH3/8UV1+efv27VK3rnHa3EuXLqkpkomIiMiNAgMMV0QCYvfu3eW7776TnDlzqvVLly6V8uXDLnVKRERkT+xKcNKuhMKFC5uNStCMHj1aXbxEgyswfvLJJ5IoUaJYLiEREbkidiU4aYtBRHx9fc1mQcTohdu3bzu0TERE5FqBQVQXV+UULQaR5QSzNxMRkQtx4fO7ewQGRERE9uTKNX+X7EogIiKi2MUWAyIicltsMLDEwICIiNwWuxLieGCQJUsWs1EKRERE0cG4wMkDg6CgIDl9+rS6nT9/filevLjZ4ydOnHBQyYiIyBWxxcBJA4M7d+5I8+bNZdu2beLv76/WPXr0SKpWrSoLFy6UVKlSObqIRETkghgXOOmohB49esjTp0/l5MmT8uDBA7WgdeDJkyfSs2dPRxePiIjIbThFi8H69evVVRXz5cunr0NXwuTJk6VmzZoOLRsREbkudiU4aWCASy5bSyrEOu1yzERERPbGuMBJuxKqVasmvXr1khs3bujrrl+/Ll999ZVUr17doWUjIiLXxWslOGlgMGnSJJVPkDVrVsmRI4dacBvrJk6c6OjiERGRi+Jll520KyFTpkxy6NAh2bx5sz5cEfkGNWrUcHTRiIjIhblyzT9OBwawZcsWtWDoIvIKDh8+LPPnz1ePzZo1y9HFIyIicgtOERgMHTpUhg0bJiVLlpR06dIxgiMioljB042TBgbTpk2TwMBAad26taOLQkREboQVUScNDF6/fi3ly5d3dDGIiMjNMDBw0lEJn3/+uZ5PQEREFFs4KsFJWwxevXolM2bMULMfFi5c2GKyo7FjxzqsbERE5LrYYuCkgcGxY8ekaNGiVq+gyA+NiIjIiQOD9RfWy/7r+6V5weaSLnE6+Wz5Z7Ljyg4pkqaIzG88XzImzWhzIbZu3Wrzc4iIiKKLdU875BiM3j1ahm0fJn4+fjI9aLqsObdGngY/lV3XdsmAvwbYujsiIiKH4ZTIdggMTt09pVoF0iROI9uvbJekPkllQeMF4hvPV7Zd3mbr7oiIiByGyYd2CAwevnwoqRKlUrfP3DsjJdOXlGYFm0m+lPnk3ot7tu6OiIjIYTw9PKK82GLq1KkquT5p0qRqKVeunKxbt84sCb9bt26SIkUKSZw4sTRu3Fhu375tto+rV69K3bp1
JWHChJI6dWrp27evhISEmG2zbds2KV68uPj4+EjOnDnVHEExHhgkT5Bczt0/JwuOL5DLjy5LwdQF1frHwY/F39ff5gIQERG5eotBxowZ5aeffpKgoCA5ePCguqpw/fr15eTJk+pxXE34zz//lCVLlsj27dvV1YYbNWqkP//t27cqKMC8P7t375Y5c+aok/6gQYP0bS5duqS2qVq1qhw5ckR69+6tpgPYsGGDbcfEYDAYbHlCy+UtVVCg9a+sbrFaKmWpJClGpZBSGUrJ3+3/Fkc7eu2po4tAFOPypEvi6CIQxTjfGB47V3Py3ig/d2O3stF67eTJk8vo0aOlSZMmkipVKjWfD27DmTNn1MUE9+zZI2XLllWtC/Xq1VMBQ5o0afRZg/v37y93794Vb29vdXvNmjVmo/uaN28ujx49kvXr18dci8HYmmOlQd4GUiBVARlQYYDUzlVbjtw6ooKC5gWa27o7IiIit0o+fPv2rSxcuFCeP3+uuhTQivDmzRuzKwrnzZtXMmfOrAIDwL+FChXSgwKoVauWPHnyRG91wDbhr0qMbbR9RJbNsRiSDpc3W262rkLmCk7RUkBERGQLz2gkEQYHB6vFFPr2sVhz/PhxFQggnwB5BCtWrJD8+fOrZn/U+P39zbvjEQTcunVL3ca/pkGB9rj22Lu2QfDw8uVLSZAggf0CA8xTEFnoViAiIooLolPzHzlypLo6sKnBgwfLkCFDrG6fJ08eFQQ8fvxYli5dKm3btlX5BM4mUoFBlcAqkTp4HuIhIYPMMySJiIicVXSGHQYEBEifPn3M1kXUWgBoFcBIAShRooQcOHBAxo8fL82aNVNJhcgFMG01wKiEtGnTqtv4d//+/Wb700YtmG4TfiQD7mMURGRbC2zKMUCO4nsXsSmPkYiIKM7y8fHRhx9qy7sCg/BCQ0NVVwSCBFwjaPPmzfpjZ8+eVcMT0fUA+BddEXfu3NG32bRpk3pNdEdo25juQ9tG24ddWwwu9bpk006JiIjiArR0x4aAgACpXbu2Sih8+vSpGoGAOQcwlNDPz086duyoWh8wUgEn+x49eqgTOkYkQM2aNVUA0Lp1axk1apTKJ/j+++/V3AdaMNKlSxeZNGmS9OvXTzp06CBbtmyRxYsXq5EKdg8MsvhnifCxkNAQiefpFNdiIiIiirXkQ1ugpt+mTRu5efOmCgQw2RGCgg8//FA9Pm7cOPH09FQTG6EVAaMJpkyZoj/fy8tLVq9eLV27dlUBQ6JEiVSOwrBhw/RtsmXLpoIAzImALgrMnTBz5ky1rxidxwC2X94ug7YNkn3/7pMS6UvIsCrDZN7xefJ58c+lfKby4micx4DcAecxIHcQ0/MY1P/lYJSfu7JTSXFFNh9yXA+h5tyaqqUAEFdk9sssgUeM0y46Q2BAREQUGa58zYOosnmCo0FbB8lbw1tpmK+hvi5XilxqfgNcYZGIiCiuiK1rJbh0YHDwxkHJ5p9NljVdZrY+XeJ0cv3JdXuWjYiIiJy9KwGJhuGHJYYaQuX60+vi5ellz7IRERHFKBeu+Mdei0GxdMXUVRU7reqk7t99cVdaLGshd5/flRLpSkS9JERERG5wrQSXCwxw4SSYdWSWOjAXH16UpaeWqtt9y/eNiTISERHF6csuu3RggKspzm80X41E0GY8xO3fG/6uHiMiIoormHxoKUojRJsVbKaWey/uqfspE6aMym6IiIgcynVP77EcGLwKeSW/Hf1Njt8+ru4XTlNYWhdpLb7xfKNRFCIiIopzgcGJOyekzrw6ahSCqWE7hsnaz9ZKoTSF7Fk+IiKiGOPKSYSxlmPQeXVn+ffJvyq3wN/XXy24jTkMuq7pGuWCEBEROeJaCVFdXJXNLQaHbh4Sby9vWdVildTMUVOt2/TPJvl4wccSdDMoJspIREQUI9hiYIfAIKt/VjXJkRYUwIc5PlTTImOiIyIioriCcYEduhJG1Rgllx5ekr8u/qWvw22s+1+N/9m6
OyIiIofhBEdRbDHIPj672X1cRKnW77UkmW8ydf/hq4eqe6HX+l5SL3e9yOySiIiI4mpggCmQrXnw8oF+OzgkOMLtiIiInJErJxHGaGAwuPLgKL8AERGRs3LlLoGYDQyqMDAgIiLXw7DATjMfApINbzy9ofINTFXKUimquyQiIopVrnzNg1gLDG49uyUNFjaQAzcOWDzmIR4SMigkyoUhIiKKTYwL7BAYDPhrgOy/vt/6gzzARERE7jWPwaaLm8TTw1N++fgXdT9/qvwysvpISZ4guSxqsigmykhERBQjOI+BHQKDu8/vSp6UeaRj8Y7qfmLvxNK/Yn9JnSi1LDyx0NbdEREROQzO71FdXJXNXQmJvBOpKZG12xcfXpTbz27L3Rd3ZcM/G2KijERERDGCyYd2aDHIkCSDXHt8Td3OnSK33H95X9KPTa8mO8KVFomIiOIKthjYITDAlMdZ/LPIyTsnpXeZ3modLruMpVeZXrbujoiIyGGYY2DJw4AzejTsvLpTjVIonKaw1MheQ5zB0WtPHV0EohiXJ10SRxeBKMb5Rnm2ncjptuJ0lJ87uWE+cUXRPuQVM1dUizMp+0mAo4tAFOMeHpjk6CIQuV+zuRuIVGBQbU61SO0MTSub22yObpmIiIhihSt3CcRoYLDt8jZ18N7X68ADTEREcQmvrhjFwKBNkTY86RMRkcthYBDFwCCwQWBkNiMiIopTWOm1xLwLIiIi0sXwQBAiIiLnxa4ESwwMiIjIbbEnwRIDAyIiclu8VoIlBgZEROS2mGhnp8AgOCRY5h+fL3v/3StpE6dVl2C+/OiyFExdUJInSB6VXRIREcU6NhjYITC4/+K+VJlTRU7dPaXul8lQRspnKi915teRgZUGypAqQ2zdJREREcXVVpR+m/qpKyv6xvPVZ0LExZMSxk8o6y6si4kyEhERxViOQVQXV2VzYLD6/Grx8/WTf3r+o6/z8vSSLH5Z5OLDi/YuHxERUYzB+T2qi6uyuSvh0atHkj9VfpVbYOqt4a08DebljomIKO7gPAZ2CAzQMoCuhJ1Xd+rr/jz7p5y9d1Zyp8ht6+6IiIgcxpW7BGKtK6FFwRYSEhoilQMrqzmm913fJw0WNVC38RgREVFcwa4EOwQG31X6Tmrnqq0SD02XmjlqSsAHAbbujoiIiOJyV4K3l7es+WyN7LiyQ/Zf36/WlUpfSipnrRwT5SMiIooxzDGw48yHlbJUUgsREVFc5SGMDKIdGFSbUy3Cx5BnsLnNZlt3SURE5BBsMbBDYLDt8jYVAGiTG4F2H/8SERHFFQwM7BAYtCnSxiwAePzqsQoWnr5+Ks0LNrd1d0RERA7DCq0dAoPABoEW6+69uCeFpxaWjEky2ro7IiIicrUrTqZMmFJyJs8pgUctgwYiIiJn7kqI6uKqbA4Mhm0fZrYM3jpYWixroWZCfP32dcyUkoiIKA5PcDRy5EgpVaqUJEmSRFKnTi0NGjSQs2fPmm3z6tUr6datm6RIkUISJ04sjRs3ltu3b5ttc/XqValbt64kTJhQ7adv374SEhJits22bdukePHi4uPjIzlz5pTAwMCY7UoYsm1IhH0y9XLXs3V3RERELj8l8vbt29VJH8EBTuTffvut1KxZU06dOiWJEiVS23z11VeyZs0aWbJkifj5+Un37t2lUaNGsmvXLvX427dvVVCQNm1a2b17t9y8eVPatGkj8ePHlxEjRqhtLl26pLbp0qWLzJs3TzZv3iyff/65pEuXTmrVqhWpsnoYTIcXRELWn7OaBQYYA5o6UWqpnq26mvkwsXdicbQExbo7ughEMe7hgUmOLgJRjPON8mw7kTNh56UoP7dnxWxRfu7du3dVjR8BQ6VKleTx48eSKlUqmT9/vjRp0kRtc+bMGcmXL5/s2bNHypYtK+vWrZN69erJjRs3JE2aNGqbadOmSf/+/dX+vL291W0EFydOnNBfq3nz5vLo0SNZv359pMpm8yG/3PuyrU8h
IiJyStFpMAgODlaLKTTfY3kfBAKQPHly9W9QUJC8efNGatSooW+TN29eyZw5sx4Y4N9ChQrpQQGgFaBr165y8uRJKVasmNrGdB/aNr17946ZHIM3b99Ijgk5pPj04mbzGBAREbmbkSNHqiZ/0wXr3ic0NFSdqCtUqCAFCxZU627duqVq/P7+/mbbIgjAY9o2pkGB9rj22Lu2efLkibx8+dL+LQbxveLL0+CnqruAYz+JiCiu84zGlMgBAQHSp08fs3WRaS1ArgGa+nfu3CkuMSqhXdF2cvbeWTlxJ6z/goiIyN1GJfj4+EjSpEnNlvcFBkgoXL16tWzdulUyZgyb+wcJha9fv1a5AKYwKgGPaduEH6Wg3X/fNihbggQJYibH4NYzY3NFqV9KSdWsVSVN4jT6RSjw76/1f7V1l0RERA4RW/MRGAwG6dGjh6xYsUINJ8yWzTxxsUSJEmp0AUYRYJgiYDgjhieWK1dO3ce/w4cPlzt37qjERdi0aZM66efPn1/fZu3atWb7xjbaPmJkVILnUE+zayVoXQratRLeDnorjsZRCeQOOCqB3EFMj0qYsfdKlJ/7Rdkskd72yy+/VCMOVq5cKXny5NHXIy9Bq8kjiRAndcw7gJM9AgnA0ERtuGLRokUlffr0MmrUKJVP0Lp1azUc0XS4IvIW0F3RoUMH2bJli/Ts2VONVIjscMVIH3JMZpQxaUapnLVypA8EERGRM4utdLmpU6eqf6tUqWK2fvbs2dKuXTt1e9y4ceLp6alaDDDaASfyKVOm6Nt6eXmpbggEEGgBwPwHbdu2lWHDhunboCUCQQDmRBg/frzqrpg5c2akgwKbWgzQUlA2Y1nZ3dEYuTgzthiQO2CLAbmDmG4x+GVf1FsMOpWJfItBXBLDh5yIiMh5xdbMhy4bGAS/DZarj6++c5vMfpmjWyYiIqJYwbggmoHBkVtHJNv4iKeAxKiEkEHmF3MgIiJy6UsMu3tXwjtTEhh5ERFRHMLJ+qIZGGRIkkE6Futoy1OIiIicFsOCaAYGGK44uMpgW55CREREcQhHJRARkdviqIRoBAYYbZAuSbrIbk5EROT0GBZEIzC43PtyZDclIiKKE9hgYIldCURE5LY4KsESh3ASERGRji0GRETktlg7tsTAgIiI3Ba7EiwxMCAiIrfFsMASAwMiInJbbDGwxMCAiIjcFnMMLPGYEBERkY4tBkRE5LbYlWCJgQEREbkthgWWGBgQEZHbYoOBJQYGRETktjzZZuCcyYePHj2SmTNnSkBAgDx48ECtO3TokFy/ft3RRSMiIhdvMYjq4qoc3mJw7NgxqVGjhvj5+cnly5elU6dOkjx5clm+fLlcvXpVfvvtN0cXkYiIyG04vMWgT58+0q5dOzl//rz4+vrq6+vUqSM7duxwaNmIiMi1eUTjP1fl8BaDAwcOyPTp0y3WZ8iQQW7duuWQMhERkXtw5S6BOBsY+Pj4yJMnTyzWnzt3TlKlSuWQMhERkXtg8qETdiV88sknMmzYMHnz5o0+2QRyC/r37y+NGzd2dPGIiMiFMfnQCQODMWPGyLNnzyR16tTy8uVLqVy5suTMmVOSJEkiw4cPd3TxiIjIhTEwcMKuBIxG2LRpk+zatUuOHj2qgoTixYurkQoGg8HRxSMiInIrDg8MRo8eLX379pUKFSqoRfP27Vtp1aqVLFiwwKHlIyIi1+XKowvibFcCAoNff/3VbB2CgubNm8uRI0ccVi4iInJ9nh5RX1yVw1sM1qxZIzVr1lRdCk2aNJGQkBBp2rSpnDlzRrZu3ero4hERkQtji4ETBgalSpWSZcuWSYMGDcTb21u1Hly4cEEFBWnSpHF08YiIyIW5chJhnA0MoFq1amrqYwxPzJcvn2zfvl1Spkzp6GIREZGLY4uBkwQGjRo1sroeExr5+/vLF198oa/DNROIiIjIhZMPkU9gbalVq5bkyJHDbB3Fvm/afygvD0+S0d+ETTCVLWNKWTSmk1zd
MlJu/z1afv9fB0mdPInV53vHjyd7Fw5Q+yicO4O+3sc7nswY2koOLP5Wnh4YL4vHdoqV90NkDwvnz5PaH1aTUsUKScvmn8rxY8ccXSSyAyYfOkmLwezZsx3xshQJJfJnlo6NK8ixc//q6xL6esvqKd3k+LnrUvuLiWrd4C/ryrLxnaVSmzEW802M6F1fbt59LEXyZDRb7+XpKS+D38iUBdukQfWisfSOiKJv/bq18n+jRsr3g4dKoUJFZN7cOdK1c0dZuXq9pEiRwtHFo2hgV4ITDlfU3L17V3bu3KkW3KbYlyiBt8we0U6+/GGBPHryUl9frmh2yZI+hXQa/LucvHBDLZ8PmivF82eWKqVzm+2jZoX8Ur1sPgkYt8Ji/y9evZZeIxbJ7BW75fZ9y+tjEDmruXNmS6MmTaVBw8aSI2dOFSDgarB/LF/m6KJRNHHmQycMDJ4/fy4dOnSQdOnSSaVKldSSPn166dixo7x48cLRxXMrPwc0k/V/n5Ct+86arUcXAFoFgl+H6OteBYdIaKhByhfNoa9D18KUgS2k48Df5MXL17FadqKY8ub1azl96qSULVdeX+fp6Slly5aXY0cPO7RsFH0e0VhclcMDgz59+qhRCH/++ac8evRILStXrlTrvv76a0cXz218WquEFM2bSQZOXGXx2P7jl+X5y9cyvFd9SeAbX3Ut/NSnocSL5yVpUybVt5sxrJX8snSnHDp1NZZLTxRzHj56qCZdC99lgPv37t1zWLnIPjw9PKK8uCqHD1fEHAZLly6VKlWq6Ovq1KkjCRIkUBMdTZ069Z3PDw4OVospQ+hb8fD0irEyu5qMafxldN/GUq/rJLNWAc29h8+kZb9fZcK3zeTLFpVVS8Hi9UEqAAj9L78A65Mk9JXRszY64B0QEZHLBAboLrA2kRGuthiZroSRI0fK0KFDzdZ5pSkl8dOVtms5XVmxfJklTYqksmd+f30dWgMqFs8hXZpVEr8yvWXz3jNS4JOhksI/kYSEhMrjZy/l0qYRcnlDkNq+SqncUqZwNnm872ezfe+a108WrjsonQbNjfX3RWQPyfyTiZeXl9y/f99sPe5zvpW4z3Xr/XE4MChXrpwMHjxYTXCEZB7A5Zdxssdj7xMQEKC6I0yl/iDsBEfvt3X/WSnRxPwS1xhWePbSbRkTuEm1EGjuP3qu/q1cKrekTp5YVm8/ru5/PWqpDJm8Wt8uXSo/WT21u7QeMFsOHL8ca++FyN7ie3tLvvwFZN/ePVKteg21LjQ0VPbt2yPNW7RydPEouhgZOF9gMH78eDV/QcaMGaVIkSJqHS6/jCBhw4YN732+j4+PWkyxG8E2z14Ey6l/bpqtQ07Bg8fP9fWtPykrZy/dkrsPn6mWgf/r20Qmztsq56/cUY9fu/XQYp9w8dpduX7nkb4+b/a04h3PS5L5JZIkCX30eQ6Onbse4++TKKpat20vA7/tLwUKFJSChQrL73PnqApMg4bWJ2ujuIPDFZ0wMChYsKCcP39e5s2bpy6cBC1atJCWLVuqPANyDrmzppZhPT6R5H4J5cqNBzLq1w0y4fctNu/nj4ld1dBHzb5FAerfBMW627W8RPb0Ue068vDBA5kyaYLcu3dX8uTNJ1Omz5QU7EqI81w4hzDKPAzhZ6dxATzJkDt4eGCSo4tAFON8Y7j6euDi4yg/t1R215yd1+EtBnDjxg01sdGdO3dU352pnj17OqxcRERE7sbhgUFgYKB07txZXXIZ44I9TNp1cJuBARERxRh2JThfYDBw4EAZNGiQGl2A2cSIiIhiC5MPnTAwwFwFzZs3Z1BARESxjsmHlhx+NsY1EZYsWeLoYhARkRvitRKcMDDAzIW4LgKmRO7Ro4earMh0ISIiiuuRwY4dO+Tjjz9WFwlE/twff/xh9jgGCKJbHRcUxFD9GjVqqKH8ph48eKCG8idNmlT8/f1VxfrZs2dm2xw7dkw++OADNRdQpkyZZNSoUXEzMMBERrdv
35bjx4/L4cOH9eXIkSOOLh4REZFdriSMSfwmT55s9XGcwCdMmCDTpk2Tffv2SaJEidTkf69evdK3QVBw8uRJ2bRpk6xevVoFG1988YX++JMnT6RmzZqSJUsWCQoKktGjR8uQIUNkxowZcWseg2TJksm4ceOkXbt2dtsn5zEgd8B5DMgdxPQ8BoevPI3yc4tlSRKl56HFYMWKFdKgQQN1H6dhtCTgisLffPONWvf48WN1HSGM3EMe3unTpyV//vxy4MABKVmypNpm/fr16qKD//77r3o+Ljr43Xffya1bt9RIPxgwYIBqndAmEIwTLQaYzrhChQqOLgYREblp8mFUl+DgYFVLN13CX+03Mi5duqRO5ug+0Pj5+UmZMmVkz5496j7+RfeBFhQAtkfiPloYtG0qVaqkBwWAVoezZ8/Kw4fm09Y7dWDQq1cvmThxoqOLQUREbig6KQYjR45UJ3DTBetshaAAwl9pGPe1x/AvrjpsKl68eJI8eXKzbaztw/Q14sRwxf3798uWLVtUf0mBAgUkfvz4Zo8vX77cYWUjIiIXF43hBQFWru4b/qJ+cZHDAwM0jTRqxCuUERFR3JrgyMfK1X2jIm3atOpfJOFjVIIG94sWLapvg8sGmAoJCVEjFbTn4188x5R2X9smTgQGU6ZMUddHQAYmXL58WSVK5MuXT/WNEBERubJs2bKpE/fmzZv1QAD5Csgd6Nq1q7pfrlw5efTokRptUKJECbUOre04fyIXQdsGyYdv3rzRW98xgiFPnjwq0T/O5BjUr19f5s6dq27jTZctW1bGjBmjsjWRYUlEROSMyYe2wHwDGIKvDcNHwiFuX716VY1S6N27t/z444+yatUqNXS/TZs2aqSBNnIBleWPPvpIOnXqpLrgd+3aJd27d1cjFrAdfPbZZyrxEPMbYFjjokWLZPz48TbPCeTwwODQoUNqMgZYunSpSpS4cuWK/Pbbb2pMJxERUVyf+fDgwYNSrFgxtQBO1riNSY2gX79+apI/zEtQqlQpFUhgOCImKtLMmzdP8ubNK9WrV1fDFCtWrGg2RwGSHzdu3KiCDrQqYPgj9m8610GcmMcgYcKEanxl5syZpWnTpioBcfDgwXLt2jXV/IFrKdiK8xiQO+A8BuQOYnoegxPXzWcOtEXBDInFFTm8xSBnzpwqpwCBAGZAxKxNgCQLTPtIREQUk8mHUf3PVTk8MEAzB2Z6ypo1q0qgQPIEoDlEa3IhIiKKyzkGcYnDRyU0adJE9ZPcvHlTzSOtQR9Kw4YNHVo2IiIid+PwwAAwTCP8GMvSpUs7rDxEROQeXLjiH7cDAyIiIodgZGCBgQEREbktV04ijCoGBkRE5LZcOYkwqhgYEBGR22Jc4ITDFYmIiMh5sMWAiIjcF5sMLDAwICIit8XkQ0sMDIiIyG0x+dASAwMiInJbjAssMTAgIiL3xcjAAkclEBERkY4tBkRE5LaYfGiJgQEREbktJh9aYmBARERui3GBJQYGRETkvhgZWGBgQEREbos5BpY4KoGIiIh0bDEgIiK3xeRDSwwMiIjIbTEusMTAgIiI3BZbDCwxMCAiIjfGyCA8Jh8SERGRji0GRETkttiVYImBARERuS3GBZYYGBARkdtii4ElBgZEROS2OPOhJQYGRETkvhgXWOCoBCIiItKxxYCIiNwWGwwsMTAgIiK3xeRDSwwMiIjIbTH50BIDAyIicl+MCywwMCAiIrfFuMASRyUQERGRji0GRETktph8aImBARERuS0mH1piYEBERG6LLQaWmGNAREREOrYYEBGR22KLgSW2GBAREZGOLQZEROS2mHxoiYEBERG5LXYlWGJgQEREbotxgSUGBkRE5L4YGVhgYEBERG6LOQaWOCqBiIiIdGwxICIit8XkQ0tsMSAiIrflEY0lKiZPnixZs2YVX19fKVOmjOzfv1+cDQMDIiJyX7EYGSxatEj69OkjgwcPlkOHDkmRIkWkVq1acufOHXEmDAyI
iMitkw+j+p+txo4dK506dZL27dtL/vz5Zdq0aZIwYUKZNWuWOBMGBkRE5NY5BlFdbPH69WsJCgqSGjVq6Os8PT3V/T179ogzYfIhERFRFAQHB6vFlI+Pj1rCu3fvnrx9+1bSpEljth73z5w5I87EJQODl4cnOboIbgV/GCNHjpSAgACrfxBEroDfc9fkG42z4JAfR8rQoUPN1iF/YMiQIRKXeRgMBoOjC0Fx25MnT8TPz08eP34sSZMmdXRxiGIEv+cUnRYDdCUgn2Dp0qXSoEEDfX3btm3l0aNHsnLlSnEWzDEgIiKKAh8fHxUkmi4RtSZ5e3tLiRIlZPPmzfq60NBQdb9cuXLiTFyyK4GIiMjZ9OnTR7UQlCxZUkqXLi0///yzPH/+XI1ScCYMDIiIiGJBs2bN5O7duzJo0CC5deuWFC1aVNavX2+RkOhoDAwo2tB0hoQbJmSRK+P3nOyhe/fuanFmTD4kIiIiHZMPiYiISMfAgIiIiHQMDIiIiEjHwICcTmBgoPj7+zu6GEQ2adeundnENURxFQMDIiIi0jEwICIiIh0DAxdQpUoV6dmzp/Tr10+SJ08uadOmNbuIx9WrV6V+/fqSOHFiNWVn06ZN5fbt25Ha99GjR6Vq1aqSJEkS9VxM6Xnw4EGzJv/Vq1dLnjx51DzgTZo0kRcvXsicOXMka9askixZMlU2XFVM8/DhQ2nTpo16DM+pXbu2nD9/PsIyYEIQzBTWsGFDNS85phHFxWyyZcsmCRIkkCJFiqj5x4lM4TtRqFAh9R1JkSKFurwtZpnTmvxHjBihJpbBd3jYsGESEhIiffv2VX9DGTNmlNmzZ5vt7/jx41KtWjV9f1988YU8e/Yswtc/cOCApEqVSv73v/+p+5gP//PPP1fr8LeEfeHvi8jZMDBwETgRJ0qUSPbt2yejRo1SP3SbNm1SJ1EEBQ8ePJDt27erdRcvXlQzcEVGy5Yt1Y8kfuRwLfEBAwZI/Pjx9ccRBEyYMEEWLlyoZvDatm2bOoGvXbtWLXPnzpXp06ebnbjxw4zgYtWqVeo65JhKo06dOvLmzRuL17927Zp88MEHUrBgQbUPTC6DoOC3336TadOmycmTJ+Wrr76SVq1aqfdHBDdv3pQWLVpIhw4d5PTp0+p72ahRI/Vdgy1btsiNGzdkx44dMnbsWDVxUb169VSwir+hLl26SOfOneXff/9V2yOgqFWrlnocfwtLliyRv/76K8KJarD/Dz/8UIYPHy79+/dX6z799FO5c+eOrFu3Tv0tFS9eXKpXr67+NomcCiY4oritcuXKhooVK5qtK1WqlKF///6GjRs3Gry8vAxXr17VHzt58iR+HQ379+9/776TJEliCAwMtPrY7Nmz1X4uXLigr+vcubMhYcKEhqdPn+rratWqpdbDuXPn1HN27dqlP37v3j1DggQJDIsXL9b36+fnZzhz5owhU6ZMhp49expCQ0PVY69evVL73717t1lZOnbsaGjRosV73w+5h6CgIPU9u3z5ssVjbdu2NWTJksXw9u1bfV2ePHkMH3zwgX4/JCTEkChRIsOCBQvU/RkzZhiSJUtmePbsmb7NmjVrDJ6enoZbt27p+61fv75h+fLlhsSJExsWLlyob/v3338bkiZNqr6/pnLkyGGYPn26nd89UfRwSmQXUbhwYbP76dKlU7UT1JYyZcqkFk3+/PlV8ykeK1Wq1Hsv+oHmT9T80RSLWk+OHDn0x9EVYHofTbPoQkC3hek6lAXwmvHixZMyZcroj6NZFl0ReEzz8uVL1VLw2WefqQuNaC5cuKBaKVAbC39J02LFikX6eJFrQ/cSauPoSkBNv2bNmqqbCzV+KFCggHh6epp9R9EqpfHy8lLfS9PvLfaJVjlNhQoVVIvc2bNn9bnu0dqArrXwl9ZFlwG6HbBPU/ie//PPPzF4JIhsx64EF2HavA8eHh7qRyu6kKuA5vq6deuq5lEEFStWrHjn69qjLOgyQCCCH9nr
16/r67U+3TVr1siRI0f05dSpU8wzILMTO7rN0GyP7+zEiRNV8Hnp0qUY/d4iSM6bN6/MmjXLrGsM31sE66bfWSwIKpDXQORMGBi4uHz58ql+eiwanESRCIUfzMjInTu36sffuHGj6qcNn5Rla3mQ5IWaleb+/fvqB9K0PKjNoZUCyY5IfkR/MGAbBA1IqMyZM6fZYtoqQoQTO2r1Q4cOlcOHD4u3t7dZUGvr9xa1fuQaaHbt2qW+pwg4NClTplQBNFq2kOSrBQfIJ8DV9NBaFv57i+cQORMGBi4OtW40pyKJ8NChQ7J//341IqBy5coq0/9d0MyJ5Cokbl25ckX9ECLxCj+SUZUrVy6VDNmpUyfZuXOn+rFF4mCGDBnU+vC1vnnz5qkmXGRw44cVoyO++eYbFagg4RLNsHhfqBHiPhEg8MSoAyS5Iohcvny5Gt0S1e8u/n58fX2lbdu2cuLECdm6dav06NFDWrdubXHJ3NSpU6vg4MyZMyoBEoEw/g7LlSunuhcQYF++fFl2794t3333nT7Kh8hZMDBwg1rTypUrVd9qpUqV1A9U9uzZZdGiRe99Lk7MqM0jkECrAWpAGFqIGlh0oMUBLQHIAsePJTLFMYIhfFMuoIa1YMEC1SeM4AB9vj/88IMMHDhQjU7AD/1HH32kuhYwfJEIMBwQIw4w2gXf3e+//17GjBmjvr9RgVyaDRs2qBEEyMtBvgJyGCZNmmR1ewwZRnCAIY4IKtAlge84/gbbt2+vytS8eXMVcIcPLIgcjZddJiIiIh1bDIiIiEjHwMDNoYkeQwutLejfJyIi98KuBDeHPk5rMw4C+j6R7EdERO6DgQERERHp2JVAREREOgYGREREpGNgQERERDoGBkTO4PJlzEZlXAIDjevwr7bOHrJmNe5ryBD77I+IXBIDAyJTVaqEnYyxeHmJZMgg8vHHIrt3x25ZUqUSwVUoTa5E+V7btoWVHcGGKVx9EvvKmNHuRSUi18HLLhNZ4+1tPJEGB4ucOCGyerXI+vW4co5I6dKW279+bXyOPdWta1zsJYoXECIi98IWAyJr0qUT2btX5PBhkT/+MK4LCRGZP1+kXTtjjRytC6NGGWvgvr5hz/39d5FSpTDBvgjmgfjoI5EjR8z3v3WrSMGCxudVrIhLXlqWIaKuBFznonx5kcSJja9RpIjIpk3GLoKqVcO2w7Uj8FyUN6KuhKtXRdq0weT+uBax8b18+aXIgwdh25i+38mTjfvB+6pXT+TWrbDtcLyqVxdJkcL4vrBdgwYi//wTpY+AiByDgQFRVO3ZIxIQgCv2iCRPblyHQKF1axFcMQ+XgcZjGzYYT/6nTxu3wckUXRMnTxq7Ku7fF2naNHKvOWaMSPPmxtfGc3PkELlwwbgvnNRNrx5YtKix6wDbWHPnjki5ciJz54o8eoTra4vcvi0ydapI5coir16Zb4+ulG++MbaMPHsmsmaNyNdfGx8LDTUGClu2GAMMlOPFC5GVK0VMLvlNRM6PgQGRNTdvipQta+xOQK0X4sUTadHCvPsAXQyo7eOEihOhduVJ/Hv2LKaWFMHlrZ8/FxkxwvgYat24jxP7/v3GgOGrr95fJux/8GDjbZzQccI9ftz42jgpf/65yJQp5l0HqMUPHGh9fyjHjRsinp7Gkz6CiyVLjI+h+2TBAvPt37417u/cOZGGDY3rNm82/vvwoTHAgaAgY0sLAg/sJ3/+9783InIaDAyIrMFJf98+kWPHjEmA6Ovfvt08ETBPHhHtMr44yePEipM34ASO5nfUntF6ADipArbTnl+ggPF2ZFoM8DwEFNCtm7E1AtClkDOn7e/xwIGwchQvbryNIAjdE6CVW1OokLHbArSTPYISQPcBghVAWbAtgigECClT2l42InIYJh8SWZMli2VWf3hp0kT8GJrStRO3BifPuMzfP+w2Wk/CQ+sBcjCQoIlWlKVLRRYuNLa+9O0bq0UloqhjiwFRVIVPCkTtP0EC420k
HCIPAK0EWNBv/913YdsBuhq0vAOcRN8Hz0uUyHgb+3v61HgbrQjIMwCttq+tfxckSGrlOHTIeBuJllqrB7pAIguXXEF3BBIVZ80yvueOHY2P7dgR+f0QkcMxMCCyF5yUtf78ceOMyYBIAERLAZrqN240Poasf2yLPnucfNG6gKTFyOxfy2FArRzJjYULG1sukOsASDRE9wXUqGHMk4go6EB3BEZfIHEQoxwwSuLTT42P4bZpPsX74L3g9ZIlMwYw6Er45RfjYygjEcUZDAyI7AmjFObMMdbGkZCHmnzq1CJduog0amTcBifjVauM/fQYAomhf/PmRW7/GAWApED05+Ny2dh/9uxhff4IQiZMMAYN6P9HnoTpkEJTKBdq9hhFgW4CtBwgyEBZkU9hOgTzfZBjgedhiOT168ZyYbgiRjEMGhT5/RCRw/Gyy0RERKRjiwERERHpGBgQERGRjoEBERER6RgYEBERkY6BAREREekYGBAREZGOgQERERHpGBgQERGRjoEBERER6RgYEBERkY6BAREREekYGBAREZFo/h+2fLvP3MIzrwAAAABJRU5ErkJggg==",
2642
+ "text/plain": [
2643
+ "<Figure size 600x400 with 2 Axes>"
2644
+ ]
2645
+ },
2646
+ "metadata": {},
2647
+ "output_type": "display_data"
2648
+ },
2649
+ {
2650
+ "name": "stderr",
2651
+ "output_type": "stream",
2652
+ "text": [
2653
+ "INFO:root:📓 logging results\n"
2654
+ ]
2655
+ }
2656
+ ],
2657
  "source": [
2658
+ "model_trained, history = eval_pretrained_model(\n",
2659
+ " model=effnetB0,\n",
2660
+ " train_ds=train_ds,\n",
2661
+ " val_ds=val_ds,\n",
2662
+ " test_ds=test_ds,\n",
2663
+ " LOG_DIR=LOG_PATH,\n",
2664
+ " CHKPT_DIR=CHKPT_PATH,\n",
2665
+ " model_name=model_name,\n",
2666
+ " input_size=input_size,\n",
2667
+ " batch_size=batch_size,\n",
2668
+ " n_epochs=n_epochs,\n",
2669
+ " optimizer=optimizer,\n",
2670
+ " loss=loss,\n",
2671
+ " metrics=metrics,\n",
2672
+ ")"
2673
  ]
2674
  },
2675
  {
2676
  "cell_type": "code",
2677
+ "execution_count": 33,
2678
  "metadata": {},
2679
  "outputs": [
2680
  {
 
2684
  "traceback": [
2685
  "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
2686
  "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
2687
+ "Cell \u001b[0;32mIn[33], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mstop\u001b[49m\n",
2688
  "\u001b[0;31mNameError\u001b[0m: name 'stop' is not defined"
2689
  ]
2690
  }
 
2737
  "cell_type": "markdown",
2738
  "metadata": {},
2739
  "source": [
2740
+ "## <a id='toc7_1_'></a>[Random Baseline](#toc0_)"
 
 
 
 
 
 
 
2741
  ]
2742
  },
2743
  {
README.md CHANGED
@@ -16,11 +16,12 @@ datasets:
16
 
17
  # 🚧 TODO
18
 
 
 
19
  - voir répartition partenaires, caméras, temporalité, annotations
20
- - utiliser **classification binaire**
21
  - métriques : **matrice de confusion** complète
22
  - décrire voir **erreurs de types et conséquences**
23
- - tester plusieurs pré-entraînements
24
  - modèles :
25
  - [ ] EfficientNet
26
  - [ ] EfficientDet
 
16
 
17
  # 🚧 TODO
18
 
19
+ - faire data augmentation en amont pour **no_smoke oversampling**
20
+ - réduire **n_epochs**
21
  - voir répartition partenaires, caméras, temporalité, annotations
 
22
  - métriques : **matrice de confusion** complète
23
  - décrire voir **erreurs de types et conséquences**
24
+ - tester plusieurs pré-entraînements (est-ce que charger un modèle pré-entraîné ImageNet aide vraiment ?)
25
  - modèles :
26
  - [ ] EfficientNet
27
  - [ ] EfficientDet
config.yaml CHANGED
@@ -7,13 +7,20 @@ rdm_seed: 42
7
  data_root_dir: "data"
8
  raw_data_dir: "raw"
9
  clr_hf_cache_script_abs_path: './src/clear_hf_cache.sh'
10
- data_format: "keras" # "ultralytics" or "keras"
11
  db_info_uri: "data_info.csv"
12
 
13
- # log:
14
- # log_dir: "logs"
15
-
16
- # app_data:
17
- # local_path: "app_data"
18
- # model: "EfficientNetB0_app.keras"
19
- # onnx: "EfficientNetB0_app.onnx"
 
 
 
 
 
 
 
 
 
7
  data_root_dir: "data"
8
  raw_data_dir: "raw"
9
  clr_hf_cache_script_abs_path: './src/clear_hf_cache.sh'
 
10
  db_info_uri: "data_info.csv"
11
 
12
+ # Models
13
+ models_common:
14
+ models_root_dir: "models"
15
+ classes: ["no_smoke", "smoke"]
16
+ log_dir: "runs"
17
+ chkpts_dir : "chkpts"
18
+ # model_EffNet:
19
+ # model_name: "EfficientNetB0"
20
+ # input_size: (224, 224)
21
+ # batch_size: 48
22
+ # n_epochs: 100
23
+ # loss: "binary_crossentropy"
24
+ # metrics: ["accuracy"]
25
+ # optimizer: AdamW(learning_rate=0.0002, weight_decay=0.05)
26
+ # trained_model_uri: "EfficientNetB0_app.keras"
src/load_data.py CHANGED
@@ -22,7 +22,6 @@ RDM_SEED = cfg["rdm_seed"]
22
  OUTPUT_DIR = cfg["data_root_dir"]
23
  RAW_DATA_DIR = os.path.join(OUTPUT_DIR, cfg["raw_data_dir"])
24
  CLR_CACHE_SCRIPT = cfg["clr_hf_cache_script_abs_path"]
25
- DATA_FORMAT = cfg["data_format"]
26
  DB_INFO_URI = os.path.join(OUTPUT_DIR, cfg["db_info_uri"])
27
 
28
 
@@ -172,9 +171,13 @@ def format_data_keras(df):
172
  if not os.path.exists(OUTPUT_DIR):
173
  logging.warning(f"{OUTPUT_DIR} doesn't exist: (re)load data first")
174
  return df
175
-
176
  # Create Keras parent folder
177
  keras_dir = os.path.join(OUTPUT_DIR, "keras")
 
 
 
 
178
  os.makedirs(keras_dir, exist_ok=True)
179
  # Create splits folders
180
  for split in df.split.unique():
@@ -195,6 +198,17 @@ def format_data_keras(df):
195
  df.drop(columns="uri", inplace=True)
196
  df.rename(columns={"uri_dest": "uri"}, inplace=True)
197
 
 
 
 
 
 
 
 
 
 
 
 
198
  return df
199
 
200
 
 
22
  OUTPUT_DIR = cfg["data_root_dir"]
23
  RAW_DATA_DIR = os.path.join(OUTPUT_DIR, cfg["raw_data_dir"])
24
  CLR_CACHE_SCRIPT = cfg["clr_hf_cache_script_abs_path"]
 
25
  DB_INFO_URI = os.path.join(OUTPUT_DIR, cfg["db_info_uri"])
26
 
27
 
 
171
  if not os.path.exists(OUTPUT_DIR):
172
  logging.warning(f"{OUTPUT_DIR} doesn't exist: (re)load data first")
173
  return df
174
+
175
  # Create Keras parent folder
176
  keras_dir = os.path.join(OUTPUT_DIR, "keras")
177
+ # Check if data already exists
178
+ if os.path.exists(keras_dir) and len(os.listdir("./data/keras")) > 0:
179
+ logging.info(f"{keras_dir} already exists: data already formatted")
180
+ return df
181
  os.makedirs(keras_dir, exist_ok=True)
182
  # Create splits folders
183
  for split in df.split.unique():
 
198
  df.drop(columns="uri", inplace=True)
199
  df.rename(columns={"uri_dest": "uri"}, inplace=True)
200
 
201
+ return df
202
+
203
+
204
+ def oversample_class(df):
205
+ """Oversample an under-represented class"""
206
+ count_df = df.groupby(["split", "label"]).size().reset_index(name="count")
207
+ count_df = count_df.loc[count_df["split"] != "val"]
208
+
209
+
210
+
211
+
212
  return df
213
 
214