Chadsglm committed on
Commit
f326e97
·
verified ·
1 Parent(s): ee605dc

Upload Skin Cancer Classification.ipynb

Files changed (1)
  1. Skin Cancer Classification.ipynb +581 -0
Skin Cancer Classification.ipynb ADDED
@@ -0,0 +1,581 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "c6c2112d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import cv2\n",
+ "import os\n",
+ "\n",
+ "import pandas as pd\n",
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "677fd6f1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "labels=['Cancer', 'Non Cancer']\n",
+ "img_path='Skin Data/'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "1ae9f20f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "img_list=[]\n",
+ "label_list=[]\n",
+ "\n",
+ "# collect every image path and its class label from the two class folders\n",
+ "for label in labels:\n",
+ " for img_file in os.listdir(img_path+label):\n",
+ " img_list.append(img_path+label+'/'+img_file)\n",
+ " label_list.append(label)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "6736fc5f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "df = pd.DataFrame({'img': img_list, 'label': label_list})"
+ ]
+ },
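+ {
+ "cell_type": "markdown",
+ "id": "added-class-balance-note",
+ "metadata": {},
+ "source": [
+ "A sanity check worth doing before preprocessing is to look at the class balance, since plain accuracy is misleading when the `Cancer` / `Non Cancer` counts differ. The next cell is a minimal sketch using only the `df` built above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "added-class-balance-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: images per class, as counts and as a share of the dataset\n",
+ "print(df['label'].value_counts())\n",
+ "print(df['label'].value_counts(normalize=True))"
+ ]
+ },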
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "bb9a0009",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "<div>\n",
+ "<style scoped>\n",
+ " .dataframe tbody tr th:only-of-type {\n",
+ " vertical-align: middle;\n",
+ " }\n",
+ "\n",
+ " .dataframe tbody tr th {\n",
+ " vertical-align: top;\n",
+ " }\n",
+ "\n",
+ " .dataframe thead th {\n",
+ " text-align: right;\n",
+ " }\n",
+ "</style>\n",
+ "<table border=\"1\" class=\"dataframe\">\n",
+ " <thead>\n",
+ " <tr style=\"text-align: right;\">\n",
+ " <th></th>\n",
+ " <th>img</th>\n",
+ " <th>label</th>\n",
+ " </tr>\n",
+ " </thead>\n",
+ " <tbody>\n",
+ " <tr>\n",
+ " <th>130</th>\n",
+ " <td>Skin Data/Non Cancer/614.JPG</td>\n",
+ " <td>Non Cancer</td>\n",
+ " </tr>\n",
+ " <tr>\n",
+ " <th>73</th>\n",
+ " <td>Skin Data/Cancer/2301-1.JPG</td>\n",
+ " <td>Cancer</td>\n",
+ " </tr>\n",
+ " <tr>\n",
+ " <th>202</th>\n",
+ " <td>Skin Data/Non Cancer/1111.JPG</td>\n",
+ " <td>Non Cancer</td>\n",
+ " </tr>\n",
+ " <tr>\n",
+ " <th>211</th>\n",
+ " <td>Skin Data/Non Cancer/1248-1.JPG</td>\n",
+ " <td>Non Cancer</td>\n",
+ " </tr>\n",
+ " <tr>\n",
+ " <th>199</th>\n",
+ " <td>Skin Data/Non Cancer/1065.jpg</td>\n",
+ " <td>Non Cancer</td>\n",
+ " </tr>\n",
+ " </tbody>\n",
+ "</table>\n",
+ "</div>"
+ ],
+ "text/plain": [
+ " img label\n",
+ "130 Skin Data/Non Cancer/614.JPG Non Cancer\n",
+ "73 Skin Data/Cancer/2301-1.JPG Cancer\n",
+ "202 Skin Data/Non Cancer/1111.JPG Non Cancer\n",
+ "211 Skin Data/Non Cancer/1248-1.JPG Non Cancer\n",
+ "199 Skin Data/Non Cancer/1065.jpg Non Cancer"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "df.sample(5)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "54440c37",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "d={'Non Cancer': 0, 'Cancer': 1}\n",
+ "df['encode_label']=df['label'].map(d)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "53cd4b47",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x = []\n",
+ "\n",
+ "for img in df['img']:\n",
+ " img = cv2.imread(str(img)) # OpenCV loads images in BGR order\n",
+ " img = cv2.resize(img, (170, 170))\n",
+ " img = img / 255.0 # normalize pixel values to [0, 1]\n",
+ " x.append(img)"
+ ]
+ },
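+ {
+ "cell_type": "markdown",
+ "id": "added-bgr-note",
+ "metadata": {},
+ "source": [
+ "Note that `cv2.imread` returns pixels in BGR order. A CNN can learn from BGR as long as it is used consistently, but converting to RGB is safer if the images are later shown with matplotlib or fed to ImageNet-pretrained weights. A minimal sketch of the same loop with one extra `cv2.cvtColor` call:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "added-bgr-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: preprocessing loop with an explicit BGR -> RGB conversion\n",
+ "x = []\n",
+ "for img in df['img']:\n",
+ " img = cv2.imread(str(img))\n",
+ " img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # match the RGB order most tools expect\n",
+ " img = cv2.resize(img, (170, 170))\n",
+ " x.append(img / 255.0)"
+ ]
+ },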
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "d81879b5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x = np.array(x)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "6578ffef",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "y=df['encode_label']"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "0f9856e8",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2024-05-15 10:21:21.775290: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
+ "To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
+ ]
+ }
+ ],
+ "source": [
+ "from sklearn.model_selection import train_test_split\n",
+ "\n",
+ "from keras.models import Sequential\n",
+ "from keras.layers import Conv2D, Dense, Flatten, Input, MaxPooling2D, Dropout, BatchNormalization, Reshape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "6eefab7a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x_train,x_test,y_train,y_test=train_test_split(x,y, test_size=.20, random_state=42)"
+ ]
+ },
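+ {
+ "cell_type": "markdown",
+ "id": "added-stratify-note",
+ "metadata": {},
+ "source": [
+ "With only a few hundred images, a purely random 80/20 split can drift from the overall class balance. `train_test_split` accepts a `stratify` argument that preserves the `Cancer` / `Non Cancer` ratio in both sets; a minimal sketch of the same split with stratification:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "added-stratify-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: stratified split keeps both classes proportionally represented\n",
+ "x_train, x_test, y_train, y_test = train_test_split(\n",
+ " x, y, test_size=0.20, random_state=42, stratify=y)"
+ ]
+ },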
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "281f7cae",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# CNN = Convolutional Neural Network"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "id": "4e283b50",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "model=Sequential()\n",
+ "model.add(Input(shape=(170,170,3)))\n",
+ "model.add(Conv2D(32,kernel_size=(3,3),activation='relu'))\n",
+ "model.add(MaxPooling2D(pool_size=(2,2)))\n",
+ "model.add(Conv2D(64,kernel_size=(3,3),activation='relu'))\n",
+ "model.add(MaxPooling2D(pool_size=(2,2)))\n",
+ "model.add(Flatten())\n",
+ "model.add(Dense(128))\n",
+ "model.add(Dense(2, activation='softmax')) # 2 output classes: Non Cancer (0) and Cancer (1)\n",
+ "\n",
+ "# Compile the model\n",
+ "model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])"
+ ]
+ },
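+ {
+ "cell_type": "markdown",
+ "id": "added-dense-note",
+ "metadata": {},
+ "source": [
+ "Note that `Dense(128)` above has no activation argument, so it is a purely linear layer. A common variant, sketched below with an illustrative `alt_model` (not the model trained in this notebook), gives that layer a ReLU non-linearity and adds `Dropout` against overfitting on a small dataset."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "added-dense-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: same architecture with a non-linear hidden layer and dropout\n",
+ "alt_model=Sequential()\n",
+ "alt_model.add(Input(shape=(170,170,3)))\n",
+ "alt_model.add(Conv2D(32,kernel_size=(3,3),activation='relu'))\n",
+ "alt_model.add(MaxPooling2D(pool_size=(2,2)))\n",
+ "alt_model.add(Conv2D(64,kernel_size=(3,3),activation='relu'))\n",
+ "alt_model.add(MaxPooling2D(pool_size=(2,2)))\n",
+ "alt_model.add(Flatten())\n",
+ "alt_model.add(Dense(128, activation='relu')) # non-linearity the original layer lacks\n",
+ "alt_model.add(Dropout(0.5)) # regularization for a small dataset\n",
+ "alt_model.add(Dense(2, activation='softmax'))\n",
+ "alt_model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])"
+ ]
+ },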
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "id": "fcc1a740",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 1/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m7s\u001b[0m 660ms/step - accuracy: 0.5291 - loss: 9.7854 - val_accuracy: 0.7414 - val_loss: 2.5156\n",
+ "Epoch 2/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 610ms/step - accuracy: 0.6474 - loss: 2.9485 - val_accuracy: 0.2586 - val_loss: 0.7912\n",
+ "Epoch 3/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 623ms/step - accuracy: 0.6237 - loss: 0.6547 - val_accuracy: 0.7586 - val_loss: 0.5047\n",
+ "Epoch 4/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 669ms/step - accuracy: 0.7573 - loss: 0.5762 - val_accuracy: 0.7931 - val_loss: 0.4346\n",
+ "Epoch 5/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 650ms/step - accuracy: 0.7664 - loss: 0.4830 - val_accuracy: 0.7414 - val_loss: 0.6113\n",
+ "Epoch 6/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 619ms/step - accuracy: 0.7919 - loss: 0.4656 - val_accuracy: 0.8448 - val_loss: 0.3715\n",
+ "Epoch 7/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 643ms/step - accuracy: 0.8623 - loss: 0.3305 - val_accuracy: 0.8276 - val_loss: 0.4111\n",
+ "Epoch 8/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 617ms/step - accuracy: 0.8871 - loss: 0.3118 - val_accuracy: 0.8103 - val_loss: 0.3918\n",
+ "Epoch 9/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 612ms/step - accuracy: 0.8852 - loss: 0.2627 - val_accuracy: 0.7241 - val_loss: 0.7321\n",
+ "Epoch 10/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 624ms/step - accuracy: 0.8766 - loss: 0.2683 - val_accuracy: 0.7931 - val_loss: 0.4346\n",
+ "Epoch 11/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 632ms/step - accuracy: 0.9435 - loss: 0.1946 - val_accuracy: 0.8103 - val_loss: 0.3652\n",
+ "Epoch 12/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 622ms/step - accuracy: 0.9718 - loss: 0.1293 - val_accuracy: 0.8621 - val_loss: 0.4700\n",
+ "Epoch 13/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 613ms/step - accuracy: 0.9279 - loss: 0.1620 - val_accuracy: 0.8276 - val_loss: 0.4200\n",
+ "Epoch 14/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 639ms/step - accuracy: 0.9648 - loss: 0.0937 - val_accuracy: 0.7586 - val_loss: 0.6257\n",
+ "Epoch 15/15\n",
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m5s\u001b[0m 635ms/step - accuracy: 0.9669 - loss: 0.1067 - val_accuracy: 0.8448 - val_loss: 0.3362\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Train the model\n",
+ "history = model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=15, verbose=1)\n"
+ ]
+ },
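+ {
+ "cell_type": "markdown",
+ "id": "added-history-note",
+ "metadata": {},
+ "source": [
+ "`model.fit` returns a `History` object and matplotlib is already imported, so the noisy per-epoch numbers above are easier to judge as curves. A minimal sketch:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "added-history-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: training vs. validation accuracy per epoch\n",
+ "plt.plot(history.history['accuracy'], label='train')\n",
+ "plt.plot(history.history['val_accuracy'], label='validation')\n",
+ "plt.xlabel('epoch')\n",
+ "plt.ylabel('accuracy')\n",
+ "plt.legend()\n",
+ "plt.show()"
+ ]
+ },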
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "id": "8199ab93",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`. \n"
+ ]
+ }
+ ],
+ "source": [
+ "model.save('cnn_model.h5')"
+ ]
+ },
311
+ "cell_type": "code",
312
+ "execution_count": null,
313
+ "id": "ca51b883",
314
+ "metadata": {},
315
+ "outputs": [],
316
+ "source": []
317
+ },
318
+ {
319
+ "cell_type": "code",
320
+ "execution_count": null,
321
+ "id": "b26379a9",
322
+ "metadata": {},
323
+ "outputs": [],
324
+ "source": [
325
+ "# VGGNET, ResNet50, Inceptionv3, Xception, MobileNetv2 Transfer Learning"
326
+ ]
327
+ },
328
+ {
329
+ "cell_type": "code",
330
+ "execution_count": 27,
331
+ "id": "d3f206da",
332
+ "metadata": {},
333
+ "outputs": [],
334
+ "source": [
335
+ "from keras.models import Sequential\n",
336
+ "from keras.layers import Conv2D, Dense, Flatten, Input, MaxPooling2D, Dropout, BatchNormalization, Reshape\n",
337
+ "\n",
338
+ "from tensorflow.keras.applications import VGG16, ResNet50\n",
339
+ "from tensorflow.keras.preprocessing.image import ImageDataGenerator"
340
+ ]
341
+ },
342
+ {
343
+ "cell_type": "code",
344
+ "execution_count": 30,
345
+ "id": "fccd2086",
346
+ "metadata": {},
347
+ "outputs": [
348
+ {
349
+ "name": "stdout",
350
+ "output_type": "stream",
351
+ "text": [
352
+ "Found 232 images belonging to 2 classes.\n",
353
+ "Found 56 images belonging to 2 classes.\n",
354
+ "Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5\n",
355
+ "\u001b[1m58889256/58889256\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m3s\u001b[0m 0us/step\n",
356
+ "Epoch 1/10\n"
357
+ ]
358
+ },
359
+ {
360
+ "name": "stderr",
361
+ "output_type": "stream",
362
+ "text": [
363
+ "/opt/anaconda3/lib/python3.11/site-packages/keras/src/trainers/data_adapters/py_dataset_adapter.py:121: UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`. Do not pass these arguments to `fit()`, as they will be ignored.\n",
364
+ " self._warn_if_super_not_called()\n"
365
+ ]
366
+ },
367
+ {
368
+ "name": "stdout",
369
+ "output_type": "stream",
370
+ "text": [
371
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m43s\u001b[0m 5s/step - accuracy: 0.5209 - loss: 5.4695 - val_accuracy: 0.7143 - val_loss: 1.6115\n",
372
+ "Epoch 2/10\n",
373
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m41s\u001b[0m 5s/step - accuracy: 0.6928 - loss: 2.1946 - val_accuracy: 0.3036 - val_loss: 1.9652\n",
374
+ "Epoch 3/10\n",
375
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m38s\u001b[0m 5s/step - accuracy: 0.5871 - loss: 1.1498 - val_accuracy: 0.7679 - val_loss: 0.5415\n",
376
+ "Epoch 4/10\n",
377
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m38s\u001b[0m 5s/step - accuracy: 0.8169 - loss: 0.4627 - val_accuracy: 0.7679 - val_loss: 0.5914\n",
378
+ "Epoch 5/10\n",
379
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m37s\u001b[0m 5s/step - accuracy: 0.8383 - loss: 0.3790 - val_accuracy: 0.7857 - val_loss: 0.4250\n",
380
+ "Epoch 6/10\n",
381
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m37s\u001b[0m 5s/step - accuracy: 0.9351 - loss: 0.1650 - val_accuracy: 0.8393 - val_loss: 0.3612\n",
382
+ "Epoch 7/10\n",
383
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m34s\u001b[0m 4s/step - accuracy: 0.9531 - loss: 0.1619 - val_accuracy: 0.8393 - val_loss: 0.3391\n",
384
+ "Epoch 8/10\n",
385
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━��━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m33s\u001b[0m 4s/step - accuracy: 0.9621 - loss: 0.1155 - val_accuracy: 0.8393 - val_loss: 0.3643\n",
386
+ "Epoch 9/10\n",
387
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m36s\u001b[0m 5s/step - accuracy: 0.9667 - loss: 0.1090 - val_accuracy: 0.8214 - val_loss: 0.3249\n",
388
+ "Epoch 10/10\n",
389
+ "\u001b[1m8/8\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m36s\u001b[0m 4s/step - accuracy: 0.9823 - loss: 0.0831 - val_accuracy: 0.8214 - val_loss: 0.4653\n"
390
+ ]
391
+ },
392
+ {
393
+ "data": {
394
+ "text/plain": [
395
+ "<keras.src.callbacks.history.History at 0x169e63650>"
396
+ ]
397
+ },
398
+ "execution_count": 30,
399
+ "metadata": {},
400
+ "output_type": "execute_result"
401
+ }
402
+ ],
403
+ "source": [
404
+ "data_dir='Skin Data'\n",
405
+ "img_width,img_heigth=224,224\n",
406
+ "\n",
407
+ "train_datagen=ImageDataGenerator(rescale=1/255, validation_split=.20)\n",
408
+ "train_datagenerator=train_datagen.flow_from_directory(directory=data_dir,target_size=(img_width,img_heigth),\n",
409
+ " class_mode='binary', subset='training')\n",
410
+ "\n",
411
+ " \n",
412
+ "test_datagen=ImageDataGenerator(rescale=1/255)\n",
413
+ "test_datagenerator=train_datagen.flow_from_directory(directory=data_dir,target_size=(img_width,img_heigth),\n",
414
+ " class_mode='binary', subset='validation')\n",
415
+ "\n",
416
+ " \n",
417
+ "base_model=VGG16(weights='imagenet', input_shape=(img_width,img_heigth,3),include_top=False)\n",
418
+ "\n",
419
+ "model=Sequential()\n",
420
+ "\n",
421
+ "model.add(base_model)\n",
422
+ "for layer in base_model.layers:\n",
423
+ " layer.trainable=False\n",
424
+ "\n",
425
+ "model.add(Flatten())\n",
426
+ "model.add(Dense(1024,activation='relu'))\n",
427
+ "model.add(Dense(1,activation='sigmoid'))\n",
428
+ "\n",
429
+ "model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['accuracy'])\n",
430
+ "\n",
431
+ "model.fit(train_datagenerator,epochs=10,validation_data=test_datagenerator)"
432
+ ]
433
+ },
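+ {
+ "cell_type": "markdown",
+ "id": "added-evaluate-note",
+ "metadata": {},
+ "source": [
+ "The transfer-learning cell only prints per-epoch metrics; calling `model.evaluate` on the validation generator gives one quotable number. A minimal sketch reusing `test_datagenerator` from above:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "added-evaluate-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: final loss/accuracy of the frozen-VGG16 model on the validation subset\n",
+ "val_loss, val_acc = model.evaluate(test_datagenerator)\n",
+ "print(f'validation accuracy: {val_acc:.3f}')"
+ ]
+ },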
+ {
+ "cell_type": "code",
+ "execution_count": 31,
+ "id": "ffef776f",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\">Model: \"sequential_5\"</span>\n",
+ "</pre>\n"
+ ],
+ "text/plain": [
+ "\u001b[1mModel: \"sequential_5\"\u001b[0m\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\">┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+ "┃<span style=\"font-weight: bold\"> Layer (type) </span>┃<span style=\"font-weight: bold\"> Output Shape </span>┃<span style=\"font-weight: bold\"> Param # </span>┃\n",
+ "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+ "│ vgg16 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Functional</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">7</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">7</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">512</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">14,714,688</span> │\n",
+ "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+ "│ flatten_5 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Flatten</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">25088</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
+ "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+ "│ dense_10 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">1024</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">25,691,136</span> │\n",
+ "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+ "│ dense_11 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">1</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">1,025</span> │\n",
+ "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
+ "</pre>\n"
+ ],
+ "text/plain": [
+ "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+ "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n",
+ "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+ "│ vgg16 (\u001b[38;5;33mFunctional\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m7\u001b[0m, \u001b[38;5;34m7\u001b[0m, \u001b[38;5;34m512\u001b[0m) │ \u001b[38;5;34m14,714,688\u001b[0m │\n",
+ "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+ "│ flatten_5 (\u001b[38;5;33mFlatten\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m25088\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
+ "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+ "│ dense_10 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1024\u001b[0m) │ \u001b[38;5;34m25,691,136\u001b[0m │\n",
+ "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+ "│ dense_11 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m1,025\u001b[0m │\n",
+ "└─────────────────────────────────┴────────────────────────┴───────────────┘\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Total params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">91,791,173</span> (350.16 MB)\n",
+ "</pre>\n"
+ ],
+ "text/plain": [
+ "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m91,791,173\u001b[0m (350.16 MB)\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Trainable params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">25,692,161</span> (98.01 MB)\n",
+ "</pre>\n"
+ ],
+ "text/plain": [
+ "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m25,692,161\u001b[0m (98.01 MB)\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Non-trainable params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">14,714,688</span> (56.13 MB)\n",
+ "</pre>\n"
+ ],
+ "text/plain": [
+ "\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m14,714,688\u001b[0m (56.13 MB)\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Optimizer params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">51,384,324</span> (196.02 MB)\n",
+ "</pre>\n"
+ ],
+ "text/plain": [
+ "\u001b[1m Optimizer params: \u001b[0m\u001b[38;5;34m51,384,324\u001b[0m (196.02 MB)\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "model.summary()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4b483c57",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "eee6be78",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+ }