{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import json\n", "import os\n", "import shutil\n", "import tensorflow as tf\n", "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", "from tensorflow.keras.utils import image_dataset_from_directory\n", "from tensorflow.keras.models import Sequential\n", "from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense\n", "from tensorflow.keras.callbacks import Callback" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "def create_dataframe(annotations_path):\n", " with open(annotations_path, 'r') as file:\n", " data = json.load(file)\n", "\n", " images = pd.DataFrame(data['images']).rename(columns={'id': 'image_id'})[['image_id', 'file_name']]\n", "\n", " categories = pd.DataFrame(data['categories'])[['id', 'name']]\n", " categories.rename(columns={'id': 'category_id'}, inplace=True)\n", "\n", " usecols = ['image_id', 'category_id']\n", " annotations = pd.DataFrame(data['annotations'])[usecols]\n", "\n", " dataframe = annotations.merge(categories, on='category_id').merge(images, on='image_id')[['file_name', 'name']]\n", " \n", " return dataframe" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "def copy_images_to_destination(base_dir, dataframe, split):\n", " images_dir = os.path.join(base_dir, 'images')\n", "\n", " for index, row in dataframe.iterrows():\n", " file_name = row['file_name']\n", " file_class = row['name']\n", "\n", " dest_dir = os.path.join(split, file_class)\n", " os.makedirs(dest_dir, exist_ok=True)\n", "\n", " source_path = os.path.join(images_dir, file_name)\n", " destination_path = os.path.join(dest_dir, file_name)\n", "\n", " shutil.copyfile(source_path, destination_path)\n", "\n", " print(\"Done copying images.\")" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
file_namename
0131094.jpgsoft-cheese
1131094.jpgham-raw
2131094.jpghard-cheese
3131094.jpgbread-wholemeal
4131094.jpgcottage-cheese
.........
76486117029.jpgdamson-plum
76487117524.jpgdamson-plum
76488117849.jpgdamson-plum
76489123468.jpgdamson-plum
76490095795.jpgbean-seeds
\n", "

76491 rows × 2 columns

\n", "
" ], "text/plain": [ " file_name name\n", "0 131094.jpg soft-cheese\n", "1 131094.jpg ham-raw\n", "2 131094.jpg hard-cheese\n", "3 131094.jpg bread-wholemeal\n", "4 131094.jpg cottage-cheese\n", "... ... ...\n", "76486 117029.jpg damson-plum\n", "76487 117524.jpg damson-plum\n", "76488 117849.jpg damson-plum\n", "76489 123468.jpg damson-plum\n", "76490 095795.jpg bean-seeds\n", "\n", "[76491 rows x 2 columns]" ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "train_df = create_dataframe('train/annotations.json')\n", "train_df" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "splits = ['train', 'val']\n", "\n", "for split in splits:\n", " root = f'{split}'\n", "\n", " for index, row in train_df.iterrows():\n", " directory_name = row['name']\n", " directory_path = os.path.join(root, directory_name)\n", "\n", " if not os.path.exists(directory_path):\n", " os.makedirs(directory_path)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
file_namename
0149022.jpgespresso-with-caffeine
1149022.jpgdark-chocolate
2167905.jpgespresso-with-caffeine
3121313.jpgespresso-with-caffeine
4153429.jpgespresso-with-caffeine
.........
1825144675.jpgoat-milk
1826103273.jpgsoup-potato
1827159922.jpgred-cabbage
1828011275.jpgpasta-in-conch-form
1829166537.jpgchocolate
\n", "

1830 rows × 2 columns

\n", "
" ], "text/plain": [ " file_name name\n", "0 149022.jpg espresso-with-caffeine\n", "1 149022.jpg dark-chocolate\n", "2 167905.jpg espresso-with-caffeine\n", "3 121313.jpg espresso-with-caffeine\n", "4 153429.jpg espresso-with-caffeine\n", "... ... ...\n", "1825 144675.jpg oat-milk\n", "1826 103273.jpg soup-potato\n", "1827 159922.jpg red-cabbage\n", "1828 011275.jpg pasta-in-conch-form\n", "1829 166537.jpg chocolate\n", "\n", "[1830 rows x 2 columns]" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "val_df = create_dataframe('val/annotations.json')\n", "val_df" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Done copying images.\n" ] } ], "source": [ "base_dir = 'train'\n", "dataframe = train_df\n", "copy_images_to_destination(base_dir, dataframe, 'train')" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Done copying images.\n" ] } ], "source": [ "base_dir = 'val'\n", "dataframe = val_df\n", "copy_images_to_destination(base_dir, dataframe, 'val')" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 70397 files belonging to 498 classes.\n", "Found 1799 files belonging to 498 classes.\n" ] } ], "source": [ "train = image_dataset_from_directory(\n", " directory='train',\n", " label_mode='categorical',\n", " batch_size=32,\n", " image_size=(299, 299)\n", ")\n", "\n", "val = image_dataset_from_directory(\n", " directory='val',\n", " label_mode='categorical',\n", " batch_size=32,\n", " image_size=(299, 299)\n", ")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "train_datagen = ImageDataGenerator(\n", " rescale=1./255,\n", " shear_range=0.2,\n", " zoom_range=0.2,\n", " horizontal_flip=True\n", ")\n", "\n", "val_datagen = ImageDataGenerator(rescale=1./255)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "class MyCallback(Callback):\n", " def on_epoch_end(self, epoch, logs={}):\n", " if logs.get('val_categorical_accuracy') >= 0.81:\n", " print('Validation accuracy reached 81%. 
Stopping training.')\n", "            self.model.stop_training = True" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"sequential\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " conv2d (Conv2D) (None, 297, 297, 32) 896 \n", " \n", " max_pooling2d (MaxPooling2D (None, 148, 148, 32) 0 \n", " ) \n", " \n", " conv2d_1 (Conv2D) (None, 146, 146, 64) 18496 \n", " \n", " max_pooling2d_1 (MaxPooling (None, 73, 73, 64) 0 \n", " 2D) \n", " \n", " conv2d_2 (Conv2D) (None, 71, 71, 128) 73856 \n", " \n", " max_pooling2d_2 (MaxPooling (None, 35, 35, 128) 0 \n", " 2D) \n", " \n", " flatten (Flatten) (None, 156800) 0 \n", " \n", " dense (Dense) (None, 128) 20070528 \n", " \n", " dense_1 (Dense) (None, 498) 64242 \n", " \n", "=================================================================\n", "Total params: 20,228,018\n", "Trainable params: 20,228,018\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ] } ], "source": [ "model = Sequential()\n", "model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(299, 299, 3)))\n", "model.add(MaxPooling2D((2, 2)))\n", "model.add(Conv2D(64, (3, 3), activation='relu'))\n", "model.add(MaxPooling2D((2, 2)))\n", "model.add(Conv2D(128, (3, 3), activation='relu'))\n", "model.add(MaxPooling2D((2, 2)))\n", "model.add(Flatten())\n", "model.add(Dense(128, activation='relu'))\n", "model.add(Dense(498, activation='softmax'))\n", "\n", "model.summary()\n", "\n", "model.compile(optimizer=tf.keras.optimizers.Adam(),\n", " loss=tf.keras.losses.CategoricalCrossentropy(),\n", " metrics=[tf.keras.metrics.CategoricalAccuracy()])" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/32\n", " 6/2200 [..............................] - ETA: 6:25 - loss: 504.5968 - categorical_accuracy: 0.0052 WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0608s vs `on_train_batch_end` time: 0.0957s). 
Check your callbacks.\n", "2200/2200 [==============================] - 291s 130ms/step - loss: 6.9090 - categorical_accuracy: 0.0398 - val_loss: 5.5961 - val_categorical_accuracy: 0.0411\n", "Epoch 2/32\n", "2200/2200 [==============================] - 279s 127ms/step - loss: 5.4654 - categorical_accuracy: 0.0420 - val_loss: 5.5951 - val_categorical_accuracy: 0.0417\n", "Epoch 3/32\n", "2200/2200 [==============================] - 276s 125ms/step - loss: 5.4428 - categorical_accuracy: 0.0449 - val_loss: 5.6058 - val_categorical_accuracy: 0.0417\n", "Epoch 4/32\n", "2200/2200 [==============================] - 285s 130ms/step - loss: 5.3952 - categorical_accuracy: 0.0528 - val_loss: 5.6658 - val_categorical_accuracy: 0.0411\n", "Epoch 5/32\n", "2200/2200 [==============================] - 282s 128ms/step - loss: 5.3362 - categorical_accuracy: 0.0630 - val_loss: 5.7703 - val_categorical_accuracy: 0.0406\n", "Epoch 6/32\n", "2200/2200 [==============================] - 326s 148ms/step - loss: 5.2673 - categorical_accuracy: 0.0755 - val_loss: 5.7254 - val_categorical_accuracy: 0.0411\n", "Epoch 7/32\n", "2200/2200 [==============================] - 300s 136ms/step - loss: 5.2040 - categorical_accuracy: 0.0875 - val_loss: 5.8228 - val_categorical_accuracy: 0.0411\n", "Epoch 8/32\n", "2200/2200 [==============================] - 382s 174ms/step - loss: 5.1794 - categorical_accuracy: 0.0927 - val_loss: 6.0131 - val_categorical_accuracy: 0.0411\n", "Epoch 9/32\n", "2200/2200 [==============================] - 372s 169ms/step - loss: 5.1426 - categorical_accuracy: 0.0984 - val_loss: 6.0550 - val_categorical_accuracy: 0.0406\n", "Epoch 10/32\n", "2200/2200 [==============================] - 335s 152ms/step - loss: 5.0958 - categorical_accuracy: 0.1058 - val_loss: 6.3628 - val_categorical_accuracy: 0.0389\n", "Epoch 11/32\n", "2200/2200 [==============================] - 354s 161ms/step - loss: 5.0727 - categorical_accuracy: 0.1111 - val_loss: 6.4603 - val_categorical_accuracy: 0.0378\n", "Epoch 12/32\n", "2200/2200 [==============================] - 356s 162ms/step - loss: 5.0326 - categorical_accuracy: 0.1166 - val_loss: 6.7461 - val_categorical_accuracy: 0.0417\n", "Epoch 13/32\n", "2200/2200 [==============================] - 354s 161ms/step - loss: 5.0137 - categorical_accuracy: 0.1208 - val_loss: 6.9263 - val_categorical_accuracy: 0.0395\n", "Epoch 14/32\n", "2200/2200 [==============================] - 349s 159ms/step - loss: 4.9708 - categorical_accuracy: 0.1281 - val_loss: 6.9836 - val_categorical_accuracy: 0.0378\n", "Epoch 15/32\n", "2200/2200 [==============================] - 368s 167ms/step - loss: 4.9531 - categorical_accuracy: 0.1318 - val_loss: 6.6221 - val_categorical_accuracy: 0.0384\n", "Epoch 16/32\n", "2200/2200 [==============================] - 360s 164ms/step - loss: 4.9288 - categorical_accuracy: 0.1357 - val_loss: 6.6952 - val_categorical_accuracy: 0.0378\n", "Epoch 17/32\n", "2200/2200 [==============================] - 359s 163ms/step - loss: 4.8955 - categorical_accuracy: 0.1403 - val_loss: 6.6760 - val_categorical_accuracy: 0.0400\n", "Epoch 18/32\n", "2200/2200 [==============================] - 354s 161ms/step - loss: 4.8613 - categorical_accuracy: 0.1455 - val_loss: 7.7695 - val_categorical_accuracy: 0.0384\n", "Epoch 19/32\n", "2200/2200 [==============================] - 327s 148ms/step - loss: 4.8498 - categorical_accuracy: 0.1494 - val_loss: 7.5958 - val_categorical_accuracy: 0.0361\n", "Epoch 20/32\n", "2200/2200 [==============================] - 362s 
165ms/step - loss: 4.7999 - categorical_accuracy: 0.1556 - val_loss: 7.8458 - val_categorical_accuracy: 0.0372\n", "Epoch 21/32\n", "2200/2200 [==============================] - 361s 164ms/step - loss: 4.7786 - categorical_accuracy: 0.1594 - val_loss: 8.5637 - val_categorical_accuracy: 0.0389\n", "Epoch 22/32\n", "2200/2200 [==============================] - 360s 164ms/step - loss: 4.7561 - categorical_accuracy: 0.1645 - val_loss: 8.0804 - val_categorical_accuracy: 0.0384\n", "Epoch 23/32\n", "2200/2200 [==============================] - 301s 137ms/step - loss: 4.7279 - categorical_accuracy: 0.1694 - val_loss: 8.9041 - val_categorical_accuracy: 0.0372\n", "Epoch 24/32\n", "2200/2200 [==============================] - 310s 140ms/step - loss: 4.6962 - categorical_accuracy: 0.1732 - val_loss: 9.0381 - val_categorical_accuracy: 0.0361\n", "Epoch 25/32\n", "2200/2200 [==============================] - 314s 142ms/step - loss: 4.6756 - categorical_accuracy: 0.1769 - val_loss: 8.6350 - val_categorical_accuracy: 0.0378\n", "Epoch 26/32\n", "2200/2200 [==============================] - 296s 134ms/step - loss: 4.6531 - categorical_accuracy: 0.1820 - val_loss: 9.3287 - val_categorical_accuracy: 0.0367\n", "Epoch 27/32\n", "2200/2200 [==============================] - 282s 128ms/step - loss: 4.6207 - categorical_accuracy: 0.1875 - val_loss: 9.8095 - val_categorical_accuracy: 0.0361\n", "Epoch 28/32\n", "2200/2200 [==============================] - 349s 158ms/step - loss: 4.6045 - categorical_accuracy: 0.1904 - val_loss: 9.4419 - val_categorical_accuracy: 0.0378\n", "Epoch 29/32\n", "2200/2200 [==============================] - 326s 148ms/step - loss: 4.5832 - categorical_accuracy: 0.1945 - val_loss: 9.4719 - val_categorical_accuracy: 0.0361\n", "Epoch 30/32\n", "2200/2200 [==============================] - 361s 164ms/step - loss: 4.5393 - categorical_accuracy: 0.2010 - val_loss: 9.8935 - val_categorical_accuracy: 0.0395\n", "Epoch 31/32\n", "2200/2200 [==============================] - 334s 152ms/step - loss: 4.5176 - categorical_accuracy: 0.2052 - val_loss: 9.9011 - val_categorical_accuracy: 0.0378\n", "Epoch 32/32\n", "2200/2200 [==============================] - 344s 156ms/step - loss: 4.4989 - categorical_accuracy: 0.2082 - val_loss: 10.2300 - val_categorical_accuracy: 0.0378\n" ] } ], "source": [ "callback = MyCallback()\n", "history = model.fit(train, epochs=32, validation_data=val, callbacks=[callback])" ] } ], "metadata": { "kernelspec": { "display_name": "gpu", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.18" } }, "nbformat": 4, "nbformat_minor": 2 }