Deep learning project idea: The CIFAR-10 dataset is a collection of 60,000 32x32 colour images from 10 categories, such as cars, birds, cats, dogs, horses, ships, and trucks. The idea of the project is to build an image classification model that can determine which category an input image belongs to. Image classification is used in many applications and is a great project for getting started with deep learning.
Input
%matplotlib inline
import tensorflow as tf
import os
import numpy as np
from matplotlib import pyplot as plt
# Create a directory for the model checkpoints saved during training
if not os.path.isdir('models'):
    os.mkdir('models')
print('TensorFlow version:', tf.__version__)
Output
TensorFlow version: 2.11.0
# Load CIFAR-10 and one-hot encode the integer labels
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)
print(x_train.shape, y_train.shape)
print(x_test.shape, y_test.shape)
Output
(50000, 32, 32, 3) (50000, 10)
(10000, 32, 32, 3) (10000, 10)
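The images are 8-bit RGB arrays with values between 0 and 255, and to_categorical has turned each integer label into a one-hot vector of length 10; the pixel values are divided by 255 later, at training time. A small optional sketch to sanity-check this, using only the arrays already loaded above:
# Optional sanity check on the loaded arrays (sketch)
print(x_train.dtype, x_train.min(), x_train.max())   # uint8, 0, 255
print(y_train[0])                                    # a one-hot label vector of length 10
print((x_train[0] / 255.).max() <= 1.0)              # dividing by 255 maps pixels into [0, 1]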
class_names = ['aeroplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
def show_random_examples(x, y, p):
    # Show 10 random images with their predicted labels; a label is drawn in
    # green when the prediction matches the true class and in red otherwise.
    indices = np.random.choice(range(x.shape[0]), 10, replace=False)
    x = x[indices]
    y = y[indices]
    p = p[indices]
    plt.figure(figsize=(10, 5))
    for i in range(10):
        plt.subplot(2, 5, i + 1)
        plt.imshow(x[i])
        plt.xticks([])
        plt.yticks([])
        col = 'green' if np.argmax(y[i]) == np.argmax(p[i]) else 'red'
        plt.xlabel(class_names[np.argmax(p[i])], color=col)
    plt.show()
# Before training there are no predictions yet, so the true labels are passed
# in place of predictions and every label is shown in green.
show_random_examples(x_train, y_train, y_train)
show_random_examples(x_test, y_test, y_test)
Output
(Two 2x5 grids of random training and test images, each labelled with its class name.)
from tensorflow.keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from tensorflow.keras.layers import Dropout, Flatten, Input, Dense
def create_model():
    def add_conv_block(model, num_filters):
        # Two 3x3 convolutions (one 'same'-padded, one 'valid'), then pooling and dropout
        model.add(Conv2D(num_filters, 3, activation='relu', padding='same'))
        model.add(BatchNormalization())
        model.add(Conv2D(num_filters, 3, activation='relu', padding='valid'))
        model.add(MaxPooling2D(pool_size=2))
        model.add(Dropout(0.2))
        return model

    model = tf.keras.models.Sequential()
    model.add(Input(shape=(32, 32, 3)))

    model = add_conv_block(model, 32)
    model = add_conv_block(model, 64)
    model = add_conv_block(model, 128)

    model.add(Flatten())
    model.add(Dense(10, activation='softmax'))

    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
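Each conv block keeps the spatial size through the 'same'-padded convolution, loses two pixels through the 'valid' convolution, and then halves the result with max pooling, so a 32x32 input shrinks to 15x15, then 6x6, then 2x2 across the three blocks. A minimal sketch of that shape arithmetic for a single block (the helper name check_block_shape is only illustrative):
# Sketch: trace the output shape of one conv block (illustrative only)
def check_block_shape(input_size, num_filters):
    block = tf.keras.models.Sequential([
        Input(shape=(input_size, input_size, 3)),
        Conv2D(num_filters, 3, activation='relu', padding='same'),   # size unchanged
        BatchNormalization(),
        Conv2D(num_filters, 3, activation='relu', padding='valid'),  # size - 2
        MaxPooling2D(pool_size=2),                                   # size halved (floor)
        Dropout(0.2),
    ])
    return block.output_shape

print(check_block_shape(32, 32))   # (None, 15, 15, 32)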
model = create_model()
model.summary()
epochs = 20
batch_size = 128

history = model.fit(
    x_train / 255., y_train,
    validation_data=(x_test / 255., y_test),
    epochs=epochs, batch_size=batch_size,
    callbacks=[
        tf.keras.callbacks.EarlyStopping(monitor='val_accuracy', patience=2),
        tf.keras.callbacks.ModelCheckpoint('models/model_{val_accuracy:.3f}.h5', save_best_only=True,
                                           save_weights_only=False, monitor='val_accuracy')
    ]
)
losses = history.history['loss']
accs = history.history['accuracy']
val_losses = history.history['val_loss']
val_accs = history.history['val_accuracy']
epochs = len(losses)
plt.figure(figsize=(12, 4))
for i, metrics in enumerate(zip([losses, accs], [val_losses, val_accs], ['Loss', 'Accuracy'])):
    plt.subplot(1, 2, i + 1)
    plt.plot(range(epochs), metrics[0], label='Training {}'.format(metrics[2]))
    plt.plot(range(epochs), metrics[1], label='Validation {}'.format(metrics[2]))
    plt.legend()
plt.show()
Output:
Model: "sequential"
_____________________________________________________________________________
 Layer (type)                                 Output Shape          Param #
=============================================================================
 conv2d (Conv2D)                              (None, 32, 32, 32)    896
 batch_normalization (BatchNormalization)     (None, 32, 32, 32)    128
 conv2d_1 (Conv2D)                            (None, 30, 30, 32)    9248
 max_pooling2d (MaxPooling2D)                 (None, 15, 15, 32)    0
 dropout (Dropout)                            (None, 15, 15, 32)    0
 conv2d_2 (Conv2D)                            (None, 15, 15, 64)    18496
 batch_normalization_1 (BatchNormalization)   (None, 15, 15, 64)    256
 conv2d_3 (Conv2D)                            (None, 13, 13, 64)    36928
 max_pooling2d_1 (MaxPooling2D)               (None, 6, 6, 64)      0
 dropout_1 (Dropout)                          (None, 6, 6, 64)      0
 conv2d_4 (Conv2D)                            (None, 6, 6, 128)     73856
 batch_normalization_2 (BatchNormalization)   (None, 6, 6, 128)     512
 conv2d_5 (Conv2D)                            (None, 4, 4, 128)     147584
 max_pooling2d_2 (MaxPooling2D)               (None, 2, 2, 128)     0
 dropout_2 (Dropout)                          (None, 2, 2, 128)     0
 flatten (Flatten)                            (None, 512)           0
 dense (Dense)                                (None, 10)            5130
=============================================================================
Total params: 293,034
Trainable params: 292,586
Non-trainable params: 448
_____________________________________________________________________________
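The parameter counts in the table follow directly from the layer shapes: a Conv2D layer has kernel_height * kernel_width * input_channels * filters weights plus one bias per filter, and BatchNormalization keeps four values per channel (two trainable, two not). A quick sketch reproducing a few rows:
# Sketch: reproduce a few parameter counts from the summary
print(3 * 3 * 3 * 32 + 32)    # conv2d: 896
print(4 * 32)                 # batch_normalization: 128 (gamma, beta, moving mean, moving variance)
print(3 * 3 * 32 * 32 + 32)   # conv2d_1: 9248
print(512 * 10 + 10)          # dense: 5130 (2*2*128 flattened inputs)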
Epoch 1/20
391/391 [==============================] - 272s 681ms/step - loss: 1.5111 - accuracy: 0.4643 - val_loss: 2.6051 - val_accuracy: 0.1347
Epoch 2/20
391/391 [==============================] - 263s 671ms/step - loss: 1.0590 - accuracy: 0.6250 - val_loss: 1.0038 - val_accuracy: 0.6471
Epoch 3/20
391/391 [==============================] - 249s 636ms/step - loss: 0.8665 - accuracy: 0.6948 - val_loss: 0.8819 - val_accuracy: 0.7041
Epoch 4/20
391/391 [==============================] - 246s 630ms/step - loss: 0.7563 - accuracy: 0.7345 - val_loss: 0.9613 - val_accuracy: 0.6840
Epoch 5/20
391/391 [==============================] - 246s 628ms/step - loss: 0.6816 - accuracy: 0.7622 - val_loss: 0.6904 - val_accuracy: 0.7549
Epoch 6/20
391/391 [==============================] - 245s 626ms/step - loss: 0.6200 - accuracy: 0.7817 - val_loss: 0.8163 - val_accuracy: 0.7326
Epoch 7/20
391/391 [==============================] - 245s 627ms/step - loss: 0.5830 - accuracy: 0.7939 - val_loss: 0.6392 - val_accuracy: 0.7860
Epoch 8/20
391/391 [==============================] - 244s 624ms/step - loss: 0.5423 - accuracy: 0.8100 - val_loss: 0.6147 - val_accuracy: 0.7891
Epoch 9/20
391/391 [==============================] - 248s 635ms/step - loss: 0.5032 - accuracy: 0.8238 - val_loss: 0.6820 - val_accuracy: 0.7659
Epoch 10/20
391/391 [==============================] - 248s 633ms/step - loss: 0.4749 - accuracy: 0.8312 - val_loss: 0.6307 - val_accuracy: 0.7876
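Training stops after epoch 10 because the validation accuracy has not improved for two epochs, so the best checkpoint on disk is the one from epoch 8. Rather than typing the file name by hand, the best checkpoint could also be found programmatically; a small sketch using only os (already imported), relying on the fixed three-decimal naming pattern:
# Sketch: pick the checkpoint with the highest val_accuracy embedded in its name
checkpoints = sorted(f for f in os.listdir('models') if f.endswith('.h5'))
print(checkpoints[-1])   # e.g. model_0.789.h5 for the run logged above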
# Load the best checkpoint saved during training (val_accuracy 0.789 in the log above)
model = tf.keras.models.load_model('models/model_0.789.h5')
preds = model.predict(x_test/255.)
313/313 [==============================] - 13s 40ms/step
show_random_examples(x_test, y_test, preds)
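For a single overall number rather than a handful of examples, the predictions computed above can be compared against the one-hot test labels; a minimal sketch with NumPy:
# Sketch: overall test accuracy from the predictions computed above
test_acc = np.mean(np.argmax(preds, axis=1) == np.argmax(y_test, axis=1))
print('Test accuracy: {:.3f}'.format(test_acc))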