Convolutional Neural Networks (2): The AlexNet Model Structure

Version: TensorFlow 2.1
Runtime environment: Google Colaboratory

import os

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
from tensorflow.keras import datasets, Sequential, layers
from tensorflow.keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout
import matplotlib.pyplot as plt

# download the CIFAR-10 dataset (the second tuple is the test split, used here for validation)
(x, y), (x_val, y_val) = datasets.cifar10.load_data()
print(x.shape, y.shape)


def preprocess(x, y):
    """Scale images to [0, 1] and one-hot encode the labels.

    :param x: uint8 image batch of shape (N, 32, 32, 3)
    :param y: integer label batch of shape (N, 1)
    :return: (float32 images, one-hot labels of shape (N, 10))
    """
    x = tf.cast(x, dtype=tf.float32) / 255.
    x = tf.reshape(x, [-1, 32, 32, 3])
    y = tf.one_hot(tf.cast(y, dtype=tf.int32), depth=10)
    y = tf.reshape(y, [-1, 10])
    return x, y


train_db = tf.data.Dataset.from_tensor_slices((x, y))
val_db = tf.data.Dataset.from_tensor_slices((x_val, y_val))
train_db = train_db.batch(256).map(preprocess)
val_db = val_db.batch(256).map(preprocess)  # batch the validation set as well so evaluation does not run sample by sample
sample = next(iter(train_db))
print(sample[0].shape, sample[1].shape)


class LRN(layers.Layer):
    """Local Response Normalization layer, as used in the original AlexNet.

    Wraps tf.nn.lrn, which divides each activation by a term computed from the
    sum of squares of its neighbours across 2 * depth_radius + 1 adjacent
    channels: output = input / (bias + alpha * sum_of_squares) ** beta.
    """

    def __init__(self):
        super().__init__()
        self.depth_radius = 2
        self.bias = 1
        self.alpha = 1e-4
        self.beta = 0.75

    def call(self, inputs):
        return tf.nn.lrn(inputs, depth_radius=self.depth_radius, bias=self.bias, alpha=self.alpha, beta=self.beta)


model = Sequential()
model.add(Conv2D(
    filters=96,
    kernel_size=(5, 5),
    activation='relu',
    strides=2,
    padding='same',
    input_shape=(32, 32, 3)
))
model.add(MaxPool2D(
    pool_size=(2, 2),
    strides=1
))
model.add(LRN())  # local response normalization

model.add(Conv2D(
    filters=256,
    kernel_size=(3, 3),
    strides=1,
    activation='relu'
))
model.add(MaxPool2D(
    pool_size=(2, 2),
    strides=1
))
model.add(LRN())  # local response normalization

model.add(Conv2D(
    filters=384,
    kernel_size=(3, 3),
    activation='relu',
    strides=1,
    padding='same',
))
model.add(Conv2D(
    filters=384,
    kernel_size=(3, 3),
    activation='relu',
    strides=1,
    padding='same',
))
model.add(Conv2D(
    filters=256,
    kernel_size=(3, 3),
    activation='relu',
    strides=1,
    padding='same',
))
model.add(MaxPool2D(
    pool_size=(2, 2),
    strides=1
))
model.add(Flatten())  # flatten the feature maps
model.add(Dense(512, activation='relu'))
model.add(Dropout(rate=0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(rate=0.5))
model.add(Dense(10, activation='softmax'))
model.summary()
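# With 32x32 CIFAR-10 inputs (rather than AlexNet's original 224x224 ImageNet
# crops), the kernel sizes, strides and dense layers above are scaled down;
# the printed summary shows the last pooled feature map is 11x11x256, i.e.
# 30976 values feeding the Flatten layer.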

model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
history = model.fit(train_db, epochs=20, validation_data=val_db, shuffle=True)
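# Note: the shuffle=True argument above is ignored when the input is a
# tf.data.Dataset; shuffling, if desired, belongs in the input pipeline
# (e.g. train_db = train_db.shuffle(10000) before .batch(256)).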

plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('AlexNet')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend(['train loss', 'validation loss'])
plt.show()
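
Since accuracy is tracked as a metric, the same History object also records the accuracy curves; a minimal sketch for plotting them, mirroring the loss plot above:

# Optional: plot the training and validation accuracy recorded by model.fit.
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('AlexNet')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend(['train accuracy', 'validation accuracy'])
plt.show()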

Output:

Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
170500096/170498071 [==============================] - 2s 0us/step
(50000, 32, 32, 3) (50000, 1)
(256, 32, 32, 3) (256, 10)
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d (Conv2D)              (None, 16, 16, 96)        7296      
_________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 15, 15, 96)        0         
_________________________________________________________________
lrn (LRN)                    (None, 15, 15, 96)        0         
_________________________________________________________________
conv2d_1 (Conv2D)            (None, 13, 13, 256)       221440    
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 12, 12, 256)       0         
_________________________________________________________________
lrn_1 (LRN)                  (None, 12, 12, 256)       0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 12, 12, 384)       885120    
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 12, 12, 384)       1327488   
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 12, 12, 256)       884992    
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 11, 11, 256)       0         
_________________________________________________________________
flatten (Flatten)            (None, 30976)             0         
_________________________________________________________________
dense (Dense)                (None, 512)               15860224  
_________________________________________________________________
dropout (Dropout)            (None, 512)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 256)               131328    
_________________________________________________________________
dropout_1 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 10)                2570      
=================================================================
Total params: 19,320,458
Trainable params: 19,320,458
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
196/196 [==============================] - 99s 507ms/step - loss: 2.2953 - accuracy: 0.1254 - val_loss: 2.2724 - val_accuracy: 0.2122
Epoch 2/20
196/196 [==============================] - 96s 491ms/step - loss: 2.2191 - accuracy: 0.1751 - val_loss: 2.1614 - val_accuracy: 0.1942
Epoch 3/20
196/196 [==============================] - 96s 492ms/step - loss: 2.1165 - accuracy: 0.2127 - val_loss: 1.9895 - val_accuracy: 0.2805
Epoch 4/20
196/196 [==============================] - 98s 498ms/step - loss: 2.0217 - accuracy: 0.2584 - val_loss: 2.0198 - val_accuracy: 0.2498
Epoch 5/20
196/196 [==============================] - 96s 492ms/step - loss: 1.9376 - accuracy: 0.2901 - val_loss: 1.8257 - val_accuracy: 0.3443
Epoch 6/20
196/196 [==============================] - 97s 493ms/step - loss: 1.8502 - accuracy: 0.3272 - val_loss: 1.7727 - val_accuracy: 0.3569
Epoch 7/20
196/196 [==============================] - 96s 492ms/step - loss: 1.7851 - accuracy: 0.3507 - val_loss: 1.6387 - val_accuracy: 0.3956
Epoch 8/20
196/196 [==============================] - 97s 493ms/step - loss: 1.7256 - accuracy: 0.3734 - val_loss: 1.7032 - val_accuracy: 0.3926
Epoch 9/20
196/196 [==============================] - 97s 496ms/step - loss: 1.6814 - accuracy: 0.3888 - val_loss: 1.6427 - val_accuracy: 0.4062
Epoch 10/20
196/196 [==============================] - 97s 494ms/step - loss: 1.6348 - accuracy: 0.4096 - val_loss: 1.5945 - val_accuracy: 0.4249
Epoch 11/20
196/196 [==============================] - 97s 495ms/step - loss: 1.5934 - accuracy: 0.4207 - val_loss: 1.5731 - val_accuracy: 0.4332
Epoch 12/20
196/196 [==============================] - 97s 496ms/step - loss: 1.5589 - accuracy: 0.4369 - val_loss: 1.5351 - val_accuracy: 0.4464
Epoch 13/20
196/196 [==============================] - 97s 497ms/step - loss: 1.5234 - accuracy: 0.4504 - val_loss: 1.4803 - val_accuracy: 0.4647
Epoch 14/20
196/196 [==============================] - 97s 497ms/step - loss: 1.4931 - accuracy: 0.4625 - val_loss: 1.5121 - val_accuracy: 0.4577
Epoch 15/20
196/196 [==============================] - 97s 497ms/step - loss: 1.4664 - accuracy: 0.4735 - val_loss: 1.3656 - val_accuracy: 0.5036
Epoch 16/20
196/196 [==============================] - 97s 495ms/step - loss: 1.4364 - accuracy: 0.4851 - val_loss: 1.3698 - val_accuracy: 0.5004
Epoch 17/20
196/196 [==============================] - 98s 500ms/step - loss: 1.4152 - accuracy: 0.4933 - val_loss: 1.3749 - val_accuracy: 0.4984
Epoch 18/20
196/196 [==============================] - 97s 494ms/step - loss: 1.3872 - accuracy: 0.5021 - val_loss: 1.3117 - val_accuracy: 0.5241
Epoch 19/20
196/196 [==============================] - 97s 494ms/step - loss: 1.3630 - accuracy: 0.5134 - val_loss: 1.3548 - val_accuracy: 0.5079
Epoch 20/20
196/196 [==============================] - 98s 500ms/step - loss: 1.3443 - accuracy: 0.5217 - val_loss: 1.3223 - val_accuracy: 0.5280

(Figure: training and validation loss curves over the 20 epochs)
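
The final validation numbers can also be read back from the trained model directly, rather than off the last epoch line of the log; a minimal sketch reusing the val_db pipeline defined above:

# Evaluate the trained model on the validation pipeline.
val_loss, val_acc = model.evaluate(val_db)
print('validation loss: {:.4f}, validation accuracy: {:.4f}'.format(val_loss, val_acc))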
