Python: deep-learning practice with the Cats vs Dogs dataset


I. Download the dataset

The data comes from the Kaggle Dogs vs. Cats dataset.
Baidu Netdisk download:
Link: https://pan.baidu.com/s/177uL…
Extraction code: 9j40
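
After downloading, extract the archive so that the original Kaggle images end up under E:\mldata\dogvscat\data\train, the source path used by the splitting code below. A minimal sketch, assuming the downloaded archive is named train.zip (the file name is an assumption, adjust it to the actual download):

import zipfile

# hypothetical archive name; change it to match the downloaded file
archive_path = r'E:\mldata\dogvscat\train.zip'
extract_to = r'E:\mldata\dogvscat\data'

with zipfile.ZipFile(archive_path) as zf:
    # yields E:\mldata\dogvscat\data\train\cat.0.jpg, dog.0.jpg, ...
    zf.extractall(extract_to)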

II. Split the data

1. Create the following folder structure (a sketch that creates it programmatically follows the list; note that test also needs cats and dogs sub-folders, since the code below copies files into them):

  1. train

    1. cats
    2. dogs
  2. val

    1. cats
    2. dogs
  3. test

    1. cats
    2. dogs

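A minimal sketch that creates this layout with os.makedirs, using the same base path E:\mldata\dogvscat as the rest of the post:

import os

base_dir = r'E:\mldata\dogvscat'
for split in ('train', 'val', 'test'):
    for cls in ('cats', 'dogs'):
        # exist_ok=True makes the script safe to re-run
        os.makedirs(os.path.join(base_dir, split, cls), exist_ok=True)
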
2. Copy the images into these folders; the code is as follows:

def data_cats_processing():
    base_train_path = r'E:\mldata\dogvscat\data\train'
    base_dest_train_path = r'E:\mldata\dogvscat\train\cats'
    base_dest_test_path = r'E:\mldata\dogvscat\test\cats'
    base_dest_val_path = r'E:\mldata\dogvscat\val\cats'
    # cat.0.jpg ... cat.9999.jpg -> training set (10000 images)
    cat_fnames = ['cat.{}.jpg'.format(i) for i in range(10000)]
    for fname in cat_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_train_path, fname)
        shutil.copyfile(src_path, dest_path)
    # cat.10000.jpg ... cat.11499.jpg -> validation set (1500 images)
    cat_fnames = ['cat.{}.jpg'.format(i) for i in range(10000, 11500)]
    for fname in cat_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_val_path, fname)
        shutil.copyfile(src_path, dest_path)
    # cat.11500.jpg ... cat.12499.jpg -> test set (1000 images)
    cat_fnames = ['cat.{}.jpg'.format(i) for i in range(11500, 12500)]
    for fname in cat_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_test_path, fname)
        shutil.copyfile(src_path, dest_path)


def data_dog_processing():
    base_train_path = r'E:\mldata\dogvscat\data\train'
    base_dest_train_path = r'E:\mldata\dogvscat\train\dogs'
    base_dest_test_path = r'E:\mldata\dogvscat\test\dogs'
    base_dest_val_path = r'E:\mldata\dogvscat\val\dogs'
    # dog.0.jpg ... dog.9999.jpg -> training set (10000 images)
    dog_fnames = ['dog.{}.jpg'.format(i) for i in range(10000)]
    for fname in dog_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_train_path, fname)
        shutil.copyfile(src_path, dest_path)
    # dog.10000.jpg ... dog.11499.jpg -> validation set (1500 images)
    dog_fnames = ['dog.{}.jpg'.format(i) for i in range(10000, 11500)]
    for fname in dog_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_val_path, fname)
        shutil.copyfile(src_path, dest_path)
    # dog.11500.jpg ... dog.12499.jpg -> test set (1000 images)
    dog_fnames = ['dog.{}.jpg'.format(i) for i in range(11500, 12500)]
    for fname in dog_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_test_path, fname)
        shutil.copyfile(src_path, dest_path)
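
Running both functions copies every image into place. An optional sanity check (a quick sketch using the same paths) to confirm the 20000/3000/2000 split:

data_cats_processing()
data_dog_processing()

base_dir = r'E:\mldata\dogvscat'
for split in ('train', 'val', 'test'):
    for cls in ('cats', 'dogs'):
        folder = os.path.join(base_dir, split, cls)
        # expect 10000 per class for train, 1500 for val, 1000 for test
        print(split, cls, len(os.listdir(folder)))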

III. Load the data in batches

Code:

def datagen():
    train_datagen = ImageDataGenerator(rescale=1 / 255.)
    test_datagen = ImageDataGenerator(rescale=1 / 255.)
    train_dir = r'E:\mldata\dogvscat\train'
    val_dir = r'E:\mldata\dogvscat\val'

    train_datagen = train_datagen.flow_from_directory(
        train_dir,
        target_size=(150, 150),
        batch_size=20,
        class_mode='binary'
    )
    val_datagen = test_datagen.flow_from_directory(
        val_dir,
        target_size=(150, 150),
        batch_size=20,
        class_mode='binary'
    )
    return train_datagen, val_datagen
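
flow_from_directory infers the two classes from the cats/ and dogs/ sub-folders (assigned alphabetically, so cats -> 0 and dogs -> 1) and yields batches of 20 images rescaled to [0, 1]. A quick sketch to inspect one batch:

train_gen, val_gen = datagen()
print(train_gen.class_indices)     # {'cats': 0, 'dogs': 1}

images, labels = next(train_gen)   # fetch a single batch
print(images.shape, labels.shape)  # (20, 150, 150, 3) (20,)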

IV. Build the network models

Model 1: a basic convolutional network

After training, the model underfits:

def base_model1():
    # acc: 0.8255  val_acc: 0.7940 -> underfitting
    # possible remedies:
    # 1. try a better optimizer
    # 2. try a larger network
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=RMSprop(learning_rate=1e-4), metrics=['acc'])
    return model

Model 2: the same network with the Adam optimizer

def create_model2():
    # uses the Adam optimizer
    #  acc: 0.8537  - val_acc: 0.7920
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=Adam(learning_rate=1e-4), metrics=['acc'])
    return model
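
The two models share the same architecture and differ only in the optimizer. To inspect the layer shapes and the parameter count (roughly 3.4M parameters, most of them in the Dense(512) layer after Flatten), print a summary:

model = create_model2()
model.summary()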

V. Train the model

    # load the data
    train_datagen, val_datagen = datagen()
    # build the model
    model = create_model2()
    # log directory for TensorBoard
    log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    # TensorBoard callback
    tensorboard_callback = callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
    # start training (model.fit accepts generators; fit_generator is deprecated)
    model.fit(train_datagen,
              steps_per_epoch=100,
              epochs=30,
              validation_data=val_datagen,
              validation_steps=50,
              callbacks=[tensorboard_callback])
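
TensorBoard can then be pointed at the logs/fit directory to follow the training curves. After training, the held-out test folder created in section II can be used for a final evaluation. A minimal sketch (this test generator is not part of the original script):

test_datagen = ImageDataGenerator(rescale=1 / 255.)
test_generator = test_datagen.flow_from_directory(
    r'E:\mldata\dogvscat\test',
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary',
    shuffle=False
)
test_loss, test_acc = model.evaluate(test_generator)
print('test acc:', test_acc)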

Full code

import os, shutil
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, MaxPooling2D, Conv2D, Flatten, Dropout
from tensorflow.keras.optimizers import RMSprop, Adam, SGD
import datetime
from tensorflow.keras import callbacks
from tensorflow.keras.applications import VGG16
from tensorflow.keras.models import Model


# split the dataset: 25000 images in total
# training set: 20000
# validation set: 3000
# test set: 2000

def data_cats_processing():
    base_train_path = r'E:\mldata\dogvscat\data\train'
    base_dest_train_path = r'E:\mldata\dogvscat\train\cats'
    base_dest_test_path = r'E:\mldata\dogvscat\test\cats'
    base_dest_val_path = r'E:\mldata\dogvscat\val\cats'
    # cat.0.jpg ... cat.9999.jpg -> training set (10000 images)
    cat_fnames = ['cat.{}.jpg'.format(i) for i in range(10000)]
    for fname in cat_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_train_path, fname)
        shutil.copyfile(src_path, dest_path)
    # cat.10000.jpg ... cat.11499.jpg -> validation set (1500 images)
    cat_fnames = ['cat.{}.jpg'.format(i) for i in range(10000, 11500)]
    for fname in cat_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_val_path, fname)
        shutil.copyfile(src_path, dest_path)
    # cat.11500.jpg ... cat.12499.jpg -> test set (1000 images)
    cat_fnames = ['cat.{}.jpg'.format(i) for i in range(11500, 12500)]
    for fname in cat_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_test_path, fname)
        shutil.copyfile(src_path, dest_path)


def data_dog_processing():
    base_train_path = r'E:\mldata\dogvscat\data\train'
    base_dest_train_path = r'E:\mldata\dogvscat\train\dogs'
    base_dest_test_path = r'E:\mldata\dogvscat\test\dogs'
    base_dest_val_path = r'E:\mldata\dogvscat\val\dogs'
    # dog.0.jpg ... dog.9999.jpg -> training set (10000 images)
    dog_fnames = ['dog.{}.jpg'.format(i) for i in range(10000)]
    for fname in dog_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_train_path, fname)
        shutil.copyfile(src_path, dest_path)
    # dog.10000.jpg ... dog.11499.jpg -> validation set (1500 images)
    dog_fnames = ['dog.{}.jpg'.format(i) for i in range(10000, 11500)]
    for fname in dog_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_val_path, fname)
        shutil.copyfile(src_path, dest_path)
    # dog.11500.jpg ... dog.12499.jpg -> test set (1000 images)
    dog_fnames = ['dog.{}.jpg'.format(i) for i in range(11500, 12500)]
    for fname in dog_fnames:
        src_path = os.path.join(base_train_path, fname)
        dest_path = os.path.join(base_dest_test_path, fname)
        shutil.copyfile(src_path, dest_path)


def datagen():
    train_datagen = ImageDataGenerator(
        rescale=1. / 255,
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True
    )
    test_datagen = ImageDataGenerator(rescale=1 / 255.)
    train_dir = r'E:\mldata\dogvscat\train'
    val_dir = r'E:\mldata\dogvscat\val'

    train_datagen = train_datagen.flow_from_directory(
        train_dir,
        target_size=(150, 150),
        batch_size=20,
        class_mode='binary'
    )
    val_datagen = test_datagen.flow_from_directory(
        val_dir,
        target_size=(150, 150),
        batch_size=20,
        class_mode='binary'
    )
    return train_datagen, val_datagen


def base_model1():
    # acc: 0.8255  val_acc: 0.7940 -> underfitting
    # possible remedies:
    # 1. try a better optimizer
    # 2. try a larger network
    # 3. more data / use data augmentation
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=RMSprop(learning_rate=1e-4), metrics=['acc'])
    return model


def create_model2():
    # uses the Adam optimizer
    # acc: 0.8537 - val_acc: 0.7920
    # possible remedies:
    # 1. try a better optimizer
    # 2. try a larger network
    # 3. more data / use data augmentation
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=Adam(learning_rate=1e-4), metrics=['acc'])
    return model


def create_model3():
    # uses Adam with data augmentation
    # acc: 0.7487 - val_acc: 0.7760
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=Adam(learning_rate=1e-4), metrics=['acc'])
    return model


def create_model4():
    # uses Adam with a larger network
    # acc: 0.7366 - val_loss: 0.5200 - val_acc: 0.7350
    # without data augmentation:
    #  acc: 0.8382 - val_loss: 0.4190 - val_acc: 0.8110
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(256, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(1024, activation='relu'))
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=Adam(learning_rate=1e-4), metrics=['acc'])
    return model


def create_model5():
    # a VGG-style stack of small 3x3 convolutions
    #  acc: 0.8466 - val_loss: 0.3708 - val_acc: 0.8320
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer=Adam(learning_rate=1e-4), metrics=['acc'])
    return model


def vgg_model():
    # transfer learning with a pre-trained VGG16 convolutional base
    #  acc: 0.8466 - val_loss: 0.3708 - val_acc: 0.8320
    top_model = Sequential()
    top_model.add(Flatten())
    top_model.add(Dense(256, activation='relu'))
    top_model.add(Dropout(0.5))
    top_model.add(Dense(1, activation='sigmoid'))

    base_model = VGG16(weights='imagenet',
                       include_top=False,
                       input_shape=(150, 150, 3))
    model = Model(inputs=base_model.input, outputs=top_model(base_model.output))
    # freeze the convolutional base so that only the new top layers are trained
    for layer in base_model.layers:
        layer.trainable = False
    # single sigmoid output with class_mode='binary' -> binary_crossentropy
    model.compile(loss='binary_crossentropy',
                  optimizer=SGD(learning_rate=1e-4, momentum=0.9),
                  metrics=['accuracy'])
    model.summary()
    return model


if __name__ == '__main__':
    # run once to split the data into train/val/test folders
    # data_cats_processing()
    # data_dog_processing()
    # load the data
    train_datagen, val_datagen = datagen()
    # build the model
    model = create_model2()
    # log directory for TensorBoard
    log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    # TensorBoard callback
    tensorboard_callback = callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
    # start training (model.fit accepts generators; fit_generator is deprecated)
    model.fit(train_datagen,
              steps_per_epoch=100,
              epochs=30,
              validation_data=val_datagen,
              validation_steps=50,
              callbacks=[tensorboard_callback])
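
Once training has finished, the model can classify a single image. A minimal sketch; the image path is just an example from the test split, and since flow_from_directory assigns labels alphabetically, the sigmoid output is the probability of the dogs class:

import numpy as np
from tensorflow.keras.preprocessing import image

img_path = r'E:\mldata\dogvscat\test\cats\cat.11500.jpg'  # example image
img = image.load_img(img_path, target_size=(150, 150))
x = image.img_to_array(img) / 255.   # same rescaling as the generators
x = np.expand_dims(x, axis=0)        # shape (1, 150, 150, 3)

prob = model.predict(x)[0][0]        # probability that the image is a dog
print('dog' if prob > 0.5 else 'cat', prob)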
