AlexNet Keras Implementation

1. The original version

import pickle
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential,Model
from keras.layers import Dropout, Dense, Conv2D, Flatten, MaxPooling2D, Input
from keras.callbacks import ModelCheckpoint
from keras import utils  # for to_categorical (one-hot encoding of labels)
from sklearn.model_selection import train_test_split


inpt = Input(shape=(227,227,3))
# (227,227,3) -> (55,55,96) -> (27,27,96)
x = Conv2D(filters=96, kernel_size=(11, 11), strides=(4, 4), activation='relu',kernel_initializer='uniform')(inpt)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

# (27,27,96) -> (27,27,256)->(13,13,256)
x = Conv2D(filters=256, kernel_size=(5, 5), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

# (13,13,256)->(13,13,384)->(13,13,384)->(13,13,256)->(6,6,256)
x = Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

# (6,6,256)->9216->4096->4096->1000
x = Flatten()(x)
x = Dense(4096, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(4096, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(1000, activation='softmax')(x)


model = Model(inputs=inpt, outputs=x)


# The optimizer used is SGD
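Only the choice of optimizer (SGD) is noted above; a minimal compile-and-fit sketch might look like the following, where the arrays X (images of shape (n, 227, 227, 3)) and y (integer class labels), the learning rate, batch size, and epoch count are all illustrative assumptions rather than values from the original post:

from keras.optimizers import SGD

# compile with SGD and categorical cross-entropy (assumed settings)
sgd = SGD(learning_rate=0.01, momentum=0.9)
model.compile(optimizer=sgd, loss='categorical_crossentropy', metrics=['accuracy'])

# X, y: hypothetical dataset; labels are one-hot encoded for the 1000-way softmax
y_onehot = utils.to_categorical(y, num_classes=1000)
x_train, x_test, y_train, y_test = train_test_split(X, y_onehot, test_size=0.2)
model.fit(x_train, y_train, batch_size=32, epochs=10, validation_data=(x_test, y_test))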

2. A slightly modified version

Set the stride of the first convolution to 1 and use padding='same'; the input shape is also changed to (60, 60, 8) and the final Dense layer to 14 classes.

inpt = Input(shape=(60,60,8))
# (60,60,8) -> (60,60,96) -> (29,29,96)
x = Conv2D(filters=96, kernel_size=(11, 11), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(inpt)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

# (29,29,96) -> (29,29,256) -> (14,14,256)
x = Conv2D(filters=256, kernel_size=(5, 5), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

# (14,14,256) -> (14,14,384) -> (14,14,384) -> (14,14,256) -> (6,6,256)
x = Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu',kernel_initializer='uniform')(x)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

# (6,6,256) -> 9216 -> 4096 -> 4096 -> 14
x = Flatten()(x)
x = Dense(4096, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(4096, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(14, activation='softmax')(x)
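The modified network above stops at the final softmax layer; a minimal sketch of wrapping it in a Model and training it on 14-class data could look like this, where the variable names X and y, the checkpoint path, the monitored metric, and all hyperparameters are illustrative assumptions:

model = Model(inputs=inpt, outputs=x)
model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
model.summary()

# hypothetical 14-class dataset: X with shape (n, 60, 60, 8), y with integer labels 0..13
y_onehot = utils.to_categorical(y, num_classes=14)
x_train, x_test, y_train, y_test = train_test_split(X, y_onehot, test_size=0.2)

# save the best weights seen during training ('val_acc' in older Keras versions)
checkpoint = ModelCheckpoint('alexnet_best.h5', monitor='val_accuracy', save_best_only=True)
model.fit(x_train, y_train, batch_size=32, epochs=50,
          validation_data=(x_test, y_test), callbacks=[checkpoint])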


Reposted from blog.csdn.net/weixin_47289438/article/details/112134037