Quickly Building a Conv+BN+ReLU Block

1. Import the required packages

import numpy as np
from keras import layers
from keras.layers import Input, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D
from keras.layers import AveragePooling2D, MaxPooling2D, Dropout, GlobalMaxPooling2D, GlobalAveragePooling2D
from keras.models import Model
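
These imports target the standalone keras package; with TensorFlow 2 the same layers live under tensorflow.keras, so an equivalent import (an alternative, not required by the original post) would be:

from tensorflow.keras.layers import (Input, Dense, Activation, ZeroPadding2D,
                                     BatchNormalization, Flatten, Conv2D,
                                     AveragePooling2D, MaxPooling2D, Dropout,
                                     GlobalMaxPooling2D, GlobalAveragePooling2D)
from tensorflow.keras.models import Model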

2. Conv+BN+ReLU

# X is an existing 4-D tensor (batch, height, width, channels)
X = Conv2D(32, (7, 7), strides=(1, 1), padding='same', name='conv0')(X)
X = BatchNormalization(axis=3, name='bn0')(X)  # axis=3 normalizes over the channel axis (channels_last)
X = Activation('relu')(X)
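
To see the block run end to end, here is a minimal sketch using the imports from step 1; the 64x64x3 input shape is an illustrative assumption:

X_input = Input((64, 64, 3))  # assumed input shape, for illustration only
X = Conv2D(32, (7, 7), strides=(1, 1), padding='same', name='conv0')(X_input)
X = BatchNormalization(axis=3, name='bn0')(X)
X = Activation('relu')(X)
print(Model(X_input, X).output_shape)  # (None, 64, 64, 32): 'same' padding keeps the 64x64 size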

3. Wrap it in a function

def model(input_shape):
    """
    Model outline: ZeroPadding -> Conv -> BN -> ReLU -> MaxPool -> Flatten -> Dense.
    """
    # Define an input tensor (placeholder) with shape input_shape
    X_input = Input(input_shape)

    # Zero-padding: pad the borders of X_input with zeros
    X = ZeroPadding2D((3, 3))(X_input)

    # Apply a CONV -> BN -> RELU block to X
    X = Conv2D(32, (7, 7), strides=(1, 1), name='conv0')(X)
    X = BatchNormalization(axis=3, name='bn0')(X)
    X = Activation('relu')(X)

    # Max-pooling layer
    X = MaxPooling2D((2, 2), name='max_pool')(X)

    # Flatten the feature map into a vector, then a fully connected layer
    X = Flatten()(X)
    X = Dense(1, activation='sigmoid', name='fc')(X)

    # Create the model: this builds a model instance that we can use for training and evaluation
    model = Model(inputs=X_input, outputs=X, name='HappyModel')

    return model
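
A hedged usage sketch: the input shape, optimizer, and loss below are illustrative choices, not part of the original post (the 1-unit sigmoid head suggests binary classification, hence binary_crossentropy):

happy_model = model(input_shape=(64, 64, 3))  # assumed 64x64 RGB input
happy_model.compile(optimizer='adam',
                    loss='binary_crossentropy',  # matches the 1-unit sigmoid output
                    metrics=['accuracy'])
happy_model.summary()  # prints layer-by-layer output shapes and parameter counts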

Source

Reprinted from blog.csdn.net/weixin_47289438/article/details/112154102