Preliminary Training Results

Results on the validation set after training for 25 epochs (3,067 files, 1,492 classes):

From the results above, the different-class (impostor) distribution looks good, but the same-class (genuine) distribution is too spread out and overlaps the impostor region too much. A likely cause is that impostor pairs take up too large a share of the training set: genuine pairs are outnumbered roughly 1:20 (each class folder contributes one genuine pair, but each of its images contributes ten impostor pairs), so the genuine side is comparatively hard to train. But what exactly is the reason? An open question to come back to: how does the class ratio in the training set relate to the resulting score distributions, and what is the theoretical explanation?
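One standard counterweight worth trying for this kind of imbalance (a sketch, not something this run used): weight the scarce genuine class so both classes contribute equally to the loss. With the roughly 1:20 ratio here, per-sample weights could be built once and sliced per batch inside the training loop below; Keras's train_on_batch accepts them through its sample_weight argument:

# Illustrative sketch: offset the ~1:20 genuine/impostor imbalance with
# per-sample weights (genuine pairs get weight 20, impostor pairs weight 1).
sample_weight = np.where(y == 1, 20.0, 1.0)
# ...then, inside the batch loop further down:
loss = model.train_on_batch(image_full_batch, output_true_batch,
                            sample_weight=sample_weight[batch_indexes])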

import os
import tqdm
import numpy as np
from PIL import Image
from keras.models import Model
from keras.callbacks import TensorBoard
from keras.layers import Input, Dense, BatchNormalization, Activation, Dropout, concatenate

BASE_DIR = 'D:/zhy/python_work/saved_model/finger/121/'

def save_all_weights(d, epoch_number, current_loss):
    # Save a weight checkpoint named after the epoch (overwrite=True).
    d.save_weights(os.path.join(BASE_DIR, 'model_{}.h5'.format(epoch_number)), True)
train = []
y = []
path = r'D:\zhy\python_work\data\Distence_sample'   # raw string so the backslashes are safe
data = os.listdir(path)

# Genuine pairs (label 1): the first two images of each class folder,
# flattened and concatenated into one vector.
for pictures in data:
    picture_ord = os.path.join(path, pictures)
    picture = os.listdir(picture_ord)
    lw = []
    for i in range(2):
        imge = Image.open(os.path.join(picture_ord, picture[i]))
        lw.append(np.array(imge).reshape(-1))
    train.append(np.block([lw[0], lw[1]]))
    y.append(1)

# Impostor pairs (label 0): every image of every class is paired with 10
# random images drawn from other classes.
for i in range(len(data)):
    picture_ord = os.path.join(path, data[i])
    pictur1 = os.listdir(picture_ord)
    for pictu in pictur1:
        imge1 = Image.open(os.path.join(picture_ord, pictu))
        arr1 = np.array(imge1).reshape(-1)
        for _ in range(10):
            a = np.random.randint(0, len(data))
            # Bug fix: the original re-drew only once on a collision, so the
            # same class could still be picked; keep drawing until it differs.
            while a == i:
                a = np.random.randint(0, len(data))
            picture_ot = os.path.join(path, data[a])
            pictur2 = os.listdir(picture_ot)
            imge2 = Image.open(os.path.join(picture_ot, pictur2[np.random.randint(0, len(pictur2))]))
            arr2 = np.array(imge2).reshape(-1)
            train.append(np.block([arr1, arr2]))
            y.append(0)

train = np.array(train)
y = np.array(y)
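A quick sanity check (illustrative, not in the original) confirms the layout the 2880-wide input layer below expects: every row is two flattened 1,440-pixel images side by side, with one label per row.

# Illustrative sanity check: two flattened images per row (1440 + 1440 = 2880),
# labels aligned with samples, and an imbalance of roughly 1:20.
assert train.shape == (len(y), 2880)
print('genuine pairs:', int((y == 1).sum()), '| impostor pairs:', int((y == 0).sum()))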
def fc_block(x, units, use_dropout=True, activation='relu'):
    # One building block: Dense -> (Dropout) -> BatchNormalization -> activation.
    # The long BatchNormalization argument lists in the original were all
    # Keras defaults, so BatchNormalization() is equivalent.
    x = Dense(units)(x)
    if use_dropout:
        x = Dropout(0.5)(x)
    x = BatchNormalization()(x)
    return Activation(activation)(x)

# Input: two flattened images concatenated (1440 + 1440 = 2880 values).
inpu = Input(shape=(2880,))

# The first block feeds on the raw input; every later block sees the previous
# activation concatenated with the one before it (a two-step DenseNet-style
# skip connection), exactly as the original hand-unrolled layers did.
prev, skip = fc_block(inpu, 2880), inpu
widths = [2880] * 5 + [1440] * 6 + [720] * 7 + [360] * 6
for k, units in enumerate(widths):
    x = concatenate([prev, skip], axis=1)
    # The original omitted Dropout in the second 2880 block only (k == 0);
    # that is preserved here for exactness, though it was likely an oversight.
    prev, skip = fc_block(x, units, use_dropout=(k != 0)), prev
x = concatenate([prev, skip], axis=1)
x = fc_block(x, 360, activation='tanh')   # final hidden block uses tanh
w = Dense(1)(x)
model = Model(inputs=inpu, outputs=w)
model.compile(optimizer='Adam', loss='mean_squared_error')
model.trainable = True   # redundant: layers are trainable by default, and changing this after compile() only takes effect after recompiling
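Since the labels are hard 0/1, a more conventional head than a linear unit trained with mean squared error would be a sigmoid output with binary cross-entropy. An alternative sketch (not the author's configuration), replacing the last two lines of the model definition above:

# Alternative output head (sketch): sigmoid + binary cross-entropy for 0/1 labels.
w = Dense(1, activation='sigmoid')(x)
model = Model(inputs=inpu, outputs=w)
model.compile(optimizer='Adam', loss='binary_crossentropy', metrics=['accuracy'])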
log_path = 'D:/zhy/python_work/logs/'
tensorboard_callback = TensorBoard(log_path)   # note: never passed to training below, so it logs nothing with train_on_batch
for epoch in tqdm.tqdm(range(100)):
    permutated_indexes = np.random.permutation(train.shape[0])
    losses = []
    for index in range(int(train.shape[0] / 64)):   # batches of 64
        batch_indexes = permutated_indexes[index * 64:(index + 1) * 64]
        output_true_batch = y[batch_indexes]
        image_full_batch = train[batch_indexes]
        for _ in range(5):   # each batch is trained on 5 times in a row
            loss = model.train_on_batch(image_full_batch, output_true_batch)
            losses.append(loss)
    with open('D:/zhy/python_work/logs/log.txt', 'a+') as f:
        f.write('{} - {}\n'.format(epoch, np.mean(losses)))
    # Bug fix: the original passed the constant 13 here, overwriting the same
    # checkpoint file every epoch; pass the running epoch number instead.
    save_all_weights(model, epoch, np.mean(losses))
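To score a pair with a saved checkpoint, the model can be rebuilt from the same definition and the weights loaded back (an illustrative sketch; the checkpoint and image file names are hypothetical):

# Illustrative inference sketch (file names hypothetical).
model.load_weights(os.path.join(BASE_DIR, 'model_25.h5'))
img_a = np.array(Image.open('finger_a.bmp')).reshape(-1)
img_b = np.array(Image.open('finger_b.bmp')).reshape(-1)
pair = np.block([img_a, img_b])[None, :]   # shape (1, 2880)
score = model.predict(pair)[0, 0]          # closer to 1 -> same class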

Reposted from blog.csdn.net/haoyu_does/article/details/84847384