数据分析师养成之路--keras学习中metrics和callbacks笔记

compile中的metrics参数

model.compile(...metrics=['accuracy'])

如上,
metrics 中的每个评估函数都接收两个参数:y_true 和 y_pred
它所接收的评估指标为,损失函数,(mse等)和准确率(accuracy等),它们都作用于训练集和验证集(eg: loss:..acc..mae .. val_loss:..val_acc..val_mae..),当然该性能的评估结果不会真正用于训练,只用来显示(作为参考)
(每个batch,输出:loss acc mae (后两个就是设置metrics得到),每个epoch(一轮),输出:loss,acc mae … val_loss…val_acc… val_mae)
metrics中的损失函数和准确率函数可由自己改写:
如下:

def top_3_accuracy(y_true, y_pred):
    """Custom metric: top-3 categorical accuracy.

    Any function with the (y_true, y_pred) signature can be passed
    directly in the `metrics` list of `model.compile`.
    """
    top3 = metrics.top_k_categorical_accuracy(y_true, y_pred, k=3)
    return top3

model.compile(loss='categorical_crossentropy',
              optimizer=sgd,
              metrics=[metrics.mae, top_3_accuracy])
import keras.backend as K

def mean_pred(y_true, y_pred):
    """Custom metric reporting the mean predicted value.

    `y_true` is unused but required: Keras always calls metric
    functions with the (y_true, y_pred) signature.
    """
    return K.mean(y_pred)

model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy', mean_pred])

fit中的callbacks参数:

from keras.callbacks import ModelCheckpoint

model = Sequential()
model.add(Dense(10, input_dim=784, kernel_initializer='uniform'))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

# Save the model weights after each epoch, but only when the
# validation loss has improved (save_best_only=True).
checkpoint_cb = ModelCheckpoint(filepath='/tmp/weights.hdf5', verbose=1,
                                save_best_only=True)
model.fit(x_train, y_train,
          batch_size=128, epochs=20, verbose=0,
          validation_data=(X_test, Y_test),
          callbacks=[checkpoint_cb])
class LossHistory(keras.callbacks.Callback):
    """Callback that records the training loss of every batch in `self.losses`."""

    def on_train_begin(self, logs=None):
        # Reset the history at the start of each training run.
        self.losses = []

    def on_batch_end(self, batch, logs=None):
        # `logs` carries the per-batch metrics. Use None as the default
        # instead of `logs={}`: a mutable default argument is shared
        # across all calls and is a classic Python pitfall.
        logs = logs or {}
        self.losses.append(logs.get('loss'))

model = Sequential()
model.add(Dense(10, input_dim=784, kernel_initializer='uniform'))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

# Attach the custom callback so every batch's loss is recorded during fit().
loss_history = LossHistory()
model.fit(x_train, y_train, batch_size=128, epochs=20, verbose=0,
          callbacks=[loss_history])

print(loss_history.losses)
# Example output:
# [0.66047596406559383, 0.3547245744908703, ..., 0.25953155204159617, 0.25901699725311789]

就我而言,callbacks很方便作为 验证集数据评估的参考
如下,为fit_generator的callbacks参数写一个RocAuc接口

class RocAuc(keras.callbacks.Callback):
    """Compute ROC-AUC on one batch drawn from a validation generator
    every `interval` epochs and print the score.

    Parameters:
        validation_generate: generator yielding (x_val, y_val) batches.
        interval: evaluate every `interval`-th epoch (default 1 = every epoch).
    """

    def __init__(self, validation_generate, interval=1):
        super().__init__()  # let the base Callback initialize its own state
        self.interval = interval
        self.validation_generate = validation_generate

    def on_epoch_end(self, epoch, logs=None):
        # Only pull a validation batch on the epochs we actually score.
        # (The original consumed one batch every epoch and silently
        # discarded it when `epoch % interval != 0`.)
        if epoch % self.interval != 0:
            return
        x_val, y_val = next(self.validation_generate)
        try:
            y_pred = self.model.predict(x_val, verbose=0)
            score = roc_auc_score(y_val, y_pred)
            print('\n ROC_AUC - epoch:%d - score:%.6f \n' % (epoch + 1, score * 100))
        except ValueError:
            # roc_auc_score raises ValueError when y_val contains a single
            # class; a bare `except:` would also swallow KeyboardInterrupt etc.
            print('\n  epoch:%d  only one class!!\n' % (epoch + 1))

def validata(iter_test):
    """Wrap any iterable of validation batches as a generator."""
    yield from iter_test
# Wrap the test iterator and pass the callback to fit_generator.
# NOTE: the '...' below stands for the usual training arguments
# (generator, steps_per_epoch, epochs, ...) — this line is pseudocode.
vli=validata(iter_test)
rocauc=RocAuc(validation_generate=vli)
model.fit_generator(...callbacks=[rocauc])

当然也可以把 EarlyStopping 加入其中,不过需要把它的 monitor 参数设为 'roc_auc_val',并让回调把该指标写入 logs,

class RocAuc(keras.callbacks.Callback):
    """ROC-AUC callback that also publishes the score in `logs` under the
    key 'roc_auc_val', so sibling callbacks such as
    EarlyStopping(monitor='roc_auc_val', mode='max') can monitor it.

    Parameters:
        validation_generate: generator yielding (x_val, y_val) batches.
        interval: evaluate every `interval`-th epoch (default 1).
    """

    def __init__(self, validation_generate, interval=1):
        super().__init__()  # let the base Callback initialize its own state
        self.interval = interval
        self.validation_generate = validation_generate

    def on_train_begin(self, logs=None):
        # History of scores, and register 'roc_auc_val' as a metric name
        # so Keras displays/records it alongside the built-in metrics.
        self.roc_auc_score = []
        if 'roc_auc_val' not in self.params['metrics']:
            self.params['metrics'].append('roc_auc_val')

    def on_epoch_end(self, epoch, logs=None):
        logs = logs if logs is not None else {}
        # Default to -inf so the monitored value is always a number;
        # EarlyStopping(mode='max') compares it numerically every epoch.
        logs['roc_auc_val'] = float('-inf')
        if epoch % self.interval == 0:
            # Pull a validation batch only on the epochs we actually score,
            # so generator data is not silently discarded.
            x_val, y_val = next(self.validation_generate)
            try:
                y_pred = self.model.predict(x_val, verbose=0)
                score = roc_auc_score(y_val, y_pred)
                logs['roc_auc_val'] = score * 100
                print('\n ROC_AUC - epoch:%d - score:%.6f \n' % (epoch + 1, score * 100))
            except ValueError:
                # roc_auc_score raises ValueError when only one class is
                # present. Keep the metric numeric (-inf): the original
                # stored the string 'cant compute!!', which makes
                # EarlyStopping's greater-than comparison raise TypeError.
                print('\n  epoch:%d  only one class!!\n' % (epoch + 1))

        self.roc_auc_score.append(logs.get('roc_auc_val'))

def validata(iter_test):
    """Expose `iter_test` as a generator of validation batches."""
    for batch in iter_test:
        yield batch
# Build the generator and pass both callbacks to fit_generator; the '...'
# stands for the usual training arguments (pseudocode). NOTE(review): this
# snippet presumably reuses the `rocauc` instance constructed in the earlier
# example — it is not defined here.
vli=validata(iter_test)
model.fit_generator(...callbacks=[rocauc, EarlyStopping(monitor='roc_auc_val',patience=30, verbose=2,mode='max')])
print(rocauc.roc_auc_score)

猜你喜欢

转载自blog.csdn.net/lulujiang1996/article/details/81120082