Simulating XOR with Keras

This is probably the simplest possible network example:
with Keras you just add the layers, the loss function, the optimizer, and so on, and you can observe how the network behaves directly. XOR outputs 1 only when its two inputs differ, so the four training samples below cover the entire truth table.

from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
import matplotlib.pyplot as plt


def main():
    # prepare the XOR training data: the label is 1 only when the inputs differ
    x_train = np.array([[1, 0], [1, 1], [0, 0], [0, 1]])
    y_train = np.array([[1], [0], [0], [1]])

    # build the network layer by layer
    model = Sequential()

    model.add(Dense(units=2, input_dim=2))   # hidden layer: 2 units, 2-dimensional input
    model.add(Activation("relu"))

    model.add(Dense(units=1))                # output layer: a single unit
    model.add(Activation("sigmoid"))         # squashes the output into (0, 1)

    # configure the model: loss, optimizer, and metrics
    model.compile(loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy'])

    # train for 1000 epochs and collect the loss history for plotting
    hist = model.fit(x_train, y_train, epochs=1000)
    plt.scatter(range(len(hist.history['loss'])), hist.history['loss'])

    # evaluate on the same four samples (XOR has no separate test set)
    loss_and_metrics = model.evaluate(x_train, y_train)
    print(loss_and_metrics)

    plt.show()


if __name__ == '__main__':
    main()
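
As a quick sanity check (my own addition, not part of the original listing), the sigmoid outputs can be thresholded at 0.5 inside main(), right after training, to read off the predicted XOR labels:

    preds = model.predict(x_train)        # sigmoid probabilities, shape (4, 1)
    print((preds > 0.5).astype(int))      # threshold at 0.5 to get 0/1 predictions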

The same network with a different loss function (mean squared error) and an explicitly configured SGD optimizer:

import numpy as np
from keras.models import Sequential
from keras.layers import Activation, Dense
from keras.optimizers import SGD

def main():

    # the four XOR input/output pairs
    X = np.zeros((4, 2), dtype='uint8')
    y = np.zeros(4, dtype='uint8')
    X[0] = [0, 0]
    y[0] = 0
    X[1] = [0, 1]
    y[1] = 1
    X[2] = [1, 0]
    y[2] = 1
    X[3] = [1, 1]
    y[3] = 0

    # same architecture as before: 2 ReLU hidden units feeding a single sigmoid output
    model = Sequential()
    model.add(Dense(2, input_dim=2))
    model.add(Activation('relu'))
    model.add(Dense(1))
    model.add(Activation('sigmoid'))

    # plain SGD with learning-rate decay, momentum, and Nesterov acceleration
    sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)

    # mean squared error instead of binary cross-entropy
    model.compile(loss='mse', optimizer=sgd)

    history = model.fit(X, y, epochs=1000, batch_size=4, verbose=2)

    # ideally close to [0, 1, 1, 0]
    print(model.predict(X))

if __name__ == "__main__":
    main()
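
The intro talks about observing the network directly; one optional, hypothetical addition (not in the original post) is to print each Dense layer's learned weights and biases inside main() after fitting, using Keras's model.get_weights():

    # kernels and biases of the two Dense layers, returned as NumPy arrays
    for w in model.get_weights():
        print(w.shape)
        print(w)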

Reposted from blog.csdn.net/legalhighhigh/article/details/81530748