A minimal neural network in NumPy (nonlinear)

Copyright notice: this is an original post by the author; please credit the source when reposting. https://blog.csdn.net/Yellow_python/article/details/85769987

Workflow and algorithm

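The figures from the original post are not reproduced here. As a brief recap (restating only what the code below implements): one ReLU hidden layer, a softmax output, and plain gradient descent on the mean cross-entropy loss:

$$H = \max(0,\, XW_1 + b_1), \qquad S = HW_2 + b_2, \qquad P_{ij} = \frac{e^{S_{ij}}}{\sum_k e^{S_{ik}}}$$

$$\frac{\partial L}{\partial S} = \frac{P - \mathbf{1}_y}{n}$$

where $\mathbf{1}_y$ is the one-hot encoding of the labels; this last quantity is the dscores array in the code.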

import numpy as np, matplotlib.pyplot as mp

"""创建随机样本"""
def make_moons(n_samples=100, n_features=2, species=3):
    X = np.zeros((n_samples * species, n_features))
    y = np.zeros(n_samples * species, dtype=int)
    for j in range(species):
        ix = range(n_samples * j, n_samples * (j + 1))
        r = np.linspace(0, 1, n_samples)
        t = np.linspace(j * 4, (j + 1) * 4, n_samples) + np.random.randn(n_samples) * .2
        X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
        y[ix] = j
    return X, y
X, y = make_moons()
n = X.shape[0]  # 样本数
d = X.shape[1]  # 维数
k = len(np.unique(y))  # 种类数

"""神经网络模型参数"""
nn = 100  # 神经元个数
W1 = .01 * np.random.randn(d, nn)
b1 = np.zeros((1, nn))
W2 = .01 * np.random.randn(nn, k)
b2 = np.zeros((1, k))

"""梯度下降循环"""
for i in range(2999):
    # 1、ReLU激活
    hidden_layer = np.maximum(0, np.dot(X, W1) + b1)
    # 2、得分
    scores = np.dot(hidden_layer, W2) + b2
    # 3、概率
    exp_scores = np.exp(scores)
    probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
    # 4、梯度
    probs[range(n), y] -= 1
    dscores = probs / n
    # 5、反向传播:ReLU
    dhidden = np.dot(dscores, W2.T)
    dhidden[hidden_layer <= 0] = 0
    # 6、参数更新:W2、b2
    W2 -= np.dot(hidden_layer.T, dscores)
    b2 -= np.sum(dscores, axis=0, keepdims=True)
    # 7、参数更新:W1、b1
    W1 -= np.dot(X.T, dhidden)
    b1 -= np.sum(dhidden, axis=0, keepdims=True)

"""模型评估"""
hidden_layer = np.maximum(0, np.dot(X, W1) + b1)
scores = np.dot(hidden_layer, W2) + b2
predicted_class = np.argmax(scores, axis=1)
print('Accuracy: %.2f' % (np.mean(predicted_class == y)))

"""可视化"""
x_min, x_max = X[:, 0].min() - .1, X[:, 0].max() + .1
y_min, y_max = X[:, 1].min() - .1, X[:, 1].max() + .1
xx, yy = np.meshgrid(np.arange(x_min, x_max, .02), np.arange(y_min, y_max, .02))
Z = np.dot(np.maximum(0, np.dot(np.c_[xx.ravel(), yy.ravel()], W1) + b1), W2) + b2
Z = np.argmax(Z, axis=1)
Z = Z.reshape(xx.shape)
mp.contourf(xx, yy, Z, alpha=.1)  # filled contours showing the decision regions
mp.scatter(X[:, 0], X[:, 1], c=y, s=20, alpha=.7)
mp.show()

(Figure: decision regions and training points plotted by the code above.)
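The training loop above never reports the loss it is minimizing. As a minimal sketch (not part of the original post), the mean softmax cross-entropy can be computed from probs right after step 3, before step 4 modifies probs in place, to monitor convergence:

import numpy as np

def cross_entropy_loss(probs, y):
    """Mean softmax cross-entropy; `probs` are the row-normalized probabilities
    from step 3 and `y` the integer class labels (sketch, not in the original post)."""
    n = probs.shape[0]
    return -np.mean(np.log(probs[range(n), y]))

Printing cross_entropy_loss(probs, y) every few hundred iterations should show the value falling from roughly ln(k) towards zero as training progresses.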

Object-oriented version

import numpy as np, matplotlib.pyplot as mp
from sklearn.datasets import make_circles


class NN:
    def __init__(self, nn):
        self.nn = nn  # number of hidden neurons
        self.W1, self.b1 = None, None
        self.W2, self.b2 = None, None

    def fit(self, X, y):
        n = X.shape[0]  # number of samples
        d = X.shape[1]  # number of features
        k = len(np.unique(y))  # number of classes
        # Parameter initialization
        W1 = 0.01 * np.random.randn(d, self.nn)
        b1 = np.zeros((1, self.nn))
        W2 = 0.01 * np.random.randn(self.nn, k)
        b2 = np.zeros((1, k))
        # Gradient descent loop
        for i in range(9999):
            # ReLU activation (hidden layer)
            hidden_layer = np.maximum(0, np.dot(X, W1) + b1)
            # Class scores
            scores = np.dot(hidden_layer, W2) + b2
            # Probabilities (softmax)
            exp_scores = np.exp(scores)
            probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
            # Gradient of the loss w.r.t. the scores
            probs[range(n), y] -= 1
            dscores = probs / n
            # Backpropagation
            dhidden = np.dot(dscores, W2.T)  # hidden layer
            dhidden[hidden_layer <= 0] = 0  # ReLU
            # Parameter update: W2, b2
            W2 -= np.dot(hidden_layer.T, dscores)
            b2 -= np.sum(dscores, axis=0, keepdims=True)
            # Parameter update: W1, b1
            W1 -= np.dot(X.T, dhidden)
            b1 -= np.sum(dhidden, axis=0, keepdims=True)
        # Training finished: store the learned parameters
        self.W1, self.b1 = W1, b1
        self.W2, self.b2 = W2, b2


def visualize(X, y, nn, i, le):
    mp.subplot(1, le, i)
    x_min, x_max = X[:, 0].min() - .1, X[:, 0].max() + .1
    y_min, y_max = X[:, 1].min() - .1, X[:, 1].max() + .1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, .02), np.arange(y_min, y_max, .02))
    Z = np.dot(np.maximum(0, np.dot(np.c_[xx.ravel(), yy.ravel()], nn.W1) + nn.b1), nn.W2) + nn.b2
    Z = np.argmax(Z, axis=1)
    Z = Z.reshape(xx.shape)
    mp.contourf(xx, yy, Z, alpha=.1)
    mp.scatter(X[:, 0], X[:, 1], c=y, s=20, alpha=.7)


"""探究【神经元个数】对分类结果的影响"""
X, y = make_circles(200, noise=.1, factor=.4)  # 创建随机样本
nnn = [1, 2, 3, 4, 99]  # 神经元个数
le = len(nnn)
for i in range(le):
    nn = NN(nnn[i])  # 建模
    nn.fit(X, y)  # 拟合
    visualize(X, y, nn, i + 1, le)
mp.show()

(Figure: decision regions produced by the code above, one subplot per neuron count in nnn.)
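The NN class stores the learned parameters but exposes no prediction method, so visualize() rebuilds the forward pass inline. A minimal sketch of a standalone helper (hypothetical, not part of the original class) that reuses a fitted instance:

import numpy as np

def predict(nn, X):
    """Predicted class labels for a fitted NN instance (hypothetical helper)."""
    hidden_layer = np.maximum(0, np.dot(X, nn.W1) + nn.b1)  # ReLU hidden layer
    scores = np.dot(hidden_layer, nn.W2) + nn.b2  # class scores
    return np.argmax(scores, axis=1)  # most likely class per row

With such a helper, visualize() could compute Z = predict(nn, np.c_[xx.ravel(), yy.ravel()]) instead of repeating the dot products, and the training accuracy is simply np.mean(predict(nn, X) == y).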

Supplement

  • numpy.sum
>>> X = np.array([[0, 0],
...               [0, 1],
...               [2, 0]])
>>> np.sum(X)
3
>>> np.sum(X, axis=0)
array([2, 1])
>>> np.sum(X, axis=1)
array([0, 1, 2])
>>> np.sum(X, axis=0, keepdims=True)
array([[2, 1]])
>>> np.sum(X, axis=1, keepdims=True)
array([[0],
       [1],
       [2]])
  • numpy.maximum
>>> X = np.array([[-1, -1],
...               [10, 99],
...               [10, -1]])
>>> np.max(X)
99
>>> np.maximum(0, X)
array([[ 0,  0],
       [10, 99],
       [10,  0]])
  • range (integer-array indexing)
>>> X = np.array([[0, 0, 0],
...               [0, 0, 0],
...               [0, 0, 0],
...               [0, 0, 0]])
>>> y = np.array([2, 2, 1, 0])
>>> X[range(X.shape[0]), y] += 100
>>> X
array([[  0,   0, 100],
       [  0,   0, 100],
       [  0, 100,   0],
       [100,   0,   0]])
  • numpy.argmax
    Returns the indices of the maximum values along an axis
>>> X = np.array([[0, 1],
...               [2, 1],
...               [0, 3]])
>>> np.argmax(X, axis=0)
array([1, 2], dtype=int64)
>>> np.argmax(X, axis=1)
array([1, 0, 1], dtype=int64)
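  • numpy.exp (an added note, not in the original post)
    np.exp(scores) can overflow when the scores are large; subtracting the row-wise maximum first leaves the softmax probabilities unchanged and avoids the overflow:
>>> scores = np.array([[1000., 1001.]])
>>> np.exp(scores) / np.sum(np.exp(scores), axis=1, keepdims=True)  # overflows to nan
array([[nan, nan]])
>>> shifted = scores - np.max(scores, axis=1, keepdims=True)
>>> np.exp(shifted) / np.sum(np.exp(shifted), axis=1, keepdims=True)
array([[0.26894142, 0.73105858]])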
