Machine Learning - Supervised Learning - Linear Regression - Gradient Descent
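The script below fits a straight line y = w * x + b to a set of (x, y) points with gradient descent. The loss tracked by cost() is the squared error summed over all M points, and step_grad_desc() updates the parameters using the gradients of the mean squared error:

    grad_w = (2 / M) * Σ (w * x_i + b - y_i) * x_i
    grad_b = (2 / M) * Σ (w * x_i + b - y_i)

Each iteration moves w and b one step of size alpha (the learning rate) against these gradients.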

import numpy as np
import matplotlib.pyplot as plt

# Loss function: total squared error E(w, b) = sum of (y - w * x - b) ** 2 over all points
def cost(w, b, points):
    sum_cost = 0
    M = len(points)
    for i in range(M):
        x = points[i, 0]
        y = points[i, 1]
        sum_cost += (y - w * x - b) ** 2
    return sum_cost
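For reference, the same loss can be computed without the explicit loop using NumPy array slicing. This vectorized helper is not part of the original code, just a numerically equivalent sketch:

# Vectorized equivalent of cost(): same result, no Python loop
def cost_vec(w, b, points):
    x, y = points[:, 0], points[:, 1]
    return np.sum((y - w * x - b) ** 2)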

# Core gradient descent loop: apply the update rule num_iter times
def grad_desc(init_w, init_b, points, alpha, num_iter):
    w = init_w
    b = init_b
    # Keep a list of loss values so the downward trend can be plotted
    cost_list = []
    for i in range(num_iter):
        cost_list.append(cost(w, b, points))
        # step_grad_desc returns the updated w, b values
        w, b = step_grad_desc(w, b, alpha, points)
    return [w, b, cost_list]

# Perform a single gradient-descent step and return the updated w, b
def step_grad_desc(current_w, current_b, alpha, points):
    sum_grad_w = 0
    sum_grad_b = 0
    M = len(points)
    for i in range(M):
        x = points[i, 0]
        y = points[i, 1]
        sum_grad_w += (current_w * x + current_b - y) * x
        sum_grad_b += current_w * x + current_b - y
    # Gradients of the mean squared error
    grad_w = 2 / M * sum_grad_w
    grad_b = 2 / M * sum_grad_b
    # Move against the gradient, scaled by the learning rate
    updated_w = current_w - alpha * grad_w
    updated_b = current_b - alpha * grad_b
    return updated_w, updated_b
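A vectorized variant of the same update (an optional alternative, not in the original post) replaces the loop with array operations:

# Vectorized alternative to step_grad_desc: same math, no explicit loop
def step_grad_desc_vec(current_w, current_b, alpha, points):
    x, y = points[:, 0], points[:, 1]
    err = current_w * x + current_b - y      # per-point residuals
    grad_w = 2 * np.mean(err * x)            # dE/dw of the mean squared error
    grad_b = 2 * np.mean(err)                # dE/db
    return current_w - alpha * grad_w, current_b - alpha * grad_b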

if __name__ == '__main__':
    # Initial parameters
    alpha = 0.0001
    init_w = 0
    init_b = 0
    num_iter = 10000
    points = np.genfromtxt(r"D:\projects\PythonProjects\PythonStudy\data.csv", delimiter=",")
    # Run gradient descent to obtain w, b and the loss history
    w, b, cost_list = grad_desc(init_w, init_b, points, alpha, num_iter)
    print("w is :", w)
    print("b is :", b)
    x = points[:, 0]
    y = points[:, 1]
    plt.scatter(x, y)

    # Fitted line
    grad_y = w * x + b
    plt.plot(x, grad_y, c="r")

    # Optionally plot the loss curve to check convergence
    # plt.plot(cost_list)
    plt.show()


D:\Python\python.exe D:/projects/PythonProjects/PythonStudy/python-1/com/python/stuay/GradientDescent.py
w is : 1.4675440436333027
b is : 0.6078985997054932
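As a sanity check (not part of the original post), the fitted parameters above can be compared against a closed-form least-squares fit. Assuming data.csv holds two comma-separated columns x, y, np.polyfit should return similar values:

# Closed-form least-squares fit for comparison (degree-1 polynomial: slope, intercept)
w_fit, b_fit = np.polyfit(points[:, 0], points[:, 1], 1)
print("polyfit w:", w_fit, "b:", b_fit)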

Reprinted from blog.csdn.net/weixin_43233971/article/details/108014765