Fitting y=ax^2+bx+c with Gradient Descent

This post uses gradient descent to fit data generated from y=ax^2+bx+c. Earlier examples all used the linear model y=ax+b, where the method works well. Does gradient descent still work well on a nonlinear function?
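
Before running the code, it helps to see why gradient descent still applies. The model y = a*x^2 + b*x + c is nonlinear in x but linear in the parameters a, b, c, so the mean squared error

L(a,b,c) = (1/N) * sum_i (a*x_i^2 + b*x_i + c - y_i)^2

is convex in (a, b, c), and its partial derivatives are

dL/da = (2/N) * sum_i (a*x_i^2 + b*x_i + c - y_i) * x_i^2
dL/db = (2/N) * sum_i (a*x_i^2 + b*x_i + c - y_i) * x_i
dL/dc = (2/N) * sum_i (a*x_i^2 + b*x_i + c - y_i)

These are exactly the three sums that step_gradient accumulates in the code below.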

#y=ax^2+bx+c
import numpy as np

# generate 100 noisy samples from y = 1.477*x^2 + 0.089*x + 2
data = []
for i in range(100):
    x = np.random.uniform(-10, 10)
    eps = np.random.normal(0, 0.01)   # small Gaussian noise
    y = 1.477 * (x**2) + 0.089 * x + 2 + eps
    data.append([x, y])
data1 = np.array(data)                # shape (100, 2): column 0 is x, column 1 is y
def compute_error(a, b, c, data):
    # mean squared error of (a, b, c) over the dataset
    totalError = 0
    for i in range(0, len(data)):
        x = data[i, 0]
        y = data[i, 1]
        totalError += (y - (a * (x**2) + b * x + c)) ** 2
    return totalError / float(len(data))
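
# Optional vectorized sketch: the same MSE via NumPy array operations instead
# of a Python loop (the helper name compute_error_vec is illustrative).
def compute_error_vec(a, b, c, data):
    x, y = data[:, 0], data[:, 1]
    return np.mean((y - (a * x**2 + b * x + c)) ** 2)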

def step_gradient(a_current, b_current, c_current, data, lr):
    # one gradient-descent step: accumulate dL/da, dL/db, dL/dc, then update
    a_gradient = 0
    b_gradient = 0
    c_gradient = 0
    N = float(len(data))
    for i in range(0, len(data)):
        x = data[i, 0]
        y = data[i, 1]
        a_gradient += (2/N) * ((a_current * (x**2) + b_current * x + c_current) - y) * (x**2)
        b_gradient += (2/N) * ((a_current * (x**2) + b_current * x + c_current) - y) * x
        c_gradient += (2/N) * ((a_current * (x**2) + b_current * x + c_current) - y)
    new_a = a_current - (lr * a_gradient)
    new_b = b_current - (lr * b_gradient)
    new_c = c_current - (lr * c_gradient)
    return [new_a, new_b, new_c]
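
# Optional vectorized sketch: the same three partial derivatives in one NumPy
# pass (the helper name step_gradient_vec is illustrative).
def step_gradient_vec(a, b, c, data, lr):
    x, y = data[:, 0], data[:, 1]
    err = (a * x**2 + b * x + c) - y   # residuals on all samples at once
    a_grad = 2 * np.mean(err * x**2)
    b_grad = 2 * np.mean(err * x)
    c_grad = 2 * np.mean(err)
    return [a - lr * a_grad, b - lr * b_grad, c - lr * c_grad]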

def gradient_descent(data, starting_a, starting_b, starting_c, lr, num_iterations):
    # run num_iterations gradient steps from the given starting point
    a = starting_a
    b = starting_b
    c = starting_c
    for i in range(num_iterations):
        a, b, c = step_gradient(a, b, c, data, lr)
    return [a, b, c]

def run():
    lr = 0.0001
    data = data1
    initial_a = 0
    initial_b = 0
    initial_c = 1
    num_iterations = 2000
    loss = compute_error(initial_a, initial_b, initial_c, data)
    print('a:{},b:{},c:{},loss:{}'.format(initial_a, initial_b, initial_c, loss))
    print('Running')
    [a, b, c] = gradient_descent(data, initial_a, initial_b, initial_c, lr, num_iterations)
    print("After {0} iterations a = {1}, b = {2}, c = {3}, loss = {4}".
          format(num_iterations, a, b, c,
                 compute_error(a, b, c, data))
          )

if __name__ == '__main__':
    run()
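
As a sanity check (a sketch; np.polyfit is NumPy's closed-form least-squares polynomial fit and, for degree 2, returns the coefficients [a, b, c] from the highest power down), the descent result can be compared with the exact solution:

x, y = data1[:, 0], data1[:, 1]
a, b, c = np.polyfit(x, y, 2)   # exact least-squares quadratic fit
print('polyfit:', a, b, c)      # should land near 1.477, 0.089, 2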

Note the choice of the initial a, b, c in the code: different starting values can leave the fitted result far apart! Can other nonlinear functions be fitted the same way? Think it over.
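
To see this concretely, run the same descent from two starting points (a sketch built on the functions above). The loss is convex, so any start converges eventually; but x^2 reaches 100 while the constant feature is only 1, so the gradient is much steeper along a than along c, and after a fixed 2000 iterations the estimate of c can still be far from 2 for a poor start:

for init in [(0, 0, 1), (5, 5, 5)]:
    a, b, c = gradient_descent(data1, init[0], init[1], init[2], 0.0001, 2000)
    print(init, '->', a, b, c, compute_error(a, b, c, data1))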

Reposted from blog.csdn.net/weixin_50918736/article/details/119063370