TF first example — linear regression

# Install any needed packages directly into the TF 2.0 environment

import tensorflow as tf
import pandas as pd
import numpy as np




# Synthesize 100 training samples from the line y = 1.477*x + 0.089,
# each perturbed by Gaussian noise (mean 0, std 0.1).
samples = []
for _ in range(100):
    # Draw x uniformly from (-10, 10), then the noise term.
    x_val = np.random.uniform(-10., 10.)
    noise = np.random.normal(0., 0.1)
    # Ground-truth model output plus noise.
    samples.append([x_val, 1.477 * x_val + 0.089 + noise])
# Convert to an (N, 2) ndarray: column 0 is x, column 1 is y.
data = np.array(samples)

def mse(b, w, points):
    """Mean squared error of the model y = w*x + b over the sample set.

    Args:
        b: bias (intercept) of the line.
        w: weight (slope) of the line.
        points: (N, 2) array where column 0 is x and column 1 is y.

    Returns:
        The average squared residual as a float.
    """
    n = len(points)
    residuals = [(points[i, 1] - (w * points[i, 0] + b)) ** 2 for i in range(n)]
    return sum(residuals) / float(n)

def step_gradient(b_current, w_current, points, lr):
    """Perform one gradient-descent update for the MSE loss of y = w*x + b.

    Args:
        b_current: current bias value.
        w_current: current weight value.
        points: (N, 2) array where column 0 is x and column 1 is y.
        lr: learning rate (step size).

    Returns:
        [new_b, new_w]: the parameters after one update step.
    """
    b_gradient = 0
    w_gradient = 0
    m = float(len(points))
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        err = (w_current * x + b_current) - y
        # dL/db = (2/M) * sum(err)
        b_gradient += (2 / m) * err
        # dL/dw = (2/M) * sum(err * x)
        # BUG FIX: the original omitted the "* x" factor, making the weight
        # gradient identical to the bias gradient, so w never fit correctly.
        w_gradient += (2 / m) * err * x
    new_b = b_current - (lr * b_gradient)
    new_w = w_current - (lr * w_gradient)
    return [new_b, new_w]

def gradient_descent(points, starting_b, starting_w, lr, num_iterations):
    """Run gradient descent for num_iterations steps, logging every 50 steps.

    Args:
        points: (N, 2) array of [x, y] samples.
        starting_b: initial bias.
        starting_w: initial weight.
        lr: learning rate passed through to step_gradient.
        num_iterations: number of update steps to perform.

    Returns:
        [b, w]: the fitted parameters after the final step.
    """
    b, w = starting_b, starting_w
    for step in range(num_iterations):
        b, w = step_gradient(b, w, np.array(points), lr)
        loss = mse(b, w, points)
        # Periodic progress report.
        if step % 50 == 0:
            print(f"iteration:{step},   loss{loss},    w:{w},   b:{b}")
    return [b, w]

def main():
    """Fit (w, b) to the global `data` via gradient descent and report the loss."""
    lr = 0.01            # learning rate
    initial_b = 0        # start from a zero-initialized line
    initial_w = 0
    num_iterations = 1000
    [b, w] = gradient_descent(data, initial_b, initial_w, lr, num_iterations)
    loss = mse(b, w, data)
    # BUG FIX: corrected the typo "Fnal" -> "Final" in the report message.
    print(f'Final loss:{loss},   w:{w},   b:{b}')

# Idiom fix: guard the script entry point so importing this module
# does not immediately run the training loop.
if __name__ == "__main__":
    main()

  

猜你喜欢

转载自www.cnblogs.com/Loving-Q/p/12671330.html