Gradient Ascent

The code below uses gradient ascent to find the first principal component of a 2-D dataset: the unit vector w that maximizes the variance of the demeaned data projected onto it.


import numpy as np
import matplotlib.pyplot as plt

# Synthetic 2-D dataset: the second feature is roughly linear in the first, plus Gaussian noise
X = np.empty((100, 2))
X[:, 0] = np.random.uniform(0., 100., size=100)
X[:, 1] = 0.75 * X[:, 0] + 3. + np.random.normal(0., 10., size=100)

plt.scatter(X[:,0],X[:,1])
plt.show()

def demean(X):
    # Center the data: subtract the per-feature (column) mean
    return X - np.mean(X, axis=0)

x_demean = demean(X)
plt.scatter(x_demean[:,0],x_demean[:,1])
plt.show()
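
A quick check (my addition, not in the original post) that the demeaning worked: the column means of x_demean should be numerically zero.

print(np.mean(x_demean, axis=0))   # expect values on the order of 1e-14, i.e. effectively [0, 0]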


def direction(w):
    # Normalize w to a unit vector; only its direction matters
    return w / np.linalg.norm(w)

def f(w, X):
    # Objective: variance of the (demeaned) data projected onto w
    return np.sum(X.dot(w) ** 2) / len(X)

def df_math(w, X):
    # Analytic gradient of f with respect to w
    return X.T.dot(X.dot(w)) * 2. / len(X)

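For reference, this is what f and df_math compute (my own summary of the code above, with m = len(X)):

f(w) = \frac{1}{m} \sum_{i=1}^{m} \big(X^{(i)} \cdot w\big)^2 = \frac{1}{m} \lVert Xw \rVert^2,
\qquad
\nabla f(w) = \frac{2}{m} X^{\mathsf{T}} X w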

def gradient_ascent(df, X, initial_w, eta=0.01, n_iters=1e4, epsilon=1e-8):
    # Climb along the gradient, re-normalizing w to a unit vector at every step
    w = direction(initial_w)
    cur_iter = 0

    while cur_iter < n_iters:
        gradient = df(w, X)
        last_w = w
        w = w + eta * gradient
        w = direction(w)   # keep w a unit vector
        if abs(f(w, X) - f(last_w, X)) < epsilon:
            # Stop when the objective barely changes
            break
        cur_iter += 1

    return w
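
Before trusting the analytic gradient, it can be checked numerically. This is a debug-only sketch of my own (the name df_debug is not from the original post); it approximates each partial derivative of f by a central difference and can be passed to gradient_ascent in place of df_math to confirm both converge to (almost) the same direction.

def df_debug(w, X, epsilon=1e-4):
    # Approximate each partial derivative of f by a central difference
    res = np.empty(len(w))
    for i in range(len(w)):
        w_1 = w.copy()
        w_1[i] += epsilon
        w_2 = w.copy()
        w_2[i] -= epsilon
        res[i] = (f(w_1, X) - f(w_2, X)) / (2 * epsilon)
    return res

# e.g. gradient_ascent(df_debug, x_demean, initial_w) should give nearly the same w as df_math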


initial_w = np.random.random(X.shape[1])   # random starting direction (must not be the zero vector)

print(initial_w)

w1 = gradient_ascent(df_math, x_demean, initial_w)

print(w1)

# Plot the demeaned data together with the direction found by gradient ascent
plt.scatter(x_demean[:, 0], x_demean[:, 1])
plt.plot([0, w1[0] * 30], [0, w1[1] * 30], color='r')
plt.show()
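
As a sanity check not in the original post: the direction found by gradient ascent should agree, up to sign, with the top eigenvector of the covariance matrix of the demeaned data, and (since X[:,1] was generated as roughly 0.75 * X[:,0] plus noise) both should point roughly along (1, 0.75) normalized, i.e. about (0.8, 0.6).

# Covariance-style matrix of the demeaned data
cov = x_demean.T.dot(x_demean) / len(x_demean)
eig_vals, eig_vecs = np.linalg.eigh(cov)   # np.linalg.eigh returns eigenvalues in ascending order
print(eig_vecs[:, -1])                     # eigenvector of the largest eigenvalue; should match w1 up to sign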


Reposted from www.cnblogs.com/heguoxiu/p/10135598.html