# Implements gradient descent with a dynamic (decaying) step size.
# NOTE(review): the original header also claims stochastic gradient descent,
# but only batch gradient descent is implemented below.
import numpy as np
def gradient(x, y, w):
    """Return the gradient of the half squared-error loss w.r.t. w.

    For L(w) = (1/2) * ||y - x*w||^2 the gradient is x.T * (x*w - y).

    Args:
        x: (m, n) design matrix (np.matrix).
        y: (m, 1) target column vector (np.matrix).
        w: (n, 1) weight column vector (np.matrix).

    Returns:
        (n, 1) gradient column vector (np.matrix).
    """
    # Vectorized form of the original per-sample accumulation:
    #   g[j] = -sum_i (y[i] - x[i,:]*w) * x[i, j]
    # which is exactly x.T * (x*w - y), computed in one matrix product.
    return x.T * (x * w - y)
def lossValue(x, y, w):
    """Half squared-error loss (1/2)*||y - x*w||^2, returned as a 1x1 matrix."""
    residual = y - x * w
    return residual.T * residual / 2
# Training data: temperature vs. observed flower count.
temperatures = [15, 20, 25, 30, 35, 40]
flowers = [136, 140, 155, 160, 157, 175]

# Design matrix with a leading bias column of ones; targets and weights
# are column vectors (np.matrix throughout, matching the helpers above).
X = (np.mat([[1, 1, 1, 1, 1, 1], temperatures])).T
y = (np.mat(flowers)).T
w = (np.mat([0.0, 0.0])).T
print(w)

loss_change = 0.000001  # convergence threshold on the change in loss
loss = lossValue(X, y, w)
for i in range(30000):
    # Dynamic step size: base learning rate decayed as iterations progress.
    alpha = 0.00025
    decay = 0.00001
    alpha = alpha * 1.0 / (1.0 + decay * i)
    w = w - alpha * gradient(X, y, w)
    newloss = lossValue(X, y, w)
    # Stop once the loss stops changing meaningfully.
    # (The original line was truncated at "if abs(loss-newloss)";
    # completed with the comparison the loss_change variable implies.)
    if abs(float(loss - newloss)) < loss_change:
        break
    loss = newloss
# Reference: http://t.csdn.cn/ejjlj
# Assignment complete.



