怎么在python中利用梯度下降算法实现一个多线性回归

这篇文章给大家介绍怎么在python中利用梯度下降算法实现一个多线性回归,内容非常详细,感兴趣的小伙伴们可以参考借鉴,希望对大家能有所帮助。

具体内容如下

怎么在python中利用梯度下降算法实现一个多线性回归

怎么在python中利用梯度下降算法实现一个多线性回归

import pandas as pd
import matplotlib.pylab as plt
import numpy as np

# Read data from csv (raw string so the backslashes in the Windows path
# are never interpreted as escape sequences)
pga = pd.read_csv(r"D:\python3\data\Test.csv")

# Normalize each column to zero mean and unit variance: (x - mean) / std
for _col in ["AT", "V", "AP", "RH", "PE"]:
    pga[_col] = (pga[_col] - pga[_col].mean()) / pga[_col].std()


def cost(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y):
    """Mean-squared-error cost for a 4-feature linear hypothesis.

    J = 1/(2m) * sum_i (h_i - y_i)^2 where
    h_i = theta0 + theta1*x1[i] + theta2*x2[i] + theta3*x3[i] + theta4*x4[i].

    Args:
        theta0..theta4: model coefficients (theta0 is the intercept).
        x1..x4: feature sequences, all of length m.
        y: target sequence of length m.

    Returns:
        The scalar cost J.
    """
    # Number of observations
    m = len(x1)
    J = 0
    # Accumulate the squared error of each observation
    for i in range(m):
        h = theta0 + x1[i] * theta1 + x2[i] * theta2 + x3[i] * theta3 + x4[i] * theta4
        J += (h - y[i]) ** 2
    # Average and halve (the 1/2 cancels neatly when differentiating)
    J /= (2 * m)
    return J
#Thecostfortheta0=0andtheta1=1


def partial_cost_theta4(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y):
    """Partial derivative dJ/d(theta4) = (1/m) * sum((h - y) * x4).

    x1..x4 and y must be equal-length numpy arrays / pandas Series so the
    arithmetic is vectorized. Returns a scalar.
    """
    h = theta0 + x1 * theta1 + x2 * theta2 + x3 * theta3 + x4 * theta4
    diff = (h - y) * x4
    # Divide by m; use this gradient's own feature vector for the length
    # (the original divided by x2.shape[0], which is the same value but misleading)
    partial = diff.sum() / x4.shape[0]
    return partial


def partial_cost_theta3(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y):
    """Partial derivative dJ/d(theta3) = (1/m) * sum((h - y) * x3).

    x1..x4 and y must be equal-length numpy arrays / pandas Series.
    Returns a scalar.
    """
    h = theta0 + x1 * theta1 + x2 * theta2 + x3 * theta3 + x4 * theta4
    diff = (h - y) * x3
    # Divide by m; use this gradient's own feature vector for the length
    partial = diff.sum() / x3.shape[0]
    return partial


def partial_cost_theta2(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y):
    """Partial derivative dJ/d(theta2) = (1/m) * sum((h - y) * x2).

    x1..x4 and y must be equal-length numpy arrays / pandas Series.
    Returns a scalar.
    """
    h = theta0 + x1 * theta1 + x2 * theta2 + x3 * theta3 + x4 * theta4
    diff = (h - y) * x2
    # Divide by m, the number of observations
    partial = diff.sum() / x2.shape[0]
    return partial


def partial_cost_theta1(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y):
    """Partial derivative dJ/d(theta1) = (1/m) * sum((h - y) * x1).

    x1..x4 and y must be equal-length numpy arrays / pandas Series.
    Returns a scalar.
    """
    h = theta0 + x1 * theta1 + x2 * theta2 + x3 * theta3 + x4 * theta4
    diff = (h - y) * x1
    # Divide by m; use this gradient's own feature vector for the length
    partial = diff.sum() / x1.shape[0]
    return partial

# 对theta0进行求导
# Partial derivative of cost with respect to theta0


def partial_cost_theta0(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y):
    """Partial derivative dJ/d(theta0) = (1/m) * sum(h - y).

    The intercept term has no feature multiplier, so the residuals are
    averaged directly. x1..x4 and y must be equal-length numpy arrays /
    pandas Series. Returns a scalar.
    """
    h = theta0 + x1 * theta1 + x2 * theta2 + x3 * theta3 + x4 * theta4
    diff = h - y
    # Divide by m; the target vector is the natural source of the length here
    partial = diff.sum() / y.shape[0]
    return partial


def gradient_descent(x1, x2, x3, x4, y, alpha=0.1, theta0=0, theta1=0, theta2=0, theta3=0, theta4=0):
    """Fit theta0..theta4 to (x1..x4, y) by batch gradient descent.

    Args:
        x1..x4: feature vectors (equal-length numpy arrays / pandas Series).
        y: target vector.
        alpha: learning rate.
        theta0..theta4: initial coefficient values.

    Returns:
        dict with the final coefficients ('theta0'..'theta4') and the list
        of cost values recorded at each iteration ('costs'). Existing
        callers that read only 'costs' keep working.
    """
    max_epochs = 1000             # hard cap on the number of iterations
    counter = 0                   # current iteration
    c = cost(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)
    costs = [c]                   # cost history, one entry per iteration
    # Convergence threshold: stop once the cost improvement between two
    # consecutive iterations drops below this value.
    convergence_thres = 0.000001
    cprev = c + 10                # seed so the loop body runs at least once

    # Iterate until the cost converges or we hit the iteration cap
    while (np.abs(cprev - c) > convergence_thres) and (counter < max_epochs):
        cprev = c
        # Evaluate every partial derivative at the CURRENT parameter values
        # first, then apply all updates simultaneously — updating one theta
        # before computing the next slope would bias the gradient.
        update0 = alpha * partial_cost_theta0(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)
        update1 = alpha * partial_cost_theta1(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)
        update2 = alpha * partial_cost_theta2(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)
        update3 = alpha * partial_cost_theta3(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)
        update4 = alpha * partial_cost_theta4(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)

        # "-=" descends the gradient ("+=" would ascend it)
        theta0 -= update0
        theta1 -= update1
        theta2 -= update2
        theta3 -= update3
        theta4 -= update4

        # Recompute and record the cost with the updated parameters
        c = cost(theta0, theta1, theta2, theta3, theta4, x1, x2, x3, x4, y)
        costs.append(c)
        counter += 1

    return {'theta0': theta0, 'theta1': theta1, 'theta2': theta2,
            'theta3': theta3, 'theta4': theta4, 'costs': costs}

# Run gradient descent on the normalized data and plot the cost per iteration.
print("costs=", gradient_descent(pga.AT, pga.V, pga.AP, pga.RH, pga.PE)['costs'])
descend = gradient_descent(pga.AT, pga.V, pga.AP, pga.RH, pga.PE, alpha=.01)
plt.scatter(range(len(descend["costs"])), descend["costs"])
plt.show()

损失函数随迭代次数变化图:

怎么在python中利用梯度下降算法实现一个多线性回归

关于怎么在python中利用梯度下降算法实现一个多线性回归就分享到这里了,希望以上内容可以对大家有一定的帮助,可以学到更多知识。如果觉得文章不错,可以把它分享出去让更多的人看到。

发布于 2021-03-21 22:37:56
收藏
分享
海报
0 条评论
165
上一篇:count函数如何在Python中使用 下一篇:怎么在python中使用PIL剪切图片
目录

    推荐阅读

    0 条评论

    本站已关闭游客评论,请登录或者注册后再评论吧~

    忘记密码?

    图形验证码