目录

线性回归

案例:画图模拟梯度下降的过程

1.通过w0与w1模型参数,绘制回归线

2.训练过程图,绘制w0,w1,loss的变化曲线

3.绘制三维曲面图

4.以等高线的方式绘制梯度下降的过程

线性回归相关API:

案例:基于线性回归训练 single.txt 中的训练样本,使用模型预测测试样本。


线性回归

预测函数:y=w0+w1x

x:输入

y:输出

w0和w1:模型参数

所谓模型训练,就是根据已知的x和y,找到最佳的模型参数w0和w1,尽可能精确地描述出输入和输出的关系。

案例:画图模拟梯度下降的过程

1.通过w0与w1模型参数,绘制回归线

import numpy as np
import matplotlib.pyplot as mp

# Training and test samples (x: input, y: target output).
train_x = np.array([0.5, 0.6, 0.8, 1.1, 1.4])
train_y = np.array([5.0, 5.5, 6.0, 6.8, 7.0])
test_x = np.array([0.45, 0.55, 1.0, 1.3, 1.5])
test_y = np.array([4.8, 5.3, 6.4, 6.9, 7.3])

# Model: y = w0 + w1 * x, both parameters initialised to 1.
w0, w1 = 1, 1
times = 1000   # number of gradient-descent iterations
lrate = 0.01   # learning rate (step size of each update)

for _ in range(times):
    # Residual of the current model on the training samples.
    residual = w0 + w1 * train_x - train_y
    # Partial derivatives of the squared-error loss w.r.t. w0 and w1.
    d0 = residual.sum()
    d1 = (residual * train_x).sum()
    # Gradient-descent update.
    w0 -= lrate * d0
    w1 -= lrate * d1

print('w0:', w0)
print('w1:', w1)

# Evaluate the fitted line over the span of the training inputs.
linex = np.linspace(train_x.min(), train_x.max(), 100)
liney = w1 * linex + w0

# Plot samples plus regression line.
mp.figure('Linear Regression', facecolor='lightgray')
mp.title('Linear Regression', fontsize=18)
mp.grid(linestyle=':')
mp.scatter(train_x, train_y, s=80, marker='o', color='dodgerblue', label='Samples')
mp.plot(linex, liney, color='orangered', linewidth=2, label='Regression Line')
mp.legend()
mp.show()

(图 1:回归线绘制结果)

2.训练过程图,绘制w0,w1,loss的变化曲线

import numpy as np
import matplotlib.pyplot as mp

# Training and test samples (x: input, y: target output).
train_x = np.array([0.5, 0.6, 0.8, 1.1, 1.4])
train_y = np.array([5.0, 5.5, 6.0, 6.8, 7.0])
test_x = np.array([0.45, 0.55, 1.0, 1.3, 1.5])
test_y = np.array([4.8, 5.3, 6.4, 6.9, 7.3])

# Histories: w0/w1 keep every intermediate parameter value so the
# training curves can be drawn afterwards.
w0, w1, losses, epoches = [1], [1], [], []
times = 1000   # number of gradient-descent iterations
lrate = 0.01   # learning rate

for epoch in range(1, times + 1):
    epoches.append(epoch)
    residual = w0[-1] + w1[-1] * train_x - train_y
    # Squared-error loss for the current parameters.
    loss = (residual ** 2).sum() / 2
    losses.append(loss)
    print('{:4}>w0={:.8f},w1={:.8f},loss={:.8f}'.format(epoch, w0[-1], w1[-1], loss))
    # Gradient-descent update, appended to the histories.
    w0.append(w0[-1] - lrate * residual.sum())
    w1.append(w1[-1] - lrate * (residual * train_x).sum())

print('w0:', w0[-1])
print('w1:', w1[-1])

# Final regression line (kept for parity with the first listing).
linex = np.linspace(train_x.min(), train_x.max(), 100)
liney = w1[-1] * linex + w0[-1]

# Training-progress figure: w0, w1 and loss versus epoch.
mp.figure('Training Progress', facecolor='lightgray')
mp.title('Training Progress', fontsize=18)
# (subplot position, curve label, data, colour) — w0/w1 histories hold
# one extra trailing value, hence the [:-1] slice to match `epoches`.
curves = [
    (311, r'$w_0$', w0[:-1], 'dodgerblue'),
    (312, r'$w_1$', w1[:-1], 'dodgerblue'),
    (313, r'$w_loss$', losses, 'orangered'),
]
for position, label, data, colour in curves:
    mp.subplot(position)
    mp.grid(linestyle=':')
    mp.ylabel(label, fontsize=14)
    mp.plot(epoches, data, color=colour, label=label)
    mp.legend()
    mp.tight_layout()

mp.show()

 

(图 2:训练过程中 w0、w1、loss 的变化曲线)

3.绘制三维曲面图

import numpy as np
import matplotlib.pyplot as mp
from mpl_toolkits.mplot3d import axes3d  # registers the '3d' projection

# Training and test samples (x: input, y: target output).
train_x = np.array([0.5, 0.6, 0.8, 1.1, 1.4])
train_y = np.array([5.0, 5.5, 6.0, 6.8, 7.0])
test_x = np.array([0.45, 0.55, 1.0, 1.3, 1.5])
test_y = np.array([4.8, 5.3, 6.4, 6.9, 7.3])

# Histories: w0/w1 keep every intermediate parameter value.
w0, w1, losses, epoches = [1], [1], [], []
times = 1000   # number of gradient-descent iterations
lrate = 0.01   # learning rate
for i in range(1, times + 1):
    epoches.append(i)
    # Squared-error loss for the current parameters.
    loss = (((w0[-1] + w1[-1] * train_x) - train_y) ** 2).sum() / 2
    losses.append(loss)
    print('{:4}>w0={:.8f},w1={:.8f},loss={:.8f}'.format(i, w0[-1], w1[-1], loss))
    # Partial derivatives of the loss w.r.t. w0 and w1.
    d0 = ((w0[-1] + w1[-1] * train_x) - train_y).sum()
    d1 = (((w0[-1] + w1[-1] * train_x) - train_y) * train_x).sum()
    # Gradient-descent update, appended to the histories.
    w0.append(w0[-1] - lrate * d0)
    w1.append(w1[-1] - lrate * d1)
print('w0:', w0[-1])
print('w1:', w1[-1])

# Loss surface evaluated over a (w0, w1) parameter grid.
n = 500
w0_grid, w1_grid = np.meshgrid(np.linspace(-3, 10, n), np.linspace(-3, 10, n))
# BUG FIX: the original accumulated into the leftover scalar `loss` from
# the training loop (offsetting the surface by the final training loss)
# and never used `loss_grid`; accumulate into the grid instead.
loss_grid = 0
for x, y in zip(train_x, train_y):
    loss_grid += (w0_grid + w1_grid * x - y) ** 2 / 2

mp.figure('Loss Function', facecolor='lightgray')
# mp.gca(projection='3d') was removed in matplotlib 3.6; use subplot.
ax3d = mp.subplot(projection='3d')
ax3d.set_xlabel('w0')
ax3d.set_ylabel('w1')
ax3d.set_zlabel('loss')
ax3d.plot_surface(w0_grid, w1_grid, loss_grid, cstride=30, rstride=30, cmap='jet')
mp.tight_layout()

mp.show()

 

(图 3:损失函数的三维曲面图)

4.以等高线的方式绘制梯度下降的过程

# Contour view of the loss surface with the batch-gradient-descent path.
# NOTE(review): this fragment relies on the w0/w1 parameter histories and
# the w0_grid/w1_grid/loss_grid arrays built in the previous listing; the
# original referenced undefined names (grid_w0, grid_w1, grid_loss),
# sliced them with [-1] (a single row — contourf needs full 2-D grids),
# and plotted only the final point instead of the descent trajectory.
mp.figure('Batch Gradient Descent', facecolor='lightgray')
mp.title('Batch Gradient Descent', fontsize=20)
mp.ylabel('y', fontsize=14)
mp.xlabel('x', fontsize=14)
mp.tick_params(labelsize=10)
mp.grid(linestyle=':')
mp.contourf(w0_grid, w1_grid, loss_grid, 10, cmap='jet')
cntr = mp.contour(w0_grid, w1_grid, loss_grid, 10, colors='black', linewidths=0.5)
mp.clabel(cntr, inline_spacing=0.1, fmt='%.2f', fontsize=8)
# Draw the whole descent path (one marker per iteration), not just the end.
mp.plot(w0, w1, 'o-', c='orangered', label='BGD')
mp.legend()

 

线性回归相关API:

# BUG FIX: the original `import sklearn2` is a typo — no such module
# exists and `lm` was never defined. LinearRegression lives in
# sklearn.linear_model.
import sklearn.linear_model as lm

# Create the model.
model = lm.LinearRegression()
# Train the model:
#   input  — a 2-D array (sample matrix), one row per sample;
#   output — the target value of each sample.
# NOTE: sklearn's LinearRegression solves ordinary least squares
# directly; the original comment claiming gradient descent was wrong.
model.fit(输入, 输出)
# Predict: `array` is a 2-D array, one row per sample, one column per feature.
result = model.predict(array)

案例:基于线性回归训练 single.txt 中的训练样本,使用模型预测测试样本。

import numpy as np
import sklearn.linear_model as lm
import matplotlib.pyplot as mp

# Load the samples. BUG FIX: the original path literal contained stray
# spaces ('../ data / single.txt ') and could never resolve on disk.
x, y = np.loadtxt('../data/single.txt', delimiter=',', unpack=True)
x = x.reshape(-1, 1)  # sklearn expects a 2-D sample matrix

mp.figure('Linear Regression', facecolor='lightgray')
mp.title('Linear Regression', fontsize=18)
mp.grid(linestyle=':')
mp.scatter(x, y, s=70, color='dodgerblue', label='Sample Points')

# Build the linear-regression model and fit it to the samples.
model = lm.LinearRegression()
model.fit(x, y)
# Predict on the training inputs and draw the fitted line.
pred_y = model.predict(x)
mp.plot(x, pred_y, color='orangered', label='Regression Line')
mp.legend()
mp.show()

(图 4:基于 sklearn 的线性回归拟合结果)