本文最后更新于 845 天前,其中的信息可能已经有所发展或是发生改变。
题1:详见上一篇文章
题2:线性回归基础
本题与题1类似,也是通过确定参数的值来使得损失最小。与第一题不同的是,本题含有两个参数,即w和b,在每次更新参数时,我们要分别对w和b求导,并同步更新w、b的值。不难写出代码。
from tkinter import N
import numpy as np
import math
from matplotlib import pyplot as plt
from random import randint, random
LEARNING_RATE = 0.1 # gradient-descent step size
ITERATIONS = 10000 # number of parameter-update iterations
EPS = 10e-4 # finite-difference step; NOTE(review): 10e-4 == 1e-3 — confirm 1e-4 wasn't intended
def linearf(x):
    """Sample noisy targets along the line y = 3x.

    One standard-normal noise term is drawn per element of `x`,
    so the returned array has the same length as `x`.
    """
    noise = np.random.randn(x.shape[0])
    return 3 * x + noise
def y_cal(w, b, x):
    """Evaluate the line with slope `w` and intercept `b` at `x`."""
    return b + x * w
def mse_cal(w, b, x, y):
    """Mean squared error of the line (w, b) against points (x, y).

    Fix: the original divided by the hard-coded sample size 11; this
    divides by len(y), so the function works for any number of points
    (identical result for the 11-point data used in this script).
    The tiny y_cal helper is inlined here; behavior is unchanged.
    """
    if len(y) == 0:
        return 0.0  # matches original 0/11 result for an empty sample
    return sum((yi - (w * xi + b)) ** 2 for xi, yi in zip(x, y)) / len(y)
def cal_w_grad(w, b, x, y):
    """Forward-difference estimate of d(MSE)/dw at (w, b), step EPS."""
    loss_here = mse_cal(w, b, x, y)
    loss_bumped = mse_cal(w + EPS, b, x, y)
    return (loss_bumped - loss_here) / EPS
def cal_b_grad(w, b, x, y):
    """Forward-difference estimate of d(MSE)/db at (w, b), step EPS."""
    loss_here = mse_cal(w, b, x, y)
    loss_bumped = mse_cal(w, b + EPS, x, y)
    return (loss_bumped - loss_here) / EPS
# --- initialise w and b randomly ---
w = randint(-20, 20)
b = randint(-20, 20)
print("initial w:{:.2f} b:{:.2f}".format(w, b))

# --- generate the noisy sample and plot the scatter ---
x = np.linspace(-3, 3, 11)
y = linearf(x)
plt.scatter(x, y, c="#66CCFF")

# --- gradient descent; note b's gradient uses the just-updated w,
# --- matching the original update order exactly ---
for _ in range(ITERATIONS):  # fix: was `iter`, which shadowed the builtin
    w -= LEARNING_RATE * cal_w_grad(w, b, x, y)
    b -= LEARNING_RATE * cal_b_grad(w, b, x, y)

# --- draw the fitted line over [-3, 3] and report the result ---
plt.plot([-3, 3], [-3 * w + b, 3 * w + b], c="r")
print("w:{:.2f}".format(w) + " b:{:.2f}".format(b))
loss = mse_cal(w, b, x, y)  # fix: computed once; original re-evaluated it per print/title
print("Loss: " + str(loss))
# Title formatting: "+ b", bare, or "- |b|" depending on the intercept's sign.
if b >= 0.01:
    plt.title("y = {:.2f} x + {:.2f}\nLOSS = {:.2f}".format(w, b, loss))
elif b < 0.01 and b > -0.01:
    plt.title("y = {:.2f} x\nLOSS = {:.2f}".format(w, loss))
else:
    plt.title("y = {:.2f} x - {:.2f}\nLOSS = {:.2f}".format(w, -1 * b, loss))
plt.xlabel("X")
plt.ylabel("Y")
plt.show()
效果比较满意: