Skip to content

Instantly share code, notes, and snippets.

@JustinSDK
Last active August 11, 2021 06:30
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save JustinSDK/a46db17f0a63541b0a5c8a1767331b39 to your computer and use it in GitHub Desktop.
NumPy實現線性迴歸(一)
import numpy as np
import cv2
import matplotlib.pyplot as plt
def model(x, w, b):
    """Linear model: predict ``w * x + b`` for input ``x``.

    Works for scalars and NumPy arrays alike (broadcasting).
    """
    prediction = x * w + b
    return prediction
def mse_loss(p, y):
    """Mean squared error between predictions ``p`` and targets ``y``."""
    diff = p - y
    return (diff * diff).mean()
def grad_loss(x, y, p, w, b):
    """Numerically estimate the gradient of the MSE loss w.r.t. ``(w, b)``.

    Uses a central-difference approximation rather than an analytic
    derivative; ``p`` is accepted for interface compatibility but the
    loss is re-evaluated from the perturbed parameters.
    """
    def loss_at(params):
        # Loss as a function of the parameter vector [w, b].
        return mse_loss(model(x, params[0], params[1]), y)

    def central_difference(f, params):
        # Gradient of f via (f(p + h) - f(p - h)) / (2h), one coordinate at a time.
        h = 1e-4
        grad = np.zeros_like(params)
        for i, original in enumerate(params):
            params[i] = original + h
            upper = f(params)
            params[i] = original - h
            lower = f(params)
            grad[i] = (upper - lower) / (2 * h)
            params[i] = original  # restore before perturbing the next coordinate
        return grad

    return central_difference(loss_at, np.array([w, b]))
def training_loop(epochs, lr, params, x, y, verbose = True):
    """Fit the linear model by gradient descent and return the final params.

    Each epoch: predict with the current (w, b), estimate the loss
    gradient numerically, and take one step of size ``lr`` against it.
    When ``verbose``, the pre-update loss and post-update params are printed.
    """
    for epoch in range(1, epochs + 1):
        weight, bias = params
        preds = model(x, weight, bias)
        step = lr * grad_loss(x, y, preds, weight, bias)
        params = params - step
        if verbose:
            print('週期', epoch, '--')
            # Loss is computed from the predictions made BEFORE this update.
            print('\t損失:', float(mse_loss(preds, y)))
            print('\t模型參數:', params)
    return params
# Extract dark pixels from a scatter-plot image and fit a line to them.
# Source image: https://openhome.cc/Gossip/DCHardWay/images/PolynomialRegression-1.JPG
img = cv2.imread('PolynomialRegression-1.JPG', cv2.IMREAD_GRAYSCALE)
idx = np.where(img < 127)  # (row, col) indices of dark (near-black) pixels
x = idx[1]  # column indices serve as the x coordinates
y = -idx[0] + img.shape[0]  # flip the y axis (image rows grow downward)
plt.gca().set_aspect(1)
plt.scatter(x, y)
# Fit y = w*x + b to the extracted points; training_loop returns the params pair.
w, b = training_loop(
epochs = 100,
lr = 0.001,
params = np.array([1.0, 0.0]),
x = x,
y = y
)
# Draw the fitted line over the scatter plot (x reused as the plotting grid).
x = np.linspace(0, 50, 50)
y = w * x + b
plt.plot(x, y)
plt.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment