Skip to content

Instantly share code, notes, and snippets.

@KeitaTakenouchi
Last active August 2, 2021 02:16
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save KeitaTakenouchi/9b868180c13e09d69b11b7d1ca8d3f06 to your computer and use it in GitHub Desktop.
import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
from random import randrange
# Training data: 12 scalar samples stored as column vectors (shape (12, 1)),
# the layout scikit-learn expects for a single-feature regression.
xs = np.array([1, 3.1, 2.9, 1.3, 2, 4, 4.7, 5, 6, 7, 7.1, 7.2]).reshape(-1, 1)
ys = np.array([2, 5.8, 4.2, 2.4, 4, 8, 9.2, 9, 10, 10, 11, 12]).reshape(-1, 1)

# Fit ordinary least squares and report the learned slope and intercept.
reg = LinearRegression().fit(xs, ys)
print(reg.coef_, reg.intercept_)
ys_predicted = reg.predict(xs)
def plot_liner_regression():
    """Plot the fitted regression line, the data points, and dotted red
    residual segments joining each observation to its prediction.

    Uses the module-level ``xs``, ``ys`` and ``ys_predicted`` arrays.
    """
    # Lower/upper ends of each residual segment.  np.minimum/np.maximum
    # vectorise this; the original looped and applied builtin min()/max()
    # to size-1 arrays, which relies on fragile array truth-testing.
    ys_min = np.minimum(ys, ys_predicted).ravel()
    ys_max = np.maximum(ys, ys_predicted).ravel()
    plt.vlines(xs, ys_min, ys_max, color="red", linestyles="dotted")
    plt.scatter(xs, ys, color="black", s=20)
    plt.plot(xs, ys_predicted, color="red", linewidth=1.5)
    plt.show()
def plot_line_intercept():
    """Overlay two candidate lines with slopes 1.0 (red) and 2.0 (green),
    both with intercept 1.0, each with dotted residual segments to the data.

    Uses the module-level ``xs`` and ``ys`` arrays.
    """
    def _draw_candidate(slope, color):
        # One candidate line y = slope*x + 1.0 plus its residuals; the
        # original repeated this stanza verbatim for each slope.
        predicted = slope * xs + 1.0
        ys_min = np.minimum(ys, predicted).ravel()
        ys_max = np.maximum(ys, predicted).ravel()
        plt.vlines(xs, ys_min, ys_max, color=color, linestyles="dotted")
        plt.scatter(xs, ys, color="black", s=20)  # re-drawn per call, as before
        plt.plot(xs, predicted, color=color, linewidth=1.5)

    _draw_candidate(1.0, "red")
    _draw_candidate(2.0, "green")
    plt.show()
def plot_line_decent():
    """Overlay two candidate lines with fixed slope 1.5 and intercepts
    -1.0 (red) and 3.0 (green), each with dotted residual segments.

    Uses the module-level ``xs`` and ``ys`` arrays.
    """
    def _draw_candidate(intercept, color):
        # One candidate line y = 1.5*x + intercept plus its residuals; the
        # original repeated this stanza verbatim for each intercept.
        predicted = 1.5 * xs + intercept
        ys_min = np.minimum(ys, predicted).ravel()
        ys_max = np.maximum(ys, predicted).ravel()
        plt.vlines(xs, ys_min, ys_max, color=color, linestyles="dotted")
        plt.scatter(xs, ys, color="black", s=20)  # re-drawn per call, as before
        plt.plot(xs, predicted, color=color, linewidth=1.5)

    _draw_candidate(-1.0, "red")
    _draw_candidate(3.0, "green")
    plt.show()
def Q(a, b):
    """Sum-of-squared-residuals loss of the line y = a*x + b over the
    module-level ``xs``/``ys`` data.

    Returns a plain float.  The original accumulated per-sample size-1
    arrays into a shadowed builtin ``sum``, yielding a shape-(1,) array —
    assigning that to a scalar array element raises on NumPy >= 1.25.
    """
    residuals = ys - (a * xs + b)
    return float(np.sum(residuals * residuals))
def partial_a_Q(a, b):
    """Partial derivative of Q w.r.t. the slope: -2 * sum(x_i * (y_i - (a*x_i + b))).

    Vectorised over the module-level ``xs``/``ys``; returns a scalar float
    instead of the shape-(1,) array the original's shadowed-``sum`` loop
    produced.
    """
    return float(-2 * np.sum(xs * (ys - (a * xs + b))))
def partial_b_Q(a, b):
    """Partial derivative of Q w.r.t. the intercept: -2 * sum(y_i - (a*x_i + b)).

    Vectorised over the module-level ``xs``/``ys``; returns a scalar float
    instead of the shape-(1,) array the original's shadowed-``sum`` loop
    produced.
    """
    return float(-2 * np.sum(ys - (a * xs + b)))
def plot_countour_Q():
    """Draw a contour map of sqrt(Q(a, b)) over a grid of parameter values.

    Rows of the grid follow ``b``, columns follow ``a``, matching the
    (x=a_range, y=b_range) layout matplotlib's ``contour`` expects.
    """
    a_range = np.arange(0, 4, 0.01)
    b_range = np.arange(-5, 5, 0.01)
    q_values = np.zeros(shape=(len(b_range), len(a_range)))
    for row, b_val in enumerate(b_range):
        for col, a_val in enumerate(a_range):
            q_values[row][col] = np.sqrt(Q(a_val, b_val))
    _, ax = plt.subplots()
    ax.contour(a_range, b_range, q_values, 100, cmap="RdGy")
    ax.set_xlabel("value of a")
    ax.set_ylabel("value of b")
    plt.show()
def plot_countour_GD():
    """Run full-batch gradient descent on (a, b) from (3.5, -2) and draw the
    optimisation path on top of a contour plot of sqrt(Q).

    Uses the module-level ``Q``, ``partial_a_Q`` and ``partial_b_Q``.
    """
    # --- gradient descent ---
    (a, b) = (3.5, -2)
    eta = 0.001  # learning rate
    history = [(a, b)]
    while True:
        descent_a = partial_a_Q(a, b)
        descent_b = partial_b_Q(a, b)
        a = a - eta * descent_a
        b = b - eta * descent_b
        history.append((a, b))
        # Fix: compare gradient *magnitudes*.  The original tested the
        # signed values (`descent_a < 0.001`), which terminates as soon as
        # either component goes negative — long before convergence.
        if abs(descent_a) < 0.001 and abs(descent_b) < 0.001:
            break
    # --- plot graphs ---
    _, ax = plt.subplots()
    ax.set_xlabel("value of a")
    ax.set_ylabel("value of b")
    # contour of sqrt(Q) over the parameter grid
    a_range = np.arange(0, 4, 0.01)
    b_range = np.arange(-5, 5, 0.01)
    q_values = np.zeros(shape=(len(b_range), len(a_range)))
    for bi, b_val in enumerate(b_range):
        for ai, a_val in enumerate(a_range):
            q_values[bi][ai] = np.sqrt(Q(a_val, b_val))
    ax.contour(a_range, b_range, q_values, 100, cmap="RdGy", linewidths=0.5)
    # descent trajectory: red dot at the start, red segments between steps
    for i, (pa, pb) in enumerate(history):
        if i == len(history) - 1:
            break
        if i == 0:
            ax.scatter([pa], [pb], color="red", s=40, zorder=100)
        (a_next, b_next) = history[i + 1]
        ax.plot([pa, a_next], [pb, b_next], color="red")
    plt.show()
def plot_linerReg_point():
    """Highlight the residual of one sample (index 5) against the fixed
    line y = x + 1, on top of a scatter of all data points."""
    sample = 5
    line_ys = 1.0 * xs + 1.0
    plt.vlines([xs[sample]], [line_ys[sample]], [ys[sample]],
               color="red", linestyles="dotted")
    plt.scatter(xs, ys, color="black", s=20)
    plt.plot(xs, line_ys, color="red", linewidth=1.5)
    plt.show()
def partial_a_Q_i(a, b, i):
    """Gradient of sample ``i``'s squared residual w.r.t. the slope ``a``."""
    residual = ys[i] - (a * xs[i] + b)
    return -2 * xs[i] * residual
def partial_b_Q_i(a, b, i):
    """Gradient of sample ``i``'s squared residual w.r.t. the intercept ``b``."""
    residual = ys[i] - (a * xs[i] + b)
    return -2 * residual
def plot_contour_SGD():
    """Run stochastic gradient descent three times from (3.5, -2) and draw
    the three noisy optimisation paths (red/green/blue) on a contour plot
    of sqrt(Q).

    Uses the module-level ``Q``, ``partial_a_Q_i``, ``partial_b_Q_i`` and
    ``randrange``.  Paths differ between runs because each step uses a
    randomly chosen sample.
    """
    # --- update parameter values ---
    eta = 0.001  # learning rate
    # A plain list is the idiomatic container here; the original used a
    # dtype=object ndarray, which buys nothing over a list.
    histories = []
    for _ in range(3):
        (a, b) = (3.5, -2)
        history = [(a, b)]
        for _ in range(7000):
            i = randrange(0, len(xs))  # one random sample per SGD step
            descent_a = partial_a_Q_i(a, b, i)
            descent_b = partial_b_Q_i(a, b, i)
            a = a - eta * descent_a
            b = b - eta * descent_b
            history.append((a, b))
        histories.append(history)
    # --- plot graphs ---
    _, ax = plt.subplots()
    ax.set_xlabel("value of a")
    ax.set_ylabel("value of b")
    # contour of sqrt(Q) over the parameter grid
    a_range = np.arange(1, 4, 0.01)
    b_range = np.arange(-4, 2, 0.01)
    q_values = np.zeros(shape=(len(b_range), len(a_range)))
    for bi, b_val in enumerate(b_range):
        for ai, a_val in enumerate(a_range):
            q_values[bi][ai] = np.sqrt(Q(a_val, b_val))
    ax.contour(a_range, b_range, q_values, 100, cmap="RdGy", linewidths=0.5)
    # one coloured trajectory per run, black dot at the shared start point
    for history, color in zip(histories, ["red", "green", "blue"]):
        for i, (pa, pb) in enumerate(history):
            if i == len(history) - 1:
                break
            if i == 0:
                ax.scatter([pa], [pb], color="black", s=40, zorder=100)
            (a_next, b_next) = history[i + 1]
            ax.plot([pa, a_next], [pb, b_next], color=color,
                    linewidth=0.7, alpha=0.2, zorder=100)
    plt.show()
# plot_liner_regression()
# plot_line_intercept()
# plot_countour_Q()
# plot_countour_GD()
# plot_linerReg_point()
# plot_contour_SGD()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment