import numpy as np
import matplotlib.pyplot as plt
# read data
data = np.loadtxt("linear_data.csv", delimiter=',', skiprows=1)
train_x = data[:, 0:2]
train_y = data[:, 2]
# initialize parameters randomly (bias + 2 weights)
theta = np.random.randn(3)
# standardization
mu = train_x.mean(axis=0)
sigma = train_x.std(axis=0)
def standardizer(x):
    return (x - mu) / sigma
std_x = standardizer(train_x)
# build the design matrix: prepend a bias term of 1 to each standardized sample
def to_matrix(std_x):
    return np.array([[1, x1, x2] for x1, x2 in std_x])
mat_x = to_matrix(std_x)
# sigmoid function: maps the linear combination of x and theta to a probability in (0, 1)
def f(x):
    return 1 / (1 + np.exp(-np.dot(x, theta)))
# number of updates (epochs)
epoch = 2000
# learning rate
ETA = 1e-3
# update parameter
for _ in range(epoch):
    """
    f(mat_x) - train_y: (20,)
    mat_x: (20, 3)
    theta: (3,)
    dot product: (20,) x (20, 3) -> (3,)
    """
    theta = theta - ETA * np.dot(f(mat_x) - train_y, mat_x)
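# Optional sanity check (not part of the original gist): cross-entropy loss on the
# training data after the updates above; it should be small if training converged.
pred = f(mat_x)
loss = -np.mean(train_y * np.log(pred) + (1 - train_y) * np.log(1 - pred))
print('training cross-entropy:', loss)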
# decision boundary: theta[0] + theta[1] * x1 + theta[2] * x2 = 0, solved for x2
x1 = np.linspace(-2, 2, 100)
x2 = -(theta[0] + x1 * theta[1]) / theta[2]
plt.plot(std_x[train_y == 1, 0], std_x[train_y == 1, 1], 'o') # train data of class 1
plt.plot(std_x[train_y == 0, 0], std_x[train_y == 0, 1], 'x') # train data of class 0
plt.plot(x1, x2, linestyle='dashed') # plot the line we learned
plt.show()
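# Illustrative usage (not part of the original gist): classify new raw inputs with
# the learned theta, reusing standardizer / to_matrix / f from above. The sample
# values below are made up for demonstration.
def classify(x):
    # predict class 1 when the estimated probability is at least 0.5
    return (f(to_matrix(standardizer(x))) >= 0.5).astype(int)

print(classify(np.array([[200, 100], [100, 400]])))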