@alextanhongpin
Last active December 26, 2017 07:40
Gradient descent in Python
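The script below fits a simple linear regression y = B_0 + B_1 * x with stochastic gradient descent. For each training pair (X_i, y_i) it computes the prediction error, error = (B_0 + B_1 * X_i) - y_i, then applies the updates B_0 := B_0 - alpha * error and B_1 := B_1 - alpha * error * X_i, repeating over the data for a fixed number of epochs.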
import numpy as np

# Training data for a one-variable linear regression.
X = [1, 2, 4, 3, 5]
y = [1, 3, 3, 2, 5]

alpha = 0.01  # learning rate
B_0 = 0       # intercept, initialised to zero
B_1 = 0       # slope, initialised to zero
EPOCHS = 10   # number of passes over the training data
def predict(X_i, B_0, B_1):
    # Predicted value for a single input using the current coefficients.
    return B_0 + B_1 * X_i


def calculate_error(X_i, y_i, B_0, B_1):
    # Prediction error for a single training pair.
    return predict(X_i, B_0, B_1) - y_i


def calculate_B_0(X_i, y_i, B_0, B_1, alpha):
    # Gradient descent update for the intercept.
    error = calculate_error(X_i, y_i, B_0, B_1)
    return B_0 - alpha * error


def calculate_B_1(X_i, y_i, B_0, B_1, alpha):
    # Gradient descent update for the slope.
    error = calculate_error(X_i, y_i, B_0, B_1)
    return B_1 - alpha * error * X_i
# Stochastic gradient descent: update the coefficients after every
# training example, for EPOCHS passes over the data.
for epoch in range(EPOCHS):
    for i, X_i in enumerate(X):
        y_i = y[i]
        B_0 = calculate_B_0(X_i, y_i, B_0, B_1, alpha)
        # Note: B_1 is updated using the freshly updated B_0.
        B_1 = calculate_B_1(X_i, y_i, B_0, B_1, alpha)

print(B_0, B_1)
# B_0 = 0.2420425534906209
# B_1 = 0.8182420919125936
def f(X_i):
    # Prediction using the learned coefficients.
    return B_0 + B_1 * X_i


y_pred = []
for X_i in X:
    pred = f(X_i)
    y_pred.append(pred)
    print(pred)
def RMSE(y_true, y_hat):
    # Root mean squared error between actual and predicted values.
    return np.sqrt(np.mean(np.square(np.array(y_true) - np.array(y_hat))))


print(RMSE(y, y_pred))
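As a quick sanity check (an addition, not part of the original gist), the learned coefficients can be compared against the closed-form least-squares fit; numpy's polyfit with degree 1 returns the slope and intercept directly, roughly 0.8 and 0.4 for this data.

# Closed-form least-squares fit for comparison with B_0 and B_1.
slope, intercept = np.polyfit(X, y, 1)
print(intercept, slope)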