# Custom gradient-descent Linear, Ridge and Lasso regression vs. scikit-learn,
# evaluated on the diabetes dataset. Gist by @creotiv, created April 24, 2021.
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model
from sklearn.metrics import mean_squared_error, r2_score
# Load the diabetes dataset
diabetes_X, diabetes_y = datasets.load_diabetes(return_X_y=True)
# Use all 10 features (no column selection; the custom models below fit a multivariate A)
# Split the data into training/testing sets
diabetes_X_train = diabetes_X[:-20]
diabetes_X_test = diabetes_X[-20:]
# Split the targets into training/testing sets
diabetes_y_train = diabetes_y[:-20]
diabetes_y_test = diabetes_y[-20:]
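# Optional sketch (not in the original gist): the same 20-sample hold-out split can be
# produced with scikit-learn's train_test_split helper; shuffle=False keeps the last
# 20 rows as the test set. Defined only as an illustration, not used below.
def split_diabetes_with_sklearn():
    from sklearn.model_selection import train_test_split
    X_train, X_test, y_train, y_test = train_test_split(
        diabetes_X, diabetes_y, test_size=20, shuffle=False)
    return X_train, X_test, y_train, y_test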
def train_custom_linear_model(X, Y, XT, YT, lr=0.01, toll=0.01):
    # Model: F(x) = A*x + b, fitted with full-batch gradient descent on the MSE
    n = X.shape[0]
    A = np.random.randn(X.shape[1])
    b = 0
    error_prev = 0
    _toll = toll + 1

    def mse(x, y):
        return np.sum((x - y)**2) / n

    def predict(x):
        return np.dot(x, A) + b

    # Iterate until the error changes by less than `toll` between passes
    while _toll > toll:
        Y_pred = predict(X)
        error = mse(Y, Y_pred)
        # E = 1/n * sum((y - F(x))^2) = 1/n * sum((y - A*x - b)^2)
        # dE/dA = -2/n * sum((y - F(x)) * x)
        dA = -2 * np.dot(X.T, Y - Y_pred) / n
        # dE/db = -2/n * sum(y - F(x))
        db = -2 * np.sum(Y - Y_pred) / n
        A = A - lr * dA
        b = b - lr * db
        _toll = abs(error_prev - error)
        error_prev = error

    # Evaluate on the held-out test set
    Y_pred = predict(XT)
    print('Mean squared error: %.2f'
          % mean_squared_error(YT, Y_pred))
    # The coefficient of determination: 1 is perfect prediction
    print('Coefficient of determination: %.2f'
          % r2_score(YT, Y_pred))
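# Sanity-check sketch (not in the original gist): the gradient-descent fit above should
# approach the closed-form ordinary-least-squares solution. A minimal version using
# np.linalg.lstsq on X augmented with a ones column; the helper name is illustrative only.
def closed_form_linear_fit(X, Y):
    # Append a column of ones so the intercept b is estimated together with A
    Xb = np.hstack([X, np.ones((X.shape[0], 1))])
    coef, *_ = np.linalg.lstsq(Xb, Y, rcond=None)
    A, b = coef[:-1], coef[-1]
    return A, b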
def train_custom_ridge_model(X, Y, XT, YT, alpha=1.0, lr=0.01, toll=0.01):
    '''
    Performs L2 regularization, i.e. adds a penalty equal to the square of the
    magnitude of the coefficients.
    Minimization objective = Loss + alpha * (sum of squared coefficients)
    '''
    # Model: F(x) = A*x + b
    n = X.shape[0]
    A = np.random.randn(X.shape[1])
    b = 0
    error_prev = 0
    _toll = toll + 1

    def mse(x, y):
        return np.sum((x - y)**2) / n

    def predict(x):
        return np.dot(x, A) + b

    while _toll > toll:
        Y_pred = predict(X)
        error = mse(Y, Y_pred)
        # E = 1/n * (sum((y - A*x - b)^2) + alpha * sum(A^2))
        # dE/dA = 1/n * (-2 * sum((y - F(x)) * x) + 2 * alpha * A)
        dA = (-2 * np.dot(X.T, Y - Y_pred) + alpha * 2 * A) / n
        # dE/db = -2/n * sum(y - F(x))  (the intercept is not penalized)
        db = -2 * np.sum(Y - Y_pred) / n
        A = A - lr * dA
        b = b - lr * db
        _toll = abs(error_prev - error)
        error_prev = error

    Y_pred = predict(XT)
    print('Mean squared error: %.2f'
          % mean_squared_error(YT, Y_pred))
    # The coefficient of determination: 1 is perfect prediction
    print('Coefficient of determination: %.2f'
          % r2_score(YT, Y_pred))
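# Sketch (not in the original gist): with the intercept left unpenalized, the ridge
# objective above has a closed-form minimizer. Center X and Y, solve
# A = (Xc^T Xc + alpha*I)^-1 Xc^T Yc, then recover b from the means.
# The helper name is illustrative only.
def closed_form_ridge_fit(X, Y, alpha=1.0):
    X_mean, Y_mean = X.mean(axis=0), Y.mean()
    Xc, Yc = X - X_mean, Y - Y_mean
    A = np.linalg.solve(Xc.T @ Xc + alpha * np.eye(X.shape[1]), Xc.T @ Yc)
    b = Y_mean - X_mean @ A
    return A, b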
def train_custom_lasso_model(X, Y, XT, YT, alpha=1.0, lr=0.01, toll=0.01):
    '''
    Performs L1 regularization, i.e. adds a penalty equal to the absolute value of
    the magnitude of the coefficients.
    Minimization objective = Loss + alpha * (sum of absolute values of coefficients)
    '''
    # Model: F(x) = A*x + b
    n = X.shape[0]
    A = np.random.randn(X.shape[1])
    b = 0
    error_prev = 0
    _toll = toll + 1

    def mse(x, y):
        return np.sum((x - y)**2) / n

    def predict(x):
        return np.dot(x, A) + b

    while _toll > toll:
        Y_pred = predict(X)
        error = mse(Y, Y_pred)
        # E = 1/n * (sum((y - A*x - b)^2) + alpha * sum(|A|))
        # dE/dA = 1/n * (-2 * sum((y - F(x)) * x) + alpha * sign(A))   (subgradient of |A|)
        dA = (-2 * np.dot(X.T, Y - Y_pred) + alpha * np.sign(A)) / n
        # dE/db = -2/n * sum(y - F(x))  (the intercept is not penalized)
        db = -2 * np.sum(Y - Y_pred) / n
        A = A - lr * dA
        b = b - lr * db
        _toll = abs(error_prev - error)
        error_prev = error

    Y_pred = predict(XT)
    print('Mean squared error: %.2f'
          % mean_squared_error(YT, Y_pred))
    # The coefficient of determination: 1 is perfect prediction
    print('Coefficient of determination: %.2f'
          % r2_score(YT, Y_pred))
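# Sketch (not in the original gist): the subgradient step above never drives
# coefficients exactly to zero. A common alternative is proximal gradient descent
# (ISTA): take a gradient step on the MSE term only, then soft-threshold each
# coefficient by lr*alpha/n. Helper names are illustrative only.
def soft_threshold(w, t):
    return np.sign(w) * np.maximum(np.abs(w) - t, 0.0)

def ista_lasso_step(A, b, X, Y, alpha, lr):
    n = X.shape[0]
    resid = Y - (X @ A + b)
    A_new = soft_threshold(A - lr * (-2.0 / n) * (X.T @ resid), lr * alpha / n)
    b_new = b - lr * (-2.0 / n) * np.sum(resid)
    return A_new, b_new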
def train_stock_linear_model():
    # Create linear regression object
    regr = linear_model.LinearRegression()
    # Train the model using the training sets
    regr.fit(diabetes_X_train, diabetes_y_train)
    # Make predictions using the testing set
    diabetes_y_pred = regr.predict(diabetes_X_test)
    # The mean squared error
    print('Mean squared error: %.2f'
          % mean_squared_error(diabetes_y_test, diabetes_y_pred))
    # The coefficient of determination: 1 is perfect prediction
    print('Coefficient of determination: %.2f'
          % r2_score(diabetes_y_test, diabetes_y_pred))
def train_stock_ridge_model():
    # Create ridge regression object
    regr = linear_model.Ridge(alpha=1)
    # Train the model using the training sets
    regr.fit(diabetes_X_train, diabetes_y_train)
    # Make predictions using the testing set
    diabetes_y_pred = regr.predict(diabetes_X_test)
    # The mean squared error
    print('Mean squared error: %.2f'
          % mean_squared_error(diabetes_y_test, diabetes_y_pred))
    # The coefficient of determination: 1 is perfect prediction
    print('Coefficient of determination: %.2f'
          % r2_score(diabetes_y_test, diabetes_y_pred))
def train_stock_lasso_model():
    # Create lasso regression object
    regr = linear_model.Lasso(alpha=1)
    # Train the model using the training sets
    regr.fit(diabetes_X_train, diabetes_y_train)
    # Make predictions using the testing set
    diabetes_y_pred = regr.predict(diabetes_X_test)
    # The mean squared error
    print('Mean squared error: %.2f'
          % mean_squared_error(diabetes_y_test, diabetes_y_pred))
    # The coefficient of determination: 1 is perfect prediction
    print('Coefficient of determination: %.2f'
          % r2_score(diabetes_y_test, diabetes_y_pred))
print('Sklearn Linear================')
train_stock_linear_model()
print('Our Linear ====================')
train_custom_linear_model(diabetes_X_train, diabetes_y_train, diabetes_X_test, diabetes_y_test, 0.9, 0.001)
print('Sklearn Ridge================')
train_stock_ridge_model()
print('Our Ridge ====================')
train_custom_ridge_model(diabetes_X_train, diabetes_y_train, diabetes_X_test, diabetes_y_test,1.0, 0.9,0.001)
print('Sklearn Lasso================')
train_stock_lasso_model()
print('Our Lasso ====================')
train_custom_lasso_model(diabetes_X_train, diabetes_y_train, diabetes_X_test, diabetes_y_test,1.0, 0.9,0.001)
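# The matplotlib import at the top is otherwise unused; a minimal predicted-vs-actual
# plot on the test set as a sketch (not part of the original comparison). A plain
# sklearn LinearRegression is refit here only so this block is self-contained.
_regr = linear_model.LinearRegression().fit(diabetes_X_train, diabetes_y_train)
_pred = _regr.predict(diabetes_X_test)
plt.scatter(diabetes_y_test, _pred)
plt.xlabel('Actual target')
plt.ylabel('Predicted target')
plt.title('Linear regression on the diabetes test set')
plt.show()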