@jdmoore7 · Created December 9, 2019
Python implementation of binary logistic regression
import numpy as np

class LogReg():
    """
    Implementation of binary logistic regression
    """
    def __init__(self, dataframe, y_column):
        """
        Initialize the LogReg class with a pandas DataFrame and the name of the
        dependent variable column, y. All other columns are assumed to be
        independent variables, x.
        """
        self.y = dataframe[y_column].to_numpy()
        self.x = dataframe.drop(y_column, axis=1).to_numpy()
        # Initialize the weights and bias to small random positive values.
        self.weights = np.random.uniform(low=0.001, high=0.1, size=self.x.shape[1])
        self.bias = np.random.uniform(low=0.001, high=0.1)
    def forward(self, row):
        """
        Produce a predicted y-value given a row index (vector of x values).
        The prediction is the sigmoid of z, where z is the dot product of the
        weights and the x vector, plus the bias.
        """
        x_vars = self.x[row]
        z = np.dot(x_vars, self.weights) + self.bias
        y_pred = 1 / (1 + np.exp(-z))
        return y_pred
    def loss(self, row):
        """
        Standard logistic regression (cross-entropy) loss function.
        When y is 1, the loss evaluates to -ln(y_pred):
            when y_pred is near 1, -ln(1) assigns a loss near 0;
            when y_pred is near 0, -ln(0) assigns a large penalty.
        When y is 0, the loss evaluates to -ln(1 - y_pred):
            when y_pred is near 0, -ln(1 - 0) assigns a loss near 0;
            when y_pred is near 1, -ln(1 - 1) assigns a large penalty.
        """
        y_pred = self.forward(row)
        y_true = self.y[row]
        loss = -1 * (y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))
        return loss
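    # Worked example (illustrative values, not from the original gist):
    # for y_true = 1 and y_pred = 0.9, loss = -ln(0.9) ≈ 0.105;
    # for y_true = 1 and y_pred = 0.1, loss = -ln(0.1) ≈ 2.303.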
    def cost(self):
        """
        Cost function: the average loss over all rows.
        """
        return sum([self.loss(row) for row in range(self.x.shape[0])]) / self.x.shape[0]
    def gradient_loss_y_pred(self, row):
        """
        Returns the gradient of the loss function with respect to y_pred.
        """
        y_true = self.y[row]
        y_pred = self.forward(row)
        return -1 * y_true / y_pred + (1 - y_true) / (1 - y_pred)
    def gradient_y_pred_z(self, row):
        """
        Returns the gradient of y_pred with respect to z (np.dot(weights, x) + bias).
        This is the sigmoid derivative: y_pred * (1 - y_pred).
        """
        y_pred = self.forward(row)
        return y_pred * (1 - y_pred)
    def gradient_z_w(self, row):
        """
        Returns the gradient of z with respect to the weights. Since
        z = np.dot(weights, x) + bias, the partial derivative of z with
        respect to each weight is simply its corresponding x-value.
        """
        return self.x[row]
    def gradient_weights(self, row):
        """
        Returns the gradient of the loss with respect to the weights by
        stringing together the above 3 gradients via the chain rule.
        """
        return self.gradient_loss_y_pred(row) * self.gradient_y_pred_z(row) * self.gradient_z_w(row)
    def gradient_bias(self, row):
        """
        Returns the gradient of the loss with respect to the bias via the
        chain rule, similar to above. Note, the gradient of z with respect
        to the bias evaluates to 1, so only two factors are needed.
        """
        return self.gradient_loss_y_pred(row) * self.gradient_y_pred_z(row)
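    # Note: the chain-rule products above simplify algebraically to
    #   dL/dw = (y_pred - y_true) * x    and    dL/db = y_pred - y_true,
    # the familiar closed-form logistic regression gradients.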
    def train_model(self, iterations, lr):
        """
        Trains the LogReg model via batch gradient descent. Given a number of
        iterations and a learning rate, each epoch (iteration) initializes the
        weight and bias gradients to zero, then accumulates the gradients over
        all rows of the training data. Once all rows have been processed, the
        averaged gradients are applied to the weights and bias, scaled by the
        specified learning rate.
        """
        for epoch in range(iterations):
            d_w = np.zeros_like(self.weights)
            d_b = 0.0
            for i in range(self.x.shape[0]):
                d_w += self.gradient_weights(i)
                d_b += self.gradient_bias(i)
            d_w = d_w / self.x.shape[0]
            d_b = d_b / self.x.shape[0]
            self.weights -= lr * d_w
            self.bias -= lr * d_b
            print(self.cost())
    def predict(self, row):
        """
        Predict the class (0 or 1) for a new vector of x values (a raw feature
        vector, not a row index), using a decision threshold of 0.5.
        """
        z = np.dot(row, self.weights) + self.bias
        y_pred = 1 / (1 + np.exp(-z))
        if y_pred >= 0.5:
            return 1
        else:
            return 0
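
A minimal usage sketch on a synthetic dataset; the column names, generating relation, and hyperparameter values below are illustrative assumptions, not part of the original gist:

import numpy as np
import pandas as pd

# Build a small synthetic binary-classification dataset.
rng = np.random.default_rng(0)
n = 200
x1 = rng.normal(size=n)
x2 = rng.normal(size=n)
# The label depends on a linear combination of the features plus noise.
label = ((1.5 * x1 - 2.0 * x2 + rng.normal(scale=0.5, size=n)) > 0).astype(int)
df = pd.DataFrame({"x1": x1, "x2": x2, "label": label})

model = LogReg(df, "label")
model.train_model(iterations=500, lr=0.1)   # prints the cost once per epoch

# Classify a new observation (a raw feature vector, not a row index).
# Under the generating relation above (1.5*1 - 2.0*(-1) = 3.5 > 0),
# a trained model should place this point in class 1.
print(model.predict(np.array([1.0, -1.0])))

Since train_model accumulates gradients over every row before updating, this is full-batch gradient descent: one parameter update per epoch, with the per-epoch cost printed so convergence can be monitored.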