Skip to content

Instantly share code, notes, and snippets.

@iamjalipo
Created February 12, 2022 13:25
Show Gist options
  • Save iamjalipo/89bfa037454dbe9c292ea8c4b9663b8d to your computer and use it in GitHub Desktop.
logistic-03
## Logistic Regression
import numpy as np
def sigmoid(x):
    """Logistic function: squash *x* element-wise into the open interval (0, 1)."""
    z = np.exp(-x)
    return 1 / (1 + z)
m = 90          # number of training examples
alpha = 0.0001  # gradient-descent learning rate

# FIX: the parameters were initialised as (m, 1) arrays — one copy of each
# theta per training example.  All five gradients below are (1, 1), so every
# row received the identical update and the model effectively had m redundant
# copies of each scalar parameter.  A single (1, 1) value per parameter is the
# intended shape; broadcasting keeps y, cost and the updates numerically
# identical while using 1/m of the memory.
theta_0 = np.zeros((1, 1))
theta_1 = np.zeros((1, 1))
theta_2 = np.zeros((1, 1))
theta_3 = np.zeros((1, 1))
theta_4 = np.zeros((1, 1))

epochs = 0
cost_func = []  # per-epoch cross-entropy cost, e.g. for plotting convergence

# NOTE(review): x_1..x_4 and y_train are assumed to be (m, 1) arrays defined
# earlier in the file (not visible here) — confirm their shapes match m = 90.
while epochs < 10000:
    # hypothesis: sigmoid of the linear combination of the four features
    y = sigmoid(theta_0 + theta_1 * x_1 + theta_2 * x_2
                + theta_3 * x_3 + theta_4 * x_4)

    # binary cross-entropy cost, averaged over the batch (a (1, 1) array)
    cost = (-np.dot(np.transpose(y_train), np.log(y))
            - np.dot(np.transpose(1 - y_train), np.log(1 - y))) / m

    # residual is shared by all five gradients — compute it once per epoch
    error = y - y_train

    # gradients of the cost w.r.t. each parameter, each (1, 1)
    theta_0_grad = np.dot(np.ones((1, m)), error) / m
    theta_1_grad = np.dot(np.transpose(x_1), error) / m
    theta_2_grad = np.dot(np.transpose(x_2), error) / m
    theta_3_grad = np.dot(np.transpose(x_3), error) / m
    theta_4_grad = np.dot(np.transpose(x_4), error) / m

    # simultaneous update of all parameters (grads were computed from the
    # pre-update thetas, as gradient descent requires)
    theta_0 = theta_0 - alpha * theta_0_grad
    theta_1 = theta_1 - alpha * theta_1_grad
    theta_2 = theta_2 - alpha * theta_2_grad
    theta_3 = theta_3 - alpha * theta_3_grad
    theta_4 = theta_4 - alpha * theta_4_grad

    cost_func.append(cost)
    epochs += 1
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment