class LinearRegressor:
    def __init__(self, iters, X, y, lr):
        self.iters = iters
        self.X = X
        self.y = y
        self.lr = lr
        self.b = 0  # bias, initialised to 0
        self.w = 1  # weight, initialised to 1

    def hypothesis_function(self, x):
        # h(x) = b + w * x for every sample in x
        return [self.b + xi * self.w for xi in x]

    def cost_function(self):
        # Returns the gradients of the mean squared error with respect to
        # the bias and the weight: mean(h(x) - y) and mean((h(x) - y) * x)
        predictions = self.hypothesis_function(self.X)
        residuals = [p - yi for p, yi in zip(predictions, self.y)]
        grad_b = sum(residuals) / len(self.X)
        grad_w = sum(r * xi for r, xi in zip(residuals, self.X)) / len(self.X)
        return grad_b, grad_w

    def gradient_descent(self):
        # One gradient-descent update of the bias and the weight
        grad_b, grad_w = self.cost_function()
        print(f"previous cost: {grad_b}")
        self.b = self.b - self.lr * grad_b
        self.w = self.w - self.lr * grad_w
        return self.b, self.w, self.cost_function()[0]

    def train(self):
        for i in range(self.iters + 1):
            bout, wout, cost = self.gradient_descent()
            print(f"bias: {bout}, weight: {wout}, cost: {cost}\n")

    def predict(self, x):
        return self.hypothesis_function(x)
# Example: X and y must be sequences of the same length
X = [1, 2, 3, 4, 5]
y = [2, 4, 6, 8, 10]  # y = 2 * x, so the ideal fit is w = 2, b = 0
model = LinearRegressor(iters=2, X=X, y=y, lr=0.01)
model.train()
print(model.predict([6, 7]))
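
For reference, a minimal NumPy sketch of a single update step; the arrays and learning rate below are illustrative assumptions mirroring the example above, not part of the original gist. It reproduces the quantities computed by cost_function and gradient_descent:

import numpy as np

# Assumed toy data: one gradient-descent step on h(x) = b + w * x
X = np.array([1, 2, 3, 4, 5], dtype=float)
y = np.array([2, 4, 6, 8, 10], dtype=float)
b, w, lr = 0.0, 1.0, 0.01

residuals = (b + w * X) - y          # h(x) - y for every sample
grad_b = residuals.mean()            # same value as cost_function()[0]
grad_w = (residuals * X).mean()      # same value as cost_function()[1]
b, w = b - lr * grad_b, w - lr * grad_w
print(b, w)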