import numpy as np
import matplotlib.pyplot as plt


class LinearRegression:

    def __init__(self, order):
        # One weight per polynomial term plus a bias term
        self.W = np.random.randn(order + 1)

    def fit(self, X, Y, alpha=1e-5, epochs=1000):
        # Append a column of ones so the bias is learned as an extra weight
        X = np.vstack((X, np.ones_like(X))).T
        Y = Y.T
        for _ in range(epochs):
            err = self.predict(X) - Y    # (Y_hat - Y)
            dL = X.T.dot(err)            # 2 X^T (Y_hat - Y), absorbing 2 into alpha
            self.W -= alpha * dL         # W <- W - alpha * dL/dW

    def predict(self, X):
        return X.dot(self.W)

    def coeff(self):
        return self.W.ravel()
if __name__ == '__main__':
    x = np.linspace(0, 25, 100)
    epsilon = 3 * np.random.randn(len(x))
    y = 3 * x + 1 + epsilon

    lr = LinearRegression(order=1)
    lr.fit(x, y)
    w, b = lr.coeff()

    plt.plot(x, y, 'bo')
    plt.plot(x, w * x + b, 'r')
    plt.xlabel("x")
    plt.ylabel("y")
    plt.show()

    print("Equation of the line is y = {:.0f}x + {:.0f}".format(w, b))