Created
July 2, 2018 16:24
-
-
Save feryandi/c8f3b132203008a4b14bbae03db397ec to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import matplotlib.pyplot as plt | |
# Raw training data: each row is [x, y] (feature, target).
original_training_set = [[1, 1000], [2, 5000], [3, 4500], [5, 7500], [8, 10000], [10, 10000]]

# Prepend the intercept term (x0 = 1) to each example. Build fresh rows so
# original_training_set is left untouched — the old insert-in-place loop
# mutated it and aliased the same row objects into both lists.
training_set = [[1] + row for row in original_training_set]

m = len(training_set)         # number of training examples
n = len(training_set[0]) - 1  # number of parameters (columns minus the target)
theta = [10] * n              # initial parameter vector
alpha = 0.001                 # learning rate
iteration = 1000              # intended max-iteration cap (not wired into the loop below)
error = 0.1                   # convergence threshold on the change in cost
def print_h():
    """Return the current hypothesis as a human-readable formula string.

    Builds "t0 * x0 + t1 * x1 + ..." from the global ``theta``. Using
    ``' + '.join`` fixes the old ``formula[:-2]`` slice, which removed only
    two of the three separator characters and left a trailing space.
    """
    return ' + '.join('{} * x{}'.format(theta[j], j) for j in range(n))
def h(x):
    """Hypothesis: dot product of the global ``theta`` with the first
    ``n`` entries of ``x`` (``x[0]`` is the intercept term, always 1)."""
    return sum(theta[j] * x[j] for j in range(n))
def cost_function():
    """Return half the sum of squared residuals over the training set.

    Each row's last column is the target y; ``h`` ignores it because it
    only reads the first ``n`` entries.
    """
    residuals = (h(example) - example[-1] for example in training_set)
    return 0.5 * sum(r * r for r in residuals)
def gradient_descent(theta, j):
    """Return one batch gradient-descent update for a single parameter.

    ``theta`` here is the *scalar* current value of parameter ``j`` (it
    shadows the global vector of the same name); nothing is mutated —
    the caller commits the returned value.
    """
    gradient = sum((h(row[:-1]) - row[-1]) * row[j] for row in training_set)
    return theta - alpha * gradient
# Batch gradient descent: iterate until the cost changes by less than
# `error` between consecutive iterations. The original evaluated
# cost_function() twice per iteration (once in the condition, once to
# store it); compute it once and reuse the value.
previous_cost = 0
current_cost = cost_function()
while abs(previous_cost - current_cost) > error:
    previous_cost = current_cost
    # Compute every new parameter from the *old* theta (simultaneous
    # update), then commit them all at once.
    theta = [gradient_descent(theta[j], j) for j in range(n)]
    current_cost = cost_function()

print('H(x) = {}'.format(print_h()))

# Plot the fitted line (red) against the raw data points (blue dots);
# column 1 of each row is the feature x, column 2 is the target y.
xs = [row[1] for row in training_set]
plt.plot(xs, [h(row) for row in training_set], 'r')
plt.plot(xs, [row[2] for row in training_set], 'bo')
plt.show()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment