Created
February 20, 2020 17:17
-
-
Save m8ttyB/f5d242d156a38ea20cd07414716cae08 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np
# data_prep is a course-provided module; presumably it loads and splits the
# dataset into train/test features and targets — confirm against the course repo.
from data_prep import features, targets, features_test, targets_test

# Fix the RNG seed so the random weight initialization is reproducible.
np.random.seed(21)
def sigmoid(x):
    """
    Logistic sigmoid activation, 1 / (1 + e^-x).

    Works elementwise on scalars and numpy arrays alike.
    """
    exp_neg = np.exp(-x)
    return 1 / (1 + exp_neg)
# Hyperparameters
n_hidden = 2       # size of the single hidden layer
epochs = 900       # passes over the full training set
learnrate = 0.005  # gradient-descent step size

n_records, n_features = features.shape
last_loss = None   # previous reported loss, used for the increasing-loss warning

# Draw initial weights from N(0, 1/sqrt(n_features)) so the sigmoid inputs
# start in the unit's sensitive (non-saturated) range.
init_scale = 1 / n_features ** 0.5
weights_input_hidden = np.random.normal(scale=init_scale,
                                        size=(n_features, n_hidden))
weights_hidden_output = np.random.normal(scale=init_scale,
                                         size=n_hidden)
# Train the two-layer network with full-batch gradient descent + backprop.
for e in range(epochs):
    # Gradient accumulators, zeroed at the start of every epoch.
    del_w_input_hidden = np.zeros(weights_input_hidden.shape)
    del_w_hidden_output = np.zeros(weights_hidden_output.shape)
    for x, y in zip(features.values, targets):
        ## Forward pass ##
        hidden_input = np.dot(x, weights_input_hidden)
        hidden_output = sigmoid(hidden_input)
        output = sigmoid(np.dot(hidden_output, weights_hidden_output))

        ## Backward pass ##
        # Prediction error of the single output unit.
        error = y - output
        # Output error term: error scaled by the sigmoid derivative f'(h) = f(1-f).
        output_error_term = error * output * (1 - output)

        # Propagate the error back through the hidden->output weights,
        # scaled by each hidden unit's sigmoid derivative.
        hidden_error_term = np.dot(output_error_term, weights_hidden_output) * \
                            hidden_output * (1 - hidden_output)

        # Accumulate raw gradient steps. BUG FIX: the original multiplied by
        # learnrate here AND again in the per-epoch update below, applying
        # the learning rate twice.
        del_w_hidden_output += output_error_term * hidden_output
        del_w_input_hidden += hidden_error_term * x[:, None]

    # Apply the averaged batch step once per epoch. BUG FIX: the original
    # used `w += del_w + learnrate * del_w / n_records`, adding the whole
    # unscaled accumulator on top of the intended scaled step.
    weights_input_hidden += learnrate * del_w_input_hidden / n_records
    weights_hidden_output += learnrate * del_w_hidden_output / n_records

    # Report the training-set MSE ten times over the run.
    if e % (epochs / 10) == 0:
        # BUG FIX: evaluate on the full training set; the original forwarded
        # `x` (the last sample of the inner loop) and broadcast a scalar
        # prediction against the whole `targets` array.
        hidden_output = sigmoid(np.dot(features, weights_input_hidden))
        out = sigmoid(np.dot(hidden_output, weights_hidden_output))
        loss = np.mean((out - targets) ** 2)
        if last_loss and last_loss < loss:
            print("Train loss: ", loss, " WARNING - Loss Increasing")
        else:
            print("Train loss: ", loss)
        last_loss = loss
# Evaluate classification accuracy on the held-out test data.
test_hidden_act = sigmoid(np.dot(features_test, weights_input_hidden))
test_out = sigmoid(np.dot(test_hidden_act, weights_hidden_output))
# Threshold the sigmoid output at 0.5 to obtain binary class predictions.
test_predictions = test_out > 0.5
accuracy = np.mean(test_predictions == targets_test)
print("Prediction accuracy: {:.3f}".format(accuracy))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment