Skip to content

Instantly share code, notes, and snippets.

@SuvroBaner
Created December 31, 2019 12:26
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save SuvroBaner/fb82201e09709643ccd6659a0d7a296b to your computer and use it in GitHub Desktop.
def model(X_train, Y_train, X_test, Y_test, learning_rate = 0.0001, num_epochs = 1500, minibatch_size = 32, print_cost = True):
    """
    Implements a three-layer tensorflow neural network: LINEAR->RELU->LINEAR->RELU->LINEAR->SOFTMAX.

    Arguments:
    X_train -- training-set features, shape (n_x, m): n_x input features, m examples
    Y_train -- training-set labels (one-hot), shape (n_y, m): n_y classes
    X_test  -- test-set features, shape (n_x, number of test examples)
    Y_test  -- test-set labels (one-hot), same layout as Y_train
    learning_rate -- step size for the Adam optimizer
    num_epochs -- number of full passes over the training set
    minibatch_size -- number of examples per mini-batch
    print_cost -- if True, print the cost every 100 epochs and record it every 5 for plotting

    Returns:
    parameters -- parameters learnt by the model. They can then be used to predict.
    """
    ops.reset_default_graph()   # allow rerunning the model without clobbering existing tf variables
    tf.set_random_seed(1)       # keep graph-level randomness reproducible
    seed = 3                    # mini-batch shuffle seed, advanced once per epoch
    (n_x, m) = X_train.shape    # n_x: input size (features); m: number of training examples
    n_y = Y_train.shape[0]      # n_y: number of classes
    costs = []                  # epoch costs sampled every 5 epochs, for the learning-curve plot

    # Build the computation graph: placeholders -> forward pass -> cost -> Adam step.
    X, Y = create_placeholders(n_x, n_y)
    parameters = initialize_parameters()
    Z3 = forward_propagation(X, parameters)
    cost = compute_cost(Z3, Y)
    optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate).minimize(cost)
    init = tf.global_variables_initializer()

    # Start the session and execute the graph.
    with tf.Session() as sess:
        sess.run(init)

        # Training loop.
        for epoch in range(num_epochs):
            epoch_cost = 0.     # running average cost over this epoch
            seed = seed + 1     # different (but deterministic) shuffle each epoch
            minibatches = random_mini_batches(X_train, Y_train, minibatch_size, seed)
            # BUG FIX: average over the *actual* number of mini-batches returned.
            # The original int(m / minibatch_size) ignores the final partial batch
            # when m is not a multiple of minibatch_size, overstating epoch_cost.
            num_minibatches = len(minibatches)

            for minibatch in minibatches:
                (minibatch_X, minibatch_Y) = minibatch
                # One optimization step; also fetch the cost on this mini-batch.
                _, minibatch_cost = sess.run([optimizer, cost], feed_dict = {X : minibatch_X, Y : minibatch_Y})
                epoch_cost += minibatch_cost / num_minibatches

            if print_cost and epoch % 100 == 0:
                print("Cost after epoch %i: %f" % (epoch, epoch_cost))
            if print_cost and epoch % 5 == 0:
                costs.append(epoch_cost)

        # Plot the learning curve.
        plt.plot(np.squeeze(costs))
        plt.ylabel('cost')
        plt.xlabel('iterations (per fives)')
        plt.title("Learning Rate = " + str(learning_rate))
        plt.show()

        # Materialize the learned tf variables into concrete arrays.
        parameters = sess.run(parameters)
        print("Parameters have been trained")

        # Predicted class vs. one-hot label; tf.argmax with no axis reduces along
        # axis 0, which here is the class axis since Z3 and Y are (n_y, m).
        correct_prediction = tf.equal(tf.argmax(Z3), tf.argmax(Y))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
        print("Train Accuracy: ", accuracy.eval({X : X_train, Y : Y_train}))
        print("Test Accuracy: ", accuracy.eval({X : X_test, Y : Y_test}))

        return parameters
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment