# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): scraped gist fragment — the function body is TRUNCATED below;
# the training loop (placeholders, forward prop, cost, optimizer, mini-batch
# epochs, session run) is not visible here. The trailing "| |" tokens are
# scrape residue from the gist's line-number column, left byte-identical.
# Assumes X_train is (n_x, m) with examples as columns — consistent with the
# shape unpack on the last visible line.
def model(X_train, Y_train, X_test, Y_test, learning_rate = 0.0001, num_epochs = 1500, minibatch_size = 32, print_cost = True): | |
""" | |
Implements a three-layer tensorflow neural network: LINEAR->RELU->LINEAR->RELU->LINEAR->SOFTMAX. | |
Returns: | |
parameters -- parameters learnt by the model. They can then be used to predict. | |
""" | |
ops.reset_default_graph() # to be able to rerun the model without overwriting tf variables | |
tf.set_random_seed(1) # to keep a consistent result | |
seed = 3 # to keep a consistent result, used in mini-batches | |
(n_x, m) = X_train.shape # n_x : input size (input features); m : num of examples in the train set |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — only the signature and docstring survive.
# The body (presumably transpose of Z3/Y followed by
# tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(...)) in this TF1
# course code — TODO confirm against the original gist) is cut off.
def compute_cost(Z3, Y): | |
""" | |
Arguments: | |
Z3 -- output of forward propagation (output of the last LINEAR unit), of shape (6, number of examples) | |
Y -- "true" labels vector placeholder, same shape as Z3 | |
Returns: | |
cost - Tensor of the cost function | |
""" |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — the fragment stops after the first linear
# unit Z1; the remaining layers (A1 = relu(Z1), Z2, A2, Z3 per the
# LINEAR->RELU->LINEAR->RELU->LINEAR architecture named in `model`'s docstring)
# are not visible. Code left byte-identical, including "| |" scrape residue.
# Assumes `parameters` holds the six tensors created by initialize_parameters().
def forward_propagation(X, parameters): | |
# Retrieve the parameters from the dictionary "parameters" | |
W1 = parameters['W1'] | |
b1 = parameters['b1'] | |
W2 = parameters['W2'] | |
b2 = parameters['b2'] | |
W3 = parameters['W3'] | |
b3 = parameters['b3'] | |
Z1 = tf.add(tf.matmul(W1, X), b1) |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — the `parameters` dict literal is cut off
# mid-construction (only "W1"/"b1" entries visible; presumably W2..b3 and a
# `return parameters` follow). Layer sizes are hard-coded for 12288-dim inputs
# (64*64*3 images per create_placeholders' docstring) -> 25 -> 12 -> 6 classes.
# Xavier init is seeded for reproducibility; biases start at zero.
def initialize_parameters(): | |
W1 = tf.get_variable("W1", [25, 12288], initializer = tf.contrib.layers.xavier_initializer(seed = 1)) | |
b1 = tf.get_variable("b1", [25, 1], initializer = tf.zeros_initializer()) | |
W2 = tf.get_variable("W2", [12, 25], initializer = tf.contrib.layers.xavier_initializer(seed = 1)) | |
b2 = tf.get_variable("b2", [12, 1], initializer = tf.zeros_initializer()) | |
W3 = tf.get_variable("W3", [6,12], initializer = tf.contrib.layers.xavier_initializer(seed = 1)) | |
b3 = tf.get_variable("b3", [6,1], initializer = tf.zeros_initializer()) | |
parameters = {"W1" : W1, | |
"b1" : b1, |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — the docstring is cut off and the body
# (presumably two tf.placeholder(tf.float32, shape=[n, None]) calls and a
# return, matching the Returns section below — TODO confirm) is not visible.
def create_placeholders(n_x, n_y): | |
""" | |
Arguments: | |
n_x -- scalar, size of an image vector (64 * 64 * 3 = 12288) | |
n_y -- scalar, number of classes (from 0 to 5, so n_y = 6) | |
Returns: | |
X -- placeholder for the data input, of shape [n_x, None] and dtype "tf.float32" | |
Y -- placeholder for the input labels, of shape [n_y, None] and dtype "tf.float32" |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — the final `return one_hot` (and any
# sess.close()) is cut off below the last visible line; as shown, the function
# would return None. axis=0 puts classes on rows, examples on columns, matching
# the docstring. `Con` is the number of classes (depth of the one-hot axis).
def one_hot_matrix(labels, Con): | |
""" | |
Creates a matrix where the i-th row corresponds to the ith class number and the jth column | |
corresponds to the jth training example. So if example j had a label i. Then entry (i,j) | |
will be 1. | |
""" | |
C = tf.constant(Con, name = 'C') | |
one_hot_matrix = tf.one_hot(indices = labels, depth = C, axis = 0) | |
with tf.Session() as sess: | |
one_hot = sess.run(one_hot_matrix) |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — the fragment stops after defining the two
# placeholders; the actual cost computation (presumably
# tf.nn.sigmoid_cross_entropy_with_logits fed via a session, per the docstring
# — TODO confirm) is not visible. Code left byte-identical.
def cost(logits, labels): | |
""" | |
Computes the cost using the sigmoid cross entropy | |
Arguments: | |
logits -- vector containing z, output of the last linear unit (before the final sigmoid activation) | |
labels -- vector of labels y (1 or 0) | |
""" | |
z = tf.placeholder(tf.float32, name = 'z') | |
y = tf.placeholder(tf.float32, name = 'y') |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
def sigmoid(z):
    """Evaluate the element-wise sigmoid of ``z`` with a tiny TF1 graph.

    A float32 placeholder is fed with ``z``, ``tf.sigmoid`` is applied, and
    the graph is executed inside a short-lived session.

    Arguments:
    z -- scalar or array-like value(s) to squash through the sigmoid

    Returns:
    The numpy result of running the sigmoid node on ``z``.
    """
    x = tf.placeholder(tf.float32, name = 'x')  # graph input fed at run time
    sig = tf.sigmoid(x)                         # sigmoid node over the placeholder
    # Session auto-closes via the context manager.
    with tf.Session() as sess:
        out = sess.run(sig, feed_dict = {x : z})
    return out

# Demo: sigmoid(1) ~ 0.7311, sigmoid(10) ~ 0.99995
print(sigmoid(1))
print(sigmoid(10))
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
# NOTE(review): TRUNCATED scrape — no `return result` or `sess.close()` is
# visible below the last line; as shown, the function returns None and leaks
# the session. Computes Y = W X + b on fixed-shape random numpy inputs
# ((5,4) @ (4,1) + (5,1) -> (5,1)). Seeding, if any, happens outside this view.
def linear_function(): | |
X = np.random.randn(4, 1) # Initializes X to be a random tensor of shape (4,1) | |
W = np.random.randn(5, 4) # Initializes W to be a random tensor of shape (5,4) | |
b = np.random.randn(5, 1) # Initializes b to be a random tensor of shape (5,1) | |
Y = tf.add(tf.matmul(W, X), b) # computation graph | |
sess = tf.Session() | |
result = sess.run(Y) # evaluating the computation graph |
# ---- gist file separator (GitHub "bidirectional Unicode" warning boilerplate removed) ----
import numpy as np
import tensorflow as tf

# Cost J(w) = w^2 - 10w + 25 = (w - 5)^2, expressed through a fed coefficient
# vector so the same graph minimizes any quadratic a*w^2 + b*w + c.
coefficients = np.array([[1.], [-10.], [25.]])

w = tf.Variable(0, dtype = tf.float32)      # trainable parameter, initialized to 0
x = tf.placeholder(tf.float32, [3, 1])      # 3x1 column of coefficients, fed at run time
cost = x[0][0] * w ** 2 + x[1][0] * w + x[2][0]
# One GD step per session run; the minimizing loop is outside this fragment.
train = tf.train.GradientDescentOptimizer(0.01).minimize(cost)
# ---- end of gist listing (pagination UI text removed) ----