import tensorflow as tf

def shallow_cnn(x):
    # Reshape the flat input vector to a 4-D image tensor for the convolutional layers
    x_image = tf.reshape(x, [-1, data_width, data_height, 1])
    # Max-norm constraint applied to the convolutional kernels
    regularizer = tf.keras.constraints.MaxNorm(max_value=2)
    # Convolutional Layer #1
    W_conv1 = weight_variable([5, 5, 1, 32])
    b_conv1 = bias_variable([32])
    h_conv1 = tf.nn.relu(tf.nn.conv2d(x_image, regularizer(W_conv1),
                                      strides=[1, 1, 1, 1], padding='SAME') + b_conv1)
    # Pooling Layer #1
    h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    # Convolutional Layer #2
    W_conv2 = weight_variable([5, 5, 32, 64])
    b_conv2 = bias_variable([64])
    h_conv2 = tf.nn.relu(tf.nn.conv2d(h_pool1, regularizer(W_conv2),
                                      strides=[1, 1, 1, 1], padding='SAME') + b_conv2)
    # Batch Normalization
    batch_norm = tf.layers.batch_normalization(h_conv2, momentum=0.1, epsilon=1e-5)
    # Pooling Layer #2
    h_pool2 = tf.nn.max_pool(batch_norm, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    # Dense Layer (two rounds of 2x2 pooling reduce a 28x28 input to 7x7)
    W_fc1 = weight_variable([7 * 7 * 64, 1024])
    b_fc1 = bias_variable([1024])
    # Flatten
    h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
    h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
    # Dropout
    keep_prob = tf.placeholder(tf.float32)
    h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
    # Logits Layer
    W_fc2 = weight_variable([1024, label_count])
    b_fc2 = bias_variable([label_count])
    # Tighter max-norm constraint on the output-layer weights
    regularizer = tf.keras.constraints.MaxNorm(max_value=0.5)
    y_conv = tf.nn.softmax(tf.matmul(h_fc1_drop, regularizer(W_fc2)) + b_fc2)
    return y_conv, keep_prob
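
The function refers to weight_variable, bias_variable, data_width, data_height, and label_count, which are not defined in this gist and presumably live in the surrounding script. A minimal sketch of those pieces, assuming MNIST-sized 28x28 grayscale inputs with 10 classes and the usual TF 1.x truncated-normal / small-constant initializers, might look like the following; the exact shapes and values are assumptions, not part of the original.

import tensorflow as tf

# Assumed input geometry and class count (e.g. MNIST-style 28x28 grayscale images)
data_width = 28
data_height = 28
label_count = 10

def weight_variable(shape):
    # Truncated-normal initialization, a common choice for TF 1.x conv/dense weights
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1))

def bias_variable(shape):
    # Small positive constant bias keeps ReLU units active early in training
    return tf.Variable(tf.constant(0.1, shape=shape))

# Example wiring: feed a flattened image batch and get back the softmax output
# and the dropout keep-probability placeholder
x = tf.placeholder(tf.float32, [None, data_width * data_height])
y_conv, keep_prob = shallow_cnn(x)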