# Gist by @igorcoding, created February 5, 2016 19:06
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Download (if needed) and load the MNIST dataset with one-hot encoded labels.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
image_size = 28  # MNIST images are 28x28 pixels, flattened to 784 values
n_labels = 10    # digit classes 0-9

def create_w_b(n_prev_layer, n_next_layer):
    # Build a weight/bias pair for a fully connected layer of the given
    # dimensions (defined here but not used by main() below).
    w = tf.Variable(tf.truncated_normal([n_prev_layer, n_next_layer]))
    b = tf.Variable(tf.zeros([n_next_layer]))
    return w, b
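
# create_w_b is not wired into the graph below. As a sketch, a two-layer
# network built with it could look like this (the hidden width of 1024 is an
# illustrative choice, not something from the original gist):
#
#   w1, b1 = create_w_b(image_size * image_size, 1024)
#   hidden = tf.nn.relu(tf.matmul(x, w1) + b1)
#   w2, b2 = create_w_b(1024, n_labels)
#   y_ = tf.nn.softmax(tf.matmul(hidden, w2) + b2)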

def accuracy(predictions, labels):
    # Percentage of rows whose predicted class (argmax over the softmax
    # output) matches the one-hot label, e.g. 100.0 when every row agrees.
    return 100.0 * np.sum(np.argmax(predictions, 1) == np.argmax(labels, 1)) / predictions.shape[0]

def main():
    graph = tf.Graph()
    with graph.as_default():
        # Placeholders for a batch of flattened images and one-hot labels.
        x = tf.placeholder(tf.float32, [None, image_size * image_size])
        y = tf.placeholder(tf.float32, [None, n_labels])

        # A single softmax layer: multinomial logistic regression on raw pixels.
        W = tf.Variable(tf.zeros([image_size * image_size, n_labels]))
        b = tf.Variable(tf.zeros([n_labels]))
        y_ = tf.nn.softmax(tf.matmul(x, W) + b)

        # Summed cross-entropy loss, minimized with plain gradient descent.
        loss = -tf.reduce_sum(y * tf.log(y_))
        optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(loss)
    num_steps = 7001
    batch_size = 128

    with tf.Session(graph=graph) as session:
        tf.initialize_all_variables().run()
        print "Initialized"
        for step in xrange(num_steps):
            # Train on one minibatch per step.
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            feed_dict = {x: batch_xs, y: batch_ys}
            _, l, predictions = session.run([optimizer, loss, y_], feed_dict=feed_dict)
            if step % 500 == 0:
                print "Minibatch loss at step", step, ":", l
                print "Minibatch accuracy: %.1f%%" % accuracy(predictions, batch_ys)
                print "Validation accuracy: %.1f%%" % accuracy(y_.eval(feed_dict={x: mnist.validation.images}),
                                                               mnist.validation.labels)
        print "Test accuracy: %.1f%%" % accuracy(y_.eval(feed_dict={x: mnist.test.images}), mnist.test.labels)

if __name__ == "__main__":
    main()