@EnsekiTT
Created November 14, 2015 17:28
Working through the TensorFlow MNIST tutorial without using input_data.py.
# coding: utf-8
# In[26]:
get_ipython().magic(u'matplotlib inline')
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
#import seaborn as sns
import pandas as pd
# In[27]:
mnist = pd.read_pickle('mnist.pkl')
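# In[ ]:
# Assumption (not stated in the gist): mnist.pkl holds the same nested tuples as the
# classic mnist.pkl.gz from http://deeplearning.net/data/mnist/mnist.pkl.gz, i.e.
# ((train_x, train_y), (valid_x, valid_y), (test_x, test_y)) with each image flattened
# to 784 floats. The indexing used below (mnist[0][0], mnist[1][1], ...) relies on that
# layout; a minimal sketch of unpacking it:
train_images, train_labels = mnist[0]
heldout_images, heldout_labels = mnist[1]
print train_images.shape, heldout_images.shape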
# In[28]:
def num2vec10(num):
    """Turn a digit label (0-9) into a length-10 one-hot vector."""
    holder = np.zeros(10)
    holder[num] = 1
    return holder

def nums2vecs10(nums):
    """Turn a sequence of digit labels into an array of one-hot vectors."""
    return np.array([num2vec10(num) for num in nums])
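# In[ ]:
# Quick illustration of the one-hot helpers defined above (values chosen arbitrarily):
print num2vec10(3)               # length-10 vector with a 1 at index 3
print nums2vecs10([7, 0]).shape  # (2, 10)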
# In[29]:
# The pickle is full of 28x28 images like this one.
plt.imshow(mnist[1][0][1].reshape((28,28)), cmap = cm.Greys_r)
# In[30]:
sess = tf.InteractiveSession()
x = tf.placeholder('float', shape=[None, 784])  # flattened 28x28 input images
y_ = tf.placeholder('float', shape=[None, 10])  # one-hot target labels
# In[31]:
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
sess.run(tf.initialize_all_variables())
# In[32]:
y = tf.nn.softmax(tf.matmul(x, W) + b)
cross_entropy = -tf.reduce_sum(y_ * tf.log(y))  # summed (not averaged) over the batch
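# In[ ]:
# Sanity check of the loss above in plain numpy (illustrative values, not from the data):
# for a single sample, -sum(y_*log(y)) reduces to -log of the probability assigned
# to the true class.
probs = np.array([[0.7, 0.2, 0.1]])    # hypothetical softmax output
targets = np.array([[1.0, 0.0, 0.0]])  # hypothetical one-hot label
print -np.sum(targets * np.log(probs))  # equals -log(0.7), about 0.357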
# In[33]:
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)
# In[34]:
step = 50
for i in range(1000):
    # Feed 1000 sequential mini-batches of 50 training images and their one-hot labels.
    x_batch = mnist[0][0][i*step:(i+1)*step]
    y_batch = nums2vecs10(mnist[0][1][i*step:(i+1)*step])
    train_step.run(feed_dict={x: x_batch, y_: y_batch})
# In[37]:
# Accuracy: fraction of held-out images classified correctly.
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
print accuracy.eval(feed_dict={x: mnist[1][0], y_: nums2vecs10(mnist[1][1])})
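# In[ ]:
# The same check in plain numpy (illustration): compare the argmax of the predicted
# probabilities with the integer labels of the held-out split.
probs_heldout = y.eval(feed_dict={x: mnist[1][0]})
print np.mean(np.argmax(probs_heldout, axis=1) == np.array(mnist[1][1]))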