Building a simple neural network with TensorFlow
# Import TensorFlow and load the MNIST data
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True)
# Create placeholders for the input images and labels
X = tf.placeholder(tf.float32, shape=[None, 784])
Y = tf.placeholder(tf.float32, shape=[None, 10])
# Weights (W) and bias (b)
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
# Set the training hyperparameters
batch_size = 100
learning_rate = 0.01
training_epochs = 10
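# Model: softmax regression, y = softmax(X*W + b)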
y = tf.nn.softmax(tf.matmul(X, W) + b)
# Cost function: cross-entropy error
cross_entropy = tf.reduce_mean(-tf.reduce_sum(Y * tf.log(y), reduction_indices=[1]))
# Compute the model's accuracy
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Training step: gradient descent
train_model = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Train for the configured number of epochs, one mini-batch at a time
    for epoch in range(training_epochs):
        batch_count = int(mnist.train.num_examples / batch_size)
        for i in range(batch_count):
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            sess.run(train_model, feed_dict={X: batch_x, Y: batch_y})
    # Evaluate on the test set
    print("Accuracy:", accuracy.eval(session=sess, feed_dict={X: mnist.test.images, Y: mnist.test.labels}))
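As a possible extension (not part of the original gist), individual predictions could be inspected inside the same with tf.Session() block after training, for example:

    # Hypothetical extension: predicted vs. actual classes for the first five test images
    predicted = sess.run(tf.argmax(y, 1), feed_dict={X: mnist.test.images[:5]})
    actual = sess.run(tf.argmax(Y, 1), feed_dict={Y: mnist.test.labels[:5]})
    print("Predicted:", predicted)
    print("Actual:   ", actual)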