Logistic regression with TensorFlow
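
Two files follow: a script that trains and evaluates logistic regression on MNIST, and the module logreg170326.py defining the LogisticRegression class (TensorFlow 1.x API).
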
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import numpy as np

import mnist
import logreg170326 as logreg


### mini-batch indices for stochastic gradient descent
#
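# returns an (nbatch, N) boolean matrix; row ib selects the samples of
# mini-batch ib, covering all N samples in a random order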
def makeBatchIndex(N, batchsize):
    idx = np.random.permutation(N)
    nbatch = int(np.ceil(float(N) / batchsize))
    idxB = np.zeros((nbatch, N), dtype=bool)
    for ib in range(nbatch - 1):
        idxB[ib, idx[ib*batchsize:(ib+1)*batchsize]] = True
    ib = nbatch - 1
    idxB[ib, idx[ib*batchsize:]] = True   # the last batch takes the remainder
    return idxB
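
# e.g. makeBatchIndex(10, 4) gives a (3, 10) boolean array whose rows
# select 4, 4, and 2 distinct samples in random order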


if __name__ == '__main__':

    ### reading and preparing the training data
    #
    mn = mnist.MNIST(pathMNIST='../150117-mnist')
    X = mn.getImage('L') / 255.0   # rescale pixel values to [0, 1]
    lab = mn.getLabel('L')
    D = X.shape[1]
    K = mn.nclass
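
    # one-hot encoding of the class labels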
    Zt = np.zeros((X.shape[0], K))
    for ik in range(K):
        Zt[lab == ik, ik] = 1.0
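
    # center the data by subtracting the mean image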
    xm = np.mean(X, axis=0)
    X -= xm

    # first 50,000 samples for training, the rest for validation
    XL, ZtL = X[:50000], Zt[:50000]
    XV, ZtV = X[50000:], Zt[50000:]
    NL = XL.shape[0]
    NV = XV.shape[0]

    ### initializing the network
    #
    lr = logreg.LogisticRegression(D, K)

    ### training
    #
    batchsize = 100
    idxB = makeBatchIndex(NL, batchsize)
    nbatch = idxB.shape[0]
    print('### training: D =', D, ' K =', K, ' NL =', NL, ' NV =', NV, ' batchsize =', batchsize)
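
    # report cross-entropy and training/validation error rates (%) for the
    # first ten iterations and every tenth thereafter; each iteration then
    # runs one epoch of mini-batch updates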
    for i in range(100):

        if i <= 10 or i % 10 == 0:
            Y, Z = lr.output(XL)
            ceL = lr.cost(Y, ZtL)
            accL = lr.accuracy(Y, ZtL)
            Y, Z = lr.output(XV)
            accV = lr.accuracy(Y, ZtV)
            print(i, ceL, (1.0 - accL)*100, (1.0 - accV)*100)

        for ib in np.random.permutation(nbatch):
            ii = idxB[ib, :]
            lr.train(XL[ii], ZtL[ii])

    ### test
    #
    XT = mn.getImage('T') / 255.0
    labT = mn.getLabel('T')
    NT = XT.shape[0]
    ZtT = np.zeros((NT, K))
    for ik in range(K):
        ZtT[labT == ik, ik] = 1.0
    XT -= xm   # apply the training-set mean
    Y, Z = lr.output(XT)
    accT = lr.accuracy(Y, ZtT)
    print('### test: NT =', NT)
    print((1.0 - accT)*100)
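
logreg170326.py (imported above as logreg):
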
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import tensorflow as tf


class LogisticRegression():

    def __init__(self, D, K):

        ### definition of the network
        #
        self.X = tf.placeholder(tf.float32, shape=[None, D])
        self.W = tf.Variable(0.1*(tf.random_uniform([D, K]) - 0.5))   # weights in [-0.05, 0.05)
        self.b = tf.Variable(0.1*(tf.random_uniform([K]) - 0.5))     # biases
        self.Y = tf.matmul(self.X, self.W) + self.b   # logits
        self.Z = tf.nn.softmax(self.Y)                # class probabilities
        self.tfg_output = (self.Y, self.Z)

        ### definition of the cost & accuracy
        #
        self.Zt = tf.placeholder(tf.float32, shape=[None, K])
        cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=self.Zt, logits=self.Y)
        self.tfg_cost = tf.reduce_mean(cross_entropy)
        correct_prediction = tf.equal(tf.argmax(self.Y, 1), tf.argmax(self.Zt, 1))
        self.tfg_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

        ### definition of the training step
        #
        eta = 0.01   # learning rate
        mu = 0.9     # momentum
        optimizer = tf.train.MomentumOptimizer(eta, mu)
        self.tfg_train = optimizer.minimize(self.tfg_cost)

        ### initialization
        #
        self.sess = tf.InteractiveSession()
        tf.global_variables_initializer().run()

    def output(self, X):
        d = {self.X: X}
        rv = self.sess.run(self.tfg_output, feed_dict=d)
        return rv

    def cost(self, Y, Zt):
        # feeds the precomputed logits Y into the (non-placeholder) tensor
        # self.Y, which TF1 allows, so the forward pass is not recomputed
        d = {self.Y: Y, self.Zt: Zt}
        rv = self.sess.run(self.tfg_cost, feed_dict=d)
        return rv

    # alternative accuracy() that recomputes the forward pass from X:
    '''
    def accuracy(self, X, Zt):
        d = {self.X: X, self.Zt: Zt}
        rv = self.sess.run(self.tfg_accuracy, feed_dict=d)
        return rv
    '''

    def accuracy(self, Y, Zt):
        d = {self.Y: Y, self.Zt: Zt}
        rv = self.sess.run(self.tfg_accuracy, feed_dict=d)
        return rv

    def train(self, X, Zt):
        d = {self.X: X, self.Zt: Zt}
        self.sess.run(self.tfg_train, feed_dict=d)
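

# minimal usage sketch (hypothetical random data, shapes chosen for MNIST):
#
#   import numpy as np
#   lr = LogisticRegression(784, 10)
#   X = np.random.rand(100, 784)
#   Zt = np.eye(10)[np.random.randint(10, size=100)]
#   lr.train(X, Zt)
#   Y, Z = lr.output(X)
#   print(lr.cost(Y, Zt), lr.accuracy(Y, Zt))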