@chadrick-kwag
Created September 9, 2018 06:51
import numpy as np
from sklearn.metrics import accuracy_score
import tensorflow as tf
import tensorflow.contrib.slim.nets as nets
# tf version: 1.10.0
def get_random_input_and_label(batch_size, class_size):
    # for demonstration purposes, reuse a single random input and label
    random_input = np.random.rand(batch_size, 299, 299, 3)
    random_index = np.random.randint(0, class_size, batch_size)
    random_output = np.zeros((batch_size, class_size))
    for index, val in enumerate(random_index):
        random_output[index, val] = 1.0
    return random_input, random_output

class_size = 5
# build some model
input_ph = tf.placeholder(tf.float32, [None, 299, 299, 3])
onehot_labels_ph = tf.placeholder(tf.float32, [None, class_size])
# placeholder for the externally calculated accuracy; reduce_sum turns the
# shape-[1] tensor into a scalar that tf.summary.scalar can record
lazy_acc_score_ph = tf.placeholder(tf.float32, [1])
lazy_acc_score_ts = tf.reduce_sum(lazy_acc_score_ph)
# for details on the model, check out https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/contrib/slim/python/slim/nets/inception_v3.py
logits_ts, end_points = nets.inception.inception_v3(input_ph, num_classes=class_size)
prediction_ts = end_points['Predictions']
loss_ts = tf.losses.softmax_cross_entropy(
    onehot_labels=onehot_labels_ph, logits=logits_ts)
optimizer_op = tf.train.AdamOptimizer(0.001).minimize(loss_ts)
# in training, we want to log the loss value and the accuracy value
loss_summary = tf.summary.scalar("loss/loss", loss_ts)
# merge_all() will pick up loss_summary (the only summary in the default collection)
train_summary_op = tf.summary.merge_all()
# separate summary op for recording the externally calculated metric (acc_score)
acc_summary_op = tf.summary.scalar("metric/acc", lazy_acc_score_ts)
initop = tf.global_variables_initializer()
with tf.Session() as sess:
    writer = tf.summary.FileWriter("tfsummary", session=sess)
    sess.run(initop)
    steps = 20
    train_input, train_label = get_random_input_and_label(4, class_size)
    test_input, test_label = get_random_input_and_label(4, class_size)
    for step in range(steps):
        train_summary, loss_val, prediction, _ = sess.run(
            [train_summary_op, loss_ts, prediction_ts, optimizer_op],
            feed_dict={input_ph: train_input, onehot_labels_ph: train_label})
        writer.add_summary(train_summary, global_step=step)
        print("train done for step={}".format(step))
        # calculate accuracy outside of the computational graph:
        # run the prediction on the test batch and score it with sklearn
        test_prediction = sess.run(prediction_ts, feed_dict={input_ph: test_input})
        pred_argmax = np.argmax(test_prediction, axis=1)
        label_argmax = np.argmax(test_label, axis=1)
        acc_score = accuracy_score(label_argmax, pred_argmax)
        # reshape to (1,) to match lazy_acc_score_ph, then record it through the summary op
        acc_score = np.reshape(acc_score, (1,))
        acc_summary = sess.run(acc_summary_op, feed_dict={lazy_acc_score_ph: acc_score})
        writer.add_summary(acc_summary, global_step=step)
print("end of code")