@alinazhanguwo
Created March 22, 2019 23:30
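# The gist starts mid-script; the definitions below are a minimal sketch of what it
# assumes (imports, a linear model, placeholders, and a session). The shapes (36*36
# inputs, 5 font classes) and the single linear layer are assumptions for illustration,
# not taken from the gist. train_dataset/train_labels and test_dataset/test_labels
# (numpy arrays) are assumed to be loaded elsewhere.
import numpy as np
import tensorflow as tf
from tqdm import tqdm

num_features, num_classes = 36 * 36, 5                 # hypothetical shapes
x = tf.placeholder(tf.float32, [None, num_features])   # flattened input images
y_ = tf.placeholder(tf.float32, [None, num_classes])   # one-hot labels
W = tf.Variable(tf.zeros([num_features, num_classes]))
b = tf.Variable(tf.zeros([num_classes]))
y = tf.matmul(x, W) + b                                 # logits fed to the loss below
global_step = tf.Variable(0, trainable=False)           # incremented by the optimizer
sess = tf.InteractiveSession()                          # .eval()/.run() below use the default session
tf.global_variables_initializer().run()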
# Computes softmax cross entropy between logits and labels.
# Measures the probability error in discrete classification tasks.
# For example, each font image is labeled with one and only one label: an image can be font SansSerif or Serif, but not both.
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(logits=y + 1e-50, labels=y_))
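# A minimal numpy sketch (illustration only, not part of the gist) of the per-example
# quantity the op above averages: -sum(labels * log(softmax(logits))).
logits_ex = np.array([2.0, 1.0, 0.1])
labels_ex = np.array([1.0, 0.0, 0.0])
probs_ex = np.exp(logits_ex) / np.sum(np.exp(logits_ex))
loss_ex = -np.sum(labels_ex * np.log(probs_ex))  # ~0.417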
# Apply exponential decay to the learning rate.
learning_rate = tf.train.exponential_decay(0.05, global_step, 1000, 0.85, staircase=True)
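# A minimal sketch (not from the gist) of the staircase schedule defined above:
# 0.05 * 0.85 ** (global_step // 1000), i.e. 0.05 for the first 1000 steps,
# then 0.0425, then 0.036125, and so on.
def decayed_lr_ex(step, base_lr=0.05, decay_steps=1000, decay_rate=0.85):
    # staircase=True floors the exponent, so the rate drops in discrete steps
    return base_lr * decay_rate ** (step // decay_steps)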
# Gradient descent minimizes the cross-entropy loss and increments global_step.
train_step = tf.train.GradientDescentOptimizer(
    learning_rate).minimize(cross_entropy, global_step=global_step)
# Define accuracy: the fraction of examples whose predicted class matches the label.
correct_prediction = tf.equal(tf.argmax(y, 1),
                              tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(
    correct_prediction, "float"))
# Train for 5000 iterations, recording train/test accuracy every 10 steps.
epochs = 5000
train_acc = np.zeros(epochs//10)
test_acc = np.zeros(epochs//10)
for i in tqdm(range(epochs), ascii=True):
    # Record summary data, and the accuracy
    if i % 10 == 0:
        # Check accuracy on the training set
        A = accuracy.eval(feed_dict={x: train_dataset, y_: train_labels})
        train_acc[i//10] = A
        # And now the test set
        A = accuracy.eval(feed_dict={x: test_dataset, y_: test_labels})
        test_acc[i//10] = A
    train_step.run(feed_dict={x: train_dataset, y_: train_labels})
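# A possible follow-up (not in the gist): report the last recorded accuracies.
print("final train accuracy: %.4f, final test accuracy: %.4f"
      % (train_acc[-1], test_acc[-1]))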