
@ajschumacher
Created May 14, 2016 03:40
import tensorflow as tf

# one logistic "neuron": a single weight, bias, and training example
w = tf.Variable([2.0])
b = tf.Variable([2.0])
x = tf.constant([1.0])
y = tf.sigmoid(w*x + b)
y_ = tf.constant([0.0])  # target output
cross_entropy = -1*(y_*tf.log(y) + (1-y_)*(tf.log(1-y)))

s = tf.Session()
s.run(tf.initialize_all_variables())

# gradient descent with learning rate 0.005
train_step = tf.train.GradientDescentOptimizer(0.005).minimize(cross_entropy)
for i in range(300):
    s.run(train_step)
    print i, s.run(y)
# far too slow
# same model again; the only change is the learning rate, 0.05 instead of 0.005
import tensorflow as tf
w = tf.Variable([2.0])
b = tf.Variable([2.0])
x = tf.constant([1.0])
y = tf.sigmoid(w*x + b)
y_ = tf.constant([0.0])
cross_entropy = -1*(y_*tf.log(y) + (1-y_)*(tf.log(1-y)))
s = tf.Session()
s.run(tf.initialize_all_variables())
train_step = tf.train.GradientDescentOptimizer(0.05).minimize(cross_entropy)
for i in range(300):
    s.run(train_step)
    print i, s.run(y)
# about right
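
For comparison (this is not part of the original gist), here is the same one-example update written out by hand in plain Python, assuming the standard shortcut that the gradient of sigmoid cross-entropy with respect to the pre-activation w*x + b is y - y_. Switching lr between 0.005 and 0.05 reproduces the "far too slow" and "about right" behavior above.

import math

def sigmoid(z):
    return 1.0 / (1.0 + math.exp(-z))

w, b = 2.0, 2.0    # same starting point as the gist
x, y_ = 1.0, 0.0   # one input, target 0
lr = 0.05          # try 0.005 to see the slow version

for i in range(300):
    y = sigmoid(w * x + b)
    grad = y - y_              # d(cross_entropy)/d(w*x + b)
    w -= lr * grad * x         # chain rule through w*x
    b -= lr * grad
    print i, y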