import numpy as np
import tensorflow as tf

def reset_graph(seed=42):
    # Start from a fresh default graph with fixed seeds for reproducibility
    tf.reset_default_graph()
    tf.set_random_seed(seed)
    np.random.seed(seed)

reset_graph()

n_epochs = 1000
learning_rate = 0.01

# `inputs` is an (m, n) feature matrix and `output` an (m, 1) vector of
# 0/1 labels, assumed to be prepared earlier.
X = tf.constant(inputs, dtype=tf.float32, name="x")
y = tf.constant(output, dtype=tf.float32, name="y")
n = int(X.shape[1])  # number of input features
theta = tf.Variable(tf.random_uniform([n, 1], -1.0, 1.0), name="theta")
logits = tf.matmul(X, theta, name="logits")
predictions = tf.sigmoid(logits)  # equivalently: 1 / (1 + tf.exp(-logits))

# TensorFlow's tf.losses.log_loss() computes the cross-entropy (log loss);
# epsilon=1e-07 (the default) clips predictions away from 0 and 1.
loss = tf.losses.log_loss(labels=y, predictions=predictions, weights=1.0, epsilon=1e-07)
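# For reference, a minimal hand-written version of the same loss (an added
# sketch, not part of the original gist): tf.losses.log_loss offsets the
# predictions by epsilon in the same way before taking the log.
eps = 1e-7
manual_loss = -tf.reduce_mean(y * tf.log(predictions + eps)
                              + (1 - y) * tf.log(1 - predictions + eps))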
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
training_op = optimizer.minimize(loss)
with tf.Session() as sess:
    tf.global_variables_initializer().run()
    for epoch in range(n_epochs):
        sess.run(training_op)
        if epoch % 100 == 0:
            print("Epoch", epoch, "Loss =", loss.eval())
    best_theta = theta.eval()

print(best_theta)
# Expected output:
# [[-0.27450362]
#  [ 1.1188453 ]
#  [-1.4013102 ]]
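
# Added usage sketch (not in the original gist), assuming `inputs` is a
# NumPy-convertible array: score a few rows with the learned parameters.
probs = 1 / (1 + np.exp(-np.asarray(inputs)[:5].dot(best_theta)))  # P(y=1)
classes = (probs >= 0.5).astype(int)  # 0/1 class predictions
print(probs, classes)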