XOR net in TensorFlow
import tensorflow as tf
import numpy as np

tf.set_random_seed(1)


def generate_train_data(batch_size=64):
    # Sample a random batch of rows from the four-row XOR truth table.
    indices = np.random.randint(4, size=batch_size)
    XOR_X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    XOR_Y = np.array([[0], [1], [1], [0]])
    return XOR_X[indices], XOR_Y[indices]


# Placeholders for a batch of 2-bit inputs and their 1-bit XOR labels.
input = tf.placeholder(dtype=tf.float32, shape=[None, 2])
output = tf.placeholder(dtype=tf.float32, shape=[None, 1])

# Two sigmoid hidden layers of 3 units each, plus a single sigmoid output unit.
net = tf.contrib.layers.fully_connected(input, 3, activation_fn=tf.sigmoid)
net = tf.contrib.layers.fully_connected(net, 3, activation_fn=tf.sigmoid)
net = tf.contrib.layers.fully_connected(net, 1, activation_fn=tf.sigmoid)

# Binary cross-entropy (log) loss, minimised with Adam.
loss = tf.losses.log_loss(labels=output, predictions=net)
optimizer = tf.train.AdamOptimizer(0.03).minimize(loss)

init = tf.global_variables_initializer()
sess = tf.InteractiveSession()
sess.run(init)

epochs = 0
loss_val = np.inf
while loss_val > 1e-8:
    epochs += 1
    XOR_X, XOR_Y = generate_train_data(16)
    _, loss_val = sess.run([optimizer, loss], feed_dict={input: XOR_X, output: XOR_Y})
    if epochs % 2000 == 0:
        print({'epoch': epochs, 'loss': loss_val})

# Full XOR truth table, available for evaluating the trained network.
TEST_X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
TEST_Y = np.array([[0], [1], [1], [0]])

print("Learnt XOR after %s epochs." % epochs)
sess.close()
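
TEST_X and TEST_Y hold the full XOR truth table but the gist never runs the trained network on them. A minimal sketch of such a check, to be placed just before sess.close() (it reuses the input, net, sess and TEST_X names defined above):

# Evaluate the trained network on the full truth table while the session
# is still open; the outputs should round to [[0], [1], [1], [0]].
predictions = sess.run(net, feed_dict={input: TEST_X})
print(np.round(predictions))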