@cburgdorf
Last active November 18, 2020 11:23
Comparing XOR between TensorFlow and Keras
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
training_data = np.array([[0,0],[0,1],[1,0],[1,1]], "float32")
target_data = np.array([[0],[1],[1],[0]], "float32")
model = Sequential()
model.add(Dense(32, input_dim=2, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='mean_squared_error', optimizer='adam', metrics=['binary_accuracy'])
model.fit(training_data, target_data, epochs=1000, verbose=2)
print(model.predict(training_data))
import tensorflow as tf
input_data = [[0., 0.], [0., 1.], [1., 0.], [1., 1.]] # XOR input
output_data = [[0.], [1.], [1.], [0.]] # XOR output
n_input = tf.placeholder(tf.float32, shape=[None, 2], name="n_input")
n_output = tf.placeholder(tf.float32, shape=[None, 1], name="n_output")
hidden_nodes = 5
b_hidden = tf.Variable(tf.random_normal([hidden_nodes]), name="hidden_bias")
W_hidden = tf.Variable(tf.random_normal([2, hidden_nodes]), name="hidden_weights")
hidden = tf.sigmoid(tf.matmul(n_input, W_hidden) + b_hidden)
W_output = tf.Variable(tf.random_normal([hidden_nodes, 1]), name="output_weights") # output layer's weight matrix
output = tf.sigmoid(tf.matmul(hidden, W_output)) # calc output layer's activation
squared_error = tf.square(n_output - output) # squared error (not actually cross-entropy, but simpler and sufficient here)
loss = tf.reduce_mean(squared_error) # mean squared error over the batch
optimizer = tf.train.AdamOptimizer(0.01) # Adam optimizer with a learning rate of 0.01
train = optimizer.minimize(loss) # training op: one optimizer step on the loss
init = tf.global_variables_initializer() # initialize_all_variables() was deprecated in favor of this
sess = tf.Session() # create the session and therefore the graph
sess.run(init) # initialize all variables
for epoch in range(2001):
    # run the training operation
    cvalues = sess.run([train, loss, W_hidden, b_hidden, W_output],
                       feed_dict={n_input: input_data, n_output: output_data})
    if epoch % 200 == 0:
        print("")
        print("step: {:>3}".format(epoch))
        print("loss: {}".format(cvalues[1]))

print("")
print("input: {} | output: {}".format(input_data[0], sess.run(output, feed_dict={n_input: [input_data[0]]})))
print("input: {} | output: {}".format(input_data[1], sess.run(output, feed_dict={n_input: [input_data[1]]})))
print("input: {} | output: {}".format(input_data[2], sess.run(output, feed_dict={n_input: [input_data[2]]})))
print("input: {} | output: {}".format(input_data[3], sess.run(output, feed_dict={n_input: [input_data[3]]})))
@jxub commented Oct 17, 2017

Neat!

@john-mclaughlin-fw commented

Why does the Keras version have 32 nodes in the hidden layer, whereas the TensorFlow version only has 5? Am I understanding the code incorrectly?
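The hidden-layer width is just a hyperparameter the two snippets happened to pick differently; in principle XOR needs only two hidden units. A minimal sketch of the Keras model narrowed to 5 units to match the TensorFlow graph (the tanh activation and epoch count are illustrative choices, not settings from this gist):

import numpy as np
from keras.models import Sequential
from keras.layers import Dense

training_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], "float32")
target_data = np.array([[0], [1], [1], [0]], "float32")

# 5 hidden units, mirroring the TensorFlow graph; tanh instead of relu
# because very narrow relu layers can get stuck ("dead" units) on XOR.
model = Sequential()
model.add(Dense(5, input_dim=2, activation='tanh'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='mean_squared_error', optimizer='adam', metrics=['binary_accuracy'])
model.fit(training_data, target_data, epochs=2000, verbose=0)
print(model.predict(training_data))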

@katejarne commented

Do you know if it is possible to use a recurrent network to solve the XOR problem with Keras? I was trying to implement it, but I have not been able to get it to work yet.
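One common way to do this is to recast XOR as a parity task over a bit sequence, so the recurrent layer has something to iterate over. A minimal sketch using Keras's SimpleRNN (layer size and epoch count are illustrative, not tested settings from this gist):

import numpy as np
from keras.models import Sequential
from keras.layers import SimpleRNN, Dense

# XOR recast as a sequence task: each sample is two 1-bit timesteps,
# and the target is the XOR (parity) of the two bits.
X = np.array([[[0], [0]], [[0], [1]], [[1], [0]], [[1], [1]]], "float32")
y = np.array([[0], [1], [1], [0]], "float32")

model = Sequential()
model.add(SimpleRNN(8, input_shape=(2, 1), activation='tanh'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam')
model.fit(X, y, epochs=500, verbose=0)
print(model.predict(X))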
