@stewartpark
Created October 12, 2015 08:17
Simple XOR learning with Keras
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
import numpy as np

# The four XOR input pairs and their targets
X = np.array([[0,0],[0,1],[1,0],[1,1]])
y = np.array([[0],[1],[1],[0]])

# 2 -> 8 -> 1 network: tanh hidden layer, sigmoid output
model = Sequential()
model.add(Dense(8, input_dim=2))
model.add(Activation('tanh'))
model.add(Dense(1))
model.add(Activation('sigmoid'))

sgd = SGD(lr=0.1)
model.compile(loss='binary_crossentropy', optimizer=sgd)

# show_accuracy and nb_epoch are arguments from the Keras 0.x era
model.fit(X, y, show_accuracy=True, batch_size=1, nb_epoch=1000)
print(model.predict_proba(X))
"""
[[ 0.0033028 ]
[ 0.99581173]
[ 0.99530098]
[ 0.00564186]]
"""
@katejarne

Do you know how to build an XOR model (or another binary task) using simple recurrent layers? Does it make any sense to do that? Could you comment on that?
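A minimal sketch of one way to do this, assuming "simple recurrent layers" means a SimpleRNN fed the two bits as a length-2 sequence (this framing is an assumption, not from the thread):

from keras.models import Sequential
from keras.layers import SimpleRNN, Dense
import numpy as np

# Each XOR input pair becomes a sequence of two 1-dimensional timesteps
X = np.array([[0,0],[0,1],[1,0],[1,1]]).reshape(4, 2, 1)
y = np.array([[0],[1],[1],[0]])

model = Sequential()
model.add(SimpleRNN(4, input_shape=(2, 1), activation='tanh'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam')
model.fit(X, y, epochs=500, verbose=0)
print(model.predict(X))

As for whether it makes sense: probably only as a toy exercise, but it is a compact test that the recurrent state actually carries the first bit until the second one arrives.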

@akol67

akol67 commented Mar 7, 2018

@baj12 left us a good example. While working on it (I added some noise) I could not get loss values below 0.22. Any help getting a better loss?
PS: if you want to change the output activation from softmax to sigmoid, you should remove the to_categorical step from y.

n = 200
ruido = 3                  # noise level, in percent
fat = n * ruido // 100     # integer count of mislabeled points per quadrant
print("noise level", ruido, "%")

# Quadrant III (both coordinates negative): class 1, plus fat noise points
x1 = np.random.rand(n,2) * (-1)
plt.plot(x1[:,0], x1[:,1], 'ro')
x11 = np.random.rand(fat,2) * (-1)
plt.plot(x11[:,0], x11[:,1], 'bo')
# Quadrant IV (x positive, y negative): class 0, plus noise
x2 = np.random.rand(n,2)
x2[:,1] *= (-1)
plt.plot(x2[:,0], x2[:,1], 'bo')
x22 = np.random.rand(fat,2)
x22[:,1] *= (-1)
plt.plot(x22[:,0], x22[:,1], 'ro')
# Quadrant II (x negative, y positive): class 0, plus noise
x3 = np.random.rand(n,2)
x3[:,0] *= (-1)
plt.plot(x3[:,0], x3[:,1], 'bo')
x33 = np.random.rand(fat,2)
x33[:,0] *= (-1)
plt.plot(x33[:,0], x33[:,1], 'ro')
# Quadrant I (both positive): class 1, plus noise
x4 = np.random.rand(n,2)
plt.plot(x4[:,0], x4[:,1], 'ro')
x44 = np.random.rand(fat,2)
plt.plot(x44[:,0], x44[:,1], 'bo')

# Stack all points and rescale from [-1,1] into [0,1]
X = np.concatenate((x1,x11,x2,x22,x3,x33,x4,x44))
X = (X + 1) / 2

# Labels: same-sign quadrants get 1, mixed-sign quadrants get 0;
# the fat extra points per quadrant carry the flipped label (the noise)
y1 = np.ones(n)
y11 = np.zeros(fat)
y4 = np.ones(n)
y44 = np.zeros(fat)
y2 = np.zeros(n)
y22 = np.ones(fat)
y3 = np.zeros(n)
y33 = np.ones(fat)
y = np.concatenate((y1,y11,y2,y22,y3,y33,y4,y44))

# To switch from softmax to sigmoid output, skip to_categorical and fit on y directly:

y2 = np_utils.to_categorical(y)
#y = np_utils.to_categorical(y)
model = Sequential()
model.add(Dense(12, input_dim=X.shape[1], activation='tanh', kernel_initializer=VarianceScaling()))
model.add(Dense(2, kernel_initializer='uniform', activation='softmax', name="output"))
#model.add(Dense(2, kernel_initializer='uniform', activation='sigmoid', name="output"))
sgd = SGD(lr=0.01)
model.compile(loss='binary_crossentropy', optimizer=sgd)
model.summary()
model.fit(X, y2, batch_size=2, shuffle=True, epochs=2000, verbose=1, callbacks=[EarlyStoppingByLossVal()])
#model.fit(X, y, batch_size=2, shuffle=True, epochs=2000, verbose=1, callbacks=[EarlyStoppingByLossVal()])
plot_decision_boundary(lambda X: model.predict_classes(X))
print("Last epoch: " + str(lastEpoch))

@akol67

akol67 commented Mar 7, 2018

Sorry, I forgot to include the libraries and helpers I used to run the code above; these need to be defined before the fit call:

from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.callbacks import Callback
from keras.initializers import VarianceScaling
from keras.utils import np_utils
import numpy as np
import matplotlib.pyplot as plt

lastEpoch = 0

class EarlyStoppingByLossVal(Callback):
    def __init__(self, monitor='val_loss', value=0.02, verbose=0):
        super(EarlyStoppingByLossVal, self).__init__()
        self.monitor = monitor
        self.value = value
        self.verbose = verbose

    def on_epoch_end(self, epoch, logs={}):
        global lastEpoch
        current = logs.get("loss")
        if current is not None and current < self.value:
            self.model.stop_training = True
            lastEpoch = epoch + 1

def plot_decision_boundary(pred_func):
    # Set min and max values and give it some padding
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)

@homerobse

homerobse commented Aug 1, 2018

I wanted to solve this with only two hidden units, so I used this code, which worked fine for most executions (depending on the random initial conditions):

 from keras.models import Sequential
 from keras.layers.core import Dense, Dropout, Activation
 from keras.optimizers import SGD
 import numpy as np
 
 X = np.array([[0,0],[0,1],[1,0],[1,1]])
 y = np.array([[0],[1],[1],[0]])
 
 model = Sequential()
 model.add(Dense(2, input_dim=2))
 model.add(Activation('tanh'))
 model.add(Dense(1))
 model.add(Activation('sigmoid'))
 
 sgd = SGD(lr=0.1)
 model.compile(loss='mean_squared_error', optimizer=sgd)
 
 model.fit(X, y, batch_size=1, epochs=1000)
 print(model.predict_proba(X))

I think that to solve it for any initial condition we would need scattered inputs like @baj12 proposed, but I didn't test it.
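Since convergence of the two-unit network depends on the random initial weights, a blunt workaround is to retrain from fresh weights until all four cases come out right. A minimal sketch (the retry loop is an assumption, not something posted in this thread; it reuses the same model and Keras version as the snippet above):

import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.optimizers import SGD

X = np.array([[0,0],[0,1],[1,0],[1,1]])
y = np.array([[0],[1],[1],[0]])

for attempt in range(10):
    # Fresh random initialization on every attempt
    model = Sequential()
    model.add(Dense(2, input_dim=2, activation='tanh'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='mean_squared_error', optimizer=SGD(lr=0.1))
    model.fit(X, y, batch_size=1, epochs=1000, verbose=0)
    preds = model.predict(X)
    if np.all((preds > 0.5).astype(int) == y):  # all four XOR cases correct
        print("converged on attempt", attempt, preds.ravel())
        break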

@jollyblade

I added some biases and random initialization as well, but I got no better results than @consciencia.

from keras.initializers import RandomUniform

rndU = RandomUniform(minval=-1, maxval=1, seed=None)
model = Sequential()
model.add(Dense(9, activation='sigmoid', input_dim=2, use_bias=True, kernel_initializer=rndU, bias_initializer=rndU))
model.add(Dense(1, activation='sigmoid', use_bias=True, kernel_initializer=rndU, bias_initializer=rndU))
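The snippet stops before training. A hypothetical completion, mirroring the compile/fit settings from earlier posts in this thread (the optimizer choice and epoch count are assumptions, not jollyblade's):

# Assumed training setup; X and y are the four XOR cases from the posts above
from keras.optimizers import SGD

model.compile(loss='binary_crossentropy', optimizer=SGD(lr=0.1))
model.fit(X, y, batch_size=1, epochs=1000, verbose=0)
print(model.predict(X))

The all-sigmoid hidden layer may also be part of the problem: sigmoid outputs are not zero-centered, so on XOR they often train more slowly than the tanh hidden layers used elsewhere in this thread.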

@gauravkr0071

from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.initializers import RandomUniform
import numpy as np
x = np.array([[0.1,0.1,1],
              [0.1,0.9,1],
              [0.9,0.1,1],
              [0.9,0.9,1]])
y = np.array([[0.1],[0.9],[0.9],[0.1]])

model = Sequential()
model.add(Dense(4, input_dim=3, activation="sigmoid",
                bias_initializer=RandomUniform(minval=-1.0, maxval=1, seed=None)))
model.add(Dense(1, activation="sigmoid",
                bias_initializer=RandomUniform(minval=-1.0, maxval=1, seed=None)))
sgd = SGD(lr=0.01)
model.compile(loss='mean_squared_error', optimizer='sgd')
model.fit(x, y, epochs=5000, batch_size=1, verbose=1)

I am not getting good results. What am I doing wrong? Any ideas?

@belabedmohammed

@gauravkr0071 replace
model.compile(loss='mean_squared_error', optimizer='sgd')
with
model.compile(loss='mean_squared_error', optimizer=sgd)
Passing the string 'sgd' makes Keras build a default SGD optimizer, so the sgd = SGD(lr=0.01) object you configured (and its learning rate) is never used.
