"""
Teaching a neural network to do the XOR binary operator
Artificial Intelligence - FEUP
"""
from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain import TanhLayer
""" Builds the network """
# Creates a network with 2 input nodes, 3 hidden nodes and 1 output node
net = buildNetwork(2, 3, 1, bias=True, hiddenclass=TanhLayer)
# The activate() function computes an output; it is essentially random since we haven't trained the network yet
print(net.activate([2, 1]))
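# A quick sketch for inspecting what buildNetwork created; 'in', 'hidden0' and
# 'out' are the default layer names PyBrain assigns.
print(net['in'])
print(net['hidden0'])
print(net['out'])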
""" Creates the dataset """
# Creates a dataset with 2 input nodes and 1 output node
ds = SupervisedDataSet(2, 1)
# The four XOR training examples: the output is 1 only when the inputs differ
ds.addSample((0, 0), (0,))
ds.addSample((0, 1), (1,))
ds.addSample((1, 0), (1,))
ds.addSample((1, 1), (0,))
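# A small sketch for checking the dataset contents; SupervisedDataSet is
# iterable and yields (input, target) pairs.
print("%d training samples" % len(ds))
for inpt, target in ds:
    print("%s -> %s" % (inpt, target))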
""" Train the network """
trainer = BackpropTrainer(net, ds)
# train() runs a single epoch of backpropagation and returns the training error
print(trainer.train())
# Train until the validation error stops improving; returns the training and validation errors
trainer.trainUntilConvergence()
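# A hedged alternative sketch: train for a fixed number of epochs instead and
# watch the error decrease (the epoch count of 100 is an arbitrary choice, not
# part of the original gist).
for epoch in range(100):
    error = trainer.train()
    if epoch % 10 == 0:
        print("epoch %d, error %.4f" % (epoch, error))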
""" Compute results """
net.activate([0,1])
net.activate([0,0])
net.activate([0,1])
net.activate([0,0])
trainer.trainUntilConvergence()
net.activate([0,0])
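# A small sketch of turning the raw activations into binary predictions by
# rounding; the 0.5 threshold is an assumption, not part of the original gist.
for a, b in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    prediction = int(round(net.activate([a, b])[0]))
    print("%d XOR %d = %d" % (a, b, prediction))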