Basic Perceptron Learning for AND Gate
import numpy as np


class Perceptron:
    def __init__(self, Weights, Biases):
        self.Weights = Weights
        self.Biases = Biases

    def Train(self, Training, LearningRate):
        # Meant for a simple two-input AND gate, hence only the first two
        # columns of the training matrix are used as inputs and the third
        # column holds the targets. It can be extended to any linearly
        # separable dataset.
        y_in =[:, 0:2], self.Weights) + self.Biases
        op = -1 * np.ones(y_in.shape, dtype=int)
        op[y_in > 0] = 1  # Bipolar step activation
        t = (op == Training[:, 2].reshape((4, 1)))
        while not t.all():
            # One pass over the training set: w <- w + a*t*x, b <- b + a*t
            for i in range(Training.shape[0]):
                self.Weights = self.Weights + LearningRate * Training[i, 2] * \
                    Training[i, 0:2].reshape(self.Weights.shape)
                self.Biases = self.Biases + LearningRate * Training[i, 2]
            # Re-evaluate all patterns after the pass.
            y_in =[:, 0:2], self.Weights) + self.Biases
            op = -1 * np.ones(y_in.shape, dtype=int)
            op[y_in > 0] = 1
            t = (op == Training[:, 2].reshape((4, 1)))

    def Test(self, Testing):
        y_in =, self.Weights) + self.Biases
        op = -1 * np.ones(y_in.shape, dtype=int)
        op[y_in > 0] = 1
        return op
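
A minimal usage sketch, not part of the original gist: it assumes bipolar {-1, +1} inputs and targets, zero initial weights of shape (2, 1), and a scalar bias, with the AND targets in the third column of the training matrix.

# Example usage (a sketch under the assumptions above).
import numpy as np

# Bipolar AND gate: columns 0-1 are inputs, column 2 is the target.
Training = np.array([[ 1,  1,  1],
                     [ 1, -1, -1],
                     [-1,  1, -1],
                     [-1, -1, -1]])

p = Perceptron(np.zeros((2, 1)), 0.0)  # zero initial weights and bias
p.Train(Training, LearningRate=1.0)
print(p.Test(Training[:, 0:2]))        # expected: [[1], [-1], [-1], [-1]]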