@felipecruz
Last active March 30, 2018 08:53
perceptron implementation for educational purposes
# Each training item pairs an input tuple with its target label: ((x1, x2), target)
OR = (((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 1))
AND = (((0, 0), 0), ((0, 1), 0), ((1, 0), 0), ((1, 1), 1))

# Accessors: the input tuple and the target label of a training item
example = lambda x: x[0]
output = lambda x: x[1]
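# Quick sanity check of the accessors (an illustrative addition, not part of the original gist):
assert example(((0, 1), 1)) == (0, 1)  # the input tuple
assert output(((0, 1), 1)) == 1        # the target label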
def train_perceptron(Is, Ws):
    """Apply one perceptron update for the single training item Is, mutating Ws in place."""
    print("Xs: {} Ws: {}".format(example(Is), Ws))
    learn_rate = 0.1
    bias = 0  # unused: this simplified version has no bias term
    threshold = 0.5
    # Fire (predict 1) when the weighted sum of the inputs reaches the threshold
    result = sum(x * w for x, w in zip(example(Is), Ws)) >= threshold
    error = output(Is) - (1 if result else 0)
    if error:
        # Perceptron learning rule: w_i += learn_rate * error * x_i
        for i in range(len(Ws)):
            Ws[i] = Ws[i] + (learn_rate * error * example(Is)[i])
    return Ws
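# Worked single update (an illustrative addition, not in the original gist): starting from zero
# weights, the OR item ((0, 1), 1) is misclassified (weighted sum 0 < 0.5 predicts 0), so
# error = 1 and only the weight of the active input moves by learn_rate * error * x_i = 0.1.
assert train_perceptron(((0, 1), 1), [0, 0]) == [0, 0.1]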
def perceptron(Is, Ws):
    """Classify the input tuple Is: 1 if its weighted sum reaches the threshold, else 0."""
    threshold = 0.5
    return 1 if sum(x * w for x, w in zip(Is, Ws)) >= threshold else 0
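# The decision rule with hand-picked weights (an illustrative addition, not in the original
# gist): with the fixed 0.5 threshold, per-input weights of 0.5 behave like OR and weights
# of 0.3 behave like AND.
assert perceptron((0, 1), [0.5, 0.5]) == 1  # 0.5 >= 0.5
assert perceptron((0, 1), [0.3, 0.3]) == 0  # 0.3 < 0.5
assert perceptron((1, 1), [0.3, 0.3]) == 1  # 0.6 >= 0.5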
# Train on the OR examples; repeating the data 4 times acts as a crude epoch loop
trained_Ws = [0, 0]
for training_item in OR * 4:
    trained_Ws = train_perceptron(training_item, trained_Ws)

print("Trained OR")
print(perceptron((0, 0), trained_Ws))  # expected 0
print(perceptron((1, 0), trained_Ws))  # expected 1
print(perceptron((0, 1), trained_Ws))  # expected 1
print(perceptron((1, 1), trained_Ws))  # expected 1
# Same procedure for AND
trained_Ws = [0, 0]
for training_item in AND * 4:
    trained_Ws = train_perceptron(training_item, trained_Ws)

print("Trained AND")
print(perceptron((0, 0), trained_Ws))  # expected 0
print(perceptron((1, 0), trained_Ws))  # expected 0
print(perceptron((0, 1), trained_Ws))  # expected 0
print(perceptron((1, 1), trained_Ws))  # expected 1
# Two linearly separable point sets: label 0 for points on the line y = 2,
# label 1 for points on the line y = 3
linear_func = (((0, 2), 0), ((1, 2), 0), ((2, 2), 0), ((3, 2), 0))
linear_func2 = (((0, 3), 1), ((1, 3), 1), ((2, 3), 1), ((4, 3), 1))

trained_Ws = [0, 0]
for training_item in (linear_func + linear_func2) * 4:
    trained_Ws = train_perceptron(training_item, trained_Ws)

print("Trained For Linear Funcs")
print(perceptron((0, 2), trained_Ws))  # expected 0
print(perceptron((0, 3), trained_Ws))  # expected 1
print(perceptron((2, 2), trained_Ws))  # expected 0
print(perceptron((2, 3), trained_Ws))  # expected 1
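# A hypothetical helper (an assumed name, not in the original gist) that wraps the repeated
# train-then-test pattern above and returns the predictions for the training inputs.
def train_and_test(dataset, passes=4):
    Ws = [0, 0]
    for training_item in dataset * passes:
        Ws = train_perceptron(training_item, Ws)
    return [perceptron(example(item), Ws) for item in dataset]

# e.g. train_and_test(AND) should reproduce the AND truth table: [0, 0, 0, 1]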