Skip to content

Instantly share code, notes, and snippets.

@fauzisho
Created February 1, 2025 11:26
Show Gist options
  • Select an option

  • Save fauzisho/28fa20e75ba4e58e533bb38bfecd7f0a to your computer and use it in GitHub Desktop.

Select an option

Save fauzisho/28fa20e75ba4e58e533bb38bfecd7f0a to your computer and use it in GitHub Desktop.
import numpy as np
import pandas as pd
# Define the Perceptron class (reuse the provided implementation)
class Perceptron:
    """A single bipolar perceptron trained with the classic perceptron rule.

    Targets and predictions are bipolar (-1 / 1). A bias weight w0 is
    learned by prepending a constant-1 column to the inputs.
    """

    def __init__(self, n_features, learning_rate=0.01, n_iters=100, initial_weights=None):
        """Set up the weight vector, learning rate and epoch budget.

        Args:
            n_features: number of input features (a bias weight is added on top).
            learning_rate: step size for each weight update.
            n_iters: maximum number of training epochs.
            initial_weights: optional explicit weights [w0, w1, ..., wn];
                drawn uniformly from [-1, 1) when omitted.
        """
        # Initialize weights with provided values or small random values between -1 and 1
        if initial_weights is not None:
            # Force float dtype: an integer seed (e.g. [0, 1, 1]) would otherwise
            # create an int array and the in-place update `self.weights += delta_w`
            # would raise a numpy casting error (delta_w is always float).
            self.weights = np.array(initial_weights, dtype=float)
        else:
            self.weights = np.random.uniform(-1, 1, n_features + 1)  # +1 for w0 (bias)
        self.lr = learning_rate
        self.n_iters = n_iters
        self.initial_weights = self.weights.copy()  # Store initial weights for comparison

    def _step_function(self, x):
        """Bipolar step activation: 1 where x >= 0, else -1."""
        return np.where(x >= 0, 1, -1)

    def fit(self, X, y):
        """Train on samples X with bipolar targets y, printing every update.

        Stops early once a full epoch produces no misclassification.
        """
        # Add a bias term (1) to each input for w0
        X_with_bias = np.c_[np.ones((X.shape[0], 1)), X]  # Adds a column of 1s at the start
        # Label the columns w0..wn from the actual weight count so the report
        # works for any n_features (the original hard-coded ['w0','w1','w2']).
        weight_labels = [f'w{i}' for i in range(len(self.initial_weights))]
        initial_weights_df = pd.DataFrame([self.initial_weights], columns=weight_labels)
        print("Initial weights:\n", initial_weights_df)
        for epoch in range(self.n_iters):
            errors = 0  # Track errors to check if we can stop early
            print(f"\nEpoch {epoch + 1}/{self.n_iters}")
            for idx, x_i in enumerate(X_with_bias):
                # Calculate the weighted sum
                linear_output = np.dot(x_i, self.weights)
                # Calculate the predicted output
                o_j = self._step_function(linear_output)
                # Calculate the error
                error = y[idx] - o_j
                if error != 0:
                    # Perceptron rule: w <- w + lr * (target - output) * x
                    delta_w = self.lr * error * x_i
                    self.weights += delta_w
                    errors += 1  # Increment error count
                    # Print details of each update
                    print(f"Sample {idx + 1}:")
                    print(f" Input (with bias): {x_i}")
                    print(f" Target: {y[idx]}, Predicted: {o_j}")
                    print(f" Error: {error}")
                    print(f" Delta weights: {delta_w}")
                    print(f" Updated weights: {self.weights}")
            # Check if we can stop early (if no errors in this epoch)
            if errors == 0:
                print("\nTraining complete - all samples correctly classified.")
                break

    def predict(self, X):
        """Return bipolar predictions (-1 / 1) for each row of X."""
        # Add a bias term (1) to each input for w0
        X_with_bias = np.c_[np.ones((X.shape[0], 1)), X]
        linear_output = np.dot(X_with_bias, self.weights)
        return self._step_function(linear_output)
# --- Compose XOR out of three fixed-weight gate perceptrons ---
# Each basic gate is instantiated with weights that already solve it,
# so no training is needed here — only forward passes.
and_perceptron = Perceptron(n_features=2, initial_weights=[-0.97, 0.64, 0.64])
or_perceptron = Perceptron(n_features=2, initial_weights=[-1.0, 2.6, 2.6])
nand_perceptron = Perceptron(n_features=2, initial_weights=[6.2, -4.6, -2.8])

# Full two-bit truth table; targets are bipolar (-1 = False, 1 = True).
X_XOR = np.array([[0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1]])
y_XOR = np.array([-1, 1, 1, -1])

# First layer: feed every sample through the three basic gates.
and_outputs = and_perceptron.predict(X_XOR)
or_outputs = or_perceptron.predict(X_XOR)
nand_outputs = nand_perceptron.predict(X_XOR)

# Second layer: XOR(a, b) == AND(OR(a, b), NAND(a, b)), so pair up the
# OR and NAND outputs as a new two-feature dataset.
xor_inputs = np.array([or_outputs, nand_outputs]).T
xor_perceptron = Perceptron(n_features=2, initial_weights=[-1.0, 1.0, 1.0])  # XOR perceptron
xor_predictions = xor_perceptron.predict(xor_inputs)

# Report every stage next to the expected truth table.
print("Inputs:\n", X_XOR)
print("AND Outputs:\n", and_outputs)
print("OR Outputs:\n", or_outputs)
print("NAND Outputs:\n", nand_outputs)
print("XOR Predictions:\n", xor_predictions)
print("Expected XOR Outputs:\n", y_XOR)
@fauzisho
Copy link
Author

fauzisho commented Feb 1, 2025

Inputs:
[[0 0]
[0 1]
[1 0]
[1 1]]
AND Outputs:
[-1 -1 -1 1]
OR Outputs:
[-1 1 1 1]
NAND Outputs:
[ 1 1 1 -1]
XOR Predictions:
[-1 1 1 -1]
Expected XOR Outputs:
[-1 1 1 -1]

Process finished with exit code 0

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment