@nisanthchunduru
Created November 13, 2023 10:18
Simple Neural Network from scratch
# This program was generated by ChatGPT
import numpy as np

# Sigmoid activation function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# Derivative of the sigmoid, written in terms of the sigmoid's output:
# if x = sigmoid(z), then d(sigmoid)/dz = x * (1 - x)
def sigmoid_derivative(x):
    return x * (1 - x)

# Define the neural network class
class NeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size):
        # Initialize weights with random values and biases with zeros
        self.weights_input_hidden = np.random.rand(input_size, hidden_size)
        self.bias_hidden = np.zeros((1, hidden_size))
        self.weights_hidden_output = np.random.rand(hidden_size, output_size)
        self.bias_output = np.zeros((1, output_size))

    def forward(self, X):
        # Forward pass through the network
        self.hidden_input = np.dot(X, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = sigmoid(self.hidden_input)
        self.final_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        self.predictions = sigmoid(self.final_input)
        return self.predictions

    def backward(self, X, y, learning_rate):
        # Backward pass: propagate the error from the output layer back
        # to the hidden layer
        error = y - self.predictions
        output_delta = error * sigmoid_derivative(self.predictions)
        hidden_error = output_delta.dot(self.weights_hidden_output.T)
        hidden_delta = hidden_error * sigmoid_derivative(self.hidden_output)

        # Update weights and biases (gradient descent step)
        self.weights_hidden_output += self.hidden_output.T.dot(output_delta) * learning_rate
        self.bias_output += np.sum(output_delta, axis=0, keepdims=True) * learning_rate
        self.weights_input_hidden += X.T.dot(hidden_delta) * learning_rate
        self.bias_hidden += np.sum(hidden_delta, axis=0, keepdims=True) * learning_rate

    def train(self, X, y, epochs, learning_rate):
        # Training loop
        for epoch in range(epochs):
            # Forward pass
            predictions = self.forward(X)
            # Backward pass
            self.backward(X, y, learning_rate)
            # Calculate and periodically print the mean squared error
            mse = np.mean(np.square(y - predictions))
            if epoch % 100 == 0:
                print(f"Epoch {epoch}, Mean Squared Error: {mse}")

# Create a simple dataset for XOR-like behavior
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

# Network and training hyperparameters
input_size = 2
hidden_size = 4
output_size = 1
learning_rate = 0.1
epochs = 10000

# Instantiate the neural network
nn = NeuralNetwork(input_size, hidden_size, output_size)

# Train the neural network
nn.train(X, y, epochs, learning_rate)

# Make predictions
predictions = nn.forward(X)
print("\nFinal Predictions:")
print(predictions)
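
For completeness, a minimal sketch of querying the trained network on a single input; the `sample` variable and the 0.5 rounding threshold are our assumptions, not part of the original gist:

# Hypothetical usage sketch (our addition): classify one input pair with
# the trained network. Assumes `nn` and `np` from the script above; the
# 0.5 threshold is our choice, the script only prints raw sigmoid outputs.
sample = np.array([[1, 0]])
probability = nn.forward(sample)        # sigmoid output in (0, 1)
label = int(probability[0, 0] > 0.5)    # threshold to a hard 0/1 prediction
print(f"Input {sample[0].tolist()} -> probability {probability[0, 0]:.3f}, label {label}")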