@omaraflak
Last active June 6, 2023 19:41
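A fully connected (dense) layer for a from-scratch neural network: the forward pass computes Y = XW + B, and the backward pass turns the incoming gradient dE/dY into dE/dW and dE/dB for the parameter update, returning dE/dX for the previous layer.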
from layer import Layer
import numpy as np

# inherit from base class Layer
class FCLayer(Layer):
    # input_size = number of input neurons
    # output_size = number of output neurons
    def __init__(self, input_size, output_size):
        self.weights = np.random.rand(input_size, output_size) - 0.5
        self.bias = np.random.rand(1, output_size) - 0.5

    # returns output for a given input
    def forward_propagation(self, input_data):
        self.input = input_data
        self.output = np.dot(self.input, self.weights) + self.bias
        return self.output

    # computes dE/dW, dE/dB for a given output_error=dE/dY. Returns input_error=dE/dX.
    def backward_propagation(self, output_error, learning_rate):
        input_error = np.dot(output_error, self.weights.T)
        weights_error = np.dot(self.input.T, output_error)
        # dBias = output_error (valid for a single sample; sum over the batch otherwise)

        # update parameters
        self.weights -= learning_rate * weights_error
        self.bias -= learning_rate * output_error
        return input_error
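The import at the top pulls Layer from a separate layer module that isn't shown in this gist. A minimal sketch of such a base class, assuming it is a plain interface that stores the layer's input/output and leaves both passes to subclasses:

# base class for all layers (a sketch; the actual layer module may differ)
class Layer:
    def __init__(self):
        self.input = None
        self.output = None

    # computes the output Y of a layer for a given input X
    def forward_propagation(self, input_data):
        raise NotImplementedError

    # computes dE/dX for a given dE/dY (and updates parameters if any)
    def backward_propagation(self, output_error, learning_rate):
        raise NotImplementedError

And, for illustration, a hypothetical single-sample round trip through the layer (the shapes, values, and learning rate are made up):

import numpy as np
from fc_layer import FCLayer  # assuming the class above is saved as fc_layer.py

layer = FCLayer(3, 2)                    # 3 input neurons -> 2 output neurons
x = np.array([[0.5, -0.1, 0.2]])         # input X, shape (1, 3)
y = layer.forward_propagation(x)         # output Y = XW + B, shape (1, 2)

output_error = np.array([[0.1, -0.3]])   # pretend dE/dY from the next layer, shape (1, 2)
input_error = layer.backward_propagation(output_error, learning_rate=0.1)
print(input_error.shape)                 # (1, 3): dE/dX, passed back to the previous layer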