Skip to content

Instantly share code, notes, and snippets.

View akarsh-saxena's full-sized avatar

Akarsh Saxena akarsh-saxena

View GitHub Profile
import numpy as np
def forward_prop(X, params):
"""Performs forward propagation and calculates output value
Arguments
---------
X: array_like
Data
import numpy as np
def initialize_nn(X):
"""Initializes random weights and bias
Arguments
---------
X: array_like
Train Dataset
import numpy as np
def sigmoid(Z):
"""Applies sigmoid function to an array/value
Arguments
---------
Z: float/int/array_like
Original Value
Returns
from scipy.special import xlogy
import numpy as np
def calculate_loss(cache, y):
"""Calculate the entropy loss
Arguments
---------
cache: dict
import numpy as np
def backward_prop(X, y, cache):
"""Performs backward propagation and calculates dw and db
Arguments
---------
X: array_like
Data
import numpy as np
def update_weights(params, changes, learning_rate=0.01):
"""Updates weights of the layers
Arguments
---------
params: dict
Dictionary containing 'w' and 'b'
# Gradient-descent hyperparameters.
epochs = 1000
learning_rate = 5e-3

# Set up random weights/bias for the network from the training data shape.
params = initialize_nn(X_train)

# Standard training loop: forward pass, loss, gradients, weight update.
for epoch in range(epochs):
    cache = forward_prop(X_train, params)
    loss = calculate_loss(cache, y_train)  # kept for inspection/logging
    updates = backward_prop(X_train, y_train, cache)
    params = update_weights(params, updates, learning_rate=learning_rate)