muhammadgaffar / sigmoid.py
Last active October 24, 2018 15:07
sigmoid activation
import numpy as np

def sigmoid(z):
    # logistic sigmoid: maps any real value into (0, 1)
    return 1./(1 + np.exp(-z))

def loss(Y_hat, Y):
    # per-example cross-entropy loss for a binary label Y
    if Y == 1:
        return -np.log(Y_hat)
    if Y == 0:
        return -np.log(1 - Y_hat)
def propagate(w, x, b, y):
    A = sigmoid(np.dot(w.T, x) + b)   # forward propagation
    m = x.shape[1]                    # number of examples
    cost = -np.sum(np.multiply(y, np.log(A)) + np.multiply(1 - y, np.log(1 - A)), axis=1)/m
    dz = A - y                        # derivative of the cost with respect to z
    dw = np.dot(x, dz.T)/m            # derivative of the cost with respect to w
    db = np.sum(dz, axis=1)/m         # derivative of the cost with respect to b
    return dw, db, cost
def optimize(w, b, x, y, num_iterations, learning_rate):
    costs = []                        # record the cost at each iteration
    for i in range(num_iterations):
        dw, db, cost = propagate(w, x, b, y)
        w = w - learning_rate*dw      # gradient-descent update of w
        b = b - learning_rate*db      # gradient-descent update of b
        costs.append(cost)
    return w, b, costs
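
A minimal usage sketch of the logistic-regression routines above, assuming toy data with 2 features and 4 examples (the array shapes and hyperparameters here are illustrative, not from the original gist):

# train a tiny logistic-regression model with the functions above
x = np.array([[0., 1., 2., 3.],
              [1., 0., 1., 2.]])      # inputs, shape (n_features, m) = (2, 4)
y = np.array([[0, 0, 1, 1]])          # labels, shape (1, m)
w = np.zeros((2, 1))                  # weights, shape (n_features, 1)
b = np.zeros((1,))                    # bias
w, b, costs = optimize(w, b, x, y, num_iterations=100, learning_rate=0.1)
predictions = sigmoid(np.dot(w.T, x) + b) > 0.5   # threshold the predicted probabilities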
def linear_forward(A, W, b):
    Z = np.dot(W, A) + b              # linear part of a layer's forward propagation
    assert(Z.shape == (W.shape[0], A.shape[1]))
    cache = (A, W, b)                 # cached for the backward pass
    return Z, cache
def sigmoid(Z):
    A = 1/(1 + np.exp(-Z))
    cache = Z                         # cached for the backward pass
    return A, cache
def relu(Z):
    A = np.maximum(0, Z)
    cache = Z                         # cached for the backward pass
    return A, cache
def relu_backward(dA, cache):
    Z = cache
    dZ = np.array(dA, copy=True)      # just converting dZ to a correct object
    # When z <= 0, the gradient of ReLU is 0, so set dZ to 0 as well
    dZ[Z <= 0] = 0
    assert(dZ.shape == Z.shape)
    return dZ
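
A quick sanity check of the ReLU pair above (the example values are illustrative):

Z = np.array([[-1.0, 0.0, 2.0]])
A, cache = relu(Z)                    # A == [[0., 0., 2.]]
dA = np.ones_like(Z)
dZ = relu_backward(dA, cache)         # dZ == [[0., 0., 1.]]: gradient is zeroed where Z <= 0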
def linear_activation_forward(A_prev, W, b, activation):
    if activation == "sigmoid":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = sigmoid(Z)
    elif activation == "relu":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = relu(Z)
    cache = (linear_cache, activation_cache)   # both caches are needed for backpropagation
    return A, cache
def L_model_forward(X, parameters):
    caches = []
    A = X
    L = len(parameters) // 2          # number of layers in the neural network
    # From layer 1 to L-1 we use ReLU
    for l in range(1, L):
        A_prev = A
        A, cache = linear_activation_forward(A_prev, parameters['W' + str(l)],
                                             parameters['b' + str(l)], "relu")
        caches.append(cache)
    # The output layer L uses sigmoid
    AL, cache = linear_activation_forward(A, parameters['W' + str(L)],
                                          parameters['b' + str(L)], "sigmoid")
    caches.append(cache)
    return AL, caches
def compute_cost(AL, Y):
    m = Y.shape[1]                    # number of examples
    # cross-entropy cost, averaged over the m examples
    cost = -(1./m)*(np.dot(Y, np.log(AL).T) + np.dot(1 - Y, np.log(1 - AL).T))
    cost = np.squeeze(cost)           # turn [[cost]] into a scalar
    assert(cost.shape == ())
    return cost
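
A minimal end-to-end sketch of the L-layer forward pass and cost above, assuming a tiny randomly initialized 2-layer network (the shapes and initialization scale are illustrative, not from the original gist):

np.random.seed(1)
X = np.random.randn(4, 3)             # 4 input features, 3 examples
Y = np.array([[1, 0, 1]])             # binary labels, shape (1, 3)
parameters = {
    'W1': np.random.randn(5, 4) * 0.01, 'b1': np.zeros((5, 1)),
    'W2': np.random.randn(1, 5) * 0.01, 'b2': np.zeros((1, 1)),
}
AL, caches = L_model_forward(X, parameters)   # AL has shape (1, 3)
cost = compute_cost(AL, Y)                    # scalar cross-entropy cost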