This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def sigmoid(z):
    """Logistic sigmoid: map any real z (scalar or array) into (0, 1)."""
    exp_neg = np.exp(-z)
    return 1.0 / (1.0 + exp_neg)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def loss(Y_hat, Y):
    """Return the likelihood assigned to the true binary label.

    Y_hat -- predicted probability that the label is 1
    Y     -- true label, expected to be 0 or 1

    Returns Y_hat when Y == 1, 1 - Y_hat when Y == 0, and
    (implicitly) None for any other Y, as in the original.
    """
    # BUG FIX: the original used assignment (`if Y = 1:`) instead of
    # the comparison operator `==`, which is a SyntaxError in Python.
    if Y == 1:
        return Y_hat
    if Y == 0:
        return 1 - Y_hat
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def propagate(w, x, b, y):
    """One forward/backward pass for logistic regression.

    w -- weights, shape (n_features, 1)   # assumed from np.dot(w.T, x) -- TODO confirm
    x -- input data, shape (n_features, m)
    b -- bias (scalar)
    y -- labels, shape (1, m)

    Returns (dw, db, cost): gradients of the cross-entropy cost with
    respect to w and b, plus the cost itself.
    """
    m = x.shape[1]                       # number of training examples
    A = sigmoid(np.dot(w.T, x) + b)      # forward propagation, shape (1, m)
    # BUG FIX: the second cross-entropy term was (1-y)*(1 - log(A));
    # the correct term is (1-y)*log(1-A).
    cost = -np.sum(y * np.log(A) + (1 - y) * np.log(1 - A), axis=1) / m
    dz = A - y                           # dCost/dz
    dw = np.dot(x, dz.T) / m             # dCost/dw
    db = np.sum(dz, axis=1) / m          # dCost/db
    # BUG FIX: the original computed the gradients but never returned them,
    # although optimize() unpacks `dw, db, cost` from this call.
    return dw, db, cost
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def optimize(w, b, x, y, num_iterations, learning_rate):
    """Train logistic-regression parameters by batch gradient descent.

    w, b            -- initial weights and bias
    x, y            -- training data and labels (shapes as in propagate)
    num_iterations  -- number of gradient-descent steps
    learning_rate   -- step size

    Returns (w, b, costs): the trained parameters and the cost
    recorded at every iteration.
    """
    costs = []  # cost history, one entry per iteration
    for i in range(num_iterations):
        dw, db, cost = propagate(w, x, b, y)
        # BUG FIX: `costs` was created but never populated.
        costs.append(cost)
        w = w - learning_rate * dw  # gradient-descent update of w
        b = b - learning_rate * db  # gradient-descent update of b
    # BUG FIX: the original returned nothing, discarding the trained
    # parameters; returning them is backward-compatible (callers that
    # ignored the old None return still work).
    return w, b, costs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def linear_forward(A, W, b):
    """Linear part of a layer's forward propagation: Z = W·A + b.

    Returns Z together with a cache (A, W, b) kept for the backward pass.
    """
    Z = W @ A + b
    assert Z.shape == (W.shape[0], A.shape[1])
    return Z, (A, W, b)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def sigmoid(Z):
    """Sigmoid activation for a layer.

    Returns (A, cache): the activations 1/(1+e^-Z) and the raw input Z,
    which is kept as the cache for backpropagation.
    """
    cache = Z
    A = np.reciprocal(1.0 + np.exp(-Z))
    return A, cache
def relu(Z):
    """ReLU activation: A = max(0, Z), elementwise.

    Returns (A, cache) where cache is the raw input Z, kept for
    backpropagation (mirroring the (A, cache) contract of sigmoid).
    """
    A = np.maximum(0, Z)
    cache = Z
    # BUG FIX: the original never returned, so callers unpacking
    # `A, activation_cache = relu(Z)` would fail with a TypeError.
    return A, cache
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def relu_backward(dA, cache):
    """Backward pass for ReLU.

    The gradient passes through unchanged where Z > 0 and is zeroed
    where Z <= 0 (ReLU's derivative is 0 there).
    """
    Z = cache
    dZ = np.where(Z > 0, dA, 0)
    assert dZ.shape == Z.shape
    return dZ
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def linear_activation_forward(A_prev, W, b, activation):
    """Forward propagation for one LINEAR -> ACTIVATION layer.

    A_prev     -- activations from the previous layer (or the input X)
    W, b       -- this layer's weight matrix and bias vector
    activation -- "sigmoid" or "relu"

    Returns (A, cache) where cache = (linear_cache, activation_cache),
    stored for the backward pass.
    """
    if activation == "sigmoid":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = sigmoid(Z)
    elif activation == "relu":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = relu(Z)
    # BUG FIX: the visible block ended without assembling the cache or
    # returning, although L_model_forward unpacks (A, cache) from this call.
    cache = (linear_cache, activation_cache)
    return A, cache
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def L_model_forward(X, parameters):
    """Full forward propagation: [LINEAR->RELU] * (L-1) -> LINEAR->SIGMOID.

    X          -- input data, shape (n_features, m)
    parameters -- dict holding 'W1', 'b1', ..., 'WL', 'bL'

    Returns (AL, caches): the final sigmoid activations and the list of
    per-layer caches for backpropagation.

    NOTE(review): the original block was truncated mid-call in this paste;
    the loop body and the final sigmoid layer are reconstructed to match
    the standard pattern implied by the visible code — confirm against the
    complete source.
    """
    caches = []
    A = X
    L = len(parameters) // 2  # number of layers (two entries, W and b, per layer)
    # Layers 1 .. L-1 use ReLU.
    for l in range(1, L):
        A_prev = A
        A, cache = linear_activation_forward(A_prev,
                                             parameters['W' + str(l)],
                                             parameters['b' + str(l)],
                                             "relu")
        caches.append(cache)
    # Output layer L uses sigmoid.
    AL, cache = linear_activation_forward(A,
                                          parameters['W' + str(L)],
                                          parameters['b' + str(L)],
                                          "sigmoid")
    caches.append(cache)
    return AL, caches
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def compute_cost(AL, Y):
    """Cross-entropy cost averaged over m examples.

    AL -- predicted probabilities, shape (1, m)
    Y  -- true binary labels, shape (1, m)

    Returns the cost as a 0-d value (squeezed to a scalar shape).
    """
    m = Y.shape[1]
    log_likelihood = np.dot(Y, np.log(AL).T) + np.dot(1 - Y, np.log(1 - AL).T)
    cost = np.squeeze(-log_likelihood / m)
    assert cost.shape == ()
    return cost
OlderNewer