Skip to content

Instantly share code, notes, and snippets.

@fatalure
Created July 21, 2019 08:20
Show Gist options
  • Save fatalure/66f169b5bc5ba8f8bfda84068034f1d8 to your computer and use it in GitHub Desktop.
# Probability of KEEPING a unit active during training dropout (so units are
# dropped with probability 1 - keep_prob). Used by train_step below.
keep_prob = 0.5
def train_step(X):
    """One training-time forward pass of a 3-layer net with inverted dropout.

    Relies on module-level parameters W1, b1, W2, b2, W3, b3 and the
    constant ``keep_prob`` (assumed weight matrices / bias vectors of
    compatible shapes with ``X`` — TODO confirm against caller).

    Args:
        X: input data, multiplied on the right of W1 (column-vector/matrix
           convention — presumably shape (input_dim, batch); verify).

    Returns:
        The output scores ``np.dot(W3, hidden_layer_2) + b3``. (The original
        snippet computed these but returned None; returning them is
        backward-compatible.)
    """
    # Layer 1: affine transform followed by ReLU.
    hidden_layer_1 = np.maximum(0, np.dot(W1, X) + b1)
    # Inverted dropout: sample a 0/1 keep mask, then scale by 1/keep_prob at
    # *train* time so the expected activation matches test time — predict()
    # below then needs no rescaling at all.
    dropout_mask_1 = np.random.binomial(1, keep_prob, hidden_layer_1.shape) / keep_prob
    hidden_layer_1 *= dropout_mask_1
    # Layer 2: affine + ReLU, with its own independent dropout mask.
    hidden_layer_2 = np.maximum(0, np.dot(W2, hidden_layer_1) + b2)
    dropout_mask_2 = np.random.binomial(1, keep_prob, hidden_layer_2.shape) / keep_prob
    hidden_layer_2 *= dropout_mask_2
    # Output layer: plain affine — no ReLU, no dropout on the scores.
    out = np.dot(W3, hidden_layer_2) + b3
    # backward pass: compute gradients... (not shown)
    # perform parameter update... (not shown)
    return out
def predict(X):
    """Test-time forward pass of the 3-layer net (no dropout).

    Because train_step uses *inverted* dropout (masks already divided by
    keep_prob), the activations here need no extra scaling: the full forward
    pass at test time approximates the ensemble of dropped-out sub-networks.

    Relies on module-level parameters W1, b1, W2, b2, W3, b3.

    Args:
        X: input data, same layout as in train_step.

    Returns:
        The output scores ``np.dot(W3, hidden_layer_2) + b3``. (The original
        snippet computed ``out`` but never returned it — a clear defect for a
        predict function.)
    """
    # ensembled forward pass
    hidden_layer_1 = np.maximum(0, np.dot(W1, X) + b1)
    hidden_layer_2 = np.maximum(0, np.dot(W2, hidden_layer_1) + b2)
    out = np.dot(W3, hidden_layer_2) + b3
    return out
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment