@oiehot
Last active February 18, 2017 02:53
Basic deep learning functions
import numpy as np

def step(x):
    # Step (Heaviside) activation: 1 where x > 0, else 0.
    return np.array(x > 0, dtype=int)

def sigmoid(x):
    # Sigmoid activation: squashes x into the range (0, 1).
    return 1 / (1 + np.exp(-x))

def relu(x):
    # ReLU activation: element-wise max(0, x).
    return np.maximum(0, x)

def softmax(x):
    # Naive softmax: np.exp(x) can overflow for large x.
    exp_x = np.exp(x)
    sum_exp_x = np.sum(exp_x)
    return exp_x / sum_exp_x

def softmax_overflow(x):
    # Numerically stable softmax: subtracting max(x) leaves the result
    # unchanged but keeps np.exp from overflowing.
    c = np.max(x)
    exp_x = np.exp(x - c)
    sum_exp_x = np.sum(exp_x)
    return exp_x / sum_exp_x
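
A minimal usage sketch (the sample inputs below are illustrative, not part of the gist). Because exp(x - c) / sum(exp(x - c)) equals exp(x) / sum(exp(x)) for any constant c, softmax and softmax_overflow compute the same values, but only the latter survives large inputs.

# Usage sketch with made-up example vectors.
x = np.array([0.3, 2.9, 4.0])
print(step(x))                        # [1 1 1]
print(sigmoid(x))                     # values in (0, 1)
print(relu(np.array([-1.0, 0.5])))    # [0.  0.5]
print(softmax(x))                     # probabilities summing to 1.0

big = np.array([1000.0, 1010.0, 990.0])
print(softmax(big))                   # overflow: exp(1000) -> inf, result is nan
print(softmax_overflow(big))          # stable: approx [4.5e-05, 1.0, 2.1e-09]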