Created
February 19, 2018 10:53
-
-
Save huseinzol05/99319635062dbc180752e59f1c7e6f1a to your computer and use it in GitHub Desktop.
Activation functions with their first derivatives
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{ | |
"cells": [ | |
{ | |
"cell_type": "code", | |
"execution_count": 1, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
import numpy as np

# 100x100 uniform random sample matrix for trying the activations below.
# NOTE(review): no random seed is set, so this is not reproducible — confirm
# whether reproducibility matters before relying on these values.
X = np.random.rand(100, 100)
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 2, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
def sigmoid(X, grad=False):
    """Logistic sigmoid 1 / (1 + exp(-X)).

    Parameters
    ----------
    X : array_like
        Input value(s).
    grad : bool
        When True, return the first derivative sigmoid(X) * (1 - sigmoid(X))
        instead of the activation itself.
    """
    if grad:
        s = sigmoid(X)  # evaluate once instead of twice
        return s * (1 - s)
    return 1 / (1 + np.exp(-X))
"\n", | |
def tanh(X, grad=False):
    """Hyperbolic tangent; with grad=True, its derivative 1 - tanh(X)**2.

    Bug fix: the forward branch referenced an undefined lowercase ``x``
    (NameError). It now uses the ``X`` argument via np.tanh, which is also
    numerically stable for large |X| where 2/(1+exp(-2X)) - 1 would overflow.
    """
    if grad:
        return 1 - np.square(tanh(X))
    return np.tanh(X)
" \n", | |
def relu(X, grad=False):
    """Rectified linear unit: X for X >= 0, else 0.

    With grad=True, returns the derivative, which is taken to be 1 at X == 0.
    """
    nonnegative = X >= 0
    if grad:
        return np.where(nonnegative, 1, 0)
    return np.where(nonnegative, X, 0)
"\n", | |
def leakyrelu(X, alpha=0.1, grad=False):
    """Leaky ReLU: X for X >= 0, else alpha * X.

    With grad=True, returns the slope: 1 for X >= 0, alpha otherwise.
    """
    keep = X >= 0
    if grad:
        return np.where(keep, 1, alpha)
    return np.where(keep, X, alpha * X)
" \n", | |
def elu(X, alpha=0.1, grad=False):
    """Exponential linear unit.

    Forward: X for X >= 0, else alpha * (exp(X) - 1).
    grad=True: 1 for X >= 0, else alpha * exp(X).

    Bug fix: both np.where conditions referenced an undefined lowercase
    ``x`` (NameError); they now use the ``X`` argument.
    """
    if grad:
        return np.where(X >= 0, 1, alpha * np.exp(X))
    return np.where(X >= 0, X, alpha * (np.exp(X) - 1))
" \n", | |
def selu(X, grad=False):
    """Scaled exponential linear unit (SELU, Klambauer et al. 2017).

    Forward: scale * X for X >= 0, else scale * alpha * (exp(X) - 1).
    grad=True: scale for X >= 0, else scale * alpha * exp(X).

    Bug fix: both np.where conditions referenced an undefined lowercase
    ``x`` (NameError); they now use the ``X`` argument.
    """
    # Fixed-point constants from the SELU paper (self-normalizing nets).
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946
    if grad:
        return scale * np.where(X >= 0, 1, alpha * np.exp(X))
    return scale * np.where(X >= 0, X, alpha * (np.exp(X) - 1))
" \n", | |
def softplus(X, grad=False):
    """Softplus log(1 + exp(X)); its derivative is the logistic sigmoid.

    grad=True returns 1 / (1 + exp(-X)).

    Bug fix: the forward branch computed np.log(1 + np.exp(X)) but never
    returned it (the function fell through and returned None); the missing
    ``return`` is added.
    """
    if grad:
        return 1 / (1 + np.exp(-X))
    return np.log(1 + np.exp(X))
"\n", | |
def softmax(X, grad=False):
    """Numerically stable softmax over the last axis.

    With grad=True, returns p * (1 - p) where p = softmax(X) — the diagonal
    of the softmax Jacobian (the elementwise form).
    """
    if grad:
        p = softmax(X)
        return p * (1 - p)
    shifted = X - np.max(X, axis=-1, keepdims=True)  # guard against exp overflow
    exp_shifted = np.exp(shifted)
    return exp_shifted / np.sum(exp_shifted, axis=-1, keepdims=True)
" \n", | |
def gaussian(X, grad=False):
    """Gaussian activation exp(-X**2); grad=True gives -2*X*exp(-X**2).

    Bug fixes: the forward branch never returned its value (returned None),
    and both branches used np.square(-X) == X**2 inside the exponential,
    i.e. computed exp(X**2) instead of exp(-X**2). The sign is now applied
    outside the square.
    """
    if grad:
        return -2 * X * np.exp(-np.square(X))
    return np.exp(-np.square(X))
"\n", | |
def isru(X, alpha=0.1, grad=False):
    """Inverse square root unit: X / sqrt(1 + alpha * X**2).

    grad=True returns the derivative (1 / sqrt(1 + alpha * X**2))**3.

    Bug fixes: both branches computed their expression but never returned it
    (returned None), and the forward branch was missing the leading ``X``
    factor — it computed 1/sqrt(1+alpha*X**2), whose derivative does NOT
    match the grad branch; the grad branch (1+alpha*X**2)**(-3/2) is exactly
    the derivative of X/sqrt(1+alpha*X**2), confirming the intended form.
    """
    if grad:
        return np.power(1 / (np.sqrt(1 + alpha * X * X)), 3)
    return X / np.sqrt(1 + alpha * X * X)
" \n", | |
def softsign(X, grad=False):
    """Softsign X / (1 + |X|); grad=True gives 1 / (1 + |X|)**2."""
    denom = 1 + np.abs(X)
    if grad:
        return 1 / np.square(denom)
    return X / denom
" \n", | |
def swish(X, grad=False):
    """Swish activation X * sigmoid(X).

    grad=True returns the derivative sigmoid(X) + X * sigmoid'(X),
    built from the sibling sigmoid() helper.
    """
    if grad:
        return sigmoid(X) + X * sigmoid(X, True)
    return X * sigmoid(X)
" \n", | |
def arctan(X, grad=False):
    """Arctangent activation np.arctan(X); grad=True gives 1 / (X**2 + 1)."""
    return 1 / (np.square(X) + 1) if grad else np.arctan(X)
] | |
} | |
], | |
"metadata": { | |
"kernelspec": { | |
"display_name": "Python 3", | |
"language": "python", | |
"name": "python3" | |
}, | |
"language_info": { | |
"codemirror_mode": { | |
"name": "ipython", | |
"version": 3 | |
}, | |
"file_extension": ".py", | |
"mimetype": "text/x-python", | |
"name": "python", | |
"nbconvert_exporter": "python", | |
"pygments_lexer": "ipython3", | |
"version": "3.5.2" | |
} | |
}, | |
"nbformat": 4, | |
"nbformat_minor": 2 | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment