{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np\n",
"import chainer\n",
"from chainer import cuda, Function, gradient_check, Variable, optimizers, serializers, utils\n",
"from chainer import Link, Chain, ChainList\n",
"import chainer.functions as F\n",
"import chainer.links as L"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"x_data = np.array([[0,0],[0,1],[1,0],[1,1]], dtype=np.float32)\n",
"y_data = np.array([1,0,0,1], dtype=np.int32)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"class MyMLP(Chain):\n",
" def __init__(self):\n",
" super(MyMLP, self).__init__(\n",
" l1 = L.Linear(2,4),\n",
" l2 = L.Linear(4,2),\n",
" )\n",
" def __call__(self, x):\n",
" h1 = F.sigmoid(self.l1(x))\n",
" y = F.sigmoid(self.l2(h1)) #ここで活性化関数を使うかはよくわからないが,とりあえず使う. サンプルだとy = self.l2(h1)\n",
" return y"
]
},
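{
"cell_type": "markdown",
"metadata": {},
"source": [
"The comment in `__call__` leaves open whether the output layer needs an activation. `L.Classifier` uses `softmax_cross_entropy` as its default loss, so the official sample simply returns the raw scores of the last layer. The cell below is a minimal sketch of that variant; the subclass name `MyMLP2` is ours, not part of the original."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Sketch: variant without the output activation. The raw scores of l2\n",
"# are normalized by L.Classifier's default softmax_cross_entropy loss.\n",
"class MyMLP2(MyMLP):\n",
"    def __call__(self, x):\n",
"        h1 = F.sigmoid(self.l1(x))\n",
"        return self.l2(h1)  # e.g. model = L.Classifier(MyMLP2())"
]
},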
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"model = L.Classifier(MyMLP())\n",
"optimizer = optimizers.Adam()\n",
"optimizer.setup(model)"
]
},
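{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, the `optimizer.update(model, x, t)` call used below bundles the forward pass, gradient reset, backprop, and parameter update into one step. The next cell is a minimal sketch of the manual equivalent in Chainer v1, run here on the full dataset."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Manual equivalent of one optimizer.update(model, x, t) call (Chainer v1).\n",
"x = Variable(x_data)\n",
"t = Variable(y_data)\n",
"loss = model(x, t)   # forward pass; L.Classifier computes softmax cross entropy\n",
"model.zerograds()    # clear accumulated gradients\n",
"loss.backward()      # backpropagate\n",
"optimizer.update()   # apply one Adam step to the parameters\n",
"print loss.data"
]
},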
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"*** epoch 0 ***\n",
"***input data ***\n",
"[[ 0. 0.]\n",
" [ 0. 1.]]\n",
"*** loss ***\n",
"0.692005055344\n",
"\n",
"*** epoch 1000 ***\n",
"***input data ***\n",
"[[ 0. 0.]\n",
" [ 0. 1.]]\n",
"*** loss ***\n",
"0.691218764877\n",
"\n",
"*** epoch 2000 ***\n",
"***input data ***\n",
"[[ 0. 1.]\n",
" [ 1. 0.]]\n",
"*** loss ***\n",
"0.675955461836\n",
"\n",
"*** epoch 3000 ***\n",
"***input data ***\n",
"[[ 0. 1.]\n",
" [ 1. 0.]]\n",
"*** loss ***\n",
"0.635834552467\n",
"\n",
"*** epoch 4000 ***\n",
"***input data ***\n",
"[[ 1. 1.]]\n",
"*** loss ***\n",
"0.591571424758\n",
"\n",
"*** epoch 5000 ***\n",
"***input data ***\n",
"[[ 1. 0.]\n",
" [ 1. 1.]]\n",
"*** loss ***\n",
"0.529785670263\n",
"\n"
]
}
],
"source": [
"batchsize = 2\n",
"datasize = 4\n",
"for epoch in range(10000):\n",
" #print \"epoch {}\".format(epoch)\n",
" index = np.random.permutation(datasize)\n",
" for i in range(0, datasize, batchsize):\n",
" x = Variable(x_data[index[i : i + batchsize]])\n",
" t = Variable(y_data[index[i : i + batchsize]])\n",
" optimizer.update(model, x, t)\n",
" sum_loss, sum_accuracy = 0, 0\n",
" if epoch % 1000 == 0:\n",
" for i in range(0, 10000, batchsize):\n",
" k = np.random.randint(datasize)\n",
" x = Variable(x_data[k:k+batchsize])\n",
" t = Variable(y_data[k:k+batchsize])\n",
" loss = model(x,t)\n",
" sum_loss += loss.data * batchsize\n",
" mean_loss = sum_loss / 10000\n",
" \n",
" print \"*** epoch {} ***\".format(epoch)\n",
" print \"***input data ***\"\n",
" print \"{}\".format(x.data)\n",
" print \"*** loss ***\"\n",
" print mean_loss\n",
" print"
]
},
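{
"cell_type": "markdown",
"metadata": {},
"source": [
"After training, we can check what the network actually learned. The cell below is a minimal sketch, assuming the training cell above has run: it feeds all four inputs through `model.predictor` (the wrapped `MyMLP` instance) and takes the argmax over the two output units as the predicted label."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Feed all four inputs through the trained network at once.\n",
"# model.predictor is the MyMLP instance wrapped by L.Classifier;\n",
"# argmax over the two output units gives the predicted label per row.\n",
"y = model.predictor(Variable(x_data))\n",
"predicted = np.argmax(y.data, axis=1)\n",
"print \"inputs:\\n{}\".format(x_data)\n",
"print \"predicted labels: {}\".format(predicted)\n",
"print \"true labels:      {}\".format(y_data)"
]
},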
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.11"
}
},
"nbformat": 4,
"nbformat_minor": 0
}