#encoding: utf-8
#
# Copyright (c) 2016 chainer_nlp_man
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
#
import numpy as np  # use NumPy under the name np
import chainer      # use Chainer
from chainer import cuda, Function, gradient_check, Variable, optimizers, serializers, utils  # shortened names
from chainer import Link, Chain, ChainList
import chainer.functions as F  # make functions available as F
import chainer.links as L      # make links available as L
# 2 inputs, 2 outputs
# [when nobias_flag is True]
#  x h y
# -o-o-o-
#   x x
# -o-o-o-
# (when False, a bias node is added to each of the x and h layers)
# MLP: multi-layer perceptron
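# Note: XOR is not linearly separable, so a single linear layer cannot
# fit it; the hidden layer with a nonlinear activation below is what
# makes the problem learnable.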
class MLP(Chain):
    def __init__(self, nobias_flag):
        super(MLP, self).__init__(
            l1=L.Linear(2, 2, nobias=nobias_flag),  # 2 input units -> 2 output units
            l2=L.Linear(2, 2, nobias=nobias_flag),  # 2 input units -> 2 output units
        )
        self.nobias_flag = nobias_flag

    def __call__(self, x):
        h = F.sigmoid(self.l1(x))  # multi-layer nets need a nonlinearity such as sigmoid
        y = self.l2(h)
        return y

    def dump(self):
        # Print the learned weights (and biases, when present)
        print(self.l1.W.data)
        if not self.nobias_flag:
            print(self.l1.b.data)
        print(self.l2.W.data)
        if not self.nobias_flag:
            print(self.l2.b.data)
class Classifier(Chain):
    def __init__(self, predictor):
        super(Classifier, self).__init__(
            predictor=predictor
        )

    def __call__(self, x, t):
        y = self.predictor(x)
        self.loss = F.softmax_cross_entropy(y, t)
        self.accuracy = F.accuracy(y, t)
        return self.loss
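# Note: F.softmax_cross_entropy applies softmax internally, so the
# predictor returns raw scores (logits); F.softmax is only applied
# explicitly below when inspecting the output probabilities.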
# Prepare the model: an MLP with biases (nobias_flag=False) wrapped in a classifier
model = Classifier(MLP(False))
optimizer = optimizers.Adam()
optimizer.setup(model)  # register the model's parameters with the optimizer
# Training loop
loss_value = 100000
cnt = 0
while loss_value > 1e-5:
    # Training data: the XOR truth table
    x = Variable(np.array([[0, 0], [1, 0], [0, 1], [1, 1]], dtype=np.float32))
    t = Variable(np.array([0, 1, 1, 0], dtype=np.int32))
    # One training step
    model.zerograds()   # reset gradients to zero
    loss = model(x, t)
    loss_value = loss.data
    loss.backward()     # compute gradients by backpropagation
    optimizer.update()
    cnt += 1
    if cnt % 1000 == 0:
        # Print intermediate results
        y = F.softmax(model.predictor(x))
        print("=====iter = {0}, loss = {1}=====".format(cnt, loss_value))
        print("---output value---")
        print(y.data)
        print("---result---")
        print(y.data.argmax(1))
        print("---dump---")
        model.predictor.dump()

# Save the trained model to a file
serializers.save_npz('my_xor.model', model)
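# A minimal sketch of reloading the saved model (assumed usage, not in the
# original gist): construct a network of the same shape, then restore the
# saved parameters into it with serializers.load_npz.
loaded_model = Classifier(MLP(False))
serializers.load_npz('my_xor.model', loaded_model)
test_x = Variable(np.array([[0, 1]], dtype=np.float32))
print(F.softmax(loaded_model.predictor(test_x)).data.argmax(1))  # expected: [1]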