@masayuki5160 · Last active February 20, 2017
Solving logical operations with Chainer

Overview

Train small networks in Chainer to learn and solve logical operations (OR and XOR).

env

Reference (nearly the same setup steps, except scikit-learn is not needed and chainer is installed with pip instead): https://gist.github.com/masayuki5160/6ab3006bff0e398b1c92ed0f9ded4aee

$ sudo pip install chainer
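
To confirm the install, printing the version is a quick sanity check (any Chainer 1.x release should run the scripts below):

$ python -c "import chainer; print(chainer.__version__)"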

Run notes

A single-layer perceptron (xor_fail.py, listed below) cannot solve the XOR operation. The output looks like this:

('loss:', array(0.2500000596046448, dtype=float32))
('loss:', array(0.2500000596046448, dtype=float32))
[[ 0.4996742 ]
[ 0.49994594]
[ 0.49995193]
[ 0.5002237 ]]
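
All four outputs collapse to about 0.5 (and the loss to 0.25, the squared error of the constant 0.5 against the targets 0, 1, 1, 0): the best a linear model can do, because XOR is not linearly separable. The standard argument: a single unit computes \sigma(w_1 x_1 + w_2 x_2 + b), which exceeds 0.5 exactly when w_1 x_1 + w_2 x_2 + b > 0. XOR would need

    b < 0,   w_1 + b > 0,   w_2 + b > 0,   w_1 + w_2 + b < 0,

but adding the middle two inequalities gives w_1 + w_2 + 2b > 0, hence w_1 + w_2 + b > -b > 0, contradicting the last one.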

Adding a layer (xor_chainer.py, listed below) makes it solvable:

('loss:', array(5.806828085042071e-07, dtype=float32))
[[ 3.73005867e-04]
[ 9.99323368e-01]
[ 9.99324203e-01]
[ 1.12652779e-03]]
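
Thresholding those outputs at 0.5 recovers the XOR truth table. A minimal check, using the numbers printed above:

import numpy as np

# Outputs printed by xor_chainer.py (copied from the run above).
y = np.array([[3.73005867e-04],
              [9.99323368e-01],
              [9.99324203e-01],
              [1.12652779e-03]])

# Threshold at 0.5: prints [0 1 1 0], the XOR truth table.
print((y > 0.5).astype(int).ravel())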
The three scripts follow. First, for comparison, a single-layer perceptron trained on OR; this one works, because OR is linearly separable:

import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import Variable, optimizers, Chain


class Model(Chain):
    def __init__(self):
        super(Model, self).__init__(
            l1=L.Linear(2, 1),
        )

    def __call__(self, x):
        # Single linear layer followed by a sigmoid activation.
        return F.sigmoid(self.l1(x))


model = Model()
optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
optimizer.setup(model)

# OR truth table: four input pairs and their targets.
x = Variable(np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32))
t = Variable(np.array([[0], [1], [1], [1]], dtype=np.float32))

for i in range(3000):
    optimizer.zero_grads()  # Chainer 1.x API: reset accumulated gradients
    y = model(x)
    loss = F.mean_squared_error(y, t)
    loss.backward()
    optimizer.update()

print("loss:", loss.data)
print(y.data)
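
To see the line the OR perceptron has learned, you can print the link's parameters after the training loop (a small optional addition; L.Linear stores the weight as W and the bias as b):

# Append after the training loop in the OR script above.
print("W:", model.l1.W.data)  # weight matrix, shape (1, 2)
print("b:", model.l1.b.data)  # bias vector, shape (1,)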
xor_chainer.py: adding a hidden layer makes XOR learnable.

import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import Variable, optimizers, Chain


class Model(Chain):
    def __init__(self):
        super(Model, self).__init__(
            l1=L.Linear(2, 2),  # hidden layer: 2 inputs -> 2 units
            l2=L.Linear(2, 1),  # output layer: 2 units -> 1 output
        )

    def __call__(self, x):
        # The sigmoid on the hidden layer provides the non-linearity
        # that XOR requires.
        h = F.sigmoid(self.l1(x))
        return self.l2(h)


model = Model()
optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
optimizer.setup(model)

# XOR truth table: four input pairs and their targets.
x = Variable(np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32))
t = Variable(np.array([[0], [1], [1], [0]], dtype=np.float32))

for i in range(3000):
    optimizer.zero_grads()  # Chainer 1.x API: reset accumulated gradients
    y = model(x)
    loss = F.mean_squared_error(y, t)
    loss.backward()
    optimizer.update()

print("loss:", loss.data)
print(y.data)
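
A version note: these scripts target the Chainer 1.x API that was current when this memo was written. In later releases, optimizer.zero_grads() was deprecated in favor of clearing gradients on the model itself (an assumption about newer versions; check your installed release):

# In newer Chainer releases (assumed 1.15 or later), replace
# optimizer.zero_grads() with:
model.cleargrads()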
xor_fail.py: the same single-layer perceptron as the OR script, but with XOR targets; this is the version that cannot learn the task.

import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import Variable, optimizers, Chain


class Model(Chain):
    def __init__(self):
        super(Model, self).__init__(
            l1=L.Linear(2, 1),
        )

    def __call__(self, x):
        # Single linear layer followed by a sigmoid activation.
        return F.sigmoid(self.l1(x))


model = Model()
optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
optimizer.setup(model)

# XOR truth table: four input pairs and their targets.
x = Variable(np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32))
t = Variable(np.array([[0], [1], [1], [0]], dtype=np.float32))

for i in range(3000):
    optimizer.zero_grads()  # Chainer 1.x API: reset accumulated gradients
    y = model(x)
    loss = F.mean_squared_error(y, t)
    loss.backward()
    optimizer.update()

print("loss:", loss.data)
print(y.data)
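
Assuming the files are saved under the names used above, each script runs standalone, e.g.:

$ python xor_fail.py
$ python xor_chainer.py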