Skip to content

Instantly share code, notes, and snippets.

@paraya3636
Created February 14, 2017 04:30
Show Gist options
  • Save paraya3636/33b8b409153e6973346c890a87fe7c11 to your computer and use it in GitHub Desktop.
Learning the AND operation (AND演算) with Python 3.5 & Chainer 1.12
#!/usr/bin/env python
# coding:utf-8
import numpy as np
import chainer.links as L
from chainer import functions as F
from chainer import Variable, optimizers, Chain
class Model(Chain):
    """Single-layer perceptron: 2 inputs -> 1 sigmoid output.

    Trained below on the AND truth table, so the sigmoid output
    approaches 1 only when both inputs are 1.
    """

    def __init__(self):
        # Chainer v1 style: child links are registered by passing them
        # as keyword arguments to Chain.__init__.
        super(Model, self).__init__(l1=L.Linear(2, 1))

    def __call__(self, x):
        # Forward pass: affine transform then sigmoid activation.
        return F.sigmoid(self.l1(x))
# Model and optimizer definition.
model = Model()
optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
optimizer.setup(model)

# Number of training iterations.
times = 5000

# Training inputs: all four combinations of two binary inputs.
# NOTE: the original wrapped these literals in np.atleast_2d, but the
# nested-list literals are already 2-D, so the wrapper was a no-op
# and has been removed.
x = Variable(np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32))

# Targets: logical AND of the two inputs (one column vector per sample).
t = Variable(np.array([[0], [0], [0], [1]], dtype=np.float32))
# Training loop.
for i in range(times):
    # Reset gradients accumulated in the previous iteration
    # (Chainer v1 API).
    optimizer.zero_grads()
    # Forward pass: model prediction for all four input patterns.
    y = model(x)
    # Mean-squared-error loss against the AND targets.
    # Uses the functional wrapper F.mean_squared_error rather than
    # instantiating the F.MeanSquaredError class per iteration — the
    # idiomatic Chainer API, and one less object allocation per step.
    loss = F.mean_squared_error(y, t)
    # Backpropagate the loss through the network.
    loss.backward()
    # Apply the MomentumSGD parameter update.
    optimizer.update()

# Display the final loss and the model's answers for the four inputs.
# NOTE(review): the gist's indentation was lost; these prints may have
# been inside the loop originally (printing every iteration). They are
# emitted once here, after training — confirm against the intent.
print("loss:", loss.data)
print(y.data)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment