@ShigekiKarita
Forked from joumyakun/simple_perceptron.py
Last active February 26, 2016 03:03
add nonlinearity
Sample output (final epoch, XOR data):
---epoch 9---
source label:-1, data:[0 0 0 0 0 0 0 0]
predict label:-1, weight:[-0.39016744 1.07635597 0.21081006 0.18976978 -0.7392488 0.20338442
-1.12607911 -1.17872689 0.41622946]
OK
source label:1, data:[0 1 0 0 0 0 0 1]
predict label:1, weight:[-0.39016744 1.07635597 0.21081006 0.18976978 -0.7392488 0.20338442
-1.12607911 -1.17872689 0.41622946]
OK
source label:1, data:[1 0 1 0 0 0 0 0]
predict label:1, weight:[-0.39016744 1.07635597 0.21081006 0.18976978 -0.7392488 0.20338442
-1.12607911 -1.17872689 0.41622946]
OK
source label:-1, data:[1 1 1 1 1 1 1 1]
predict label:-1, weight:[-0.39016744 1.07635597 0.21081006 0.18976978 -0.7392488 0.20338442
-1.12607911 -1.17872689 0.41622946]
OK
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
パーセプトロンの実験
参考 荒木雅弘, フリーソフトで作る音声認識システム
"""
import numpy as np
import matplotlib.pyplot as plt
# Initialize the weight matrix with uniform random values in [-1, 1)
def init_weights(i, j):
    return np.random.uniform(-1.0, 1.0, (i, j))

# Prepend a constant bias term to the input vector
def add_bias(d):
    return np.concatenate(([1], d))

# Linear discriminant function: weighted sum of the inputs
def function(w, x):
    return np.dot(w, x)

# Recursively build higher-order feature products via outer products
# (upper-triangular entries only, to avoid duplicate pairs)
def comb(x, n=1):
    return x if n == 0 else comb(np.outer(x, x)[np.triu_indices(len(x))], n - 1)

# Nonlinear feature map: the original inputs plus their degree-4 combinations
def nonlinearity(x):
    return np.concatenate((x, comb(x, 2)))
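
# Worked example: for x = [1, 2], comb(x, 1) gives the quadratic terms
# [1, 2, 4], and comb(x, 2) gives their pairwise products [1, 2, 4, 4, 8, 16]
# (degree-4 monomials), so nonlinearity(x) has 2 + 6 = 8 components and the
# weight vector (with bias) has 9, matching the sample output above.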

# One perceptron step: predict with the current weights, update on a mistake
def train(d, l, w):
    rho = 0.5  # learning rate
    f = np.sign(function(w, d))  # predicted label, -1 or +1
    if f != l:
        # perceptron update rule: w <- w + rho * label * input
        w += l * rho * d
    return f, w
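
# Example update (illustrative numbers): with rho = 0.5, if w = [0, 0, 0],
# the biased input is d = [1, 0, 1] and the true label is l = +1, then
# sign(w.d) = 0 != +1, so the rule gives w = [0.5, 0, 0.5]; the next time this
# sample is seen, w.d = 1 > 0 and it is classified correctly.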
"""
#AND
datas_and_labels = [
[np.array([0, 0]),np.array([-1])],
[np.array([0, 1]),np.array([-1])],
[np.array([1, 0]),np.array([-1])],
[np.array([1, 1]),np.array([1])],
]
"""
"""
#OR
datas_and_labels = [
[np.array([0, 0]),np.array([-1])],
[np.array([0, 1]),np.array([1])],
[np.array([1, 0]),np.array([1])],
[np.array([1, 1]),np.array([1])],
]
"""
#XOR
datas_and_labels = [
[np.array([0, 0]),np.array([-1])],
[np.array([0, 1]),np.array([1])],
[np.array([1, 0]),np.array([1])],
[np.array([1, 1]),np.array([-1])],
]
"""
# Linearly separable
datas_and_labels = [
[np.array([-1, 0.8]), np.array([-1])],
[np.array([-1.2, 0.5]),np.array([-1])],
[np.array([-0.7, 0.2]),np.array([-1])],
[np.array([0.2, -1]), np.array([-1])],
[np.array([0.3, -0.8]),np.array([-1])],
[np.array([-0.3, 1]), np.array([1])],
[np.array([-0.2, 0.9]), np.array([1])],
[np.array([0.5, 0.6]), np.array([1])],
[np.array([0.7, -0.1]), np.array([1])],
[np.array([1, -0.4]), np.array([1])],
]
"""
# initialize the weights (one per expanded feature, plus the bias)
nl_len = len(nonlinearity(datas_and_labels[0][0])) + 1
weights = init_weights(1, nl_len)
# training
for epoch in range(10):
    print("---epoch {}---".format(epoch))
    for data, label in datas_and_labels:
        data = nonlinearity(data)
        data = add_bias(data)
        predict, weights = train(data, label, weights)
        print("source label:{l}, data:{d}".format(l=label[0], d=data[1:]))
        print("predict label:{p}, weight:{w}".format(p=int(predict[0]), w=weights[0]))
        print("{j}".format(j="OK" if predict == label else "NG"))
# plot area
xmax = 2.0
ymax = 2.0
# decision surface
h = 0.3
xx, yy = np.meshgrid(np.arange(-xmax, xmax, h),
                     np.arange(-ymax, ymax, h))
area = np.c_[xx.ravel(), yy.ravel()]
area = np.apply_along_axis(nonlinearity, 1, area)
area = np.c_[np.ones(len(area)), area]  # bias column first, matching add_bias()
predicts = (weights.dot(area.T)).reshape(xx.shape)
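
# Note: predicts holds the raw score w . phi(x) at each grid point; the learned
# decision boundary is its zero level set, and plt.contour below draws a few
# level curves of that score (the curve near 0 separates the two classes).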
plt.contour(xx, yy, predicts)
# data points and their labels
for d, l in datas_and_labels:
    plt.scatter(d[0], d[1], c="r" if l > 0 else "b")
plt.xlim(-xmax, xmax)
plt.ylim(-ymax, ymax)
plt.show()
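
# Optional sanity check (a minimal sketch using the functions defined above):
# re-classify the training points with the learned weights in the expanded
# feature space and print the true vs. predicted labels.
for data, label in datas_and_labels:
    x = add_bias(nonlinearity(data))
    print("input:{d} true:{l} predicted:{p}".format(
        d=data, l=label[0], p=int(np.sign(weights.dot(x))[0])))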