# -*- coding: utf-8 -*-
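"""Rosenblatt perceptron trained on two Iris classes (setosa vs. versicolor).

The script plots the raw training data, the number of misclassifications per
epoch, and the decision regions learned by the perceptron.
"""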
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import datasets
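

# Minimal perceptron classifier: learns a linear decision boundary with the
# error-driven update rule w <- w + eta * (target - prediction) * x.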
class Perceptron(object):
    def __init__(self, eta=0.01, n_iter=10):
        self.eta = eta          # learning rate
        self.n_iter = n_iter    # number of passes over the training set

    def fit(self, X, y):
        # w_[0] is the bias term; w_[1:] are the feature weights
        self.w_ = np.zeros(1 + X.shape[1])
        self.errors_ = []
        for _ in range(self.n_iter):
            errors = 0
            for xi, target in zip(X, y):
                # update is non-zero only when the sample is misclassified
                update = self.eta * (target - self.predict(xi))
                self.w_[1:] += update * xi
                self.w_[0] += update
                errors += int(update != 0.0)
            self.errors_.append(errors)
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        return np.where(self.net_input(X) >= 0.0, 1, -1)
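

# Plot the classifier's decision regions over a 2-D feature grid and overlay
# the training samples.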
def plot_decision_regions(X, y, classifier, resolution=0.02):
    # one marker and color per class label
    markers = ('s', 'x')
    colors = ('red', 'blue')
    cmap = ListedColormap(colors[:len(np.unique(y))])

    # evaluate the classifier on a grid spanning the feature space
    x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                           np.arange(x2_min, x2_max, resolution))
    Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T).reshape(xx1.shape)
    plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
    plt.xlim(xx1.min(), xx1.max())
    plt.ylim(xx2.min(), xx2.max())

    # overlay the training samples
    for idx, cl in enumerate(np.unique(y)):
        plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1], alpha=0.8,
                    color=cmap(idx), marker=markers[idx], label=cl)
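

# Load the data, train the perceptron, and produce the three plots.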
def main():
    # the first 100 Iris samples are setosa (0) and versicolor (1);
    # use sepal length (column 0) and petal length (column 2) as features
    iris = datasets.load_iris()
    X = iris.data[:100, [0, 2]]
    y = iris.target[:100]
    # relabel class 0 as -1 so the targets match the perceptron's {-1, 1} output
    y = np.where(y == 0, -1, 1)

    # scatter plot of the raw training data
    plt.scatter(X[:50, 0], X[:50, 1], color="red", marker="x", label="setosa")
    plt.scatter(X[50:100, 0], X[50:100, 1], color="blue", marker="*", label="versicolor")
    plt.xlabel("sepal length [cm]")
    plt.ylabel("petal length [cm]")
    plt.legend(loc="upper left")
    plt.show()

    # train the perceptron and plot the misclassification count per epoch
    ppn = Perceptron(eta=0.1, n_iter=10).fit(X, y)
    plt.plot(range(1, len(ppn.errors_) + 1), ppn.errors_, marker="o")
    plt.xlabel("Epochs")
    plt.ylabel("Number of misclassifications")
    plt.show()

    # visualize the learned decision boundary
    plot_decision_regions(X, y, classifier=ppn)
    plt.xlabel("sepal length [cm]")
    plt.ylabel("petal length [cm]")
    plt.legend(loc="upper left")
    plt.show()
if __name__ == "__main__":
    main()