Skip to content

Instantly share code, notes, and snippets.

@daien
Created March 6, 2012 22:02
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save daien/1989253 to your computer and use it in GitHub Desktop.
MLR classification 2D toy examples
# -*- coding: utf-8 -*-
# <nbformat>3</nbformat>
# <codecell>
import numpy as np
import pylab as pl
import sklearn.datasets as skdata
import multinomial_logistic_regression as mlr
# <markdowncell>
# Binary classification
# =====================
# <codecell>
# Generate a 2-feature binary toy problem and fit the custom
# multinomial-logistic-regression classifier on it.
X, y = skdata.make_classification(
    n_features=2, n_redundant=0, random_state=42)
clf = mlr.MLR(ss=1., weighted=False, seed=1).fit(X, y)

# Analytic decision line through the origin derived from the learned
# weights.
# NOTE(review): this indexing assumes clf.W_ is an
# (n_features, n_classes) weight matrix and that the model has no
# intercept term -- confirm against the MLR implementation.
nx, ny = 100, 100
inds = pl.linspace(-1, 1, nx)
dec_hyperplane = inds * clf.W_[0, 1] / clf.W_[1, 0]

# Plot bounds: the data range padded by 5%.
x_min, x_max = X[:, 0].min() * 1.05, X[:, 0].max() * 1.05
y_min, y_max = X[:, 1].min() * 1.05, X[:, 1].max() * 1.05

# Keep only the portion of the line that falls inside the y-range
# (elementwise boolean AND, written with `&` rather than `*`).
_idxs = np.nonzero((y_min <= dec_hyperplane) & (dec_hyperplane <= y_max))
inds = inds[_idxs]
dec_hyperplane = dec_hyperplane[_idxs]

pl.figure()
# Evaluate the class-1 probability on a dense grid to color the
# background; the P = 0.5 iso-contour is the learned decision boundary.
xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx),
                     np.linspace(y_min, y_max, ny))
Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])
Z = Z[:, 1].reshape(xx.shape)
pl.pcolormesh(xx, yy, Z, cmap=pl.cm.jet)
pl.contour(xx, yy, Z, [0.5], linewidths=2., colors='k')
pl.scatter(X[:, 0], X[:, 1], marker='o', c=y, cmap=pl.cm.cool, s=50)
pl.plot(inds, dec_hyperplane)
pl.axis('off')
# Single-argument print with parentheses is valid under both
# Python 2 and Python 3 (the original `print clf.W_` is 2-only).
print(clf.W_)
pl.show()
# <markdowncell>
# Multiclass classification
# =========================
# <codecell>
# Generate a 2-feature, 3-class toy problem and fit the classifier.
X, y = skdata.make_classification(
    n_features=2, n_redundant=0, n_classes=3, n_clusters_per_class=1,
    random_state=42)
clf = mlr.MLR(ss=1., weighted=False, seed=1).fit(X, y)

h = .02  # step size in the mesh
colors = "bry"  # one matplotlib color letter per class

pl.figure()
# Create a mesh covering the data with a 1-unit margin on every side.
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                     np.arange(y_min, y_max, h))
# Set the colormap once (the original called set_cmap twice with the
# same map; the duplicate was redundant).
pl.set_cmap(pl.cm.Paired)

# Plot the decision boundary: assign a color to each point of the mesh
# [x_min, x_max] x [y_min, y_max] according to the predicted class.
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot.
Z = Z.reshape(xx.shape)
cs = pl.contourf(xx, yy, Z)
#pl.axis('tight')

# Overlay the training points, one color per class.
# NOTE(review): assumes clf.classes holds the fitted class labels in
# a stable order -- confirm against the MLR implementation.
for i, color in zip(clf.classes, colors):
    idx = np.where(y == i)
    pl.scatter(X[idx, 0], X[idx, 1], c=color)

pl.title("Decision surface of Multinomial Logistic Regression")
pl.axis('off')
# Single-argument print with parentheses is valid under both
# Python 2 and Python 3 (the original `print clf.W_` is 2-only).
print(clf.W_)
pl.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment