# Kernel approximation with SVMs.
#
# Compare an exact RBF-kernel SVC against linear SVCs trained on explicit
# approximate RBF feature maps (random Fourier features / Nystroem), then
# visualize the decision surfaces projected onto the first two principal
# components of the training data.
import matplotlib.pyplot as plt
import numpy as np

from sklearn import pipeline, svm
from sklearn.decomposition import PCA
from sklearn.kernel_approximation import Nystroem, RBFSampler
# Exact RBF-kernel SVM (the accuracy reference) and a plain linear SVM
# (the speed reference, no kernel at all).
kernel_svm = svm.SVC(gamma=.2)
linear_svm = svm.LinearSVC()

# Approximate RBF feature maps: random Fourier features and Nystroem
# subsampling, both matched to the exact kernel's gamma.
# NOTE(review): SEED, data, train_lbls and val_lbls are assumed to be
# defined earlier in the file (SEED an int, data a dict with 'train'/'test'
# arrays) — confirm against the full script.
feature_map_fourier = RBFSampler(gamma=.2, random_state=SEED)
feature_map_nystroem = Nystroem(gamma=.2, random_state=SEED)

# Each approximate model is "explicit feature map -> linear SVM".
fourier_approx_svm = pipeline.Pipeline([("feature_map", feature_map_fourier),
                                        ("svm", svm.LinearSVC())])
nystroem_approx_svm = pipeline.Pipeline([("feature_map", feature_map_nystroem),
                                         ("svm", svm.LinearSVC())])

# Fit every model on the training split and report held-out accuracy.
# (The original paste had the .fit() calls fused onto the print lines.)
kernel_svm.fit(data['train'], train_lbls)
print(kernel_svm.score(data['test'], val_lbls))

linear_svm.fit(data['train'], train_lbls)
print(linear_svm.score(data['test'], val_lbls))

nystroem_approx_svm.fit(data['train'], train_lbls)
print(nystroem_approx_svm.score(data['test'], val_lbls))

fourier_approx_svm.fit(data['train'], train_lbls)
print(fourier_approx_svm.score(data['test'], val_lbls))
# Project the training data onto its principal components so the decision
# surface can be drawn in 2-D.
pca = PCA(n_components=3).fit(data['train'])
X = pca.transform(data['train'])

# Lay out an evenly spaced grid over [-2, 2) in the plane spanned by the
# first two principal directions, then lift it back to input space.
multiples = np.arange(-2, 2, 0.1)

# Outer products give every step along each principal direction.
first = np.outer(multiples, pca.components_[0])    # displacements along PC1
second = np.outer(multiples, pca.components_[1])   # displacements along PC2

# Broadcast-sum the two displacement sets: one input-space point per
# (PC1-step, PC2-step) pair.
grid = first[np.newaxis, :, :] + second[:, np.newaxis, :]
flat_grid = grid.reshape(-1, data['train'].shape[1])
# Subplot titles, ordered to match the classifier tuple below.
# (The original paste had this list unterminated.)
titles = ['SVC with rbf kernel',
          'SVC (linear kernel)\n with Nystroem rbf feature map\n',
          'SVC (linear kernel)\n with Fourier rbf feature map\n']

plt.figure(figsize=(12, 5))

# One subplot per model: predict over the PCA-plane grid and draw the
# resulting decision regions plus the projected training points.
for i, clf in enumerate((kernel_svm, nystroem_approx_svm,
                         fourier_approx_svm)):
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    plt.subplot(1, 3, i + 1)
    Z = clf.predict(flat_grid)

    # Put the result into a color plot: drop the feature axis so Z matches
    # the 2-D grid layout.
    Z = Z.reshape(grid.shape[:-1])
    plt.contourf(multiples, multiples, Z, cmap=plt.cm.Paired)
    plt.axis('off')

    # Plot also the training points, colored by their labels.
    # (The original paste had a doubled comma here.)
    plt.scatter(X[:, 0], X[:, 1], c=train_lbls,
                edgecolors=(0, 0, 0))
    plt.title(titles[i])

plt.tight_layout()
plt.show()