@loiseaujc
Created May 4, 2020 11:07
Implementation of Adaline (ADAptive LInear NEuron) in Python.
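Adaline fits a linear model ŷ = Xw + b to the 0/1 labels and trains it with the delta rule, i.e. full-batch gradient descent on the sum-of-squared-errors cost. The update implemented in fit below is

    w ← w − η Xᵀ(ŷ − y),    b ← b − η Σᵢ (ŷᵢ − yᵢ),

with learning rate η. Predictions are obtained by passing the weighted sum through a shifted Heaviside step.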
# --> Import standard Python libraries.
import numpy as np

# --> Import sklearn utility functions to create derived-class objects.
from sklearn.base import BaseEstimator, ClassifierMixin


# --> Redefine the Heaviside function (thresholded at 0.5, returning ints).
def H(x):
    # NOTE: cast with the built-in int; np.int was removed from recent NumPy releases.
    return np.heaviside(x - 0.5, 1).astype(int)
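
# A quick sanity check (illustrative, not part of the original gist): the
# shifted Heaviside puts the decision threshold at 0.5, midway between the
# 0/1 targets. np.heaviside(0.0, 1) returns its second argument, hence the
# value at exactly 0.5 is 1.
#
# >>> H(np.array([-0.2, 0.3, 0.5, 0.7]))
# array([0, 0, 1, 1])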

class Adaline(BaseEstimator, ClassifierMixin):
    """
    Implementation of Adaline using sklearn BaseEstimator and
    ClassifierMixin.
    """

    def __init__(self, learning_rate=0.001, epochs=100, tol=1e-8):
        # --> Learning rate for the delta rule.
        self.learning_rate = learning_rate

        # --> Maximum number of epochs for the optimizer.
        self.epochs = epochs

        # --> Tolerance for the optimizer.
        self.tol = tol

    def predict(self, X):
        # --> Threshold the weighted sum to get hard 0/1 labels.
        return H(self.weighted_sum(X))

    def weighted_sum(self, X):
        # --> Linear model: X @ w + b.
        return X @ self.weights + self.bias
    def fit(self, X, y):
        """
        Implementation of the Delta rule for training Adaline.

        INPUT
        -----
        X : numpy 2D array. Each row corresponds to one training example.
        y : numpy 1D array. Label (0 or 1) of each example.

        OUTPUT
        ------
        self : The trained Adaline model.
        """
        # --> Number of features.
        n = X.shape[1]

        # --> Initialize the weights and bias.
        self.weights = np.zeros((n,))
        self.bias = 0.0

        # --> Training of Adaline using the Delta rule.
        for _ in range(self.epochs):
            # --> Compute the error of the current linear model.
            error = self.weighted_sum(X) - y

            # --> Update the weights and bias (gradient step on the
            #     sum-of-squared-errors cost).
            self.weights -= self.learning_rate * error @ X
            self.bias -= self.learning_rate * error.sum()

            # --> Check for convergence.
            if np.linalg.norm(error) < self.tol:
                break

        return self
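
# --> Minimal usage sketch (illustrative, not part of the original gist):
# train Adaline on the logical AND function, a linearly separable toy
# problem. The learning rate and epoch count are hypothetical choices
# that happen to converge on this dataset.
if __name__ == "__main__":
    # --> Four binary examples; only (1, 1) is labeled 1.
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([0, 0, 0, 1])

    # --> Fit the model and inspect its predictions.
    model = Adaline(learning_rate=0.1, epochs=1000).fit(X, y)
    print(model.predict(X))   # Expected output: [0 0 0 1]

    # --> ClassifierMixin provides the accuracy score for free.
    print(model.score(X, y))  # Expected output: 1.0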