@RikiyaOta
Created July 11, 2021 07:17
ADALINE implementation by Gradient Descent
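As a rough sketch of what the `fit` method below implements: batch gradient descent on the sum-of-squared-errors cost $J(w) = \tfrac{1}{2}\sum_i \big(y^{(i)} - \phi(z^{(i)})\big)^2$, where $\phi$ is the identity activation and $z^{(i)} = w^{T} x^{(i)} + w_0$. The per-epoch weight update is then

$$
\Delta w_j = \eta \sum_i \big(y^{(i)} - \phi(z^{(i)})\big)\, x_j^{(i)},
\qquad
\Delta w_0 = \eta \sum_i \big(y^{(i)} - \phi(z^{(i)})\big).
$$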
import numpy as np


class AdalineByGradientDescent:
    def __init__(self, eta=0.01, n_iter=10, random_state=1):
        """
        eta: learning rate
        n_iter: number of training iterations (epochs)
        """
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        """
        X: input matrix (one sample per row), shape n_samples (number of samples) x n_features (number of features)
        y: target values, vector of length n_samples
        """
        rgen = np.random.RandomState(self.random_state)
        feature_dimension = X.shape[1]
        # Initialize the weights with small random numbers
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + feature_dimension)
        for i in range(self.n_iter):
            net_input = self.net_input(X)
            output = self.activation(net_input)
            errors = y - output
            # Update the weights
            ## Update w_1 ... w_n
            self.w_[1:] += self.eta * np.dot(X.T, errors)
            ## Update w_0 (the bias term)
            self.w_[0] += self.eta * errors.sum()
        return self

    def net_input(self, X):
        """
        Apply the weights to the input data and return the net input.
        x -> wx
        """
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, net_input):
        """
        Activation function. For ADALINE this is simply the identity function.
        """
        return net_input

    def predict(self, X):
        """
        Make predictions. Class labels are assigned via a unit step function.
        """
        return np.where(self.activation(self.net_input(X)) >= 0.0, 1, -1)
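For reference, a minimal usage sketch of the class above, assuming a two-class problem with labels in {-1, 1}; the tiny dataset and the chosen eta/n_iter values here are made up for illustration only.

# Hypothetical toy dataset: two features per sample, labels in {-1, 1}.
X = np.array([[0.1, 0.2],
              [0.3, 0.1],
              [0.8, 0.9],
              [0.9, 0.7]])
y = np.array([-1, -1, 1, 1])

ada = AdalineByGradientDescent(eta=0.01, n_iter=50, random_state=1)
ada.fit(X, y)
print(ada.w_)          # learned weights (bias term first)
print(ada.predict(X))  # predicted class labels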