Skip to content

Instantly share code, notes, and snippets.

@PatrikValkovic
Created April 17, 2019 10:49
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save PatrikValkovic/b81df097dd1dcaca8ad48faebf3ce4df to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.04.2019 12:35
:Licence MIT
Part of ML
"""
import math
from random import Random
import pandas as pd
import matplotlib.pyplot as plt
NUMBER_OF_RECORDS = 200
MAX_ITERS = 100
DELTA = 0.0001

# Generate training data: two classes of points, each sampled inside a unit
# circle around its own centre (class 0 around (sqrt(2), sqrt(2)), class 1
# around the origin).  Row layout: bias 'D' (always 1), coordinates
# 'X1'/'X2', class label 'Y'.  Note: sampling (uniform angle, uniform
# radius) is denser near the centre — kept as in the original.
r = Random()
rows = []
for origin, label in (((math.sqrt(2), math.sqrt(2)), 0), ((0, 0), 1)):
    for _ in range(NUMBER_OF_RECORDS // 2):
        rot = r.random() * 2 * math.pi  # random direction
        dist = r.random()               # random distance from the centre
        x1 = origin[0] + math.cos(rot) * dist
        x2 = origin[1] + math.sin(rot) * dist
        rows.append([1, x1, x2, label])
# Build the frame in one go: DataFrame.append was removed in pandas 2.0
# and appending row-by-row was O(n^2) anyway.
fr = pd.DataFrame(rows, columns=['D', 'X1', 'X2', 'Y'])
# Shuffle so the two classes are interleaved.
fr = fr.sample(frac=1).reset_index(drop=True)
class Neuron:
    """Single sigmoid neuron trained by normalised gradient steps.

    The weight vector ``w`` is renormalised to unit Euclidean length after
    every step; ``e`` holds the squared error measured on the last batch.
    """

    def __init__(self, dimension, seed=None, init=None):
        """
        :param dimension: number of weights (including the bias weight).
        :param seed: optional seed for random weight initialisation.
        :param init: optional explicit initial weights; takes precedence
                     over random initialisation.
        """
        self._r = Random(seed)
        # Bug fix: the original drew the random weights from the
        # module-level Random instance, so ``seed`` had no effect.
        init = init or [self._r.random() for _ in range(dimension)]
        self.w = pd.Series(init)
        # No error measured yet; +inf keeps any ``e > threshold`` loop
        # running until step() records a real value (the original used the
        # module global NUMBER_OF_RECORDS, coupling the class to the script).
        self.e = float('inf')

    def _normalize(self, vector):
        """Return *vector* scaled to unit Euclidean length."""
        size = math.sqrt(sum(map(lambda x: x ** 2, vector)))
        return pd.Series(map(lambda x: x / size, vector))

    def _x_sum(self, x: pd.Series):
        """Dot product of input vector *x* with the weights."""
        return sum(map(lambda q: q[0] * q[1], zip(x, self.w)))

    def _get_gradient_for(self, i: int, data: pd.DataFrame):
        """Gradient component for weight *i*, summed over all rows of
        *data* (last column is the target 'Y', the rest are inputs).

        NOTE(review): the sigmoid-derivative factor here is e^{-s} rather
        than e^{-s}/(1+e^{-s})^2 — looks like a simplification; confirm
        against the intended derivation before changing.
        """
        return sum(
            data.apply(lambda row:
                       2 *
                       (row['Y'] - 1 / (1 + math.exp(-self._x_sum(row.iloc[:-1])))) *
                       math.exp(-self._x_sum(row.iloc[:-1])) *
                       (-row.iloc[i]),
                       axis=1)
        )

    def _get_gradient(self, data: pd.DataFrame):
        """Full gradient vector, one component per weight."""
        return pd.Series(
            [self._get_gradient_for(i, data) for i in range(self.w.count())]
        )

    def _get_error(self, data: pd.DataFrame):
        """Sum of squared errors of the sigmoid output over *data*."""
        return sum(
            data.apply(lambda row: pow(row['Y'] - 1 / (1 + math.exp(-self._x_sum(row.iloc[:-1]))), 2),
                       axis=1)
        )

    def predict(self, x: pd.Series):
        """Return the sigmoid activation for input vector *x*."""
        return 1 / (1 + math.exp(-self._x_sum(x)))

    def step(self, x: pd.DataFrame, y, alpha=0.0001):
        """One training step on batch (*x*, *y*) with learning rate *alpha*.

        Updates ``self.w`` (renormalised to unit length) and records the
        batch squared error in ``self.e``.
        """
        whole = pd.concat([x, y], axis=1)  # type: pd.DataFrame
        whole.columns = list(x.columns) + ['Y']
        gradient = self._get_gradient(whole)
        self.w = self._normalize(self.w + alpha * gradient)
        self.e = self._get_error(whole)
# Plotting
def plot_points(data):
    """Scatter the two classes: label 0 in red, label 1 in blue."""
    for label, colour in ((0, 'red'), (1, 'blue')):
        subset = data[data['Y'] == label]
        plt.scatter(subset['X1'], subset['X2'], color=colour)
def plot_regressor(neuron: Neuron, x_min, x_max):
    """Draw the neuron's decision boundary and its normal vector.

    The boundary w0 + w1*x1 + w2*x2 = 0 is rewritten as the line
    x2 = slope*x1 + intercept (black); the green segment is the weight
    (normal) vector anchored at the middle of the plotted x-range.

    Parameters were renamed from ``r``/``min``/``max``, which shadowed
    the module-level Random instance and the builtins; all call sites
    are positional, so the rename is backward-compatible.
    """
    if neuron.w[2] != 0:
        slope = -neuron.w[1] / neuron.w[2]
        intercept = -neuron.w[0] / neuron.w[2]
    else:
        # Degenerate case: w2 == 0 means a vertical boundary, which this
        # slope/intercept form cannot represent — fall back to zeros as
        # the original did.
        slope = intercept = 0
    mid = (x_min + x_max) / 2
    plt.plot([x_min, x_max],
             [slope * x_min + intercept, slope * x_max + intercept],
             color='black')
    plt.plot([mid, mid + neuron.w[1]],
             [slope * mid + intercept, slope * mid + intercept + neuron.w[2]],
             color='green')
def plot(data, regressor):
    """Render the data points and the current decision boundary."""
    plt.gca().set_aspect('equal', adjustable='box')
    lo, hi = -1, 3
    plt.xlim(lo, hi)
    plt.ylim(lo, hi)
    plot_regressor(regressor, lo, hi)
    plot_points(data)
    plt.show()
# Train the neuron, replotting after every step.  Stop when the error
# drops below 0.1, stops improving by at least DELTA, or MAX_ITERS passes.
p = Neuron(3, init=(0, 0, 1))
plot(fr, p)
iteration = 0
prev_error = None
while p.e > 0.1:
    p.step(fr.iloc[:, :3], fr.iloc[:, 3], alpha=0.01)
    if prev_error is None:
        # First pass: fabricate a previous error just outside the stop
        # window so the delta test cannot fire immediately.
        prev_error = p.e + 2 * DELTA
    current_delta = math.fabs(prev_error - p.e)
    print(f"Error: {p.e}")
    print(f"Current delta: {current_delta}")
    plot(fr, p)
    if iteration >= MAX_ITERS or current_delta < DELTA:
        break
    iteration += 1
    prev_error = p.e
plot(fr, p)
print(f"Done in {iteration} iterations with {p.e} error")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment