@chaidhat
Created January 11, 2021 14:14
My first ever machine learning project, with 100% of the code written by me. No ML helper libraries used.
import math  # this library helps with mathematics (not actually used below)
import matplotlib.pyplot as plt  # this library helps with visual plotting
# our data points in a 2D matrix
data = [
    [0.5, 1.4],
    [2.3, 1.9],
    [2.9, 3.2],
]
# initialize the loss history used for plotting
lossFormattedY = []
# initialize plotter
fig, (ax1, ax2) = plt.subplots(2)
fig.suptitle('by Chaidhat Chaimongkol')
# initialize our weights
# note: prediction = w1*x + w0  (y = mx + c)
w0 = 1  # y-intercept (c)
w1 = 1  # slope (m)
learningRate = 0.01  # how fast to train the AI
maxEpochs = 500  # maximum number of epochs
for epoch in range(maxEpochs):  # for each epoch
    # initialize our variables
    loss = 0
    dW0 = 0
    dW1 = 0
    for dataPoint in data:  # for each datapoint in data
        dataPointX = dataPoint[0]  # x coordinate of point
        # linear regression (w1*x + w0) (y = mx + c)
        predictedValue = w1 * dataPointX + w0  # our predicted value
        actualValue = dataPoint[1]  # the actual y coordinate of point
        # error calculation
        error = actualValue - predictedValue
        # loss calculation (sum of squared errors over the dataset)
        loss += error ** 2  # loss = SUM OF (actual - predicted) ^ 2
        # derivatives of the loss w.r.t. w0 and w1
        dW0 += -2 * error  # -2 * error
        dW1 += -2 * dataPointX * error  # -2 * x * error
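        # editorial note: derivation of the two gradient terms above (chain rule)
        # loss_i = (actual - (w1*x + w0))^2
        # d(loss_i)/dw0 = 2 * (actual - (w1*x + w0)) * (-1) = -2 * error
        # d(loss_i)/dw1 = 2 * (actual - (w1*x + w0)) * (-x) = -2 * x * error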
    # loss = loss / len(data)  # uncomment to report the mean squared error instead of the sum
    # now flip the gradient vector so the update step points towards the local minimum
    dW0 *= -1
    dW1 *= -1
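    # editorial note: flipping the sign here and adding below is equivalent to the
    # usual gradient-descent update w := w - learningRate * dLoss/dw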
print("epoch", epoch)
print("loss", loss)
print("dW0", dW0)
print("dW1", dW1)
w0 += dW0 * learningRate
w1 += dW1 * learningRate
    # plot data
    # format the data for matplotlib
    dataFormattedX = [i[0] for i in data]  # get all the x coords of data
    dataFormattedY = [i[1] for i in data]  # get all the y coords of data
    # plot the data points
    ax1.plot(dataFormattedX, dataFormattedY, 'ro')
    # plot the current fitted line from x = 0 to the maximum x
    ax1.plot([0, max(dataFormattedX)], [w0, w1 * max(dataFormattedX) + w0], "b-")
    plt.pause(0.05)
    # plot the loss curve
    lossFormattedX = list(range(1, epoch + 2))  # epoch numbers 1 to epoch + 1
    lossFormattedY.append(loss)
    ax2.plot(lossFormattedX, lossFormattedY, "r-")
plt.show()
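As a quick sanity check (not part of the original gist), the same line can also be computed in closed form with the ordinary least-squares formulas; the weights learned by the gradient-descent loop above should converge towards these values. The data list is duplicated and the variable names w0Closed and w1Closed are my own, so the snippet runs on its own.

# closed-form ordinary least squares for y = w1*x + w0 (editorial verification sketch)
data = [
    [0.5, 1.4],
    [2.3, 1.9],
    [2.9, 3.2],
]
n = len(data)
meanX = sum(p[0] for p in data) / n
meanY = sum(p[1] for p in data) / n
# slope = covariance(x, y) / variance(x); intercept follows from the means
w1Closed = sum((p[0] - meanX) * (p[1] - meanY) for p in data) / sum((p[0] - meanX) ** 2 for p in data)
w0Closed = meanY - w1Closed * meanX
print("closed-form w0", w0Closed)
print("closed-form w1", w1Closed)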