Skip to content

Instantly share code, notes, and snippets.

@NBrown140
Created August 12, 2020 02:57
Show Gist options
  • Save NBrown140/4ba8dbf19b58048816d9df55aadbdcba to your computer and use it in GitHub Desktop.
Gradient descent implementation on an arbitrary 2-D function
import math as m
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm
# Gradient-descent hyperparameters.
x_init, y_init = -1.6, -0.09  # starting point of the descent
alpha = 0.0001                # learning rate (step size)
N = 100000                    # number of descent iterations


def f(x, y):
    """Rosenbrock banana function: (1 - x)^2 + 100 (y - x^2)^2.

    Both terms are squares, so f >= 0 everywhere and f(1, 1) == 0 is the
    global minimum. Accepts scalars or NumPy arrays (elementwise).
    """
    return (1 - x)**2 + 100 * (y - x**2)**2


# Other functions to try:
#f = lambda x, y: np.sin(0.5 * np.power(x, 2) - 0.25 * np.power(y, 2) + 3) * np.cos(2 * x + 1 - np.exp(y))
#f = lambda x, y: np.sin(x) * np.cos(y)
def gradient(f, x, y, h=0.001):
    """Numerically estimate the gradient of a 2-D scalar function.

    Uses the symmetric (central) difference quotient, which has O(h^2)
    truncation error versus O(h) for a one-sided difference.

    Args:
        f: callable taking (x, y) and returning a scalar.
        x, y: coordinates of the point at which to evaluate the gradient.
        h: finite-difference step size (default 0.001, matching the
           previously hard-coded value).

    Returns:
        Tuple (df/dx, df/dy) estimated at (x, y).
    """
    grad_x = (f(x + h, y) - f(x - h, y)) / (2 * h)
    grad_y = (f(x, y + h) - f(x, y - h)) / (2 * h)
    return grad_x, grad_y
# Run gradient descent from the initial point, recording every visited
# point so the trajectory can be plotted afterwards.
x, y = x_init, y_init
x_list, y_list = [x_init], [y_init]
print(f"Initial x, y: {x}, {y}")
for step in tqdm(range(N)):
    # Step opposite the local gradient, scaled by the learning rate.
    grad_x, grad_y = gradient(f, x, y)
    x -= alpha * grad_x
    y -= alpha * grad_y
    x_list.append(x)
    y_list.append(y)
# Plot: filled + line contours of f over [-4, 4]^2, with the descent
# trajectory overlaid as small red points.
axis_vals = np.linspace(-4, 4, 101)
X, Y = np.meshgrid(axis_vals, axis_vals)
F = f(X, Y)
for draw in (plt.contourf, plt.contour):
    draw(X, Y, F, 30)
plt.scatter(x_list, y_list, color='r', s=5)
plt.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment