@Uriegas
Created March 11, 2021 02:56
Gradient Descent Algorithm, Multi-variable, Python

This script minimizes f(x, y) = 4x^2 + 30x + 4y^2 - 40y + 2375 by repeatedly stepping against the gradient: theta <- theta - alpha * grad f(theta).
from sympy import Symbol, N
x = Symbol('x')
y = Symbol('y')
z = Symbol('z')
f = 4*(x**2) + 30*x + 4*(y**2) - 40*y + 2375
# First partial derivative with respect to x
fpx = f.diff(x)
# First partial derivative with respect to y
fpy = f.diff(y)
# Gradient
grad = [fpx,fpy]
# Initial data and hyperparameters
theta = 30         # starting x
theta1 = 20        # starting y
alpha = .01        # learning rate (step size)
iterations = 0
precision = 1e-8   # stop when both coordinates move less than this
printData = True
maxIterations = 1000
while True:
    # Evaluate both partial derivatives at the current point and step downhill
    temptheta = theta - alpha*N(fpx.subs(x, theta).subs(y, theta1))
    temptheta1 = theta1 - alpha*N(fpy.subs(x, theta).subs(y, theta1))
    # If the number of iterations grows too large, theta (and/or theta1)
    # may be diverging: stop the loop and investigate.
    iterations += 1
    if iterations > maxIterations:
        print("Too many iterations. Adjust alpha and make sure that the function is convex!")
        printData = False
        break
    # If theta and theta1 change by less than `precision`, our goal is met.
    if abs(temptheta - theta) < precision and abs(temptheta1 - theta1) < precision:
        break
    # Simultaneous update
    theta = temptheta
    theta1 = temptheta1
if printData:
    print("The function "+str(f)+" converges to a minimum")
    print("Number of iterations:", iterations, sep=" ")
    print("theta (x0) =", temptheta, sep=" ")
    print("theta1 (y0) =", temptheta1, sep=" ")