A simple example to show how gradient descent works
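The script below minimizes f(v) = sum of the squared components of v by repeatedly applying the update v ← v − 0.01 · ∇f(v), estimating the gradient numerically with a difference quotient and stopping once successive iterates are closer than a small tolerance.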
#!/usr/bin/env python
# coding=utf8
import random
import math


def partial_difference_quotient(f, v, i, h):
    """Approximate the i-th partial derivative of f at v as h approaches zero."""
    dv = [elem + (h if index == i else 0)
          for index, elem in enumerate(v)]
    return (f(dv) - f(v)) / h


def find_gradient(f, v, h=0.00001):
    """Estimate the gradient of f at v, one partial derivative per component."""
    return [partial_difference_quotient(f, v, i, h) for i in range(len(v))]
# The function to minimize
def sum_of_squares(vec):
    return sum(elem ** 2 for elem in vec)


def step(v, direction, step_size):
    """Move from v by step_size along direction."""
    return [elem + delem * step_size
            for elem, delem in zip(v, direction)]


def distance(v1, v2):
    """Compute the Euclidean distance between two vectors."""
    return math.sqrt(sum((e1 - e2) ** 2 for e1, e2 in zip(v1, v2)))
def local_minima(vec, tolerance=0.0000001):
    """Run gradient descent on sum_of_squares until the iterates converge."""
    steps = 0
    while True:
        gradient = find_gradient(sum_of_squares, vec)
        # Step by -0.01 times the gradient: the gradient points uphill,
        # so moving against it heads towards a local minimum
        vec_next = step(vec, gradient, -0.01)
        if distance(vec_next, vec) < tolerance:
            return vec, steps
        vec = vec_next
        steps += 1
if __name__ == '__main__':
    # A random starting point
    v = [random.randint(-10, 10) for _ in range(3)]
    print("Starting at {}".format(v))
    minima, steps = local_minima(v)
    print("Minima found at {} in {} steps".format(minima, steps))