# -*- coding: utf-8 -*-
"""
Использование автоматического дифференцирования autograd (https://github.com/HIPS/autograd)
для решения линейной регрессии МНК.
"""
from __future__ import print_function
import autograd.numpy as np
from autograd import grad
n_samples = 50

# Synthetic data: y = 2*x + 0.3 plus a little Gaussian noise.
X_data = np.linspace(1, 50, n_samples).reshape((n_samples, 1))
y_data = 2.0*X_data + 0.3 + 1e-4*np.random.randn(n_samples).reshape((n_samples, 1))

# Append a column of ones so the bias is learned as the second weight.
X_data = np.hstack([X_data, np.ones((n_samples, 1))])
def cost(w):
    # Root-mean-square error between the linear prediction X_data.dot(w) and the targets.
    pred = np.dot(X_data, w)
    return np.sqrt(((pred - y_data) ** 2).mean(axis=None))
# autograd builds a function returning the gradient of the scalar cost w.r.t. the weights.
grad_cost = grad(cost)
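
# Optional sanity check (a sketch added here, not part of the original gist; the names
# w_check, fd_grad and eps are ad hoc): compare the autograd gradient with a central
# finite-difference approximation at a random point before running the descent.
w_check = np.random.randn(2, 1)
eps = 1e-6
fd_grad = np.zeros_like(w_check)
for i in range(w_check.shape[0]):
    w_plus = w_check.copy()
    w_minus = w_check.copy()
    w_plus[i, 0] += eps
    w_minus[i, 0] -= eps
    fd_grad[i, 0] = (cost(w_plus) - cost(w_minus)) / (2.0 * eps)
assert np.allclose(grad_cost(w_check), fd_grad, rtol=1e-4, atol=1e-6), \
    'autograd gradient disagrees with the finite-difference estimate'
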
# Initial parameters: slope W and intercept b, stacked as a column vector.
W = 0.0
b = 0.0
weights = np.array([W, b]).reshape((2, 1))

# Gradient-descent hyperparameters: an exponentially decaying learning rate with
# a floor, and a stopping tolerance on the cost.
learning_rate0 = 0.01
learning_rate = learning_rate0
lr_decay = 0.9999
min_lr = 1e-5
tolerance = 1e-2

iteration = 0
while True:
    iteration += 1
    print("*"*30)
    print('iteration #{}'.format(iteration))
    print("*"*30)

    # One gradient-descent step; delta_w / delta_b are the parameter changes of this step.
    weights2 = weights - grad_cost(weights)*learning_rate
    delta_w = weights2[0, 0] - weights[0, 0]
    delta_b = weights2[1, 0] - weights[1, 0]

    cur_cost = cost(weights)
    print('cost={} W={} b={} delta_w={} delta_b={}'.format(cur_cost, weights[0, 0], weights[1, 0], delta_w, delta_b))

    weights = weights2
    learning_rate = max(min_lr, learning_rate * lr_decay)
    print('learning_rate={:8.6f}'.format(learning_rate))

    if cur_cost <= tolerance:
        print('Current cost={} is smaller than tolerance={}, optimization complete.'.format(cur_cost, tolerance))
        break
print('All done.')
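
# Cross-check (a sketch added here, not part of the original gist; assumes a NumPy
# recent enough to accept rcond=None): since the noise is tiny, the gradient-descent
# solution should land near the closed-form least-squares solution on the same
# design matrix.
w_closed_form = np.linalg.lstsq(X_data, y_data, rcond=None)[0]
print('closed-form solution: W={} b={}'.format(w_closed_form[0, 0], w_closed_form[1, 0]))
print('gradient descent:     W={} b={}'.format(weights[0, 0], weights[1, 0]))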