# Gist by @tomonari-masada (last active April 20, 2017):
# two-feature linear regression trained with SGD in PyTorch, with the
# regression plane visualized in 3D as it converges.
import torch
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d  # registers the '3d' projection

# Fix both seeds so the run is reproducible.
torch.manual_seed(102)
np.random.seed(22)
# One 3D axis collects the data points and the regression planes drawn
# at intermediate steps of training.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# Synthetic data: N points with 2 features, y = X beta + alpha (no noise).
N = 5
ITER = 100
alpha = 1.3
beta = np.array([[0.5], [1.9]])
X_data = np.random.randn(N, 2)
y_data = X_data.dot(beta) + alpha

# Grid over the observed feature range, used to draw each fitted plane.
x1 = np.arange(X_data[:, 0].min(), X_data[:, 0].max(), 0.2)
x2 = np.arange(X_data[:, 1].min(), X_data[:, 1].max(), 0.2)
X1, X2 = np.meshgrid(x1, x2)
# Training data as tensors; the parameters to learn carry requires_grad=True.
X = torch.Tensor(X_data)
y = torch.Tensor(y_data)
w_alpha = torch.randn(1, requires_grad=True)
w_beta = torch.randn(2, 1, requires_grad=True)

# Plain SGD over the two raw parameter tensors.
learning_rate = 1e-2
optimizer = torch.optim.SGD([w_alpha, w_beta], lr=learning_rate)
for t in range(ITER):
    # Forward pass: predictions X w_beta + w_alpha (w_alpha broadcasts
    # over the N rows), scored by the summed squared error.
    y_pred = X.mm(w_beta) + w_alpha
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.item())
    # Every 10 steps, draw the current regression plane; the grayscale
    # color string fades from black (early) toward white (late).
    if not t % 10:
        Z = X1 * w_beta[0].item() + X2 * w_beta[1].item() + w_alpha.item()
        ax.plot_wireframe(X1, X2, Z, color=str(t / ITER))
    # Backward pass and parameter update.
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
# Learned parameters; with noise-free data they approach beta and alpha.
print(w_beta.data)
print(w_alpha.data)
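
# As a sanity check (not part of the original gist), the SGD estimates can
# be compared against the closed-form least-squares fit computed with NumPy;
# the design matrix A gains a column of ones for the intercept.
A = np.hstack([X_data, np.ones((N, 1))])
coef, _, _, _ = np.linalg.lstsq(A, y_data, rcond=None)
print('closed-form beta:', coef[:2].ravel(), 'alpha:', coef[2, 0])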
# Overlay the training points on the sequence of fitted planes.
ax.scatter(X_data[:,0], X_data[:,1], y_data, c='red', s=200, marker='o')
plt.show()
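
# For reference, a minimal sketch of the same fit using torch.nn.Linear and
# torch.nn.MSELoss instead of hand-rolled parameters; the names below
# (model, mse, opt) are illustrative, not from the original gist.
model = torch.nn.Linear(2, 1)                # holds weight (1, 2) and bias (1,)
mse = torch.nn.MSELoss(reduction='sum')      # summed squared error, as above
opt = torch.optim.SGD(model.parameters(), lr=learning_rate)
for _ in range(ITER):
    opt.zero_grad()
    mse(model(X), y).backward()
    opt.step()
print(model.weight.data, model.bias.data)    # should match w_beta.T and w_alpha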