# Using autograd to confirm answers to CSC321 Winter 2018, Hw2, Q3
import torch
from torch .autograd import Variable
# Out: <torch._C.Generator at 0x7f997494de10>
# NOTE(review): this is the repr of a seeding call whose input cell was lost in
# export — the values below appear to match torch.manual_seed(0); restore that
# call to reproduce the outputs.
# Training examples: 10 samples with 4 features each, uniform in [0, 1).
X = Variable(torch.rand(10, 4), requires_grad=True)
X  # notebook echo of the tensor
# Out: tensor([[0.4963, 0.7682, 0.0885, 0.1320],
#              [0.3074, 0.6341, 0.4901, 0.8964],
#              [0.4556, 0.6323, 0.3489, 0.4017],
#              [0.0223, 0.1689, 0.2939, 0.5185],
#              [0.6977, 0.8000, 0.1610, 0.2823],
#              [0.6816, 0.9152, 0.3971, 0.8742],
#              [0.4194, 0.5529, 0.9527, 0.0362],
#              [0.1852, 0.3734, 0.3051, 0.9320],
#              [0.1759, 0.2698, 0.1507, 0.0317],
#              [0.2081, 0.9298, 0.7231, 0.7423]], requires_grad=True)
# Targets: one scalar target per training example, uniform in [0, 1).
t = Variable(torch.rand(10), requires_grad=True)
t  # notebook echo of the tensor
# Out: tensor([0.5263, 0.2437, 0.5846, 0.0332, 0.1387, 0.2422, 0.8155, 0.7932, 0.2783,
#              0.4820], requires_grad=True)
# Weights: one per feature, initialised to zero so the first prediction is 0.
w = Variable(torch.zeros(4), requires_grad=True)
# Bias: a single scalar, also initialised to zero.
b = Variable(torch.zeros(1), requires_grad=True)
# Predictions: y_i = w . x_i + b for each row x_i of X (linear model).
y = torch.matmul(w, X.t()) + b
y  # notebook echo of the tensor
# Out: tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], grad_fn=<AddBackward0>)
def Loss(vector_y, vector_t):
    """Element-wise loss 1 - cos(y - t): zero when prediction equals target."""
    residual = vector_y - vector_t
    return 1 - torch.cos(residual)
def AvgLoss(vector_y, vector_t):
    """Mean of the element-wise losses over all predictions."""
    total = torch.sum(Loss(vector_y, vector_t))
    return total / len(vector_y)
# Autograd calculation: backpropagate from the scalar average loss so that
# .grad is populated on every leaf tensor (w, b, X, t).
# NOTE(review): the backward() call was missing from this export — without it
# w.grad would print as None rather than the gradient shown below.
AvgLoss(y, t).backward()
print(w.grad)
# Out: tensor([-0.1418, -0.2397, -0.1770, -0.1813])
# Manual calculation of ∂AvgLoss/∂w: the chain rule gives sin(y - t) · x / N,
# written here as -sin(t - y) · x / N; should match w.grad printed above.
-torch.matmul(torch.sin(t - y), X) / len(y)
# Out: tensor([-0.1418, -0.2397, -0.1770, -0.1813], grad_fn=<DivBackward0>)
# Out: tensor([-0.3886])
# NOTE(review): this second output matches b.grad — its producing input cell
# (likely `print(b.grad)`) appears to have been lost in export.
# Manual calculation of ∂AvgLoss/∂b: average of -sin(t - y) over the examples;
# should match b.grad.
-torch.sum(torch.sin(t - y)) / len(y)
# Out: tensor(-0.3886, grad_fn=<DivBackward0>)