Skip to content

Instantly share code, notes, and snippets.

@omarsar
Created December 29, 2019 16:20
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save omarsar/93ad09dd62539b98f6e5136929ddae15 to your computer and use it in GitHub Desktop.
## hyperparams
# Running list of loss values sampled during training (appended every 10 iters below).
costs = []
# Feature dimension of the model input.
# NOTE(review): `x_flatten` is not defined in this chunk — presumably a flattened
# training batch created earlier in the file; confirm it exists before this line.
dim = x_flatten.shape[0]
# Learning rate as a 0-dim tensor placed on the target device.
learning_rate = torch.scalar_tensor(0.0001).to(device)
# Number of training steps to run in the loop below.
num_iterations = 100
# NOTE(review): `LR` appears to be a custom logistic-regression class defined
# elsewhere (it exposes forward/backward/optimize, used below) — verify.
lrmodel = LR(dim, learning_rate)
lrmodel.to(device)
## transform the data
def transform_data(x, y):
    """Transpose the feature batch and give the labels a leading batch axis.

    Parameters
    ----------
    x : torch.Tensor
        Feature tensor; returned transposed (``x.T``).
    y : torch.Tensor
        Label tensor; returned with an extra dim 0 (``y.unsqueeze(0)``).

    Returns
    -------
    tuple(torch.Tensor, torch.Tensor)
        ``(x.T, y.unsqueeze(0))``.
    """
    return x.T, y.unsqueeze(0)
## training the model
# Runs `num_iterations` manual gradient steps of the LR model defined above,
# logging loss / train acc / test acc every 10 iterations.
for i in range(num_iterations):
    # NOTE(review): `next(iter(...))` rebuilds the iterator each pass, so every
    # iteration fetches the FIRST batch only — confirm this single-batch
    # behavior is intended (common in minimal demo gists).
    x, y = next(iter(train_dataset))
    test_x, test_y = next(iter(test_dataset))
    x, y = transform_data(x, y)
    test_x, test_y = transform_data(test_x, test_y)

    # Hoisted: the original called x.to(device) twice per step (forward + backward).
    x_dev = x.to(device)

    # forward
    yhat = lrmodel.forward(x_dev)
    # Loss computed on a detached CPU copy of the predictions.
    cost = loss(yhat.data.cpu(), y)
    train_pred = predict(yhat, y)

    # backward (LR implements its own backward/optimize rather than autograd)
    lrmodel.backward(x_dev, yhat.to(device), y.to(device))
    lrmodel.optimize()

    ## test
    yhat_test = lrmodel.forward(test_x.to(device))
    test_pred = predict(yhat_test, test_y)

    # Merged the two duplicate `if i % 10 == 0` guards from the original.
    if i % 10 == 0:
        costs.append(cost)
        print("Cost after iteration {}: {} | Train Acc: {} | Test Acc: {}".format(i,
                                                                                  cost,
                                                                                  train_pred,
                                                                                  test_pred))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment