Created
December 4, 2018 00:33
-
-
Save albertlai431/358e8e290aba272db509c92adc81f0b7 to your computer and use it in GitHub Desktop.
Training
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Train the model.
# Assumes `model`, `criterion`, `optimizer`, `train_loader`, and `num_epochs`
# are defined earlier in the file.
#
# NOTE(review): the original wrapped batches in torch.autograd.Variable, which
# has been a no-op since PyTorch 0.4 — plain tensors carry autograd state, so
# the wrapping is dropped with identical behavior.
iteration = 0  # total optimizer steps across all epochs (was `iter`, which shadowed the builtin)
for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        # Clear gradients accumulated from the previous step.
        optimizer.zero_grad()
        # Forward propagation.
        outputs = model(images)
        # Cross-entropy loss (criterion applies log-softmax internally).
        loss = criterion(outputs, labels)
        # Backward propagation.
        loss.backward()
        # Update parameters.
        optimizer.step()
        iteration += 1
        # Batch-level training accuracy from the argmax over class scores.
        total = labels.size(0)
        _, predicted = torch.max(outputs.data, 1)
        correct = (predicted == labels).sum().item()
        # Report loss and accuracy every 100 batches.
        if (i + 1) % 100 == 0:
            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}, Accuracy: {:.2f}%'
                  .format(epoch + 1, num_epochs, i + 1, len(train_loader), loss.item(),
                          (correct / total) * 100))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment