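// The training loop below assumes definitions roughly like the following
// appear earlier in the file. This is a minimal sketch: the concrete model,
// the MNIST() dataset loader, and the minibatch helper are illustrative
// assumptions (the helper mirrors the one in the swift-models repository);
// only the names the loop actually uses (epochCount, batchSize, dataset,
// classifier, optimizer) come from the snippet itself.
import TensorFlow

let epochCount = 12
let batchSize = 128

// Assumed slicing helper: cuts the i-th batch out of a tensor along its
// leading dimension.
extension Tensor {
    func minibatch(at index: Int, batchSize: Int) -> Tensor {
        let start = index * batchSize
        return self[start..<start + batchSize]
    }
}

// Assumed model: any Layer that maps image tensors to class logits works.
struct Classifier: Layer {
    var flatten = Flatten<Float>()
    var dense = Dense<Float>(inputSize: 28 * 28, outputSize: 10)

    @differentiable
    func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
        return input.sequenced(through: flatten, dense)
    }
}

// Assumed dataset: exposes trainingImages/trainingLabels,
// testImages/testLabels, trainingSize, and testSize.
let dataset = MNIST()
var classifier = Classifier()
let optimizer = SGD(for: classifier, learningRate: 0.1)
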
print("Beginning training...") | |
struct Statistics { | |
var correctGuessCount: Int = 0 | |
var totalGuessCount: Int = 0 | |
var totalLoss: Float = 0 | |
} | |
// Store accuracy results during training | |
var trainAccuracyResults: [Float] = [] | |
var testAccuracyResults: [Float] = [] | |
// The training loop.
for epoch in 1...epochCount {
    var trainStats = Statistics()
    var testStats = Statistics()

    // Set the context to training.
    Context.local.learningPhase = .training
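    // Layers such as Dropout and BatchNorm read this flag to choose their
    // training-time vs. inference-time behavior.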
    for i in 0 ..< dataset.trainingSize / batchSize {
        // Get mini-batches of x and y.
        let x = dataset.trainingImages.minibatch(at: i, batchSize: batchSize)
        let y = dataset.trainingLabels.minibatch(at: i, batchSize: batchSize)

        // Compute the gradient with respect to the model.
        let 𝛁model = classifier.gradient { classifier -> Tensor<Float> in
            let ŷ = classifier(x)
            let correctPredictions = ŷ.argmax(squeezingAxis: 1) .== y
            trainStats.correctGuessCount += Int(Tensor<Int32>(correctPredictions).sum().scalarized())
            trainStats.totalGuessCount += batchSize
            let loss = softmaxCrossEntropy(logits: ŷ, labels: y)
            trainStats.totalLoss += loss.scalarized()
            return loss
        }
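        // Note: mutating trainStats inside the closure is an ordinary side
        // effect; only the returned loss is differentiated through.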
        // Update the model's differentiable variables along the gradient vector.
        optimizer.update(&classifier, along: 𝛁model)
    }
    // Set the context to inference.
    Context.local.learningPhase = .inference
    for i in 0 ..< dataset.testSize / batchSize {
        let x = dataset.testImages.minibatch(at: i, batchSize: batchSize)
        let y = dataset.testLabels.minibatch(at: i, batchSize: batchSize)

        // Compute loss and accuracy on the test set.
        let ŷ = classifier(x)
        let correctPredictions = ŷ.argmax(squeezingAxis: 1) .== y
        testStats.correctGuessCount += Int(Tensor<Int32>(correctPredictions).sum().scalarized())
        testStats.totalGuessCount += batchSize
        let loss = softmaxCrossEntropy(logits: ŷ, labels: y)
        testStats.totalLoss += loss.scalarized()
    }
    let trainAccuracy = Float(trainStats.correctGuessCount) / Float(trainStats.totalGuessCount)
    let testAccuracy = Float(testStats.correctGuessCount) / Float(testStats.totalGuessCount)

    // Save the train and test accuracy for this epoch.
    trainAccuracyResults.append(trainAccuracy)
    testAccuracyResults.append(testAccuracy)

    print("""
        [Epoch \(epoch)] \
        Training Loss: \(trainStats.totalLoss), \
        Training Accuracy: \(trainStats.correctGuessCount)/\(trainStats.totalGuessCount) \
        (\(trainAccuracy)), \
        Test Loss: \(testStats.totalLoss), \
        Test Accuracy: \(testStats.correctGuessCount)/\(testStats.totalGuessCount) \
        (\(testAccuracy))
        """)
}
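
// A possible follow-up, not part of the original gist: report the final
// epoch's accuracy from the arrays collected above.
if let finalTrain = trainAccuracyResults.last,
   let finalTest = testAccuracyResults.last {
    print("Final training accuracy: \(finalTrain), final test accuracy: \(finalTest)")
}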