Skip to content

Instantly share code, notes, and snippets.

@anupamchugh
Created June 2, 2020 11:23
Show Gist options
  • Save anupamchugh/0b474cadb0293f57638b126993549090 to your computer and use it in GitHub Desktop.
Save anupamchugh/0b474cadb0293f57638b126993549090 to your computer and use it in GitHub Desktop.
/// Vision request that runs the bundled MobileNet classifier.
/// Built once on first access; its completion handler forwards
/// results to `processClassifications(for:error:)`.
lazy var classificationRequest: VNCoreMLRequest = {
    do {
        // Wrap the Core ML model so the Vision framework can drive it.
        let visionModel = try VNCoreMLModel(for: MobileNet().model)

        let visionRequest = VNCoreMLRequest(model: visionModel) { [weak self] request, error in
            self?.processClassifications(for: request, error: error)
        }
        // Crop to the center square before scaling to the model's input size.
        visionRequest.imageCropAndScaleOption = .centerCrop
        return visionRequest
    } catch {
        // A missing or incompatible bundled model is a programmer error,
        // so crashing at startup is the intended behavior here.
        fatalError("Failed to load Vision ML model: \(error)")
    }
}()
/// Receives camera frames — assumes this type is registered as the
/// `AVCaptureVideoDataOutputSampleBufferDelegate`; TODO confirm against
/// the enclosing class's capture-session setup.
func captureOutput(
    _ output: AVCaptureOutput,
    didOutput sampleBuffer: CMSampleBuffer,
    from connection: AVCaptureConnection) {
    // Extract the pixel buffer from the sample buffer; frames without
    // image data are logged and skipped.
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        debugPrint("unable to get image from sample buffer")
        return
    }
    updateClassifications(in: pixelBuffer)
}
/// Runs the Vision classification request on `image` off the main thread.
///
/// - Parameters:
///   - image: Camera frame to classify.
///   - orientation: EXIF orientation to apply to the buffer. Defaults to
///     `.right`, preserving the original hard-coded behavior (portrait
///     device with a landscape-native sensor); pass another value to
///     support other device orientations.
func updateClassifications(in image: CVPixelBuffer,
                           orientation: CGImagePropertyOrientation = .right) {
    DispatchQueue.global(qos: .userInitiated).async {
        let handler = VNImageRequestHandler(cvPixelBuffer: image,
                                            orientation: orientation,
                                            options: [:])
        do {
            // `self` is captured strongly on purpose: the request must
            // complete even if the owner is mid-deallocation elsewhere.
            try handler.perform([self.classificationRequest])
        } catch {
            print("Failed to perform classification.\n\(error.localizedDescription)")
        }
    }
}
/// Handles the results of `classificationRequest`, notifying the delegate
/// when the top classification matches `emojiString` with confidence > 0.5.
///
/// - Parameters:
///   - request: The completed Vision request whose `results` are expected
///     to be `VNClassificationObservation`s.
///   - error: The Vision error, if any. Currently unused: failures simply
///     produce no delegate callback.
func processClassifications(for request: VNRequest, error: Error?) {
    DispatchQueue.main.async {
        // Narrow the results safely instead of force-casting (`as!`), and
        // take the highest-confidence observation without force-unwrapping.
        guard let classifications = request.results as? [VNClassificationObservation],
              let top = classifications.first else {
            return
        }
        // Only report a hit above the 50% confidence threshold when the
        // label contains the emoji keyword being searched for.
        if top.confidence > 0.5, top.identifier.contains(self.emojiString) {
            self.delegate?.emojiWasFound(result: true)
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment