Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
import AVFoundation
import Foundation
import UIKit
/// Scans QR codes from the device camera and reports every decoded payload —
/// together with its corner points in preview-layer coordinates — through
/// `callback`, which is invoked on the main queue.
public class QRCodeScanner: NSObject, AVCaptureMetadataOutputObjectsDelegate {
    /// Called on the main queue for each detected QR code.
    public var callback: ((_ qrCode: String, _ corners: [CGPoint]) -> Void)?
    /// Populated by `setup(previewView:)`; nil (and crashing on access) before that.
    public var captureSession: AVCaptureSession!
    /// Preview layer added as a sublayer of the view passed to `setup(previewView:)`.
    public var previewLayer: AVCaptureVideoPreviewLayer!
    /// Serial queue for session start/stop. `startRunning()` is a blocking call,
    /// so Apple recommends never invoking it on the main queue.
    private let sessionQueue = DispatchQueue(label: "QRCodeScanner.session")

    /// Builds the capture pipeline (camera input → metadata output → preview
    /// layer), attaches the preview to `previewView`, and starts the session
    /// asynchronously on a background queue.
    /// - Parameter previewView: View that hosts the camera preview layer.
    /// - Returns: `false` when no video device exists, the input cannot be
    ///   created, or input/output cannot be added to the session.
    public func setup(previewView: UIView) -> Bool {
        captureSession = AVCaptureSession()
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return false }
        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            // Typically thrown when camera permission is denied.
            return false
        }
        guard captureSession.canAddInput(videoInput) else { return false }
        captureSession.addInput(videoInput)
        let metadataOutput = AVCaptureMetadataOutput()
        guard captureSession.canAddOutput(metadataOutput) else { return false }
        captureSession.addOutput(metadataOutput)
        // metadataObjectTypes must be set AFTER the output joins the session.
        metadataOutput.metadataObjectTypes = [.qr]
        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = previewView.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        previewView.layer.addSublayer(previewLayer)
        // startRunning() blocks until the capture hardware spins up;
        // keep it off the main queue (Apple-documented requirement).
        sessionQueue.async { [weak self] in
            self?.captureSession.startRunning()
        }
        return true
    }

    /// Stops the running session, if any, on the background session queue.
    public func stop() {
        sessionQueue.async { [weak self] in
            guard let self = self, self.captureSession?.isRunning == true else { return }
            self.captureSession.stopRunning()
        }
    }

    /// Restarts a previously set-up session, on the background session queue.
    public func start() {
        sessionQueue.async { [weak self] in
            guard let self = self, self.captureSession?.isRunning == false else { return }
            self.captureSession.startRunning()
        }
    }

    // MARK: - AVCaptureMetadataOutputObjectsDelegate

    /// Forwards the first machine-readable object's string payload and corner
    /// points to `callback`. Runs on the main queue (see delegate registration).
    public func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let readableObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              let stringValue = readableObject.stringValue else { return }
        callback?(stringValue, readableObject.corners)
    }

    /// Realigns the preview's video orientation with the current device
    /// orientation. Call from the owning view controller on rotation.
    public func updateScannerOrientation() {
        guard let connection = previewLayer.connection,
              connection.isVideoOrientationSupported else { return }
        switch UIDevice.current.orientation {
        case .portrait:
            connection.videoOrientation = .portrait
        case .landscapeRight:
            // Device and capture orientations are mirrored for landscape.
            connection.videoOrientation = .landscapeLeft
        case .landscapeLeft:
            connection.videoOrientation = .landscapeRight
        case .portraitUpsideDown:
            connection.videoOrientation = .portraitUpsideDown
        default:
            // .faceUp / .faceDown / .unknown: fall back to portrait.
            connection.videoOrientation = .portrait
        }
    }
}
//
/// Configures the QR scanner against `previewView`. On failure (no camera,
/// capture pipeline could not be built) shows a localized error alert; on
/// success wires the scan callback. Note: the original code also called
/// `qrCodeScanner.start()` in the FAILURE branch, starting a half-configured
/// session — that call is removed, since `setup(previewView:)` already starts
/// the session when it succeeds.
func setupScanner() {
    if qrCodeScanner.setup(previewView: previewView) == false {
        // Scanning is unavailable on this device/configuration — tell the user.
        let ac = UIAlertController(title: NSLocalizedString("scanning_not_supported_title", comment: ""),
                                   message: NSLocalizedString("scanning_not_supported_message", comment: ""),
                                   preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: NSLocalizedString("ok", comment: ""), style: .default))
        present(ac, animated: true)
    } else {
        qrCodeScanner.callback = scannedQRCode
    }
}
/// `QRCodeScanner` callback: handles a single decoded QR payload.
/// Stops the scanner, stores the payload, and segues to the document-scan screen.
/// - Parameters:
///   - code: Decoded string content of the QR code.
///   - corners: Corner points of the code in preview coordinates (unused here).
func scannedQRCode(_ code: String, _ corners: [CGPoint]) {
log.debug("scanned accesstoken")
// Stop the camera first so no further callbacks fire during the transition.
qrCodeScanner.stop()
// NOTE(review): assumes the QR payload IS the access token — confirm with the code issuer.
AppDelegate.accessToken = code
performSegue(withIdentifier: "goDocumentScan", sender: nil)
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment