@RamonGilabert
Created November 10, 2016 14:57
A camera controller that renders the camera feed, applies a blending mode with Core Image, and reads a QR code if one is present.
import UIKit
import PhotosUI
import AVFoundation

class ViewController: UIViewController {

  lazy var session: AVCaptureSession = {
    let session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetPhoto
    return session
  }()

  lazy var cameraView: UIView = {
    let view = UIView()
    view.frame = UIScreen.main.bounds
    return view
  }()

  var coreContext = CIContext()
  var filter = CIFilter(name: "CIMultiplyBlendMode")
  var detector: CIDetector?
  override func viewDidLoad() {
    super.viewDidLoad()

    // Force the Core Image context onto the CPU renderer.
    coreContext = CIContext(options: [kCIContextUseSoftwareRenderer: true])
    UIApplication.shared.isStatusBarHidden = true
    filter?.setDefaults()
    view.addSubview(cameraView)

    // Generate a constant blue image and use it as the foreground of the multiply blend.
    let generator = CIFilter(name: "CIConstantColorGenerator")
    let blue = CIColor(red: 0, green: 0, blue: 1)
    generator?.setValue(blue, forKey: "inputColor")
    let blendingImage = generator?.value(forKey: "outputImage")
    filter?.setValue(blendingImage, forKey: "inputImage")

    // High accuracy QR detector; frames are fed to it from the capture output.
    let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
    detector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: options)

    checkPermission()
  }
  // MARK: - Setup camera

  func setupCamera() {
    let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    let output = AVCaptureVideoDataOutput()
    var input: AVCaptureDeviceInput?

    do {
      input = try AVCaptureDeviceInput(device: device)
    } catch {
      print("Could not create the capture input: \(error)")
    }

    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable : kCVPixelFormatType_32BGRA]
    output.alwaysDiscardsLateVideoFrames = true
    // Sample buffers are delivered off the main queue.
    output.setSampleBufferDelegate(self, queue: DispatchQueue.global())

    guard let cameraInput = input else { return }

    session.addInput(cameraInput)
    session.addOutput(output)

    if let connection = output.connections.first as? AVCaptureConnection {
      connection.videoOrientation = .portrait
    }

    session.startRunning()
  }
  // MARK: - Permissions

  func checkPermission() {
    let status = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

    switch status {
    case .authorized:
      setupCamera()
    case .notDetermined:
      requestPermission()
    default:
      print("Camera access denied or restricted.")
    }
  }

  func requestPermission() {
    AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
      // The completion handler is not guaranteed to run on the main queue.
      DispatchQueue.main.async {
        guard granted else { return }
        self.setupCamera()
      }
    }
  }
  // MARK: - Scanning

  func codeDetection(image: CIImage) {
    guard let detector = detector, let features = detector.features(in: image) as? [CIQRCodeFeature] else { return }

    for feature in features {
      print("Found: \(feature.messageString ?? "")")
    }
  }
}
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

  func captureOutput(_ captureOutput: AVCaptureOutput!,
                     didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                     from connection: AVCaptureConnection!) {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

    // Blend the camera frame (background) with the constant color image set up in viewDidLoad.
    let image = CIImage(cvImageBuffer: imageBuffer)
    filter?.setValue(image, forKey: "inputBackgroundImage")

    guard let filter = filter, let outputImage = filter.outputImage else { return }

    // Render the blended frame, cropped to the screen's bounds, then scan it for QR codes.
    let reference = coreContext.createCGImage(outputImage, from: UIScreen.main.bounds)

    if let image = reference {
      codeDetection(image: CIImage(cgImage: image))
    }

    // Display the rendered frame; layer updates must happen on the main queue.
    DispatchQueue.main.async {
      self.cameraView.layer.contents = reference
    }
  }
}
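
Usage note: the controller drives everything itself once it is on screen, so a minimal way to try it out is to install it as the window's root view controller. The sketch below is an assumption, not part of the gist: it presumes a storyboard-free project on the same Swift 3 / iOS 10 SDK the code targets. Also remember that iOS 10 requires an NSCameraUsageDescription entry in Info.plist before the capture session is allowed to access the camera.

import UIKit

@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

  var window: UIWindow?

  func application(_ application: UIApplication,
                   didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
    // Install the camera controller as the root of a programmatically created window.
    window = UIWindow(frame: UIScreen.main.bounds)
    window?.rootViewController = ViewController()
    window?.makeKeyAndVisible()
    return true
  }
}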