Basic implementation of camera scanner for rectangles.
//
// SnappCameraView.swift
// EdgeDetection
//
// Created by er on 11/4/17.
// Copyright (c) 2017 Erison Veshi
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import AVFoundation
import GLKit
import UIKit

// TODO: - Proper error handling.
// TODO: - Properly handle force-unwrapped properties.
// TODO: - Provide delegates for ViewController handling.
// TODO: - Replace the current overlay with an image placeholder.
class SnappCameraView: UIView, AVCapturePhotoCaptureDelegate {

    // MARK: - Detectors

    let highAccuracyRectangleDetector = CIDetector(ofType: CIDetectorTypeRectangle,
                                                   context: nil,
                                                   options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])

    // MARK: - Public Properties

    var borderDetectionEnabled = true
    var borderDetectionFrameColor = UIColor(red: 1, green: 0, blue: 0, alpha: 0.5)

    // MARK: - Fileprivate Properties

    fileprivate var captureSession = AVCaptureSession()
    fileprivate var captureDevice: AVCaptureDevice?
    fileprivate var context: EAGLContext?
    fileprivate var stillImageOutput = AVCaptureStillImageOutput()
    fileprivate var forceStop = false
    fileprivate var coreImageContext: CIContext?
    fileprivate var renderBuffer: GLuint = 0
    fileprivate var glkView: GLKView?
    fileprivate var stopped = false
    fileprivate var imageDetectionConfidence = 0.0
    fileprivate var borderDetectFrame = false
    fileprivate var borderDetectLastRectangleFeature: CIRectangleFeature?
    fileprivate var timeKeeper: Timer?
    fileprivate var capturing = false
    // MARK: - Lifecycle

    /**
     Adds observers to the default `NotificationCenter`.
     */
    override func awakeFromNib() {
        super.awakeFromNib()
        NotificationCenter.default.addObserver(self, selector: #selector(SnappCameraView.backgroundMode), name: NSNotification.Name.UIApplicationWillResignActive, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(SnappCameraView.foregroundMode), name: NSNotification.Name.UIApplicationDidBecomeActive, object: nil)
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }
    // MARK: - Setup

    /**
     Sets up all the elements needed for video capture and border detection.
     Should be called from `viewDidLoad` in the owning view controller
     (see the usage sketch below).
     */
    func setupCameraView() {
        self.setupGLKView()

        // Grab the first available video capture device (typically the back camera).
        guard let device = AVCaptureDevice.devices(for: AVMediaType.video).first else {
            return
        }

        self.captureSession.beginConfiguration()
        self.captureDevice = device

        let input = try! AVCaptureDeviceInput(device: device)
        self.captureSession.sessionPreset = AVCaptureSession.Preset.photo
        self.captureSession.addInput(input)

        // Stream BGRA frames to this view on the main queue, dropping late ones.
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.alwaysDiscardsLateVideoFrames = true
        dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        self.captureSession.addOutput(dataOutput)
        self.captureSession.addOutput(self.stillImageOutput)

        let connection = dataOutput.connections.first
        connection?.videoOrientation = .portrait

        // NOTE: This lock/unlock currently configures nothing; flash settings
        // would go between the two calls.
        if device.isFlashAvailable {
            try! device.lockForConfiguration()
            device.unlockForConfiguration()
        }

        if device.isFocusModeSupported(.continuousAutoFocus) {
            try! device.lockForConfiguration()
            device.focusMode = .continuousAutoFocus
            device.unlockForConfiguration()
        }

        self.captureSession.commitConfiguration()
    }
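
    // Usage sketch (illustrative only — `ScannerViewController` and its outlet
    // are assumptions, not part of this gist): a hypothetical owning view
    // controller would drive this view roughly like so.
    //
    //     final class ScannerViewController: UIViewController {
    //         @IBOutlet private var cameraView: SnappCameraView!
    //
    //         override func viewDidLoad() {
    //             super.viewDidLoad()
    //             cameraView.setupCameraView()   // build the session + GL pipeline once
    //         }
    //
    //         override func viewWillAppear(_ animated: Bool) {
    //             super.viewWillAppear(animated)
    //             cameraView.start()             // begin streaming and border detection
    //         }
    //
    //         override func viewWillDisappear(_ animated: Bool) {
    //             super.viewWillDisappear(animated)
    //             cameraView.stop()
    //         }
    //     }
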
    private func setupGLKView() {
        if self.context != nil {
            return
        }

        self.context = EAGLContext(api: .openGLES2)
        self.glkView = GLKView(frame: self.bounds, context: self.context!)
        self.glkView!.autoresizingMask = [UIViewAutoresizing.flexibleWidth, UIViewAutoresizing.flexibleHeight]
        self.glkView!.translatesAutoresizingMaskIntoConstraints = true
        self.glkView!.contentScaleFactor = 1.0
        self.glkView!.drawableDepthFormat = .format24
        self.insertSubview(self.glkView!, at: 0)

        glGenRenderbuffers(1, &self.renderBuffer)
        glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.renderBuffer)

        self.coreImageContext = CIContext(eaglContext: self.context!, options: [kCIContextUseSoftwareRenderer: true])
        EAGLContext.setCurrent(self.context!)
    }
    // MARK: - Actions

    /**
     Starts the camera.
     */
    func start() {
        self.stopped = false
        self.captureSession.startRunning()
        self.handleGlkView(hide: false)
        // Re-arm border detection at most twice per second.
        self.timeKeeper = Timer.scheduledTimer(timeInterval: 0.5, target: self, selector: #selector(SnappCameraView.enableBorderDetection), userInfo: nil, repeats: true)
    }

    /**
     Stops the camera.
     */
    func stop() {
        self.stopped = true
        self.captureSession.stopRunning()
        self.handleGlkView(hide: true)
        self.timeKeeper?.invalidate()
    }

    // MARK: - Private Actions

    @objc func backgroundMode() {
        self.forceStop = true
    }

    @objc func foregroundMode() {
        self.forceStop = false
    }

    @objc func enableBorderDetection() {
        self.borderDetectFrame = true
    }
}
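
// One of the TODOs above asks for delegates so a view controller can react to
// detection results. A minimal sketch of what that could look like — the
// protocol name and method below are assumptions, not part of the original:
//
//     protocol SnappCameraViewDelegate: AnyObject {
//         /// Called whenever a new "biggest" rectangle is picked from a frame.
//         func cameraView(_ cameraView: SnappCameraView, didDetect rectangle: CIRectangleFeature)
//     }
//
// `SnappCameraView` would then hold a `weak var delegate: SnappCameraViewDelegate?`
// and call it from `captureOutput(_:didOutput:from:)` after border detection.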
// MARK: - Overlays

fileprivate extension SnappCameraView {

    func overlayImageForFeatureInImage(image: CIImage, feature: CIRectangleFeature) -> CIImage {
        var overlay = CIImage(color: CIColor(color: self.borderDetectionFrameColor))
        overlay = overlay.cropped(to: image.extent)
        overlay = overlay.applyingFilter("CIPerspectiveTransformWithExtent",
                                         parameters: ["inputExtent": CIVector(cgRect: image.extent),
                                                      "inputTopLeft": CIVector(cgPoint: feature.topLeft),
                                                      "inputTopRight": CIVector(cgPoint: feature.topRight),
                                                      "inputBottomLeft": CIVector(cgPoint: feature.bottomLeft),
                                                      "inputBottomRight": CIVector(cgPoint: feature.bottomRight)])
        return overlay.composited(over: image)
    }

    func handleGlkView(hide: Bool) {
        UIView.animate(withDuration: 0.1) { [weak self] in
            self?.glkView?.alpha = hide ? 0.0 : 1.0
        }
    }
}
// MARK: - Helpers

fileprivate extension SnappCameraView {

    /// Returns the rectangle with the largest half-perimeter (width + height),
    /// or `nil` if the array is empty.
    func biggestRectangle(rectangles: [CIRectangleFeature]) -> CIRectangleFeature? {
        // TODO: Use confidence level to optimise results.
        guard var biggestRectangle = rectangles.first else {
            return nil
        }

        var halfPerimeterValue = 0.0
        for rectangle in rectangles {
            // Approximate width and height as distances between corner points.
            let p1 = rectangle.topLeft
            let p2 = rectangle.topRight
            let width = hypotf(Float(p1.x - p2.x), Float(p1.y - p2.y))

            let p3 = rectangle.bottomLeft
            let height = hypotf(Float(p1.x - p3.x), Float(p1.y - p3.y))

            let currentHalfPerimeterValue = Double(height + width)
            if halfPerimeterValue < currentHalfPerimeterValue {
                halfPerimeterValue = currentHalfPerimeterValue
                biggestRectangle = rectangle
            }
        }
        return biggestRectangle
    }

    func detectionConfidenceValid() -> Bool {
        // TODO: Use confidence level to improve predicted results.
        return self.imageDetectionConfidence > 1.0
    }
}
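
// `detectionConfidenceValid()` is never called in this gist. One plausible use —
// purely an assumption, not in the original — is gating work on a stable
// detection, since confidence rises by 0.5 per consecutive detected frame:
//
//     // inside captureOutput(_:didOutput:from:), after the confidence update:
//     if self.detectionConfidenceValid() {
//         // the same rectangle has survived at least three frames; safe to act on it
//     }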
extension SnappCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if self.forceStop {
            return
        }
        if self.stopped || self.capturing || !CMSampleBufferIsValid(sampleBuffer) {
            return
        }

        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        var image = CIImage(cvPixelBuffer: pixelBuffer)

        if self.borderDetectionEnabled {
            // Run the (expensive) rectangle detector only on frames flagged by the timer.
            if self.borderDetectFrame {
                let features = highAccuracyRectangleDetector?.features(in: image) as? [CIRectangleFeature] ?? []
                self.borderDetectLastRectangleFeature = self.biggestRectangle(rectangles: features)
                self.borderDetectFrame = false
            }

            // Keep drawing the last known rectangle between detection passes.
            if let lastRectFeature = self.borderDetectLastRectangleFeature {
                self.imageDetectionConfidence += 0.5
                image = self.overlayImageForFeatureInImage(image: image, feature: lastRectFeature)
            } else {
                self.imageDetectionConfidence = 0.0
            }
        }

        if let context = self.context, let ciContext = self.coreImageContext, let glkView = self.glkView {
            ciContext.draw(image, in: self.bounds, from: image.extent)
            context.presentRenderbuffer(Int(GL_RENDERBUFFER))
            glkView.setNeedsDisplay()
        }
    }
}
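
// MARK: - Still capture (sketch)

// `stillImageOutput` is added to the session above but never used, and the class
// conforms to `AVCapturePhotoCaptureDelegate` without implementing capture. A
// minimal sketch, assuming the era-appropriate (now deprecated)
// `AVCaptureStillImageOutput` API — this method is not part of the original gist:
extension SnappCameraView {

    func captureStillImage(completion: @escaping (UIImage?) -> Void) {
        guard !self.capturing,
            let connection = self.stillImageOutput.connection(with: .video) else {
                completion(nil)
                return
        }
        self.capturing = true
        self.stillImageOutput.captureStillImageAsynchronously(from: connection) { [weak self] buffer, _ in
            defer { self?.capturing = false }
            guard let buffer = buffer,
                let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                    completion(nil)
                    return
            }
            completion(UIImage(data: data))
        }
    }
}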