//
// CameraManager.swift
// CameraService
//
// Created by Bohdan Savych on 9/21/16.
// Copyright © 2016 Bohdan Savych. All rights reserved.
//
import Foundation
import UIKit
import AVFoundation
import AudioToolbox // needed for AudioServicesPlaySystemSound in the QR delegate
final class CameraManager: NSObject {
    typealias PhotoCaptureCompletion = (UIImage?, Error?) -> Void
    typealias QRCaptureCompletion = (String) -> Void

    private(set) var previewLayer: AVCaptureVideoPreviewLayer?
    private lazy var session = AVCaptureSession()
    private let photoOutput = AVCapturePhotoOutput()
    private lazy var device = AVCaptureDevice.default(for: AVMediaType.video)
    private lazy var flashMode = AVCaptureDevice.FlashMode.off
    private lazy var devicePosition = AVCaptureDevice.Position.back
    private var input: AVCaptureDeviceInput?
    private lazy var metadataOutput = AVCaptureMetadataOutput()

    /// Called when a photo has been captured or capture fails.
    var onPhotoCapture: PhotoCaptureCompletion?
    /// Called when a QR code has been decoded.
    var onQRCaptureCompletion: QRCaptureCompletion?

    init(frame: CGRect) {
        super.init()

        self.session.sessionPreset = .photo

        // Attach the default video camera as the session input.
        if let device = self.device, let input = try? AVCaptureDeviceInput(device: device) {
            self.session.addInput(input)
        }

        // Video data output for live frames.
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        if self.session.canAddOutput(output) {
            self.session.addOutput(output)
        }

        // Preview layer that the owning view can add to its layer hierarchy.
        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer?.frame = frame
        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

        // Still-photo output.
        if self.session.canAddOutput(self.photoOutput) {
            self.session.addOutput(self.photoOutput)
        }
    }
    /// Finds the photo output's connection that carries video.
    private func getCurrentCaptureConnection() -> AVCaptureConnection? {
        var videoConnection: AVCaptureConnection?

        for connection in photoOutput.connections {
            for port in connection.inputPorts where port.mediaType == AVMediaType.video {
                videoConnection = connection
                break
            }

            if videoConnection != nil {
                break
            }
        }

        return videoConnection
    }

    /// Captures a still photo; the result is delivered via `onPhotoCapture`.
    func capture() {
        self.previewLayer?.connection?.isEnabled = true

        let videoConnection = getCurrentCaptureConnection()
        videoConnection?.videoOrientation = .portrait

        let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        self.photoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    func startCameraCapturing() {
        guard !self.session.isRunning else { return }
        self.session.startRunning()
    }

    func stopCameraCapturing() {
        guard self.session.isRunning else { return }
        self.session.stopRunning()
    }

    /// Adds a metadata output and starts listening for QR codes.
    func startMetadataScanner() {
        if self.session.canAddOutput(self.metadataOutput) {
            self.session.addOutput(self.metadataOutput)
            // Object types must be set after the output has been added to the session.
            self.metadataOutput.metadataObjectTypes = [.qr]
            self.metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        }
    }

    func stopMetadataScanner() {
        self.session.removeOutput(self.metadataOutput)
    }
}
// MARK: - AVCaptureMetadataOutputObjectsDelegate
extension CameraManager: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let metadataObject = metadataObjects.first,
            let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
            let stringValue = readableObject.stringValue else { return }

        // Vibrate as feedback, then report the decoded string.
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        self.onQRCaptureCompletion?(stringValue)
    }
}

// MARK: - AVCapturePhotoCaptureDelegate
extension CameraManager: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil else {
            self.onPhotoCapture?(nil, error)
            return
        }

        // Decode the photo data off the main queue, then deliver the result on the main queue.
        DispatchQueue.global().async { [weak self] in
            let image = photo.fileDataRepresentation().flatMap { UIImage(data: $0) }

            DispatchQueue.main.async {
                self?.onPhotoCapture?(image, nil)
            }
        }
    }
}
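
A minimal usage sketch, not part of the original gist: a hypothetical CameraViewController that owns a CameraManager, installs its preview layer, and starts and stops the session with the view lifecycle. The controller name and wiring are assumptions for illustration; the app also needs an NSCameraUsageDescription entry in Info.plist before the session will run.

// Hypothetical usage example (assumption, not part of the original gist).
import UIKit

final class CameraViewController: UIViewController {
    private var cameraManager: CameraManager?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Create the manager with the current bounds and install its preview layer.
        let manager = CameraManager(frame: view.bounds)
        if let previewLayer = manager.previewLayer {
            view.layer.addSublayer(previewLayer)
        }

        // Deliver captured photos and decoded QR strings back to the UI.
        manager.onPhotoCapture = { image, error in
            print("captured photo:", image as Any, "error:", error as Any)
        }
        manager.onQRCaptureCompletion = { code in
            print("scanned QR:", code)
        }

        self.cameraManager = manager
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        cameraManager?.startCameraCapturing()
        cameraManager?.startMetadataScanner()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        cameraManager?.stopMetadataScanner()
        cameraManager?.stopCameraCapturing()
    }
}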