-
-
Save sssbohdan/df423cc59441a8246a03ab32b4bc2d8c to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//
//  CameraManager.swift
//  CameraService
//
//  Created by Bohdan Savych on 9/21/16.
//  Copyright © 2016 Bohdan Savych. All rights reserved.
//
import Foundation | |
import UIKit | |
import AVFoundation | |
/// Thin wrapper around `AVCaptureSession` that provides a preview layer,
/// still-photo capture (JPEG) and QR-code scanning via callbacks.
final class CameraManager: NSObject {
    /// Delivered when a photo capture finishes: either an image or an error.
    typealias PhotoCaptureCompletion = (UIImage?, Error?) -> Void
    /// Delivered with the decoded payload whenever a QR code is recognized.
    typealias QRCaptureCompletion = (String) -> Void

    /// Preview layer sized to the frame passed to `init(frame:)`; install it
    /// into the host view's layer hierarchy.
    private(set) var previewLayer: AVCaptureVideoPreviewLayer?

    private lazy var session = AVCaptureSession()
    private let photoOutput = AVCapturePhotoOutput()
    private lazy var device = AVCaptureDevice.default(for: AVMediaType.video)
    // NOTE(review): flashMode/devicePosition are never read in the visible
    // code — presumably reserved for flash/camera toggling; confirm before removing.
    private lazy var flashMode = AVCaptureDevice.FlashMode.off
    private lazy var devicePosition = AVCaptureDevice.Position.back
    private var input: AVCaptureDeviceInput?
    private lazy var metadataOutput = AVCaptureMetadataOutput()

    var onPhotoCapture: PhotoCaptureCompletion?
    var onQRCaptureCompletion: QRCaptureCompletion?

    /// Builds the capture session (photo preset) and a preview layer sized to `frame`.
    init(frame: CGRect) {
        super.init()
        self.session.sessionPreset = .photo
        // Fix: guard with canAddInput before adding, and remember the input in
        // the previously-unused stored property so it can be swapped later.
        if let device = self.device,
           let input = try? AVCaptureDeviceInput(device: device),
           self.session.canAddInput(input) {
            self.session.addInput(input)
            self.input = input
        }
        // NOTE(review): this video-data output never gets a sample-buffer
        // delegate in the visible code, so its frames are unconsumed — confirm
        // it is needed. Fix: configure settings first and guard with canAddOutput.
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as [String: Any]
        if self.session.canAddOutput(output) {
            self.session.addOutput(output)
        }
        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer?.frame = frame
        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        if self.session.canAddOutput(self.photoOutput) {
            self.session.addOutput(self.photoOutput)
        }
    }

    /// Returns the photo output's connection that carries video, if any.
    private func getCurrentCaptureConnection() -> AVCaptureConnection? {
        for connection in photoOutput.connections {
            if connection.inputPorts.contains(where: { $0.mediaType == AVMediaType.video }) {
                return connection
            }
        }
        return nil
    }

    /// Triggers a single JPEG capture; the result arrives via `onPhotoCapture`.
    func capture() {
        self.previewLayer?.connection?.isEnabled = true
        let videoConnection = getCurrentCaptureConnection()
        // Lock the still image to portrait regardless of device orientation.
        videoConnection?.videoOrientation = .portrait
        let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        self.photoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    /// Starts the session if it is not already running.
    /// NOTE(review): startRunning blocks; Apple recommends calling it off the
    /// main queue — confirm callers invoke this from a background queue.
    func startCameraCapturing() {
        if self.session.isRunning { return }
        self.session.startRunning()
    }

    /// Stops the session if it is running.
    func stopCameraCapturing() {
        if !self.session.isRunning { return }
        self.session.stopRunning()
    }

    /// Adds the metadata output and restricts it to QR codes.
    /// `metadataObjectTypes` must be set after the output joins the session.
    func startMetadataScanner() {
        if self.session.canAddOutput(self.metadataOutput) {
            self.session.addOutput(self.metadataOutput)
            metadataOutput.metadataObjectTypes = [.qr]
            self.metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        }
    }

    /// Removes the metadata output so QR callbacks stop firing.
    func stopMetadataScanner() {
        self.session.removeOutput(self.metadataOutput)
    }
}
// MARK: - AVCaptureMetadataOutputObjectsDelegate
extension CameraManager: AVCaptureMetadataOutputObjectsDelegate {
    /// Forwards the payload of the first recognized machine-readable code
    /// to `onQRCaptureCompletion` (delegate is registered on the main queue).
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard
            let readable = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
            let payload = readable.stringValue
        else { return }

        // Haptic cue that a code was scanned.
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        onQRCaptureCompletion?(payload)
    }
}
// MARK: - AVCapturePhotoCaptureDelegate
extension CameraManager: AVCapturePhotoCaptureDelegate {
    /// Decodes the captured photo off the main queue and delivers the result
    /// through `onPhotoCapture`, always on the main queue.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            // Fix: the failure used to be delivered on the delegate's queue
            // while the success path hopped to main — deliver on main so
            // callers can always touch UI without re-dispatching.
            DispatchQueue.main.async { [weak self] in
                self?.onPhotoCapture?(nil, error)
            }
            return
        }
        DispatchQueue.global().async { [weak self] in
            guard let data = photo.fileDataRepresentation() else {
                // Fix: this branch used to fire on the background queue;
                // hop back to main for a consistent completion contract.
                DispatchQueue.main.async {
                    self?.onPhotoCapture?(nil, nil)
                }
                return
            }
            // UIImage decoding happens off-main; only the callback hops back.
            let image = UIImage(data: data)
            DispatchQueue.main.async {
                self?.onPhotoCapture?(image, error)
            }
        }
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment