@ArchdukeTim
Created June 13, 2017 21:26
FrostedGlass
//
// FrameExtractor.swift
// Created by Bobo on 29/12/2016.
//

import UIKit
import AVFoundation

protocol FrameExtractorDelegate: class {
    func captured(image: UIImage)
}
class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    private var position = AVCaptureDevicePosition.back
    private let quality = AVCaptureSessionPresetMedium

    private var permissionGranted = false
    private let sessionQueue = DispatchQueue(label: "session queue")
    private let captureSession = AVCaptureSession()
    private let context = CIContext()

    weak var delegate: FrameExtractorDelegate?

    override init() {
        super.init()
        checkPermission()
        sessionQueue.async { [unowned self] in
            self.configureSession()
            self.captureSession.startRunning()
        }
    }
    // MARK: AVSession configuration

    private func checkPermission() {
        switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
        case .authorized:
            permissionGranted = true
        case .notDetermined:
            requestPermission()
        default:
            permissionGranted = false
        }
    }

    private func requestPermission() {
        // Pause the session queue until the user answers the permission prompt.
        sessionQueue.suspend()
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { [unowned self] granted in
            self.permissionGranted = granted
            self.sessionQueue.resume()
        }
    }
    private func configureSession() {
        guard permissionGranted else { return }
        captureSession.sessionPreset = quality
        guard let captureDevice = selectCaptureDevice() else { return }

        // Prefer a format whose top frame rate is 240 fps (slow-motion capture).
        for case let vFormat as AVCaptureDeviceFormat in captureDevice.formats {
            guard let ranges = vFormat.videoSupportedFrameRateRanges as? [AVFrameRateRange],
                let frameRates = ranges.first else { continue }
            if frameRates.maxFrameRate == 240 {
                do {
                    try captureDevice.lockForConfiguration()
                    captureDevice.activeFormat = vFormat
                    captureDevice.activeVideoMinFrameDuration = frameRates.minFrameDuration
                    captureDevice.activeVideoMaxFrameDuration = frameRates.maxFrameDuration
                    captureDevice.unlockForConfiguration()
                } catch {
                    print("Failed to lock capture device for configuration: \(error)")
                }
            }
        }

        guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        guard captureSession.canAddInput(captureDeviceInput) else { return }
        captureSession.addInput(captureDeviceInput)

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer"))
        guard captureSession.canAddOutput(videoOutput) else { return }
        captureSession.addOutput(videoOutput)

        guard let connection = videoOutput.connection(withMediaType: AVFoundation.AVMediaTypeVideo) else { return }
        guard connection.isVideoOrientationSupported else { return }
        guard connection.isVideoMirroringSupported else { return }
        connection.videoOrientation = .portrait
        connection.isVideoMirrored = position == .front
    }
    private func selectCaptureDevice() -> AVCaptureDevice? {
        return AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back)
    }

    // MARK: Sample buffer to UIImage conversion

    private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        guard let uiImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
        // Deliver frames to the delegate on the main queue for UI updates.
        DispatchQueue.main.async { [unowned self] in
            self.delegate?.captured(image: uiImage)
        }
    }
}
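
FrameExtractor starts its capture session in init but never stops it. A minimal teardown sketch that could be added to the class (an addition, not part of the original gist, assuming the same sessionQueue and captureSession properties):

// Sketch (not in the gist): release the camera when the extractor is
// deallocated by stopping the session on the session queue. The capture list
// holds the session directly so the closure does not capture self from deinit.
deinit {
    sessionQueue.async { [captureSession] in
        captureSession.stopRunning()
    }
}

Note also that, from iOS 10 on, the app's Info.plist must contain an NSCameraUsageDescription string; without it, the system terminates the app as soon as it tries to access the camera.
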
//
// ViewController.swift
// Created by Bobo on 29/12/2016.
//

import UIKit

class ViewController: UIViewController, FrameExtractorDelegate {

    var frameExtractor: FrameExtractor!
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        imageView.isUserInteractionEnabled = true
        frameExtractor = FrameExtractor()
        frameExtractor.delegate = self
    }
    @IBAction func singleTap(_ sender: UITapGestureRecognizer) {
        if sender.state == UIGestureRecognizerState.ended {
            OpenCVWrapper.switchImage()
        }
    }

    @IBAction func threeFingersDetected(_ sender: UILongPressGestureRecognizer) {
        if sender.state == UIGestureRecognizerState.ended, let image = imageView.image {
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
            print("Saved Image")
        }
    }

    func captured(image: UIImage) {
        imageView.image = OpenCVWrapper.threshold(image, deg: 0)
    }
    override var shouldAutorotate: Bool {
        return true
    }

    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
        super.viewWillTransition(to: size, with: coordinator)
        // Counter-rotate the image view so the camera feed stays upright.
        switch UIDevice.current.orientation {
        case .portrait:
            imageView.transform = CGAffineTransform(rotationAngle: 0)
        case .landscapeRight:
            imageView.transform = CGAffineTransform(rotationAngle: .pi / 2)
        case .landscapeLeft:
            imageView.transform = CGAffineTransform(rotationAngle: -.pi / 2)
        case .portraitUpsideDown:
            imageView.transform = CGAffineTransform(rotationAngle: .pi)
        default:
            imageView.transform = CGAffineTransform(rotationAngle: 0)
        }
        imageView.frame = self.view.bounds
    }
}
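
ViewController calls into OpenCVWrapper, an Objective-C++ bridge to OpenCV that is not included in this gist. For running the capture pipeline without OpenCV, a hypothetical Swift stand-in (names and the deg parameter type are inferred from the call sites above, not from the real wrapper) could look like:

import UIKit

// Hypothetical stand-in for the missing OpenCVWrapper bridge. It only lets
// the project compile and passes frames through untouched.
final class OpenCVWrapper {
    // The real wrapper presumably toggles a processing mode; no-op here.
    static func switchImage() {}

    // The real wrapper thresholds the frame with OpenCV; this stub returns
    // the input image unchanged.
    static func threshold(_ image: UIImage, deg: Int) -> UIImage {
        return image
    }
}

With this stand-in, captured(image:) simply displays the raw camera frame.
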