@Sjahriyar
Last active May 8, 2019 12:14
If you need to set up an AVCaptureSession for capturing photos or videos in Swift, you can download this file and add it to your project. Usage is straightforward: create an instance of CameraController, fill in the required arguments for your use case, and then call yourInstanceVariable.start(on: self.view) in viewDidLoad().
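For example, a minimal usage sketch of the steps described above. The view controller, the chosen device type, and the .photo output are illustrative choices, not part of the file itself; the CameraController API is defined in the source below.

import UIKit
import AVFoundation

class PhotoCaptureViewController: UIViewController {
    // Illustrative configuration: rear wide-angle camera, photo output.
    lazy var cameraController = CameraController(
        deviceType: [.builtInWideAngleCamera],
        toCapture: .video,
        cameraPosition: .back,
        outputType: .photo
    )

    override func viewDidLoad() {
        super.viewDidLoad()
        // Checks camera authorization, starts the capture session,
        // and attaches the preview layer to this view.
        cameraController.start(on: self.view)
    }
}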
//
// CameraController.swift
//
// Created by Shahriyar on 5/8/19.
//
import UIKit
import AVFoundation

/**
 This class prepares an AVCaptureSession for the chosen capture device, media type, and output type.
 */
class CameraController: NSObject {
    var currentCameraPosition: CameraPosition!
    var captureSession : AVCaptureSession!
    var captureLayer : AVCaptureVideoPreviewLayer!
    var frontCamera : AVCaptureDevice?
    var rearCamera : AVCaptureDevice?
    var deviceTypes : [AVCaptureDevice.DeviceType]!
    var toCapture : AVMediaType!
    var cameraPosition : AVCaptureDevice.Position!
    var outputType : OutputType = .photo

    required init(deviceType: [AVCaptureDevice.DeviceType], toCapture: AVMediaType, cameraPosition: AVCaptureDevice.Position, outputType: OutputType) {
        super.init()
        self.deviceTypes = deviceType
        self.toCapture = toCapture
        self.cameraPosition = cameraPosition
        self.outputType = outputType
    }
    func start(on view: UIView) {
        switch AVCaptureDevice.authorizationStatus(for: toCapture) {
        case .authorized: // The user has previously granted access to the camera.
            do {
                try self.setupCaptureSession()
                try self.displayPreview(on: view)
            } catch {
                print("☠️ \(error.localizedDescription)")
            }
        case .notDetermined: // The user has not yet been asked for camera access.
            AVCaptureDevice.requestAccess(for: toCapture) { granted in
                guard granted else { return }
                // The completion handler runs on an arbitrary queue; hop back to the
                // main queue before configuring the session and touching the view.
                DispatchQueue.main.async {
                    do {
                        try self.setupCaptureSession()
                        try self.displayPreview(on: view)
                    } catch {
                        print("☠️ \(error.localizedDescription)")
                    }
                }
            }
        case .denied: // The user has previously denied access.
            return
        case .restricted: // The user can't grant access due to restrictions.
            return
        default:
            return
        }
    }
    fileprivate func setupCaptureSession() throws {
        self.captureSession = AVCaptureSession()
        let videoDevice = bestDevice()

        // Changing device settings requires locking the device for configuration first.
        if videoDevice.isFocusModeSupported(.autoFocus) {
            do {
                try videoDevice.lockForConfiguration()
                videoDevice.focusMode = .autoFocus
                videoDevice.unlockForConfiguration()
            } catch {
                print("☠️ Could not lock device for configuration: \(error.localizedDescription)")
            }
        }

        self.captureSession.beginConfiguration()
        guard
            let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice),
            captureSession.canAddInput(videoDeviceInput)
            else { throw CameraControllerError.inputsAreInvalid }
        captureSession.addInput(videoDeviceInput)
        configureOutputs()
        captureSession.commitConfiguration()
        self.captureSession.startRunning()
    }
    fileprivate func bestDevice() -> AVCaptureDevice {
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: self.deviceTypes, mediaType: .video, position: .unspecified)
        let devices = discoverySession.devices
        guard !devices.isEmpty else { fatalError("Missing capture devices.") }
        // Prefer the device at the requested position; fall back to the first discovered device.
        return devices.first(where: { $0.position == self.cameraPosition }) ?? devices[0]
    }
    fileprivate func configureOutputs() {
        var outputs: [AVCaptureOutput]
        var preset: AVCaptureSession.Preset
        switch self.outputType {
        case .photo:
            outputs = [AVCapturePhotoOutput()]
            preset = .photo
        case .video:
            outputs = [AVCaptureVideoDataOutput()]
            preset = .medium
        case .photoAndVideo:
            outputs = [AVCapturePhotoOutput(), AVCaptureVideoDataOutput()]
            preset = .high
        case .videoAndAudio:
            outputs = [AVCaptureVideoDataOutput(), AVCaptureAudioDataOutput()]
            preset = .high
        }

        captureSession.sessionPreset = preset
        for output in outputs {
            guard captureSession.canAddOutput(output) else { return }
            captureSession.addOutput(output)
        }
    }
    fileprivate func displayPreview(on view: UIView) throws {
        guard let captureSession = self.captureSession, captureSession.isRunning else { throw CameraControllerError.captureSessionIsMissing }
        self.captureLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.captureLayer.videoGravity = .resizeAspectFill
        self.captureLayer.connection?.videoOrientation = .portrait
        self.captureLayer.frame = view.bounds
        view.layer.insertSublayer(self.captureLayer, at: 0)
    }
}
extension CameraController: AVCaptureVideoDataOutputSampleBufferDelegate {
    // Called for every captured video frame once this object has been registered
    // as the AVCaptureVideoDataOutput's sample buffer delegate.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("🦋 \(sampleBuffer)")
    }
}
enum CameraControllerError: Swift.Error {
    case captureSessionAlreadyRunning
    case captureSessionIsMissing
    case inputsAreInvalid
    case invalidOperation
    case noCamerasAvailable
    case accessDenied
    case unknown
}

public enum CameraPosition {
    case front
    case rear
}

enum OutputType {
    case video
    case photo
    case videoAndAudio
    case photoAndVideo
}
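Two things the file leaves to the caller: the AVCaptureVideoDataOutputSampleBufferDelegate extension only receives frames once the controller is registered as the video data output's sample buffer delegate, which the class never does itself, and camera access requires an NSCameraUsageDescription entry in the app's Info.plist (plus NSMicrophoneUsageDescription when capturing audio). A possible way to wire up the delegate, sketched as a hypothetical addition inside configureOutputs() where the video data output is created (the queue label is illustrative):

// Hypothetical addition inside configureOutputs():
let videoDataOutput = AVCaptureVideoDataOutput()
// Deliver sample buffers to this controller on a dedicated serial queue.
videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.sample-buffer"))
if captureSession.canAddOutput(videoDataOutput) {
    captureSession.addOutput(videoDataOutput)
}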