Capture Photo from Camera Preview Layer Swift 3
//
// BodyguardViewController.swift
// My Mobile Bodyguard
//
// Created by Koray Zorluoglu on 06.07.17.
// Copyright © 2017 Koray Zorluoglu. All rights reserved.
//
import UIKit
import AVFoundation
import Darwin
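
// Note: on iOS 10 and later the app's Info.plist must contain an
// NSCameraUsageDescription entry; without it, the process is terminated as soon
// as the capture session tries to access the camera.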
class BodyguardViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    @IBOutlet weak var captureImageView: UIImageView!

    var captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!
    var startPhotoCapture = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        prepareCamera()
    }
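
    // Selects the front-facing built-in wide-angle camera through
    // AVCaptureDeviceDiscoverySession and hands off to beginSession().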
    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .front).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }
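
    // Adds the camera input to the session (only once), starts it running, and
    // attaches a 32BGRA video data output whose frames are delivered to this view
    // controller on a background queue.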
    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.inputs.isEmpty {
                self.captureSession.addInput(captureDeviceInput)
            }
        } catch {
            print(error.localizedDescription)
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            //self.view.layer.addSublayer(self.previewLayer)
            //self.previewLayer.frame = self.view.layer.frame
            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
            }
            captureSession.commitConfiguration()

            let queue = DispatchQueue(label: "mymobilebodyguard")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }
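
    // Sample-buffer delegate callback (Swift 3 signature). Once startPhotoCapture is
    // set, the delivered frame is converted to a UIImage and shown in
    // captureImageView five times, one second apart, on the main queue.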
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if startPhotoCapture {
            var frameCount = 0
            // Display the frame five times, one second apart, then leave the loop.
            captureLoop: while true {
                if let image = getImageFormSampleBuffer(buffer: sampleBuffer) {
                    DispatchQueue.main.sync {
                        self.captureImageView.image = image
                        print(image.description)
                    }
                    print(frameCount)
                    sleep(1)
                    if frameCount == 4 {
                        break captureLoop
                    }
                    frameCount += 1
                }
            }
        }
    }
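
    // Converts a CMSampleBuffer into a UIImage via CIImage/CIContext; the .right
    // orientation compensates for the landscape-native orientation of the
    // delivered pixel buffer.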
    func getImageFormSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
            }
        }
        return nil
    }
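
    // Stops the running session and removes its device inputs so the camera is released.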
    func stopCaptureSession() {
        self.captureSession.stopRunning()
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        playVideo()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }
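
    // Plays the bundled polizei.mp4 over the full view and sets startPhotoCapture
    // so the sample-buffer delegate starts capturing frames.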
    private func playVideo() {
        guard let path = Bundle.main.path(forResource: "polizei", ofType: "mp4") else {
            debugPrint("polizei.mp4 not found")
            return
        }
        let player = AVPlayer(url: URL(fileURLWithPath: path))
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = self.view.bounds
        self.view.layer.addSublayer(playerLayer)
        player.play()
        startPhotoCapture = true
    }
}
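
// Note: this file targets Swift 3 / the iOS 10 SDK. When building with Swift 4 or
// later, several of the identifiers used above were renamed (assuming the iOS 11+ SDK):
//   AVCaptureSessionPresetPhoto                   -> AVCaptureSession.Preset.photo
//   AVMediaTypeVideo                              -> AVMediaType.video
//   AVCaptureDeviceDiscoverySession               -> AVCaptureDevice.DiscoverySession
//   captureOutput(_:didOutputSampleBuffer:from:)  -> captureOutput(_:didOutput:from:)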