@yusuke024
Created November 16, 2018 03:15
Recording video with AVAssetWriter
import UIKit
import AVFoundation

class ViewController: UIViewController {
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted {
                    self._setupCaptureSession()
                }
            }
        case .restricted:
            break
        case .denied:
            break
        case .authorized:
            _setupCaptureSession()
        }
    }

    private var _captureSession: AVCaptureSession?
    private var _videoOutput: AVCaptureVideoDataOutput?
    private var _assetWriter: AVAssetWriter?
    private var _assetWriterInput: AVAssetWriterInput?
    private var _adapter: AVAssetWriterInputPixelBufferAdaptor?
    private var _filename = ""
    private var _time: Double = 0
    private func _setupCaptureSession() {
        let session = AVCaptureSession()
        session.sessionPreset = .hd1920x1080
        guard
            let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .unspecified),
            let input = try? AVCaptureDeviceInput(device: device),
            session.canAddInput(input) else { return }
        session.beginConfiguration()
        session.addInput(input)
        session.commitConfiguration()

        let output = AVCaptureVideoDataOutput()
        guard session.canAddOutput(output) else { return }
        output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "com.yusuke024.video"))
        session.beginConfiguration()
        session.addOutput(output)
        session.commitConfiguration()

        DispatchQueue.main.async {
            let previewView = _PreviewView()
            previewView.videoPreviewLayer.session = session
            previewView.frame = self.view.bounds
            previewView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
            self.view.insertSubview(previewView, at: 0)
        }

        session.startRunning()
        _videoOutput = output
        _captureSession = session
    }
    private enum _CaptureState {
        case idle, start, capturing, end
    }
    private var _captureState = _CaptureState.idle

    @IBAction func capture(_ sender: Any) {
        switch _captureState {
        case .idle:
            _captureState = .start
        case .capturing:
            _captureState = .end
        default:
            break
        }
    }
}
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
        switch _captureState {
        case .start:
            // Set up recorder
            _filename = UUID().uuidString
            let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mov")
            let writer = try! AVAssetWriter(outputURL: videoPath, fileType: .mov)
            let settings = _videoOutput!.recommendedVideoSettingsForAssetWriter(writingTo: .mov)
            let input = AVAssetWriterInput(mediaType: .video, outputSettings: settings) // [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: 1920, AVVideoHeightKey: 1080])
            input.mediaTimeScale = CMTimeScale(bitPattern: 600)
            input.expectsMediaDataInRealTime = true
            input.transform = CGAffineTransform(rotationAngle: .pi/2)
            let adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: nil)
            if writer.canAdd(input) {
                writer.add(input)
            }
            writer.startWriting()
            writer.startSession(atSourceTime: .zero)
            _assetWriter = writer
            _assetWriterInput = input
            _adapter = adapter
            _captureState = .capturing
            _time = timestamp
        case .capturing:
            // Append each frame's pixel buffer with a timestamp relative to the start of recording.
            if _assetWriterInput?.isReadyForMoreMediaData == true {
                let time = CMTime(seconds: timestamp - _time, preferredTimescale: CMTimeScale(600))
                _adapter?.append(CMSampleBufferGetImageBuffer(sampleBuffer)!, withPresentationTime: time)
            }
        case .end:
            // Finish writing, then hand the recorded file to a share sheet.
            guard _assetWriterInput?.isReadyForMoreMediaData == true, _assetWriter!.status != .failed else { break }
            let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mov")
            _assetWriterInput?.markAsFinished()
            _assetWriter?.finishWriting { [weak self] in
                self?._captureState = .idle
                self?._assetWriter = nil
                self?._assetWriterInput = nil
                DispatchQueue.main.async {
                    let activity = UIActivityViewController(activityItems: [url], applicationActivities: nil)
                    self?.present(activity, animated: true, completion: nil)
                }
            }
        default:
            break
        }
    }
}
private class _PreviewView: UIView {
    override class var layerClass: AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }

    var videoPreviewLayer: AVCaptureVideoPreviewLayer {
        return layer as! AVCaptureVideoPreviewLayer
    }
}

@RahulThink

This code is only giving me 6 to 7 frames per second. I want 30 fps, i.e. captureOutput(_:didOutput:from:) should be called 30 times per second. Is there any way to do that?

@sukidhar

I'd guess the device is having trouble keeping up with that frame rate, or you may have configured a preset that is too heavy to process that many frames at once. The delegate is called directly by the AVCaptureSession.
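
One hedged sketch of a way to request a fixed 30 fps: lock the AVCaptureDevice and set activeVideoMinFrameDuration / activeVideoMaxFrameDuration. The helper name _configureFrameRate and the idea of calling it from _setupCaptureSession() (after device is obtained, before startRunning()) are assumptions, and the active format still has to support the requested rate:

    import AVFoundation

    // Hedged sketch: ask a capture device for a constant frame rate.
    // Call with the `device` from _setupCaptureSession() before session.startRunning().
    private func _configureFrameRate(_ device: AVCaptureDevice, fps: Int32 = 30) {
        do {
            try device.lockForConfiguration()
            // 1/fps seconds per frame; the device clamps this to what the active format supports.
            device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: fps)
            device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: fps)
            device.unlockForConfiguration()
        } catch {
            print("Could not lock \(device.localizedName) for configuration: \(error)")
        }
    }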

@mrousavy

Hey, thanks for sharing this code.
The initialization takes ages for me; it introduces up to 5 seconds of delay on an iPhone 11 Pro (in debug mode). In release it's a bit faster, but still far from what the default movie output can achieve.
Any tips?
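
Not an answer from this thread, but one common mitigation, sketched under the assumption that _setupCaptureSession() stays as written: session.startRunning() blocks until the session is live, so keeping the whole setup off the main queue avoids freezing the UI while the camera spins up. The _sessionQueue name and the wrapper function are hypothetical:

    // Hypothetical serial queue for camera work (name is an assumption).
    private let _sessionQueue = DispatchQueue(label: "camera.session")

    // Call this instead of _setupCaptureSession() from viewDidAppear.
    private func _setupCaptureSessionInBackground() {
        _sessionQueue.async { [weak self] in
            // startRunning() inside _setupCaptureSession() blocks until the session starts,
            // so doing it here keeps the main thread responsive. The preview view is
            // already inserted on the main queue inside the existing setup code.
            self?._setupCaptureSession()
        }
    }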

@datpt11 commented Jun 26, 2024

Same issue here. @mrousavy, did you find a solution?
