Skip to content

Instantly share code, notes, and snippets.

@delasign
Created December 3, 2018 16:19
Show Gist options
  • Save delasign/4d82dbe8ecba860fba8800c439fa1a72 to your computer and use it in GitHub Desktop.
Save delasign/4d82dbe8ecba860fba8800c439fa1a72 to your computer and use it in GitHub Desktop.
Audio Functionality
// MARK: AUDIO FUNCTIONALITY
// Capture pipeline state. All three remain nil until startAudioRecording(completionHandler:)
// succeeds, and are the objects torn down (via the session) in endAudioRecording().
var captureSession: AVCaptureSession?
// Microphone device input attached to captureSession.
var micInput:AVCaptureDeviceInput?
// Streams raw audio sample buffers to captureOutput(_:didOutput:from:) below.
var audioOutput:AVCaptureAudioDataOutput?
/// Builds an `AVCaptureSession` that delivers microphone sample buffers to
/// `captureOutput(_:didOutput:from:)` on a global queue, then starts it running.
///
/// - Parameter completionHandler: Called exactly once: `true` when the session
///   is running, `false` on any failure (no microphone available, the session
///   rejected the input/output, or `AVCaptureDeviceInput` threw).
func startAudioRecording(completionHandler: @escaping (Bool) -> ()) {
    // `default(_:for:position:)` returns nil when no built-in microphone
    // exists (e.g. Simulator); the original force-unwrap crashed here.
    guard let microphone = AVCaptureDevice.default(.builtInMicrophone,
                                                   for: AVMediaType.audio,
                                                   position: .unspecified) else {
        completionHandler(false)
        return
    }
    do {
        let input = try AVCaptureDeviceInput(device: microphone)
        let session = AVCaptureSession()
        let output = AVCaptureAudioDataOutput()
        // The original returned silently (handler never called) when the
        // session rejected the input or output; report failure instead.
        guard session.canAddInput(input) else {
            completionHandler(false)
            return
        }
        session.addInput(input)
        guard session.canAddOutput(output) else {
            completionHandler(false)
            return
        }
        session.addOutput(output)
        output.setSampleBufferDelegate(self, queue: DispatchQueue.global())
        // Publish to properties only once the pipeline is fully assembled.
        self.micInput = input
        self.audioOutput = output
        self.captureSession = session
        session.startRunning()
        completionHandler(true)
    } catch {
        // AVCaptureDeviceInput(device:) threw — e.g. the mic is unavailable.
        completionHandler(false)
    }
}
/// Stops the running capture session, ending delivery of audio buffers.
/// Safe to call even if recording was never (successfully) started: the
/// original `captureSession!` force-unwrap crashed in that case; optional
/// chaining makes it a harmless no-op instead.
func endAudioRecording() { //completionHandler:@escaping()->()
    self.captureSession?.stopRunning()
}
/// `AVCaptureAudioDataOutputSampleBufferDelegate` callback: retimes each audio
/// buffer onto the AR video timeline (current ARFrame timestamp minus
/// `videoStartTime`) and appends the copy to the asset-writer audio input.
/// Technique from:
/// https://github.com/takecian/video-examples-ios/blob/master/recordings/TimelapseCameraEngine.swift
///
/// NOTE(review): assumes `videoStartTime` is a `CMTime` captured when video
/// recording began, and `audioInput` is an `AVAssetWriterInput` — confirm
/// against the declarations elsewhere in this class.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // First call with entryCount 0 only queries how many timing entries exist.
    var count: CMItemCount = 0
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
    guard count > 0 else { return }

    // Second call fills the array with the buffer's actual timing entries.
    let placeholder = CMSampleTimingInfo(duration: CMTimeMake(value: 0, timescale: 0),
                                         presentationTimeStamp: CMTimeMake(value: 0, timescale: 0),
                                         decodeTimeStamp: CMTimeMake(value: 0, timescale: 0))
    var info = [CMSampleTimingInfo](repeating: placeholder, count: count)
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: count, arrayToFill: &info, entriesNeededOut: &count)

    // The AR session may momentarily have no current frame, and recording may
    // not have started yet; drop this buffer rather than crash (the original
    // force-unwrapped both). Dropping is acceptable for a streaming callback.
    guard let currentFrame = self.sceneView.session.currentFrame,
          let startTime = self.videoStartTime else { return }

    // Convert the ARFrame's TimeInterval timestamp to CMTime at nanosecond
    // precision, then rebase it so t=0 is the start of the video recording.
    let scale = CMTimeScale(NSEC_PER_SEC)
    let frameTime = CMTime(value: CMTimeValue(currentFrame.timestamp * Double(scale)),
                           timescale: scale) - startTime
    for i in 0..<count {
        info[i].decodeTimeStamp = frameTime
        info[i].presentationTimeStamp = frameTime
    }

    // Copy the buffer with the rewritten timing; check the status the
    // original ignored before appending.
    var retimedBuffer: CMSampleBuffer?
    let status = CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault,
                                                       sampleBuffer: sampleBuffer,
                                                       sampleTimingEntryCount: count,
                                                       sampleTimingArray: &info,
                                                       sampleBufferOut: &retimedBuffer)
    guard status == noErr, let buffer = retimedBuffer else { return }
    self.audioInput?.append(buffer)
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment