Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Save ming-chu/95f719b47e038860bed9682d57a9dd00 to your computer and use it in GitHub Desktop.
Save ming-chu/95f719b47e038860bed9682d57a9dd00 to your computer and use it in GitHub Desktop.
AVFoundation write/read audio file frame by frame
#import <AVFoundation/AVFoundation.h>
#import "Recorder.h"
// Class extension: private state for a one-shot record-then-read demo.
// Captures ~1 s of microphone audio to temp.mp4 via AVAssetWriter, then
// reopens the file and decodes it to PCM frame-by-frame via AVAssetReader.
@interface Recorder()<AVCaptureAudioDataOutputSampleBufferDelegate>{
AVCaptureDevice *audioDevice; // selected microphone (default audio device)
AVCaptureDeviceInput *audioInput; // session input wrapping audioDevice
AVCaptureAudioDataOutput* _audioDataOutput; // delivers sample buffers to self on _captureQueue
dispatch_queue_t _captureQueue; // serial queue for capture delegate callbacks
AVURLAsset *_asset; // the finished output file, reopened for reading
AVAssetReader *_assetReader; // pulls decoded sample buffers out of _asset
AVAssetReaderTrackOutput *_audioOutput; // LPCM-decoding output for audio_track
AVAssetReaderTrackOutput *_videoOutput; // NOTE(review): never assigned or used in this file
AVAssetTrack* audio_track; // audio track of the reopened asset
}
@property (nonatomic, readonly) AVCaptureSession *session; // capture session (built in -setupDevices)
@property (nonatomic) AVAssetWriter *writer; // writes AAC into the MP4 container
@property (nonatomic) AVAssetWriterInput *audioWriterInput; // nil-ed out once recording finishes
@property (nonatomic) NSString *filename; // temp.mp4 path under NSTemporaryDirectory()
@property (nonatomic, readonly) int audioFrameCount; // audio samples appended so far
@end
@implementation Recorder
/// Designated initializer: picks a temp-directory output path, then wires up
/// the asset writer and the capture session (capture starts immediately).
/// @return The initialized recorder, or nil if [super init] fails.
- (instancetype)init {
    self = [super init];
    if (self) {  // guard: the original unconditionally touched ivars even if super returned nil
        _filename = [NSTemporaryDirectory() stringByAppendingPathComponent:@"temp.mp4"];
        NSLog(@"filename: %@", _filename);
        [self setupWriter];
        [self setupDevices];
    }
    return self;
}
// Builds the AVCaptureSession: default microphone in, audio data out,
// with sample buffers delivered to self on a private serial queue.
- (void)setupDevices {
    _captureQueue = dispatch_queue_create("capture", DISPATCH_QUEUE_SERIAL);
    NSError *error = nil;
    _session = [[AVCaptureSession alloc] init];
    _audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
    [_audioDataOutput setSampleBufferDelegate:self queue:_captureQueue];
    audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    // NOTE(review): audio devices normally report AVCaptureDevicePositionUnspecified,
    // so this loop almost always falls through to the default device — confirm intent.
    for (AVCaptureDevice *dev in [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]) {
        if (dev.position == AVCaptureDevicePositionFront) {
            audioDevice = dev;
            break;
        }
    }
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
    if (!audioInput) {  // e.g. no microphone permission — the original ignored the error
        NSLog(@"create audio input failed: %@", error);
        return;
    }
    // Batch the configuration: the original called commitConfiguration without
    // a matching beginConfiguration, which is an API misuse.
    [_session beginConfiguration];
    [_session setSessionPreset:AVCaptureSessionPresetMedium];
    if ([_session canAddOutput:_audioDataOutput]) {
        [_session addOutput:_audioDataOutput];
    }
    if ([_session canAddInput:audioInput]) {
        [_session addInput:audioInput];
    }
    [_session commitConfiguration];
    [_session startRunning];
}
// Creates the AVAssetWriter targeting _filename (AAC 44.1 kHz stereo in MP4)
// and starts writing. Any previous file at that path is removed first, since
// AVAssetWriter will not overwrite an existing URL.
- (void)setupWriter {
    if ([[NSFileManager defaultManager] fileExistsAtPath:_filename]) {
        [[NSFileManager defaultManager] removeItemAtPath:_filename error:nil];
    }
    NSURL *url = [NSURL fileURLWithPath:_filename];
    NSError *error = nil;
    _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:&error];
    if (!_writer) {  // the original passed error:nil and never checked the result
        NSLog(@"create writer failed: %@", error);
        return;
    }
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    // Audio writer-input settings conventionally require a channel count;
    // 2 matches the stereo channel layout above.
    NSDictionary *settings = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey: @(44100),
        AVNumberOfChannelsKey: @(2),
        AVChannelLayoutKey: [NSData dataWithBytes:&acl length:sizeof(acl)],
    };
    _audioWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:settings];
    _audioWriterInput.expectsMediaDataInRealTime = YES;  // fed live from the capture callback
    if ([_writer canAddInput:_audioWriterInput]) {
        [_writer addInput:_audioWriterInput];
    }
    if (![_writer startWriting]) {
        NSLog(@"start writer failed: %@", _writer.error.description);
    }
}
#pragma mark - AVCaptureAudioDataOutputSampleBufferDelegate
/// Opt in to sample-accurate delivery at recording start.
- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)captureOutput {
    return YES;
}
// Capture delegate: appends audio sample buffers to the writer until just over
// one second's worth of samples (> 44100) has been written, then finalizes the
// file and kicks off -readAudioFile from the completion handler.
// Runs on _captureQueue (the queue passed to setSampleBufferDelegate:queue:).
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!_audioWriterInput) {
        return;  // recording already finished (input nil-ed out below)
    }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"!CMSampleBufferDataIsReady");
        return;
    }
    if (_audioFrameCount == 0) {
        NSLog(@"start %@", _writer.outputURL.lastPathComponent);
        // Use the buffer's own PTS: the original re-derived it via
        // CMTimeMakeWithSeconds(seconds, 1), which truncated the session start
        // to whole seconds and shifted every appended sample.
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    if (_writer.status == AVAssetWriterStatusFailed) {
        NSLog(@"writer error %@", _writer.error.localizedDescription);
        // TODO: set status
    } else if (_audioWriterInput.readyForMoreMediaData) {
        int count = (int)CMSampleBufferGetNumSamples(sampleBuffer);
        // Check the append result — the original counted samples even when the
        // append failed, inflating _audioFrameCount.
        if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
            NSLog(@"append failed: %@", _writer.error.localizedDescription);
            return;
        }
        _audioFrameCount += count;
        if (_audioFrameCount > 44100) {  // > ~1 s at 44.1 kHz: stop recording
            [_audioWriterInput markAsFinished];  // signal no more media before finishing
            _audioWriterInput = nil;  // gates further callbacks (see guard at top)
            [_writer finishWritingWithCompletionHandler:^{
                NSLog(@"stop %@", _writer.outputURL.lastPathComponent);
                if (_writer.status != AVAssetWriterStatusCompleted) {
                    NSLog(@"asset writer failed: %@", _writer.outputURL.lastPathComponent);
                    return;
                }
                NSLog(@"finish writing %d samples", _audioFrameCount);
                [self readAudioFile];
            }];
        }
    } else {
        NSLog(@"!readyForMoreMediaData %d", (int)_writer.status);
    }
}
// Reopens the finished file and decodes its audio track to linear PCM,
// pulling sample buffers one at a time and accumulating the raw bytes.
// Also logs the AAC packet-table info (priming/remainder frames), which
// explains why the decoded sample count differs from what was written.
- (void)readAudioFile {
    NSError *error = nil;
    _asset = [AVURLAsset URLAssetWithURL:_writer.outputURL options:nil];
    _assetReader = [[AVAssetReader alloc] initWithAsset:_asset error:&error];
    if (!_assetReader) {  // the original never checked the creation error
        NSLog(@"create reader failed: %@", error);
        return;
    }
    audio_track = [_asset tracksWithMediaType:AVMediaTypeAudio].lastObject;
    if (audio_track) {
        // Decode to LPCM so the buffers below contain raw samples.
        NSDictionary *settings = @{
            AVFormatIDKey: @(kAudioFormatLinearPCM),
        };
        _audioOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audio_track
                                                        outputSettings:settings];
        if ([_assetReader canAddOutput:_audioOutput]) {
            [_assetReader addOutput:_audioOutput];
        }
    }
    if (![_assetReader startReading]) {  // original ignored the BOOL result
        NSLog(@"startReading failed: %@", _assetReader.error);
        return;
    }
    // Read the packet-table property via AudioToolbox. The original continued
    // into AudioFileGetProperty/AudioFileClose even when the open failed.
    AudioFileID fileID = NULL;
    OSStatus err = AudioFileOpenURL((__bridge CFURLRef)_writer.outputURL, kAudioFileReadPermission, 0, &fileID);
    if (err != noErr) {
        NSLog(@"AudioFileOpenURL failed");
    } else {
        AudioFilePacketTableInfo audioInfo;
        UInt32 size = sizeof(audioInfo);
        err = AudioFileGetProperty(fileID, kAudioFilePropertyPacketTableInfo, &size, &audioInfo);
        if (err == noErr) {  // only log values that were actually filled in
            NSLog(@"priming: %d remainder: %d valid: %d", audioInfo.mPrimingFrames, audioInfo.mRemainderFrames, (int)audioInfo.mNumberValidFrames);
        }
        AudioFileClose(fileID);
    }
    int total_samples = 0;
    NSMutableData *data = [[NSMutableData alloc] init];
    while (1) {
        CMSampleBufferRef sampleBuffer = [_audioOutput copyNextSampleBuffer];
        if (!sampleBuffer) {
            break;  // end of track, or reader failure
        }
        AudioBufferList audioBufferList;
        // Start from NULL: the original seeded this with CMSampleBufferGetDataBuffer
        // (a Get-rule, non-owned reference) and then CFRelease'd it — an over-release
        // whenever the retained-getter below failed to overwrite the pointer.
        CMBlockBufferRef blockBuffer = NULL;
        OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
            sampleBuffer,
            NULL,
            &audioBufferList,
            sizeof(AudioBufferList),
            NULL,
            NULL,
            kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
            &blockBuffer);
        if (status == noErr) {  // original used the buffer list without checking
            for (NSUInteger i = 0; i < audioBufferList.mNumberBuffers; i++) {
                AudioBuffer audioBuffer = audioBufferList.mBuffers[i];
                [data appendBytes:audioBuffer.mData length:audioBuffer.mDataByteSize];
            }
            total_samples += (int)CMSampleBufferGetNumSamples(sampleBuffer);
        }
        if (blockBuffer) {
            CFRelease(blockBuffer);  // balances the "Retained" in the getter's name
        }
        CFRelease(sampleBuffer);  // copyNextSampleBuffer follows the Copy rule
    }
    NSLog(@"read %d samples", total_samples);
}
@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment