Personal project: CameraEngine.m, an AVFoundation camera capture engine with pause/resume video recording.
#import "CameraEngine.h" | |
#import "VideoEncoder.h" | |
#import "AssetsLibrary/ALAssetsLibrary.h" | |
static CameraEngine *_engine; | |
@interface CameraEngine () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> | |
{ | |
AVCaptureSession *_session; | |
AVCaptureVideoPreviewLayer *_previewLayer; | |
dispatch_queue_t _captureQueue; | |
AVCaptureConnection *_audioConnection; | |
AVCaptureConnection *_videoConnection; | |
VideoEncoder *_encoder; | |
BOOL _discont; | |
NSString *_filePath; | |
CMTime _timeOffset; | |
CMTime _lastVideo; | |
CMTime _lastAudio; | |
int _cx; | |
int _cy; | |
int _channels; | |
Float64 _samplerate; | |
NSData *_recordedData; | |
} | |
@end | |
@implementation CameraEngine

+ (void)initialize
{
    // Guard against +initialize being invoked again for a subclass
    if (self == [CameraEngine class]) {
        _engine = [[CameraEngine alloc] init];
    }
}

+ (CameraEngine *)engine
{
    return _engine;
}
- (void)startup
{
    if (!_session) {
        NSLog(@"Starting up camera");
        _isCapturing = NO;
        _isPaused = NO;
        _discont = NO;

        // Create the capture session with a video input, preferring the front camera
        _session = [[AVCaptureSession alloc] init];
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if (device.position == AVCaptureDevicePositionFront) {
                camera = device;
            }
        }
        if (!camera) {
            return; // no camera available (e.g. the simulator)
        }
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
        if (input) {
            [_session addInput:input]; // -addInput: raises if handed a nil input
        }

        // Audio input from the default microphone
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *micInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
        if (micInput) {
            [_session addInput:micInput];
        }

        // Create a video data output delivering YUV (NV12) frames, with self as delegate
        _captureQueue = dispatch_queue_create("uk.co.gdcl.cameraengine.capture", DISPATCH_QUEUE_SERIAL);
        AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [videoOutput setSampleBufferDelegate:self queue:_captureQueue];
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
                                       (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey,
                                       nil];
        videoOutput.videoSettings = videoSettings;
        [_session addOutput:videoOutput];
        _videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];

        // Read back the dimensions the session actually delivers so the encoder can match them
        NSDictionary *actual = videoOutput.videoSettings;
        _cy = [[actual objectForKey:@"Height"] intValue];
        _cx = [[actual objectForKey:@"Width"] intValue];

        AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [audioOutput setSampleBufferDelegate:self queue:_captureQueue];
        [_session addOutput:audioOutput];
        _audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
        // For audio we also need the channel count and sample rate, but on iOS those
        // aren't available from audioOutput.audioSettings, so we wait for the first sample.

        // Start capture and create a preview layer
        [_session startRunning];
        _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
        _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
}
- (void)startCaptureAtPath:(NSString *)path
{
    @synchronized(self) {
        if (!_isCapturing) {
            NSLog(@"Starting capture");
            _filePath = path;
            // Create the encoder once we have the audio params
            _encoder = nil;
            _isPaused = NO;
            _discont = NO;
            _timeOffset = CMTimeMake(0, 0);
            _isCapturing = YES;
        }
    }
}
- (void)stopCapture
{
    @synchronized(self) {
        if (_isCapturing) {
            NSLog(@"Stopping capture");
            // Serialize with the audio and video capture callbacks
            _isCapturing = NO;
            dispatch_async(_captureQueue, ^{
                [_encoder finishWithCompletionHandler:^{
                    _isCapturing = NO;
                    _encoder = nil;
//                    NSString *filePath = [*path stringByAppendingString:VIDEO_FILE];
//                    NSURL *url = [NSURL fileURLWithPath:filePath];
//
//                    _recordedData = [NSData dataWithContentsOfURL:url];
//                    dispatch_async(dispatch_get_main_queue(), ^{
//                        [_recordedData writeToFile:path atomically:YES];
//                        NSLog(@"Save completed");
//                    });
                }];
            });
        }
    }
}
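
// A minimal sketch, not part of the original engine, showing one way to save the
// finished recording to the Photos library using the ALAssetsLibrary framework
// imported above. The method name is hypothetical and nothing in this file calls
// it; it could be invoked from stopCapture's completion handler, after the
// encoder has finished writing _filePath.
- (void)saveRecordingToLibrary
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    NSURL *url = [NSURL fileURLWithPath:_filePath];
    [library writeVideoAtPathToSavedPhotosAlbum:url
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Save to Photos library failed: %@", error);
        } else {
            NSLog(@"Saved recording to %@", assetURL);
        }
    }];
}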
- (void)pauseCapture
{
    @synchronized(self) {
        if (_isCapturing) {
            NSLog(@"Pausing capture");
            _isPaused = YES;
            _discont = YES;
        }
    }
}

- (void)resumeCapture
{
    @synchronized(self) {
        if (_isPaused) {
            NSLog(@"Resuming capture");
            _isPaused = NO;
        }
    }
}
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset
{
    // Copy the sample buffer with every timing entry shifted back by offset
    CMItemCount count;
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); // first call just gets the entry count
    CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
    for (CMItemCount i = 0; i < count; i++) {
        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef sout;
    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); // caller is responsible for releasing sout
    free(pInfo);
    return sout;
}
- (void)setAudioFormat:(CMFormatDescriptionRef)fmt
{
    const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
    _samplerate = asbd->mSampleRate;
    _channels = 2; // force stereo rather than using asbd->mChannelsPerFrame
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    BOOL bVideo = YES;
    @synchronized(self) {
        if (!_isCapturing || _isPaused) {
            return;
        }
        if (connection != _videoConnection) {
            bVideo = NO;
        }
        if ((_encoder == nil) && !bVideo) {
            // First audio sample: we now know the channel count and sample rate, so create the encoder
            CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
            [self setAudioFormat:fmt];
            // Use the dimensions the session actually delivers, read back in -startup
            _encoder = [VideoEncoder encoderForPath:_filePath width:_cx height:_cy channels:_channels samples:_samplerate];
        }
        if (_discont) {
            if (bVideo) {
                return;
            }
            _discont = NO;
            // Calculate the adjustment for the pause gap
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            CMTime last = bVideo ? _lastVideo : _lastAudio;
            if (last.flags & kCMTimeFlags_Valid) {
                if (_timeOffset.flags & kCMTimeFlags_Valid) {
                    pts = CMTimeSubtract(pts, _timeOffset);
                }
                CMTime offset = CMTimeSubtract(pts, last);
                NSLog(@"Setting offset from %s", bVideo ? "video" : "audio");
                NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value) / offset.timescale, ((double)_timeOffset.value) / _timeOffset.timescale, ((double)pts.value / pts.timescale));
                // This saves us from having to pick a timescale for _timeOffset before the first sample arrives
                if (_timeOffset.value == 0) {
                    _timeOffset = offset;
                } else {
                    _timeOffset = CMTimeAdd(_timeOffset, offset);
                }
            }
            _lastVideo.flags = 0;
            _lastAudio.flags = 0;
        }
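        // Worked example with illustrative numbers: if the last audio buffer before
        // a pause ended at pts 10.0 s and the first buffer after resuming arrives at
        // pts 14.0 s, the 4.0 s gap becomes the offset. Every subsequent buffer is
        // shifted back by _timeOffset in -adjustTime:by:, so the written file has no
        // hole, and offsets from repeated pauses accumulate.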
        // Retain so that we can release either this buffer or the adjusted copy
        CFRetain(sampleBuffer);
        if (_timeOffset.value > 0) {
            CFRelease(sampleBuffer);
            sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
        }
        // Record the most recent end time so we can measure the length of a pause
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
        if (dur.value > 0) {
            pts = CMTimeAdd(pts, dur);
        }
        if (bVideo) {
            _lastVideo = pts;
        } else {
            _lastAudio = pts;
        }
    }
    // Pass the frame to the encoder
    [_encoder encodeFrame:sampleBuffer isVideo:bVideo];
    CFRelease(sampleBuffer);
}
- (void)shutdown
{
    NSLog(@"Shutting down camera");
    if (_session) {
        [_session stopRunning];
        _session = nil;
    }
    [_encoder finishWithCompletionHandler:^{
        NSLog(@"Capture completed");
    }];
}
- (AVCaptureVideoPreviewLayer *)getPreviewLayer
{
    return _previewLayer;
}

@end
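
Usage sketch, not part of the gist: a hypothetical view controller that starts the engine, attaches its preview layer, and records to an assumed movie.mp4 path under Documents. Only the CameraEngine calls are taken from the file above; the controller, action names, and file name are illustrative.

#import <UIKit/UIKit.h>
#import "CameraEngine.h"

@interface RecorderViewController : UIViewController
@end

@implementation RecorderViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    [[CameraEngine engine] startup];

    // Show the engine's live preview in this controller's view
    AVCaptureVideoPreviewLayer *preview = [[CameraEngine engine] getPreviewLayer];
    preview.frame = self.view.bounds;
    [self.view.layer addSublayer:preview];
}

- (IBAction)record:(id)sender
{
    NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    [[CameraEngine engine] startCaptureAtPath:[docs stringByAppendingPathComponent:@"movie.mp4"]];
}

- (IBAction)stop:(id)sender
{
    [[CameraEngine engine] stopCapture];
}

@end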