#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
/*!
 @class AVController
 @author Benjamin Loulier
 @brief Controller demonstrating direct access to the camera frames using iPhone SDK 4
*/
@interface AVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
}
/*!
 @brief The capture session that takes the input from the camera and captures it
*/
@property (nonatomic, retain) AVCaptureSession *captureSession;
/*!
@brief The UIImageView we use to display the image generated from the imageBuffer
*/
@property (nonatomic, retain) UIImageView *imageView;
/*!
@brief The CALayer we use to display the CGImageRef generated from the imageBuffer
*/
@property (nonatomic, retain) CALayer *customLayer;
/*!
 @brief The CALayer provided by Apple to display the video corresponding to a capture session
*/
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
/*!
@brief This method initializes the capture session
*/
- (void)initCapture;
@end
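/*
 A minimal usage sketch (an assumption, not part of the original gist): displaying the
 controller from an iOS 4-era application delegate, where `window` is the application's
 UIWindow outlet:

     AVController *controller = [[[AVController alloc] init] autorelease];
     [window addSubview:controller.view];
     [window makeKeyAndVisible];
*/

/* ---------- AVController.m ---------- */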
#import "AVController.h"
@implementation AVController
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
#pragma mark -
#pragma mark Initialization
- (id)init {
    self = [super init];
    if (self) {
        /*We initialize some variables (they might not be initialized depending on what is commented out below)*/
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
        /*We initialize the capture*/
        [self initCapture];
    }
    return self;
}
- (void)initCapture {
    /*We set up the input*/
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
                                          deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                          error:nil];
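    /*A hedged aside (not in the original gist): deviceInputWithDevice:error: returns nil
     when no camera is available (e.g. in the simulator), so a defensive version would
     pass a real NSError and bail out early:*/
    /*
    NSError *error = nil;
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
                                          deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                          error:&error];
    if (!captureInput) {
        NSLog(@"Could not create the capture input: %@", error);
        return;
    }
    */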
    /*We set up the output*/
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    /*Uncomment to specify a minimum duration for each video frame (i.e. cap the frame rate)*/
    //captureOutput.minFrameDuration = CMTimeMake(1, 10);
    [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
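    /*A hedged alternative (not in the original gist; the queue label is illustrative):
     hand the frames to a dedicated serial queue so processing does not block the main
     thread. Off the main queue the delegate callback has no autorelease pool, so it
     would have to create and drain its own NSAutoreleasePool:*/
    /*
    dispatch_queue_t queue = dispatch_queue_create("com.example.cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
    */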
    /*Set the video output to store frames in BGRA (reported to be faster; it also matches
     the kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst flags used when we
     build the bitmap context in the delegate below)*/
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    /*And we create a capture session (autoreleased, since the retain property takes ownership)*/
    self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
    /*We add input and output*/
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    /*The session retains the output, so we balance the alloc above*/
    [captureOutput release];
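    /*An optional addition (an assumption, not in the original gist): a session preset can
     be set before startRunning to trade resolution for throughput, e.g.:*/
    /*
    self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
    */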
    /*We start the capture*/
    [self.captureSession startRunning];
    /*We add the preview layer*/
    /*self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = self.view.bounds;
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];*/
    /*We add the custom layer (we need to change the orientation of the layer so that the video is displayed correctly)*/
    /*self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];*/
    /*We add the imageView (autoreleased, since the retain property takes ownership)*/
    self.imageView = [[[UIImageView alloc] initWithFrame:self.view.bounds] autorelease];
    [self.view addSubview:self.imageView];
}
#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    /*Lock the image buffer*/
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    /*Get information about the image*/
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    /*Create a CGImageRef from the CVImageBufferRef*/
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    /*We unlock the image buffer*/
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    /*We release some components*/
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    /*We display the result on the custom layer*/
    /*self.customLayer.contents = (id)newImage;*/
    /*We display the result on the image view (we need to change the orientation of the image so that the video is displayed correctly)*/
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    self.imageView.image = image;
    /*We release the CGImageRef*/
    CGImageRelease(newImage);
}
#pragma mark -
#pragma mark Memory management
- (void)viewDidUnload {
    [super viewDidUnload];
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}
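/*A hedged note (not in the original gist): the session is started but never stopped; a
 fuller implementation might stop it when the view goes away, e.g.:*/
/*
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
}
*/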
- (void)dealloc {
    /*We release every retained ivar (releasing directly rather than through the property accessors)*/
    [_captureSession release];
    [_imageView release];
    [_customLayer release];
    [_prevLayer release];
    [super dealloc];
}
@end