Created
May 9, 2011 20:28
-
-
Save pixelrevision/963319 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#import <UIKit/UIKit.h> | |
#import <AVFoundation/AVFoundation.h> | |
#import <CoreGraphics/CoreGraphics.h> | |
#import <CoreVideo/CoreVideo.h> | |
#import <CoreMedia/CoreMedia.h> | |
@class PXRCamView; | |
/// Receives snapshots captured by a PXRCamView after -snapPicture is called.
@protocol PXRCamViewDelegate
/// Delivered once per -snapPicture request, with the next camera frame
/// converted to a UIImage. `cv` is the sending camera view.
- (void)camView:(PXRCamView*)cv didCaptureImage:(UIImage*)img;
@end
/// A UIView that displays a live camera preview (AVCaptureVideoPreviewLayer)
/// and can capture a single frame as a UIImage on demand via -snapPicture.
/// NOTE(review): this is manual-retain-release (pre-ARC) code.
@interface PXRCamView : UIView <AVCaptureVideoDataOutputSampleBufferDelegate>{
    BOOL hasCam;           // YES while the capture pipeline is built and running
    BOOL wantsPictureTaken; // one-shot flag consumed by the next frame callback
    AVCaptureSession *captureSession;
    AVCaptureVideoPreviewLayer *previewLayer;
    AVCaptureVideoDataOutput *imageOut; // BGRA frame output driving the sample-buffer delegate
    id <PXRCamViewDelegate> delegate;
}
/// Not retained (assign) — avoids a retain cycle with the owning controller.
@property (nonatomic, assign) id <PXRCamViewDelegate> delegate;
/// Builds and starts the capture session and preview layer; no-op if already running.
- (void)startCam;
/// Stops and tears down the capture session; no-op if not running.
- (void)stopCam;
/// Requests that the next camera frame be delivered to the delegate as a UIImage.
- (void)snapPicture;
@end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#import "PXRCamView.h"

@implementation PXRCamView

@synthesize delegate;

#pragma mark - Public

/// Lazily builds the capture pipeline (session, camera input, BGRA video
/// data output, preview layer) and starts it. Safe to call repeatedly;
/// a second call while running is a no-op.
- (void)startCam{
    if(hasCam){
        return;
    }

    // FIX: check the input before building anything. On a device with no
    // camera (e.g. simulator) deviceInputWithDevice: returns nil and the
    // original code silently assembled a broken session.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *inputError = nil;
    AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice
                                                                          error:&inputError];
    if(videoIn == nil){
        NSLog(@"PXRCamView: could not create camera input: %@", inputError);
        return;
    }

    captureSession = [[AVCaptureSession alloc] init];
    [captureSession addInput:videoIn];

    imageOut = [[AVCaptureVideoDataOutput alloc] init];
    imageOut.alwaysDiscardsLateVideoFrames = YES; // drop frames rather than queue them up

    // Frames are delivered on a private serial queue. The output retains the
    // queue, so release our reference once it is handed over (MRC-era GCD
    // objects are manually retain-counted; the original leaked this queue).
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [imageOut setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Ask for 32BGRA so a frame can be wrapped directly in a CGBitmapContext
    // (see -captureOutput:didOutputSampleBuffer:fromConnection:).
    NSString *key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    [imageOut setVideoSettings:[NSDictionary dictionaryWithObject:value forKey:key]];
    [captureSession addOutput:imageOut];

    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    previewLayer.frame = self.bounds; // preview fills the view
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [self.layer addSublayer:previewLayer];

    [captureSession startRunning];
    hasCam = YES;
}

/// Stops the session and releases the pipeline. No-op if not running.
- (void)stopCam{
    if(!hasCam){
        return;
    }
    [captureSession stopRunning];
    [previewLayer removeFromSuperlayer];

    // FIX: nil out ivars after release so nothing dangles between a stop
    // and the next start (or during teardown while a frame is in flight).
    [captureSession release];
    captureSession = nil;
    [previewLayer release];
    previewLayer = nil;
    [imageOut release];
    imageOut = nil;

    hasCam = NO;
}

/// Arms a one-shot flag; the next frame delivered to the sample-buffer
/// callback is converted to a UIImage and handed to the delegate.
- (void)snapPicture{
    wantsPictureTaken = YES;
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Per-frame callback from AVFoundation. Runs on the private camera queue,
/// NOT the main thread. Does nothing unless -snapPicture armed a capture.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    if(!wantsPictureTaken){
        return;
    }
    wantsPictureTaken = NO;

    // Background thread with no surrounding pool — keep one around the
    // temporaries created below (MRC).
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // The buffer is 32BGRA (configured in -startCam), which matches
    // kCGBitmapByteOrder32Little | premultiplied-first in an RGB space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context); // copies the pixels
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Safe to unlock now: CGBitmapContextCreateImage captured the data.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Rotate to compensate for the sensor's native landscape orientation.
    UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(cgImage);

    // FIX: deliver on the main thread — the delegate almost certainly touches
    // UIKit, and the original called it straight from the camera queue (its
    // own comment noted the thread problem but never hopped queues). The
    // copied block retains both self and the image until delivery.
    dispatch_async(dispatch_get_main_queue(), ^{
        [delegate camView:self didCaptureImage:image];
    });

    [pool drain];
}

#pragma mark - Lifecycle

/// Tears down the capture pipeline before the view is destroyed (MRC).
- (void)dealloc {
    [self stopCam];
    [super dealloc];
}
@end
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment