Capture photos from the built-in iSight camera or other connected cameras using Objective-C.
//
// PhotoGrabber.h
// By Erik Rothoff Andersson <erikrothoff.com>
//
#import <Cocoa/Cocoa.h> // Cocoa (rather than just Foundation) is needed for NSImage
#import <QTKit/QTKit.h>
@protocol PhotoGrabberDelegate <NSObject>
- (void)photoGrabbed:(NSImage*)image;
@end
@interface PhotoGrabber : NSObject {
    CVImageBufferRef currentImage;
    QTCaptureDevice *video;
    QTCaptureDecompressedVideoOutput *output;
    QTCaptureInput *input;
    QTCaptureSession *session;
    id<PhotoGrabberDelegate> delegate;
}
@property (nonatomic, assign) id<PhotoGrabberDelegate> delegate;
- (void)grabPhoto;
- (NSString*)deviceName;
@end
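A caller adopts the PhotoGrabberDelegate protocol above and implements photoGrabbed: to receive the captured frame. Below is a minimal sketch of such a delegate; the PhotoSaver class name and the output path are made up for illustration, and it simply writes the image to disk as TIFF data:
// PhotoSaver.h (hypothetical delegate, not part of the original gist)
#import <Cocoa/Cocoa.h>
#import "PhotoGrabber.h"
@interface PhotoSaver : NSObject <PhotoGrabberDelegate>
@end
// PhotoSaver.m
#import "PhotoSaver.h"
@implementation PhotoSaver
- (void)photoGrabbed:(NSImage*)image
{
    // The grabber hands us a ready-made NSImage on the main thread;
    // here we just dump its TIFF data to an example path
    NSData *data = [image TIFFRepresentation];
    [data writeToFile:[@"~/Desktop/snapshot.tiff" stringByExpandingTildeInPath] atomically:YES];
}
@end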
//
// PhotoGrabber.m
// By Erik Rothoff Andersson <erikrothoff.com>
//
#import "PhotoGrabber.h"
@implementation PhotoGrabber
@synthesize delegate;
- (id)init
{
    if ( (self = [super init]) )
    {
        NSError *error = nil;
        // Acquire a capture device; we also need an object that feeds
        // input from the device into a session, and another for output.
        // Retain it, because the class method returns an autoreleased object.
        video = [[QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo] retain];
        BOOL success = [video open:&error];
        if ( ! success || error )
        {
            NSLog(@"Did not succeed in acquiring the device: %d", success);
            NSLog(@"Error: %@", [error localizedDescription]);
            [self release];
            return nil;
        }
        // QTCaptureDeviceInput is the object that will use the
        // device as input, i.e. handle the photo-taking
        input = [[QTCaptureDeviceInput alloc] initWithDevice:video];
        // The session ties the input and output objects together
        session = [[QTCaptureSession alloc] init];
        // Add our input object as input for this particular session
        success = [session addInput:input error:&error];
        if ( ! success || error )
        {
            NSLog(@"Did not succeed in connecting input to session: %d", success);
            NSLog(@"Error: %@", [error localizedDescription]);
            [self release];
            return nil;
        }
        // Create an object for outputting the video.
        // The input tells the session that it has captured some data,
        // which the session in turn passes to this output object,
        // which has a delegate that we define.
        output = [[QTCaptureDecompressedVideoOutput alloc] init];
        // This is the delegate. Note the
        // captureOutput:didOutputVideoFrame...-method of this
        // object. That is the method which will be called when
        // a frame has been captured.
        [output setDelegate:self];
        // Add the output object to the session
        success = [session addOutput:output error:&error];
        if ( ! success || error )
        {
            NSLog(@"Did not succeed in connecting output to session: %d", success);
            NSLog(@"Error: %@", [error localizedDescription]);
            [self release];
            return nil;
        }
        // Because the input stream is video we will get many frames
        // in a row; we keep the first one we get and ignore the rest
        // until it has been handled.
        currentImage = nil;
    }
    return self;
}
// This is the method to call when you want to start a capture
- (void)grabPhoto
{
    [session startRunning];
}
// The device name will most likely be "Built-in iSight camera"
- (NSString*)deviceName
{
    return [video localizedDisplayName];
}
// QTCapture delegate method, called when a frame has been captured by the camera
- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
    // If we already have an image we don't need another one
    if ( currentImage ) return;
    // Retain the videoFrame so it won't disappear
    // (don't forget to release it later!)
    CVBufferRetain(videoFrame);
    // The Apple docs state that this assignment must be synchronized,
    // since this delegate method runs on another thread
    @synchronized (self) {
        currentImage = videoFrame;
    }
    // Because this method runs on another thread, we perform the
    // selector that handles the image on the main thread
    [self performSelectorOnMainThread:@selector(saveImage) withObject:nil waitUntilDone:NO];
}
// Called from the QTCapture delegate method
- (void)saveImage
{
    // Stop the session so we don't record anything more
    [session stopRunning];
    // Convert the frame to an NSImage with a JPEG representation.
    // This is a bit tricky: we take the raw pixel buffer and turn
    // it into an NSImage that contains the image as JPEG
    NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage imageWithCVImageBuffer:currentImage]];
    NSImage *image = [[NSImage alloc] initWithSize:[imageRep size]];
    [image addRepresentation:imageRep];
    NSData *bitmapData = [image TIFFRepresentation];
    NSBitmapImageRep *bitmapRep = [NSBitmapImageRep imageRepWithData:bitmapData];
    NSData *imageData = [bitmapRep representationUsingType:NSJPEGFileType properties:nil];
    [image release];
    image = [[NSImage alloc] initWithData:imageData];
    // Call the delegate callback
    if ( [self.delegate respondsToSelector:@selector(photoGrabbed:)] )
        [self.delegate photoGrabbed:image];
    // Clean up after ourselves
    [image release];
    CVBufferRelease(currentImage);
    currentImage = nil;
}
- (void)dealloc
{
    self.delegate = nil;
    // Just close/turn off everything if it's still running
    if ( [session isRunning] )
        [session stopRunning];
    if ( [video isOpen] )
        [video close];
    // Remove the input/output from the session
    [session removeInput:input];
    [session removeOutput:output];
    // Release a frame that was grabbed but never handled
    if ( currentImage )
        CVBufferRelease(currentImage);
    [input release];
    [session release];
    [output release];
    [video release];
    [super dealloc];
}
@end
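Wiring everything together might look roughly like the sketch below. It assumes an ordinary Cocoa application with a running run loop, since QTKit delivers frames asynchronously, and the ivar names are illustrative; the important point is that the grabber and its delegate must stay alive until photoGrabbed: has fired.
// Somewhere in your application controller (illustrative sketch;
// saver and grabber are assumed to be retained instance variables)
saver = [[PhotoSaver alloc] init];
grabber = [[PhotoGrabber alloc] init];
if ( grabber )
{
    NSLog(@"Capturing from: %@", [grabber deviceName]);
    grabber.delegate = saver;
    [grabber grabPhoto]; // returns immediately; photoGrabbed: is called on the main thread once the first frame arrives
}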