Skip to content

Instantly share code, notes, and snippets.

@rock88
Created March 21, 2014 09:28
Show Gist options
  • Save rock88/9682633 to your computer and use it in GitHub Desktop.
// Minimal wrapper around an AVFoundation capture pipeline that grabs a single
// still image, preferring the front-facing camera when one is available.
// NOTE(review): AVCaptureStillImageOutput is deprecated as of iOS 10 in favor
// of AVCapturePhotoOutput — confirm the deployment target before reusing this.
@interface Camera : NSObject
// Output used to request still-image captures from the running session.
@property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput;
// Owning capture session; created, configured, and started by -setupCaptureSession.
@property (nonatomic,strong) AVCaptureSession *session;
// Returns the front camera if one exists, otherwise the default video device.
- (AVCaptureDevice *)frontFacingCameraIfAvailable;
// Builds the session (camera input + JPEG still-image output) and starts it.
- (void)setupCaptureSession;
// Asynchronously captures one frame and invokes the block with the decoded image.
- (void)captureWithBlock:(void(^)(UIImage* block))block;
@end
@implementation Camera

#pragma mark - Device discovery

// Returns the front-facing camera when one is present; otherwise falls back to
// the system default video device. May return nil on hosts with no camera.
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    // Couldn't find one on the front, so just get the default video device.
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

#pragma mark - Session setup

// Builds the capture pipeline (camera input -> JPEG still-image output) and
// starts the session running. Bails out early if the camera cannot be opened
// so the session is never started in a half-configured state.
- (void)setupCaptureSession
{
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetMedium;

    NSError *error = nil;
    AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Fix: the original logged here but still added the nil input and
        // started the session. Return instead.
        NSLog(@"ERROR: trying to open camera: %@", error);
        return;
    }
    if ([self.session canAddInput:input]) {
        [self.session addInput:input];
    }

    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    self.stillImageOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };
    if ([self.session canAddOutput:self.stillImageOutput]) {
        [self.session addOutput:self.stillImageOutput];
    }

    [self.session startRunning];
}

#pragma mark - Capture

// Asynchronously captures a single still frame and invokes |block| with the
// decoded UIImage, or with nil when the capture fails. |block| may be nil.
- (void)captureWithBlock:(void(^)(UIImage* image))block
{
    // Find the connection carrying video; still captures must be requested on it.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in connection.inputPorts) {
            if ([port.mediaType isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    if (!videoConnection) {
        // Fix: the original passed a nil connection straight into the capture
        // call (which raises). Report failure to the caller instead.
        NSLog(@"ERROR: no video connection available for capture");
        if (block) block(nil);
        return;
    }

    NSLog(@"about to request a capture from: %@", self.stillImageOutput);
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                       completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        UIImage *image = nil;
        if (imageSampleBuffer != NULL && !error) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            image = [[UIImage alloc] initWithData:imageData];
        } else {
            // Fix: the original unconditionally asked for a JPEG representation
            // of a buffer that may be NULL on failure.
            NSLog(@"ERROR: still image capture failed: %@", error);
        }
        if (block) block(image);
    }];
}

@end
// Command-line driver: builds the capture pipeline, requests one still frame,
// and pumps the main run loop until the capture completes. A deadline bounds
// the wait so a missing/broken camera cannot hang the process forever (the
// original looped unconditionally on `done`).
int main(int argc, char * argv[])
{
    @autoreleasepool {  // Fix: ObjC main under ARC should drain temporaries.
        Camera *camera = [[Camera alloc] init];
        [camera setupCaptureSession];

        __block BOOL done = NO;
        [camera captureWithBlock:^(UIImage *image) {
            done = YES;
        }];

        // Pump the run loop in short slices so AVFoundation's asynchronous
        // completion handler gets a chance to fire on the main thread.
        NSDate *deadline = [NSDate dateWithTimeIntervalSinceNow:10.0];
        while (!done && [deadline timeIntervalSinceNow] > 0) {
            [[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
        }
    }
    return 0;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment