Measure brightness with the iOS front camera: run a low-preset AVCaptureSession on the front-facing camera, capture a still frame, and read the EXIF brightness value attached to the sample buffer.
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h> // for the kCGImagePropertyExif* keys

/* Assumes your class has the following instance variables:
   AVCaptureSession *_videoSession;
   AVCaptureDevice *_cameraDevice;
   AVCaptureStillImageOutput *_imageOutput;
   AVCaptureConnection *_avConnection;
*/
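/* A minimal way to declare those ivars in a class extension (a sketch; the
   class name "BrightnessSampler" is illustrative, not part of the gist):

   @interface BrightnessSampler () {
       AVCaptureSession *_videoSession;
       AVCaptureDevice *_cameraDevice;
       AVCaptureStillImageOutput *_imageOutput;
       AVCaptureConnection *_avConnection;
   }
   @end
*/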
-(void)setupStillVideoCaptureSession
{
    // Set up the video capture session.
    _videoSession = nil;
    _cameraDevice = nil;
    _imageOutput = nil;
    _avConnection = nil;
    NSError *err = nil;
    _videoSession = [[AVCaptureSession alloc] init];
    if ([_videoSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
        _videoSession.sessionPreset = AVCaptureSessionPresetLow;
    } else {
        NSLog(@"Could not set AVCaptureSession preset.");
        return;
    }
    // Find the front camera device.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == AVCaptureDevicePositionFront) {
            _cameraDevice = device;
        }
        NSLog(@"Found device %@", device);
        NSLog(@"Name: %@", [device localizedName]);
        NSLog(@"Position: %@", ([device position] == AVCaptureDevicePositionBack ? @"back" : @"front"));
    }
    if (!_cameraDevice) {
        NSLog(@"Could not find a front-facing camera.");
        return;
    }
    // Add the front camera device to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_cameraDevice error:&err];
    if (!input) {
        NSLog(@"Error instantiating capture input device: %@", err.description);
        return;
    }
    if ([_videoSession canAddInput:input]) {
        [_videoSession addInput:input];
    } else {
        NSLog(@"Could not add input device to capture session.");
        return;
    }
    // Create and add the still image output device to the session.
    _imageOutput = [[AVCaptureStillImageOutput alloc] init];
    if ([_videoSession canAddOutput:_imageOutput]) {
        [_videoSession addOutput:_imageOutput];
    } else {
        NSLog(@"Could not add output device to capture session.");
        return;
    }
    // Find the video AV connection on the output.
    _avConnection = nil;
    for (AVCaptureConnection *connection in _imageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                _avConnection = connection;
                break;
            }
        }
        if (_avConnection) break; // stop scanning once a video connection is found
    }
    if (!_avConnection) {
        NSLog(@"Could not find AVCaptureConnection for capture session.");
        return;
    }
    [_videoSession startRunning];
}
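/* A matching teardown is worth having; a minimal sketch (the method name is
   illustrative, not part of the gist):

-(void)teardownStillVideoCaptureSession
{
    [_videoSession stopRunning];
    _avConnection = nil;
    _imageOutput = nil;
    _cameraDevice = nil;
    _videoSession = nil;
}
*/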
-(void)sampleBrightness
{
    if (_imageOutput && _avConnection) {
        [_imageOutput captureStillImageAsynchronouslyFromConnection:_avConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
            if (error) {
                NSLog(@"Error taking picture: %@", error.description);
                return;
            }
            // The still image carries its EXIF metadata as a sample buffer attachment.
            CFDictionaryRef exifAttachments = (CFDictionaryRef)CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
            if (exifAttachments) {
                if (!CFDictionaryContainsKey(exifAttachments, kCGImagePropertyExifBrightnessValue)) {
                    NSLog(@"EXIF dictionary doesn't contain brightness key");
                    return;
                }
                // The value is a number (the EXIF BrightnessValue), not a string.
                CFNumberRef brightness = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifBrightnessValue);
                NSLog(@"Brightness: %@", (__bridge NSNumber *)brightness);
            }
            // If you want to do more with the sample, you may continue as follows:
            //
            // CMItemCount count = CMSampleBufferGetNumSamples(imageSampleBuffer);
            // NSLog(@"Got %ld samples", count);
            //
            // CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(imageSampleBuffer);
            // if (buffer) {
            //     size_t len = CMBlockBufferGetDataLength(buffer);
            //     char *data = NULL;
            //     CMBlockBufferGetDataPointer(buffer, 0, NULL, &len, &data);
            // }
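            //
            // Or, to get a UIImage out of the capture (a sketch; assumes the
            // default JPEG outputSettings of AVCaptureStillImageOutput and
            // requires UIKit):
            //
            // NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            // UIImage *image = [UIImage imageWithData:jpegData];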
        }];
    }
}
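/* Usage sketch, assuming this code lives in a view controller; the one-second
   timer is illustrative, not part of the gist:

-(void)viewDidLoad
{
    [super viewDidLoad];
    [self setupStillVideoCaptureSession];
    [NSTimer scheduledTimerWithTimeInterval:1.0
                                     target:self
                                   selector:@selector(sampleBrightness)
                                   userInfo:nil
                                    repeats:YES];
}
*/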