Face recognition with Rekognition [ Objective-C ]
//
// AVCaptureTestViewController.m
//
//
// Created by inobo52 on 2014/11/15.
// Copyright (c) 2014 private inobo. All rights reserved.
//
// This article was a big help:
// http://stackoverflow.com/questions/24842108/capture-video-frames-sample-code-for-ios-7-0-and-up

#import <AVFoundation/AVFoundation.h>

#import "AVCaptureTestViewController.h"
#import "Base64.h"
#import "ReKognitionSDK.h"
AVCaptureSession *session;
@interface AVCaptureTestViewController ()
@end
@implementation AVCaptureTestViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setupCaptureSession];
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}
#pragma mark -
#pragma mark ReKognize
- (IBAction)startRekognize:(id)sender {
    NSLog(@"%s", __func__);
    UIImage *photo = self.imageView.image;

    // Run a face search on the current preview frame.
    // *The nameSpace must not contain Japanese characters.*
    NSString *searchResult = [ReKognitionSDK RKFaceSearch:photo
                                                    scale:1.0
                                                nameSpace:@"demo_project"
                                                   userID:@"demo_user"];
    NSDictionary *result = [self jsonParse:searchResult];
    NSLog(@"Rekognize:%@", [result description]);
    // For each detected face, show the tag of its first match.
    NSArray *faces = [result objectForKey:@"face_detection"];
    for (NSDictionary *face in faces) {
        NSArray *matches = [face objectForKey:@"matches"];
        for (NSDictionary *match in matches) {
            NSString *tag = [match objectForKey:@"tag"];
            self.nameLabel.text = tag;
            break;
        }
    }
}
- (NSDictionary *)jsonParse:(NSString *)resultString {
    NSData *data = [resultString dataUsingEncoding:NSUTF8StringEncoding];
    // NSJSONSerialization is available on iOS 5 and later.
    if (NSClassFromString(@"NSJSONSerialization")) {
        NSError *error = nil;
        id object = [NSJSONSerialization JSONObjectWithData:data
                                                    options:0
                                                      error:&error];
        if ([object isKindOfClass:[NSDictionary class]]) {
            return (NSDictionary *)object;
        }
    }
    return nil;
}
#pragma mark -
#pragma mark capture
- (void)setupCaptureSession
{
    NSError *error = nil;
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetHigh;

    // Camera input
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    [session addInput:input];

    // Video data output: 32BGRA frames delivered to a background serial queue
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [session addOutput:output];
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    output.videoSettings =
        [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // Fix the frame rate at 30 fps
    NSError *error2;
    if ([device lockForConfiguration:&error2]) {
        if (device.activeFormat.videoSupportedFrameRateRanges) {
            [device setActiveVideoMinFrameDuration:CMTimeMake(1, 30)];
            [device setActiveVideoMaxFrameDuration:CMTimeMake(1, 30)];
        }
        [device unlockForConfiguration];
    }

    // Configure the camera orientation and so on
    AVCaptureConnection *videoConnection = nil;
    [session beginConfiguration];
    for (AVCaptureConnection *connection in [output connections]) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
            }
        }
    }
    if ([videoConnection isVideoOrientationSupported]) {  // **Here it is, it's always false**
        [videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    }
    [session commitConfiguration];

    [session startRunning];
}
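
// Not part of the original gist: a minimal teardown sketch, assuming the capture session
// should stop when the view controller goes away so the camera is not left running.
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [session stopRunning];
}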
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"captureOutput: didOutputSampleBufferFromConnection");
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    // The delegate runs on "myQueue"; UIKit must only be touched on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.imageView setImage:image];
        [self.view setNeedsDisplay];
    });
}
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Wrap the 32BGRA pixel buffer in a bitmap context and copy it out as a UIImage.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    return image;
}
@end