@evandcoleman
Created November 8, 2017 20:46
Facial Detection with OpenCV
//
//  OpenCVClassifier.h
//  Copyright © 2016 Evan Coleman. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@class OpenCVFaceFeature;
#pragma mark -
@interface OpenCVClassifier: NSObject
- (nonnull NSArray<OpenCVFaceFeature *> *)faceFeaturesInImage:(nonnull UIImage *)image detectEyes:(BOOL)detectEyes;
- (nonnull NSArray<UIImage *> *)detectFacesInImage:(nonnull UIImage *)image nearPoint:(nullable NSValue *)point;
- (nonnull NSArray<UIImage *> *)detectFacesInImage:(nonnull UIImage *)image;
@end
//
//  OpenCVClassifier.mm
//  Copyright © 2016 Evan Coleman. All rights reserved.
//
#import "OpenCVClassifier.h"
#import "UIImage+OpenCV.h"
#import "OpenCVFaceFeature.h"
#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif
using namespace cv;
using namespace std;
@interface OpenCVClassifier ()
@property (nonatomic) CascadeClassifier faceDetector;
@property (nonatomic) CascadeClassifier eyeDetector;
@end
#pragma mark -
@implementation OpenCVClassifier
- (instancetype)init {
    self = [super init];
    if (self != nil) {
        // Load the Haar cascade XML files that ship inside OpenCV.bundle.
        // The bundle and both cascade files are assumed to be present.
        NSURL *bundleURL = [[NSBundle mainBundle] URLForResource:@"OpenCV" withExtension:@"bundle"];
        NSBundle *bundle = [NSBundle bundleWithURL:bundleURL];
        NSString *faceCascadePath = [bundle pathForResource:@"haarcascade_frontalface_default" ofType:@"xml"];
        NSString *eyeCascadePath = [bundle pathForResource:@"haarcascade_eye" ofType:@"xml"];
        _faceDetector = CascadeClassifier([faceCascadePath cStringUsingEncoding:NSUTF8StringEncoding]);
        _eyeDetector = CascadeClassifier([eyeCascadePath cStringUsingEncoding:NSUTF8StringEncoding]);
    }
    return self;
}
- (nonnull NSArray<OpenCVFaceFeature *> *)faceFeaturesInImage:(nonnull UIImage *)image detectEyes:(BOOL)detectEyes {
    cv::Mat img = [image cvMatRepresentationGray];

    vector<cv::Rect> faces;
    self.faceDetector.detectMultiScale(img, faces, 1.2, 4, 0, cv::Size(30, 30));

    NSMutableArray<OpenCVFaceFeature *> *features = [NSMutableArray array];

    for (vector<cv::Rect>::const_iterator r = faces.begin(); r != faces.end(); r++) {
        CGRect faceRect = CGRectMake(r->x, r->y, r->width, r->height);
        NSValue *faceRectValue = [NSValue valueWithCGRect:faceRect];
        NSNumber *faceAngle = nil;
        NSValue *leftEyePoint = nil;
        NSValue *rightEyePoint = nil;

        if (detectEyes) {
            // Run the eye cascade on the cropped face region. The resulting
            // eye coordinates are relative to the face rect, not the full image.
            vector<cv::Rect> eyes;
            cv::Mat croppedImage = img(*r).clone();
            self.eyeDetector.detectMultiScale(croppedImage, eyes);

            if (eyes.size() == 2) {
                // detectMultiScale does not guarantee an order, so label the
                // eyes by x position before computing the roll angle.
                cv::Rect eyeLeft = (eyes[0].x < eyes[1].x) ? eyes[0] : eyes[1];
                cv::Rect eyeRight = (eyes[0].x < eyes[1].x) ? eyes[1] : eyes[0];

                double eyeXdist = eyeRight.x - eyeLeft.x;
                double eyeYdist = eyeRight.y - eyeLeft.y;
                double angle = atan(eyeYdist / eyeXdist) * (180 / CV_PI);

                faceAngle = [NSNumber numberWithDouble:angle];
                leftEyePoint = [NSValue valueWithCGPoint:CGPointMake(eyeLeft.x, eyeLeft.y)];
                rightEyePoint = [NSValue valueWithCGPoint:CGPointMake(eyeRight.x, eyeRight.y)];
            }
        }

        OpenCVFaceFeature *feature = [[OpenCVFaceFeature alloc] initWithBounds:faceRectValue faceAngle:faceAngle leftEyePoint:leftEyePoint rightEyePoint:rightEyePoint mouthPoint:nil];
        [features addObject:feature];
    }

    return [NSArray arrayWithArray:features];
}
- (nonnull NSArray<UIImage *> *)detectFacesInImage:(nonnull UIImage *)image nearPoint:(nullable NSValue *)point {
    NSArray<OpenCVFaceFeature *> *features = [self faceFeaturesInImage:image detectEyes:YES];
    NSMutableArray *facesArray = [NSMutableArray array];

    for (OpenCVFaceFeature *feature in features) {
        CGRect faceRect = [feature.bounds CGRectValue];

        // Outset the frame so we get the entire head, shifted down slightly
        // so more of the chin is included.
        CGFloat cropOutset = CGRectGetWidth(faceRect) * 0.5;
        CGRect outsetRect = CGRectOffset(CGRectInset(faceRect, -cropOutset, -cropOutset), 0, cropOutset / 4);

        // Shrink the outset until the crop fits entirely inside the image;
        // otherwise the cv::Rect below would reach outside the Mat.
        while (CGRectGetMinX(outsetRect) < 0
               || CGRectGetMinY(outsetRect) < 0
               || CGRectGetMaxX(outsetRect) > image.size.width
               || CGRectGetMaxY(outsetRect) > image.size.height) {
            cropOutset /= 2;
            outsetRect = CGRectOffset(CGRectInset(faceRect, -cropOutset, -cropOutset), 0, cropOutset / 4);
        }
        faceRect = outsetRect;

        if (point != nil && !CGRectContainsPoint(faceRect, [point CGPointValue])) {
            continue;
        }

        cv::Mat colorImage = [image cvMatRepresentationColor];
        cv::Rect roi(CGRectGetMinX(faceRect), CGRectGetMinY(faceRect), CGRectGetWidth(faceRect), CGRectGetHeight(faceRect));
        cv::Mat croppedImage = colorImage(roi).clone();

        cv::Mat finalImage;
        if (feature.faceAngle != nil) {
            // De-rotate the whole image around the face center, then crop,
            // so the eyes end up level in the returned image.
            cv::Mat rotatedImage;
            cv::Point2f src_center(roi.x + (croppedImage.cols / 2.0F), roi.y + (croppedImage.rows / 2.0F));
            cv::Mat rot_mat = cv::getRotationMatrix2D(src_center, [feature.faceAngle doubleValue], 1.0);
            cv::warpAffine(colorImage, rotatedImage, rot_mat, colorImage.size());
            finalImage = rotatedImage(roi).clone();
        } else {
            finalImage = croppedImage;
        }

        [facesArray addObject:[UIImage imageFromCVMat:finalImage]];
    }

    return [NSArray arrayWithArray:facesArray];
}
- (nonnull NSArray<UIImage *> *)detectFacesInImage:(nonnull UIImage *)image {
    return [self detectFacesInImage:image nearPoint:nil];
}
@end
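//
// Usage sketch (an illustrative addition, not part of the original gist):
// one way to drive OpenCVClassifier from app code. The helper name and the
// input image are hypothetical placeholders.
//
static NSArray<UIImage *> *ExtractFaceCrops(UIImage *photo) {
    OpenCVClassifier *classifier = [[OpenCVClassifier alloc] init];

    // Face rectangles plus, when both eyes are found, a roll angle and eye positions.
    NSArray<OpenCVFaceFeature *> *features = [classifier faceFeaturesInImage:photo detectEyes:YES];
    NSLog(@"Detected %lu face(s)", (unsigned long)features.count);

    // Cropped (and, where an angle was detected, de-rotated) head images.
    return [classifier detectFacesInImage:photo];
}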
//
//  UIImage+OpenCV.h
//  Copyright © 2016 Evan Coleman. All rights reserved.
//
#import <UIKit/UIKit.h>
namespace cv {
    class Mat;
}
#pragma mark -
@interface UIImage (OpenCV)
+ (UIImage *)imageFromCVMat:(cv::Mat)mat;
- (cv::Mat)cvMatRepresentationColor;
- (cv::Mat)cvMatRepresentationGray;
@end
//
//  UIImage+OpenCV.mm
//  Copyright © 2016 Evan Coleman. All rights reserved.
//
#import "UIImage+OpenCV.h"
#import <opencv2/opencv.hpp>
using namespace cv;
#pragma mark -
@implementation UIImage (OpenCV)
+ (UIImage *)imageFromCVMat:(cv::Mat)cvMat {
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];

    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                                     // width
                                        cvMat.rows,                                     // height
                                        8,                                              // bits per component
                                        8 * cvMat.elemSize(),                           // bits per pixel
                                        cvMat.step[0],                                  // bytes per row
                                        colorSpace,                                     // color space
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,  // bitmap info
                                        provider,                                       // CGDataProviderRef
                                        NULL,                                           // decode
                                        false,                                          // should interpolate
                                        kCGRenderingIntentDefault);                     // intent

    // Getting UIImage from CGImage
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];

    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return finalImage;
}
- (cv::Mat)cvMatRepresentationColor {
    // The color space is borrowed from the CGImage, so it is not released here.
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(self.CGImage);
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;

    cv::Mat color(rows, cols, CV_8UC4); // 8 bits per component, 4 channels (color channels + alpha)

    CGContextRef contextRef = CGBitmapContextCreate(color.data,      // Pointer to data
                                                    cols,            // Width of bitmap
                                                    rows,            // Height of bitmap
                                                    8,               // Bits per component
                                                    color.step[0],   // Bytes per row
                                                    colorSpace,      // Colorspace
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), self.CGImage);
    CGContextRelease(contextRef);

    return color;
}
- (cv::Mat)cvMatRepresentationGray {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;

    cv::Mat cvMat(rows, cols, CV_8UC1); // 8 bits per component, 1 channel

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,      // Pointer to data
                                                    cols,            // Width of bitmap
                                                    rows,            // Height of bitmap
                                                    8,               // Bits per component
                                                    cvMat.step[0],   // Bytes per row
                                                    colorSpace,      // Colorspace
                                                    kCGImageAlphaNone |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), self.CGImage);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);

    return cvMat;
}
@end
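//
// Round-trip sketch (an illustrative addition, not part of the original gist):
// convert a UIImage to a grayscale cv::Mat, process it with OpenCV, and convert
// back. The Gaussian blur is only an example of an in-between OpenCV step.
//
static UIImage *BlurredGrayscaleImage(UIImage *image) {
    cv::Mat gray = [image cvMatRepresentationGray];

    cv::Mat blurred;
    cv::GaussianBlur(gray, blurred, cv::Size(9, 9), 0); // 9x9 kernel, sigma derived from the kernel size

    return [UIImage imageFromCVMat:blurred];
}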