//
// ObjcViewController.m
// FilterCam
//
// Created by B Gay on 11/3/17.
// Copyright © 2017 B Gay. All rights reserved.
//
#import "ObjcViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <Vision/Vision.h>
#import "VisionFace.h"
@interface ObjcViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDevice *backCamera;
@property (nonatomic, strong) AVCaptureDevice *frontCamera;
@property (nonatomic, strong) AVCaptureDevice *currentCamera;
@property (nonatomic, strong) AVCapturePhotoOutput *photoOutput;
@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
@property (nonatomic, strong) CIContext *context;
@property (weak, nonatomic) IBOutlet UIImageView *filteredImage;
@end
@implementation ObjcViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    self.context = [[CIContext alloc] init];
    [self setupDevice];
    [self setupInputOutput];
}
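// Tracks the current interface orientation so video connections can be rotated
// to match. UIInterfaceOrientation raw values line up with
// AVCaptureVideoOrientation, which is why the direct cast works.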
- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];
    self.orientation = (AVCaptureVideoOrientation)[UIApplication sharedApplication].statusBarOrientation;
}
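// Discovers the built-in wide-angle cameras, keeps a reference to each
// position, and defaults to the front camera.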
- (void)setupDevice {
    AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                                                                                      mediaType:AVMediaTypeVideo
                                                                                                                       position:AVCaptureDevicePositionUnspecified];
    NSArray<AVCaptureDevice *> *devices = deviceDiscoverySession.devices;
    for (AVCaptureDevice *device in devices) {
        if (device.position == AVCaptureDevicePositionBack) {
            self.backCamera = device;
        }
        else if (device.position == AVCaptureDevicePositionFront) {
            self.frontCamera = device;
        }
    }
    self.currentCamera = self.frontCamera;
}
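// Builds the capture session: a 1280x720 preset, the current camera as input,
// and a video data output that delivers sample buffers on a private serial
// queue. Note: a shipping app should first check camera authorization via
// [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo].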
- (void)setupInputOutput {
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
    NSError *error = nil;
    AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.currentCamera error:&error];
    if (captureDeviceInput == nil) {
        NSLog(@"Error creating capture device input: %@", error);
        return;
    }
    if ([self.captureSession canAddInput:captureDeviceInput]) {
        [self.captureSession addInput:captureDeviceInput];
    }
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setSampleBufferDelegate:self queue:dispatch_queue_create("CameraCaptureSampleBufferDelegateQueue", DISPATCH_QUEUE_SERIAL)];
    if ([self.captureSession canAddOutput:videoOutput]) {
        [self.captureSession addOutput:videoOutput];
    }
    [self.captureSession startRunning];
}
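// AVCaptureVideoDataOutputSampleBufferDelegate callback, called on the capture
// queue for every frame: renders the frame into the image view and runs a
// Vision face-landmarks request against the pixel buffer, drawing any detected
// faces on the main queue.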
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    connection.videoOrientation = self.orientation;
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }
    CIImage *cameraImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer];
    NSMutableDictionary<VNImageOption, id> *requestOptions = [NSMutableDictionary dictionary];
    CFTypeRef cameraIntrinsicData = CMGetAttachment(sampleBuffer,
                                                    kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix,
                                                    nil);
    if (cameraIntrinsicData != NULL) {
        requestOptions[VNImageOptionCameraIntrinsics] = (__bridge id)cameraIntrinsicData;
    }
    CGImageRef cgImage = [self.context createCGImage:cameraImage fromRect:cameraImage.extent];
    VNDetectFaceLandmarksRequest *landmarksRequest =
    [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
        if (error != nil) {
            NSLog(@"Error while detecting face landmarks: %@", error);
        } else {
            dispatch_async(dispatch_get_main_queue(), ^{
                // Clear the layers drawn for the previous frame before drawing new ones.
                self.filteredImage.layer.sublayers = nil;
                NSArray *faces = [self handleRequest:request inImage:cameraImage];
                if (faces.count) {
                    [self drawFaces:faces];
                }
            });
        }
    }];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.filteredImage.image = [UIImage imageWithCGImage:cgImage];
        // UIImage retains the CGImage, so release our reference to avoid leaking one per frame.
        CGImageRelease(cgImage);
    });
    VNImageRequestHandler *requestHandler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:pixelBuffer options:requestOptions];
    NSError *error;
    if (![requestHandler performRequests:@[landmarksRequest] error:&error]) {
        NSLog(@"Error performing face landmarks request: %@", error);
    }
}
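// Converts each VNFaceObservation's normalized, lower-left-origin bounding box
// into the image's pixel coordinates and pairs it with its landmarks in a
// VisionFace model object.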
- (NSArray <VisionFace *> *)handRequest:(VNRequest *)request inImage:(CIImage *)image {
NSMutableArray *faces = [NSMutableArray new];
for (VNFaceObservation *observation in request.results) {
CGRect faceRect = observation.boundingBox;
CGRect convertedFaceRect = CGRectMake(image.extent.size.width * faceRect.origin.x, image.extent.size.height * (1 - faceRect.origin.y - faceRect.size.height), image.extent.size.width * faceRect.size.width, image.extent.size.height * faceRect.size.height);
VisionFace *visionFace = [VisionFace new];
visionFace.rect = convertedFaceRect;
visionFace.landmarks = observation.landmarks;
[faces addObject:visionFace];
}
return faces;
}
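// Adds a red bounding-box layer plus a landmark-outline layer for each detected
// face as sublayers of the image view's layer.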
- (void)drawFaces:(NSArray<VisionFace *> *)faces {
    for (VisionFace *face in faces) {
        CAShapeLayer *boundingBoxLayer = [[CAShapeLayer alloc] init];
        boundingBoxLayer.path = [UIBezierPath bezierPathWithRect:face.rect].CGPath;
        boundingBoxLayer.strokeColor = [UIColor redColor].CGColor;
        boundingBoxLayer.fillColor = [UIColor clearColor].CGColor;
        [self.filteredImage.layer addSublayer:boundingBoxLayer];
        CAShapeLayer *shapeLayer = [self layerFromFace:face inRegion:face.landmarks];
        [self.filteredImage.layer addSublayer:shapeLayer];
    }
}
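// Builds a shape layer tracing the face's allPoints landmark region as a single
// stroked path.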
- (CAShapeLayer *)layerFromFace:(VisionFace *)face inRegion:(VNFaceLandmarks2D *)region {
    CAShapeLayer *layer = [[CAShapeLayer alloc] init];
    layer.strokeColor = [UIColor redColor].CGColor;
    layer.fillColor = [UIColor clearColor].CGColor;
    layer.lineWidth = 2.0;
    UIBezierPath *path = [[UIBezierPath alloc] init];
    // The original outer loop rebuilt this same subpath pointCount times;
    // tracing the region once is sufficient.
    VNFaceLandmarkRegion2D *subregion = region.allPoints;
    if (subregion.pointCount > 0) {
        [path moveToPoint:[self absolutePointInFace:face normalizedPoint:subregion.normalizedPoints[0]]];
        for (NSInteger j = 1; j < subregion.pointCount; j++) {
            CGPoint normalPoint = [self absolutePointInFace:face normalizedPoint:subregion.normalizedPoints[j]];
            [path addLineToPoint:normalPoint];
        }
    }
    layer.path = path.CGPath;
    return layer;
}
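// Landmark points are normalized within the face bounding box with a
// lower-left origin; flip the y-axis to map into UIKit's upper-left-origin
// coordinates.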
- (CGPoint)absolutePointInFace:(VisionFace *)face normalizedPoint:(CGPoint)point {
    return CGPointMake(face.rect.origin.x + point.x * face.rect.size.width,
                       face.rect.origin.y + (1.0 - point.y) * face.rect.size.height);
}
@end
// For reference, the interface from VisionFace.h imported above. Note that an
// object property must be strong (not assign) under ARC.
@interface VisionFace : NSObject
@property (nonatomic, assign) CGRect rect;
@property (nonatomic, strong) VNFaceLandmarks2D *landmarks;
@end
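// The gist does not include VisionFace.m. Assuming the class is a plain data
// holder, a minimal implementation (not shown in the original source) is just
// the auto-synthesized properties:
@implementation VisionFace
@end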