Create a gist now

Instantly share code, notes, and snippets.

What would you like to do?
iOS live video face-masking demo. For more details, see http://chriscavanagh.wordpress.com/2013/11/12/live-video-face-masking-on-ios/
//
// CJCAnonymousFacesFilter.h
// CJC.FaceMaskingDemo
//
// Created by Chris Cavanagh on 11/9/13.
// Copyright (c) 2013 Chris Cavanagh. All rights reserved.
//
#import <CoreImage/CoreImage.h>

/// A CIFilter that pixellates the regions of an image covered by detected
/// faces, leaving the rest of the frame untouched.
///
/// Usage: set `inputImage` and `inputFacesMetadata` (AVMetadataFaceObject
/// instances from an AVCaptureMetadataOutput), then read `outputImage`.
/// When no face metadata is set, the entire image is pixellated.
@interface CJCAnonymousFacesFilter : CIFilter

/// The image to anonymize.
/// (The backing ivar is generated by @synthesize in the implementation; the
/// redundant explicit ivar declaration was removed from this public header.)
@property (nonatomic, strong) CIImage *inputImage;

/// Face metadata (AVMetadataFaceObject instances) describing face locations.
/// Setting this rebuilds the internal mask image; setting nil clears it.
@property (nonatomic, strong) NSArray *inputFacesMetadata;

@end
//
// CJCAnonymousFacesFilter.m
// CJC.FaceMaskingDemo
//
// Created by Chris Cavanagh on 11/9/13.
// Copyright (c) 2013 Chris Cavanagh. All rights reserved.
//
#import "CJCAnonymousFacesFilter.h"
#import <AVFoundation/AVFoundation.h>
@interface CJCAnonymousFacesFilter ()

// Lazily-created CIPixellate filter that anonymizes the whole frame.
@property (nonatomic) CIFilter *anonymize;

// Lazily-created CIBlendWithMask filter that composites the pixellated
// image, the original image, and the face mask.
@property (nonatomic) CIFilter *blend;

// Mask image built from the face metadata. Declared atomic because it is
// written by -setInputFacesMetadata: and read by -outputImage, which may be
// invoked from different queues.
@property (atomic) CIImage *maskImage;

@end
@implementation CJCAnonymousFacesFilter

// Explicit @synthesize keeps the underscore-free ivar names (inputImage,
// inputFacesMetadata) referenced directly by the methods below.
@synthesize inputImage;
@synthesize inputFacesMetadata;
/// Returns the filtered frame: the input image with masked (face) regions
/// pixellated. When no mask is available, returns the fully pixellated frame.
- (CIImage *)outputImage
{
	// Create a pixellated version of the whole input image.
	[self.anonymize setValue:inputImage forKey:kCIInputImageKey];

	// Snapshot the atomic mask exactly once so the nil-check and the use
	// below see the same value even if -setInputFacesMetadata: runs
	// concurrently. (The original re-read self.maskImage when configuring
	// the blend filter, which could observe a different value.)
	CIImage *maskImage = self.maskImage;
	CIImage *outputImage = nil;

	if ( maskImage )
	{
		// Blend the pixellated image, mask and original image so only the
		// masked regions appear pixellated.
		[self.blend setValue:_anonymize.outputImage forKey:kCIInputImageKey];
		[_blend setValue:inputImage forKey:kCIInputBackgroundImageKey];
		[_blend setValue:maskImage forKey:kCIInputMaskImageKey];

		outputImage = _blend.outputImage;

		// Drop the per-frame inputs so the filter does not retain them.
		[_blend setValue:nil forKey:kCIInputImageKey];
		[_blend setValue:nil forKey:kCIInputBackgroundImageKey];
		[_blend setValue:nil forKey:kCIInputMaskImageKey];
	}
	else
	{
		// No face mask: fall back to the fully pixellated frame.
		outputImage = _anonymize.outputImage;
	}

	[_anonymize setValue:nil forKey:kCIInputImageKey];

	return outputImage;
}
// Lazy accessor for the anonymizing filter. A CIPixellate filter is built on
// first use; the commented-out configuration below swaps in a Gaussian blur
// instead of pixellation.
- (CIFilter *)anonymize
{
	if ( nil == _anonymize )
	{
		CIFilter *pixellate = [CIFilter filterWithName:@"CIPixellate"];
		[pixellate setValue:@( 40 ) forKey:kCIInputScaleKey];

		// Alternative anonymizer:
		// CIFilter *blur = [CIFilter filterWithName:@"CIGaussianBlur"];
		// [blur setValue:@( 40 ) forKey:kCIInputRadiusKey];

		_anonymize = pixellate;
	}

	return _anonymize;
}
// Lazy accessor for the CIBlendWithMask filter used to composite the
// pixellated faces over the original frame.
- (CIFilter *)blend
{
	if ( nil == _blend )
	{
		_blend = [CIFilter filterWithName:@"CIBlendWithMask"];
	}

	return _blend;
}
// Custom setter: stores the metadata and regenerates the face mask image.
// Clearing the metadata (nil) also clears the mask.
- (void)setInputFacesMetadata:(NSArray *)theInputFacesMetadata
{
	inputFacesMetadata = theInputFacesMetadata;

	if ( theInputFacesMetadata )
	{
		self.maskImage = [self createMaskImageFromMetadata:theInputFacesMetadata];
	}
	else
	{
		self.maskImage = nil;
	}
}
// Builds a single mask image covering every detected face: each face becomes
// an elliptical gradient image, and the ellipses are composited together
// with source-over. Returns nil when the metadata contains no face objects.
- (CIImage *) createMaskImageFromMetadata:(NSArray *)metadataObjects
{
CIImage *maskImage = nil;

for ( AVMetadataObject *object in metadataObjects )
{
if ( [[object type] isEqual:AVMetadataObjectTypeFace] )
{
AVMetadataFaceObject* face = (AVMetadataFaceObject*)object;
// Face bounds arrive in normalized (0..1) metadata coordinates.
CGRect faceRectangle = [face bounds];
CGFloat height = inputImage.extent.size.height;
CGFloat width = inputImage.extent.size.width;
// Map the normalized face rect into image coordinates. The x/y swap and
// the (1 - ...) flips convert from the metadata coordinate space into
// CIImage space — NOTE(review): this mapping appears to assume portrait
// video orientation (set in the capture setup); confirm before reusing
// with other orientations.
CGFloat centerY = ( height * ( 1 - ( faceRectangle.origin.x + faceRectangle.size.width / 2.0 ) ) );
CGFloat centerX = width * ( 1 - ( faceRectangle.origin.y + faceRectangle.size.height / 2.0 ) );
// Radii are padded (size / 1.5, i.e. ~0.67 * size vs. 0.5 * size for a
// tight fit) so the ellipse extends beyond the detected face rectangle.
CGFloat radiusX = width * ( faceRectangle.size.width / 1.5 );
CGFloat radiusY = height * ( faceRectangle.size.height / 1.5 );
CIImage *circleImage = [self createCircleImageWithCenter:CGPointMake( centerX, centerY )
radius:CGVectorMake( radiusX, radiusY )
angle:0];

maskImage = [self compositeImage:circleImage ontoBaseImage:maskImage];
}
}

return maskImage;
}
// Creates an elliptical mask image centered at `center`. A CIRadialGradient
// renders a solid disc at the origin (opaque green out to inputRadius0,
// fading to transparent by inputRadius0 + 1), which is then translated,
// rotated and scaled into place by a CIAffineTransform filter.
// NOTE(review): only radius.dx feeds the gradient radii; radius.dy is never
// used, and the ellipse's aspect comes solely from the fixed 1.2 x 1.6
// scale below — confirm whether ignoring dy is intentional.
- (CIImage *) createCircleImageWithCenter:(CGPoint)center
radius:(CGVector)radius
angle:(CGFloat)angle
{
CIFilter *radialGradient = [CIFilter filterWithName:@"CIRadialGradient" keysAndValues:
@"inputRadius0", @(radius.dx),
@"inputRadius1", @(radius.dx + 1.0f ),
@"inputColor0", [CIColor colorWithRed:0.0 green:1.0 blue:0.0 alpha:1.0],
@"inputColor1", [CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:0.0],
kCIInputCenterKey, [CIVector vectorWithX:0 Y:0],
nil];

// Compose translate -> rotate -> scale (applied right-to-left to the
// gradient, which was generated around the origin).
CGAffineTransform transform = CGAffineTransformMakeTranslation( center.x, center.y );
transform = CGAffineTransformRotate( transform, angle );
transform = CGAffineTransformScale( transform, 1.2, 1.6 );

CIFilter *maskScale = [CIFilter filterWithName:@"CIAffineTransform" keysAndValues:
kCIInputImageKey, radialGradient.outputImage,
kCIInputTransformKey, [NSValue valueWithBytes:&transform objCType:@encode( CGAffineTransform )],
nil];

return [maskScale valueForKey:kCIOutputImageKey];
}
// Composites `ciImage` over `baseImage` with CISourceOverCompositing.
// When there is no base image yet, the overlay is returned unchanged.
- (CIImage *) compositeImage:(CIImage *)ciImage
ontoBaseImage:(CIImage *)baseImage
{
	if ( nil == baseImage ) return ciImage;

	CIFilter *sourceOver = [CIFilter filterWithName:@"CISourceOverCompositing"];
	[sourceOver setValue:ciImage forKey:kCIInputImageKey];
	[sourceOver setValue:baseImage forKey:kCIInputBackgroundImageKey];

	return [sourceOver valueForKey:kCIOutputImageKey];
}

@end
//
// CJCViewController.h
// CJC.FaceMaskingDemo
//
// Created by Chris Cavanagh on 11/9/13.
// Copyright (c) 2013 Chris Cavanagh. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <GLKit/GLKit.h>
#import <AVFoundation/AVFoundation.h>

/// View controller that renders live camera video into a GLKView, masking
/// detected faces via CJCAnonymousFacesFilter. Acts as the delegate for both
/// the metadata (face detection) output and the video data output.
@interface CJCViewController : GLKViewController <AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
@end
//
// CJCViewController.m
// CJC.FaceMaskingDemo
//
// Created by Chris Cavanagh on 11/9/13.
// Copyright (c) 2013 Chris Cavanagh. All rights reserved.
//
#import "CJCViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "CJCAnonymousFacesFilter.h"
@interface CJCViewController ()
{
// Queue on which both capture callbacks (sample buffers and face metadata)
// are delivered.
dispatch_queue_t _serialQueue;
}

// OpenGL ES context backing the GLKView.
@property (strong, nonatomic) EAGLContext *eaglContext;

// Core Image context that renders filter output into the GL context.
@property (strong, nonatomic) CIContext *ciContext;

// Filter that pixellates detected faces.
@property (strong, nonatomic) CJCAnonymousFacesFilter *filter;

@property (strong, nonatomic) AVCaptureSession *captureSession;

// Latest face metadata; written by the metadata callback and read by the
// video data callback. NOTE(review): both callbacks are scheduled on
// _serialQueue, so this is safe only if that queue is actually serial.
@property (strong, nonatomic) NSArray *facesMetadata;

@end
@implementation CJCViewController

/// Creates the GL + Core Image rendering stack, the capture callback queue,
/// and the face-masking filter.
- (void)viewDidLoad
{
	[super viewDidLoad];

	self.eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

	if ( !_eaglContext )
	{
		NSLog(@"Failed to create ES context");
	}

	self.ciContext = [CIContext contextWithEAGLContext:_eaglContext];

	GLKView *view = (GLKView *)self.view;
	view.context = _eaglContext;
	view.drawableDepthFormat = GLKViewDrawableDepthFormat24;

	// Bug fix: the original assigned dispatch_get_global_queue(), which is a
	// *concurrent* queue despite the ivar's name. AVCaptureVideoDataOutput
	// requires a serial queue for its sample buffer delegate, and the face
	// metadata callback relies on being serialized with the video callback
	// (both write/read _facesMetadata). Use a genuinely serial queue.
	_serialQueue = dispatch_queue_create( "CJC.FaceMaskingDemo.capture", DISPATCH_QUEUE_SERIAL );

	self.filter = [CJCAnonymousFacesFilter new];
}
// Builds and starts the capture session each time the view becomes visible
// (torn down again in -viewWillDisappear:).
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];

[self setupAVCapture];
}
// Stops and releases the capture session before the view goes away.
- (void)viewWillDisappear:(BOOL)animated
{
[self tearDownAVCapture];

[super viewWillDisappear:animated];
}
- (void)dealloc
{
// Stop capture if it is still running (no-op when _captureSession is nil).
[self tearDownAVCapture];

// Detach our GL context if it is still the current one.
if ([EAGLContext currentContext] == _eaglContext)
{
[EAGLContext setCurrentContext:nil];
}
}
/// Configures and starts the capture session: front (or default) camera in,
/// portrait video data + face metadata out, both delivered on _serialQueue.
- (void)setupAVCapture
{
	AVCaptureSession *captureSession = [AVCaptureSession new];

	[captureSession beginConfiguration];

	NSError *error;

	// Input device
	AVCaptureDevice *captureDevice = [self frontOrDefaultCamera];
	AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

	if ( !deviceInput )
	{
		// Previously this failure was silent; surface it (e.g. simulator, or
		// camera access denied). canAddInput:nil below returns NO, so the
		// session is simply left without an input.
		NSLog( @"Could not create camera input: %@", error );
	}

	if ( [captureSession canAddInput:deviceInput] )
	{
		[captureSession addInput:deviceInput];
	}

	if ( [captureSession canSetSessionPreset:AVCaptureSessionPresetHigh] )
	{
		captureSession.sessionPreset = AVCaptureSessionPresetHigh;
	}

	// Video data output
	AVCaptureVideoDataOutput *videoDataOutput = [self createVideoDataOutput];

	if ( [captureSession canAddOutput:videoDataOutput] )
	{
		[captureSession addOutput:videoDataOutput];

		// firstObject is nil-safe, unlike the original connections[ 0 ];
		// messaging a nil connection is a harmless no-op.
		AVCaptureConnection *connection = videoDataOutput.connections.firstObject;
		connection.videoOrientation = AVCaptureVideoOrientationPortrait;
	}

	// Metadata output (face detection)
	AVCaptureMetadataOutput *metadataOutput = [self createMetadataOutput];

	if ( [captureSession canAddOutput:metadataOutput] )
	{
		[captureSession addOutput:metadataOutput];

		// Request only the face metadata types the device actually supports.
		metadataOutput.metadataObjectTypes = [self metadataOutput:metadataOutput allowedObjectTypes:self.faceMetadataObjectTypes];
	}

	// Done
	[captureSession commitConfiguration];

	// -startRunning blocks until the session starts, so keep it off the
	// main thread.
	dispatch_async( _serialQueue,
	^{
		[captureSession startRunning];
	});

	_captureSession = captureSession;

	// [self updateVideoOrientation:self.interfaceOrientation];
}
// Stops the capture session and releases it. Safe to call repeatedly:
// messaging a nil _captureSession is a no-op.
- (void)tearDownAVCapture
{
[_captureSession stopRunning];

_captureSession = nil;
}
// Builds the metadata output whose face-detection callbacks are delivered
// on _serialQueue.
- (AVCaptureMetadataOutput *)createMetadataOutput
{
	AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
	[output setMetadataObjectsDelegate:self queue:_serialQueue];

	return output;
}
// Returns the subset of `objectTypes` that `metadataOutput` can actually
// deliver, suitable for assigning to metadataObjectTypes.
//
// Bug fix: the original called NSSet's -intersectsSet:, which merely *tests*
// whether the two sets share a member and returns a BOOL (here discarded) —
// it never mutates anything, so every available type was returned. Use
// NSMutableSet's -intersectSet: to compute the actual intersection.
- (NSArray *)metadataOutput:(AVCaptureMetadataOutput *)metadataOutput
allowedObjectTypes:(NSArray *)objectTypes
{
	NSMutableSet *available = [NSMutableSet setWithArray:metadataOutput.availableMetadataObjectTypes];
	[available intersectSet:[NSSet setWithArray:objectTypes]];

	return [available allObjects];
}
// Every barcode symbology this app could request. Currently unreferenced by
// the capture setup, which only asks for face metadata.
- (NSArray *)barcodeMetadataObjectTypes
{
	return @[ AVMetadataObjectTypeUPCECode,
	          AVMetadataObjectTypeCode39Code,
	          AVMetadataObjectTypeCode39Mod43Code,
	          AVMetadataObjectTypeEAN13Code,
	          AVMetadataObjectTypeEAN8Code,
	          AVMetadataObjectTypeCode93Code,
	          AVMetadataObjectTypeCode128Code,
	          AVMetadataObjectTypePDF417Code,
	          AVMetadataObjectTypeQRCode,
	          AVMetadataObjectTypeAztecCode ];
}
// The metadata object types required for face detection.
- (NSArray *)faceMetadataObjectTypes
{
	return @[ AVMetadataObjectTypeFace ];
}
// Builds the video data output that delivers sample buffers to this
// controller on _serialQueue.
- (AVCaptureVideoDataOutput *)createVideoDataOutput
{
	AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
	[output setSampleBufferDelegate:self queue:_serialQueue];

	return output;
}
// Rotation hook. Orientation updates are currently disabled — the video
// connection stays fixed to portrait as set in -setupAVCapture.
- (void)willAnimateRotationToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
// [self updateVideoOrientation:toInterfaceOrientation];
}
// Points the first capture connection's video orientation at the given
// interface orientation. Currently unused (its callers are commented out).
- (void)updateVideoOrientation:(UIInterfaceOrientation)orientation
{
	// firstObject is nil-safe; the original indexed outputs[ 0 ] and
	// connections[ 0 ], which throws when either array is empty (e.g. when
	// called before -setupAVCapture completes).
	AVCaptureOutput *output = _captureSession.outputs.firstObject;
	AVCaptureConnection *connection = output.connections.firstObject;

	if ( [connection isVideoOrientationSupported] )
	{
		connection.videoOrientation = [self videoOrientation:orientation];
	}
}
// Maps a UIInterfaceOrientation onto the matching AVCaptureVideoOrientation.
//
// Fix: the original switched on a UIInterfaceOrientation value but used
// UIDeviceOrientation constants for the cases. The enums share raw values
// for these members (UIInterfaceOrientationLandscapeLeft is defined as
// UIDeviceOrientationLandscapeRight and vice versa), so runtime behavior is
// unchanged — but using the correct constants removes the enum-type mismatch
// and the misleading left/right "swap" in the landscape cases.
- (AVCaptureVideoOrientation)videoOrientation:(UIInterfaceOrientation)orientation
{
	switch ( orientation )
	{
		case UIInterfaceOrientationPortrait: return AVCaptureVideoOrientationPortrait;
		case UIInterfaceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
		case UIInterfaceOrientationLandscapeLeft: return AVCaptureVideoOrientationLandscapeLeft;
		case UIInterfaceOrientationLandscapeRight: return AVCaptureVideoOrientationLandscapeRight;
		default: return AVCaptureVideoOrientationPortrait;
	}
}
// Returns the front-facing camera when one exists, otherwise the system's
// default video capture device.
- (AVCaptureDevice *)frontOrDefaultCamera
{
	for ( AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] )
	{
		if ( AVCaptureDevicePositionFront == candidate.position )
		{
			return candidate;
		}
	}

	return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];

// If the view is not on screen, release it along with the capture session
// and GL resources; they are recreated by -viewDidLoad / -viewWillAppear:.
if ( [self isViewLoaded] && [[self view] window] == nil )
{
self.view = nil;

[self tearDownAVCapture];

if ( [EAGLContext currentContext] == _eaglContext )
{
[EAGLContext setCurrentContext:nil];
}

self.eaglContext = nil;
}

// Dispose of any resources that can be recreated.
}
#pragma mark - GLKView and GLKViewController delegate methods

// GLKViewController per-frame update hook — intentionally empty; all work
// happens in the capture callbacks below.
- (void)update
{
}
// GLKView draw hook — intentionally empty; frames are drawn directly by the
// CIContext in -captureOutput:didOutputSampleBuffer:fromConnection:.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
}
#pragma mark Metadata capture

// Face-detection callback: stash the latest face metadata for use by the
// next video frame. Delivered on the queue passed to
// -setMetadataObjectsDelegate:queue: (_serialQueue), which is also the video
// data callback's queue. NOTE(review): the unsynchronized ivar write here
// assumes that shared queue is serial — confirm.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
_facesMetadata = metadataObjects;
}
#pragma mark Video data capture

// Per-frame callback: wraps the pixel buffer in a CIImage, runs it through
// the face-masking filter with the most recent face metadata, then renders
// the result into the GLKView's drawable on the main queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer( sampleBuffer );

if ( pixelBuffer )
{
// Carry the sample buffer's propagatable attachments over to the CIImage;
// the copied CF dictionary must be released manually.
CFDictionaryRef attachments = CMCopyDictionaryOfAttachments( kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate );
CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary *)attachments];

if ( attachments ) CFRelease( attachments );

CGRect extent = ciImage.extent;

// Build the filter graph now, on the capture queue, and clear the inputs
// afterwards so the filter does not keep the frame alive.
_filter.inputImage = ciImage;
_filter.inputFacesMetadata = _facesMetadata;

CIImage *output = _filter.outputImage;

_filter.inputImage = nil;
_filter.inputFacesMetadata = nil;

dispatch_async( dispatch_get_main_queue(),
^{
UIView *view = self.view;
CGRect bounds = view.bounds;
CGFloat scale = view.contentScaleFactor;

// Crop the source extent horizontally to the view's aspect ratio
// (centered), then draw it into the view bounds converted to pixels via
// the content scale factor.
CGFloat extentFitWidth = extent.size.height / ( bounds.size.height / bounds.size.width );
CGRect extentFit = CGRectMake( ( extent.size.width - extentFitWidth ) / 2, 0, extentFitWidth, extent.size.height );

CGRect scaledBounds = CGRectMake( bounds.origin.x * scale, bounds.origin.y * scale, bounds.size.width * scale, bounds.size.height * scale );

[_ciContext drawImage:output inRect:scaledBounds fromRect:extentFit];
// [_ciContext render:output toCVPixelBuffer:pixelBuffer];

// NOTE(review): presenting the renderbuffer before -display (whose
// drawInRect: is empty) looks intentional but fragile — verify the
// present/display ordering against the GLKView drawable lifecycle.
[_eaglContext presentRenderbuffer:GL_RENDERBUFFER];

[(GLKView *)self.view display];
});
}
}
// Lock the interface to its initial orientation; the capture connection is
// fixed to portrait in -setupAVCapture.
- (BOOL)shouldAutorotate
{
return NO;
}

@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment