video mustachify: a MacRuby script that detects faces in the live camera feed and draws a mustache, glasses, and a hat on each one
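
# Run with MacRuby on OS X, e.g. `macruby mustachify.rb` (the filename is illustrative).
# Needs a camera; the overlay images are downloaded from Dropbox at launch.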

framework 'Cocoa'
framework 'AVFoundation'

class FaceDetectionDelegate
  attr_accessor :window

  def applicationDidFinishLaunching(aNotification)
    width  = 1280
    height = 720

    # 720p capture session.
    session = AVCaptureSession.alloc.init
    session.sessionPreset = AVCaptureSessionPreset1280x720

    # Bitmap context (not actually used by the overlay code below).
    @cg_context = CGBitmapContextCreate(nil, width, height, 8, 4*width, CGColorSpaceCreateDeviceRGB(), KCGImageAlphaNoneSkipFirst)
    @item_layer = {}

    # Core Image face detector; low accuracy keeps it fast enough for live video.
    detectorOptions = {CIDetectorAccuracy: CIDetectorAccuracyLow}
    @detector = CIDetector.detectorOfType "CIDetectorTypeFace", context:nil, options:detectorOptions

    # Download the overlay images.
    t_url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/mustache.png")
    t_source = CGImageSourceCreateWithURL(t_url, nil)
    @tache = CGImageSourceCreateImageAtIndex(t_source, 0, nil)
    g_url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/glasses.png")
    g_source = CGImageSourceCreateWithURL(g_url, nil)
    @glasses = CGImageSourceCreateImageAtIndex(g_source, 0, nil)
    h_url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/hat.png")
    h_source = CGImageSourceCreateWithURL(h_url, nil)
    @hat = CGImageSourceCreateImageAtIndex(h_source, 0, nil)

    # Set to true to also mark the detected eyes and mouth.
    @show_face = false

    window.delegate = self

    # Default camera in, BGRA video frames out, delivered on a serial queue.
    device = AVCaptureDevice.defaultDeviceWithMediaType AVMediaTypeVideo
    input = AVCaptureDeviceInput.deviceInputWithDevice device, error:nil
    output = AVCaptureVideoDataOutput.alloc.init
    output.alwaysDiscardsLateVideoFrames = true
    queue = Dispatch::Queue.new('cameraQueue')
    output.setSampleBufferDelegate self, queue:queue.dispatch_object
    output.setVideoSettings KCVPixelBufferPixelFormatTypeKey => KCVPixelFormatType_32BGRA, KCVPixelBufferWidthKey => width, KCVPixelBufferHeightKey => height
    session.addInput input
    session.addOutput output

    # Preview layer, flipped horizontally so it behaves like a mirror.
    @preview_layer = AVCaptureVideoPreviewLayer.layerWithSession session
    @preview_layer.frame = [0.0, 0.0, width, height]
    # @preview_layer.sublayerTransform = CATransform3DMakeScale(-1.0, 1.0, 0.0)
    @preview_layer.videoGravity = AVLayerVideoGravityResizeAspectFill
    mirror = CGAffineTransformMakeScale(-1, 1)
    @preview_layer.affineTransform = mirror

    session.startRunning

    # Size the window to the video and bring it to the front.
    window.setFrame [0.0, 0.0, width, height], display:true, animate:true
    window.center
    window.contentView.wantsLayer = true
    window.contentView.layer.addSublayer @preview_layer
    window.orderFrontRegardless
  end
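
  # Translucent debug layers marking the detected eyes and mouth (shown when @show_face is true).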
  def addLeftEye
    leftEye = CALayer.layer
    leftEye.backgroundColor = CGColorCreateGenericRGB(1, 0, 0, 0.5)
    @item_layer[:leftEye] = leftEye
    @preview_layer.addSublayer leftEye
    leftEye
  end

  def addRightEye
    rightEye = CALayer.layer
    rightEye.backgroundColor = CGColorCreateGenericRGB(0, 0, 1, 0.5)
    @item_layer[:rightEye] = rightEye
    @preview_layer.addSublayer rightEye
    rightEye
  end

  def addMouth
    mouth = CALayer.layer
    mouth.backgroundColor = CGColorCreateGenericRGB(0, 1, 0, 0.5)
    @item_layer[:mouth] = mouth
    @preview_layer.addSublayer mouth
    mouth
  end
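
  # Overlay layers: each is created once, cached in @item_layer, and repositioned every frame.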
  def add_tache_layer
    tache = CALayer.layer
    tache.contents = @tache
    tache.contentsGravity = KCAGravityResize
    @item_layer[:mustache] = tache
    @preview_layer.addSublayer tache
    tache
  end

  def add_glasses_layer
    glasses = CALayer.layer
    glasses.contents = @glasses
    glasses.contentsGravity = KCAGravityResize
    @item_layer[:glasses] = glasses
    @preview_layer.addSublayer glasses
    glasses
  end

  def add_hat_layer
    hat = CALayer.layer
    hat.contents = @hat
    hat.contentsGravity = KCAGravityResize
    @item_layer[:hat] = hat
    @preview_layer.addSublayer hat
    hat
  end
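
  # Sample-buffer delegate callback: run face detection on each frame and place
  # the overlays; layer updates are pushed to the main queue.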
  def captureOutput(captureOutput, didOutputSampleBuffer:sampleBuffer, fromConnection:connection)
    imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    image = CIImage.imageWithCVImageBuffer(imageBuffer)
    features = @detector.featuresInImage(image)

    # Layer geometry must be updated on the main thread.
    Dispatch::Queue.main.sync do
      ## @item_layer[features.size..@item_layer.size].each {|t| t.opacity = 0} rescue nil # hide any unattached taches
      features.each.with_index do |feature, i|
        if @show_face
          if feature.hasLeftEyePosition
            leftEye = @item_layer[:leftEye] || addLeftEye
            leftEye.frame = [feature.leftEyePosition.x - 5, feature.leftEyePosition.y - 5, 10, 10]
          end
          if feature.hasRightEyePosition
            rightEye = @item_layer[:rightEye] || addRightEye
            rightEye.frame = [feature.rightEyePosition.x - 5, feature.rightEyePosition.y - 5, 10, 10]
          end
          if feature.hasMouthPosition
            mouth = @item_layer[:mouth] || addMouth
            mouth.frame = [feature.mouthPosition.x - 20, feature.mouthPosition.y - 10, 40, 20]
          end
        end

        # Mustache: face-wide, halfway between the mouth and the midpoint of the eyes,
        # rotated to match the tilt of the eye line. (The position and rotation use the
        # eye coordinates, so require them to be detected too.)
        if feature.hasMouthPosition && feature.hasLeftEyePosition && feature.hasRightEyePosition
          w = feature.bounds.size.width
          h = feature.bounds.size.height/5
          tache = @item_layer[:mustache] || add_tache_layer
          tache.opacity = 0.9
          tache.bounds = [0, 0, w, h]
          tache.position = [(feature.mouthPosition.x + (feature.leftEyePosition.x + feature.rightEyePosition.x)/2)/2, feature.mouthPosition.y + h/2]
          rotation = Math.atan2(feature.rightEyePosition.y - feature.leftEyePosition.y, feature.rightEyePosition.x - feature.leftEyePosition.x)
          tache.setValue rotation, forKeyPath: "transform.rotation"
        end

        # Glasses: centered between the eyes, matching the eye-line tilt.
        if feature.hasLeftEyePosition && feature.hasRightEyePosition
          w = feature.bounds.size.width
          h = feature.bounds.size.height/2.5
          glasses = @item_layer[:glasses] || add_glasses_layer
          glasses.opacity = 0.9
          glasses.bounds = [0, 0, w, h]
          glasses.position = [(feature.leftEyePosition.x + feature.rightEyePosition.x)/2, (feature.rightEyePosition.y + feature.leftEyePosition.y)/2]
          rotation = Math.atan2(feature.rightEyePosition.y - feature.leftEyePosition.y, feature.rightEyePosition.x - feature.leftEyePosition.x)
          glasses.setValue rotation, forKeyPath: "transform.rotation"
        end

        # Hat: 5/4 of the face size, raised above eye level, with an extra 25-degree tilt.
        if feature.hasLeftEyePosition && feature.hasRightEyePosition && feature.hasMouthPosition
          w = feature.bounds.size.width*5/4
          h = feature.bounds.size.height*5/4
          hat = @item_layer[:hat] || add_hat_layer
          hat.opacity = 0.9
          hat.bounds = [0, 0, w, h]
          hat.position = [(feature.rightEyePosition.x + feature.leftEyePosition.x + feature.mouthPosition.x)/3, (feature.leftEyePosition.y + feature.rightEyePosition.y)/2 - h/7 + h/2]
          rotation = 25*Math::PI/180 + Math.atan2(feature.rightEyePosition.y - feature.leftEyePosition.y, feature.rightEyePosition.x - feature.leftEyePosition.x)
          hat.setValue rotation, forKeyPath: "transform.rotation"
        end
      end
    end
    nil
  end
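
  # Quit when the preview window is closed.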
  def windowWillClose(sender); exit(1); end
end

# Create the application.
application = NSApplication.sharedApplication
application.activationPolicy = NSApplicationActivationPolicyRegular
application.delegate = FaceDetectionDelegate.alloc.init

# Create the application window (it is resized to the video dimensions at launch).
frame = [0.0, 0.0, 330, 250]
window = NSWindow.alloc.initWithContentRect frame,
  styleMask: NSTitledWindowMask | NSClosableWindowMask,
  backing: NSBackingStoreBuffered,
  defer: false
application.delegate.window = window
window.orderOut(nil)
window.display

puts "Starting the app..."
application.run