@alskipp · Created December 12, 2011 23:34

MacRuby live(ish) moustachification in Mac OS X Lion.
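
The script below opens a window with a live preview from the built-in camera, runs Core Image face detection (CIDetector, introduced in OS X Lion) on each captured frame, and pins a moustache CALayer over every detected face.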
framework 'Cocoa'
framework 'AVFoundation'

class FaceDetectionDelegate
  attr_accessor :window

  def applicationDidFinishLaunching(aNotification)
    width = 640
    height = 480
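
    # Capture session fixed at 640x480 to match the window and the pixel buffer size.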
    session = AVCaptureSession.alloc.init
    session.sessionPreset = AVCaptureSessionPreset640x480
    @cg_context = CGBitmapContextCreate(nil, width, height, 8, 4 * width, CGColorSpaceCreateDeviceRGB(), KCGImageAlphaNoneSkipFirst) # not referenced again in this script
    @tache_array = []
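
    # A low-accuracy face detector is enough here and keeps per-frame detection fast.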
    detectorOptions = { CIDetectorAccuracy: CIDetectorAccuracyLow }
    @detector = CIDetector.detectorOfType CIDetectorTypeFace, context: nil, options: detectorOptions
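
    # Fetch the moustache PNG once and keep it as a CGImage for the overlay layers.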
    url = NSURL.URLWithString("http://dl.dropbox.com/u/349788/mustache.png")
    source = CGImageSourceCreateWithURL url, nil
    @tache = CGImageSourceCreateImageAtIndex source, 0, nil

    window.delegate = self
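
    # Wire the default camera into the session and tap its frames.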
    device = AVCaptureDevice.defaultDeviceWithMediaType AVMediaTypeVideo
    input = AVCaptureDeviceInput.deviceInputWithDevice device, error: nil
    output = AVCaptureVideoDataOutput.alloc.init
    output.alwaysDiscardsLateVideoFrames = true
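
    # Frames arrive as 32BGRA pixel buffers on a private dispatch queue.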
    queue = Dispatch::Queue.new('cameraQueue')
    output.setSampleBufferDelegate self, queue: queue.dispatch_object
    output.setVideoSettings KCVPixelBufferPixelFormatTypeKey => KCVPixelFormatType_32BGRA,
                            KCVPixelBufferWidthKey => width,
                            KCVPixelBufferHeightKey => height
    session.addInput input
    session.addOutput output
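
    # Live preview layer, flipped horizontally so the video behaves like a mirror.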
    @preview_layer = AVCaptureVideoPreviewLayer.layerWithSession session
    @preview_layer.frame = [0.0, 0.0, width, height]
    @preview_layer.videoGravity = AVLayerVideoGravityResizeAspectFill
    mirror = CGAffineTransformMakeScale(-1, 1)
    @preview_layer.affineTransform = mirror

    session.startRunning
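
    # Size the window to the video, host the preview layer and bring it to the front.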
    window.setFrame [0.0, 0.0, width, height], display: true, animate: true
    window.center
    window.contentView.wantsLayer = true
    window.contentView.layer.addSublayer @preview_layer
    window.orderFrontRegardless
  end
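
  # Create a new moustache layer, cache it for reuse and attach it to the preview.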
  def add_tache_layer
    tache = CALayer.layer
    tache.contents = @tache
    tache.contentsGravity = KCAGravityResize
    @tache_array << tache
    @preview_layer.addSublayer tache
    tache
  end
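
  # AVCaptureVideoDataOutput delegate callback, invoked on the camera queue for every frame.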
  def captureOutput(captureOutput, didOutputSampleBuffer: sampleBuffer, fromConnection: connection)
    imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    image = CIImage.imageWithCVImageBuffer(imageBuffer)
    features = @detector.featuresInImage(image)
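
    # Layer changes must happen on the main thread.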
    Dispatch::Queue.main.sync do
      # Hide any moustache layers left over from faces that are no longer detected.
      (@tache_array[features.size..-1] || []).each { |t| t.opacity = 0 }
      features.each.with_index do |feature, i|
        if feature.hasRightEyePosition && feature.hasLeftEyePosition && feature.hasMouthPosition
          w = feature.bounds.size.width
          h = feature.bounds.size.height / 5
          tache = @tache_array[i] || add_tache_layer
          tache.opacity = 0.9
          tache.bounds = [0, 0, w, h]
          tache.position = [feature.mouthPosition.x, feature.mouthPosition.y + 10]
          # Tilt the moustache to match the angle of the line between the eyes.
          rotation = Math.atan2(feature.rightEyePosition.y - feature.leftEyePosition.y,
                                feature.rightEyePosition.x - feature.leftEyePosition.x)
          tache.setValue rotation, forKeyPath: "transform.rotation"
        end
      end
    end
    nil
  end

  # Quit when the window is closed.
  def windowWillClose(sender)
    exit(0)
  end
end

# Create the application.
application = NSApplication.sharedApplication
application.activationPolicy = NSApplicationActivationPolicyRegular
application.delegate = FaceDetectionDelegate.alloc.init

# Create the application window.
frame = [0.0, 0.0, 330, 250]
window = NSWindow.alloc.initWithContentRect frame,
  styleMask: NSTitledWindowMask | NSClosableWindowMask,
  backing: NSBackingStoreBuffered,
  defer: false
application.delegate.window = window
window.orderOut(nil)
window.display
puts "Starting the app..."
application.run
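
To run it (assuming MacRuby is installed and the script is saved as, say, moustache.rb):

  macruby moustache.rb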