Created
June 29, 2018 21:20
-
-
Save steverichey/b4c3aeaeb6d2100c54b1b787b644994a to your computer and use it in GitHub Desktop.
Camera edge detection demo for Realities 360
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import Cocoa | |
import AVFoundation | |
import AVKit | |
import QuartzCore | |
import PlaygroundSupport | |
import AppKit | |
import CoreGraphics | |
import CoreImage | |
// MARK: - Capture session setup

// Host view for the playground's live view; the image view inside it will
// display each processed camera frame.
let playgroundView = NSView(frame: NSRect(x: 0.0, y: 0.0, width: 640.0, height: 480.0))
let nsImageView = NSImageView(frame: playgroundView.frame)
playgroundView.addSubview(nsImageView)

// Capture at 640x480 to match the view size.
let session = AVCaptureSession()
session.sessionPreset = AVCaptureSession.Preset.vga640x480

// Attach the first video device that supports the chosen preset.
// BUG FIX: the original called begin/commitConfiguration() around *nothing*
// and added the input outside the transaction; the input is now added inside
// it. The original also swallowed the thrown error with a bare `error`
// expression — it is now reported before trying the next device.
session.beginConfiguration()
for device in AVCaptureDevice.devices() {
    guard device.hasMediaType(AVMediaType.video),
          device.supportsSessionPreset(AVCaptureSession.Preset.vga640x480) else {
        continue
    }
    do {
        let input = try AVCaptureDeviceInput(device: device)
        if session.canAddInput(input) {
            session.addInput(input)
            break
        }
    } catch {
        print("Failed to create capture input for \(device): \(error)")
    }
}
session.commitConfiguration()
/// Errors thrown by the Core Image filter helpers in this file.
enum CIFilterError: Error {
    /// `CIFilter(name:withInputParameters:)` returned nil; `name` is the
    /// Core Image filter name that failed to instantiate.
    case filterCreateError(name: String)
    /// The filter was created but produced no `outputImage`; `name` is the
    /// filter's name.
    case filterApplyError(name: String)
}
// MARK: - CIImage filtering conveniences

extension CIImage {
    /// Returns this image run through the CIEdgeWork filter.
    /// - Parameter radius: Used as the filter's `inputRadius`.
    /// - Throws: `CIFilterError` if the filter cannot be created or yields no output.
    func edgeDetect(radius: Double) throws -> CIImage {
        let edgeFilter = try createEdgeFilter(inputImage: self, filterRadius: radius)
        if let result = edgeFilter.outputImage {
            return result
        }
        throw CIFilterError.filterApplyError(name: edgeFilter.name)
    }

    /// Returns this image run through the CIGaussianBlur filter.
    /// - Parameter radius: Used as the filter's `inputRadius`.
    /// - Throws: `CIFilterError` if the filter cannot be created or yields no output.
    func blur(radius: Double) throws -> CIImage {
        let blurFilter = try createBlurFilter(inputImage: self, filterRadius: radius)
        if let result = blurFilter.outputImage {
            return result
        }
        throw CIFilterError.filterApplyError(name: blurFilter.name)
    }
}
/// Core Image filter names used by this playground, wrapped in an enum so the
/// string constants cannot be mistyped at the call sites.
private enum FilterName: String {
    case edgeWork = "CIEdgeWork"
    case gaussianBlur = "CIGaussianBlur"
}
/// Builds a CIEdgeWork filter configured with the given image and radius.
/// - Throws: `CIFilterError.filterCreateError` if the filter cannot be made.
private func createEdgeFilter(inputImage: CIImage, filterRadius: Double) throws -> CIFilter {
    let parameters: [String: Any] = ["inputRadius": filterRadius]
    return try createFilter(.edgeWork, withInputImage: inputImage, parameters: parameters)
}
/// Builds a CIGaussianBlur filter configured with the given image and radius.
/// - Throws: `CIFilterError.filterCreateError` if the filter cannot be made.
private func createBlurFilter(inputImage: CIImage, filterRadius: Double) throws -> CIFilter {
    let parameters: [String: Any] = ["inputRadius": filterRadius]
    return try createFilter(.gaussianBlur, withInputImage: inputImage, parameters: parameters)
}
/// Adds `inputImage` to the parameter dictionary (under `kCIInputImageKey`)
/// and delegates to `createFilter(_:parameters:)`.
private func createFilter(_ name: FilterName, withInputImage inputImage: CIImage, parameters: [String: Any]) throws -> CIFilter {
    let merged: [String: Any] = [kCIInputImageKey: inputImage] + parameters
    return try createFilter(name, parameters: merged)
}
/// Instantiates the named Core Image filter with the given parameters.
/// - Throws: `CIFilterError.filterCreateError` when Core Image rejects the
///   name/parameter combination.
private func createFilter(_ name: FilterName, parameters: [String: Any]) throws -> CIFilter {
    if let filter = CIFilter(name: name.rawValue, withInputParameters: parameters) {
        return filter
    }
    throw CIFilterError.filterCreateError(name: name.rawValue)
}
/// Returns the union of two dictionaries. When a key appears in both, the
/// value from `rhs` wins (same right-biased result as the original two-loop
/// copy, since the second loop overwrote the first).
private func + <K, V>(lhs: [K: V], rhs: [K: V]) -> [K: V] {
    // Dictionary.merging performs the copy-and-overwrite in one stdlib call;
    // `$1` (the rhs value) is kept on key collisions.
    return lhs.merging(rhs) { _, rhsValue in rhsValue }
}
// Deliver frames as 32BGRA (a format CIImage consumes directly) and let the
// output drop frames that arrive while the previous one is still processing.
let output = AVCaptureVideoDataOutput()
output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
output.alwaysDiscardsLateVideoFrames = true

// Re-entrancy guard: set on the capture callback queue, cleared on the main
// queue after the processed image is displayed.
// NOTE(review): this flag is read and written from two different queues with
// no synchronization — confirm this data race is acceptable for a demo.
var busy = false
/// Receives camera frames, runs them through blur + edge detection, and
/// pushes the result to `nsImageView` on the main queue.
class SampleDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    // Creating a CIContext is expensive; reuse a single context for every
    // frame instead of allocating one per callback as the original did.
    private let context = CIContext(options: nil)

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Skip frames that arrive while the previous one is still in flight.
        if busy {
            return
        }
        busy = true

        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            // BUG FIX: the original returned here without clearing `busy`,
            // permanently freezing the preview after one bad frame.
            busy = false
            return
        }

        let width = CVPixelBufferGetWidth(buffer)
        let height = CVPixelBufferGetHeight(buffer)

        let ciImage: CIImage
        do {
            // BUG FIX: was `try!`, which crashed the playground on any filter
            // error; recover by dropping the frame instead.
            ciImage = try CIImage(cvImageBuffer: buffer)
                .blur(radius: 5.0)
                .edgeDetect(radius: 1.0)
        } catch {
            busy = false
            return
        }

        guard let cgImage = context.createCGImage(ciImage, from: CGRect(x: 0, y: 0, width: width, height: height)) else {
            busy = false  // same stuck-flag fix as above
            return
        }

        let nsImage = NSImage(cgImage: cgImage, size: CGSize(width: width, height: height))
        DispatchQueue.main.async {
            nsImageView.image = nsImage
            busy = false
        }
    }
}
// Wire the delegate to the output and start streaming into the live view.
// BUG FIX: AVCaptureVideoDataOutput requires a *serial* dispatch queue so
// frames are delivered in order; the original passed `attributes: .concurrent`,
// which Apple's documentation explicitly disallows for this delegate queue.
let delegate = SampleDelegate()
let bufferQueue = DispatchQueue(label: "Buffer Queue", qos: .utility)
output.setSampleBufferDelegate(delegate, queue: bufferQueue)

if session.canAddOutput(output) {
    session.addOutput(output)
}

session.startRunning()
PlaygroundPage.current.liveView = playgroundView
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.