Created
April 19, 2018 18:28
-
-
Save ryanhanks/05ea85ff655347fd5df65234d0b5a103 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Converted to Swift 4 by Swiftify v4.1.6680 - https://objectivec2swift.com/
import AVFoundation
import Foundation
import libkern
/// Capture delegate that writes one JPEG photo to `path` and reports the
/// outcome exactly once through a `FlutterResult` callback.
class FLTSavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate {
    /// Destination file path for the JPEG data.
    private(set) var path: String
    /// Flutter callback invoked with nil on success or an error payload.
    private(set) var result: FlutterResult
    /// Used to keep the delegate alive until didFinishProcessingPhotoSampleBuffer.
    /// AVCapturePhotoOutput does not retain its delegate, so we retain ourselves.
    private var selfReference: FLTSavePhotoDelegate?

    /// Designated initializer.
    /// - Parameters:
    ///   - path: File path the captured JPEG will be written to.
    ///   - result: Callback that receives the capture outcome.
    init(path: String, result: @escaping FlutterResult) {
        self.path = path
        self.result = result
        super.init()
        selfReference = self
    }

    /// Backward-compatible factory matching the original Obj-C style call site
    /// (`FLTSavePhotoDelegate.initWithPath(_:result:)`).
    static func initWithPath(_ filename: String?, result: @escaping FlutterResult) -> FLTSavePhotoDelegate {
        return FLTSavePhotoDelegate(path: filename ?? "", result: result)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                     previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                     resolvedSettings: AVCaptureResolvedPhotoSettings,
                     bracketSettings: AVCaptureBracketedStillImageSettings?,
                     error: Error?) {
        // Break the self-retain cycle now that the capture has completed.
        selfReference = nil
        if let error = error {
            result((error as NSError).flutterError())
            return
        }
        var data: Data? = nil
        if let buffer = photoSampleBuffer {
            data = AVCapturePhotoOutput.jpegPhotoDataRepresentation(
                forJPEGSampleBuffer: buffer,
                previewPhotoSampleBuffer: previewPhotoSampleBuffer)
        }
        // TODO(sigurdm): Consider writing file asynchronously.
        let success = (data as NSData?)?.write(toFile: path, atomically: true) ?? false
        if !success {
            result(FlutterError(code: "IOError", message: "Unable to write file", details: nil))
            return
        }
        result(nil)
    }
}
/// Drives a single camera: streams preview frames into a Flutter texture and
/// captures still photos on demand.
class FLTCam: NSObject, FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, FlutterStreamHandler {
    private(set) var textureId: Int64 = 0
    /// Invoked on every new frame so the plugin can notify the texture registry.
    var onFrameAvailable: (() -> Void)?
    var eventChannel: FlutterEventChannel?
    /// Sink for camera events; nil while no Dart-side listener is attached.
    var eventSink: FlutterEventSink?
    private(set) var captureSession: AVCaptureSession?
    private(set) var captureDevice: AVCaptureDevice?
    private(set) var capturePhotoOutput: AVCapturePhotoOutput?
    /// Newest frame awaiting pickup by `copyPixelBuffer()`. Guarded by `bufferLock`.
    private(set) var latestPixelBuffer: CVPixelBuffer?
    private(set) var previewSize = CGSize.zero
    private(set) var captureSize = CGSize.zero
    /// Protects `latestPixelBuffer`: written on the sample-buffer delegate
    /// queue, read from the Flutter engine via `copyPixelBuffer()`.
    private let bufferLock = NSLock()

    /// - Parameters:
    ///   - cameraName: `AVCaptureDevice.uniqueID` of the camera to open.
    ///   - resolutionPreset: "high", "medium" or "low".
    /// - Throws: An error when the device cannot be found or its input cannot
    ///   be created (e.g. permission denied).
    init(cameraName: String?, resolutionPreset: String?) throws {
        super.init()
        let session = AVCaptureSession()
        captureSession = session

        let preset: AVCaptureSession.Preset
        switch resolutionPreset {
        case "high":
            preset = .high
        case "medium":
            preset = .medium
        default:
            // Mirrors the original assertion: anything else must be "low".
            assert(resolutionPreset == "low", "Unknown resolution preset \(resolutionPreset ?? "")")
            preset = .low
        }
        session.sessionPreset = preset

        guard let device = AVCaptureDevice(uniqueID: cameraName ?? "") else {
            throw NSError(domain: "FLTCam", code: -1,
                          userInfo: [NSLocalizedDescriptionKey: "Camera not found: \(cameraName ?? "")"])
        }
        captureDevice = device
        // Unlike the original `try?`, this surfaces the AVFoundation error to
        // the caller instead of silently producing a dead session.
        let input = try AVCaptureDeviceInput(device: device)

        let dimensions = CMVideoFormatDescriptionGetDimensions(device.activeFormat.formatDescription)
        previewSize = CGSize(width: CGFloat(dimensions.width), height: CGFloat(dimensions.height))

        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        output.alwaysDiscardsLateVideoFrames = true
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)

        // Wire the connection manually so mirroring/orientation can be set
        // before the connection is attached to the session.
        let connection = AVCaptureConnection(inputPorts: input.ports, output: output)
        if device.position == .front {
            connection.isVideoMirrored = true
        }
        connection.videoOrientation = .portrait

        session.addInputWithNoConnections(input)
        session.addOutputWithNoConnections(output)
        session.addConnection(connection)

        let photoOutput = AVCapturePhotoOutput()
        capturePhotoOutput = photoOutput
        session.addOutput(photoOutput)
    }

    /// Begins streaming frames.
    func start() {
        captureSession?.startRunning()
    }

    /// Stops streaming frames.
    func stop() {
        captureSession?.stopRunning()
    }

    /// Captures a still photo and writes it to `filename` as JPEG.
    /// (Fixes the original, which passed the undefined `path` instead of the
    /// `filename` parameter.)
    func capture(toFile filename: String?, result: @escaping FlutterResult) {
        let settings = AVCapturePhotoSettings()
        // The delegate keeps itself alive until the capture callback fires.
        capturePhotoOutput?.capturePhoto(with: settings,
                                         delegate: FLTSavePhotoDelegate.initWithPath(filename, result: result))
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        // CVPixelBuffer is reference-counted by ARC in Swift: no manual
        // CFRetain/CFRelease (the original hand-rolled Obj-C retains leaked).
        bufferLock.lock()
        latestPixelBuffer = newBuffer
        bufferLock.unlock()
        onFrameAvailable?()
    }

    /// Tears the session down and detaches every input and output.
    func close() {
        guard let session = captureSession else { return }
        session.stopRunning()
        for input in session.inputs {
            session.removeInput(input)
        }
        for output in session.outputs {
            session.removeOutput(output)
        }
    }

    /// Hands the newest frame to the Flutter engine (at most once per frame)
    /// and clears the slot.
    func copyPixelBuffer() -> CVPixelBuffer? {
        bufferLock.lock()
        defer { bufferLock.unlock() }
        let buffer = latestPixelBuffer
        latestPixelBuffer = nil
        return buffer
    }

    // MARK: - FlutterStreamHandler

    func onCancel(withArguments arguments: Any?) -> FlutterError? {
        eventSink = nil
        return nil
    }

    func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? {
        eventSink = events
        return nil
    }
}
/// Flutter plugin entry point: routes "plugins.flutter.io/camera" method calls
/// to per-camera `FLTCam` instances.
class CameraPlugin: NSObject, FlutterPlugin {
    private(set) weak var registry: (NSObject & FlutterTextureRegistry)?
    private(set) weak var messenger: (NSObject & FlutterBinaryMessenger)?
    /// Active cameras keyed by their Flutter texture id.
    private(set) var cams = [NSNumber: FLTCam]()

    class func register(withRegistrar registrar: (NSObject & FlutterPluginRegistrar)?) {
        guard let registrar = registrar else { return }
        let channel = FlutterMethodChannel(name: "plugins.flutter.io/camera",
                                           binaryMessenger: registrar.messenger())
        let instance = CameraPlugin(registry: registrar.textures(), messenger: registrar.messenger())
        registrar.addMethodCallDelegate(instance, channel: channel)
    }

    init(registry: (NSObject & FlutterTextureRegistry)?, messenger: (NSObject & FlutterBinaryMessenger)?) {
        self.registry = registry
        self.messenger = messenger
        super.init()
    }

    func handle(_ call: FlutterMethodCall?, result: @escaping FlutterResult) {
        guard let call = call else {
            result(FlutterMethodNotImplemented)
            return
        }
        let args = call.arguments as? [String: Any]
        switch call.method {
        case "init":
            // Dispose every camera left over from a previous engine instance.
            for (textureId, cam) in cams {
                registry?.unregisterTexture(textureId.int64Value)
                cam.close()
            }
            cams.removeAll()
            result(nil)
        case "list":
            let discoverySession = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
            let devices = discoverySession.devices
            var reply = [[String: Any]]()
            reply.reserveCapacity(devices.count)
            for device in devices {
                let lensFacing: String
                switch device.position {
                case .back:
                    lensFacing = "back"
                case .front:
                    lensFacing = "front"
                default:
                    lensFacing = "external"
                }
                reply.append(["name": device.uniqueID, "lensFacing": lensFacing])
            }
            result(reply)
        case "create":
            createCamera(cameraName: args?["cameraName"] as? String,
                         resolutionPreset: args?["resolutionPreset"] as? String,
                         result: result)
        default:
            // All remaining calls address an existing camera by texture id.
            let textureId = (args?["textureId"] as? NSNumber) ?? 0
            let cam = cams[textureId]
            switch call.method {
            case "start":
                cam?.start()
                result(nil)
            case "stop":
                cam?.stop()
                result(nil)
            case "capture":
                cam?.capture(toFile: args?["path"] as? String, result: result)
            case "dispose":
                registry?.unregisterTexture(textureId.int64Value)
                cam?.close()
                cams.removeValue(forKey: textureId)
                result(nil)
            default:
                result(FlutterMethodNotImplemented)
            }
        }
    }

    /// Opens a camera, registers its texture and event channel, and replies
    /// with the texture id plus preview/capture dimensions.
    private func createCamera(cameraName: String?, resolutionPreset: String?, result: @escaping FlutterResult) {
        do {
            let cam = try FLTCam(cameraName: cameraName, resolutionPreset: resolutionPreset)
            guard let registry = registry else {
                result(FlutterError(code: "NoTextureRegistry",
                                    message: "Texture registry unavailable", details: nil))
                return
            }
            let textureId = registry.registerTexture(cam)
            cams[NSNumber(value: textureId)] = cam
            // Weak capture: the plugin outlives no one here, but avoid a cycle
            // through cam.onFrameAvailable -> self -> cams -> cam.
            cam.onFrameAvailable = { [weak self] in
                self?.registry?.textureFrameAvailable(textureId)
            }
            let eventChannel = FlutterEventChannel(
                name: "flutter.io/cameraPlugin/cameraEvents\(textureId)",
                binaryMessenger: messenger)
            eventChannel.setStreamHandler(cam)
            cam.eventChannel = eventChannel
            result([
                "textureId": textureId,
                "previewWidth": cam.previewSize.width,
                "previewHeight": cam.previewSize.height,
                "captureWidth": cam.captureSize.width,
                "captureHeight": cam.captureSize.height,
            ])
        } catch {
            result((error as NSError).flutterError())
        }
    }
}
extension NSError {
    /// Bridges this error to a `FlutterError` so it can cross the platform
    /// channel: the numeric code and domain become Flutter's code/message and
    /// the localized description travels as details.
    /// (The original also declared a stored property here, which Swift does
    /// not allow in extensions; it has been removed.)
    func flutterError() -> FlutterError {
        return FlutterError(code: "Error \(code)", message: domain, details: localizedDescription)
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Awesome! Great work.