@vlastachu
Created February 15, 2024 08:55
import SwiftUI
import AVFoundation
import CoreML
import Vision

struct ContentView: View {
    @ObservedObject var cameraManager = CameraManager()

    var body: some View {
        VStack {
            if let image = cameraManager.processedImage,
               let original = cameraManager.currentFrame {
                ZStack {
                    // Image(Bundle.main.url(forResource: "V", withExtension: "webp")!)
                    // Image(uiImage: UIImage(contentsOfFile: Bundle.main.url(forResource: "V", withExtension: "webp")!.path()) ?? UIImage())
                    // DroppingStarsView().frame(width: 200, height: 390)

                    // Rotate and mirror the front-camera frame into portrait orientation,
                    // then scale it up slightly to fill the view.
                    Image(uiImage: image).resizable().aspectRatio(contentMode: .fit)
                        .rotationEffect(.degrees(90))
                        .scaleEffect(x: -1, y: 1)
                        .scaleEffect(x: 1.52, y: 1.52)
                        .padding([.leading], 40)
                        .aspectRatio(contentMode: .fit)

                    // Image("V").resizable().aspectRatio(contentMode: .fit)
                    // Image(uiImage: original).resizable().mask(alignment: .center) {
                    //     Image(uiImage: image).resizable()
                    //         .aspectRatio(contentMode: .fit)
                    // }
                    // .rotationEffect(.degrees(90))
                    // .scaleEffect(x: -1, y: 1)
                    // .scaleEffect(x: 1.5, y: 1.5)
                    // .aspectRatio(contentMode: .fit)
                }
            } else {
                Text("Waiting for camera feed...")
            }
        }
        .background(
            LinearGradient(gradient: Gradient(colors: [.indigo, .pink]), startPoint: .top, endPoint: .bottom)
        )
        .onAppear {
            cameraManager.startSession()
        }
        .onDisappear {
            cameraManager.stopSession()
        }
    }
}
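// A hypothetical app entry point (not part of the original gist) showing how ContentView
// might be hosted. It assumes the target's Info.plist contains an NSCameraUsageDescription
// entry; without it the capture session cannot access the camera on device.
@main
struct CameraMattingApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}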
class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    @Published var processedImage: UIImage?
    @Published var currentFrame: UIImage?

    private var captureSession: AVCaptureSession?
    private var videoOutput: AVCaptureVideoDataOutput?

    // Vision wrapper around the bundled Core ML matting model (the generated `rvm` class).
    private let model = try! VNCoreMLModel(for: rvm(configuration: MLModelConfiguration()).model)

    override init() {
        super.init()
        self.setupCamera()
    }

    private func setupCamera() {
        let session = AVCaptureSession()
        session.beginConfiguration()
        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front),
              let videoInput = try? AVCaptureDeviceInput(device: videoDevice) else {
            return
        }
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
        }
        session.sessionPreset = .hd1280x720
        session.commitConfiguration()
        self.captureSession = session
        self.videoOutput = videoOutput
    }

    func startSession() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession?.startRunning()
        }
    }

    func stopSession() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession?.stopRunning()
        }
    }
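    // Hedged addition, not in the original gist: AVCaptureSession silently delivers no
    // frames when camera access has not been granted, so a caller may want to request
    // authorization before startSession().
    func requestCameraAccessIfNeeded(completion: @escaping (Bool) -> Void) {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            completion(true)
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                DispatchQueue.main.async { completion(granted) }
            }
        default:
            completion(false)
        }
    }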
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Publish the raw frame first so the UI has something to show even if segmentation fails.
        let publishOriginal = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
        DispatchQueue.main.async {
            self.currentFrame = publishOriginal
        }
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        let request = VNCoreMLRequest(model: model) { request, error in
            // The alpha matte is assumed to live at index 3 of the model's outputs;
            // guard against a missing or differently shaped result instead of crashing.
            guard error == nil,
                  let results = request.results,
                  results.count > 3,
                  let observation = results[3] as? VNPixelBufferObservation else {
                print("Failed to perform segmentation:", error?.localizedDescription ?? "Unknown error")
                return
            }
            if let alphaed = self.compositeAlphaOntoImage(sampleBuffer: sampleBuffer, alphaPixelBuffer: observation.pixelBuffer) {
                DispatchQueue.main.async {
                    self.processedImage = alphaed
                }
            }
            // if let maskImage = self.createMaskImage(from: multiArray!, using: pixelBuffer) {
            //     DispatchQueue.main.async {
            //         self.processedImage = maskImage
            //     }
            // }
        }
        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }
    func compositeAlphaOntoImage(sampleBuffer: CMSampleBuffer, alphaPixelBuffer: CVPixelBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        let context = CIContext()
        // Convert the original frame and the alpha matte to CIImages.
        let originalCIImage = CIImage(cvImageBuffer: imageBuffer)
        let alphaCIImage = CIImage(cvPixelBuffer: alphaPixelBuffer)
        // Use the matte as a mask over the original frame.
        let blendFilter = CIFilter(name: "CIBlendWithMask")!
        blendFilter.setValue(originalCIImage, forKey: kCIInputImageKey)
        blendFilter.setValue(alphaCIImage, forKey: kCIInputMaskImageKey)
        guard let result = blendFilter.outputImage,
              let cgImage = context.createCGImage(result, from: result.extent) else {
            return nil
        }
        return UIImage(cgImage: cgImage)

        // Earlier attempts, kept for reference:
        // Composite alpha onto original image
        // let compositedCIImage = alphaCIImage.composited(over: originalCIImage)
        // Convert composited CIImage to UIImage
        // let origin = context.createCGImage(originalCIImage, from: alphaCIImage.extent)
        // let alpha = context.createCGImage(alphaCIImage, from: alphaCIImage.extent)
        //
        // let alphaUI = UIImage(cgImage: alpha!)
        //
        // if origin == nil || alpha == nil {
        //     return nil
        // }
        //
        // let renderer = MaskRenderer(size: alphaUI.size, scale: alphaUI.scale)
        // guard let imageMask = renderer.image(actions: { context in
        //     let rect = CGRect(origin: .zero, size: renderer.sizeInPixels)
        //         .insetBy(dx: 0, dy: renderer.sizeInPixels.height / 4)
        //     let path = UIBezierPath(ovalIn: rect)
        //     context.addPath(path.cgPath)
        //     context.setFillColor(gray: 1, alpha: 1)
        //     context.drawPath(using: CGPathDrawingMode.fillStroke)
        // }) else { return nil }
        //
        // if let result = imageMask.cgImage!.masking(origin!) {
        //     return UIImage(cgImage: result)
        // }
        // return nil
    }
    private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        let context = CIContext()
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
            return nil
        }
        return UIImage(cgImage: cgImage)
    }
    func createMaskImage(from fgrImage: UIImage, using alphaArray: Data) -> UIImage? {
        guard let cgImage = fgrImage.cgImage else {
            return nil
        }
        let width = cgImage.width
        let height = cgImage.height
        let bytesPerPixel = 4 // assuming 4 bytes per pixel (RGBA)
        let bitsPerComponent = 8
        let bytesPerRow = width * bytesPerPixel

        // Create pixel buffer
        var pixelData = [UInt8](repeating: 0, count: width * height * bytesPerPixel)
        // Convert alphaArray to UInt8 array
        let alphaBytes = [UInt8](alphaArray)

        // Populate pixelData with alpha values from alphaArray
        for y in 0..<height {
            for x in 0..<width {
                let offset = (y * width + x) * bytesPerPixel
                let alphaIndex = (y * width + x)
                let alpha = alphaBytes[alphaIndex]
                // Set pixel alpha value
                pixelData[offset + 3] = alpha
            }
        }

        // Create CGImage from pixelData
        guard let providerRef = CGDataProvider(data: NSData(bytes: &pixelData, length: pixelData.count * MemoryLayout<UInt8>.size)) else {
            return nil
        }
        guard let maskImage = CGImage(width: width,
                                      height: height,
                                      bitsPerComponent: bitsPerComponent,
                                      bitsPerPixel: bytesPerPixel * bitsPerComponent,
                                      bytesPerRow: bytesPerRow,
                                      space: CGColorSpaceCreateDeviceRGB(),
                                      bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue),
                                      provider: providerRef,
                                      decode: nil,
                                      shouldInterpolate: true,
                                      intent: .defaultIntent) else {
            return nil
        }
        // Create UIImage from CGImage
        return UIImage(cgImage: maskImage)
    }
}
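// The `rvm` type used above is assumed to be the Xcode-generated class for a Core ML
// model file (rvm.mlmodel, e.g. a Robust Video Matting export) added to the target;
// the gist does not include the model itself. A hedged sketch of a non-crashing loader
// that could replace the force-try property if model loading may fail:
extension CameraManager {
    static func makeSegmentationModel() -> VNCoreMLModel? {
        guard let mlModel = try? rvm(configuration: MLModelConfiguration()).model else {
            return nil
        }
        return try? VNCoreMLModel(for: mlModel)
    }
}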