Skip to content

Instantly share code, notes, and snippets.

@romiroma
Last active September 9, 2021 08:04
Show Gist options
  • Save romiroma/b86a5adf594da4f6b257c506b399ddb0 to your computer and use it in GitHub Desktop.
Downscale a CVPixelBuffer with Metal (MPSImageLanczosScale)
import AVFoundation
import MetalPerformanceShaders
/// Receives camera frames from an `AVCaptureVideoDataOutput`, downscales each
/// frame on the GPU, and (eventually) hands the result to a consumer.
final class SamplebufferDelegate: NSObject, MTLTextureSource, AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Downstream receiver of scaled textures.
    /// NOTE(review): only checked for presence below; the scaled buffer is
    /// printed rather than delivered — confirm the intended hand-off.
    var consumer: MTLTextureConsumer?

    // Force-unwrap is acceptable here: a missing Metal device is unrecoverable.
    private let device: MTLDevice = MTLCreateSystemDefaultDevice()!
    private var textureCache: CVMetalTextureCache!

    // kCVMetalTextureUsage / kCVMetalTextureStorageMode expect CFNumber raw
    // values, not arrays of options — the original array values were ignored.
    private let texAttributes = [kCVMetalTextureUsage as String: MTLTextureUsage.shaderRead.rawValue,
                                 kCVMetalTextureStorageMode as String: MTLStorageMode.shared.rawValue] as CFDictionary

    override init() {
        super.init()
        // The cache lets CVPixelBuffers be wrapped as Metal textures zero-copy.
        guard CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache) == kCVReturnSuccess else {
            fatalError("Can't create texture cache")
        }
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Presence check only — `guard let consumer = consumer` bound a value
        // that was never used and produced a compiler warning.
        guard consumer != nil else { return }
        guard let scaledSampleBuffer = self.buffer(with: sampleBuffer) else { return }
        print(scaledSampleBuffer)
    }

    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("did drop sample buffer, alarm!")
    }
}
private extension SamplebufferDelegate {

    /// Downscales the sample buffer's image by 2x with an MPS Lanczos kernel.
    ///
    /// - Parameter sampleBuffer: A sample buffer whose image buffer is BGRA
    ///   (assumed — TODO confirm the capture output's pixel format setting).
    /// - Returns: A new half-resolution `CVPixelBuffer`, or `nil` on any failure.
    func buffer(with sampleBuffer: CMSampleBuffer) -> CVPixelBuffer? {
        autoreleasepool {
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
            let width = CVPixelBufferGetWidth(imageBuffer)
            let height = CVPixelBufferGetHeight(imageBuffer)

            // Wrap the source pixel buffer as a Metal texture (zero copy).
            var cvSourceTexture: CVMetalTexture?
            guard CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                            textureCache,
                                                            imageBuffer,
                                                            texAttributes,
                                                            .bgra8Unorm,
                                                            width,
                                                            height,
                                                            0,
                                                            &cvSourceTexture) == kCVReturnSuccess,
                  let sourceTexture = cvSourceTexture.flatMap(CVMetalTextureGetTexture) else { return nil }

            // Create the destination CVPixelBuffer FIRST and render into a
            // texture wrapped around it. The original created a free-standing
            // texture (whose `.buffer` is always nil for descriptor-created
            // textures, so it returned nil every time) and used a pixel format
            // (64RGBAHalf) that did not match .bgra8Unorm.
            let outWidth = width / 2
            let outHeight = height / 2
            var outPixelBuffer: CVPixelBuffer?
            let pixelBufferAttributes = [kCVPixelBufferMetalCompatibilityKey as String: true] as CFDictionary
            guard CVPixelBufferCreate(kCFAllocatorDefault,
                                      outWidth,
                                      outHeight,
                                      kCVPixelFormatType_32BGRA,
                                      pixelBufferAttributes,
                                      &outPixelBuffer) == kCVReturnSuccess,
                  let destinationBuffer = outPixelBuffer else { return nil }

            // Destination must be writable by the kernel (shaderRead alone is not enough).
            let destinationAttributes = [kCVMetalTextureUsage as String:
                                            MTLTextureUsage([.shaderRead, .shaderWrite]).rawValue] as CFDictionary
            var cvDestinationTexture: CVMetalTexture?
            guard CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                            textureCache,
                                                            destinationBuffer,
                                                            destinationAttributes,
                                                            .bgra8Unorm,
                                                            outWidth,
                                                            outHeight,
                                                            0,
                                                            &cvDestinationTexture) == kCVReturnSuccess,
                  let destinationTexture = cvDestinationTexture.flatMap(CVMetalTextureGetTexture) else { return nil }

            // NOTE(review): creating a command queue (and kernel) per frame is
            // expensive; consider caching them on the delegate.
            guard let commandBuffer = device.makeCommandQueue()?.makeCommandBuffer() else { return nil }
            let scaleKernel = MPSImageLanczosScale(device: device)
            var transform = MPSScaleTransform(scaleX: 0.5, scaleY: 0.5, translateX: 0, translateY: 0)
            // The transform pointer must stay valid until encode returns, so
            // encode INSIDE the pointer's scope. The original assigned the
            // pointer and let it dangle after the closure exited.
            withUnsafePointer(to: &transform) { transformPointer in
                scaleKernel.scaleTransform = transformPointer
                scaleKernel.encode(commandBuffer: commandBuffer,
                                   sourceTexture: sourceTexture,
                                   destinationTexture: destinationTexture)
            }
            // The original never committed, so waitUntilCompleted had no
            // submitted work to wait for.
            commandBuffer.commit()
            commandBuffer.waitUntilCompleted()

            return destinationBuffer
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment