Render a layer tree to an MTLTexture (using an MTLHeap)
import SWBShared2
import Metal
import AppKit
import CoreImage
import CoreGraphics
import QuartzCore
@globalActor
public struct CALayerToMetalRendererActor {
    public actor ActorType {
    }

    public static let shared: ActorType = ActorType()
}
/*
Render from the layer tree to a Metal texture.
Code ideas from:
https://stackoverflow.com/questions/56150363/rendering-animated-calayer-off-screen-using-carenderer-with-mtltexture
*/
/*
Related to black frames: "Core Image defers the rendering until the client requests the access to the frame buffer, i.e. CVPixelBufferLockBaseAddress."
https://stackoverflow.com/questions/56018503/making-cicontext-renderciimage-cvpixelbuffer-work-with-avassetwriter

Regarding CARenderer owning the layer:
https://stackoverflow.com/questions/73467494/carenderer-draws-nothing-into-bound-texture
*/
/*
Discussion on drawing on a background thread
https://stackoverflow.com/questions/51812966/is-drawing-to-an-mtkview-or-cametallayer-required-to-take-place-on-the-main-thre#comment90593639_51814181
*/
/*
Using the queue (kCARendererMetalCommandQueue):
The linked example passes the command queue directly to the CARenderer:
https://github.com/jrmuizel/carenderer-yuv/blob/main/main.mm
A Swift sketch of the same idea follows just below.
*/
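
/*
A minimal Swift sketch of that idea (an addition, not part of the original gist):
constructing the CARenderer with our own command queue via the options dictionary.
Assumes kCARendererMetalCommandQueue is exposed to Swift the same way as
kCARendererColorSpace (used further down); the function name is purely illustrative.
*/
private func makeRendererSharingQueue(texture: MTLTexture, queue: MTLCommandQueue) -> CARenderer {
    // CARenderer will encode its work onto the supplied queue instead of creating its own.
    CARenderer(mtlTexture: texture, options: [kCARendererMetalCommandQueue: queue as Any])
}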
/*
Not directly related to CARenderer, but one of the better articles I've seen on Metal in general:
https://medium.com/@nathan.fooo/real-time-player-with-metal-part-1-3a670f33417d
*/
// for some reason this has to be done on main, else the resulting texture is black/pink (ALL THE TIME)
public class CALayerToMetalRenderer: CountableInstance {
    private let textureDescriptor: MTLTextureDescriptor

    public enum Errors: Error {
        case cannotSetupTextures
    }

    public private(set) var id = UUID()
    private var device: MTLDevice!
    private var queue: MTLCommandQueue!
    private var renderer: CARenderer?
    private var stopped: Bool = false
    private var renderSize: NSSize
    public private(set) var textureHeap: MetalTextureHeap

    public init(renderSize: NSSize, device: MTLDevice? = nil) throws {
        self.renderSize = renderSize

        textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm,
                                                                     width: Int(renderSize.width),
                                                                     height: Int(renderSize.height),
                                                                     mipmapped: false)
        // .private means the texture is GPU only. No sync blit will be done during rendering.
        textureDescriptor.storageMode = .private
        textureDescriptor.usage = [.shaderRead, .renderTarget]

        // 10 should be OK. Past that, we've got some SERIOUS problems going on
        guard let newTextureHeap = try MetalTextureHeap(size: renderSize, maxTextures: 10) else {
            throw Errors.cannotSetupTextures
        }
        textureHeap = newTextureHeap

        if let d = device {
            self.device = d
        } else {
            // Gives us the GPU associated with the main display
            self.device = MTLCreateSystemDefaultDevice()
        }
        queue = self.device.makeCommandQueue()

        Task {
            await InstanceCounter.shared.instanceInit(self)
        }
    }
    // Note: these options are not currently passed to the CARenderer created in renderLayerToMTLTexture.
    var rendererOptions: [AnyHashable: Any] {
        // let options: [AnyHashable: Any] = [kCARendererColorSpace: colorSpace as Any, kCARendererMetalCommandQueue: queue as Any]
        /*
         The source media (CALayers) is sRGB - I wonder if this is true if you have a P3 display?
         */
        [kCARendererColorSpace: CGColorSpace(name: CGColorSpace.sRGB) as Any]
    }
    deinit {
        renderer?.layer = nil
        InstanceCounter.shared.safeDeinit(self)
    }

    @CALayerToMetalRendererActor
    func cleanUp() {
        stopped = true
        renderer?.layer = nil
        CATransaction.flush()
        CATransaction.commit()
    }
    private func makeDescriptor(forTexture: MTLTexture) -> MTLRenderPassDescriptor {
        let descriptor = MTLRenderPassDescriptor()
        let isKnownDevMachine = NSApplication.isAKnownDevMachine
        descriptor.colorAttachments[0].texture = forTexture
        descriptor.colorAttachments[0].loadAction = .clear
        if isKnownDevMachine {
            // pink! so it is more visible to us
            descriptor.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 0.1, 0.5, 1.0)
        } else {
            descriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.0, 0.0, 0.0, 1.0)
        }
        descriptor.colorAttachments[0].storeAction = .store
        return descriptor
    }
    public func setupRendererWith(renderer: CARenderer, layer: CALayer) -> CARenderer {
        if layer != renderer.layer {
            renderer.layer = layer
            // https://stackoverflow.com/questions/73467494/carenderer-draws-nothing-into-bound-texture
            CATransaction.flush()
            CATransaction.commit()
        }
        let rect = NSMakeRect(0, 0, renderSize.width, renderSize.height)
        if !rect.equalTo(renderer.bounds, tolerance: 1) {
            layer.bounds = rect
            renderer.bounds = rect
        }
        return renderer
    }
    @CALayerToMetalRendererActor
    public func renderLayerToMTLTexture(layer: CALayer, size: NSSize) async -> MTLTexture? {
        if stopped {
            return nil
        }

        var target: MTLTexture? = nil
        do {
            target = try textureHeap.newTexture(descriptor: textureDescriptor)
        } catch {
            V2Logging.rendering.error("Could not create texture: \(error)")
            return nil
        }
        guard let target = target else {
            return nil
        }

        if renderer == nil {
            renderer = CARenderer(mtlTexture: target)
        }
        assert(renderer != nil, "Renderer should not be nil")

        let rendererToUse = setupRendererWith(renderer: renderer!, layer: layer)
        rendererToUse.setDestination(target)
        let currentDescriptor = makeDescriptor(forTexture: target)
        /*
         This first pass is, I think, only needed so the descriptor's .clear loadAction
         actually clears the buffer.
         */
        if let renderCommandBuffer: MTLCommandBuffer = queue.makeCommandBuffer() {
            let renderCommandEncoder: MTLRenderCommandEncoder = renderCommandBuffer.makeRenderCommandEncoder(descriptor: currentDescriptor)!
            renderCommandEncoder.endEncoding()
            renderCommandBuffer.commit()
            renderCommandBuffer.waitUntilCompleted()

            rendererToUse.beginFrame(atTime: CACurrentMediaTime(), timeStamp: nil)
            rendererToUse.addUpdate(rendererToUse.bounds)
            rendererToUse.render()
            rendererToUse.endFrame()

            /*
             Trying to sync the texture (to get around the black/pink frame problem).
             Only applies to .managed textures; the .private heap textures skip this.
             */
            if let blitCommandBuffer: MTLCommandBuffer = queue.makeCommandBuffer(), target.storageMode == .managed {
                let blitCommandEncoder: MTLBlitCommandEncoder = blitCommandBuffer.makeBlitCommandEncoder()!
                blitCommandEncoder.synchronize(resource: target)
                blitCommandEncoder.endEncoding()
                await withCheckedContinuation { continuation in
                    blitCommandBuffer.addCompletedHandler { _ in
                        V2Logging.rendering.debug("blitCommandBuffer completed")
                        continuation.resume()
                    }
                    blitCommandBuffer.commit()
                    blitCommandBuffer.waitUntilScheduled()
                }
            }
        }

        // This was me showing myself that the texture did NOT have an IOSurface
        // DispatchQueue.once {
        //     V2Logging.rendering.info("Renderer setup. textureIO surface: \(texture.iosurface)")
        // }
        return target
    }
}
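
/*
MetalTextureHeap comes from SWBShared2 and isn't included in this gist. Below is a
minimal sketch (an assumption, not the original implementation) of a heap-backed
allocator with the same interface the class above relies on: a failable, throwing
init(size:maxTextures:) and newTexture(descriptor:), backed by an MTLHeap.
*/
public final class ExampleMetalTextureHeap {
    public enum Errors: Error {
        case allocationFailed
    }

    private let heap: MTLHeap

    public init?(size: NSSize, maxTextures: Int, device: MTLDevice? = nil) throws {
        guard let device = device ?? MTLCreateSystemDefaultDevice() else { return nil }

        // Describe the textures the heap must hold; matches the descriptor used by
        // CALayerToMetalRenderer above (.private, rgba8Unorm, render target).
        let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm,
                                                                  width: Int(size.width),
                                                                  height: Int(size.height),
                                                                  mipmapped: false)
        descriptor.storageMode = .private
        descriptor.usage = [.shaderRead, .renderTarget]

        // Ask the device how much heap space one such texture needs, round up to its
        // alignment, and size the heap for `maxTextures` of them.
        let sizeAndAlign = device.heapTextureSizeAndAlign(descriptor: descriptor)
        let alignedSize = (sizeAndAlign.size + sizeAndAlign.align - 1) / sizeAndAlign.align * sizeAndAlign.align

        let heapDescriptor = MTLHeapDescriptor()
        heapDescriptor.storageMode = .private
        heapDescriptor.size = alignedSize * maxTextures
        guard let heap = device.makeHeap(descriptor: heapDescriptor) else { return nil }
        self.heap = heap
    }

    public func newTexture(descriptor: MTLTextureDescriptor) throws -> MTLTexture {
        // Sub-allocates from the heap; fails once the heap is full.
        guard let texture = heap.makeTexture(descriptor: descriptor) else {
            throw Errors.allocationFailed
        }
        return texture
    }
}

/*
Usage sketch (also an addition, not from the original gist): render a single
CATextLayer into a texture once. The layer, size and text here are placeholders.
*/
@CALayerToMetalRendererActor
func exampleRenderOnce() async throws -> MTLTexture? {
    let size = NSSize(width: 1920, height: 1080)

    let layer = CATextLayer()
    layer.string = "Hello"
    layer.frame = CGRect(origin: .zero, size: size)

    let renderer = try CALayerToMetalRenderer(renderSize: size)
    // Returns nil if the texture heap is exhausted or the renderer has been stopped.
    return await renderer.renderLayerToMTLTexture(layer: layer, size: size)
}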