Skip to content

Instantly share code, notes, and snippets.

@Jonovono
Created June 17, 2024 16:25
Show Gist options
  • Save Jonovono/8e4c02bb6e9b030565ec1566edb253da to your computer and use it in GitHub Desktop.
Using AVFoundation to combine images and audio into a video
@MainActor
class Mixer: ObservableObject {
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?
private var startTime: CMTime?
private var audioFormatDescription: CMFormatDescription?
public var frameCount: Int = 0
public var frameRate: Double = 30
public var currentFrame: Int = 0
func startMixing(renderer: ImageRenderer<CounterTestView>) {
let fileManager = FileManager.default
let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask).first!
let randomFolderName = UUID().uuidString
let randomFolderURL = documentDirectory.appendingPathComponent(randomFolderName)
do {
try fileManager.createDirectory(at: randomFolderURL, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Error creating directory: \(error)")
}
let fileURL = randomFolderURL.appendingPathComponent("output.mov")
print("File URL: \(fileURL)")
do {
assetWriter = try AVAssetWriter(outputURL: fileURL, fileType: .mov)
let videoSettings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: 400,
AVVideoHeightKey: 700
]
videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
if let assetWriter = assetWriter, assetWriter.canAdd(videoInput!), assetWriter.canAdd(audioInput!) {
print("[DEBUG] assetwriter set")
assetWriter.add(videoInput!)
assetWriter.add(audioInput!)
let sourcePixelBufferAttributes: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
kCVPixelBufferWidthKey as String: 400,
kCVPixelBufferHeightKey as String: 700,
kCVPixelBufferIOSurfacePropertiesKey as String: [:]
]
pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
assetWriterInput: videoInput!,
sourcePixelBufferAttributes: sourcePixelBufferAttributes
)
assetWriter.startWriting()
startTime = CMTime.zero
assetWriter.startSession(atSourceTime: .zero)
print("[DEBUG] Asset writer started writing")
Task {
var audioSampleIndex = 0
while true {
print("letter")
// Process video frame
if let cgImage = renderer.cgImage {
print("frame")
if let pixelBuffer = self.pixelBufferFromCGImage(cgImage) {
let frameTime = CMTimeMake(value: Int64(frameCount), timescale: CMTimeScale(frameRate))
print("Rendering frame \(frameCount) with frame time \(frameTime)")
if self.pixelBufferAdaptor?.append(pixelBuffer, withPresentationTime: frameTime) == false {
print("Failed to append pixel buffer at time \(frameTime)")
} else {
print("Appended frame \(frameCount) at time \(frameTime)")
}
while self.videoInput?.isReadyForMoreMediaData == false {
try await Task.sleep(nanoseconds: 10000000) // Sleep for 10 milliseconds
}
} else {
print("Failed to create pixel buffer from CGImage")
}
} else {
print("Failed to render CGImage")
}
currentFrame = frameCount
frameCount += 1
try await Task.sleep(for: .seconds(1.0 / frameRate))
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment