Reencoder (iOS, Swift): re-encode a movie with AVAssetReader and AVAssetWriter
import AVFoundation
import os.log
/// https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/05_Export.html
/**
Steps
- Use serialization queues to handle the asynchronous nature of reading and writing audiovisual data
- Initialize an asset reader and configure two asset reader outputs, one for audio and one for video
- Initialize an asset writer and configure two asset writer inputs, one for audio and one for video
- Use an asset reader to asynchronously supply media data to an asset writer through two different output/input combinations
- Use a dispatch group to be notified of completion of the reencoding process
- Allow a user to cancel the reencoding process once it has begun
*/
/// Minimal error type used by the completion handler below, defined here so the snippet compiles on its own.
enum VideoExporterError: Error {
    case completeWithError
}

class Reencoder {

    func reencode(url: URL, completion: @escaping ((Result<URL, Error>) -> Void)) {
        let dispatchGroup = DispatchGroup()
        let mainSerializationQueue = DispatchQueue(label: "\(self) serialization queue")
        /// Create the serialization queue to use for reading and writing the audio data
        let rwAudioSerializationQueue = DispatchQueue(label: "\(self) rw audio serialization queue")
        /// Create the serialization queue to use for reading and writing the video data
        let rwVideoSerializationQueue = DispatchQueue(label: "\(self) rw video serialization queue")

        let asset = AVURLAsset(url: url)
        let filepath = NSTemporaryDirectory() + UUID().uuidString + ".mov"
        let outputURL = URL(fileURLWithPath: filepath)

        /// Make sure that no file exists at the output path for the asset writer
        if FileManager.default.fileExists(atPath: filepath) {
            do {
                try FileManager.default.removeItem(atPath: filepath)
            } catch {
                completion(.failure(error))
                return
            }
        }
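
        /// Load the "tracks" key asynchronously, then do the remaining setup and re-encoding work on the main serialization queue.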
        asset.loadValuesAsynchronously(forKeys: ["tracks"]) {
            mainSerializationQueue.async {
                /// Make sure the "tracks" key actually loaded before building the reader and writer.
                guard asset.statusOfValue(forKey: "tracks", error: nil) == .loaded else {
                    completion(.failure(VideoExporterError.completeWithError))
                    return
                }

                let assetReader: AVAssetReader
                let assetWriter: AVAssetWriter
                do {
                    assetReader = try AVAssetReader(asset: asset)
                    assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
                } catch {
                    completion(.failure(error))
                    return
                }

                // MARK: - Audio Track

                /// If the reader and writer were successfully initialized, grab the audio and video asset tracks that will be used.
                guard let assetAudioTrack = asset.tracks(withMediaType: .audio).first else {
                    fatalError("The asset has no audio track")
                }

                /// Decompress the audio track to Linear PCM and create the asset reader output.
                let decompressionAudioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatLinearPCM]
                let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: assetAudioTrack, outputSettings: decompressionAudioSettings)
                guard assetReader.canAdd(assetReaderAudioOutput) else {
                    fatalError("Cannot add the audio output to the asset reader")
                }
                assetReader.add(assetReaderAudioOutput)
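
                /// Build a zeroed stereo AudioChannelLayout and wrap it in Data so it can be passed under AVChannelLayoutKey.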
                var stereoChannelLayout = AudioChannelLayout()
                memset(&stereoChannelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
                stereoChannelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo
                let channelLayoutAsData = Data(bytes: &stereoChannelLayout, count: MemoryLayout<AudioChannelLayout>.size)

                /// Compress the audio back to AAC when writing.
                let compressionAudioSettings: [String: Any] = [
                    AVFormatIDKey: kAudioFormatMPEG4AAC,
                    AVEncoderBitRateKey: 128000,
                    AVSampleRateKey: 44100,
                    AVChannelLayoutKey: channelLayoutAsData,
                    AVNumberOfChannelsKey: 2
                ]
                let assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: compressionAudioSettings)
                guard assetWriter.canAdd(assetWriterAudioInput) else {
                    fatalError("Cannot add the audio input to the asset writer")
                }
                assetWriter.add(assetWriterAudioInput)

                // MARK: - Video Track

                guard let assetVideoTrack = asset.tracks(withMediaType: .video).first else {
                    fatalError("The asset has no video track")
                }

                /// Decompress the video track to a YUV pixel format and create the asset reader output.
                let decompressionVideoSettings: [String: Any] = [
                    kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
                ]
                let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: assetVideoTrack, outputSettings: decompressionVideoSettings)
                assetReaderVideoOutput.supportsRandomAccess = true
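                /// Random access lets callers re-read time ranges via resetForReadingTimeRanges(_:); it is not strictly needed for this single linear pass.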
                if assetReader.canAdd(assetReaderVideoOutput) {
                    assetReader.add(assetReaderVideoOutput)
                }

                let trackDimensions = assetVideoTrack.naturalSize
                let videoCompressionProps = [AVVideoAverageBitRateKey: assetVideoTrack.estimatedDataRate]
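
                /// Re-encode the video as H.264 at the source track's dimensions, targeting roughly its original bit rate.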
                let videoSettings: [String: Any] = [
                    AVVideoCodecKey: AVVideoCodecType.h264,
                    AVVideoWidthKey: trackDimensions.width,
                    AVVideoHeightKey: trackDimensions.height,
                    AVVideoCompressionPropertiesKey: videoCompressionProps
                ]
                let assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
                assetWriterVideoInput.expectsMediaDataInRealTime = false
                assetWriterVideoInput.transform = assetVideoTrack.preferredTransform
                if assetWriter.canAdd(assetWriterVideoInput) {
                    assetWriter.add(assetWriterVideoInput)
                }

                // MARK: - Reencoding the Asset

                /// Attempt to start the asset reader.
                guard assetReader.startReading() else {
                    completion(.failure(assetReader.error ?? VideoExporterError.completeWithError))
                    return
                }

                /// If the reader started successfully, attempt to start the asset writer.
                guard assetWriter.startWriting() else {
                    completion(.failure(assetWriter.error ?? VideoExporterError.completeWithError))
                    return
                }

                assetWriter.startSession(atSourceTime: .zero)
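
                /// Each track enters the dispatch group once and leaves when its writer input is marked as finished, so the group fires only after both tracks are done.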
                dispatchGroup.enter()
                assetWriterAudioInput.requestMediaDataWhenReady(on: rwAudioSerializationQueue) {
                    while assetWriterAudioInput.isReadyForMoreMediaData {
                        if let sampleBuffer = assetReaderAudioOutput.copyNextSampleBuffer() {
                            let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                            print("⏰", presentationTime.seconds)
                            assetWriterAudioInput.append(sampleBuffer)
                        } else {
                            assetWriterAudioInput.markAsFinished()
                            dispatchGroup.leave()
                            break
                        }
                    }

                    print("Audio Input ", terminator: "")
                    switch assetReader.status {
                    case .cancelled:
                        print("cancelled")
                    case .completed:
                        print("completed")
                    case .reading:
                        print("reading")
                    case .failed:
                        print("failed")
                    case .unknown:
                        print("unknown")
                    @unknown default:
                        fatalError("Unhandled AVAssetReader.Status case")
                    }
                }

                /// Specify the block to execute when the asset writer is ready for video media data, and specify the queue to call it on
                dispatchGroup.enter()
                assetWriterVideoInput.requestMediaDataWhenReady(on: rwVideoSerializationQueue) {
                    while assetWriterVideoInput.isReadyForMoreMediaData {
                        if let sampleBuffer = assetReaderVideoOutput.copyNextSampleBuffer() {
                            let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                            print("⏰", presentationTime.seconds)
                            assetWriterVideoInput.append(sampleBuffer)
                        } else {
                            assetWriterVideoInput.markAsFinished()
                            dispatchGroup.leave()
                            break
                        }
                    }

                    print("Video Input ", terminator: "")
                    switch assetReader.status {
                    case .cancelled:
                        print("cancelled")
                    case .completed:
                        print("completed")
                    case .reading:
                        print("reading")
                    case .failed:
                        print("failed")
                    case .unknown:
                        print("unknown")
                    @unknown default:
                        fatalError("Unhandled AVAssetReader.Status case")
                    }
                }

                /// When both inputs have finished, finalize the output file and report the result.
                dispatchGroup.notify(queue: mainSerializationQueue) {
                    print("⚠️ both inputs finished")
                    assetWriter.finishWriting {
                        print("⚠️ finish writing completed")
                        print("💡 assetReader status: \(assetReader.status.rawValue)")
                        print("💡 assetWriter status: \(assetWriter.status.rawValue)")
                        completion(.success(outputURL))
                    }
                }
            }
        }
    }
}
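
/// A minimal usage sketch (not part of the original gist): `inputURL` is a hypothetical local movie
/// URL supplied by the caller. Note that the completion handler is invoked on a private serialization
/// queue, so dispatch back to the main queue before touching any UI.
func reencodeExample(inputURL: URL) {
    let reencoder = Reencoder()
    reencoder.reencode(url: inputURL) { result in
        switch result {
        case .success(let outputURL):
            print("Re-encoded movie written to \(outputURL)")
        case .failure(let error):
            print("Re-encoding failed: \(error)")
        }
    }
}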