Skip to content

Instantly share code, notes, and snippets.

@afpro
Created November 17, 2021 03:51
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save afpro/cf0a72d87d63c34e3d6a62e4eeb458ba to your computer and use it in GitHub Desktop.
objc codec sample code
@import Foundation;
@import AVFoundation;
@import VideoToolbox;
@import AudioToolbox;
void videoCompressTest(void);
/// Entry point: runs the video/audio compression demo inside an
/// autorelease pool so Foundation temporaries are drained before exit.
int main(int argc, const char *argv[]) {
    @autoreleasepool {
        videoCompressTest();
    }
    return 0;
}
/// VTCompressionSession output callback: forwards each encoded sample to the
/// AVAssetWriterInput that was passed as the session's callback refCon.
/// @param outputCallbackRefCon Unretained AVAssetWriterInput (set at session creation).
/// @param sourceFrameRefCon    Per-frame refCon (unused; nil in this sample).
/// @param status               Encoder status for this frame.
/// @param infoFlags            Encode info flags (e.g. frame-dropped).
/// @param sampleBuffer         Encoded sample, or NULL on error / dropped frame.
void videoOutput(
    void * CM_NULLABLE outputCallbackRefCon,
    void * CM_NULLABLE sourceFrameRefCon,
    OSStatus status,
    VTEncodeInfoFlags infoFlags,
    CM_NULLABLE CMSampleBufferRef sampleBuffer) {
    // Failed or dropped frames arrive with a non-noErr status and/or a NULL
    // buffer; appending NULL to the writer input would be an API error.
    if (status != noErr || sampleBuffer == NULL) {
        NSLog(@"encoder callback: status=%d, no sample to append", (int)status);
        return;
    }
    NSLog(@"sample received");
    AVAssetWriterInput *input = (__bridge AVAssetWriterInput *)outputCallbackRefCon;
    // appendSampleBuffer: returns NO on failure; surface that instead of ignoring it.
    if (![input appendSampleBuffer:sampleBuffer]) {
        NSLog(@"appendSampleBuffer failed");
    }
}
/// Encodes one solid-color frame into the compression session.
/// @param vtSession Session created in videoCompressTest().
/// @param w         Frame width in pixels.
/// @param h         Frame height in pixels.
/// @param color     Packed 0xRRGGBB color; the top byte is ignored.
/// @param time      Presentation timestamp for the frame.
void videoEncodeColor(VTCompressionSessionRef vtSession, int w, int h, int color, CMTime time) {
    @autoreleasepool {
        // Let CoreVideo own the backing store. The previous implementation used
        // CVPixelBufferCreateWithBytes over an autoreleased NSMutableData with no
        // release callback, so an asynchronous encode could read freed memory; it
        // also appended 3 stray, endianness-dependent bytes of `color` past the
        // end of the frame.
        CVPixelBufferRef buffer = nil;
        if (CVPixelBufferCreate(nil, w, h, kCVPixelFormatType_24RGB, nil, &buffer) != kCVReturnSuccess) {
            NSLog(@"can't create pixel buffer");
            return;
        }
        uint8_t color_bytes[3] = {
            (uint8_t)((color >> 16) & 0xff), // R
            (uint8_t)((color >> 8) & 0xff),  // G
            (uint8_t)(color & 0xff),         // B
        };
        CVPixelBufferLockBaseAddress(buffer, 0);
        uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(buffer);
        // Honor the buffer's own stride: rows may be padded beyond w * 3 bytes.
        size_t stride = CVPixelBufferGetBytesPerRow(buffer);
        for (int row = 0; row < h; row++) {
            uint8_t *rowPtr = base + (size_t)row * stride;
            for (int col = 0; col < w; col++) {
                memcpy(rowPtr + (size_t)col * 3, color_bytes, sizeof(color_bytes));
            }
        }
        CVPixelBufferUnlockBaseAddress(buffer, 0);
        VTEncodeInfoFlags flags;
        OSStatus status = VTCompressionSessionEncodeFrame(vtSession, buffer, time, kCMTimeInvalid, nil, nil, &flags);
        CFRelease(buffer);
        if (status != noErr) {
            NSLog(@"can't encode frame: %x", status);
            return;
        }
        NSLog(@"frame encoded: %x", flags);
    }
}
/// Writes `data` to `path` non-atomically, logging when the write fails.
/// @param data Bytes to write.
/// @param path Destination file path.
void videoWriteFile(NSData *data, NSString *path) {
    BOOL ok = [data writeToFile:path atomically:NO];
    if (!ok) {
        NSLog(@"write output to %@ failed", path);
    }
}
/// End-to-end demo: writes a 3-frame H.264 video track plus one AAC audio
/// buffer (converted from raw PCM at /tmp/fuck/audio.pcm) into
/// /tmp/fuck/output.mp4 via AVAssetWriter.
void videoCompressTest(void) {
    AVAssetWriter *writer;
    AVAssetWriterInput *videoInput;
    AVAssetWriterInput *audioInput;
    // Ensure the output directory exists before writing; AVAssetWriter cannot
    // start a session if the parent directory is missing. Then drop any stale file.
    [[NSFileManager defaultManager] createDirectoryAtPath:@"/tmp/fuck"
                              withIntermediateDirectories:YES
                                               attributes:nil
                                                    error:nil];
    [[NSFileManager defaultManager] removeItemAtPath:@"/tmp/fuck/output.mp4" error:nil];
    /* init asset writer */
    {
        NSError *error;
        writer = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:@"/tmp/fuck/output.mp4"] fileType:AVFileTypeMPEG4 error:&error];
        // Check the returned object, not just the error out-parameter.
        if (writer == nil || error != nil) {
            NSLog(@"create writer failed: %@", error);
            return;
        }
    }
    /* init video input (passthrough: samples arrive pre-encoded from VideoToolbox) */
    {
        CMFormatDescriptionRef formatHint = nil;
        OSStatus status = CMVideoFormatDescriptionCreate(nil, kCMVideoCodecType_H264, 100, 100, nil, &formatHint);
        if (status != noErr) {
            NSLog(@"create video format hint failed: %x", status);
            return;
        }
        videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil sourceFormatHint:formatHint];
        CFRelease(formatHint);
        [writer addInput:videoInput];
    }
    /* init audio input (passthrough: samples arrive pre-encoded as AAC) */
    {
        AudioStreamBasicDescription asbd = {};
        asbd.mSampleRate = 44100;
        asbd.mFormatID = kAudioFormatMPEG4AAC;
        asbd.mChannelsPerFrame = 1;
        AVAudioFormat *aacFormat = [[AVAudioFormat alloc] initWithStreamDescription:&asbd];
        audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                        outputSettings:nil
                                                      sourceFormatHint:aacFormat.formatDescription];
        [writer addInput:audioInput];
    }
    // start writing
    [writer startWriting];
    [writer startSessionAtSourceTime:kCMTimeZero];
    /* video encode session */
    {
        VTCompressionSessionRef vtSession = nil;
        NSDictionary *encoderSpec = @{
            (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder: @true,
        };
        NSDictionary *sourceImageAttr = @{
            (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_24RGB),
#if TARGET_OS_IOS
            // Must be a boxed boolean; `&true` did not compile.
            (NSString *)kCVPixelBufferOpenGLESCompatibilityKey: @YES,
#endif
            (NSString *)kCVPixelBufferIOSurfacePropertiesKey: @{},
            (NSString *)kCVPixelBufferWidthKey: @100,
            (NSString *)kCVPixelBufferHeightKey: @100,
        };
        // videoInput is passed unretained as the callback refCon; it stays alive
        // because `writer` retains it for the duration of this function.
        OSStatus status = VTCompressionSessionCreate(nil, 100, 100, kCMVideoCodecType_H264, (__bridge CFDictionaryRef)encoderSpec, (__bridge CFDictionaryRef)sourceImageAttr, nil, videoOutput, (__bridge void *)videoInput, &vtSession);
        if (status != noErr) {
            NSLog(@"can't create video compress session %x", status);
            return;
        }
        status = VTCompressionSessionPrepareToEncodeFrames(vtSession);
        if (status != noErr) {
            NSLog(@"prepare session failed: %x", status);
            // Previously leaked the session on this path.
            VTCompressionSessionInvalidate(vtSession);
            CFRelease(vtSession);
            return;
        }
        NSLog(@"prepare done");
        // Three solid-color frames at 1 fps: red, green, blue.
        videoEncodeColor(vtSession, 100, 100, 0xffff0000, CMTimeMake(0, 1));
        videoEncodeColor(vtSession, 100, 100, 0xff00ff00, CMTimeMake(1, 1));
        videoEncodeColor(vtSession, 100, 100, 0xff0000ff, CMTimeMake(2, 1));
        // Flush everything up to t=3s before tearing the session down.
        VTCompressionSessionCompleteFrames(vtSession, CMTimeMake(3, 1));
        VTCompressionSessionInvalidate(vtSession);
        CFRelease(vtSession);
    }
    [videoInput markAsFinished];
    NSLog(@"video finish, status = %d", (int)writer.status);
    /* audio encode session */
    {
        NSLog(@"convert");
        NSError *error;
        AVAudioFormat *pcmFormat = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16
                                                                    sampleRate:44100
                                                                      channels:1
                                                                   interleaved:false];
        AudioStreamBasicDescription asbd = {};
        asbd.mSampleRate = 44100;
        asbd.mFormatID = kAudioFormatMPEG4AAC;
        asbd.mChannelsPerFrame = 1;
        /* fill remaining format fields (frames-per-packet etc.) from the codec */
        {
            UInt32 asbdSize = sizeof(asbd);
            AudioFormatGetProperty(kAudioFormatProperty_FormatInfo, 0, nil, &asbdSize, &asbd);
        }
        AVAudioFormat *aacFormat = [[AVAudioFormat alloc] initWithStreamDescription:&asbd];
        AVAudioConverter *converter = [[AVAudioConverter alloc] initFromFormat:pcmFormat
                                                                      toFormat:aacFormat];
        AVAudioCompressedBuffer *aac = [[AVAudioCompressedBuffer alloc] initWithFormat:aacFormat
                                                                        packetCapacity:1024
                                                                     maximumPacketSize:converter.maximumOutputPacketSize];
        // do convert: the input block feeds the whole PCM file once, then reports EOF.
        __block bool inputDone = false;
        AVAudioConverterOutputStatus convertStatus = [converter convertToBuffer:aac
                                                                          error:&error
                                                             withInputFromBlock:^AVAudioBuffer *(AVAudioPacketCount inNumberOfPackets, AVAudioConverterInputStatus *outStatus) {
            if (inputDone) {
                NSLog(@"convert input -> eof");
                *outStatus = AVAudioConverterInputStatus_EndOfStream;
                return nil;
            }
            @autoreleasepool {
                NSError *error;
                NSData *pcmBytes = [NSData dataWithContentsOfFile:@"/tmp/fuck/audio.pcm"
                                                          options:NSDataReadingMappedIfSafe
                                                            error:&error];
                if (error != nil) {
                    NSLog(@"read pcm failed: %@", error);
                    *outStatus = AVAudioConverterInputStatus_EndOfStream;
                    return nil;
                }
                // Mono 16-bit PCM: 2 bytes per frame.
                AVAudioPCMBuffer *pcm = [[AVAudioPCMBuffer alloc] initWithPCMFormat:pcmFormat
                                                                      frameCapacity:(AVAudioFrameCount)pcmBytes.length / 2];
                pcm.frameLength = pcm.frameCapacity;
                memcpy(pcm.int16ChannelData[0], pcmBytes.bytes, pcmBytes.length);
                inputDone = true;
                *outStatus = AVAudioConverterInputStatus_HaveData;
                NSLog(@"convert input -> pcm");
                return pcm;
            }
        }];
        if (convertStatus != AVAudioConverterOutputStatus_HaveData) {
            NSLog(@"convert failed: %x", (int)convertStatus);
            return;
        }
        if (error != nil) {
            NSLog(@"convert failed: %@", error);
            return;
        }
        NSLog(@"convert done, buf=%p, packets=%d, bytes=%d", (void *)aac.audioBufferList, (int)aac.packetCount, (int)aac.byteLength);
        CMSampleBufferRef cmBuf = nil;
        /* create sample buffer from the converted AAC packets */
        {
            OSStatus status = CMAudioSampleBufferCreateWithPacketDescriptions(nil, nil, false, nil, nil, aacFormat.formatDescription, (CMItemCount)aac.packetCount, kCMTimeZero, aac.packetDescriptions, &cmBuf);
            if (status != noErr) {
                NSLog(@"create audio sample buffer failed: %d", (int)status);
                return;
            }
            status = CMSampleBufferSetDataBufferFromAudioBufferList(cmBuf, nil, nil, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, aac.audioBufferList);
            if (status != noErr) {
                NSLog(@"fill audio sample from buffer list failed: %d", (int)status);
                CFRelease(cmBuf); // previously leaked on this path
                return;
            }
            status = CMSampleBufferSetDataReady(cmBuf);
            if (status != noErr) {
                NSLog(@"set sample buffer ready failed: %d", (int)status);
                CFRelease(cmBuf); // previously leaked on this path
                return;
            }
        }
        NSLog(@"append audio sample buffer");
        // Trim the AAC encoder-delay priming (1 sample at 1 MHz timescale here)
        // so players skip it; use the CM attachment-mode constant for CMSetAttachment.
        CFDictionaryRef trim = CMTimeCopyAsDictionary(CMTimeMakeWithSeconds(1, 1000000), nil);
        CMSetAttachment(cmBuf, kCMSampleBufferAttachmentKey_TrimDurationAtStart, trim, kCMAttachmentMode_ShouldNotPropagate);
        CFRelease(trim);
        if (![audioInput appendSampleBuffer:cmBuf]) {
            NSLog(@"append audio sample buffer failed");
        }
        CFRelease(cmBuf);
    }
    [audioInput markAsFinished];
    NSLog(@"audio finish, status = %d", (int)writer.status);
    /* finish: finishWriting is asynchronous, so block until its handler fires */
    {
        dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
        NSLog(@"finish writing");
        [writer finishWritingWithCompletionHandler:^{
            dispatch_semaphore_signal(semaphore);
        }];
        dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
        if (writer.status == AVAssetWriterStatusCompleted) {
            NSLog(@"writer done");
        } else if (writer.status == AVAssetWriterStatusFailed) {
            NSLog(@"writer error: %@", writer.error);
        } else {
            NSLog(@"writer exception: %d", (int)writer.status);
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment