Skip to content

Instantly share code, notes, and snippets.

@fdstevex
Last active May 23, 2016 02:18
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save fdstevex/9686808447e24a7c523cbbeb3655d9d4 to your computer and use it in GitHub Desktop.
SPVideoRecorder from http://stackoverflow.com/questions/6795157/what-is-a-10-6-compatible-means-of-recording-video-frames-to-a-movie-without-usi updated with changes required for ffmpeg libavformat circa ffmpeg 3.0.1
#import <Foundation/Foundation.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include <libavutil/imgutils.h>
uint64_t getNanoseconds(void);
// Records a stream of BGRA pixel buffers to an MPEG4-encoded QuickTime (.mov)
// file using ffmpeg's libavcodec/libavformat/libswscale.
// Usage: set movieFileName and framesPerSecond, call -startRecordingMovie,
// point videoFrameToEncode at a 640x480 BGRA buffer, call
// -encodeNewFrameToMovie once per frame, then -stopRecordingMovie.
@interface SPVideoRecorder : NSObject
{
// Destination path for the movie file (see movieFileName property).
NSString *movieFileName;
// Nominal capture rate; used to derive the encoder's keyframe interval.
CGFloat framesPerSecond;
// ffmpeg encoder state, owned between start/stop calls.
AVCodecContext *codecContext;
AVStream *videoStream;
AVOutputFormat *outputFormat;
AVFormatContext *outputFormatContext;
// Destination YUV 4:2:0 frame handed to the encoder.
AVFrame *videoFrame;
// Scratch BGRA frame wrapping the caller-supplied pixels for sws_scale.
AVFrame inputRGBAFrame;
// Backing storage for videoFrame's three planes.
uint8_t *pictureBuffer;
int frameColorCounter;
// Caller-owned BGRA pixel buffer read on each -encodeNewFrameToMovie.
unsigned char *videoFrameToEncode;
// Serial queue all ffmpeg calls are funneled through.
dispatch_queue_t videoRecordingQueue;
// Count-1 semaphore: drops frames while a previous encode is in flight.
dispatch_semaphore_t frameEncodingSemaphore;
// Wall-clock time (ns) when recording started; basis for packet timestamps.
uint64_t movieStartTime;
}
@property(readwrite, assign) CGFloat framesPerSecond;
@property(readwrite, assign) unsigned char *videoFrameToEncode;
@property(readwrite, copy) NSString *movieFileName;
// Movie recording control
- (void)startRecordingMovie;
- (void)encodeNewFrameToMovie;
- (void)stopRecordingMovie;
@end
#import "SPVideoRecorder.h"
#include <sys/time.h>
@implementation SPVideoRecorder
// Current wall-clock time in nanoseconds since the Unix epoch.
// Built on gettimeofday(), so the real resolution is microseconds.
uint64_t getNanoseconds(void)
{
    struct timeval currentTime;
    gettimeofday(&currentTime, NULL);
    uint64_t wholeSeconds = (uint64_t)currentTime.tv_sec;
    uint64_t fractionalMicroseconds = (uint64_t)currentTime.tv_usec;
    return wholeSeconds * NSEC_PER_SEC + fractionalMicroseconds * NSEC_PER_USEC;
}
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer. Registers ffmpeg's codecs and muxers (idempotent)
// and creates the serial queue/semaphore that serialize all encoder work.
- (id)init
{
    if (!(self = [super init]))
    {
        return nil;
    }

    /* register all the codecs */
    avcodec_register_all();
    av_register_all();
    av_log_set_level( AV_LOG_ERROR );

    // Serial queue so ffmpeg state is never touched from two threads at once.
    // (DISPATCH_QUEUE_SERIAL is the explicit spelling of the previous NULL.)
    videoRecordingQueue = dispatch_queue_create("com.sonoplot.videoRecordingQueue", DISPATCH_QUEUE_SERIAL);
    // Count of 1: -encodeNewFrameToMovie drops frames rather than queueing
    // them while a previous frame is still being encoded.
    frameEncodingSemaphore = dispatch_semaphore_create(1);
    return self;
}
#pragma mark -
#pragma mark Movie recording control
// Asynchronously opens movieFileName and configures an MPEG4 encoder inside a
// QuickTime (.mov) container. Every failure path now returns early instead of
// falling through and dereferencing NULL (the original only logged).
- (void)startRecordingMovie
{
    dispatch_async(videoRecordingQueue, ^{
        NSLog(@"Start recording to file: %@", movieFileName);

        const char *filename = [movieFileName UTF8String];

        // Use an MP4 container, in the standard QuickTime format so it's readable on the Mac
        outputFormat = av_guess_format("mov", NULL, NULL);
        if (!outputFormat) {
            NSLog(@"Could not set output format");
            return;
        }

        outputFormatContext = avformat_alloc_context();
        if (!outputFormatContext)
        {
            NSLog(@"avformat_alloc_context Error!");
            return;
        }
        outputFormatContext->oformat = outputFormat;
        snprintf(outputFormatContext->filename, sizeof(outputFormatContext->filename), "%s", filename);

        // Add a video stream to the MP4 file
        videoStream = avformat_new_stream(outputFormatContext, NULL);
        if (!videoStream)
        {
            NSLog(@"av_new_stream Error!");
            return;
        }

        // Use the MPEG4 encoder (other DiVX-style encoders aren't compatible
        // with this container, and x264 is GPL-licensed).
        AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);
        if (!codec) {
            // Was fprintf + exit(1); a missing encoder shouldn't kill the app.
            NSLog(@"MPEG4 codec not found");
            return;
        }

        codecContext = videoStream->codec;

        codecContext->codec_id = codec->id;
        codecContext->codec_type = AVMEDIA_TYPE_VIDEO;
        codecContext->bit_rate = 4800000;
        codecContext->width = 640;
        codecContext->height = 480;
        codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
        // Set it to 200 FPS so that we give a little wiggle room when recording at 50 FPS
        codecContext->time_base = (AVRational){1,200};
        videoStream->time_base = (AVRational){1,200};
        codecContext->qmin = 1;
        codecContext->qmax = 10;
        codecContext->keyint_min = (int)round(framesPerSecond);
        codecContext->i_quant_factor = 0.71;
        codecContext->qcompress = 0.6;

        // Formats like MOV keep codec extradata in the container header
        // rather than in-band.
        if (outputFormat->flags & AVFMT_GLOBALHEADER)
        {
            codecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
        }

        // Open the codec
        if (avcodec_open2(codecContext, codec, NULL) < 0)
        {
            NSLog(@"Couldn't initialize the codec");
            return;
        }

        // Open the file for recording
        if (avio_open(&outputFormatContext->pb, outputFormatContext->filename, AVIO_FLAG_WRITE) < 0)
        {
            NSLog(@"Couldn't open file");
            return;
        }

        // Start by writing the video header
        if (avformat_write_header(outputFormatContext, NULL) < 0)
        {
            NSLog(@"Couldn't write video header");
            return;
        }

        // Set up the video frame and output buffers.
        // NOTE(review): the plane pointers/linesizes below assume unpadded
        // planes (linesize == width), which holds for 640x480 but would not
        // for arbitrary sizes — confirm before generalizing the dimensions.
        int size = codecContext->width * codecContext->height;
        int pictureBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height, 32);
        pictureBuffer = (uint8_t *)av_malloc(pictureBytes);

        videoFrame = av_frame_alloc();
        videoFrame->data[0] = pictureBuffer;                       // Y plane
        videoFrame->data[1] = videoFrame->data[0] + size;          // U plane
        videoFrame->data[2] = videoFrame->data[1] + size / 4;      // V plane
        videoFrame->linesize[0] = codecContext->width;
        videoFrame->linesize[1] = codecContext->width / 2;
        videoFrame->linesize[2] = codecContext->width / 2;

        // Scratch BGRA frame used as the sws_scale source each frame.
        av_image_alloc(inputRGBAFrame.data, inputRGBAFrame.linesize, codecContext->width, codecContext->height, AV_PIX_FMT_BGRA, 32);

        frameColorCounter = 0;
        movieStartTime = getNanoseconds();
    });
}
// Encodes the current contents of videoFrameToEncode as one movie frame,
// timestamped with wall-clock time relative to movieStartTime.
// Fixes over the original:
//  - the semaphore is signaled on every exit path (an early return used to
//    leave it at 0, permanently blocking all future frames);
//  - the SwsContext is freed (it leaked once per frame);
//  - the computed pts is applied to the packet that is actually written
//    (it was previously set on a scratch packet that was then discarded).
- (void)encodeNewFrameToMovie
{
    // Drop this frame if the previous one is still being encoded.
    if (dispatch_semaphore_wait(frameEncodingSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }

    dispatch_async(videoRecordingQueue, ^{
        frameColorCounter++;

        if (codecContext == NULL)
        {
            dispatch_semaphore_signal(frameEncodingSemaphore);
            return;
        }

        // Take the input BGRA texture data and convert it to a YUV 4:2:0 planar frame
        av_image_fill_arrays(inputRGBAFrame.data, inputRGBAFrame.linesize, videoFrameToEncode, AV_PIX_FMT_BGRA, codecContext->width, codecContext->height, 32);
        struct SwsContext *img_convert_ctx = sws_getContext(codecContext->width, codecContext->height, AV_PIX_FMT_BGRA, codecContext->width, codecContext->height, AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
        sws_scale(img_convert_ctx, (const uint8_t* const *)inputRGBAFrame.data, inputRGBAFrame.linesize, 0, codecContext->height, videoFrame->data, videoFrame->linesize);
        sws_freeContext(img_convert_ctx);

        // Encode the frame
        AVPacket pkt = { 0 };
        av_init_packet(&pkt);
        int got_packet = 0;
        if (avcodec_encode_video2(codecContext, &pkt, videoFrame, &got_packet) < 0) {
            dispatch_semaphore_signal(frameEncodingSemaphore);
            return;
        }

        if (got_packet && pkt.size != 0)
        {
            // Variable-frame-rate timestamp: nanoseconds since recording
            // start, reduced to microseconds (AV_TIME_BASE_Q), rescaled to
            // the stream's 1/200 time base.
            uint64_t currentFrameTime = getNanoseconds();
            pkt.pts = av_rescale_q(((uint64_t)currentFrameTime - (uint64_t)movieStartTime) / 1000ull, AV_TIME_BASE_Q, videoStream->time_base);
            // No B-frames are configured, so dts can simply track pts.
            pkt.dts = pkt.pts;

            if (av_write_frame(outputFormatContext, &pkt) < 0)
            {
                av_log(outputFormatContext, AV_LOG_ERROR, "%s","Error while writing frame.\n");
            }
        }
        av_packet_unref(&pkt);

        dispatch_semaphore_signal(frameEncodingSemaphore);
    });
}
// Finalizes the movie file and releases all ffmpeg resources.
// Fixes over the original:
//  - guards against stop-before-start (codecContext == NULL);
//  - no exit(1) on trailer failure — log and still clean up;
//  - av_malloc'd buffer freed with av_free (was free(), allocator mismatch);
//  - AVFrame freed with av_frame_free (was av_free);
//  - the av_image_alloc'd inputRGBAFrame buffer is freed (was leaked);
//  - avformat_free_context frees the stream and its codec context, replacing
//    the original's separate av_free calls on stream/context (double free).
- (void)stopRecordingMovie
{
    dispatch_async(videoRecordingQueue, ^{
        if (codecContext == NULL)
        {
            return; // never started, or already stopped
        }

        // Write out the video trailer
        if (av_write_trailer(outputFormatContext) < 0)
        {
            av_log(outputFormatContext, AV_LOG_ERROR, "%s","Error while writing trailer.\n");
        }

        // Close out the file
        if (!(outputFormat->flags & AVFMT_NOFILE))
        {
            avio_close(outputFormatContext->pb);
        }

        // Free up all movie-related resources
        avcodec_close(codecContext);
        codecContext = NULL;

        av_free(pictureBuffer);
        pictureBuffer = NULL;

        av_frame_free(&videoFrame);
        av_freep(&inputRGBAFrame.data[0]);

        // Frees the format context together with its streams (and their
        // codec contexts), so videoStream must not be freed separately.
        avformat_free_context(outputFormatContext);
        outputFormatContext = NULL;
        videoStream = NULL;
    });
}
#pragma mark -
#pragma mark Accessors
// Explicit synthesis onto the same-named ivars declared in the header.
@synthesize framesPerSecond, videoFrameToEncode, movieFileName;
@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment