@prabindh
Created July 23, 2020 13:10
Encode video from C++ using libavcodec and VAAPI
// from https://stackoverflow.com/questions/59666753/encode-video-from-c-using-libavcodec-and-vaapi

// encoder.h
#ifndef ENCODER_H
#define ENCODER_H

#include <cassert>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/hwcontext.h>
#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
}

// Encodes raw YUV420P frames to H.264 in a Matroska file, either in software
// (libx264) or with VAAPI hardware acceleration (h264_vaapi).
class Encoder
{
public:
    explicit Encoder(const bool hwAccel);
    void addFrame(AVFrame* frame);
    void flush();

    static constexpr int s_width = 640;
    static constexpr int s_height = 480;
    static constexpr int s_fps = 25;

private:
    void setup();
    void setupEncoder();
    void initFilters();
    void initInputFilters(AVFilterInOut* inputs);
    void initOutputFilters(AVFilterInOut* outputs);
    void filterFrame(AVFrame* inFrame, AVFrame* outFrame);
    void encodeFrame(AVFrame* frame);

    // members
    int m_frameId = 1;
    const bool m_hardwareAcceleration = false;
    AVCodecContext* m_encoder = nullptr;
    AVFormatContext* m_muxer = nullptr;
    AVStream* m_avStream = nullptr;
    AVBufferRef* m_device = nullptr;
    AVFrame* m_hwFrame = nullptr;
    AVFilterGraph* m_filterGraph = nullptr;
    AVFilterContext* m_bufferSrc = nullptr;
    AVFilterContext* m_bufferSink = nullptr;
    AVFilterContext* m_formatFilter = nullptr;
};

#endif // ENCODER_H
#include "encoder.h"
extern "C" {
static enum AVPixelFormat get_vaapi_format(AVCodecContext*, const enum AVPixelFormat *pix_fmts)
{
const enum AVPixelFormat *p;
for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
if (*p == AV_PIX_FMT_VAAPI)
return *p;
}
fprintf(stderr, "Failed to get HW surface format.\n");
return AV_PIX_FMT_NONE;
}
}
Encoder::Encoder(const bool hwAccel)
    : m_hardwareAcceleration(hwAccel)
{
    setup();
}

void Encoder::addFrame(AVFrame* frame)
{
    AVFrame* frameToEncode = frame;
    if (m_hardwareAcceleration) {
        // Upload the software frame to a VAAPI surface through the filter graph.
        filterFrame(frame, m_hwFrame);
        assert(m_hwFrame->format == AV_PIX_FMT_VAAPI);
        frameToEncode = m_hwFrame;
    }
    frameToEncode->pts = m_frameId++;
    encodeFrame(frameToEncode);
}
void Encoder::flush()
{
    encodeFrame(nullptr);
    av_write_trailer(m_muxer);
}
void Encoder::setup()
{
    assert(avformat_alloc_output_context2(&m_muxer, nullptr, "matroska", nullptr) == 0);
    assert(m_muxer != nullptr);
    // setupEncoder() also requests AV_CODEC_FLAG_GLOBAL_HEADER when the muxer
    // needs it; that flag must be set before the encoder is opened.
    setupEncoder();
    m_avStream = avformat_new_stream(m_muxer, nullptr);
    assert(m_avStream != nullptr);
    m_avStream->id = m_muxer->nb_streams - 1;
    m_avStream->time_base = m_encoder->time_base;
    assert(avcodec_parameters_from_context(m_avStream->codecpar, m_encoder) == 0);
    assert(avio_open(&m_muxer->pb, m_hardwareAcceleration ? "hardware.mkv" : "software.mkv", AVIO_FLAG_WRITE) == 0);
    assert(avformat_write_header(m_muxer, nullptr) == 0);
}
void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration ? "h264_vaapi" : "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    assert(videoCodec != nullptr);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};
    m_encoder->gop_size = s_fps; // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;
    // Some formats want stream headers to be separate; request this before opening the encoder.
    if (m_muxer->oformat->flags & AVFMT_GLOBALHEADER)
        m_encoder->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    if (m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;
        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);
        const AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);
        initFilters();
        // The encoder reuses the VAAPI frames context created by the filter graph's hwupload stage.
        m_encoder->hw_frames_ctx = av_buffer_ref(av_buffersink_get_hw_frames_ctx(m_bufferSink));
        assert(m_encoder->hw_frames_ctx != nullptr);
    }
    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);
    if (m_hardwareAcceleration) {
        // Frame that receives the VAAPI surfaces produced by the filter graph.
        m_hwFrame = av_frame_alloc();
        assert(m_hwFrame != nullptr);
    }
    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
void Encoder::initFilters()
{
    AVFilterInOut* inputs = nullptr;
    AVFilterInOut* outputs = nullptr;
    m_filterGraph = avfilter_graph_alloc();
    // "format=nv12,hwupload" converts the input to NV12 and uploads it to a VAAPI surface.
    assert(avfilter_graph_parse2(m_filterGraph, "format=nv12,hwupload", &inputs, &outputs) == 0);
    for (unsigned i = 0; i < m_filterGraph->nb_filters; i++) {
        m_filterGraph->filters[i]->hw_device_ctx = av_buffer_ref(m_device);
        assert(m_filterGraph->filters[i]->hw_device_ctx != nullptr);
    }
    initInputFilters(inputs);
    initOutputFilters(outputs);
    assert(avfilter_graph_config(m_filterGraph, nullptr) == 0);
    avfilter_inout_free(&inputs);
    avfilter_inout_free(&outputs);
}
void Encoder::initInputFilters(AVFilterInOut* inputs)
{
    assert(inputs != nullptr);
    assert(inputs->next == nullptr);
    // Describe the frames fed to the graph: raw YUV420P at s_width x s_height with a 1/s_fps time base.
    char args[512];
    snprintf(args, sizeof(args),
             "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
             s_width, s_height, AV_PIX_FMT_YUV420P,
             1, s_fps,
             1, 1);
    assert(avfilter_graph_create_filter(&m_bufferSrc, avfilter_get_by_name("buffer"), "in",
                                        args, nullptr, m_filterGraph) == 0);
    assert(avfilter_link(m_bufferSrc, 0, inputs->filter_ctx, inputs->pad_idx) == 0);
}
void Encoder::initOutputFilters(AVFilterInOut* outputs)
{
    assert(outputs != nullptr);
    assert(outputs->next == nullptr);
    assert(avfilter_graph_create_filter(&m_bufferSink, avfilter_get_by_name("buffersink"), "out",
                                        nullptr, nullptr, m_filterGraph) == 0);
    // Constrain the graph output to VAAPI surfaces before it reaches the sink.
    assert(avfilter_graph_create_filter(&m_formatFilter, avfilter_get_by_name("format"), "format",
                                        "vaapi_vld", nullptr, m_filterGraph) == 0);
    assert(avfilter_link(outputs->filter_ctx, outputs->pad_idx, m_formatFilter, 0) == 0);
    assert(avfilter_link(m_formatFilter, 0, m_bufferSink, 0) == 0);
}
void Encoder::filterFrame(AVFrame* inFrame, AVFrame* outFrame)
{
    // The sink expects a clean destination frame, so drop any surface left over from the previous call.
    av_frame_unref(outFrame);
    assert(av_buffersrc_add_frame_flags(m_bufferSrc, inFrame, AV_BUFFERSRC_FLAG_KEEP_REF) == 0);
    assert(av_buffersink_get_frame(m_bufferSink, outFrame) == 0);
}
void Encoder::encodeFrame(AVFrame* frame)
{
    assert(avcodec_send_frame(m_encoder, frame) == 0); // frame == nullptr flushes the encoder
    AVPacket packet;
    av_init_packet(&packet);
    packet.data = nullptr;
    packet.size = 0;
    int ret = 0;
    while (ret >= 0) {
        ret = avcodec_receive_packet(m_encoder, &packet);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return; // nothing (more) to write
        }
        assert(ret >= 0);
        // Rescale timestamps from the encoder time base to the stream time base before muxing.
        av_packet_rescale_ts(&packet, m_encoder->time_base, m_avStream->time_base);
        packet.stream_index = m_avStream->index;
        av_interleaved_write_frame(m_muxer, &packet);
        av_packet_unref(&packet);
    }
}
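
Usage sketch (not part of the original gist): a minimal main() that drives the Encoder with synthetic YUV420P frames, assuming the code above is split into encoder.h/encoder.cpp as written. The file name main.cpp, the --vaapi flag, and the gradient fill are illustrative assumptions; in a real application the frames would come from a capture or decode pipeline.

// main.cpp -- hypothetical driver, not part of the original gist
#include "encoder.h"
#include <string>

int main(int argc, char** argv)
{
    // Pass "--vaapi" (assumed flag name) to use the h264_vaapi path; otherwise libx264 is used.
    const bool useVaapi = (argc > 1 && std::string(argv[1]) == "--vaapi");
    Encoder encoder(useVaapi);

    // One reusable software frame in the format/size the Encoder expects.
    AVFrame* frame = av_frame_alloc();
    frame->format = AV_PIX_FMT_YUV420P;
    frame->width = Encoder::s_width;
    frame->height = Encoder::s_height;
    assert(av_frame_get_buffer(frame, 0) == 0);

    for (int i = 0; i < Encoder::s_fps * 5; ++i) { // roughly 5 seconds of video
        assert(av_frame_make_writable(frame) == 0);
        // Fill luma and chroma planes with a moving gradient so the output visibly changes.
        for (int y = 0; y < frame->height; ++y)
            for (int x = 0; x < frame->width; ++x)
                frame->data[0][y * frame->linesize[0] + x] = (x + y + i * 3) & 0xff;
        for (int y = 0; y < frame->height / 2; ++y)
            for (int x = 0; x < frame->width / 2; ++x) {
                frame->data[1][y * frame->linesize[1] + x] = (128 + y + i * 2) & 0xff;
                frame->data[2][y * frame->linesize[2] + x] = (64 + x + i * 5) & 0xff;
            }
        encoder.addFrame(frame);
    }
    encoder.flush();
    av_frame_free(&frame);
    return 0;
}

Linking needs libavcodec, libavformat, libavfilter and libavutil (for example via pkg-config), plus libx264 and a working VAAPI driver for the hardware path.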