Skip to content

Instantly share code, notes, and snippets.

@BtbN

BtbN/decoder.cpp Secret

Last active August 29, 2015 14:02
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save BtbN/040907be634ac3595433 to your computer and use it in GitHub Desktop.
Save BtbN/040907be634ac3595433 to your computer and use it in GitHub Desktop.
VDPAU-accelerated video decoder (FFmpeg/libavcodec + Qt)
#include <QtDebug>
extern "C"
{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavcodec/vdpau.h>
}
#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>
#include <QMutexLocker>
#include <QVector>
#include <QMutex>
#include <QHash>
#include <cinttypes>
#include <mutex>
#include "avdecoder_va.h"
#include "avdecoder.h"
#include "vdpau_funcs.h"
// RAII wrapper around an FFmpeg AVPacket.
// Owns the packet payload: the destructor and reset() release it via
// av_free_packet(). Move-only — copying an AVPacket would double-free data.
struct Packet
{
    // Optionally pulls the first packet from ctxt straight away.
    explicit Packet(AVFormatContext* ctxt = nullptr)
    {
        av_init_packet(&packet);
        packet.data = nullptr;
        packet.size = 0;
        if(ctxt)
            reset(ctxt);
    }
    // Non-copyable: two Packets must never own the same payload.
    Packet(const Packet&) = delete;
    Packet& operator=(const Packet&) = delete;
    // Moves transfer ownership of the payload; the source is left empty.
    // noexcept so containers can move instead of copy on reallocation.
    Packet(Packet&& other) noexcept
        :packet(other.packet)
    {
        other.packet.data = nullptr;
        other.packet.size = 0;
    }
    Packet& operator=(Packet&& other) noexcept
    {
        if(this != &other)
        {
            reset(); // drop our current payload first
            packet = other.packet;
            other.packet.data = nullptr;
            other.packet.size = 0;
        }
        return *this;
    }
    ~Packet()
    {
        reset();
    }
    // Frees the current payload and reads the next frame from ctxt.
    // Returns false (and leaves the packet empty) on EOF or read error.
    bool reset(AVFormatContext* ctxt)
    {
        reset();
        if(av_read_frame(ctxt, &packet) < 0)
        {
            packet.data = nullptr;
            packet.size = 0;
            return false;
        }
        return true;
    }
    // Frees the current payload and leaves the packet empty.
    void reset()
    {
        if(packet.data)
            av_free_packet(&packet);
        packet.data = nullptr;
        packet.size = 0;
    }
    AVPacket packet;
};
// Internal state of AVDecoderVA (pimpl). Plain aggregate — AVDecoderVA's
// constructor is responsible for initializing the POD members.
struct AVDecoderVA_private
{
bool isOpen;                                     // true once open() succeeded
QString openPath;                                // path passed to open(); used by rewind() fallback
QSharedPointer<AVFormatContext> lavfCtx;         // demuxer context
QSharedPointer<AVCodecContext> lavcCtx;          // decoder context
QSharedPointer<quint8> extradata;                // padded copy of the stream's codec extradata
AVStream *vstream;                               // first video stream in lavfCtx (non-owning)
QSharedPointer<AVFrame> frame;                   // reusable output frame
Packet curPacket;                                // packet currently being fed to the decoder
qint64 offset;                                   // bytes of curPacket already consumed
quint64 frameNum;                                // number of frames decoded since open/rewind
QSharedPointer<VDPAUFuncs> vdp;                  // loaded VDPAU entry points
int width, height;                               // stream dimensions reported by the demuxer
QSharedPointer<AVVDPAUContext> hwctx;            // ffmpeg VDPAU hwaccel context (set up in GetFormat)
QMutex vidSurfLock;                              // guards videoSurfaces (GetBuffer/ReleaseBuffer run on decoder threads)
QList<QSharedPointer<AVVdpFrame> > videoSurfaces; // pool of VDPAU video surfaces
VdpDecoder decoder;                              // VDPAU decoder handle, VDP_INVALID_HANDLE when absent
};
// Constructs the decoder shell: loads VDPAU function pointers and allocates
// the reusable output AVFrame. No file is opened until open() is called.
AVDecoderVA::AVDecoderVA(QObject *parent)
    :QObject(parent)
{
    p = new AVDecoderVA_private;
    // AVDecoderVA_private has no constructor, so every POD member must be
    // initialized here explicitly — previously vstream/offset/frameNum were
    // left indeterminate until open() succeeded.
    p->isOpen = false;
    p->vstream = nullptr;
    p->offset = 0;
    p->frameNum = 0;
    p->width = -1;
    p->height = -1;
    p->decoder = VDP_INVALID_HANDLE;
    ff_init_av();
    p->vdp = VDPAUFuncs::get();
    // Custom deleter: AVFrames must be released with av_frame_free.
    p->frame = QSharedPointer<AVFrame>(av_frame_alloc(), [](AVFrame *f)
    {
        av_frame_free(&f);
    });
}
// Releases the VDPAU-side state: destroys the decoder handle (if any) and
// then drops the surface pool. The decoder is destroyed first so no decoder
// still references a surface while the pool is being released.
static void freePrivData(AVDecoderVA_private *p)
{
    const bool haveDecoder = (p->decoder != VDP_INVALID_HANDLE);
    if(haveDecoder)
    {
        p->vdp->vdpDecoderDestroy(p->decoder);
        p->decoder = VDP_INVALID_HANDLE;
    }
    // Each AVVdpFrame's custom deleter destroys its VdpVideoSurface.
    p->videoSurfaces.clear();
}
// Tears down in dependency order: the codec context first (it may still hold
// references into the format context and surfaces), then the format context,
// then the VDPAU decoder and surface pool.
AVDecoderVA::~AVDecoderVA()
{
p->lavcCtx.clear();
p->lavfCtx.clear();
freePrivData(p);
delete p;
}
// Maps an FFmpeg codec id to the matching VDPAU decoder profile and chroma
// type. For unsupported codecs both outputs are set to 0.
// NOTE(review): 0 doubles as the "unsupported" sentinel here, but
// VDP_DECODER_PROFILE_MPEG1 is also 0, so callers testing `!profile` will
// treat MPEG-1 as unsupported — confirm whether that is intended.
void ReadVdpFormatOf(AVCodecID codec, VdpDecoderProfile &vdp_decoder_profile, VdpChromaType &vdp_chroma_type)
{
    // Every supported profile uses 4:2:0 chroma; the default branch below
    // overwrites this for unknown codecs.
    vdp_chroma_type = VDP_CHROMA_TYPE_420;
    switch (codec)
    {
    case AV_CODEC_ID_MPEG1VIDEO:
        vdp_decoder_profile = VDP_DECODER_PROFILE_MPEG1;
        break;
    case AV_CODEC_ID_MPEG2VIDEO:
        vdp_decoder_profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
        break;
    case AV_CODEC_ID_H264:
        vdp_decoder_profile = VDP_DECODER_PROFILE_H264_HIGH;
        break;
    case AV_CODEC_ID_WMV3:
        vdp_decoder_profile = VDP_DECODER_PROFILE_VC1_MAIN;
        break;
    case AV_CODEC_ID_VC1:
        vdp_decoder_profile = VDP_DECODER_PROFILE_VC1_ADVANCED;
        break;
    case AV_CODEC_ID_MPEG4:
        vdp_decoder_profile = VDP_DECODER_PROFILE_MPEG4_PART2_ASP;
        break;
    default:
        vdp_decoder_profile = 0;
        vdp_chroma_type = 0;
        break;
    }
}
static void ReleaseBuffer(void *opaque, quint8 *data)
{
AVDecoderVA_private *p = (AVDecoderVA_private*)opaque;
QMutexLocker lock(&p->vidSurfLock);
VdpVideoSurface surf = (VdpVideoSurface)(quintptr)data;
for(QSharedPointer<AVVdpFrame> f: p->videoSurfaces)
{
if(f->getSurf() == surf)
{
f->setFFState(0);
return;
}
}
qDebug() << "Tried to release unknown surface!";
}
// ffmpeg get_buffer2 callback: hands the decoder a VDPAU video surface from
// the pool (allocating a new one when none is free) instead of CPU memory.
// data[0]/data[3] carry the VdpVideoSurface handle; an AVBufferRef with a
// custom release callback returns the surface to the pool.
// Returns 0 on success, -1 on failure.
static int GetBuffer(AVCodecContext *avctx, AVFrame *pic, int flags)
{
    Q_UNUSED(flags);
    AVDecoderVA_private *p = (AVDecoderVA_private*)avctx->opaque;
    // Copy of the shared pointer captured by the surface deleter below, so the
    // VDPAU functions outlive every pooled surface.
    QSharedPointer<VDPAUFuncs> vdp = p->vdp;
    QMutexLocker lock(&p->vidSurfLock);
    // Look for a surface neither the renderer (state) nor ffmpeg (ffstate) owns.
    QSharedPointer<AVVdpFrame> videoFrame;
    for(const QSharedPointer<AVVdpFrame> &f : p->videoSurfaces)
    {
        if(f->getState() == 0 && f->getFFState() == 0)
        {
            videoFrame = f;
            break;
        }
    }
    if(videoFrame.isNull())
    {
        // Pool exhausted: grow it by one surface sized to the coded dimensions.
        VdpDecoderProfile profile;
        VdpChromaType chroma;
        ReadVdpFormatOf(avctx->codec_id, profile, chroma);
        VdpVideoSurface surf;
        VdpStatus err = p->vdp->vdpVideoSurfaceCreate(p->vdp->getDevice(),
                                                      chroma,
                                                      avctx->coded_width,
                                                      avctx->coded_height,
                                                      &surf);
        if(err != VDP_STATUS_OK)
        {
            qDebug() << "vdpVideoSurfaceCreate failed:" << p->vdp->vdpGetErrorString(err);
            return -1;
        }
        videoFrame = QSharedPointer<AVVdpFrame>(new AVVdpFrame(surf), [vdp](AVVdpFrame *f)
        {
            vdp->vdpVideoSurfaceDestroy(f->getSurf());
            delete f;
        });
        p->videoSurfaces << videoFrame;
    }
    // Create the release buffer BEFORE marking the surface as ffmpeg-owned:
    // previously a failed av_buffer_create left ffstate stuck at 1, leaking
    // the surface slot forever.
    AVBufferRef *buf = av_buffer_create((quint8*)(quintptr)videoFrame->getSurf(),
                                        0, &ReleaseBuffer, (void*)p, 0);
    if(!buf)
    {
        qDebug() << "Error creating AVBuffer";
        return -1;
    }
    videoFrame->setFFState(1);
    pic->data[1] = pic->data[2] = NULL;
    pic->data[0] = (quint8*)(quintptr)videoFrame->getSurf();
    pic->data[3] = (quint8*)(quintptr)videoFrame->getSurf();
    pic->linesize[0] = pic->linesize[1] = pic->linesize[2] = 0;
    pic->buf[0] = buf;
    pic->reordered_opaque = avctx->reordered_opaque;
    return 0;
}
// ffmpeg get_format callback: picks AV_PIX_FMT_VDPAU when the codec/size is
// supported by the VDPAU device, creating the VDPAU decoder and the hwaccel
// context as a side effect. Falls back to avcodec_default_get_format otherwise.
static AVPixelFormat GetFormat(AVCodecContext *avctx, const AVPixelFormat *fmt)
{
    AVDecoderVA_private *p = (AVDecoderVA_private*)avctx->opaque;
    for(const AVPixelFormat *cur = fmt; *cur != AV_PIX_FMT_NONE; ++cur)
    {
        if(*cur != AV_PIX_FMT_VDPAU)
            continue;
        // Cannot size a decoder/surfaces before dimensions are known.
        if(avctx->coded_width == 0 || avctx->coded_height == 0)
            continue;
        VdpDecoderProfile profile;
        VdpChromaType chroma;
        ReadVdpFormatOf(avctx->codec_id, profile, chroma);
        // VDP_DECODER_PROFILE_MPEG1 is 0 — the same value ReadVdpFormatOf uses
        // as its "unsupported" sentinel — so MPEG-1 must be allowed through
        // explicitly instead of being rejected by the !profile test.
        if(!profile && avctx->codec_id != AV_CODEC_ID_MPEG1VIDEO)
            continue;
        VdpBool is_supported = 0;
        quint32 max_level, max_macroblocks, max_width, max_height;
        VdpStatus err = p->vdp->vdpDecoderQueryCapabilities(p->vdp->getDevice(), profile, &is_supported, &max_level, &max_macroblocks, &max_width, &max_height);
        if(err != VDP_STATUS_OK)
        {
            qDebug() << "vdpDecoderQueryCapabilities failed:" << p->vdp->vdpGetErrorString(err);
            continue;
        }
        if(max_width < (quint32)avctx->coded_width || max_height < (quint32)avctx->coded_height)
        {
            qDebug() << "Frame size too big for VDPAU";
            continue;
        }
        // Replace any decoder left over from a previous format negotiation.
        if(p->decoder != VDP_INVALID_HANDLE)
        {
            p->vdp->vdpDecoderDestroy(p->decoder);
            p->decoder = VDP_INVALID_HANDLE;
        }
        // 16 = max_references; large enough for worst-case H.264 ref frames.
        err = p->vdp->vdpDecoderCreate(p->vdp->getDevice(), profile, avctx->coded_width, avctx->coded_height, 16, &p->decoder);
        if(err != VDP_STATUS_OK)
        {
            qDebug() << "Creating test decoder failed:" << p->vdp->vdpGetErrorString(err);
            continue;
        }
        // AVVDPAUContext must be freed with av_free (allocated by ffmpeg).
        p->hwctx = QSharedPointer<AVVDPAUContext>(av_vdpau_alloc_context(), [](AVVDPAUContext *c)
        {
            av_free(c);
        });
        p->hwctx->render = p->vdp->vdpDecoderRender;
        p->hwctx->decoder = p->decoder;
        avctx->get_buffer2 = &GetBuffer;
        avctx->hwaccel_context = (void*)p->hwctx.data();
        return AV_PIX_FMT_VDPAU;
    }
    // No usable VDPAU format offered: let ffmpeg pick a software format.
    return avcodec_default_get_format(avctx, fmt);
}
bool AVDecoderVA::open(const QString &path)
{
QMutexLocker locker(&openLock);
AVFormatContext *lavfCtx = nullptr;
p->isOpen = false;
p->lavcCtx.reset();
p->lavfCtx.reset();
freePrivData(p);
if(p->vdp.isNull())
{
qDebug() << "VDPAU funcs not loaded!";
return false;
}
if(path.isEmpty())
{
qDebug() << "Tried to open with empty path";
return false;
}
if(avformat_open_input(&lavfCtx, path.toUtf8().constData(), nullptr, nullptr) != 0)
{
qDebug() << "avformat_open_input failed";
return false;
}
p->lavfCtx = QSharedPointer<AVFormatContext>(lavfCtx, &avformat_free_context);
if(avformat_find_stream_info(lavfCtx, nullptr) < 0)
{
qDebug() << "avformat_find_stream_info failed";
return false;
}
p->vstream = nullptr;
for(unsigned int i = 0; i < lavfCtx->nb_streams; ++i)
{
if(lavfCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
p->vstream = lavfCtx->streams[i];
break;
}
}
if(!p->vstream)
{
p->lavfCtx.reset();
qDebug() << "No video stream found";
return false;
}
p->width = p->vstream->codec->width;
p->height = p->vstream->codec->height;
AVCodec *codec = avcodec_find_decoder(p->vstream->codec->codec_id);
if(!codec)
{
p->lavfCtx.reset();
qDebug() << "No codec found";
return false;
}
p->lavcCtx = QSharedPointer<AVCodecContext>(avcodec_alloc_context3(codec), [](AVCodecContext* c)
{
avcodec_close(c);
av_free(c);
});
p->extradata = QSharedPointer<quint8>((quint8*)av_mallocz(p->vstream->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE), &av_free);
memcpy(p->extradata.data(), p->vstream->codec->extradata, p->vstream->codec->extradata_size);
p->lavcCtx->extradata = p->extradata.data();
p->lavcCtx->extradata_size = p->vstream->codec->extradata_size;
p->lavcCtx->workaround_bugs = FF_BUG_AUTODETECT;
p->lavcCtx->opaque = (void*)p;
p->lavcCtx->get_format = &GetFormat;
p->lavcCtx->thread_count = 1;
p->lavcCtx->thread_type = FF_THREAD_SLICE;
if(avcodec_open2(p->lavcCtx.data(), codec, nullptr) < 0)
{
p->lavcCtx.reset();
p->lavfCtx.reset();
qDebug() << "avcodec_open2 failed";
return false;
}
qDebug() << "Successfully opened" << path;
p->offset = 0;
p->curPacket.reset();
p->isOpen = true;
p->openPath = path;
p->frameNum = 0;
return true;
}
// Seeks the demuxer back to the start of the stream. Tries a backward seek
// first, then an "any" seek, and as a last resort reopens the file entirely.
// Emits rewindDone() when finished; does nothing if no file is open.
void AVDecoderVA::rewind()
{
    if(!p->isOpen)
        return;
    avcodec_flush_buffers(p->lavcCtx.data());
    int seekRes = av_seek_frame(p->lavfCtx.data(), p->vstream->index, 0, AVSEEK_FLAG_BACKWARD);
    if(seekRes < 0)
        seekRes = av_seek_frame(p->lavfCtx.data(), p->vstream->index, 0, AVSEEK_FLAG_ANY);
    if(seekRes >= 0)
    {
        // Seek succeeded: drop codec state and the half-consumed packet.
        avcodec_flush_buffers(p->lavcCtx.data());
        p->curPacket.reset();
        p->offset = 0;
    }
    else
    {
        // Both seeks failed: reopen the same file from scratch.
        open(p->openPath);
    }
    emit rewindDone();
}
bool AVDecoderVA::decodeFrame()
{
if(!p->isOpen)
{
qDebug() << "Tried to decodeFrame without open decoder";
return false;
}
bool ok = true;
if(p->offset >= p->curPacket.packet.size)
{
p->offset = 0;
ok = p->curPacket.reset(p->lavfCtx.data());
while(ok && p->curPacket.packet.stream_index != p->vstream->index)
{
ok = p->curPacket.reset(p->lavfCtx.data());
}
}
AVPacket packetToSend;
av_init_packet(&packetToSend);
if(ok)
{
packetToSend.data = p->curPacket.packet.data + p->offset;
packetToSend.size = p->curPacket.packet.size - p->offset;
}
else
{
packetToSend.data = nullptr;
packetToSend.size = 0;
}
int gotFrame = 0;
int procLen = avcodec_decode_video2(p->lavcCtx.data(), p->frame.data(), &gotFrame, &packetToSend);
if(procLen < 0)
{
qDebug() << "Error decoding frame";
emit decodeError("avcodec_decode_video2 failed");
return false;
}
p->offset += procLen;
if(!ok && !gotFrame)
{
emit decodeDone();
}
else if(gotFrame)
{
VdpVideoSurface surf = (VdpVideoSurface)(quintptr)p->frame->data[3];
QSharedPointer<AVVdpFrame> res;
for(QSharedPointer<AVVdpFrame> f: p->videoSurfaces)
{
if(f->getSurf() == surf)
{
res = f;
break;
}
}
if(res.isNull())
{
qDebug() << "Decoded surface in unknown!";
return false;
}
res->setState(1);
res->setFFState(0);
emit sendingDecodedFrame();
emit decodedFrame(res, p->lavcCtx->coded_width, p->lavcCtx->coded_height);
p->frameNum += 1;
}
else
{
return decodeFrame();
}
return true;
}
#pragma once
#include <QObject>
#include <QAtomicInt>
#include <QSharedPointer>
// Bookkeeping record for one VDPAU video surface in the decoder's pool.
// "ffstate" tracks whether ffmpeg currently owns the surface (GetBuffer sets
// it, ReleaseBuffer clears it); "state" tracks whether the application side
// still holds it. Both are atomic because they are touched from decoder
// callbacks and the consumer. Non-copyable: the owning shared pointer's
// deleter destroys the underlying VdpVideoSurface exactly once.
class AVVdpFrame
{
    AVVdpFrame() {} // private: a frame without a surface handle is invalid
public:
    AVVdpFrame(const AVVdpFrame &o) = delete;
    AVVdpFrame& operator=(const AVVdpFrame &o) = delete;
    // explicit: a raw quint32 should never silently convert to a frame
    explicit AVVdpFrame(quint32 videoSurf)
        :videoSurf(videoSurf), ffstate(0), state(0)
    {}
    // VdpVideoSurface handle this record wraps.
    quint32 getSurf() const
    {
        return videoSurf;
    }
    // 1 while the application/renderer holds the surface, 0 otherwise.
    int getState() const
    {
        return state;
    }
    void setState(int state)
    {
        this->state = state;
    }
    // 1 while ffmpeg owns the surface as a decode target, 0 otherwise.
    int getFFState() const
    {
        return ffstate;
    }
    void setFFState(int state)
    {
        this->ffstate = state;
    }
private:
    // In-class initializers keep members defined even via the private
    // default constructor.
    quint32 videoSurf = 0;
    QAtomicInt ffstate{0};
    QAtomicInt state{0};
};
struct AVDecoderVA_private;
// VDPAU-accelerated video file decoder built on FFmpeg (libavformat/avcodec).
// Typical use: open() a file, call decodeFrame() repeatedly, listen for the
// decodedFrame() signal; rewind() seeks back to the start.
// Internals live in AVDecoderVA_private (pimpl).
class AVDecoderVA : public QObject
{
Q_OBJECT
public:
AVDecoderVA(QObject *parent = 0);
~AVDecoderVA();
public slots:
// Opens the given media file; returns false and stays closed on failure.
bool open(const QString &path);
// Seeks back to the beginning of the stream; emits rewindDone() when done.
void rewind();
// Decodes one frame; emits decodedFrame(), decodeDone() or decodeError().
bool decodeFrame();
signals:
void rewindDone();
void decodeError(const QString &err);
// Emitted once the end of the stream has been fully drained.
void decodeDone();
// Delivers a decoded VDPAU surface plus the coded dimensions.
void decodedFrame(const QSharedPointer<AVVdpFrame> &surface, int width, int height);
void sendingDecodedFrame();
private:
AVDecoderVA_private *p;
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment