/*
 * GPAC video decoder module using the Android NDK MediaCodec API.
 * Source: GitHub gist by @rauf, created June 29, 2016.
 */
#include <gpac/modules/codec.h>
#include <gpac/internal/media_dev.h>
#include <gpac/constants.h>
#include "../../src/compositor/gl_inc.h"
#ifdef GPAC_ANDROID
#include <jni.h>
#include "media/NdkMediaCodec.h"
#include "media/NdkMediaExtractor.h"
#include "media/NdkMediaFormat.h"
#include <android/log.h>
/*needed for __system_property_get() used by sdkInt()*/
#include <sys/system_properties.h>

#define TAG "mc_decode"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
/*redirect printf to logcat. Fixes vs. previous version:
  - use the TAG macro (was the literal string "TAG", so these logs did not
    share the module's tag)
  - no trailing semicolon: the old expansion produced two statements, which
    breaks unbraced if/else bodies*/
#define printf(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)

/*forward declaration: getGF_Err() is defined after its first use in
  MCDec_InitDecoder(), which previously relied on an implicit declaration*/
GF_Err getGF_Err(media_status_t err);
#endif
/*private decoder context, stored in GF_BaseDecoder::privateStack*/
typedef struct
{
#ifdef GPAC_ANDROID
AMediaCodec *codec;   //NDK MediaCodec decoder instance
AMediaFormat *format; //format passed to configure; replaced on OUTPUT_FORMAT_CHANGED
#endif
u32 width;  //frame width in pixels (from SPS, or 128 placeholder)
u32 height; //frame height in pixels (from SPS, or 128 placeholder)
u32 stride; //NOTE(review): never assigned in this file before use - confirm
Bool raw_frame_dispatch;
const char *mime; //MediaCodec MIME type, e.g. "video/avc"; NULL until init
u8 chroma_format, luma_bit_depth, chroma_bit_depth; //from the parsed SPS
u32 max_input_size;
u32 pix_fmt; //GF_PIXEL_* output pixel format
GF_ESD *esd; //attached elementary stream descriptor (not owned)
GF_Err last_error;
Bool codec_specific_data; //GF_TRUE when SPS+PPS came from the ESD config
Bool inputEOS;  //end-of-stream queued on the input side
Bool outputEOS; //end-of-stream seen on the output side
u32 out_size;   //bytes per decoded frame (w*h*3/2 for 4:2:0)
u32 pixel_ar;   //pixel aspect ratio packed as (num << 16) | den
//NAL-based specific
char *sps, *pps;          //raw SPS/PPS NAL payloads (owned by ctx)
u32 sps_size, pps_size;
Bool is_annex_b;          //stream carries start codes (no DSI in the ESD)
char *cached_annex_b;     //AU kept across a GF_BUFFER_TOO_SMALL round-trip
u32 cached_annex_b_size;
u32 nalu_size_length;     //NALU length-field size when stream is AVCC-framed
u64 presentation;
u32 no;          //frame counter, used to synthesize presentation timestamps
u16 frame_rate;  //assumed fps for timestamp synthesis (hard-coded to 30)
int vtb_session; //NOTE(review): leftover from the VideoToolbox decoder this was derived from; unused here
} MCDec;
#ifdef GPAC_ANDROID
/*returns the device's Android API level (ro.build.version.sdk),
  or 0 when the property cannot be read*/
int sdkInt()
{
	char sdk_str[PROP_VALUE_MAX] = "0";
	/*fix: __system_property_get takes (name, out_value) only - the previous
	  3-argument call did not match the NDK prototype; buffer is now
	  PROP_VALUE_MAX instead of 3 bytes*/
	__system_property_get("ro.build.version.sdk", sdk_str);
	return atoi(sdk_str);
}
#endif
//todo initialise other items
/*
 * Configures and starts the Android MediaCodec decoder from the stream
 * parameters stored in ctx (SPS/PPS for AVC):
 * - fills ctx->width/height/pix_fmt/out_size/pixel_ar from the parsed SPS
 * - builds the "csd-0"/"csd-1" codec-specific buffers in Annex B form
 * - configures and starts the codec
 * Returns GF_OK on success, GF_NOT_SUPPORTED for non-AVC streams,
 * GF_CODEC_NOT_FOUND when the codec cannot be created, GF_OUT_OF_MEM on
 * allocation failure, or the mapped media_status_t from configure/start.
 */
static GF_Err MCDec_InitDecoder(MCDec *ctx) {
	printf("\nMCDec_InitDecoder called");
	char *dsi_data = NULL;
	u32 dsi_data_size = 0;
#ifdef GPAC_ANDROID
	Bool dsi_allocated = GF_FALSE;
	ctx->pix_fmt = GF_PIXEL_YV12;

	switch (ctx->esd->decoderConfig->objectTypeIndication) {
	case GPAC_OTI_VIDEO_AVC:
		if (ctx->sps && ctx->pps) {
			AVCState avc;
			s32 idx;
			memset(&avc, 0, sizeof(AVCState));
			avc.sps_active_idx = -1;

			idx = gf_media_avc_read_sps(ctx->sps, ctx->sps_size, &avc, 0, NULL);
			ctx->mime = "video/avc";
			assert(ctx->sps);
			ctx->width = avc.sps[idx].width;
			ctx->height = avc.sps[idx].height;
			//4:2:0 output: 1.5 bytes per pixel
			ctx->out_size = 3 * ctx->height * ctx->width / 2;

			if (avc.sps[idx].vui.par_num && avc.sps[idx].vui.par_den) {
				//pack pixel aspect ratio as (num << 16) | den, as reported by GF_CODEC_PAR
				ctx->pixel_ar = avc.sps[idx].vui.par_num;
				ctx->pixel_ar <<= 16;
				ctx->pixel_ar |= avc.sps[idx].vui.par_den;
			}
			ctx->chroma_format = avc.sps[idx].chroma_format;
			ctx->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
			ctx->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;

			switch (ctx->chroma_format) {
			case 2:
				//422 decoding doesn't seem supported ...
				ctx->pix_fmt = (ctx->luma_bit_depth > 8) ? GF_PIXEL_YUV422_10 : GF_PIXEL_YUV422;
				break;
			case 3:
				ctx->pix_fmt = (ctx->luma_bit_depth > 8) ? GF_PIXEL_YUV444_10 : GF_PIXEL_YUV444;
				break;
			default:
				if (ctx->luma_bit_depth > 8) ctx->pix_fmt = GF_PIXEL_YV12_10;
				break;
			}

			if (!ctx->esd->decoderConfig->decoderSpecificInfo || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
				//no decoder config in the ESD: rebuild an avcC box from the in-band SPS/PPS
				GF_AVCConfigSlot *slc_s, *slc_p;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_new();
				cfg->configurationVersion = 1;
				cfg->profile_compatibility = avc.sps[idx].prof_compat;
				cfg->AVCProfileIndication = avc.sps[idx].profile_idc;
				cfg->AVCLevelIndication = avc.sps[idx].level_idc;
				cfg->chroma_format = avc.sps[idx].chroma_format;
				cfg->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
				cfg->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
				cfg->nal_unit_size = 4;

				GF_SAFEALLOC(slc_s, GF_AVCConfigSlot);
				slc_s->data = ctx->sps;
				slc_s->size = ctx->sps_size;
				gf_list_add(cfg->sequenceParameterSets, slc_s);

				GF_SAFEALLOC(slc_p, GF_AVCConfigSlot);
				slc_p->data = ctx->pps;
				slc_p->size = ctx->pps_size;
				gf_list_add(cfg->pictureParameterSets, slc_p);

				gf_odf_avc_cfg_write(cfg, &dsi_data, &dsi_data_size);
				dsi_allocated = GF_TRUE;
				//detach the SPS/PPS we only borrowed so cfg_del does not free them
				slc_s->data = slc_p->data = NULL;
				gf_odf_avc_cfg_del(cfg);
			} else {
				dsi_data = ctx->esd->decoderConfig->decoderSpecificInfo->data;
				dsi_data_size = ctx->esd->decoderConfig->decoderSpecificInfo->dataLength;
			}
			//fix: the buffer written by gf_odf_avc_cfg_write was leaked
			//(dsi_data is unused below since the VideoToolbox-inherited code was dropped)
			if (dsi_allocated) gf_free(dsi_data);
		}
		break;

	default:
		return GF_NOT_SUPPORTED;
	}

	ctx->codec = AMediaCodec_createDecoderByType(ctx->mime);
	if (!ctx->codec) {
		LOGE("AMediaCodec_createDecoderByType failed");
		return GF_CODEC_NOT_FOUND;
	}

	ctx->format = AMediaFormat_new();
	if (!ctx->format) {
		LOGE("AMediaFormat_new() failed");
		return GF_CODEC_NOT_FOUND;
	}

	//TODO: hard-coded frame rate - should be derived from stream timing
	ctx->frame_rate = 30;

	AMediaFormat_setString(ctx->format, AMEDIAFORMAT_KEY_MIME, ctx->mime);
	AMediaFormat_setInt32(ctx->format, AMEDIAFORMAT_KEY_WIDTH, ctx->width);
	AMediaFormat_setInt32(ctx->format, AMEDIAFORMAT_KEY_HEIGHT, ctx->height);
	AMediaFormat_setInt32(ctx->format, AMEDIAFORMAT_KEY_STRIDE, ctx->stride);
	AMediaFormat_setInt32(ctx->format, AMEDIAFORMAT_KEY_COLOR_FORMAT, ctx->pix_fmt);
	AMediaFormat_setInt32(ctx->format, AMEDIAFORMAT_KEY_FRAME_RATE, ctx->frame_rate);

	//csd-0/csd-1 must be Annex B: 4-byte start code 00 00 00 01 + raw NAL.
	//fix: replaces a fragile combined loop that assumed sps_size >= pps_size
	int sps_size = 4 + ctx->sps_size;
	int pps_size = 4 + ctx->pps_size;
	char *sps = (char *)malloc(sps_size);
	char *pps = (char *)malloc(pps_size);
	if (!sps || !pps) {
		free(sps);
		free(pps);
		return GF_OUT_OF_MEM;
	}
	sps[0] = sps[1] = sps[2] = 0x00; sps[3] = 0x01;
	memcpy(sps + 4, ctx->sps, ctx->sps_size);
	pps[0] = pps[1] = pps[2] = 0x00; pps[3] = 0x01;
	memcpy(pps + 4, ctx->pps, ctx->pps_size);

	AMediaFormat_setBuffer(ctx->format, "csd-0", sps, sps_size);
	AMediaFormat_setBuffer(ctx->format, "csd-1", pps, pps_size);
	//fix: AMediaFormat_setBuffer copies the data, so these temporaries were leaked
	free(sps);
	free(pps);

	media_status_t status = AMediaCodec_configure(
		ctx->codec,  // codec
		ctx->format, // format
		NULL,        // surface
		NULL,        // crypto
		0            // flags
	);
	if (status != AMEDIA_OK) {
		LOGE("AMediaCodec_configure failed");
		return getGF_Err(status);
	}

	status = AMediaCodec_start(ctx->codec);
	if (status != AMEDIA_OK) {
		LOGE("AMediaCodec_start failed");
		return getGF_Err(status);
	}

	//fix: this LOGI used to sit outside the GPAC_ANDROID guard and would not
	//compile on non-Android builds
	LOGI("Video size: %d x %d", ctx->width, ctx->height);
#endif

	ctx->inputEOS = GF_FALSE;
	ctx->outputEOS = GF_FALSE;
	ctx->presentation = 0;
	ctx->no = 0;
	return GF_OK;
}
/*
 * Binds an elementary stream to this decoder instance. For AVC, extracts the
 * first SPS/PPS from the decoder specific info when present and initializes
 * MediaCodec; when the stream is Annex B (no DSI), placeholder dimensions are
 * set and the real init is deferred until parameter sets arrive in-band.
 * Returns GF_OK, GF_NOT_SUPPORTED for non-AVC streams, or the init error.
 */
static GF_Err MCDec_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	printf("\nMCDec_AttachStream called");
	MCDec *ctx = (MCDec *)ifcg->privateStack;
	ctx->esd = esd;
	GF_Err e;

	//check AVC config
	if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_AVC) {
		if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
			//Annex B stream: dimensions unknown until the first SPS is seen
			ctx->is_annex_b = GF_TRUE;
			ctx->width = ctx->height = 128;
			ctx->out_size = ctx->width * ctx->height * 3 / 2;
			LOGV("=============AttachStream ctx->height: %d", ctx->height);
			LOGV("=============AttachStream: ctx->out_size : %d", ctx->out_size);
			ctx->pix_fmt = GF_PIXEL_YV12;
			return GF_OK;
		} else {
			GF_AVCConfigSlot *slc;
			GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);

			slc = gf_list_get(cfg->sequenceParameterSets, 0);
			if (slc) {
				ctx->sps = slc->data;
				ctx->sps_size = slc->size;
				//fix: take ownership of the payload - gf_odf_avc_cfg_del()
				//below frees slot data, which left ctx->sps dangling
				slc->data = NULL;
			}
			slc = gf_list_get(cfg->pictureParameterSets, 0);
			if (slc) {
				ctx->pps = slc->data;
				ctx->pps_size = slc->size;
				//same dangling-pointer fix as for the SPS above
				slc->data = NULL;
			}

			if (ctx->sps && ctx->pps) {
				ctx->codec_specific_data = GF_TRUE;
				e = MCDec_InitDecoder(ctx);
			} else {
				ctx->nalu_size_length = cfg->nal_unit_size;
				e = GF_OK;
			}
			gf_odf_avc_cfg_del(cfg);
			return e;
		}
	}
	//!!!add other video type files (MPEG-4 Part 2 / VOSH handling, HEVC, H263...)
	return GF_NOT_SUPPORTED;
}
/*per-stream detach hook: nothing is held per-stream yet, so this is a no-op*/
static GF_Err MCDec_DetachStream(GF_BaseDecoder *ifcg, u16 ES_ID)
{
	printf("\nMCDec_DetachStream called");
	return GF_OK;
}
#ifdef GPAC_ANDROID
/*maps an NDK media_status_t to the closest GF_Err code*/
GF_Err getGF_Err(media_status_t err) {
	switch (err) {
	case AMEDIA_OK: return GF_OK;
	case AMEDIA_ERROR_UNSUPPORTED: return GF_NOT_SUPPORTED;
	case AMEDIA_ERROR_INVALID_PARAMETER: return GF_BAD_PARAM;
	case AMEDIA_ERROR_MALFORMED: return GF_NON_COMPLIANT_BITSTREAM;
	case AMEDIA_ERROR_INVALID_OBJECT: return GF_BAD_PARAM;
	case AMEDIA_ERROR_BASE:
	default:
		//fix: used to return -1234, which is not a valid GF_Err value
		return GF_SERVICE_ERROR;
	}
}
#endif
/*
 * Capability query: reports the stream geometry and buffering parameters
 * gathered during init. Codes we do not track report 0.
 */
static GF_Err MCDec_GetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability *capability)
{
	printf("\nMCDec_GetCapabilities called");
	MCDec *ctx = (MCDec *)ifcg->privateStack;
	u32 code = capability->CapCode;

	if (code == GF_CODEC_RESILIENT) {
		capability->cap.valueInt = 1;
	} else if (code == GF_CODEC_WIDTH) {
		capability->cap.valueInt = ctx->width;
	} else if (code == GF_CODEC_HEIGHT) {
		capability->cap.valueInt = ctx->height;
	} else if (code == GF_CODEC_STRIDE) {
		capability->cap.valueInt = ctx->stride;
	} else if (code == GF_CODEC_FPS) {
		capability->cap.valueFloat = 30.0;
	} else if (code == GF_CODEC_PAR) {
		capability->cap.valueInt = ctx->pixel_ar; //!!
	} else if (code == GF_CODEC_OUTPUT_SIZE) {
		capability->cap.valueInt = ctx->out_size;
	} else if (code == GF_CODEC_PIXEL_FORMAT) {
		capability->cap.valueInt = ctx->pix_fmt;
	} else if (code == GF_CODEC_BUFFER_MIN) {
		capability->cap.valueInt = 1;
	} else if (code == GF_CODEC_BUFFER_MAX) {
		capability->cap.valueInt = 6; //ctx->max_input_size;
	} else if (code == GF_CODEC_PADDING_BYTES) {
		/*by default we use 4 bytes padding (otherwise it happens that XviD crashes on some videos...)*/
		capability->cap.valueInt = 0;
	} else if (code == GF_CODEC_REORDER) {
		/*reorder is up to us*/
		capability->cap.valueInt = 0;
	} else if (code == GF_CODEC_WANTS_THREAD) {
		capability->cap.valueInt = 0;
	} else {
		/*GF_CODEC_CU_DURATION and anything else: not known at our level*/
		capability->cap.valueInt = 0;
	}
	return GF_OK;
}
/*capability setter: nothing is configurable yet, the request is only logged*/
static GF_Err MCDec_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capability)
{
	printf("\nMCDec_SetCapabilities called");
	printf("\nMCDec_SetCapabilities ... capability code: %d, value: %d", capability.CapCode, (int) capability.cap.valueInt);
	return GF_OK;
}
/*
 * Decode entry point: feeds one compressed access unit (inBuffer) to
 * MediaCodec and, when a decoded frame is available, copies it into
 * outBuffer and sets *outBufferLength (0 when no frame was produced).
 * Input and output sides are drained independently per call.
 * NOTE(review): the large comment below (opened at the first bare slash-star)
 * runs all the way to the matching close before the live code - C comments
 * do not nest, so everything in between, including what looks like live
 * annex-B caching logic, is dead code.
 */
static GF_Err MCDec_ProcessData(GF_MediaDecoder *ifcg,
char *inBuffer, u32 inBufferLength,
u16 ES_ID, u32 *CTS,
char *outBuffer, u32 *outBufferLength,
u8 PaddingBits, u32 mmlevel)
{
printf("\nMCDec_ProcessData called");
/*
char *in_data;
u32 in_data_size;
GF_Err e;
*/
MCDec *ctx = (MCDec *)ifcg->privateStack;
//default: no decoded frame produced this call
*outBufferLength = 0;
/*
//if (ctx->is_annex_b || /*!ctx->vtb_session && ctx->nalu_size_length)
if(GF_TRUE)
{
LOGD("MCDec_ProcessData inside if");
if (ctx->cached_annex_b) {
in_data = ctx->cached_annex_b;
in_data_size = ctx->cached_annex_b_size;
ctx->cached_annex_b = NULL;
//self added
ctx->is_annex_b = GF_TRUE;
} else {
LOGD("MCDec_ProcessData Rewrite NAL");
e = MC_RewriteNALs(ctx, inBuffer, inBufferLength, &in_data, &in_data_size);
if (e) return e;
}
if (!ctx->raw_frame_dispatch && (ctx->out_size != *outBufferLength)) {
LOGD("MCDec_ProcessData inside if#2");
*outBufferLength = ctx->out_size;
ctx->cached_annex_b = in_data;
ctx->cached_annex_b_size = in_data_size;
return GF_BUFFER_TOO_SMALL;
}
}
/*
else if (ctx->vosh_size) {
in_data = inBuffer + ctx->vosh_size;
in_data_size = inBufferLength - ctx->vosh_size;
ctx->vosh_size = 0;
}
else {
LOGD("MCDec_ProcessData inside else");
in_data = inBuffer;
in_data_size = inBufferLength;
}
/*
if (!ctx->vtb_session) {
*outBufferLength=0;
return GF_OK;
}
*/
/*
LOGD("\nNew Buffer: ");
u32 j;
for(j = 0; j < inBufferLength; j++) {
LOGD("Buffer j: %d, inBuffer : %02x", j , inBuffer[j]);
}
*/
//#ifdef GPAC_ANDROID
//NOTE(review): i and pos are never used below
u32 i = 0;
u32 pos = 0;
//dequeue timeout, in microseconds
u32 DEQUEUE_TIMEOUT = 100000;
//---- input side: try to queue the incoming access unit ----
if(!ctx->inputEOS) {
LOGV("\nAMediaCodec_dequeueInputBuffer");
ssize_t inIndex = AMediaCodec_dequeueInputBuffer(ctx->codec, DEQUEUE_TIMEOUT);
//NOTE(review): %d for a ssize_t - should be %zd
LOGV("Input Buffer Index: %d", inIndex);
if (inIndex >= 0) {
size_t inSize;
LOGV("AMediaCodec_getInputBuffer");
//uint8_t *buffer = AMediaCodec_getInputBuffer(ctx->codec, inIndex, &inSize);
char *buffer = (char *)AMediaCodec_getInputBuffer(ctx->codec, inIndex, &inSize);
LOGV("Input Buffer size: %d", inSize);
LOGV("inBufferLength: %d", inBufferLength);
if (inBufferLength > inSize) {
LOGE("The returned buffer is too small");
return GF_BUFFER_TOO_SMALL;
}
//NOTE(review): inBuffer is dereferenced here, but only NULL-checked further
//down; a NULL AU (EOS signal) would crash before reaching that check.
//Assumes byte 4 is the NAL header after a 4-byte length/start-code prefix;
//0x67 is an SPS NAL: in that case skip the leading SPS+PPS already sent as
//csd-0/csd-1 and copy from the next start code onward - TODO confirm the
//framing assumption against the caller
if(inBuffer[4] == 0x67) { //checking for sps
int start = ctx->sps_size + ctx->pps_size;
u32 m;
int k = 4;
Bool copy = GF_FALSE;
for (m = start; m < inBufferLength - 4; ++m) {
if(inBuffer[m] == 0x00 && inBuffer[m+1] == 0x00 && inBuffer[m+2] == 0x01){
copy = GF_TRUE;
}
if(copy) {
buffer[k] = inBuffer[m + 4];
//LOGV("First Buffer, k: %d, buffer[k]: %02x", k, buffer[k]);
k++;
}
}
}
else {
memcpy(buffer, inBuffer, inBufferLength);
}
//force a 4-byte Annex B start code at the head of the codec buffer
//(overwrites what is assumed to be an AVCC length field)
buffer[0] = 0x00;
buffer[1] = 0x00;
buffer[2] = 0x00;
buffer[3] = 0x01;
/*
LOGD("=====================================New Buffer: ");
u32 j;
for(j = 0; j < inBufferLength; j++) {
LOGV("Buffer j: %d, buffer: %02x and inBuffer : %02x", j , buffer[j], inBuffer[j]);
}
*/
//NOTE(review): unreachable with NULL input - see the dereference above
if(!inBuffer){
LOGI("AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM input");
ctx->inputEOS = true;
}
LOGV("AMediaCodec_queueInputBuffer");
media_status_t status;
//synthesize a PTS from the frame counter and the assumed frame rate
u64 presentationTimeUs = ctx->no * 1000000 / ctx->frame_rate;
ctx->no++;
//LOGV("presentation::::::: %llu", presentationTimeUs);
//LOGV("noooo:::::::::::::: %d", ctx->no);
//NOTE(review): always queues inBufferLength bytes, even on the SPS path
//above where the copy may have written a different number of bytes
status = AMediaCodec_queueInputBuffer(ctx->codec,
inIndex,
0,
inBufferLength, //!!!
ctx->inputEOS ? 0 : presentationTimeUs, //!!!!!!!!!!!!!!presentation time
inBuffer ? 0 : AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM
);
if (status != AMEDIA_OK) {
LOGE("AMediaCodec_queueInputBuffer failed");
return GF_BAD_PARAM;
}
} else {
LOGI("Input Buffer not available.");
}
}
//---- output side: try to drain one decoded frame ----
if(!ctx->outputEOS) {
AMediaCodecBufferInfo info;
LOGV("AMediaCodec_dequeueOutputBuffer");
ssize_t outIndex = AMediaCodec_dequeueOutputBuffer(ctx->codec, &info, DEQUEUE_TIMEOUT);
LOGV("OutputIndex: %d", outIndex);
switch(outIndex) {
case AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED:
LOGI("AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED");
//NOTE(review): the previous ctx->format is not released before being
//overwritten - AMediaFormat leak
ctx->format = AMediaCodec_getOutputFormat(ctx->codec);
LOGV("New ctx->format: %p" , ctx->format);
//AMediaFormat_getString(ctx->format, AMEDIAFORMAT_KEY_MIME, ctx->mime);
//AMediaFormat_getInt32(ctx->format, AMEDIAFORMAT_KEY_WIDTH, (int32_t *)ctx->width);
//AMediaFormat_getInt32(ctx->format, AMEDIAFORMAT_KEY_HEIGHT, (int32_t *)ctx->height);
//AMediaFormat_getInt32(ctx->format, AMEDIAFORMAT_KEY_STRIDE, (int32_t *)ctx->stride);
//AMediaFormat_getInt32(ctx->format, AMEDIAFORMAT_KEY_COLOR_FORMAT, (int32_t *)ctx->pix_fmt);
// AMediaFormat_getInt32(ctx->format, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, (int32_t *)ctx->max_input_size);
break;
case AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED:
LOGI("AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED");
break;
case AMEDIACODEC_INFO_TRY_AGAIN_LATER:
LOGI("AMEDIACODEC_INFO_TRY_AGAIN_LATER");
break;
default:
LOGV("AMediaCodecBufferInfo info.size: %d", info.size);
//LOGV("AMediaCodecBufferInfo info.offset: %d", info.offset);
//LOGV("AMediaCodecBufferInfo size - offset: %d", info.size - info.offset);
//LOGV("AMediaCodecBufferInfo presentationtime : %ld", info.presentationTimeUs);
if (outIndex >=0) {
if(info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
LOGI("AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM output");
ctx->outputEOS = true;
}
size_t outSize;
uint8_t *buffer = AMediaCodec_getOutputBuffer(ctx->codec, outIndex, &outSize);
LOGV("Output Buffer size: %d", outSize);
if(!buffer) {
LOGE("AMediaCodec_getOutputBuffer failed"); //
*outBufferLength = 0;
} else {
LOGV("outBufferLength = size");
//outBufferLength = outSize;
//*outBufferLength = info.size;
//NOTE(review): uses ctx->out_size rather than info.size/outSize;
//assumes the codec buffer holds at least out_size bytes - TODO confirm
*outBufferLength = ctx->out_size;
}
/*
u32 k;
for(k = 0; k < 100; k++) {
//outBuffer[k - info.offset] = buffer[k];
LOGV("Output Buffer, i: %d, buffer: 0x%02x", k , buffer[k]);
}
*/
LOGV("*outBufferLength: %d", *outBufferLength);
LOGV("outBuffer %p" , outBuffer);
LOGV("buffer %p", buffer);
LOGV("copying output......");
//LOGV("ctx->out_size: %d", ctx->out_size);
//NOTE(review): if getOutputBuffer failed above, buffer is NULL here and
//memcpy(dst, NULL, 0) is technically undefined - consider guarding
memcpy(outBuffer, buffer, *outBufferLength);
//memcpy(outBuffer, buffer, *outBufferLength);
//outBuffer = buffer;
LOGV("color format: %d", ctx->pix_fmt);
//false: do not render to a surface, we copied the data ourselves
AMediaCodec_releaseOutputBuffer(ctx->codec, outIndex, false);
LOGV("AMediaCodec_releaseOutputBuffer ");
} else{
LOGE("Output Buffer not available");
}
}
}
//#endif
LOGV("GF_OK arrived");
return GF_OK;
}
/*
 * Capability probe: tells the terminal whether this module can decode the
 * given visual stream. Returns one of the GF_CODEC_* support codes.
 */
static u32 MCDec_CanHandleStream(GF_BaseDecoder *dec, u32 StreamType, GF_ESD *esd, u8 PL)
{
	printf("\nMCDec_CanHandleStream called");
	if (StreamType != GF_STREAM_VISUAL) return GF_CODEC_NOT_SUPPORTED;

	/*media type query*/
	if (!esd) return GF_CODEC_STREAM_TYPE_SUPPORTED;

	printf("\nobject type indication: %d", esd->decoderConfig->objectTypeIndication);
	switch (esd->decoderConfig->objectTypeIndication) {
	case GPAC_OTI_VIDEO_AVC:
		printf("\nMCDec_CanHandleStream avc stream");
		return GF_CODEC_SUPPORTED;
	case GPAC_OTI_VIDEO_HEVC:
		printf("\nMCDec_CanHandleStream hevc stream");
		//fix: an unconditional "return GF_CODEC_SUPPORTED" (marked "to be
		//removed") made the API-level check below unreachable; hardware HEVC
		//decode requires Android 5.0 (API level 21)
#ifdef GPAC_ANDROID
		if (sdkInt() >= 21) {
			return GF_CODEC_SUPPORTED;
		}
#endif
		return GF_CODEC_NOT_SUPPORTED;
	case GPAC_OTI_VIDEO_MPEG4_PART2:
		printf("\nMCDec_CanHandleStream mpeg4 stream");
		return GF_CODEC_SUPPORTED;
	}
	printf("\nMCDec_CanHandleStream ....stream not supported");
	return GF_CODEC_NOT_SUPPORTED;
}
/*returns a human-readable name for the active codec, based on the MIME type
  chosen at init time*/
static const char *MCDec_GetCodecName(GF_BaseDecoder *dec)
{
	printf("\nMCDec_GetCodecName called");
	MCDec *ctx = (MCDec *) dec->privateStack;
	//fix: mime is NULL until MCDec_InitDecoder runs - the strcmp calls below
	//would dereference NULL if queried before init
	if (!ctx->mime) return "MediaCodec not supported";
	printf("\nMCDec_GetCodecName codec name: %s", ctx->mime);
	if(!strcmp(ctx->mime, "video/avc"))
		return "MediaCodec hardware AVC|H264";
	else if(!strcmp(ctx->mime, "video/hevc"))
		return "MediaCodec hardware HEVC|H265";
	else if(!strcmp(ctx->mime, "video/mp4v-es"))
		return "MediaCodec hardware MPEG-4 Part2";
	else if(!strcmp(ctx->mime, "video/3gpp")) //for h.263
		return "MediaCodec hardware H263";
	else return "MediaCodec not supported";
}
GF_BaseDecoder *NewMCDec()
{
printf("\nNewMCDec called");
GF_MediaDecoder *ifcd;
MCDec *dec;
GF_SAFEALLOC(ifcd, GF_MediaDecoder);
if (!ifcd) return NULL;
GF_SAFEALLOC(dec, MCDec);
if (!dec) {
gf_free(ifcd);
return NULL;
}
GF_REGISTER_MODULE_INTERFACE(ifcd, GF_MEDIA_DECODER_INTERFACE, "Android MediaCodec Decoder", "gpac distribution")
ifcd->privateStack = dec;
/*setup our own interface*/
ifcd->AttachStream = MCDec_AttachStream;
ifcd->DetachStream = MCDec_DetachStream;
ifcd->GetCapabilities = MCDec_GetCapabilities;
ifcd->SetCapabilities = MCDec_SetCapabilities;
ifcd->GetName = MCDec_GetCodecName;
ifcd->CanHandleStream = MCDec_CanHandleStream;
ifcd->ProcessData = MCDec_ProcessData;
//ifcd->GetOutputFrame = MCDec_GetOutputFrame; //ifcd->GetOutputFrame not recognised
return (GF_BaseDecoder *) ifcd;
}
/*module destructor: stops and releases the MediaCodec resources, then frees
  the context and the interface*/
void DeleteMCDec(GF_BaseDecoder *ifcg)
{
	printf("\nDeleteMCDec called");
	MCDec *ctx = (MCDec *)ifcg->privateStack;
#ifdef GPAC_ANDROID
	if (ctx->codec) {
		//fix: stop the codec before deleting it; also guard against a
		//decoder that was never initialized (codec still NULL)
		AMediaCodec_stop(ctx->codec);
		AMediaCodec_delete(ctx->codec);
	}
	//fix: the AMediaFormat created in MCDec_InitDecoder was leaked
	if (ctx->format) AMediaFormat_delete(ctx->format);
#endif
	gf_free(ctx);
	gf_free(ifcg);
}
GPAC_MODULE_EXPORT
const u32 *QueryInterfaces()
{
	/*zero-terminated list of interface types exposed by this module*/
	static u32 si[] = {
#ifndef GPAC_DISABLE_AV_PARSERS
		GF_MEDIA_DECODER_INTERFACE,
#endif
		0
	};
	printf("\nQueryInterfaces called");
	return si;
}
GPAC_MODULE_EXPORT
GF_BaseInterface *LoadInterface(u32 InterfaceType)
{
	printf("\nLoadInterface called");
#ifndef GPAC_DISABLE_AV_PARSERS
	/*only the media decoder interface is provided*/
	if (InterfaceType == GF_MEDIA_DECODER_INTERFACE)
		return (GF_BaseInterface *) NewMCDec();
#endif
	return NULL;
}
GPAC_MODULE_EXPORT
void ShutdownInterface(GF_BaseInterface *ifce)
{
	printf("\nShutdownInterface called");
#ifndef GPAC_DISABLE_AV_PARSERS
	/*tear down the decoder interface created in LoadInterface*/
	if (ifce->InterfaceType == GF_MEDIA_DECODER_INTERFACE) {
		DeleteMCDec((GF_BaseDecoder *)ifce);
	}
#endif
}
GPAC_MODULE_STATIC_DECLARATION( mc )
/*
//select track using MediaExtractor
u32 selectTrack(MCDec *dec)
{
int trackCount = (int) AMediaExtractor_getTrackCount(dec->extractor);
size_t i;
LOGV("Total num. of tracks: %d", trackCount);
for (i = 0; i < trackCount; ++i) {
AMediaFormat *format = AMediaExtractor_getTrackFormat(dec->extractor, i);
const char *mime;
bool b = AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
if(!b) {
LOGE("No mime type");
return -1;
}
else LOGI("Track num.: %d, mime type: %s ", i, mime);
if (!strncmp(mime, "video/", 6)) {
LOGI("Selected Track num.: %d, mime type: %s", i, mime);
AMediaExtractor_selectTrack(dec->extractor, i);
AMediaExtractor_selectTrack(dec->extractor, i);
dec->codec = AMediaCodec_createDecoderByType(mime);
AMediaCodec_configure(dec->codec, format, dec->window, NULL, 0);
AMediaFormat_delete(format);
AMediaCodec_start(dec->codec);
//dec->width = AMediaFormat_getInteger(AMediaFormat_KEY_WIDTH);
//dec->height = AMediaFormat_getInteger(AMediaFormat_KEY_HEIGHT);
LOGI("Video size = %d x %d ",dec->width, dec->height);
AMediaFormat_delete(format);
return i;
}
}
LOGE("Video Track not found");
return -1;
}
*/
/* end of gist */