@cprakashagr
Created February 26, 2016 05:24
Simple video scaler which decodes a file, scales the video frames, and encodes them back into a new file.
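Note: the code targets the legacy FFmpeg API (av_register_all, the per-stream AVStream->codec contexts, avcodec_copy_context, avpicture_*, and avcodec_decode_video2/avcodec_encode_video2). These calls were deprecated across the FFmpeg 3.x/4.x releases and removed in FFmpeg 5.0, so building against a modern FFmpeg requires porting to AVCodecParameters and the avcodec_send_*/avcodec_receive_* API.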
#include <jni.h>
#include <android/log.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
#include <libavutil/timestamp.h>
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
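/*
 * Pipeline: demux the input, mirror every stream into the output context,
 * decode the video stream, run each frame through swscale, re-encode it, and
 * interleave the resulting packets with the pass-through audio packets.
 */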
int doMain(const char *inFileName, const char *outFileName) {
    AVFormatContext *pFormatCtx = NULL, *pFormatCtxOutput = NULL;
    AVOutputFormat *ofmt = NULL;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec = NULL;
    AVFrame *pFrame, *pFrameNew = NULL;
    AVPacket packet, packetNew;
    AVDictionary *optionsDict = NULL;
    struct SwsContext *swsCtx = NULL;
    uint8_t *buffer = NULL;
    int i, j = 0, videoStream = -1, frameFinished, numBytes;
    const char *TAG = "Video Processor - Glider";
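    // Register all muxers, demuxers and codecs (required on FFmpeg < 4.0;
    // deprecated and a no-op on later releases).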
    av_register_all();
    if (avformat_open_input(&pFormatCtx, inFileName, NULL, NULL) != 0) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Couldn't open the input file. Exiting!");
        exit(1);
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Couldn't find stream information. Exiting!");
        exit(1);
    }
    av_dump_format(pFormatCtx, 0, inFileName, 0);

    // Create the output context
    avformat_alloc_output_context2(&pFormatCtxOutput, NULL, NULL, outFileName);
    if (!pFormatCtxOutput) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Cannot open output context. Exiting!");
        exit(1);
    }
    ofmt = pFormatCtxOutput->oformat;
    __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Format found: %s", ofmt->name);
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        AVStream *inStream = pFormatCtx->streams[i];
        AVStream *outStream = avformat_new_stream(pFormatCtxOutput, inStream->codec->codec);
        if (!outStream) {
            __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Failed allocating output stream %d. Exiting!", i);
            exit(1);
        }
        if (avcodec_copy_context(outStream->codec, inStream->codec) < 0) {
            __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Failed to copy the codec context from input to output. Exiting!");
            exit(1);
        }
        outStream->codec->codec_tag = 0;
        if (pFormatCtxOutput->oformat->flags & AVFMT_GLOBALHEADER) {
            outStream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
        }
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Found stream at %d: %d", i, inStream->codec->codec_type);
        if (inStream->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Found video stream at %d", i);
            videoStream = i;
        }
    }
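    /* Note: at this point the output video stream's codec context has only
     * been copied from the input; it is opened with an actual encoder further
     * below, once the decoder side is set up. */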
    // Dump the format for the output file
    av_dump_format(pFormatCtxOutput, 0, outFileName, 1);

    // Open the output file
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        if ((avio_open(&pFormatCtxOutput->pb, outFileName, AVIO_FLAG_WRITE)) < 0) {
            __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Could not open the output file. Exiting!");
            exit(1);
        }
    }

    // Write the container header:
    if (avformat_write_header(pFormatCtxOutput, NULL) < 0) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Error on writing the header. Exiting!");
        exit(1);
    }

    if (videoStream == -1) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Video stream is not available. Exiting!");
        exit(1);
    }
    // Set up the decoder for the input video stream
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "The codec is not supported. Exiting!");
        exit(1);
    }
    if (avcodec_open2(pCodecCtx, pCodec, &optionsDict) < 0) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Couldn't open the decoder. Exiting!");
        exit(1);
    }

    // The output codec context was only copied from the input above; give it a
    // valid time base and open it with an encoder, since avcodec_encode_video2()
    // cannot be called on an unopened context.
    AVCodecContext *pEncCtx = pFormatCtxOutput->streams[videoStream]->codec;
    pEncCtx->time_base = pFormatCtx->streams[videoStream]->time_base;
    AVCodec *pEncoder = avcodec_find_encoder(pCodecCtx->codec_id);
    if (pEncoder == NULL || avcodec_open2(pEncCtx, pEncoder, NULL) < 0) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Couldn't open the encoder. Exiting!");
        exit(1);
    }
    pFrame = av_frame_alloc();
    pFrameNew = av_frame_alloc();
    if (pFrame == NULL || pFrameNew == NULL) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Couldn't allocate the frames. Exiting!");
        exit(1);
    }
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Width: %d, Height: %d...", pCodecCtx->width, pCodecCtx->height);

    numBytes = avpicture_get_size(pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "buffer: %d bytes", numBytes);

    // Attach the buffer to pFrameNew once; the plane pointers stay the same
    // for every frame, so this does not need to be repeated inside the loop.
    avpicture_fill((AVPicture *) pFrameNew, buffer, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);
    pFrameNew->width = pCodecCtx->width;
    pFrameNew->height = pCodecCtx->height;
    pFrameNew->format = pCodecCtx->pix_fmt;
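    /* The scaling context below uses identical source and destination
     * dimensions and pixel format, so as written it performs an identity
     * copy. To actually resize, pass the target width/height (and optionally
     * a different pixel format) as the destination parameters. */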
    swsCtx = sws_getContext(
            pCodecCtx->width,
            pCodecCtx->height,
            pCodecCtx->pix_fmt,
            pCodecCtx->width,
            pCodecCtx->height,
            pCodecCtx->pix_fmt,
            SWS_BICUBIC, NULL, NULL, NULL);
    if (!swsCtx) {
        __android_log_print(ANDROID_LOG_VERBOSE, TAG, "Couldn't create the scaling context for the given resolution. Exiting!");
        exit(1);
    }
    while (1) {
        AVStream *in_stream, *out_stream;
        int ret, got_packet;

        ret = av_read_frame(pFormatCtx, &packet);
        if (ret < 0) {
            break;
        }
        in_stream = pFormatCtx->streams[packet.stream_index];
        out_stream = pFormatCtxOutput->streams[packet.stream_index];

        /*
         * Audio packets are copied through unchanged; for the video stream,
         * new packets are generated from the rescaled frames below.
         */
        packet.pts = av_rescale_q_rnd(packet.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        packet.dts = av_rescale_q_rnd(packet.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        packet.duration = av_rescale_q(packet.duration, in_stream->time_base, out_stream->time_base);
        packet.pos = -1;
        if (packet.stream_index == videoStream) {
            __android_log_print(ANDROID_LOG_DEBUG, TAG, "%d", ++j);
            // Decode the packet into pFrame, scale into pFrameNew, then
            // re-encode pFrameNew into a fresh packet below.
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if (frameFinished) {
                __android_log_print(ANDROID_LOG_DEBUG, TAG, "%d, frame complete", j);
                sws_scale(
                        swsCtx,
                        (const uint8_t *const *) pFrame->data,
                        pFrame->linesize,
                        0,
                        pCodecCtx->height,
                        pFrameNew->data,
                        pFrameNew->linesize);
                __android_log_print(ANDROID_LOG_DEBUG, TAG, "%d, scaling done", j);
                // Encode the scaled frame back into a packet.
                av_init_packet(&packetNew);
                packetNew.data = NULL;  // let the encoder allocate the payload
                packetNew.size = 0;
                // Carry the decoded frame's timestamp over (in the input
                // stream's time base, matching the encoder time base set above).
                pFrameNew->pts = av_frame_get_best_effort_timestamp(pFrame);
                __android_log_print(ANDROID_LOG_DEBUG, TAG, "%d, packet initiated", j);
                ret = avcodec_encode_video2(out_stream->codec, &packetNew, pFrameNew, &got_packet);
                __android_log_print(ANDROID_LOG_DEBUG, TAG, "%d, encoding done", j);
                if (ret < 0) {
                    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Error encoding video frame: %s\n", av_err2str(ret));
                    exit(1);
                }
                if (got_packet) {
                    // The encoder produced timestamps in the input stream's
                    // time base; rescale them to the output stream's time base
                    // and tag the packet with the right stream before muxing.
                    av_packet_rescale_ts(&packetNew, in_stream->time_base, out_stream->time_base);
                    packetNew.stream_index = videoStream;
                    ret = av_interleaved_write_frame(pFormatCtxOutput, &packetNew);
                    av_packet_unref(&packetNew);
                } else {
                    ret = 0;
                }
                if (ret < 0) {
                    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Error while writing video frame: %s\n", av_err2str(ret));
                    exit(1);
                }
            }
        } else {
            // Non-video (e.g. audio) packets are remuxed as-is, with their
            // timestamps already rescaled above.
            ret = av_interleaved_write_frame(pFormatCtxOutput, &packet);
            if (ret < 0) {
                break;
            }
        }
        av_packet_unref(&packet);
    }
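    /* Note: for completeness the decoder and encoder should also be flushed
     * here (avcodec_decode_video2()/avcodec_encode_video2() with a NULL
     * packet/frame until no more output is produced), so that frames still
     * buffered inside the codecs are not dropped before the trailer. */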
    av_write_trailer(pFormatCtxOutput);

    // Clean up
    sws_freeContext(swsCtx);
    av_free(buffer);
    av_frame_free(&pFrame);
    av_frame_free(&pFrameNew);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    if (pFormatCtxOutput && !(ofmt->flags & AVFMT_NOFILE))
        avio_closep(&pFormatCtxOutput->pb);
    avformat_free_context(pFormatCtxOutput);
    return 0;
}
JNIEXPORT jstring JNICALL Java_com_cprakashagr_videoeditor_MainActivity_getFromNative(JNIEnv *env, jobject javaThis, jstring in_fileName, jstring out_fileName) {
    const char *inPath = (*env)->GetStringUTFChars(env, in_fileName, NULL);
    const char *outPath = (*env)->GetStringUTFChars(env, out_fileName, NULL);
    doMain(inPath, outPath);
    // Release the UTF-8 copies obtained above so they are not leaked.
    (*env)->ReleaseStringUTFChars(env, in_fileName, inPath);
    (*env)->ReleaseStringUTFChars(env, out_fileName, outPath);
    return (*env)->NewStringUTF(env, "Hello from native code!");
}