Skip to content

Instantly share code, notes, and snippets.

@chichunchen
Created July 23, 2018 19:10
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save chichunchen/6b06130e10b32df04f461729038634e7 to your computer and use it in GitHub Desktop.
#include <gst/gst.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin; /* The top-level pipeline (created with gst_pipeline_new, despite the name) */
gboolean playing; /* Are we in the PLAYING state? (updated from STATE_CHANGED bus messages) */
gboolean terminate; /* Should we terminate execution? (set on ERROR/EOS) */
gboolean seek_enabled; /* Is seeking enabled for this media? (from a seeking query) */
gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds (GST_CLOCK_TIME_NONE if unknown) */
} CustomData;
/* Main loop; global so handle_message() can quit it on EOS/error. */
GMainLoop *loop;
/* Pipeline elements, created in setup_video_pipeline().
 * NOTE(review): `pipeline` and `conv` are declared but never assigned here. */
GstElement *pipeline, *source, *demuxer, *parser, *decoder, *conv, *sink;
/* Result of the most recent gst_element_set_state() call. */
GstStateChangeReturn ret;
/* Shared playback state (see CustomData above). */
CustomData data;
/* NOTE(review): appears unused in this file — TODO confirm before removing. */
int count = 2;
/* Forward declarations. */
static void handle_message (CustomData *data, GstMessage *msg);
void setup_video_pipeline(GMainLoop *loop, GstBus *bus, CustomData *data, const char *filename);
/*
 * "pad-added" handler for the qtdemux element: links the demuxer's newly
 * created source pad to the h264parse sink pad.
 *
 * element: the demuxer that emitted the signal (unused).
 * pad:     the new source pad.
 * data:    the h264parse element, supplied at g_signal_connect time.
 */
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstPad *sinkpad;
    GstCaps *caps;
    GstElement *parser = (GstElement *) data;

    g_print ("Dynamic pad created, linking demuxer/parser\n");

    /* Log the pad's caps for debugging. Both gst_pad_get_current_caps() and
     * gst_caps_to_string() transfer ownership to us, so release both
     * (the original leaked the caps ref, the string, and two dead locals). */
    caps = gst_pad_get_current_caps (pad);
    if (caps != NULL) {
        gchar *caps_str = gst_caps_to_string (caps);
        g_print ("%s\n", caps_str);
        g_free (caps_str);
        gst_caps_unref (caps);
    }

    /* Link the demuxer's new source pad to the parser's sink pad. */
    sinkpad = gst_element_get_static_pad (parser, "sink");
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
        g_printerr ("Failed to link demuxer pad to parser sink pad.\n");
    gst_object_unref (sinkpad);
}
/*
 * "pad-added" handler for the h264parse element: links the parser's newly
 * created source pad to the decoder's sink pad.
 *
 * element: the parser that emitted the signal (unused).
 * pad:     the new source pad.
 * data:    the decoder element, supplied at g_signal_connect time.
 */
static void on_pad_added_parser (GstElement *element, GstPad *pad, gpointer data)
{
    GstPad *sinkpad;
    GstCaps *caps;
    GstElement *decoder = (GstElement *) data;

    gst_pad_use_fixed_caps (pad);

    g_print ("Dynamic pad created, linking parser/decoder\n");

    /* Log the caps; free both the caps ref and the string we own
     * (the original leaked both and passed a possibly-NULL caps to
     * gst_caps_to_string). */
    caps = gst_pad_get_current_caps (pad);
    if (caps != NULL) {
        gchar *caps_str = gst_caps_to_string (caps);
        g_print ("caps: %s\n", caps_str);
        g_free (caps_str);
        gst_caps_unref (caps);
    }

    sinkpad = gst_element_get_static_pad (decoder, "sink");
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
        g_printerr ("Failed to link parser pad to decoder sink pad.\n");
    gst_object_unref (sinkpad);
    /* NOTE: removed the original's gst_element_link(element, decoder) here —
     * the pads were just linked directly above; linking again is redundant. */
}
/**
 * Set up a pipeline for decoding and playing one video file.
 *
 * loop:     the main loop (currently unused; kept for the bus-watch variant).
 * bus:      scratch bus pointer (passed by value; the caller's copy is not
 *           updated — kept only for interface compatibility).
 * data:     shared state; data->playbin receives the new pipeline.
 * filename: path of the MP4 file to play.
 *
 * Exits the process on fatal setup errors.
 */
void setup_video_pipeline(GMainLoop *loop, GstBus *bus, CustomData *data, const char *filename) {
    /* Create the elements. */
    data->playbin = gst_pipeline_new ("mp4-player");
    source  = gst_element_factory_make ("filesrc",       "file-source");
    demuxer = gst_element_factory_make ("qtdemux",       "demuxer");
    parser  = gst_element_factory_make ("h264parse",     "parser");
    decoder = gst_element_factory_make ("omxh264dec",    "decoder");
    sink    = gst_element_factory_make ("nveglglessink", "video-output");
    if (!data->playbin || !source || !demuxer || !parser || !decoder || !sink) {
        g_printerr ("One element could not be created. Exiting.\n");
        /* BUG FIX: the original printed this and fell through, dereferencing
         * NULL elements below. */
        exit (1);
    }

    g_object_set (G_OBJECT (source), "location", filename, NULL);

    /* Add all elements into the pipeline and link the static parts.
     * demuxer->parser and parser->decoder pads appear dynamically and are
     * linked in the pad-added callbacks below. */
    gst_bin_add_many (GST_BIN (data->playbin), source, demuxer, parser, decoder, sink, NULL);
    if (!gst_element_link (source, demuxer) ||
        !gst_element_link_many (parser, decoder, sink, NULL)) {
        g_printerr ("Elements could not be linked. Exiting.\n");
        gst_object_unref (data->playbin);
        exit (1);
    }
    g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), parser);
    g_signal_connect (parser, "pad-added", G_CALLBACK (on_pad_added_parser), decoder);

    /* Add a message handler */
    bus = gst_pipeline_get_bus (GST_PIPELINE (data->playbin));
    //gst_bus_add_watch (bus, bus_call, loop);
    /* No watch installed, so drop our ref (the original leaked it). */
    gst_object_unref (bus);

    /* Start playing */
    g_print ("Now playing: %s\n", filename);
    ret = gst_element_set_state (data->playbin, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (data->playbin);
        exit(1);
    }
}
/**
 * Skip to `seconds` into the stream by polling the pipeline bus.
 *
 * Loops with a 100 ms timeout: bus messages are dispatched to
 * handle_message(); on timeout, once the pipeline is PLAYING and the stream
 * is seekable, a single flushing seek to `seconds` is issued.
 * Returns when data->terminate becomes TRUE.
 *
 * bus:     scratch pointer (overwritten; kept for interface compatibility).
 * data:    shared playback state.
 * seconds: target position, in whole seconds.
 */
void skip_by_seconds (GstBus *bus, CustomData *data, int seconds) {
    GstMessage *msg;

    /* Listen to the bus (we own this ref and must release it). */
    bus = gst_element_get_bus (data->playbin);
    do {
        msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
            GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
        if (msg != NULL) {
            handle_message (data, msg);  /* handle_message unrefs msg */
        } else {
            /* No message: the timeout expired. Perform the one-shot seek once
             * we know the pipeline is PLAYING and seekable. */
            if (data->playing && data->seek_enabled && !data->seek_done) {
                gst_element_seek (data->playbin,
                    1.0,                    /* normal playback rate */
                    GST_FORMAT_TIME,
                    GST_SEEK_FLAG_FLUSH,
                    GST_SEEK_TYPE_SET,
                    seconds * GST_SECOND,   /* new start position */
                    GST_SEEK_TYPE_NONE,
                    -1);                    /* keep current stop position */
                g_print ("\nStart from %d sec...\n", seconds);
                data->seek_done = TRUE;
            }
        }
    } while (!data->terminate);

    /* BUG FIX: the original leaked this bus reference. */
    gst_object_unref (bus);
}
/*
 * Entry point: initialize GStreamer, build the pipeline for a hard-coded
 * file, and run the main loop until handle_message() quits it.
 */
int main(int argc, char *argv[]) {
    GstBus *bus = NULL;  /* BUG FIX: was passed uninitialized below */

    /* Initialize GStreamer before any other GStreamer/GLib-object work. */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    data.playing = FALSE;
    data.terminate = FALSE;
    data.seek_enabled = FALSE;
    data.seek_done = FALSE;
    data.duration = GST_CLOCK_TIME_NONE;

    setup_video_pipeline(loop, bus, &data, "rhino/output_1.mp4");
    g_main_loop_run (loop);
    //skip_by_seconds (bus, &data, 0.5);

    /* Free resources */
    gst_element_set_state (data.playbin, GST_STATE_NULL);
    gst_object_unref (data.playbin);
    g_main_loop_unref (loop);  /* the original leaked the main loop */
    return 0;
}
/*
 * Dispatch one bus message: record errors/EOS, invalidate the cached
 * duration, and probe seekability when the pipeline reaches PLAYING.
 * Takes ownership of `msg` and unrefs it before returning.
 */
static void handle_message (CustomData *data, GstMessage *msg) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
            gst_message_parse_error (msg, &err, &debug_info);
            g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
            g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
            g_clear_error (&err);
            g_free (debug_info);
            data->terminate = TRUE;
            /* BUG FIX: without this, main() blocked in g_main_loop_run()
             * forever after an error (only EOS quit the loop). */
            g_main_loop_quit (loop);
            break;
        case GST_MESSAGE_EOS:
            g_print ("End-Of-Stream reached.\n");
            data->terminate = TRUE;
            g_main_loop_quit (loop);
            break;
        case GST_MESSAGE_DURATION:
            /* The duration has changed; mark the cached one as invalid.
             * NOTE(review): GStreamer 1.x names this GST_MESSAGE_DURATION_CHANGED
             * — confirm which API version this builds against. */
            data->duration = GST_CLOCK_TIME_NONE;
            break;
        case GST_MESSAGE_STATE_CHANGED: {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            /* Only react to state changes of the pipeline itself. */
            if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
                g_print ("Pipeline state changed from %s to %s:\n",
                    gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
                /* Remember whether we are in the PLAYING state or not. */
                data->playing = (new_state == GST_STATE_PLAYING);
                if (data->playing) {
                    /* We just moved to PLAYING: check whether seeking is possible. */
                    GstQuery *query;
                    gint64 start, end;
                    query = gst_query_new_seeking (GST_FORMAT_TIME);
                    if (gst_element_query (data->playbin, query)) {
                        gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
                        if (data->seek_enabled) {
                            g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                                GST_TIME_ARGS (start), GST_TIME_ARGS (end));
                        } else {
                            g_print ("Seeking is DISABLED for this stream.\n");
                        }
                    } else {
                        g_printerr ("Seeking query failed.");
                    }
                    gst_query_unref (query);
                }
            }
        } break;
        default:
            /* We should not reach here: the pop filter only admits the types above. */
            g_printerr ("Unexpected message received.\n");
            break;
    }
    gst_message_unref (msg);
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment