Created
June 21, 2015 04:51
-
-
Save mohan43u/e82265a875e7cb8e1aef to your computer and use it in GitHub Desktop.
Example of how to use GStreamer Editing Services (GES) to concatenate and render media files.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <stdlib.h>
#include <string.h>
#include <glib-unix.h>
#include <gst/gst.h>
#include <ges/ges.h>
/* Interval between pipeline position queries, in milliseconds as expected by
 * g_timeout_add() (10 min = 600 s = 600000 ms). Parenthesized so the macro
 * expands safely inside larger expressions (e.g. division or multiplication). */
#define QUERY_POSITION_INTERVAL (10 * 60 * 1000)
/* Aggregates the GES objects needed to concatenate and render media clips.
 * Created by mediacat_new(); the pipeline owns the timeline (set via
 * ges_pipeline_set_timeline in mediacat_new) and the timeline owns the layer. */
typedef struct {
GESPipeline *pipeline;   /* rendering/preview pipeline driving the timeline */
GESTimeline *timeline;   /* audio+video timeline the clips are placed on */
GESLayer *layer;         /* single layer all clips are appended to */
GstBus *bus;             /* pipeline bus, watched by mediacat_bus_cb */
GMainLoop *mainloop;     /* run by mediacat_run, quit by mediacat_stop */
} Mediacat;
/* Initialize GStreamer and GES, consuming GStreamer-specific command-line
 * options from argc/argv. Exits the process if GStreamer cannot be
 * initialized, since nothing else in this program can work without it
 * (the original ignored the gst_init_check() result). */
void mediacat_init(int *argc, gchar **argv[]) {
GError *error = NULL;
if(!gst_init_check(argc, argv, &error)) {
g_printerr("failed to initialize GStreamer: %s\n", error ? error->message : "unknown error");
g_clear_error(&error);
exit(1);
}
ges_init();
}
/* GSource callback fired every QUERY_POSITION_INTERVAL ms: logs the current
 * pipeline position with a local timestamp. Returns TRUE so the timeout
 * keeps firing. Fix: g_date_time_format() returns a newly allocated string
 * (transfer full) which the original leaked on every invocation. */
static gboolean mediacat_pipeline_query_position(gpointer user_data) {
Mediacat *mediacat = (Mediacat *) user_data;
gint64 position = 0;
GDateTime *now = g_date_time_new_now_local();
if(gst_element_query_position(GST_ELEMENT(mediacat->pipeline), GST_FORMAT_TIME, &position)) {
gchar *stamp = g_date_time_format(now, "%Y-%m-%d %H:%M:%S");
g_printerr("[%s] Pipeline Position: %" GST_TIME_FORMAT "\n", stamp, GST_TIME_ARGS(position));
g_free(stamp); /* g_date_time_format is transfer-full; was leaked before */
}
g_date_time_unref(now);
return TRUE; /* keep the timeout source alive */
}
/* Start playback/rendering and block in the main loop until mediacat_stop()
 * is called (normally from the EOS bus message). Fix: the original ignored
 * the gst_element_set_state() result; on GST_STATE_CHANGE_FAILURE it would
 * enter the main loop and hang forever with a dead pipeline. */
void mediacat_run(Mediacat *mediacat) {
if(gst_element_set_state(GST_ELEMENT(mediacat->pipeline), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
g_printerr("failed to set pipeline to PLAYING state\n");
return;
}
g_timeout_add(QUERY_POSITION_INTERVAL, (GSourceFunc) mediacat_pipeline_query_position, mediacat);
g_main_loop_run(mediacat->mainloop);
}
/* Tear the pipeline down to NULL state, then quit the main loop so
 * mediacat_run() returns. Order matters: the pipeline is shut down first so
 * no more bus messages or position queries race with the loop exiting. */
void mediacat_stop(Mediacat *mediacat) {
gst_element_set_state(GST_ELEMENT(mediacat->pipeline), GST_STATE_NULL);
g_main_loop_quit(mediacat->mainloop);
}
/* Bus watch: stops on EOS, logs state changes, and logs every other message.
 * Returns TRUE to keep the watch installed.
 * Fix: the parameter was declared `gpointer *data`, but GstBusFunc's third
 * parameter is `gpointer user_data`; installing it via a (GstBusFunc) cast
 * meant calling through an incompatible function-pointer type, which is
 * undefined behavior. The signature now matches GstBusFunc exactly. */
static gboolean mediacat_bus_cb(GstBus *bus, GstMessage *message, gpointer data) {
Mediacat *mediacat = (Mediacat *) data;
switch(GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_EOS:
/* rendering/playback finished: shut down and leave the main loop */
mediacat_stop(mediacat);
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state;
GstState new_state;
gst_message_parse_state_changed(message, &old_state, &new_state, NULL);
g_printerr("%d: state changed from %s to %s on %s\n",
GST_MESSAGE_SEQNUM(message),
gst_element_state_get_name(old_state),
gst_element_state_get_name(new_state),
GST_MESSAGE_SRC_NAME(message));
break;
}
default:
g_printerr("%d: received %s from %s..\n",
GST_MESSAGE_SEQNUM(message),
GST_MESSAGE_TYPE_NAME(message),
GST_MESSAGE_SRC_NAME(message));
break;
}
return TRUE; /* keep watching the bus */
}
Mediacat* mediacat_new() { | |
Mediacat *mediacat = g_new0(Mediacat, 1); | |
mediacat->pipeline = ges_pipeline_new(); | |
mediacat->timeline = ges_timeline_new_audio_video(); | |
mediacat->layer = ges_timeline_append_layer(mediacat->timeline); | |
mediacat->bus = gst_pipeline_get_bus(GST_PIPELINE(mediacat->pipeline)); | |
mediacat->mainloop = g_main_loop_new(NULL, TRUE); | |
ges_pipeline_set_timeline(mediacat->pipeline, mediacat->timeline); | |
gst_bus_add_watch_full(mediacat->bus, G_PRIORITY_DEFAULT, (GstBusFunc) mediacat_bus_cb, mediacat, NULL); | |
return mediacat; | |
} | |
/* Release everything mediacat_new() acquired. Fix: the original freed only
 * the struct, leaking the bus ref, the main loop, and the pipeline (which
 * owns the timeline and layer set via ges_pipeline_set_timeline, so a single
 * pipeline unref releases those too). Safe to call with NULL. */
void mediacat_delete(Mediacat* mediacat) {
if(mediacat == NULL) return;
gst_object_unref(mediacat->bus);          /* ref from gst_pipeline_get_bus */
g_main_loop_unref(mediacat->mainloop);
gst_object_unref(mediacat->pipeline);     /* also releases timeline + layer */
g_free(mediacat);
}
/* Convert a "[hh:][mm:]ss" time string into a GstClockTime (nanoseconds).
 * The rightmost field is seconds, the next is minutes, the next is hours;
 * missing or empty fields count as zero. */
static GstClockTime mediacat_timestring_to_gstclocktime(gchar *timestring) {
gchar **fields = g_strsplit(timestring, ":", -1);
gint count = g_strv_length(fields);
static const gdouble unit_seconds[3] = { 1.0, 60.0, 3600.0 };
gdouble total = 0;
gint unit;
for(unit = 0; unit < 3; unit++) {
gint idx = count - 1 - unit;
if(idx >= 0 && fields[idx])
total += g_strtod(fields[idx], NULL) * unit_seconds[unit];
}
g_strfreev(fields);
return (GstClockTime) (total * GST_SECOND);
}
/* Append the clip at `uri` to the single timeline layer, trimmed to the
 * optional [starttime, endtime] window ("hh:mm:ss" strings; NULL means
 * clip start / clip end respectively), then commit the timeline.
 * Fixes: the original passed NULL for the GError and never checked the
 * asset for NULL, so an unreadable URI crashed later on a NULL deref; the
 * asset returned by ges_uri_clip_asset_request_sync (transfer full) was
 * also never unreffed. */
void mediacat_add_media(Mediacat *mediacat, gchar *uri, gchar *starttime, gchar *endtime) {
GError *error = NULL;
GESUriClipAsset *media = ges_uri_clip_asset_request_sync(uri, &error);
GstClockTime start = GST_CLOCK_TIME_NONE;
GstClockTime end = GST_CLOCK_TIME_NONE;
GstClockTime duration = GST_CLOCK_TIME_NONE;
if(media == NULL) {
g_printerr("failed to load media '%s': %s\n", uri, error ? error->message : "unknown error");
g_clear_error(&error);
return;
}
start = (starttime ? mediacat_timestring_to_gstclocktime(starttime) : 0);
end = (endtime ? mediacat_timestring_to_gstclocktime(endtime) : ges_uri_clip_asset_get_duration(media));
duration = (end - start);
/* GST_CLOCK_TIME_NONE start => append after the last clip on the layer */
ges_layer_add_asset(mediacat->layer, GES_ASSET(media), GST_CLOCK_TIME_NONE, start, duration, GES_TRACK_TYPE_UNKNOWN);
ges_timeline_commit(mediacat->timeline);
g_object_unref(media); /* request_sync is transfer-full; was leaked before */
}
/* Build a container encoding profile holding one audio and one video stream
 * profile, each described by a caps string (e.g. "application/ogg",
 * "audio/x-vorbis", "video/x-theora"). Returned profile is caller-owned. */
static GstEncodingProfile* mediacat_gen_profile(gchar *container, gchar *audio, gchar *video) {
GstEncodingContainerProfile *profile = NULL;
GstCaps *container_caps = gst_caps_from_string(container);
GstCaps *audio_caps = NULL;
GstCaps *video_caps = NULL;
profile = gst_encoding_container_profile_new(NULL, NULL, container_caps, NULL);
gst_caps_unref(container_caps);
audio_caps = gst_caps_from_string(audio);
gst_encoding_container_profile_add_profile(profile, (GstEncodingProfile *) gst_encoding_audio_profile_new(audio_caps, NULL, NULL, 0));
gst_caps_unref(audio_caps);
video_caps = gst_caps_from_string(video);
gst_encoding_container_profile_add_profile(profile, (GstEncodingProfile *) gst_encoding_video_profile_new(video_caps, NULL, NULL, 0));
gst_caps_unref(video_caps);
return (GstEncodingProfile *) profile;
}
/* Configure the pipeline to smart-render to `uri`, which has the form
 * "outputuri[^container[^audiocaps[^videocaps]]]"; missing caps fields fall
 * back to Ogg/Vorbis/Theora. Fix: ges_pipeline_set_render_settings takes its
 * own reference on the profile (transfer none), so the profile returned by
 * mediacat_gen_profile was leaked; keep a handle and unref it afterwards. */
void mediacat_set_output(Mediacat *mediacat, gchar *uri) {
gchar **uriv = g_strsplit(uri, "^", -1);
guint uric = g_strv_length(uriv);
gchar *container = (uric > 1 && uriv[1] && strlen(uriv[1]) > 0 ? uriv[1] : "application/ogg");
gchar *audio = (uric > 2 && uriv[2] && strlen(uriv[2]) > 0 ? uriv[2] : "audio/x-vorbis");
gchar *video = (uric > 3 && uriv[3] && strlen(uriv[3]) > 0 ? uriv[3] : "video/x-theora");
GstEncodingProfile *profile = mediacat_gen_profile(container, audio, video);
ges_pipeline_set_render_settings(mediacat->pipeline, uriv[0], profile);
ges_pipeline_set_mode(mediacat->pipeline, GES_PIPELINE_MODE_SMART_RENDER);
gst_encoding_profile_unref(profile); /* pipeline holds its own ref now */
g_strfreev(uriv);
}
int main(int argc, char *argv[]) { | |
Mediacat *mediacat = NULL; | |
int iter = 2; | |
if(argc < 2 || g_strcmp0("--help", argv[1]) == 0 || g_strcmp0("-h", argv[1]) == 0) { | |
g_printerr("[usage] %s " | |
"<outputuri>[^<container(default:application/ogg)>^<audio(default:audio/x-vorbis>^<video(default:video/x-theora>] " | |
"<inputuri>[^starttime(format:hh:mm:ss)^endtime(format:hh:mm:ss)] [...]\n", | |
argv[0]); | |
return 1; | |
} | |
mediacat_init(&argc, &argv); | |
mediacat = mediacat_new(); | |
while(argv[iter]) { | |
gchar **uriv = g_strsplit(argv[iter], "^", -1); | |
guint uric = g_strv_length(uriv); | |
gchar *uri = (uric > 0 && uriv[0] && strlen(uriv[0]) > 0 ? uriv[0] : NULL); | |
gchar *starttime = (uric > 1 && uriv[1] && strlen(uriv[1]) > 0 ? uriv[1] : NULL); | |
gchar *endtime = (uric > 2 && uriv[2] && strlen(uriv[2]) > 0 ? uriv[2] : NULL); | |
mediacat_add_media(mediacat, uri, starttime, endtime); | |
g_strfreev(uriv); | |
iter++; | |
} | |
mediacat_set_output(mediacat, argv[1]); | |
mediacat_run(mediacat); | |
mediacat_delete(mediacat); | |
return 0; | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment