/*
 * Source: anonymous GitHub Gist 3237e4e78ae4e3f923e9, created 2015-09-08.
 * Two net-clock-synchronised GStreamer RTSP clients displayed with OpenCV.
 * (GitHub page chrome from the scrape wrapped in this comment so the file
 * remains compilable.)
 */
//g++ test-netclock-client2.cpp -o c2 `pkg-config --cflags --libs gstreamer-rtsp-server-1.0 gstreamer-1.0 gstreamer-plugins-base-1.0 gstreamer-net-1.0 gstreamer-rtsp-1.0 gstreamer-app-1.0 opencv`
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
* Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h> /* strcasecmp */
// for timings
#include <time.h>

#include <cv.h>
#include <highgui.h>

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstbuffer.h>
#include <gst/net/gstnet.h>
#include <gst/video/video.h>
using namespace cv;
#define PLAYBACK_DELAY_MS 10
#define WAIT_FOR_MSECS 3 // this is just a delay to simulate some load and show images with opencv
/* "source-setup" handler for uridecodebin: configure the freshly created
 * RTSP source element for network-clock-synchronised playback.
 * NOTE(review): 3 and 4 are raw enum values for rtspsrc's
 * "ntp-time-source" and "buffer-mode" properties — verify against the
 * rtspsrc documentation for the GStreamer version in use. */
static void
source_created (GstElement * pipe, GstElement * source)
{
  g_object_set (source,
      "latency", PLAYBACK_DELAY_MS,
      "ntp-time-source", 3,
      "buffer-mode", 4,
      "ntp-sync", TRUE,
      NULL);
}
/* Bus watch callback shared by both pipelines: logs ERROR and WARNING
 * messages (with optional debug detail) and reports EOS.  Always returns
 * TRUE so the signal watch stays installed. */
static gboolean
message (GstBus * bus, GstMessage * message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_error (message, &err, &debug);
      g_printerr ("ERROR: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);
      g_error_free (err);
      g_free (debug);
      g_free (name);
      break;
    }
    case GST_MESSAGE_WARNING:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_warning (message, &err, &debug);
      /* BUG FIX: this branch parses a warning but printed "ERROR:" */
      g_printerr ("WARNING: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);
      g_error_free (err);
      g_free (debug);
      g_free (name);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("Got EOS\n");
      break;
    default:
      break;
  }
  return TRUE;
}
/* "pad-added" handler for uridecodebin: link each dynamically created
 * decoder pad to the sink pad of the colour-conversion element that was
 * passed as user data.  Silently ignores elements without a "sink" pad. */
void newPad(GstElement *myelement,
GstPad *pad,
gpointer data)
{
  GstElement *converter = (GstElement *) data;

  GstPad *sink_pad = gst_element_get_static_pad (converter, "sink");
  if (sink_pad == NULL)
    return;                     /* no pad named "sink" — nothing to link */

  gst_pad_link (pad, sink_pad);
  gst_object_unref (sink_pad);
}
int
main (int argc, char *argv[])
{
GstClock *net_clock1;
GstClock *net_clock2;
gchar *server1;
gchar *server2;
gint clock_port1;
gint clock_port2;
GstElement *pipeline1;
GstElement *pipeline2;
GstElement *uridecodebin1;
GstElement *uridecodebin2;
GstElement *color1;
GstElement *color2;
GstElement *sink1;
GstElement *sink2;
GstElement *sink1f;
GstElement *sink2f;
GstSample * sample1=NULL;
GstSample * sample2=NULL;
GstBuffer * buffer1=NULL;
GstBuffer * buffer2=NULL;
GstMapInfo* info1=new GstMapInfo;;
GstMapInfo* info2=new GstMapInfo;
gint width;
gint height;
GstClockTime pts1;
GstClockTime prev_pts1;
GstClockTime curtime1;
GstClockTime pts2;
GstClockTime prev_pts2;
GstClockTime curtime2;
GstCaps* caps;
// for opencv display
IplImage* frame1=NULL;
IplImage* frame2=NULL;
Mat diff;
// for timings
struct timespec starttime;
struct timespec lasttime;
gst_init (&argc, &argv);
if (argc < 6) {
g_print ("usage: %s rtsp://URI clock-IP clock-PORT rtsp://URI clock-IP clock-PORT\n"
"example: %s rtsp://localhost:8554/test 127.0.0.1 8554 rtsp://localhost:8554/test 127.0.0.1 8554\n",
argv[0], argv[0]);
return -1;
}
// window for showing data with opencv
cvNamedWindow("1", CV_WINDOW_AUTOSIZE );
cvNamedWindow("2", CV_WINDOW_AUTOSIZE );
cvMoveWindow("1",300,300);
cvMoveWindow("2",300,300);
server1 = argv[2];
clock_port1 = atoi (argv[3]);
server2 = argv[5];
clock_port2 = atoi (argv[6]);
net_clock1 = gst_net_client_clock_new ("net_clock", server1, clock_port1, 0);
net_clock2 = gst_net_client_clock_new ("net_clock", server2, clock_port2, 0);
if (net_clock1 == NULL || net_clock2 == NULL) {
g_print ("Failed to create net clock client for %s:%d or %s:%d\n",
server1, clock_port1,server2, clock_port2);
return 1;
}
g_print ("Clock...");
/* Wait for the clock to stabilise */
gst_clock_wait_for_sync (net_clock1, GST_CLOCK_TIME_NONE);
gst_clock_wait_for_sync (net_clock2, GST_CLOCK_TIME_NONE);
g_print ("done\n");
//loop = g_main_loop_new (NULL, FALSE);
pipeline1 = gst_pipeline_new(NULL);
pipeline2 = gst_pipeline_new(NULL);
uridecodebin1 = gst_element_factory_make ("uridecodebin", NULL);
uridecodebin2 = gst_element_factory_make ("uridecodebin", NULL);
g_object_set (uridecodebin1, "uri", argv[1], NULL);
g_object_set (uridecodebin2, "uri", argv[4], NULL);
g_signal_connect (uridecodebin1, "source-setup", G_CALLBACK (source_created), NULL);
g_signal_connect (uridecodebin2, "source-setup", G_CALLBACK (source_created), NULL);
/* Set this high enough so that it's higher than the minimum latency
* on all receivers */
gst_pipeline_set_latency (GST_PIPELINE (pipeline1), 500 * GST_MSECOND);
gst_pipeline_set_latency (GST_PIPELINE (pipeline2), 500 * GST_MSECOND);
color1 = gst_element_factory_make("autovideoconvert", NULL);
color2 = gst_element_factory_make("autovideoconvert", NULL);
sink1 = gst_element_factory_make("appsink", NULL);
sink2 = gst_element_factory_make("appsink", NULL);
// split pipeline and save to file
//sink1f = gst_element_factory_make("appsink", NULL);
//sink2f = gst_element_factory_make("appsink", NULL);
gst_bin_add_many(GST_BIN(pipeline1), uridecodebin1, color1, sink1, NULL);
gst_bin_add_many(GST_BIN(pipeline2), uridecodebin2, color2, sink2, NULL);
gst_pipeline_use_clock (GST_PIPELINE (pipeline1), net_clock1);
gst_pipeline_use_clock (GST_PIPELINE (pipeline2), net_clock2);
g_signal_connect(uridecodebin1, "pad-added", G_CALLBACK(newPad), color1);
g_signal_connect(uridecodebin2, "pad-added", G_CALLBACK(newPad), color2);
//link elements
if(!gst_element_link(color1, sink1)) {
g_print( "GStreamer: cannot link color -> sink\n");
gst_object_unref(pipeline1);
pipeline1 = NULL;
return -1;
}
//link elements
if(!gst_element_link(color2, sink2)) {
g_print( "GStreamer: cannot link color -> sink\n");
gst_object_unref(pipeline2);
pipeline2 = NULL;
return -1;
}
//TODO: is 1 single buffer really high enough?
gst_app_sink_set_max_buffers (GST_APP_SINK(sink1), 10);
gst_app_sink_set_max_buffers (GST_APP_SINK(sink2), 10);
gst_app_sink_set_drop (GST_APP_SINK(sink1), TRUE);
gst_app_sink_set_drop (GST_APP_SINK(sink2), TRUE);
//do not emit signals: all calls will be synchronous and blocking
gst_app_sink_set_emit_signals (GST_APP_SINK(sink1), 0);
gst_app_sink_set_emit_signals (GST_APP_SINK(sink2), 0);
//#if GST_VERSION_MAJOR == 0
// caps = gst_caps_new_simple("video/x-raw-rgb",
// "bpp", G_TYPE_INT, 24,
// "red_mask", G_TYPE_INT, 0x0000FF,
// "green_mask", G_TYPE_INT, 0x00FF00,
// "blue_mask", G_TYPE_INT, 0xFF0000,
// NULL);
//#else
// support 1 and 3 channel 8 bit data, as well as bayer (also 1 channel, 8bit)
caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
//#endif
gst_app_sink_set_caps(GST_APP_SINK(sink1), caps);
gst_app_sink_set_caps(GST_APP_SINK(sink2), caps);
gst_caps_unref(caps);
// gst_rtp_bin_associate ?????
if (gst_element_set_state (pipeline1,
GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
g_print ("Failed to set state 1 to PLAYING\n");
//here we shiuild go to exit!!!!
return -1;
}
if (gst_element_set_state (pipeline2,
GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
g_print ("Failed to set state 1 to PLAYING\n");
//here we shiuild go to exit!!!!
return -1;
}
gst_bus_add_signal_watch (GST_ELEMENT_BUS (pipeline1));
gst_bus_add_signal_watch (GST_ELEMENT_BUS (pipeline2));
g_signal_connect (GST_ELEMENT_BUS (pipeline1), "message", G_CALLBACK (message),
NULL);
g_signal_connect (GST_ELEMENT_BUS (pipeline2), "message", G_CALLBACK (message),
NULL);
g_print ("Ready for loop!!\n");
//g_main_loop_run (loop);
int frames = 0;
while (1)
{
frames++;
//timing
if (frames == 1)
clock_gettime(CLOCK_MONOTONIC, &starttime);
// get frame
if(sample1)
gst_sample_unref(sample1);
if(sample2)
gst_sample_unref(sample2);
sample1 = gst_app_sink_pull_sample(GST_APP_SINK(sink1));
sample2 = gst_app_sink_pull_sample(GST_APP_SINK(sink2));
if(!sample1 || !sample2)
break;
buffer1 = gst_sample_get_buffer(sample1);
buffer2 = gst_sample_get_buffer(sample2);
if(!buffer1 || !buffer2)
break;
pts1 = buffer1->pts;
pts2 = buffer2->pts;
//construct a frame header if we did not have any yet
if(!frame1)
{
g_print ("Creating frame 1\n");
//some stuff for timing
prev_pts1=pts1;
// get pipeline curtime
curtime1 = gst_element_get_base_time(pipeline1);
GstCaps* buffer_caps = gst_sample_get_caps(sample1);
// bail out in no caps
assert(gst_caps_get_size(buffer_caps) == 1);
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
// bail out if width or height are 0
if(!gst_structure_get_int(structure, "width", &width) ||
!gst_structure_get_int(structure, "height", &height))
{
gst_caps_unref(buffer_caps);
return 0;
}
int depth = 3;
depth = 0;
const gchar* name = gst_structure_get_name(structure);
const gchar* format = gst_structure_get_string(structure, "format");
if (!name || !format)
break;
// we support 3 types of data:
// video/x-raw, format=BGR -> 8bit, 3 channels
// video/x-raw, format=GRAY8 -> 8bit, 1 channel
// video/x-bayer -> 8bit, 1 channel
// bayer data is never decoded, the user is responsible for that
// everything is 8 bit, so we just test the caps for bit depth
if (strcasecmp(name, "video/x-raw") == 0)
{
if (strcasecmp(format, "BGR") == 0) {
depth = 3;
}
else if(strcasecmp(format, "GRAY8") == 0){
depth = 1;
}
}
else if (strcasecmp(name, "video/x-bayer") == 0)
{
depth = 1;
}
if (depth > 0) {
frame1 = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
} else {
gst_caps_unref(buffer_caps);
break;
}
gst_caps_unref(buffer_caps);
}
//construct a frame header if we did not have any yet
if(!frame2)
{
g_print ("Creating frame 2\n");
prev_pts2=pts2;
// get pipeline curtime
curtime2 = gst_element_get_base_time(pipeline2);
GstCaps* buffer_caps = gst_sample_get_caps(sample2);
// bail out in no caps
assert(gst_caps_get_size(buffer_caps) == 1);
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
// bail out if width or height are 0
if(!gst_structure_get_int(structure, "width", &width) ||
!gst_structure_get_int(structure, "height", &height))
{
gst_caps_unref(buffer_caps);
return 0;
}
int depth = 3;
depth = 0;
const gchar* name = gst_structure_get_name(structure);
const gchar* format = gst_structure_get_string(structure, "format");
if (!name || !format)
break;
// we support 3 types of data:
// video/x-raw, format=BGR -> 8bit, 3 channels
// video/x-raw, format=GRAY8 -> 8bit, 1 channel
// video/x-bayer -> 8bit, 1 channel
// bayer data is never decoded, the user is responsible for that
// everything is 8 bit, so we just test the caps for bit depth
if (strcasecmp(name, "video/x-raw") == 0)
{
if (strcasecmp(format, "BGR") == 0) {
depth = 3;
}
else if(strcasecmp(format, "GRAY8") == 0){
depth = 1;
}
}
else if (strcasecmp(name, "video/x-bayer") == 0)
{
depth = 1;
}
if (depth > 0) {
frame2 = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
} else {
gst_caps_unref(buffer_caps);
break;
}
gst_caps_unref(buffer_caps);
}
// gstreamer expects us to handle the memory at this point
// so we can just wrap the raw buffer and be done with it
// the data ptr in GstMapInfo is only valid throughout the mapifo objects life.
// TODO: check if reusing the mapinfo object is ok.
gboolean success = gst_buffer_map(buffer1,info1, (GstMapFlags)GST_MAP_READ);
if (!success){
//something weird went wrong here. abort. abort.
//fprintf(stderr,"GStreamer: unable to map buffer");
break;
}
frame1->imageData = (char*)info1->data;
gst_buffer_unmap(buffer1,info1);
success = gst_buffer_map(buffer2,info2, (GstMapFlags)GST_MAP_READ);
if (!success){
//something weird went wrong here. abort. abort.
//fprintf(stderr,"GStreamer: unable to map buffer");
break;
}
frame2->imageData = (char*)info2->data;
gst_buffer_unmap(buffer2,info2);
if ((frames%3)==0)
{
//absdiff(Mat(frame1,false), Mat(frame2,false), diff);
imshow("1", Mat(frame1,false));
imshow("2", Mat(frame2,false));
//if (std::abs((int)(pts2-pts1))<1e8) // 0.1 secs
//{
// imshow("sync", diff );
//} else {
// imshow("notsync", diff );
//}
}
char c = cvWaitKey(WAIT_FOR_MSECS);
if( c == 27 )
{
break;
}
clock_gettime(CLOCK_MONOTONIC, &lasttime);
g_print ("Frames %d sync: %lf ms start_time %lf s FPS 1: ~%lf FPS 2: ~%lf FPS prog: ~%lf \n", frames , ((double)((int)(pts2-pts1))/1000000.), ((double)((int)(curtime2-curtime1))/1000000000.), 1./(((double)(pts1-prev_pts1))/1000000000.), 1./(((double)(pts2-prev_pts2))/1000000000.),
((double)frames)/(((double)lasttime.tv_sec + 1.0e-9*lasttime.tv_nsec) - ((double)starttime.tv_sec + 1.0e-9*starttime.tv_nsec)) );
prev_pts1=pts1;
prev_pts2=pts2;
}
gst_element_set_state (pipeline1, GST_STATE_NULL);
gst_element_set_state (pipeline2, GST_STATE_NULL);
gst_object_unref (pipeline1);
gst_object_unref (pipeline2);
return 0;
}
/* End of file (GitHub Gist page footer removed). */