-
-
Save patrickelectric/5dca1cb7cef4ffa7fbb6fb70dd9f9edc to your computer and use it in GitHub Desktop.
/** | |
* Based on: | |
* https://stackoverflow.com/questions/10403588/adding-opencv-processing-to-gstreamer-application | |
*/ | |
// Include atomic std library | |
#include <atomic> | |
// Include gstreamer library | |
#include <gst/gst.h> | |
#include <gst/app/app.h> | |
// Include OpenCV library | |
#include <opencv.hpp> | |
// Latest decoded frame, published by the GStreamer streaming thread
// (new_sample) and consumed by the main loop. Only the pointer swap is
// atomic. NOTE(review): load() in main plus delete in new_sample can still
// race — the main thread may be displaying a frame that the callback thread
// deletes; std::atomic<std::shared_ptr> (C++20) would close that gap.
std::atomic<cv::Mat*> atomicFrame;
/**
 * @brief Preroll callback, invoked when the sink receives the first frame
 * while going to PAUSED.
 * https://gstreamer.freedesktop.org/documentation/design/preroll.html
 * @return GstFlowReturn GST_FLOW_OK — nothing to do here; the sample is
 *         pulled later by new_sample.
 */
GstFlowReturn new_preroll(GstAppSink* /*appsink*/, gpointer /*data*/)
{
    return GST_FLOW_OK;
}
/** | |
* @brief This is a callback that get a new frame when a preroll exist | |
* | |
* @param appsink | |
* @return GstFlowReturn | |
*/ | |
GstFlowReturn new_sample(GstAppSink *appsink, gpointer /*data*/) | |
{ | |
static int framecount = 0; | |
// Get caps and frame | |
GstSample *sample = gst_app_sink_pull_sample(appsink); | |
GstCaps *caps = gst_sample_get_caps(sample); | |
GstBuffer *buffer = gst_sample_get_buffer(sample); | |
GstStructure *structure = gst_caps_get_structure(caps, 0); | |
const int width = g_value_get_int(gst_structure_get_value(structure, "width")); | |
const int height = g_value_get_int(gst_structure_get_value(structure, "height")); | |
// Print dot every 30 frames | |
if(!(framecount%30)) { | |
g_print("."); | |
} | |
// Show caps on first frame | |
if(!framecount) { | |
g_print("caps: %s\n", gst_caps_to_string(caps)); | |
} | |
framecount++; | |
// Get frame data | |
GstMapInfo map; | |
gst_buffer_map(buffer, &map, GST_MAP_READ); | |
// Convert gstreamer data to OpenCV Mat | |
cv::Mat* prevFrame; | |
prevFrame = atomicFrame.exchange(new cv::Mat(cv::Size(width, height), CV_8UC3, (char*)map.data, cv::Mat::AUTO_STEP)); | |
if(prevFrame) { | |
delete prevFrame; | |
} | |
gst_buffer_unmap(buffer, &map); | |
gst_sample_unref(sample); | |
return GST_FLOW_OK; | |
} | |
/** | |
* @brief Bus callback | |
* Print important messages | |
* | |
* @param bus | |
* @param message | |
* @param data | |
* @return gboolean | |
*/ | |
static gboolean my_bus_callback(GstBus *bus, GstMessage *message, gpointer data) | |
{ | |
// Debug message | |
//g_print("Got %s message\n", GST_MESSAGE_TYPE_NAME(message)); | |
switch(GST_MESSAGE_TYPE(message)) { | |
case GST_MESSAGE_ERROR: { | |
GError *err; | |
gchar *debug; | |
gst_message_parse_error(message, &err, &debug); | |
g_print("Error: %s\n", err->message); | |
g_error_free(err); | |
g_free(debug); | |
break; | |
} | |
case GST_MESSAGE_EOS: | |
/* end-of-stream */ | |
break; | |
default: | |
/* unhandled message */ | |
break; | |
} | |
/* we want to be notified again the next time there is a message | |
* on the bus, so returning TRUE (FALSE means we want to stop watching | |
* for messages on the bus and our callback should not be called again) | |
*/ | |
return true; | |
} | |
int main(int argc, char *argv[]) { | |
gst_init(&argc, &argv); | |
gchar *descr = g_strdup( | |
"udpsrc port=5600 " | |
"! application/x-rtp, payload=96 ! rtph264depay ! h264parse ! avdec_h264 " | |
"! decodebin ! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert " | |
"! appsink name=sink emit-signals=true sync=false max-buffers=1 drop=true" | |
); | |
// Check pipeline | |
GError *error = nullptr; | |
GstElement *pipeline = gst_parse_launch(descr, &error); | |
if(error) { | |
g_print("could not construct pipeline: %s\n", error->message); | |
g_error_free(error); | |
exit(-1); | |
} | |
// Get sink | |
GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink"); | |
/** | |
* @brief Get sink signals and check for a preroll | |
* If preroll exists, we do have a new frame | |
*/ | |
gst_app_sink_set_emit_signals((GstAppSink*)sink, true); | |
gst_app_sink_set_drop((GstAppSink*)sink, true); | |
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1); | |
GstAppSinkCallbacks callbacks = { nullptr, new_preroll, new_sample }; | |
gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, nullptr, nullptr); | |
// Declare bus | |
GstBus *bus; | |
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline)); | |
gst_bus_add_watch(bus, my_bus_callback, nullptr); | |
gst_object_unref(bus); | |
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING); | |
// Main loop | |
while(1) { | |
g_main_iteration(false); | |
cv::Mat* frame = atomicFrame.load(); | |
if(frame) { | |
cv::imshow("Frame", atomicFrame.load()[0]); | |
cv::waitKey(30); | |
} | |
} | |
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL); | |
gst_object_unref(GST_OBJECT(pipeline)); | |
return 0; | |
} |
GST_PLUGIN_PATH is set to 👍 G:\gstreamer\1.0\x86_64\lib
where I have
G:.
├───gio
│ └───modules
├───glib-2.0
│ └───include
├───graphene-1.0
│ └───include
├───gst-validate-launcher
│ └───python
│ └───launcher
│ ├───apps
│ └───testsuites
├───gstreamer-1.0
│ └───include
│ └───gst
│ └───gl
└───pkgconfig
etc ...
if I use
udpfactory = gst_element_factory_find("udpsrc");
g_return_if_fail(udpfactory != NULL);
udp = gst_element_factory_make("udpsrc", "udp");
I get the following error:
(TestGstreamerCpp.exe:11028): GStreamer-WARNING **: 16:14:00.989: Failed to load plugin 'G:\gstreamer\1.0\x86_64\lib\gio\modules\giognutls.dll': 'G:\gstreamer\1.0\x86_64\lib\gio\modules\giognutls.dll': The specified module could not be found.
(TestGstreamerCpp.exe:11028): GStreamer-WARNING **: 16:17:26.514: Failed to load plugin 'G:\gstreamer\1.0\x86_64\lib\gstreamer-1.0\gstudp.dll': 'G:\gstreamer\1.0\x86_64\lib\gstreamer-1.0\gstudp.dll': The specified module could not be found.
while the dll are exactly at these locations.
Maybe you are compiling with the 32 bits and linked with the 64 bits or vice-versa ?
no I checked. I just uninstall and reinstall gstreamer. and I can't even compile in 32bits.
seems to work now. I added another env variable.
I now have GST_PLUGIN_PATH, GSTREAMER_DIR, GSTREAMER_1_0_ROOT_X86_64, and I added the path of the gstreamer bin directory to PATH. It's quite painful needing to add all these environment variables.
any chance you have a similar code to make an udp sink for opencv frame ?
in your code, I don't understand the buffer.
you set
gst_app_sink_set_drop((GstAppSink*)sink, true);
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
so I thought that there was no buffer. and if I put a delay at reception (let's say a sleep of 5s), I would lose 5s of frame.
but actually not.
it waits 5 seconds, but I don't lose any frame. it seems there still is a buffer.
It's just a pipeline configuration, this part of code only runs once, this only enables drop frames and creates a buffer of 1 single frame.
any chance you have a similar code to make an udp sink for opencv frame ?
Can you explain what you want to do ?
Dear Patrickelectric,
I run your source code but no frame appears in window.
Also, please tell me how to use this source with an RTSP stream, with a link like: rtsp://192.168.1.1:554/stream
I look forward to your reply
Many thanks
It's just a pipeline configuration, this part of code only runs once, this only enables drop frames and creates a buffer of 1 single frame.
I know, but then if I have a buffer of 1 single frame and drops frames. why does it "bufferize" ? I mean that if I slow down the computing time to make the program not able to deal with frames at the same rates as the video. like a video at 15 fps and I put a "sleep" in new_sample:
GstFlowReturn new_sample(GstAppSink *appsink, gpointer /*data*/)
{
static int framecount = 0;
std::this_thread::sleep_for(std::chrono::milliseconds(2000));
in main I did this change in the while true loop:
cv::Mat* predFrame = NULL;
while (1) {
g_main_iteration(false);
cv::Mat* frame = atomicFrame.load();
if (frame) {
if (predFrame != NULL)
{
if (cv::countNonZero(predFrame - frame)!=0)
{
std::cout << "we have a frame 2" << std::endl;
cv::imwrite("G:\\gstreamer\\testWrite\\frame" + std::to_string(i) + ".jpg", *frame);
i++;
predFrame = frame;
}
}
I would have expected it not to write every frame of the video, but instead one frame every 2 seconds of the video.
but it writes every frame of the video, and if I stop the video, it continues writing until it empties a queue/buffer.
(here is the command line I am using to send the video received by video_udp.cpp : gst-launch-1.0.exe -v filesrc location=G:\gstreamer\Gravity.mp4 ! decodebin ! videoconvert ! openh264enc ! rtph264pay name=pay0 pt=96 config-interval=1 ! udpsink host=10.231.220.199 port=5000)
my goal is a real time "application", so I prefer to lose frames that having a delay
For my application it was necessary to have a single frame, the buffer was not important for me, this is just a minimal example and for different usages the code may be changed.
Hi!
Do you have an example how to send the received data back via an appsrc udp video writer? (Or Simiar)
A bit late to the party, but I think there's a race condition in this code due to a delete after atomicFrame.load() .
By using std::atomic<std::shared_ptr> this race can be avoided (since c++20).
Basically I ran into corrupted frames, the shared_ptr fixed the issue for me.
Besides that: Thanks for this gist! Helped me a lot.
Hi @pdeman,
The last tip that I can give, since I'm not a windows developer, is to make sure that you have all plugins accessible under
GST_PLUGIN_PATH
environment variable, If that does not help, I would recommend to seek suggestions in the official gstreamer communication channel for windows.