nvdsanalytics + msgconv
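A DeepStream 5.0 sample (based on deepstream-test4) that batches multiple RTSP sources, runs nvinfer + nvtracker + nvdsanalytics, prints the ROI / line-crossing / direction analytics from a pad probe, and publishes per-object event messages through nvmsgconv + nvmsgbroker.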
/*
 * Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <math.h>       /* sqrt()/ceil() for the tiler layout */
#include <sys/timeb.h>
#ifndef PLATFORM_TEGRA
#include "gst-nvmessage.h"
#endif
#include "gstnvdsmeta.h"
#include "nvdsmeta_schema.h"
#include "nvds_analytics_meta.h"
#define MAX_DISPLAY_LEN 64
#define MAX_TIME_STAMP_LEN 32
#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2
#define PGIE_CONFIG_FILE "nvdsanalytics_pgie_config.txt"
#define MSCONV_CONFIG_FILE "dstest4_msgconv_config.txt"
#define MSG2P_LIB "/opt/nvidia/deepstream/deepstream-5.0/sources/libs/nvmsgconv/libnvds_msgconv.so"
/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
/* Muxer batch formation timeout, e.g. 40 msec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000
#define TILED_OUTPUT_WIDTH 1080
#define TILED_OUTPUT_HEIGHT 720
/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
static gchar *cfg_file = NULL;
static gchar *topic = NULL;
static gchar *conn_str = NULL;
static gchar *proto_lib = NULL;
static gint schema_type = 0;
static gboolean display_off = FALSE;
static guint num_sources = 0;
static gchar **source_names = NULL;
// gint frame_number = 0;
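/* Labels used for the OSD display text; the index must match the detector's
 * class ids (0 = Vehicle, 2 = Person, per the PGIE_CLASS_ID_* defines above). */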
gchar pgie_classes_str[4][32] = { "Vehicle", "TwoWheeler", "Person",
  "RoadSign"
};
GOptionEntry entries[] = {
  {"cfg-file", 'c', 0, G_OPTION_ARG_FILENAME, &cfg_file,
      "Set the adaptor config file. Optional if connection string has relevant details.", NULL},
  {"topic", 't', 0, G_OPTION_ARG_STRING, &topic,
      "Name of message topic. Optional if it is part of connection string or config file.", NULL},
  {"conn-str", 0, 0, G_OPTION_ARG_STRING, &conn_str,
      "Connection string of backend server. Optional if it is part of config file.", NULL},
  {"proto-lib", 'p', 0, G_OPTION_ARG_STRING, &proto_lib,
      "Absolute path of adaptor library", NULL},
  {"schema", 's', 0, G_OPTION_ARG_INT, &schema_type,
      "Type of message schema (0=Full, 1=minimal), default=0", NULL},
  {"no-display", 0, 0, G_OPTION_ARG_NONE, &display_off, "Disable display", NULL},
  {"num-sources", 'n', 0, G_OPTION_ARG_INT, &num_sources, "Number of rtsp sources", NULL},
  {"sources", 0, 0, G_OPTION_ARG_STRING_ARRAY, &source_names, "RTSP source address", NULL},
  {NULL}
};
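/* Note: --sources may be passed multiple times; G_OPTION_ARG_STRING_ARRAY
 * collects each occurrence into the NULL-terminated source_names array, and
 * --num-sources should match the number of --sources entries given. */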
static void generate_ts_rfc3339 (char *buf, int buf_size)
{
  time_t tloc;
  struct tm tm_log;
  struct timespec ts;
  char strmsec[6];              //.nnnZ\0
  clock_gettime (CLOCK_REALTIME, &ts);
  memcpy (&tloc, (void *) (&ts.tv_sec), sizeof (time_t));
  gmtime_r (&tloc, &tm_log);
  strftime (buf, buf_size, "%Y-%m-%dT%H:%M:%S", &tm_log);
  int ms = ts.tv_nsec / 1000000;
  g_snprintf (strmsec, sizeof (strmsec), ".%.3dZ", ms);
  strncat (buf, strmsec, buf_size);
}
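/* The copy / release callbacks below are registered on the event user meta
 * (base_meta.copy_func / release_func). DeepStream invokes them whenever this
 * user meta is copied or released downstream, so every pointer member has to
 * be deep-copied and freed here rather than shared. */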
static gpointer meta_copy_func (gpointer data, gpointer user_data)
{
  NvDsUserMeta *user_meta = (NvDsUserMeta *) data;
  NvDsEventMsgMeta *srcMeta = (NvDsEventMsgMeta *) user_meta->user_meta_data;
  NvDsEventMsgMeta *dstMeta = NULL;
  dstMeta = g_memdup (srcMeta, sizeof (NvDsEventMsgMeta));
  if (srcMeta->ts)
    dstMeta->ts = g_strdup (srcMeta->ts);
  if (srcMeta->sensorStr)
    dstMeta->sensorStr = g_strdup (srcMeta->sensorStr);
  if (srcMeta->objSignature.size > 0) {
    dstMeta->objSignature.signature = g_memdup (srcMeta->objSignature.signature,
        srcMeta->objSignature.size);
    dstMeta->objSignature.size = srcMeta->objSignature.size;
  }
  if (srcMeta->objectId) {
    dstMeta->objectId = g_strdup (srcMeta->objectId);
  }
  if (srcMeta->extMsgSize > 0) {
    if (srcMeta->objType == NVDS_OBJECT_TYPE_VEHICLE) {
      NvDsVehicleObject *srcObj = (NvDsVehicleObject *) srcMeta->extMsg;
      NvDsVehicleObject *obj = (NvDsVehicleObject *) g_malloc0 (sizeof (NvDsVehicleObject));
      if (srcObj->type)
        obj->type = g_strdup (srcObj->type);
      if (srcObj->make)
        obj->make = g_strdup (srcObj->make);
      if (srcObj->model)
        obj->model = g_strdup (srcObj->model);
      if (srcObj->color)
        obj->color = g_strdup (srcObj->color);
      if (srcObj->license)
        obj->license = g_strdup (srcObj->license);
      if (srcObj->region)
        obj->region = g_strdup (srcObj->region);
      dstMeta->extMsg = obj;
      dstMeta->extMsgSize = sizeof (NvDsVehicleObject);
    } else if (srcMeta->objType == NVDS_OBJECT_TYPE_PERSON) {
      NvDsPersonObject *srcObj = (NvDsPersonObject *) srcMeta->extMsg;
      NvDsPersonObject *obj = (NvDsPersonObject *) g_malloc0 (sizeof (NvDsPersonObject));
      obj->age = srcObj->age;
      if (srcObj->gender)
        obj->gender = g_strdup (srcObj->gender);
      if (srcObj->cap)
        obj->cap = g_strdup (srcObj->cap);
      if (srcObj->hair)
        obj->hair = g_strdup (srcObj->hair);
      if (srcObj->apparel)
        obj->apparel = g_strdup (srcObj->apparel);
      dstMeta->extMsg = obj;
      dstMeta->extMsgSize = sizeof (NvDsPersonObject);
    }
  }
  return dstMeta;
}
static void meta_free_func (gpointer data, gpointer user_data)
{
  NvDsUserMeta *user_meta = (NvDsUserMeta *) data;
  NvDsEventMsgMeta *srcMeta = (NvDsEventMsgMeta *) user_meta->user_meta_data;
  g_free (srcMeta->ts);
  g_free (srcMeta->sensorStr);
  if (srcMeta->objSignature.size > 0) {
    g_free (srcMeta->objSignature.signature);
    srcMeta->objSignature.size = 0;
  }
  if (srcMeta->objectId) {
    g_free (srcMeta->objectId);
  }
  if (srcMeta->extMsgSize > 0) {
    if (srcMeta->objType == NVDS_OBJECT_TYPE_VEHICLE) {
      NvDsVehicleObject *obj = (NvDsVehicleObject *) srcMeta->extMsg;
      if (obj->type)
        g_free (obj->type);
      if (obj->color)
        g_free (obj->color);
      if (obj->make)
        g_free (obj->make);
      if (obj->model)
        g_free (obj->model);
      if (obj->license)
        g_free (obj->license);
      if (obj->region)
        g_free (obj->region);
    } else if (srcMeta->objType == NVDS_OBJECT_TYPE_PERSON) {
      NvDsPersonObject *obj = (NvDsPersonObject *) srcMeta->extMsg;
      if (obj->gender)
        g_free (obj->gender);
      if (obj->cap)
        g_free (obj->cap);
      if (obj->hair)
        g_free (obj->hair);
      if (obj->apparel)
        g_free (obj->apparel);
    }
    g_free (srcMeta->extMsg);
    srcMeta->extMsgSize = 0;
  }
  g_free (user_meta->user_meta_data);
  user_meta->user_meta_data = NULL;
}
static void
generate_vehicle_meta (gpointer data)
{
  NvDsVehicleObject *obj = (NvDsVehicleObject *) data;
  obj->type = g_strdup ("sedan");
  obj->color = g_strdup ("blue");
  obj->make = g_strdup ("Bugatti");
  obj->model = g_strdup ("M");
  obj->license = g_strdup ("XX1234");
  obj->region = g_strdup ("CA");
}
static void
generate_person_meta (gpointer data)
{
  NvDsPersonObject *obj = (NvDsPersonObject *) data;
  obj->age = 45;
  obj->cap = g_strdup ("none");
  obj->hair = g_strdup ("black");
  obj->gender = g_strdup ("male");
  obj->apparel = g_strdup ("formal");
}
static void
generate_event_msg_meta (gpointer data, gint class_id, NvDsObjectMeta * obj_params)
{
  NvDsEventMsgMeta *meta = (NvDsEventMsgMeta *) data;
  meta->sensorId = 0;
  meta->placeId = 0;
  meta->moduleId = 0;
  meta->sensorStr = g_strdup ("sensor-0");
  meta->ts = (gchar *) g_malloc0 (MAX_TIME_STAMP_LEN + 1);
  meta->objectId = (gchar *) g_malloc0 (MAX_LABEL_SIZE);
  strncpy (meta->objectId, obj_params->obj_label, MAX_LABEL_SIZE);
  generate_ts_rfc3339 (meta->ts, MAX_TIME_STAMP_LEN);
  /*
   * This demonstrates how to attach custom objects.
   * Any custom object as per requirement can be generated and attached
   * like NvDsVehicleObject / NvDsPersonObject. Then that object should
   * be handled in payload generator library (nvmsgconv.cpp) accordingly.
   */
  if (class_id == PGIE_CLASS_ID_VEHICLE) {
    meta->type = NVDS_EVENT_MOVING;
    meta->objType = NVDS_OBJECT_TYPE_VEHICLE;
    meta->objClassId = PGIE_CLASS_ID_VEHICLE;
    NvDsVehicleObject *obj = (NvDsVehicleObject *) g_malloc0 (sizeof (NvDsVehicleObject));
    generate_vehicle_meta (obj);
    meta->extMsg = obj;
    meta->extMsgSize = sizeof (NvDsVehicleObject);
  } else if (class_id == PGIE_CLASS_ID_PERSON) {
    meta->type = NVDS_EVENT_ENTRY;
    meta->objType = NVDS_OBJECT_TYPE_PERSON;
    meta->objClassId = PGIE_CLASS_ID_PERSON;
    NvDsPersonObject *obj = (NvDsPersonObject *) g_malloc0 (sizeof (NvDsPersonObject));
    generate_person_meta (obj);
    meta->extMsg = obj;
    meta->extMsgSize = sizeof (NvDsPersonObject);
  }
}
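/*
 * Hypothetical illustration (not part of this sample): any custom struct can be
 * attached the same way, e.g. an analytics summary whose values are passed in
 * from the probe below:
 *
 *   typedef struct { guint roi_count; guint lc_count; } MyAnalyticsSummary;
 *
 *   MyAnalyticsSummary *s = (MyAnalyticsSummary *) g_malloc0 (sizeof (*s));
 *   s->roi_count = roi_count;   // value would have to be passed in as an argument
 *   meta->extMsg = s;
 *   meta->extMsgSize = sizeof (*s);
 *
 * The payload generator library (nvmsgconv.cpp) plus meta_copy_func /
 * meta_free_func above would then need matching handling for the new type.
 */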
/* nvdsanalytics_src_pad_buffer_probe extracts the object and frame metadata on
 * the nvdsanalytics src pad, including the nvdsanalytics metadata (direction,
 * ROI and line-crossing counts). */
static GstPadProbeReturn
nvdsanalytics_src_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer u_data)
{
  GstBuffer *buf = (GstBuffer *) info->data;
  guint num_rects = 0;
  NvDsFrameMeta *frame_meta = NULL;
  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  gboolean is_first_object = TRUE;
  guint lc_count = 0;
  guint roi_count = 0;
  // bool overcrowding = false;
  NvOSD_TextParams *txt_params = NULL;
  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
        l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *) (l_obj->data);
      if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
        vehicle_count++;
        num_rects++;
      }
      if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
        person_count++;
        num_rects++;
      }
      // Access attached user meta for each object
      for (NvDsMetaList *l_user_meta = obj_meta->obj_user_meta_list;
          l_user_meta != NULL; l_user_meta = l_user_meta->next) {
        NvDsUserMeta *user_meta = (NvDsUserMeta *) (l_user_meta->data);
        if (user_meta->base_meta.meta_type == NVDS_USER_OBJ_META_NVDSANALYTICS) {
          NvDsAnalyticsObjInfo *user_meta_data =
              (NvDsAnalyticsObjInfo *) user_meta->user_meta_data;
          if (user_meta_data->dirStatus.length ()) {
            g_print ("object %lu moving in %s\n", obj_meta->object_id,
                user_meta_data->dirStatus.c_str ());
          }
        }
      }
    }
    roi_count = 0;
    lc_count = 0;
    // overcrowding = false;
    /* Iterate user metadata in frames to search analytics metadata */
    for (NvDsMetaList *l_user = frame_meta->frame_user_meta_list;
        l_user != NULL; l_user = l_user->next) {
      NvDsUserMeta *user_meta = (NvDsUserMeta *) l_user->data;
      if (user_meta->base_meta.meta_type != NVDS_USER_FRAME_META_NVDSANALYTICS)
        continue;
      /* convert to metadata */
      NvDsAnalyticsFrameMeta *meta =
          (NvDsAnalyticsFrameMeta *) user_meta->user_meta_data;
      /* Get the labels from nvdsanalytics config file */
      roi_count = meta->objInROIcnt["RF"];
      lc_count = meta->objLCCumCnt["Exit"];
      // overcrowding = meta->ocStatus["OC"];
    }
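    /*
     * The keys "RF" and "Exit" above are the user-defined labels from
     * config_nvdsanalytics.txt. For illustration only (stream index and
     * coordinates are placeholders), the matching stanzas might look like:
     *
     *   [roi-filtering-stream-0]
     *   enable=1
     *   roi-RF=295;643;579;634;642;913;56;828
     *   inverse-roi=0
     *   class-id=-1
     *
     *   [line-crossing-stream-0]
     *   enable=1
     *   line-crossing-Exit=789;672;1084;900;851;773;1203;732
     *   class-id=0
     *   extended=0
     *   mode=loose
     */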
    is_first_object = TRUE;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
        l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *) (l_obj->data);
      if (obj_meta == NULL) {
        // Ignore Null object.
        continue;
      }
      txt_params = &(obj_meta->text_params);
      if (txt_params->display_text)
        g_free (txt_params->display_text);
      txt_params->display_text = (gchar *) g_malloc0 (MAX_DISPLAY_LEN);
      g_snprintf (txt_params->display_text, MAX_DISPLAY_LEN, "%s ",
          pgie_classes_str[obj_meta->class_id]);
      /* Now set the offsets where the string should appear */
      txt_params->x_offset = obj_meta->rect_params.left;
      txt_params->y_offset = obj_meta->rect_params.top - 25;
      /* Font, font-color and font-size */
      txt_params->font_params.font_name = "Serif";
      txt_params->font_params.font_size = 10;
      txt_params->font_params.font_color.red = 1.0;
      txt_params->font_params.font_color.green = 1.0;
      txt_params->font_params.font_color.blue = 1.0;
      txt_params->font_params.font_color.alpha = 1.0;
      /* Text background color */
      txt_params->set_bg_clr = 1;
      txt_params->text_bg_clr.red = 0.0;
      txt_params->text_bg_clr.green = 0.0;
      txt_params->text_bg_clr.blue = 0.0;
      txt_params->text_bg_clr.alpha = 1.0;
      /*
       * Ideally NVDS_EVENT_MSG_META should be attached to the buffer by the
       * component implementing the detection / recognition logic.
       * Here it demonstrates how to use / attach that metadata.
       */
      if (is_first_object && !(frame_meta->frame_num % 30)) {
        /* The frequency of messages to be sent depends on the use case.
         * Here a message is sent for the first object every 30 frames.
         */
        NvDsEventMsgMeta *msg_meta = (NvDsEventMsgMeta *) g_malloc0 (sizeof (NvDsEventMsgMeta));
        msg_meta->bbox.top = obj_meta->rect_params.top;
        msg_meta->bbox.left = obj_meta->rect_params.left;
        msg_meta->bbox.width = obj_meta->rect_params.width;
        msg_meta->bbox.height = obj_meta->rect_params.height;
        msg_meta->frameId = frame_meta->frame_num;
        msg_meta->trackingId = obj_meta->object_id;
        msg_meta->confidence = obj_meta->confidence;
        generate_event_msg_meta (msg_meta, obj_meta->class_id, obj_meta);
        NvDsUserMeta *user_event_meta = nvds_acquire_user_meta_from_pool (batch_meta);
        if (user_event_meta) {
          user_event_meta->user_meta_data = (void *) msg_meta;
          user_event_meta->base_meta.meta_type = NVDS_EVENT_MSG_META;
          user_event_meta->base_meta.copy_func = (NvDsMetaCopyFunc) meta_copy_func;
          user_event_meta->base_meta.release_func = (NvDsMetaReleaseFunc) meta_free_func;
          nvds_add_user_meta_to_frame (frame_meta, user_event_meta);
        } else {
          g_print ("Error in attaching event meta to buffer\n");
        }
        is_first_object = FALSE;
      }
    }
    g_print ("Frame Number = %d of Stream = %d, Number of objects = %d "
        "Vehicle Count = %d Person Count = %d Objs in ROI = %d LC count = %d \n",
        frame_meta->frame_num, frame_meta->pad_index,
        num_rects, vehicle_count, person_count, roi_count, lc_count);
    // frame_number++;
  }
  return GST_PAD_PROBE_OK;
}
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
static void
cb_newpad (GstElement * decodebin, GstPad * decoder_src_pad, gpointer data)
{
  g_print ("In cb_newpad\n");
  GstCaps *caps = gst_pad_get_current_caps (decoder_src_pad);
  const GstStructure *str = gst_caps_get_structure (caps, 0);
  const gchar *name = gst_structure_get_name (str);
  GstElement *source_bin = (GstElement *) data;
  GstCapsFeatures *features = gst_caps_get_features (caps, 0);
  /* Need to check if the pad created by the decodebin is for video and not
   * audio. */
  if (!strncmp (name, "video", 5)) {
    /* Link the decodebin pad only if decodebin has picked nvidia
     * decoder plugin nvdec_*. We do this by checking if the pad caps contain
     * NVMM memory features. */
    if (gst_caps_features_contains (features, GST_CAPS_FEATURES_NVMM)) {
      /* Get the source bin ghost pad */
      GstPad *bin_ghost_pad = gst_element_get_static_pad (source_bin, "src");
      if (!gst_ghost_pad_set_target (GST_GHOST_PAD (bin_ghost_pad),
              decoder_src_pad)) {
        g_printerr ("Failed to link decoder src pad to source bin ghost pad\n");
      }
      gst_object_unref (bin_ghost_pad);
    } else {
      g_printerr ("Error: Decodebin did not pick nvidia decoder plugin.\n");
    }
  }
}
static void
decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
  g_print ("Decodebin child added: %s\n", name);
  if (g_strrstr (name, "decodebin") == name) {
    g_signal_connect (G_OBJECT (object), "child-added",
        G_CALLBACK (decodebin_child_added), user_data);
  }
}
static GstElement *
create_source_bin (guint index, gchar * uri)
{
  GstElement *bin = NULL, *uri_decode_bin = NULL;
  gchar bin_name[16] = { };
  g_snprintf (bin_name, 15, "source-bin-%02d", index);
  /* Create a source GstBin to abstract this bin's content from the rest of the
   * pipeline */
  bin = gst_bin_new (bin_name);
  /* Source element for reading from the uri.
   * We will use decodebin and let it figure out the container format of the
   * stream and the codec and plug the appropriate demux and decode plugins. */
  uri_decode_bin = gst_element_factory_make ("uridecodebin", "uri-decode-bin");
  if (!bin || !uri_decode_bin) {
    g_printerr ("One element in source bin could not be created.\n");
    return NULL;
  }
  /* We set the input uri to the source element */
  g_object_set (G_OBJECT (uri_decode_bin), "uri", uri, NULL);
  /* Connect to the "pad-added" signal of the decodebin which generates a
   * callback once a new pad for raw data has been created by the decodebin */
  g_signal_connect (G_OBJECT (uri_decode_bin), "pad-added",
      G_CALLBACK (cb_newpad), bin);
  g_signal_connect (G_OBJECT (uri_decode_bin), "child-added",
      G_CALLBACK (decodebin_child_added), bin);
  gst_bin_add (GST_BIN (bin), uri_decode_bin);
  /* We need to create a ghost pad for the source bin which will act as a proxy
   * for the video decoder src pad. The ghost pad will not have a target right
   * now. Once the decode bin creates the video decoder and generates the
   * cb_newpad callback, we will set the ghost pad target to the video decoder
   * src pad. */
  if (!gst_element_add_pad (bin, gst_ghost_pad_new_no_target ("src",
              GST_PAD_SRC))) {
    g_printerr ("Failed to add ghost pad in source bin\n");
    return NULL;
  }
  return bin;
}
int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  /* Sources are created as uridecodebin source bins, so no file-source /
   * parser / decoder elements are needed here. */
  GstElement *pipeline = NULL, *sink = NULL, *pgie = NULL, *nvtracker = NULL,
      *nvdsanalytics = NULL, *nvvidconv = NULL, *nvosd = NULL, *tiler = NULL,
      *nvstreammux = NULL;
  GstElement *msgconv = NULL, *msgbroker = NULL, *tee = NULL;
  GstElement *queue1 = NULL, *queue2 = NULL;
#ifdef PLATFORM_TEGRA
  GstElement *transform = NULL;
#endif
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *nvdsanalytics_src_pad = NULL;
  GstPad *tee_render_pad = NULL;
  GstPad *tee_msg_pad = NULL;
  GstPad *sink_pad = NULL;
  GstPad *src_pad = NULL;
  GOptionContext *ctx = NULL;
  GOptionGroup *group = NULL;
  GError *error = NULL;
  guint i;
  guint tiler_rows, tiler_columns;
  guint pgie_batch_size;
  ctx = g_option_context_new ("Nvidia DeepStream Test4");
  group = g_option_group_new ("test4", NULL, NULL, NULL, NULL);
  g_option_group_add_entries (group, entries);
  g_option_context_set_main_group (ctx, group);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
    g_option_context_free (ctx);
    g_printerr ("%s", error->message);
    return -1;
  }
  g_option_context_free (ctx);
  if (!proto_lib || !num_sources || !source_names) {
    g_printerr ("Missing arguments\n");
    g_printerr ("Usage: %s -p <proto adaptor library> --conn-str=<connection string> "
        "-n <num rtsp sources> --sources <rtsp uri> [--sources <rtsp uri> ...]\n",
        argv[0]);
    return -1;
  }
  loop = g_main_loop_new (NULL, FALSE);
  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest4-pipeline");
  /* Create nvstreammux instance to form batches from one or more sources. */
  nvstreammux = gst_element_factory_make ("nvstreammux", "nvstreammux");
  if (!pipeline || !nvstreammux) {
    g_printerr ("Pipeline or streammux element could not be created. Exiting.\n");
    return -1;
  }
  gst_bin_add (GST_BIN (pipeline), nvstreammux);
  for (i = 0; i < num_sources; i++) {
    GstPad *sinkpad, *srcpad;
    gchar pad_name[16] = { };
    GstElement *source_bin = create_source_bin (i, source_names[i]);
    if (!source_bin) {
      g_printerr ("Failed to create source bin. Exiting.\n");
      return -1;
    }
    gst_bin_add (GST_BIN (pipeline), source_bin);
    g_snprintf (pad_name, 15, "sink_%u", i);
    sinkpad = gst_element_get_request_pad (nvstreammux, pad_name);
    if (!sinkpad) {
      g_printerr ("Streammux request sink pad failed. Exiting.\n");
      return -1;
    }
    srcpad = gst_element_get_static_pad (source_bin, "src");
    if (!srcpad) {
      g_printerr ("Failed to get src pad of source bin. Exiting.\n");
      return -1;
    }
    if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
      g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
      return -1;
    }
    gst_object_unref (srcpad);
    gst_object_unref (sinkpad);
  }
  /* Use nvinfer to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
  /* Use nvtracker to track detections on batched frame. */
  nvtracker = gst_element_factory_make ("nvtracker", "nvtracker");
  /* Use nvdsanalytics to perform analytics on object */
  nvdsanalytics = gst_element_factory_make ("nvdsanalytics", "nvdsanalytics");
  /* Use nvtiler to composite the batched frames into a 2D tiled array based
   * on the source of the frames. */
  tiler = gst_element_factory_make ("nvmultistreamtiler", "nvtiler");
  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
  /* Create msg converter to generate payload from buffer metadata */
  msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");
  /* Create msg broker to send payload to server */
  msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");
  /* Create tee to render buffer and send message simultaneously */
  tee = gst_element_factory_make ("tee", "nvsink-tee");
  /* Create queues */
  queue1 = gst_element_factory_make ("queue", "nvtee-que1");
  queue2 = gst_element_factory_make ("queue", "nvtee-que2");
  /* Finally render the osd output */
  if (display_off) {
    sink = gst_element_factory_make ("fakesink", "nvvideo-renderer");
  } else {
    sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
#ifdef PLATFORM_TEGRA
    transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
    if (!transform) {
      g_printerr ("nvegltransform element could not be created. Exiting.\n");
      return -1;
    }
#endif
  }
  if (!pipeline || !nvstreammux || !pgie || !nvtracker || !nvdsanalytics
      || !tiler || !nvvidconv || !nvosd || !msgconv || !msgbroker || !tee
      || !queue1 || !queue2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }
g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL); | |
g_object_set (G_OBJECT (nvstreammux), "width", MUXER_OUTPUT_WIDTH, "height", | |
MUXER_OUTPUT_HEIGHT, | |
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL); | |
/* Set all the necessary properties of the nvinfer element, | |
* the necessary ones are : */ | |
g_object_set (G_OBJECT (pgie), | |
"config-file-path", PGIE_CONFIG_FILE, NULL); | |
/* Override the batch-size set in the config file with the number of sources. */ | |
g_object_get (G_OBJECT (pgie), "batch-size", &pgie_batch_size, NULL); | |
if (pgie_batch_size != num_sources) { | |
g_printerr | |
("WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n", | |
pgie_batch_size, num_sources); | |
g_object_set (G_OBJECT (pgie), "batch-size", num_sources, NULL); | |
} | |
/* Configure the nvtracker element for using the particular tracker algorithm. */ | |
g_object_set (G_OBJECT (nvtracker), | |
"ll-lib-file", "/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so", | |
"ll-config-file", "tracker_config.yml", "tracker-width", 640, "tracker-height", 480, | |
NULL); | |
/* Configure the nvdsanalytics element for using the particular analytics config file*/ | |
g_object_set (G_OBJECT (nvdsanalytics), | |
"config-file", "config_nvdsanalytics.txt", | |
NULL); | |
g_object_set (G_OBJECT(msgconv), "config", MSCONV_CONFIG_FILE, NULL); | |
g_object_set (G_OBJECT(msgconv), "payload-type", schema_type, NULL); | |
g_object_set (G_OBJECT(msgconv), "msg2p-lib", MSG2P_LIB, NULL); | |
  g_object_set (G_OBJECT (msgbroker), "proto-lib", proto_lib,
      "conn-str", conn_str, "sync", FALSE, NULL);
  if (topic) {
    g_object_set (G_OBJECT (msgbroker), "topic", topic, NULL);
  }
  if (cfg_file) {
    g_object_set (G_OBJECT (msgbroker), "config", cfg_file, NULL);
  }
  tiler_rows = (guint) sqrt (num_sources);
  tiler_columns = (guint) ceil (1.0 * num_sources / tiler_rows);
  /* we set the tiler properties here */
  g_object_set (G_OBJECT (tiler), "rows", tiler_rows, "columns", tiler_columns,
      "width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);
  g_object_set (G_OBJECT (sink), "qos", 0, NULL);
g_object_set (G_OBJECT (sink), "sync", FALSE, NULL); /* set to true at first, but maybe cause laggy*/ | |
  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);
  /* Set up the pipeline */
  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      nvstreammux, pgie, nvtracker, nvdsanalytics,
      tiler, nvvidconv, nvosd, tee, queue1, queue2, msgconv,
      msgbroker, sink, NULL);
#ifdef PLATFORM_TEGRA
  if (!display_off)
    gst_bin_add (GST_BIN (pipeline), transform);
#endif
  /* We link the elements together:
   * source-bin(s) -> nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics ->
   * nvtiler -> nvvidconv -> nvosd -> tee -> queue2 -> video-renderer
   *                                      |
   *                                      |-> queue1 -> msgconv -> msgbroker */
  if (!gst_element_link_many (nvstreammux, pgie, nvtracker, nvdsanalytics, tiler,
          nvvidconv, nvosd, tee, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }
  if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }
#ifdef PLATFORM_TEGRA
  if (!display_off) {
    if (!gst_element_link_many (queue2, transform, sink, NULL)) {
      g_printerr ("Elements could not be linked. Exiting.\n");
      return -1;
    }
  } else {
    if (!gst_element_link (queue2, sink)) {
      g_printerr ("Elements could not be linked. Exiting.\n");
      return -1;
    }
  }
#else
  if (!gst_element_link (queue2, sink)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }
#endif
  sink_pad = gst_element_get_static_pad (queue1, "sink");
  tee_msg_pad = gst_element_get_request_pad (tee, "src_%u");
  tee_render_pad = gst_element_get_request_pad (tee, "src_%u");
  if (!tee_msg_pad || !tee_render_pad) {
    g_printerr ("Unable to get request pads\n");
    return -1;
  }
  if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and message converter\n");
    gst_object_unref (sink_pad);
    return -1;
  }
  gst_object_unref (sink_pad);
  sink_pad = gst_element_get_static_pad (queue2, "sink");
  if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and render\n");
    gst_object_unref (sink_pad);
    return -1;
  }
  gst_object_unref (sink_pad);
  /* Add a probe to get informed of the generated metadata. We attach it to the
   * src pad of the nvdsanalytics element, since by that point the buffer
   * already carries the inference, tracker and analytics metadata. */
  nvdsanalytics_src_pad = gst_element_get_static_pad (nvdsanalytics, "src");
  if (!nvdsanalytics_src_pad)
    g_print ("Unable to get src pad\n");
  else
    gst_pad_add_probe (nvdsanalytics_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
        nvdsanalytics_src_pad_buffer_probe, NULL, NULL);
  /* Set the pipeline to "playing" state */
  g_print ("Now playing:");
  for (i = 0; i < num_sources; i++) {
    g_print (" %s,", source_names[i]);
  }
  g_print ("\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);
  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  g_free (cfg_file);
  g_free (topic);
  g_free (conn_str);
  g_free (proto_lib);
  /* Release the request pads from the tee, and unref them */
  gst_element_release_request_pad (tee, tee_msg_pad);
  gst_element_release_request_pad (tee, tee_render_pad);
  gst_object_unref (tee_msg_pad);
  gst_object_unref (tee_render_pad);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
################################################################################
# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
################################################################################
APP:= deepstream-test4-app
TARGET_DEVICE = $(shell gcc -dumpmachine | cut -f1 -d -)
NVDS_VERSION:=5.0
LIB_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/
APP_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/bin/
ifeq ($(TARGET_DEVICE),aarch64)
CFLAGS:= -DPLATFORM_TEGRA
endif
SRCS:= $(wildcard *.c)
INCS:= $(wildcard *.h)
PKGS:= gstreamer-1.0
OBJS:= $(SRCS:.c=.o)
CFLAGS+= -I../../../includes
CFLAGS+= `pkg-config --cflags $(PKGS)`
LIBS:= `pkg-config --libs $(PKGS)`
# -lm is needed for the sqrt()/ceil() calls used to compute the tiler layout.
LIBS+= -L$(LIB_INSTALL_DIR) -lnvdsgst_meta -lnvds_meta -lrt -lm \
       -Wl,-rpath,$(LIB_INSTALL_DIR)
all: $(APP)
# nvds_analytics_meta.h and the analytics probe use C++ types (std::string),
# so the source is compiled and linked with the C++ compiler.
%.o: %.c $(INCS) Makefile
	$(CXX) -c -o $@ $(CFLAGS) $<
$(APP): $(OBJS) Makefile
	$(CXX) -o $(APP) $(OBJS) $(LIBS)
install: $(APP)
	cp -rv $(APP) $(APP_INSTALL_DIR)
clean:
	rm -rf $(OBJS) $(APP)
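# Example invocation (paths, the Kafka adaptor library and the connection
# string are placeholders; adjust them to your broker and camera setup):
#   make
#   ./deepstream-test4-app \
#       -p /opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_kafka_proto.so \
#       --conn-str="localhost;9092;dstest" -s 1 --no-display \
#       -n 2 --sources rtsp://camera-1/stream1 --sources rtsp://camera-2/stream1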