Please provide complete information as applicable to your setup.
• Hardware Platform (Jetson / GPU) :- dGPU
• DeepStream Version :- 6.4
I am trying to use an appsrc element and a uridecodebin element together in one pipeline. I am able to link all the required elements, but for some reason the images pushed through appsrc never show up in the display sink.
This is my pipeline.
and here is my code.
#include <glib.h>
#include <gst/gst.h>
#include <hiredis/hiredis.h>
#include <unistd.h>
#include <cmath>
#include <csignal>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>
#include "gst/app/gstappsrc.h"
#include "gstnvdsinfer.h"
#include "gstnvdsmeta.h"
#include "nvbufsurface.h"
#include "nvds_obj_encode.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
using json = nlohmann::json;
using namespace std;  // NOTE(review): avoid at file scope; kept for compatibility

/* Output resolution of nvstreammux. */
#define MUXER_OUTPUT_WIDTH 1280
#define MUXER_OUTPUT_HEIGHT 720
/* How long nvstreammux waits to form a batch before pushing (microseconds). */
#define MUXER_BATCH_TIMEOUT_USEC 40000
// #define perf_measurement
#define enable_probes
/* Output resolution of nvmultistreamtiler (tiler currently not in pipeline). */
#define TILED_OUTPUT_WIDTH 1280
#define TILED_OUTPUT_HEIGHT 720
/* Caps feature string that marks NVIDIA device (NVMM) memory. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
/* BUG FIX: a second, duplicate "using json = nlohmann::json;" alias that
 * appeared here was removed. */
/* Bookkeeping for the appsrc branch: the element itself plus the state
 * used by the need-data / enough-data feed callbacks. */
typedef struct _AppSrcData {
GstElement *app_source; /* the "appsrc" element of the pipeline */
long frame_size;        /* size in bytes of one frame — not used in this file */
FILE *file;             /* optional input file handle — not used in this file */
gint appsrc_frame_num;  /* count of frames pushed so far */
guint fps;              /* intended frame rate of the pushed stream */
guint sourceid;         /* GLib source id that stop_feed() removes */
} AppSrcData;
/* Context handed to bus_call_short() so the bus handler can quit the
 * main loop and reach the pipeline. */
struct UserData {
GMainLoop *main_loop;
GstElement *pipeline;
};
/* Second-stage SIGINT handler: installed by signal_handler() after the
 * first Ctrl+C, so a second Ctrl+C terminates the process immediately.
 * BUG FIX: the message read "Existing" instead of "Exiting". */
void signal_handler_final(int signum) {
    std::cout << "Exiting because you insist!" << std::endl;
    exit(EXIT_SUCCESS);
}
/* Copy the pixel data of a cv::Mat into a newly allocated GstBuffer.
 * Returns nullptr (after logging to stderr) on an empty Mat, a failed
 * allocation, or a failed buffer map. */
GstBuffer *createGstBufferFromMat(const cv::Mat &mat) {
    if (mat.empty()) {
        std::cerr << "Empty cv::Mat provided" << std::endl;
        return nullptr;
    }

    const size_t nbytes = mat.total() * mat.elemSize();
    GstBuffer *buf = gst_buffer_new_allocate(nullptr, nbytes, nullptr);
    if (buf == nullptr) {
        std::cerr << "Failed to create GstBuffer" << std::endl;
        return nullptr;
    }

    GstMapInfo info;
    if (!gst_buffer_map(buf, &info, GST_MAP_WRITE)) {
        std::cerr << "Failed to map GstBuffer" << std::endl;
        gst_buffer_unref(buf);
        return nullptr;
    }
    std::memcpy(info.data, mat.data, nbytes);
    gst_buffer_unmap(buf, &info);
    return buf;
}
/* Feed exactly one JPEG-encoded frame into the appsrc per invocation.
 *
 * Fixes over the original:
 *  - It looped forever in `while (true)` inside the need-data callback,
 *    never returning control to GStreamer.
 *  - It pushed the SAME GstBuffer ten times: gst_app_src_push_buffer()
 *    takes ownership of the buffer, so every push after the first used a
 *    buffer this code no longer owned (use-after-free).
 *  - It pushed raw BGR pixels into a branch that starts with jpegparse,
 *    which expects a JPEG bitstream; the frame is now JPEG-encoded.
 *  - Buffers are timestamped, since the appsrc is configured with
 *    format=GST_FORMAT_TIME and is-live=1.
 *
 * Suitable as a GSourceFunc: returns TRUE to be invoked again, FALSE on
 * an unrecoverable error (which also removes an attached idle source). */
gboolean read_data(AppSrcData *data2) {
    cv::Mat image = cv::imread("image.jpg", cv::IMREAD_COLOR);
    if (image.empty()) {
        std::cerr << "Failed to load image" << std::endl;
        return FALSE;
    }

    /* Re-encode so downstream jpegparse/nvv4l2decoder receive the format
     * they expect. */
    std::vector<uchar> jpeg;
    if (!cv::imencode(".jpg", image, jpeg)) {
        std::cerr << "Failed to JPEG-encode frame" << std::endl;
        return FALSE;
    }

    GstBuffer *buffer = gst_buffer_new_allocate(nullptr, jpeg.size(), nullptr);
    if (buffer == nullptr) {
        std::cerr << "Failed to create GstBuffer" << std::endl;
        return FALSE;
    }
    GstMapInfo map;
    if (!gst_buffer_map(buffer, &map, GST_MAP_WRITE)) {
        std::cerr << "Failed to map GstBuffer" << std::endl;
        gst_buffer_unref(buffer);
        return FALSE;
    }
    std::memcpy(map.data, jpeg.data(), jpeg.size());
    gst_buffer_unmap(buffer, &map);

    /* Timestamp from the running frame counter; fall back to 30 fps when
     * data2->fps was never set. */
    const guint fps = (data2->fps != 0) ? data2->fps : 30;
    GST_BUFFER_PTS(buffer) =
        gst_util_uint64_scale(data2->appsrc_frame_num, GST_SECOND, fps);
    GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(1, GST_SECOND, fps);
    data2->appsrc_frame_num++;

    /* Ownership of `buffer` transfers to appsrc here — do not touch it
     * afterwards. */
    GstFlowReturn ret =
        gst_app_src_push_buffer((GstAppSrc *)data2->app_source, buffer);
    if (ret != GST_FLOW_OK) {
        g_print("gst_app_src_push_buffer returned %d \n", ret);
        return FALSE;
    }
    return TRUE;
}
/* First-stage SIGINT handler: tell the user what happened and arm the
 * final handler so that a second Ctrl+C actually exits. */
void signal_handler(int sig_num) {
    std::cout << std::endl;
    std::cout << "Caught signal" << sig_num << std::endl;
    std::cout << "Press CTRL+C again to exit!" << std::endl;
    std::signal(SIGINT, signal_handler_final);
}
/* "need-data" callback for appsrc.
 *
 * BUG FIX: the original called read_data() directly, which ran its loop
 * on the thread emitting the signal and never set data->sourceid — even
 * though stop_feed() expects a GLib source id to remove.  Use the
 * canonical appsrc pattern instead: attach read_data() as an idle source
 * (once) and remember its id so stop_feed() can detach it. */
static void start_feed(GstElement *source, guint size, AppSrcData *data) {
    if (data->sourceid == 0) {
        data->sourceid = g_idle_add((GSourceFunc)read_data, data);
    }
}
/* "enough-data" callback for appsrc: detach the feeding idle source if
 * one is currently attached (sourceid == 0 means none). */
static void stop_feed(GstElement *source, AppSrcData *data) {
    if (data->sourceid == 0) return;
    g_source_remove(data->sourceid);
    data->sourceid = 0;
}
/* GStreamer bus handler: log ERROR messages and quit the main loop on
 * ERROR or EOS.
 *
 * Fixes over the original:
 *  - On EOS it no longer sets the pipeline to NULL and unrefs it here;
 *    main() performs that teardown after g_main_loop_run() returns, so
 *    doing it in both places double-unreffed the pipeline.
 *  - The default branch no longer prints "some other error": most bus
 *    messages (state-changed, stream-status, ...) are perfectly normal. */
gboolean bus_call_short(GstBus *bus, GstMessage *msg, gpointer user_data) {
    struct UserData *data = (struct UserData *)user_data;
    GMainLoop *loop = data->main_loop;
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR: {
            gchar *debug = nullptr;
            GError *error = nullptr;
            gst_message_parse_error(msg, &error, &debug);
            g_printerr("ERROR from element %s: %s\n", GST_OBJECT_NAME(msg->src),
                       error->message);
            if (debug) g_printerr("Error details: %s\n", debug);
            g_free(debug);
            g_error_free(error);
            g_main_loop_quit(loop);
            break;
        }
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(loop);
            break;
        default:
            break;
    }
    /* Keep the watch installed. */
    return TRUE;
}
/* "pad-added" callback for the uri decode bin: when a video pad backed by
 * NVMM (NVIDIA device) memory appears, point the source bin's "src" ghost
 * pad at it so the bin can be linked to nvstreammux.
 *
 * BUG FIX: both gst_pad_get_current_caps() and gst_pad_query_caps()
 * return a reference the caller owns; the original never released it. */
static void cb_newpad(GstElement *decodebin, GstPad *decoder_src_pad,
                      gpointer data) {
    GstCaps *caps = gst_pad_get_current_caps(decoder_src_pad);
    if (!caps) {
        caps = gst_pad_query_caps(decoder_src_pad, NULL);
    }
    const GstStructure *str = gst_caps_get_structure(caps, 0);
    const gchar *name = gst_structure_get_name(str);
    GstElement *source_bin = (GstElement *)data;
    GstCapsFeatures *features = gst_caps_get_features(caps, 0);

    /* Need to check if the pad created by the decodebin is for video and
     * not audio. */
    if (!strncmp(name, "video", 5)) {
        /* Link the decodebin pad only if decodebin has picked the nvidia
         * decoder plugin nvdec_*, i.e. the caps carry the NVMM feature. */
        if (gst_caps_features_contains(features, GST_CAPS_FEATURES_NVMM)) {
            /* Get the source bin ghost pad and retarget it. */
            GstPad *bin_ghost_pad =
                gst_element_get_static_pad(source_bin, "src");
            if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad),
                                          decoder_src_pad)) {
                g_printerr(
                    "Failed to link decoder src pad to source bin ghost pad\n");
            }
            gst_object_unref(bin_ghost_pad);
        } else {
            g_printerr(
                "Error: Decodebin did not pick nvidia decoder plugin.\n");
        }
    }
    gst_caps_unref(caps);
}
/* "child-added" callback: recurse into nested decodebins so their
 * children are also observed, and set drop-on-latency on the underlying
 * source element. */
static void decodebin_child_added(GstChildProxy *child_proxy, GObject *object,
                                  gchar *name, gpointer user_data) {
    g_print("Decodebin child added: %s\n", name);

    const gboolean is_decodebin = (g_strrstr(name, "decodebin") == name);
    const gboolean is_source = (g_strrstr(name, "source") == name);

    if (is_decodebin) {
        g_signal_connect(G_OBJECT(object), "child-added",
                         G_CALLBACK(decodebin_child_added), user_data);
    }
    if (is_source) {
        g_object_set(G_OBJECT(object), "drop-on-latency", true, NULL);
    }
}
/* Build a "source-bin-NN" GstBin that wraps a uri decode bin and exposes
 * a single "src" ghost pad.  The ghost pad has no target at first; the
 * cb_newpad() callback retargets it once the decoder's src pad exists.
 * Returns NULL if any element could not be created. */
static GstElement *create_source_bin(guint index, gchar *uri) {
    gchar bin_name[16] = {};
    g_snprintf(bin_name, 15, "source-bin-%02d", index);

    /* The bin hides the demux/decode internals from the rest of the
     * pipeline. */
    GstElement *bin = gst_bin_new(bin_name);

    /* Let nvurisrcbin (PERF_MODE) or uridecodebin figure out the container
     * format and codec and plug the appropriate demux/decode plugins. */
    bool PERF_MODE = true;
    GstElement *uri_decode_bin = NULL;
    if (PERF_MODE) {
        uri_decode_bin =
            gst_element_factory_make("nvurisrcbin", "uri-decode-bin");
        g_object_set(G_OBJECT(uri_decode_bin), "file-loop", false, NULL);
        g_object_set(G_OBJECT(uri_decode_bin), "cudadec-memtype", 0, NULL);
        g_object_set(G_OBJECT(uri_decode_bin), "latency", 50, NULL);
        g_object_set(G_OBJECT(uri_decode_bin), "rtsp-reconnect-interval", 60,
                     NULL);
    } else {
        uri_decode_bin =
            gst_element_factory_make("uridecodebin", "uri-decode-bin");
    }

    if (!bin || !uri_decode_bin) {
        g_printerr("One element in source bin could not be created.\n");
        return NULL;
    }

    /* Tell the decode bin what to read. */
    g_object_set(G_OBJECT(uri_decode_bin), "uri", uri, NULL);

    /* pad-added fires once a raw-data pad appears; child-added lets us
     * tweak nested children as they are created. */
    g_signal_connect(G_OBJECT(uri_decode_bin), "pad-added",
                     G_CALLBACK(cb_newpad), bin);
    g_signal_connect(G_OBJECT(uri_decode_bin), "child-added",
                     G_CALLBACK(decodebin_child_added), bin);
    gst_bin_add(GST_BIN(bin), uri_decode_bin);

    /* Targetless ghost pad now; cb_newpad() points it at the decoder's
     * src pad later. */
    if (!gst_element_add_pad(bin,
                             gst_ghost_pad_new_no_target("src", GST_PAD_SRC))) {
        g_printerr("Failed to add ghost pad in source bin\n");
        return NULL;
    }
    return bin;
}
// std::vector<std::string> *person_name;
int main(int argc, char *argv[]) {
GMainLoop *loop = nullptr;
GstElement *pipeline = nullptr, *streammux = nullptr, *sink = nullptr,
*pgie = nullptr, *nvtracker = nullptr, *nvdsanalytics = nullptr,
*decoder = nullptr, *nvvidconv = nullptr, *nvosd = nullptr,
*tiler = nullptr, *nvvidconv2 = nullptr, *sgie = nullptr,
*person_detector = nullptr, *queue1, *queue2, *queue3, *queue4,
*queue5, *queue6, *tee = nullptr, *decodebin = nullptr,
*jpegenc = nullptr, *filesink = nullptr,
*landmarks_model = nullptr, *fakesink = nullptr;
GstBus *bus = nullptr;
GstElement *jpegparser = nullptr;
AppSrcData data2;
guint bus_watch_id;
GstPad *nvdsanalytics_src_pad = nullptr;
guint i, num_sources = argc - 1;
guint tiler_rows, tiler_columns;
guint pgie_batch_size;
struct UserData user_data = {.main_loop = loop, .pipeline = pipeline};
int current_device = -1;
cudaGetDevice(¤t_device);
struct cudaDeviceProp prop;
cudaGetDeviceProperties(&prop, current_device);
putenv("NVDS_ENABLE_LATENCY_MEASUREMENT=1");
putenv("NVDS_ENABLE_COMPONENT_LATENCY_MEASUREMENT=1");
gst_init(&argc, &argv);
loop = g_main_loop_new(nullptr, FALSE);
pipeline = gst_pipeline_new("FRS_Pipeline");
data2.app_source = gst_element_factory_make("appsrc", "app-source");
jpegparser = gst_element_factory_make("jpegparse", "jpeg-parser");
decoder = gst_element_factory_make("nvv4l2decoder", "nvv4l2-decoder");
streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
pgie = gst_element_factory_make("nvinfer", "PGIE");
landmarks_model = gst_element_factory_make("nvinfer", "landmarks_model");
sgie = gst_element_factory_make("nvinfer", "SGIE");
person_detector = gst_element_factory_make("nvinfer", "person_detector");
nvtracker = gst_element_factory_make("nvtracker", "nvtracker");
nvdsanalytics = gst_element_factory_make("nvdsanalytics", "nvdsanalytics");
tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");
nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");
nvvidconv2 =
gst_element_factory_make("nvvideoconvert", "nvvideo-converter2");
tee = gst_element_factory_make("tee", "nvsink-tee");
nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");
fakesink = gst_element_factory_make("fakesink", "fakesink");
jpegenc = gst_element_factory_make("jpegenc", "jpegenc");
filesink = gst_element_factory_make("multifilesink", "filesink");
tee = gst_element_factory_make("tee", "nvsink-tee");
sink = gst_element_factory_make("nveglglessink", "nvvideo-renderer");
queue1 = gst_element_factory_make("queue", "queue1");
queue2 = gst_element_factory_make("queue", "queue2");
queue3 = gst_element_factory_make("queue", "queue3");
queue4 = gst_element_factory_make("queue", "queue4");
queue5 = gst_element_factory_make("queue", "queue5");
queue6 = gst_element_factory_make("queue", "queue6");
if (!pgie || !sgie || !nvtracker || !nvdsanalytics || !tiler ||
!nvvidconv || !nvosd || !sink || !jpegenc || !filesink) {
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
g_object_set(G_OBJECT(data2.app_source), "num-buffers", -1, "stream-type",
0, "format", GST_FORMAT_TIME, "is-live", 1, NULL);
// g_object_set(G_OBJECT(jpegparser), "disable-passthrough", 1, NULL);
g_signal_connect(data2.app_source, "need-data", G_CALLBACK(start_feed),
&data2);
// g_signal_connect (data2.app_source, "enough-data", G_CALLBACK
// (stop_feed),&data2);
g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "live-source", 1, "batch-size", 1,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
tiler_rows = (guint)sqrt(std::max(1, argc - 1)) + 1;
tiler_columns = (guint)ceil(1.0 * std::max(1, argc - 1) / tiler_rows) + 1;
g_object_set(G_OBJECT(tiler), "rows", tiler_rows, "columns", tiler_columns,
"width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT,
NULL);
// bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
// gst_bus_add_watch(bus, bus_call_short, &user_data);
// gst_bus_add_signal_watch(bus);
// gst_object_unref(bus);
g_object_set(G_OBJECT(queue1), "max-size-buffers", 1, NULL);
g_object_set(G_OBJECT(queue2), "max-size-buffers", 1, NULL);
g_object_set(G_OBJECT(queue3), "max-size-buffers", 1, NULL);
g_object_set(G_OBJECT(queue4), "max-size-buffers", 1, NULL);
gst_bin_add_many(GST_BIN(pipeline), data2.app_source, jpegparser, decoder,
streammux, queue2, queue3, queue4, queue5, queue6,
nvvidconv, /*tiler,*/ nvosd,
/*nvvidconv2,jpegenc , filesink,*/ sink, NULL);
for (i = 0; i < num_sources; i++) {
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = {};
GstElement *source_bin = NULL;
source_bin = create_source_bin(i, argv[i + 1]);
if (!source_bin) {
g_printerr("Failed to create source bin. Exiting.\n");
return -1;
}
gst_bin_add(GST_BIN(pipeline), source_bin);
g_snprintf(pad_name, 15, "sink_%u", i + 1);
sinkpad = gst_element_get_request_pad(streammux, pad_name);
if (!sinkpad) {
g_printerr("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad(source_bin, "src");
if (!srcpad) {
g_printerr("Failed to get src pad of source bin. Exiting.\n");
return -1;
}
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref(srcpad);
gst_object_unref(sinkpad);
}
GstIterator *iter = gst_bin_iterate_elements(GST_BIN(pipeline));
GValue elem = G_VALUE_INIT;
gboolean done = false;
while (!done) {
switch (gst_iterator_next(iter, &elem)) {
case GST_ITERATOR_OK: {
GstElement *element = GST_ELEMENT(g_value_get_object(&elem));
g_print("Element name: %s\n", GST_ELEMENT_NAME(element));
break;
}
case GST_ITERATOR_RESYNC:
gst_iterator_resync(iter);
break;
case GST_ITERATOR_DONE:
case GST_ITERATOR_ERROR:
done = TRUE;
break;
}
}
if (!gst_element_link_many(data2.app_source, jpegparser, decoder, NULL))
{
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
if (!gst_element_link_many(streammux, queue2, queue3, queue4, queue5,
nvvidconv, NULL)) {
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
if (!gst_element_link_many(nvvidconv, nvosd, sink, NULL)) {
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
GstPad *sinkpad = nullptr, *srcpad = nullptr;
gchar pad_name_sink[16] = "sink_0";
gchar pad_name_src[16] = "src";
sinkpad = gst_element_get_request_pad(streammux, pad_name_sink);
if (!sinkpad) {
g_printerr("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad(decoder, pad_name_src);
if (!srcpad) {
g_printerr("Decoder request src pad failed. Exiting.\n");
return -1;
}
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr("Failed to link caps filter to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref(sinkpad);
gst_object_unref(srcpad);
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL,
"pipeline");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Running...\n");
g_main_loop_run(loop);
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove(bus_watch_id);
g_main_loop_unref(loop);
return 0;
}
Can you please check and point out what I am doing wrong?
Also, since there is no tiler, the sink should display images from the different sources one by one, right?