Please provide complete information as applicable to your setup.
• Hardware Platform: NVIDIA GeForce RTX 4050 GPU on x86-64 Ubuntu 22.04 system
• DeepStream Version: 7.1
• TensorRT Version: 10.3.0.26-1+cuda12.5
• NVIDIA GPU Driver Version: 535.230.02
• Issue Type: BUG
• How to reproduce the issue?
Hello! I am experiencing a segmentation fault when launching a GStreamer pipeline that contains an nvinfer plugin after having already created and destroyed a previous pipeline, also containing an nvinfer plugin, within the same program. I have been digging through GStreamer trace logs and using gdb/valgrind, and I am fairly certain the segmentation fault occurs in a call into the libnvinfer library, specifically when the nvinfer plugin tries to either create a new context or load the engine again. I have tried calling cudaDeviceReset() between the pipelines to clean up the CUDA memory, but it does not seem to help.
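For reference, the clean-up I tried between the two pipelines looked roughly like the sketch below (the helper name is made up for illustration; this is not part of the minimal test that follows):
#include <cuda_runtime.h>
#include <iostream>
// Attempted CUDA clean-up between tearing down the first pipeline and creating the second one (it did not help)
static void reset_cuda_between_pipelines() {
    // Make sure no GPU work is still in flight before resetting the device
    cudaError_t err = cudaDeviceSynchronize();
    if (err != cudaSuccess)
        std::cerr << "cudaDeviceSynchronize failed: " << cudaGetErrorString(err) << std::endl;
    // Destroy the primary context and all allocations of this process on the device
    err = cudaDeviceReset();
    if (err != cudaSuccess)
        std::cerr << "cudaDeviceReset failed: " << cudaGetErrorString(err) << std::endl;
}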
In order to reproduce the issue, I have coded a small test that you can run for yourself:
/* This is a minimal test to reproduce the problem when restarting the nvinfer element.
In this test we create a pipeline with an nvinfer element and run it for 10 seconds.
Then we stop the pipeline and run another pipeline with nvinfer for 10 seconds.
Related issues:
https://forums.developer.nvidia.com/t/restarting-pipeline-on-deepstream2-0/63258/20
*/
#include <gst/gst.h>
#include <iostream>
#include <chrono>
#include <thread>
#include <cuda_runtime.h>
int main(int argc, char **argv) {
// Initialize gstreamer
gst_init(&argc, &argv);
// Create the first gst pipeline
std::cout << "-------------------1st PIPELINE --------------------" << std::endl;
GstElement* pipeline = gst_pipeline_new("nvinfer_pipeline1");
// Add videotestsrc source
GstElement* source = gst_element_factory_make("videotestsrc", "source1");
gst_bin_add(GST_BIN(pipeline), source);
// Add nvvideoconvert
GstElement* videoconvert = gst_element_factory_make("nvvideoconvert", "nvvideoconvert1");
g_object_set(videoconvert, "compute-hw", 1, NULL);
gst_bin_add(GST_BIN(pipeline), videoconvert);
// Add nvstreammux
GstElement* streammux = gst_element_factory_make("nvstreammux", "mux1");
g_object_set(streammux, "width", 2752, NULL);
g_object_set(streammux, "height", 1856, NULL);
g_object_set(streammux, "batch-size", 1, NULL);
g_object_set(streammux, "live-source", false, NULL);
gst_bin_add(GST_BIN(pipeline), streammux);
// Add nvinfer
GstElement* nvinfer = gst_element_factory_make("nvinfer", "nvinfer1");
g_object_set(nvinfer, "config-file-path", "/path/to/nvinfer/config.txt", NULL);
gst_bin_add(GST_BIN(pipeline), nvinfer);
// nvdsosd
GstElement* nvdsosd = gst_element_factory_make("nvdsosd", "nvdsosd1");
gst_bin_add(GST_BIN(pipeline), nvdsosd);
// Add screen
GstElement* sink = gst_element_factory_make("nveglglessink", "sink1");
gst_bin_add(GST_BIN(pipeline), sink);
// Link elements
std::cout << "Linking elements" << std::endl;
// Link manually source to videoconvert
gst_element_link(source, videoconvert);
// Link manually videoconvert to sink 0 of stream mux
GstPad* streammux_sink_0 = gst_element_get_request_pad(streammux, "sink_0");
GstPad* videoconvert_src = gst_element_get_static_pad(videoconvert, "src");
gst_pad_link(videoconvert_src, streammux_sink_0);
// Drop our pad references once they are linked
gst_object_unref(videoconvert_src);
gst_object_unref(streammux_sink_0);
// Link the rest of the elements
gst_element_link_many(streammux, nvinfer, nvdsosd, sink, NULL);
// Set the pipeline to playing
std::cout << "Setting pipeline to playing" << std::endl;
gst_element_set_state(pipeline, GST_STATE_PLAYING);
// Wait 10 seconds
std::this_thread::sleep_for(std::chrono::seconds(10));
// Stop the pipeline (send EOS downstream before tearing it down)
gst_element_send_event(pipeline, gst_event_new_eos());
GstBus *bus1 = gst_element_get_bus(pipeline);
// Wait (up to 5 s) until the EOS reaches the sink or an error is posted
GstMessage *msg1 = gst_bus_timed_pop_filtered(bus1, 5 * GST_SECOND,
    (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (msg1 != NULL)
    gst_message_unref(msg1);
gst_element_set_state(pipeline, GST_STATE_NULL);
std::cout << "First pipeline finished" << std::endl;
// Clean-up memory
gst_object_unref(pipeline);
gst_object_unref(bus1);
std::this_thread::sleep_for(std::chrono::seconds(10));
// Create the second gst pipeline
std::cout << std::endl << "-------------------2nd PIPELINE --------------------" << std::endl;
GstElement* pipeline2 = gst_pipeline_new("nvinfer_pipeline2");
// Add videotestsrc source
GstElement* source2 = gst_element_factory_make("videotestsrc", "source2");
gst_bin_add(GST_BIN(pipeline2), source2);
// Add nvvideoconvert
GstElement* videoconvert2 = gst_element_factory_make("nvvideoconvert", "nvvideoconvert2");
g_object_set(videoconvert2, "compute-hw", 1, NULL);
gst_bin_add(GST_BIN(pipeline2), videoconvert2);
// Add nvstreammux
GstElement* streammux2 = gst_element_factory_make("nvstreammux", "mux2");
g_object_set(streammux2, "width", 2752, NULL);
g_object_set(streammux2, "height", 1856, NULL);
g_object_set(streammux2, "batch-size", 1, NULL);
g_object_set(streammux2, "live-source", false, NULL);
gst_bin_add(GST_BIN(pipeline2), streammux2);
// Add nvinfer
GstElement* nvinfer2 = gst_element_factory_make("nvinfer", "nvinfer2");
g_object_set(nvinfer2, "config-file-path", "path/to/nvinfer/config.txt", NULL);
gst_bin_add(GST_BIN(pipeline2), nvinfer2);
// nvdsosd
GstElement* nvdsosd2 = gst_element_factory_make("nvdsosd", "nvdsosd2");
gst_bin_add(GST_BIN(pipeline2), nvdsosd2);
// Add screen
GstElement* sink2 = gst_element_factory_make("nveglglessink", "sink2");
gst_bin_add(GST_BIN(pipeline2), sink2);
// Link elements
std::cout << "Linking elements" << std::endl;
// Link manually source to videoconvert
gst_element_link(source2, videoconvert2);
// Link manually videoconvert to sink 0 of stream mux
GstPad* streammux_sink_02 = gst_element_get_request_pad(streammux2, "sink_0");
GstPad* videoconvert_src2 = gst_element_get_static_pad(videoconvert2, "src");
gst_pad_link(videoconvert_src2, streammux_sink_02);
// Drop our pad references once they are linked
gst_object_unref(videoconvert_src2);
gst_object_unref(streammux_sink_02);
// Link the rest of the elements
gst_element_link_many(streammux2, nvinfer2, nvdsosd2, sink2, NULL);
// Set the pipeline to playing
std::cout << "Setting pipeline to playing" << std::endl;
gst_element_set_state(pipeline2, GST_STATE_PLAYING);
// Wait 10 seconds
std::this_thread::sleep_for(std::chrono::seconds(10));
// Stop the pipeline (send EOS downstream before tearing it down)
gst_element_send_event(pipeline2, gst_event_new_eos());
GstBus *bus2 = gst_element_get_bus(pipeline2);
// Wait (up to 5 s) until the EOS reaches the sink or an error is posted
GstMessage *msg2 = gst_bus_timed_pop_filtered(bus2, 5 * GST_SECOND,
    (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (msg2 != NULL)
    gst_message_unref(msg2);
gst_element_set_state(pipeline2, GST_STATE_NULL);
gst_object_unref(pipeline2);
gst_object_unref(bus2);
std::cout << "Second pipeline finished" << std::endl;
std::cout << "Test finished with clean exit" << std::endl;
return 0;
}
You will have to substitute the nvinfer config file path with one of your own. The model/engine used does not seem to matter: I have tried dummy models as well as the ones provided with the DeepStream SDK, and the segmentation fault keeps appearing.
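For reference, a minimal nvinfer config of the kind I used looks roughly like the sketch below (paths are placeholders, and none of the specific values seem to affect the crash):
[property]
gpu-id=0
# Pre-built TensorRT engine (placeholder path)
model-engine-file=/path/to/model.engine
labelfile-path=/path/to/labels.txt
batch-size=1
# 0=FP32, 1=INT8, 2=FP16
network-mode=2
num-detected-classes=4
interval=0
gie-unique-id=1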
Any help with this issue would be very much appreciated. Thanks in advance for your time and attention!