Hi folks.
I found that omxh264dec or nvvidconv has a memory leak problem.
The tested pipeline is as follows.
rtspsrc ! queue ! rtph264depay ! h264parse ! omxh264dec ! queue ! nvvidconv ! capsfilter ! xvimagesink
I have also attached test code at the end.
The test code starts and stops streaming every 10 seconds.
Then, using the pmap command, you can see that memory usage keeps increasing.
On the other hand, when using avdec_h264 / videoscale (the general-purpose software decoding/scaling elements) instead of omxh264dec / nvvidconv, the memory usage does not increase.
The memory usage increases by about 30 MB over 10 hours.
That may seem small, but I want to run more than 30 pipelines at the same time, and the pipelines may stop and restart at short intervals, so this memory leak is critical for me.
Does anyone know the solution about it?
FYI, I also checked this leak using GstTracer.
Here is the log.
0:00:05.737988245 5350 0x615a10 TRACE GST_TRACER :0:: object-alive, type-name=(string)GstCaps, address=(gpointer)0x7f54007720, description=(string)video/x-raw, format=(string)I420, width=(int)800, height=(int)450, framerate=(fraction)0/1, ref-count=(uint)1, trace=(string)/usr/lib/aarch64-linux-gnu/libgstreamer-1.0.so.0(+0x6a4f0) [0x7f7cae54f0]
;
0:00:05.738213107 5350 0x615a10 TRACE GST_TRACER :0:: object-alive, type-name=(string)GstCaps, address=(gpointer)0x7f6c003140, description=(string)video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080, interlace-mode=(string)progressive, pixel-aspect-ratio=(fraction)1/1, chroma-site=(string)mpeg2, colorimetry=(string)bt709, framerate=(fraction)0/1, ref-count=(uint)1, trace=(string)/usr/lib/aarch64-linux-gnu/libgstreamer-1.0.so.0(+0x6a4f0) [0x7f7cae54f0]
;
** (a.out:5350): WARNING **: Leaks detected
Here is the test code; it is based on GStreamer's tutorial sample ("Your first application").
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad;
GstElement *queue = (GstElement *) data;
/* We can now link this pad */
g_print ("Dynamic pad created, linking front-queue\n");
sinkpad = gst_element_get_static_pad (queue, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
static gint
on_timeout_switch ( GstElement *pipeline )
{
if(GST_STATE(pipeline) == GST_STATE_NULL){
g_print ("Now playing\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
else{
g_print ("Now stopping\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
}
return TRUE;
}
int
main (int argc,
char *argv[])
{
int ret = 0;
GMainLoop *loop;
GstElement *pipeline, *source, *queue1, *depay, *parse, *decoder, *queue2, *conv, *sclcaps, *sink;
GstBus *bus;
guint bus_watch_id;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
if (argc != 2) {
g_printerr ("Usage: %s <URI>\n", argv[0]);
goto ERROR;
}
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
source = gst_element_factory_make ("rtspsrc", "rtsp-source");
queue1 = gst_element_factory_make ("queue", "front-queue");
depay = gst_element_factory_make ("rtph264depay", "rtp-depay");
parse = gst_element_factory_make ("h264parse", "parser");
decoder = gst_element_factory_make ("omxh264dec", "decoder");
queue2 = gst_element_factory_make ("queue", "back-queue");
conv = gst_element_factory_make ("nvvidconv", "scaler");
sclcaps = gst_element_factory_make ("capsfilter", "scale-filter");
sink = gst_element_factory_make ("xvimagesink", "renderer");
if (!pipeline || !source || !queue1 || !depay || !parse || !decoder || !queue2 || !conv || !sclcaps || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
goto ERROR;
}
/* Set up the pipeline */
/* we set the input to the source element */
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
/* Fix the output stream resolution */
GstCaps *caps = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 800,
"height", G_TYPE_INT, 450,
NULL);
g_object_set (G_OBJECT (sclcaps),
"caps", caps,
NULL);
gst_caps_unref(caps);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source, queue1, depay, parse, decoder, queue2, conv, sclcaps, sink, NULL);
/* we link the elements together */
gst_element_link_many (queue1, depay, parse, decoder, queue2, conv, sclcaps, sink, NULL);
g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), queue1);
/* Set the pipeline to "playing" state*/
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Start/Stop streaming every 10 sec. */
g_timeout_add_seconds(10, (GSourceFunc)on_timeout_switch, pipeline);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
EXIT:
g_main_loop_unref (loop);
gst_deinit();
return ret;
ERROR:
ret = -1;
goto EXIT;
}