Segmentation fault in C

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU)
• DeepStream Version 5.0
I am getting a segmentation fault in my C pipeline.
Please help me resolve the error.
#include <gst/gst.h>
#include <string.h>

#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void
cb_newpad (GstElement * decodebin, GstPad * decoder_src_pad, gpointer data)
{
  g_print ("In cb_newpad\n");
  GstCaps *caps = gst_pad_get_current_caps (decoder_src_pad);
  const GstStructure *str = gst_caps_get_structure (caps, 0);
  const gchar *name = gst_structure_get_name (str);
  GstElement *source_bin = (GstElement *) data;
  GstCapsFeatures *features = gst_caps_get_features (caps, 0);

  /* Need to check if the pad created by the decodebin is for video and not
   * audio. */
  if (!strncmp (name, "video", 5)) {
    /* Link the decodebin pad only if decodebin has picked nvidia
     * decoder plugin nvdec_*. We do this by checking if the pad caps contain
     * NVMM memory features. */
    if (gst_caps_features_contains (features, GST_CAPS_FEATURES_NVMM)) {
      /* Get the source bin ghost pad */
      GstPad *bin_ghost_pad = gst_element_get_static_pad (source_bin, "src");
      if (!gst_ghost_pad_set_target (GST_GHOST_PAD (bin_ghost_pad),
              decoder_src_pad)) {
        g_printerr ("Failed to link decoder src pad to source bin ghost pad\n");
      }
      gst_object_unref (bin_ghost_pad);
    } else {
      g_printerr ("Error: Decodebin did not pick nvidia decoder plugin.\n");
    }
  }
}

static void
decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
  g_print ("Decodebin child added: %s\n", name);
  if (g_strrstr (name, "decodebin") == name) {
    g_signal_connect (G_OBJECT (object), "child-added",
        G_CALLBACK (decodebin_child_added), user_data);
  }
}

static GstElement *
create_source_bin (gchar * uri)
{
  GstElement *bin = NULL, *uri_decode_bin = NULL;
  gchar bin_name[16] = { };

  //g_snprintf (bin_name, 15, "source-bin-%02d", index);
  /* Create a source GstBin to abstract this bin's content from the rest of the
   * pipeline */
  bin = gst_bin_new (bin_name);

  /* Source element for reading from the uri.
   * We will use decodebin and let it figure out the container format of the
   * stream and the codec and plug the appropriate demux and decode plugins. */
  uri_decode_bin = gst_element_factory_make ("uridecodebin", "uri-decode-bin");

  if (!bin || !uri_decode_bin) {
    g_printerr ("One element in source bin could not be created.\n");
    return NULL;
  }

  /* We set the input uri to the source element */
  g_object_set (G_OBJECT (uri_decode_bin), "uri", uri, NULL);

  /* Connect to the "pad-added" signal of the decodebin which generates a
   * callback once a new pad for raw data has been created by the decodebin */
  g_signal_connect (G_OBJECT (uri_decode_bin), "pad-added",
      G_CALLBACK (cb_newpad), bin);
  g_signal_connect (G_OBJECT (uri_decode_bin), "child-added",
      G_CALLBACK (decodebin_child_added), bin);

  gst_bin_add (GST_BIN (bin), uri_decode_bin);

  /* We need to create a ghost pad for the source bin which will act as a proxy
   * for the video decoder src pad. The ghost pad will not have a target right
   * now. Once the decode bin creates the video decoder and generates the
   * cb_newpad callback, we will set the ghost pad target to the video decoder
   * src pad. */
  if (!gst_element_add_pad (bin, gst_ghost_pad_new_no_target ("src",
              GST_PAD_SRC))) {
    g_printerr ("Failed to add ghost pad in source bin\n");
    return NULL;
  }

  return bin;
}

gint main (gint argc, gchar *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *src, *depay, *avdec, *videorate, *capsfilterRate,
      *jpegenc, *multifilesink, *sink;
  GstBus *bus;
  guint bus_watch_id;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* setup */
  pipeline = gst_pipeline_new ("pipeline");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  if (argc < 2) {
    g_printerr ("Usage: %s <uri>\n", argv[0]);
    return -1;
  }

  /* Standard GStreamer initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("pipeline");

  //src = gst_element_factory_make ("rtspsrc", "rtspsrc");

  GstPad *sinkpad, *srcpad;
  gchar pad_name[16] = { };
  GstElement *source_bin = create_source_bin (argv[1]);

  if (!source_bin) {
    g_printerr ("Failed to create source bin. Exiting.\n");
    return -1;
  }

  gst_bin_add (GST_BIN (pipeline), source_bin);

  //g_snprintf (pad_name, 15, "sink_%u", i);
  sinkpad = gst_element_get_request_pad (sink, pad_name);
  if (!sinkpad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  srcpad = gst_element_get_static_pad (source_bin, "src");
  if (!srcpad) {
    g_printerr ("Failed to get src pad of source bin. Exiting.\n");
    return -1;
  }

  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  depay = gst_element_factory_make ("rtph264depay", "rtph264depay");

  avdec = gst_element_factory_make ("avdec_h264", "avdec_h264");

  videorate = gst_element_factory_make ("videorate", "videorate");

  capsfilterRate = gst_element_factory_make ("capsfilter", "video-rate");

  GstCaps *capsRate = gst_caps_from_string ("video/x-raw,framerate=10/1");
  g_object_set (capsfilterRate, "caps", capsRate, NULL);
  gst_caps_unref (capsRate);  //again unref!

  jpegenc = gst_element_factory_make ("jpegenc", "jpegenc");

  sink = gst_element_factory_make ("multifilesink", "multifilesink");

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  g_object_set (G_OBJECT (src), "location", argv[1], NULL);

  g_object_set (G_OBJECT (sink), "location", "./frame%08d.jpg", NULL);

  //then add all elements together
  gst_bin_add_many (GST_BIN (pipeline), src, depay, avdec, videorate,
      capsfilterRate, jpegenc, sink, NULL);

  //link everything after source
  gst_element_link_many (depay, avdec, videorate, capsfilterRate, jpegenc,
      sink, NULL);

  g_print ("Now playing:");
  g_print (" %s,", argv[1]);
  g_print ("\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

Do you want someone to debug your code for you?

You need to debug the code yourself. If you find anything wrong with NVIDIA-provided functions, you can report it to us.
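
As a general pattern (a minimal sketch, not a fix for this specific code): it is worth confirming that every gst_element_factory_make () call actually returned an element before that element is passed to g_object_set (), gst_bin_add_many () or a pad-request call, since using a NULL or uninitialized element pointer is a common cause of segmentation faults in GStreamer applications. The factory names below are assumed from the pipeline above and are only illustrative.

#include <gst/gst.h>

/* Minimal sketch: check element creation before use. */
gint main (gint argc, gchar *argv[])
{
  gst_init (&argc, &argv);

  GstElement *depay = gst_element_factory_make ("rtph264depay", "depay");
  GstElement *sink = gst_element_factory_make ("multifilesink", "filesink");

  /* A NULL return (missing plugin, typo in the factory name) must be handled;
   * using the NULL pointer later typically crashes the process. */
  if (!depay || !sink) {
    g_printerr ("Failed to create one or more elements.\n");
    return -1;
  }

  /* ... build, link and run the pipeline here ... */

  gst_object_unref (depay);
  gst_object_unref (sink);
  return 0;
}

Building with -g and running the binary under a debugger will also show which call actually faults.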

This topic was automatically closed 60 days after the last reply. New replies are no longer allowed.