Can't open the url of rtsp server on jetson nx

  • deepstream-app version 5.0.0
  • DeepStreamSDK 5.0.0
  • CUDA Driver Version: 10.2
  • CUDA Runtime Version: 10.2
  • TensorRT Version: 7.1
  • cuDNN Version: 8.0
  • libNVWarp360 Version: 2.0.1d3
  • jetpack 4.5
  • L4T 32.5.0
  • Jetson Xavier NX

Hi, everyone.
I am using DeepStream to run an application.

application :
input RTSP or file , output RTSP

problem:
Opening the URL of the output RTSP stream shows a black screen; the same code works correctly on x86.

black screen picture

Here is my code:

#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
#define MAX_DISPLAY_LEN 64
#include <cuda_runtime_api.h>
#include <glib.h>
#include <gst/gst.h>
#include <json-glib/json-glib.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "gstnvdsmeta.h"
#include "nvbufsurface.h"
#include "nvds_obj_encode.h"
#include <libsoup/soup.h>
/* Callback for uridecodebin's "pad-added" signal.
 * Links the newly created decoder src pad to the source bin's ghost pad,
 * but only for video pads that carry NVMM (NVIDIA device) memory.
 * data: the source GstBin created in create_source_bin(). */
static void cb_newpad(GstElement *decodebin, GstPad *decoder_src_pad,
                      gpointer data) {
  g_print("In cb_newpad\n");
  GstCaps *caps = gst_pad_get_current_caps(decoder_src_pad);
  if (!caps) {
    /* Pad has no negotiated caps yet; nothing we can inspect. */
    g_printerr("cb_newpad: pad has no current caps\n");
    return;
  }
  const GstStructure *str = gst_caps_get_structure(caps, 0);
  const gchar *name = gst_structure_get_name(str);
  GstElement *source_bin = (GstElement *)data;
  GstCapsFeatures *features = gst_caps_get_features(caps, 0);

  /* Need to check if the pad created by the decodebin is for video and not
   * audio. */
  if (!strncmp(name, "video", 5)) {
    /* Link the decodebin pad only if decodebin has picked nvidia
     * decoder plugin nvdec_*. We do this by checking if the pad caps contain
     * NVMM memory features. */
    if (gst_caps_features_contains(features, GST_CAPS_FEATURES_NVMM)) {
      /* Get the source bin ghost pad */
      GstPad *bin_ghost_pad = gst_element_get_static_pad(source_bin, "src");
      if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad),
                                    decoder_src_pad)) {
        g_printerr("Failed to link decoder src pad to source bin ghost pad\n");
      }
      gst_object_unref(bin_ghost_pad);
    } else {
      g_printerr("Error: Decodebin did not pick nvidia decoder plugin.\n");
    }
  }
  /* Fix: gst_pad_get_current_caps() returns a reference owned by the caller;
   * the original code leaked it on every pad-added callback. */
  gst_caps_unref(caps);
}

/* Callback for decodebin's "child-added" signal.
 * Recurses into nested decodebins and, when the hardware decoder
 * (nvv4l2decoder) appears, enables bufapi-version so it outputs the
 * buffer layout nvstreammux expects. */
static void decodebin_child_added(GstChildProxy *child_proxy, GObject *object,
                                  gchar *name, gpointer user_data) {
  g_print("Decodebin child added: %s\n", name);

  /* A child whose name starts with "decodebin" is itself a decodebin:
   * hook its child-added signal too so we see its children as well. */
  gboolean is_nested_decodebin = (g_strrstr(name, "decodebin") == name);
  if (is_nested_decodebin) {
    g_signal_connect(G_OBJECT(object), "child-added",
                     G_CALLBACK(decodebin_child_added), user_data);
  }

  /* When the NVIDIA V4L2 decoder shows up, switch it to the DeepStream
   * buffer API. */
  gboolean is_nv_decoder = (g_strstr_len(name, -1, "nvv4l2decoder") == name);
  if (is_nv_decoder) {
    g_print("Seting bufapi_version\n");
    g_object_set(object, "bufapi-version", TRUE, NULL);
  }
}

/* Builds a source bin wrapping a uridecodebin for the given uri.
 * index: used only to give the bin a unique name ("source-bin-%02d").
 * Returns a floating GstElement* on success, NULL on failure (nothing
 * leaked on failure paths). */
static GstElement *create_source_bin(guint index, gchar *uri) {
  GstElement *bin = NULL, *uri_decode_bin = NULL;
  gchar bin_name[16] = {};

  g_snprintf(bin_name, 15, "source-bin-%02d", index);
  /* Create a source GstBin to abstract this bin's content from the rest of the
   * pipeline */
  bin = gst_bin_new(bin_name);

  /* Source element for reading from the uri.
   * We will use decodebin and let it figure out the container format of the
   * stream and the codec and plug the appropriate demux and decode plugins. */
  uri_decode_bin = gst_element_factory_make("uridecodebin", "uri-decode-bin");

  if (!bin || !uri_decode_bin) {
    g_printerr("One element in source bin could not be created.\n");
    /* Fix: release whichever element was created, otherwise it leaks. */
    if (bin)
      gst_object_unref(bin);
    if (uri_decode_bin)
      gst_object_unref(uri_decode_bin);
    return NULL;
  }

  /* We set the input uri to the source element */
  g_object_set(G_OBJECT(uri_decode_bin), "uri", uri, NULL);

  /* Connect to the "pad-added" signal of the decodebin which generates a
   * callback once a new pad for raw data has been created by the decodebin */
  g_signal_connect(G_OBJECT(uri_decode_bin), "pad-added", G_CALLBACK(cb_newpad),
                   bin);
  g_signal_connect(G_OBJECT(uri_decode_bin), "child-added",
                   G_CALLBACK(decodebin_child_added), bin);

  /* The bin takes ownership of uri_decode_bin from here on. */
  gst_bin_add(GST_BIN(bin), uri_decode_bin);

  /* We need to create a ghost pad for the source bin which will act as a proxy
   * for the video decoder src pad. The ghost pad will not have a target right
   * now. Once the decode bin creates the video decoder and generates the
   * cb_newpad callback, we will set the ghost pad target to the video decoder
   * src pad. */
  if (!gst_element_add_pad(bin,
                           gst_ghost_pad_new_no_target("src", GST_PAD_SRC))) {
    g_printerr("Failed to add ghost pad in source bin\n");
    /* Fix: the bin (and the uri_decode_bin inside it) was leaked on this
     * error path. */
    gst_object_unref(bin);
    return NULL;
  }

  return bin;
}
/* Builds and runs the pipeline:
 *   uridecodebin -> nvstreammux -> queue -> nvvideoconvert -> queue
 *     -> capsfilter(NVMM,I420) -> nvv4l2h264enc -> rtph264pay -> udpsink
 * and serves the RTP stream back out via an RTSP server mounted at
 * rtsp://<host>:8555/ds-test (the RTSP factory reads from the udpsink port). */
int main(int argc, char *argv[]) {
  GMainLoop *loop = NULL;
  /* Fix: removed the many element pointers (tee, nvosd, filesink, ...) that
   * were declared but never used, and queue3/queue4 which were created but
   * never added to any bin (leaked floating refs). */
  GstElement *pipeline = NULL, *streammux = NULL, *nvvidconv = NULL,
             *filter = NULL, *encoder = NULL, *rtppay = NULL, *sink = NULL,
             *queue1 = NULL, *queue2 = NULL;
  GstPad *sinkpad, *srcpad;
  int current_device = -1;
  cudaGetDevice(&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties(&prop, current_device);

  gst_init(&argc, &argv);
  loop = g_main_loop_new(NULL, FALSE);

  pipeline = gst_pipeline_new("NN_water_gauge");
  streammux = gst_element_factory_make("nvstreammux", "stream");
  if (!pipeline || !streammux) {
    g_printerr("Failed to create pipeline or stream muxer. Exiting.\n");
    return -1;
  }
  gst_bin_add(GST_BIN(pipeline), streammux);

  GstElement *source_bin = create_source_bin(
      0, "rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov");
  /* Fix: create_source_bin() can return NULL; the original dereferenced it
   * unchecked. */
  if (!source_bin) {
    g_printerr("Failed to create source bin. Exiting.\n");
    return -1;
  }

  sinkpad = gst_element_get_request_pad(streammux, "sink_0");
  srcpad = gst_element_get_static_pad(source_bin, "src");
  if (!srcpad) {
    g_printerr("Failed to get src pad of source bin. Exiting.\n");
    return -1;
  }

  gst_bin_add(GST_BIN(pipeline), source_bin);
  if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) {
    g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
    return -1;
  }
  gst_object_unref(srcpad);
  gst_object_unref(sinkpad);

  nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");

  /* Force NVMM/I420 into the encoder. */
  filter = gst_element_factory_make("capsfilter", "filter");
  GstCaps *caps = NULL;
  caps = gst_caps_from_string("video/x-raw(memory:NVMM),format=I420");
  g_object_set(G_OBJECT(filter), "caps", caps, NULL);
  gst_caps_unref(caps);

  encoder = gst_element_factory_make("nvv4l2h264enc", "encoder");
  rtppay = gst_element_factory_make("rtph264pay", "rtppay");
  sink = gst_element_factory_make("udpsink", "udpsink");

  /* Queues decouple the elements on either side. */
  queue1 = gst_element_factory_make("queue", "queue1");
  queue2 = gst_element_factory_make("queue", "queue2");

  if (!nvvidconv || !filter || !encoder || !rtppay || !sink || !queue1 ||
      !queue2) {
    g_printerr("One pipeline element could not be created. Exiting.\n");
    return -1;
  }

  g_object_set(G_OBJECT(streammux), "width", 1920, "height", 1080, "batch-size",
               1, "live-source", 1, "batched-push-timeout", 40000, NULL);
  /* Fix for the black-screen RTSP output on Jetson: the encoder must insert
   * SPS/PPS into the stream so late-joining RTSP clients can decode, and
   * needs the DeepStream buffer API + a preset level set (this is the change
   * that resolved the original problem). */
  g_object_set(G_OBJECT(encoder), "bitrate", 4000000, "preset-level", 1,
               "insert-sps-pps", 1, "bufapi-version", 1, NULL);
  /* Fix: use the GLib FALSE macro rather than lowercase `false`. */
  g_object_set(G_OBJECT(sink), "host", "224.224.255.255", "port", 18000,
               "async", FALSE, "sync", 1, NULL);

  gst_bin_add_many(GST_BIN(pipeline), queue1, nvvidconv, queue2, filter,
                   encoder, rtppay, sink, NULL);

  /* Fix: check the link result instead of ignoring it. */
  if (!gst_element_link_many(streammux, queue1, nvvidconv, queue2, filter,
                             encoder, rtppay, sink, NULL)) {
    g_printerr("Elements could not be linked. Exiting.\n");
    return -1;
  }

  /* RTSP server: re-serves the RTP packets the pipeline pushes to udpsink
   * port 18000, mounted at /ds-test on port 8555. */
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  server = gst_rtsp_server_new();

  mounts = gst_rtsp_server_get_mount_points(server);

  factory = gst_rtsp_media_factory_new();

  g_object_set(server, "service", "8555", NULL);
  gst_rtsp_media_factory_set_launch(
      factory, "( udpsrc name=pay0 port=18000 buffer-size=524288 "
               "caps=\"application/x-rtp, media=video, clock-rate=90000, "
               "encoding-name=(string)H264, payload=96 \" )");
  gst_rtsp_media_factory_set_shared(factory, TRUE);
  gst_rtsp_mount_points_add_factory(mounts, "/ds-test", factory);

  gst_rtsp_server_attach(server, NULL);

  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print("Running...\n");

  g_main_loop_run(loop);

  /* Out of the main loop, clean up nicely */
  g_print("Returned, stopping playback\n");
  gst_element_set_state(pipeline, GST_STATE_NULL);
  g_print("Deleting pipeline\n");

  gst_object_unref(GST_OBJECT(pipeline));
  g_main_loop_unref(loop);
  return 0;
}

Do you have any ideas ? I look forward to your reply

Hi, based on this Python example: output_rtsp

I added the following line and it works now; this was my mistake, sorry.

g_object_set(G_OBJECT(encoder),"preset-level",1,"insert-sps-pps",1,"bufapi-version",1,NULL);

Glad to know you resolved the problem.

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.