When using OpenCV + GStreamer for RTSP output, VLC has a 9-second delay. Why?

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU)
RTX 2060
• DeepStream Version
6.0.1, running inside the DeepStream 6.0.1 Docker container
• JetPack Version (valid for Jetson only)
• TensorRT Version
TRT8.2
• NVIDIA GPU Driver Version (valid for GPU only)
NVIDIA-SMI 470.182.03
• Issue Type( questions, new requirements, bugs)
questions

• How to reproduce the issue? (This is for bugs: include which sample app is used, the configuration file contents, the command line used, and other details for reproducing)

When using OpenCV + GStreamer for RTSP output, VLC shows a 9-second delay. Why is that? I used the CPU for encoding; what should I do if I want to switch to GPU encoding?

My code is as follows:


// Headers needed to compile this snippet (omitted from the original post):
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <opencv2/opencv.hpp>
#include <atomic>
#include <iostream>
#include <thread>

using namespace std;
using namespace cv;

typedef struct
{
  string INDEX;
  string in_rtsp;
  int out_width;
  int out_height;
  int out_fps;
  string out_port;
} Params;

typedef struct
{
  gboolean white;
  GstClockTime timestamp;
  int out_width;
  int out_height;
  int out_fps;
  string INDEX;
} MyContext;

int counter = 0;
GMainLoop *loop;
int dx = 1;
int x = 0;

static atomic<bool> isRunning(true);


static void need_data(GstElement *appsrc, guint unused, MyContext *ctx)
{

  GstBuffer *buffer;
  guint buffersize;
  GstFlowReturn ret;
  GstMapInfo info;

  counter++;
  
  Mat frameimage(int(ctx->out_height), int(ctx->out_width), CV_8UC3);
  
  x = x + dx;
  if (x >= 255 || x <= 0)
  {
    dx = -dx;
  }
  frameimage = cv::Scalar(x, x, x);

  putText(frameimage, std::to_string(counter), Point(100, 100),
          2, 1, Scalar(0, 0, 254), 1, 8, false);

  
  buffersize = frameimage.cols * frameimage.rows * frameimage.channels();
  
  buffer = gst_buffer_new_and_alloc(buffersize);
  uchar *IMG_data = frameimage.data;
  

  if (gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_WRITE))
  {
    memcpy(info.data, IMG_data, buffersize);
  
    gst_buffer_unmap(buffer, &info);
  }
  else
    g_print("OPS! ERROR.");

  ctx->white = !ctx->white;

  // advance the timestamp by one frame duration (1/out_fps seconds)
  GST_BUFFER_PTS(buffer) = ctx->timestamp;
  // std::cout<<"Gctx->timestamp:"<< ctx->timestamp <<std::endl;
  GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, int(ctx->out_fps));
  ctx->timestamp += GST_BUFFER_DURATION(buffer);

  g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);

  if (ret != GST_FLOW_OK)
  {
    g_print("ops\n");
    GST_DEBUG("something wrong in cb_need_data");
    g_main_loop_quit(loop);
  }
  gst_buffer_unref(buffer);
  std::cout << "Index:" << ctx->INDEX << "  Frame:" << counter << ",x:" << x << std::endl;
}

static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer user_data)
{
  GstElement *element, *appsrc;
  MyContext *ctx;
  Params p = *((Params *)user_data);

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element(media);

  /* get our appsrc, we named it 'mysrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up(GST_BIN(element), "mysrc");

  /* this instructs appsrc that we will be dealing with timed buffers */
  // g_object_set (G_OBJECT (appsrc), "is-live" , TRUE ,  NULL);
  // g_object_set (G_OBJECT (appsrc), "min-latency" , 67000000 ,  NULL);
  g_object_set(G_OBJECT(appsrc),
               "stream-type", 0, // 0 = GST_APP_STREAM_TYPE_STREAM (not RTSP-specific)
               "format", GST_FORMAT_TIME, NULL);

  g_object_set(G_OBJECT(appsrc), "caps",
               gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, int(p.out_width),
                                   "height", G_TYPE_INT, int(p.out_height),
                                   "framerate", GST_TYPE_FRACTION, int(p.out_fps), 1,
                                   "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL),
               NULL);

  /* MyContext holds a std::string, so allocate with new (g_new0 would skip
     the string's constructor) */
  ctx = new MyContext();
  ctx->white = FALSE;
  ctx->timestamp = 0;
  ctx->out_fps = p.out_fps;
  ctx->out_width = p.out_width;
  ctx->out_height = p.out_height;
  ctx->INDEX = p.INDEX;

  /* make sure the data is freed when the media is gone; MyContext holds a
     std::string, so delete it properly instead of g_free */
  g_object_set_data_full(G_OBJECT(media), "my-extra-data", ctx,
                         [](gpointer p) { delete static_cast<MyContext *>(p); });

  /* install the callback that will be called when a buffer is needed */
  g_signal_connect(appsrc, "need-data", (GCallback)need_data, ctx);
  //g_signal_connect (appsrc, "need-data", G_CALLBACK (start_feed), );
  //g_signal_connect (appsrc, "enough-data", G_CALLBACK (stop_feed), );
  gst_object_unref(appsrc);
  gst_object_unref(element);
}

void start_server()
{

  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;

  Params para1;
  para1.INDEX = "0";
  para1.in_rtsp = "";
  para1.out_width = 1280;
  para1.out_height = 720;
  para1.out_fps = 25;
  para1.out_port = "8554";

  gst_init(NULL, NULL);

  loop = g_main_loop_new(NULL, FALSE);

  /* create a server instance */
  server = gst_rtsp_server_new();
  g_object_set(server, "service", para1.out_port.c_str(), NULL);

  
  mounts = gst_rtsp_server_get_mount_points(server);


  factory = gst_rtsp_media_factory_new();

  /* The original 200-byte heap buffer was too small for this ~300-character
     launch string; a 512-byte stack buffer with snprintf avoids the overflow. */
  char outAppsrc[512];
  snprintf(outAppsrc, sizeof(outAppsrc),
           "( appsrc name=mysrc is-live=true block=true format=GST_FORMAT_TIME caps=video/x-raw,format=BGR,width=%d,height=%d,framerate=%d/1 ! videoconvert ! video/x-raw,format=I420 ! x264enc speed-preset=ultrafast tune=zerolatency byte-stream=true threads=1 ! rtph264pay config-interval=1 name=pay0 pt=96 )",
           para1.out_width, para1.out_height, para1.out_fps);

  gst_rtsp_media_factory_set_launch(factory,
                                    outAppsrc);


  g_signal_connect(factory, "media-configure", (GCallback)media_configure,
                   (void *)&para1); 

  char index_url[16] = {0};
  sprintf(index_url, "/index/%s", para1.INDEX.c_str());

  gst_rtsp_mount_points_add_factory(mounts, index_url, factory);


  g_object_unref(mounts);

  gst_rtsp_server_attach(server, NULL);


  g_print("stream ready at rtsp://127.0.0.1:%s%s\n", para1.out_port.c_str(), index_url);
  g_main_loop_run(loop);
}
int main(int argc, char *argv[])
{
  VideoCapture cap; /* unused in this minimal reproduction */

  thread server_thread(start_server);

  /* Wait for 'q', then shut down; joining unconditionally avoids the
     std::terminate a joinable thread's destructor would trigger if any
     other key were pressed. */
  char input;
  while (cin >> input && input != 'q')
    ;
  isRunning = false;
  g_main_loop_quit(loop);
  server_thread.join();

  return 0;
}
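
For reference, the snippet above needs the GStreamer RTSP server and OpenCV development packages to build. Assuming the pkg-config names gstreamer-rtsp-server-1.0 and opencv4 (they can differ per distribution or container), something like g++ -std=c++11 main.cpp $(pkg-config --cflags --libs gstreamer-rtsp-server-1.0 opencv4) -pthread -o rtsp_server should compile it.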


• Requirement details (This is for new requirements: include the module name, i.e. which plugin or which sample application, and the function description)
I observed that when VLC starts playing, it seems to connect at first but drops immediately, and then it takes 9 seconds before the picture displays normally. I also tried adding key-int-max=15 intra-refresh=true; that makes VLC show the picture sooner, but it doesn't seem to work properly in many players, and there is stuttering at the beginning, so I had to give up. I want playback from the RTSP server to start as soon as possible. What should I do?
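
The 9 seconds is consistent with the keyframe interval rather than a server bug: with key-int-max left at its default of 0 (automatic), x264 chooses roughly 250 frames between IDR frames, which at 25 fps is about 10 seconds, and VLC cannot show anything until the first IDR arrives (on top of its default 1000 ms network cache). A middle ground between that default and the stutter-prone key-int-max=15 intra-refresh=true is one keyframe per second. A minimal sketch of the adjusted launch string, assuming the same para1 fields as in the code above:

/* Sketch: one IDR per second, so a new client waits at most ~1 s for a
 * decodable frame; everything else matches the original pipeline. */
snprintf(outAppsrc, sizeof(outAppsrc),
         "( appsrc name=mysrc is-live=true block=true format=GST_FORMAT_TIME "
         "caps=video/x-raw,format=BGR,width=%d,height=%d,framerate=%d/1 "
         "! videoconvert ! video/x-raw,format=I420 "
         "! x264enc speed-preset=ultrafast tune=zerolatency byte-stream=true "
         "threads=1 key-int-max=%d "
         "! rtph264pay config-interval=1 name=pay0 pt=96 )",
         para1.out_width, para1.out_height, para1.out_fps,
         para1.out_fps /* one keyframe per second */);

On the player side, starting VLC with a smaller cache, for example vlc --network-caching=300 rtsp://127.0.0.1:8554/index/0, trims the remaining second of client-side buffering.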

You are using a pure open-source GStreamer pipeline and OpenCV. This has nothing to do with DeepStream or even the RTX 2060. You can search GStreamer and OpenCV resources for RTSP-related guidance.

I used the CPU for encoding; what should I do if I want to switch to GPU encoding?

What is your source? A camera, a video file, or a live stream such as HTTP or RTSP?

For my code, I use OpenCV to read an MP4 file and get a cv::Mat image as the source. Can I use NvDsRtspOut for my output bin?

There is MP4 demuxing and video decoding inside DeepStream; you don't need OpenCV.
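
As an illustration of that point: if the MP4 already contains H.264, the RTSP factory can serve it without re-encoding at all. A hedged sketch, assuming H.264 content in the container (the file path is a placeholder):

/* Sketch: demux the MP4 and payload the H.264 stream directly, with no
 * OpenCV, no raw frames and no encoder in the loop.
 * "/path/to/input.mp4" is a placeholder path. */
gst_rtsp_media_factory_set_launch(factory,
    "( filesrc location=/path/to/input.mp4 ! qtdemux ! h264parse "
    "! rtph264pay config-interval=1 name=pay0 pt=96 )");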

We already provide an RTSP output sample in the DeepStream SDK. Please refer to the deepstream-app sample: C/C++ Sample Apps Source Details — DeepStream 6.2 Release documentation
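
For the GPU-encoding half of the question: the DeepStream 6.0.1 dGPU container ships the NVENC-backed nvv4l2h264enc element, so the x264enc branch of the launch string can be swapped out. A minimal sketch under that assumption; frames must be moved into NVMM memory first, and the property names are worth verifying with gst-inspect-1.0 nvv4l2h264enc:

/* Sketch: appsrc (BGR, system memory) -> videoconvert -> nvvideoconvert
 * (upload into NVMM) -> NVENC H.264 -> RTP payloader.
 * iframeinterval and insert-sps-pps are assumed property names; check
 * them with gst-inspect-1.0 in your container. */
char outAppsrcGpu[512];
snprintf(outAppsrcGpu, sizeof(outAppsrcGpu),
         "( appsrc name=mysrc is-live=true block=true format=GST_FORMAT_TIME "
         "caps=video/x-raw,format=BGR,width=%d,height=%d,framerate=%d/1 "
         "! videoconvert ! nvvideoconvert "
         "! video/x-raw(memory:NVMM),format=NV12 "
         "! nvv4l2h264enc iframeinterval=%d insert-sps-pps=true "
         "! h264parse ! rtph264pay config-interval=1 name=pay0 pt=96 )",
         para1.out_width, para1.out_height, para1.out_fps,
         para1.out_fps /* one IDR per second, matching the CPU sketch */);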

Er, I love OpenCV; it's very simple, and GStreamer is hard to debug... but OK, that's all. Thank you!

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.