How to convert object image to base64

It may take some time. We will get back to you when we have any findings.


Thank you.

I retested: with the source format uri=http://xxx.xxx.xxx:xxx/video.h264 it works fine, but with uri=http://xxx.xxx.xxx:xxx/video.mjpeg (or jpeg) it does not work and fails with the error above. I want to run with an mjpeg source; what do I need to configure?

deepstream-test5 does not support mjpeg RTSP streams. You need to add that functionality yourself or write your own app.

  • Can you suggest how to use uri=http://xxx.xxx.xxx:xxx/video.mjpeg in the deepstream-test5 app?
  • I tried the deepstream-test5 app with uri=http://xxx.xxx.xxx:xxx/video.mjpeg and it works well as long as I don't use cv::imwrite and cv::imencode, so I think the problem is not in the source.

You need to modify cb_rtspsrc_select_stream() in /opt/nvidia/deepstream/deepstream-5.0/sources/apps/apps-common/src/deepstream_source_bin.c to add "rtpjpegdepay" (https://gstreamer.freedesktop.org/documentation/rtp/rtpjpegdepay.html) and "jpegparse" (https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-bad/html/gst-plugins-bad-plugins-jpegparse.html) to support mjpeg stream parsing.

The current hardware decoder can only handle MJPEG in YUV420 format, so you must make sure your mjpeg stream contains YUV420-encoded data.
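If you are not sure which chroma format your camera produces, one quick check is to grab a single frame from the stream as a .jpg file and read the per-component sampling factors from its SOF marker: a 4:2:0 JPEG reports 2x2 for the Y component and 1x1 for Cb and Cr (2x1 would mean 4:2:2, 1x1 everywhere means 4:4:4). Below is a minimal, self-contained sketch; the default file name frame.jpg is only a placeholder.

/* sofcheck.c - print the per-component sampling factors from a JPEG's SOF
 * marker; a 4:2:0 frame reports component 1 (Y) as 2x2 and components
 * 2 and 3 (Cb, Cr) as 1x1.
 * Build: gcc sofcheck.c -o sofcheck && ./sofcheck frame.jpg
 */
#include <stdio.h>

int main (int argc, char **argv)
{
  FILE *f = fopen (argc > 1 ? argv[1] : "frame.jpg", "rb");
  if (!f) {
    perror ("fopen");
    return 1;
  }
  int c;
  while ((c = fgetc (f)) != EOF) {
    if (c != 0xFF)
      continue;
    int marker;
    do {
      marker = fgetc (f);         /* skip 0xFF fill bytes */
    } while (marker == 0xFF);
    /* SOF0/SOF1/SOF2 all share the same component layout. */
    if (marker == 0xC0 || marker == 0xC1 || marker == 0xC2) {
      fseek (f, 7, SEEK_CUR);     /* length(2) precision(1) h(2) w(2) */
      int ncomp = fgetc (f);
      for (int i = 0; i < ncomp; i++) {
        int id = fgetc (f);       /* component id: 1=Y, 2=Cb, 3=Cr   */
        int s = fgetc (f);        /* high nibble = H, low nibble = V */
        fgetc (f);                /* quantization table id, unused   */
        printf ("component %d: sampling %dx%d\n", id, (s >> 4) & 0xF, s & 0xF);
      }
      fclose (f);
      return 0;
    }
  }
  fclose (f);
  fprintf (stderr, "no SOF marker found\n");
  return 1;
}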


I will try, thank you.

In decodebin_child_added() I added a check for "jpegparse":

static void
decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
  NvDsSrcBin *bin = (NvDsSrcBin *) user_data;
  NvDsSourceConfig *config = bin->config;
  if (g_strrstr (name, "decodebin") == name) {
    g_signal_connect (G_OBJECT (object), "child-added",
        G_CALLBACK (decodebin_child_added), user_data);
  }
  if ((g_strrstr (name, "h264parse") == name) ||
      (g_strrstr (name, "h265parse") == name)) {
    g_object_set (object, "config-interval", -1, NULL);
  }
  if (g_strrstr (name, "jpegparse") == name) {
    /* jpegparse has no "config-interval" property (that one is specific to
     * h264parse/h265parse), so there is nothing to set for it here. */
  }
  if (g_strrstr (name, "fakesink") == name) {
    g_object_set (object, "enable-last-sample", FALSE, NULL);
  }
  if (g_strrstr (name, "nvcuvid") == name) {
    g_object_set (object, "gpu-id", config->gpu_id, NULL);

    g_object_set (G_OBJECT (object), "cuda-memory-type",
        config->cuda_memory_type, NULL);

    g_object_set (object, "source-id", config->camera_id, NULL);
    g_object_set (object, "num-decode-surfaces", config->num_decode_surfaces,
        NULL);
    if (config->Intra_decode)
      g_object_set (object, "Intra-decode", config->Intra_decode, NULL);
  }
  if (g_strstr_len (name, -1, "omx") == name) {
    if (config->Intra_decode)
      g_object_set (object, "skip-frames", 2, NULL);
    g_object_set (object, "disable-dvfs", TRUE, NULL);
  }
  if (g_strstr_len (name, -1, "nvjpegdec") == name) {
    g_object_set (object, "DeepStream", TRUE, NULL);
  }
  if (g_strstr_len (name, -1, "nvv4l2decoder") == name) {
    if (config->Intra_decode)
      g_object_set (object, "skip-frames", 2, NULL);
#ifdef __aarch64__
    g_object_set (object, "enable-max-performance", TRUE, NULL);
#else
    g_object_set (object, "gpu-id", config->gpu_id, NULL);
    g_object_set (G_OBJECT (object), "cudadec-memtype",
        config->cuda_memory_type, NULL);
#endif
    g_object_set (object, "drop-frame-interval", config->drop_frame_interval,
        NULL);
    g_object_set (object, "num-extra-surfaces", config->num_extra_surfaces,
        NULL);

    /* Seek only if file is the source. */
    if (config->loop && g_strstr_len (config->uri, -1, "file:/") == config->uri) {
      NVGSTDS_ELEM_ADD_PROBE (bin->src_buffer_probe, GST_ELEMENT (object),
          "sink", restart_stream_buf_prob,
          (GstPadProbeType) (GST_PAD_PROBE_TYPE_EVENT_BOTH |
              GST_PAD_PROBE_TYPE_EVENT_FLUSH | GST_PAD_PROBE_TYPE_BUFFER),
          bin);
    }
  }
done:
  return;
}

And in cb_rtspsrc_select_stream() I added:

} else if (!g_strcmp0 (encoding_name, "JPEG")) {
  bin->depay = gst_element_factory_make ("rtpjpegdepay", elem_name);
  g_snprintf (elem_name, sizeof (elem_name), "jpegparse_elem%d", bin->bin_id);
  bin->parser = gst_element_factory_make ("jpegparse", elem_name);
}

static gboolean
cb_rtspsrc_select_stream (GstElement *rtspsrc, guint num, GstCaps *caps,
        gpointer user_data)
{
  GstStructure *str = gst_caps_get_structure (caps, 0);
  const gchar *media = gst_structure_get_string (str, "media");
  const gchar *encoding_name = gst_structure_get_string (str, "encoding-name");
  gchar elem_name[50];
  NvDsSrcBin *bin = (NvDsSrcBin *) user_data;
  gboolean ret = FALSE;

  gboolean is_video = (!g_strcmp0 (media, "video"));

  if (!is_video)
    return FALSE;

  /* Create and add depay element only if it is not created yet. */
  if (!bin->depay) {
    g_snprintf (elem_name, sizeof (elem_name), "depay_elem%d", bin->bin_id);

    /* Add the proper depay element based on codec. */
    if (!g_strcmp0 (encoding_name, "H264")) {
      bin->depay = gst_element_factory_make ("rtph264depay", elem_name);
      g_snprintf (elem_name, sizeof (elem_name), "h264parse_elem%d", bin->bin_id);
      bin->parser = gst_element_factory_make ("h264parse", elem_name);
    } else if (!g_strcmp0 (encoding_name, "H265")) {
      bin->depay = gst_element_factory_make ("rtph265depay", elem_name);
      g_snprintf (elem_name, sizeof (elem_name), "h265parse_elem%d", bin->bin_id);
      bin->parser = gst_element_factory_make ("h265parse", elem_name);
    } else if (!g_strcmp0 (encoding_name, "JPEG")) {
      bin->depay = gst_element_factory_make ("rtpjpegdepay", elem_name);
      g_snprintf (elem_name, sizeof (elem_name), "jpegparse_elem%d", bin->bin_id);
      bin->parser = gst_element_factory_make ("jpegparse", elem_name);
    } else {
      NVGSTDS_WARN_MSG_V ("%s not supported", encoding_name);
      return FALSE;
    }

    if (!bin->depay) {
      NVGSTDS_ERR_MSG_V ("Failed to create '%s'", elem_name);
      return FALSE;
    }

    gst_bin_add_many (GST_BIN (bin->bin), bin->depay, bin->parser, NULL);

    NVGSTDS_LINK_ELEMENT (bin->depay, bin->parser);
    NVGSTDS_LINK_ELEMENT (bin->parser, bin->tee_rtsp_pre_decode);

    if (!gst_element_sync_state_with_parent (bin->depay)) {
      NVGSTDS_ERR_MSG_V ("'%s' failed to sync state with parent", elem_name);
      return FALSE;
    }
    gst_element_sync_state_with_parent (bin->parser);
  }

  ret = TRUE;
done:
  return ret;
}

but it did not fix the problem.

Can you dump the pipeline graph when you run with the mjpeg source? The method is described here: DeepStream SDK FAQ - Intelligent Video Analytics / DeepStream SDK - NVIDIA Developer Forums

I tried the method below, but I can't find gst_init():

 4.3 DeepStream application
  for example
  4.3.1 add "g_setenv("GST_DEBUG_DUMP_DOT_DIR", "/tmp", TRUE);" before gst_init()
  4.3.2 add "GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(gst_objs.pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "demo-app-pipeline");" at the point where you want to export the dot file, e.g. when switching to PLAYING
   BTW, you need to include the header file #include <gio/gio.h>
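For reference, here is a minimal sketch of where those two calls sit in a standalone GStreamer app (in deepstream-app the equivalent gst_init() call is near the top of main()); the videotestsrc pipeline is only a stand-in for the app's real pipeline.

/* dumpdot.c - minimal sketch of the two FAQ calls in context.
 * Build: gcc dumpdot.c -o dumpdot $(pkg-config --cflags --libs gstreamer-1.0)
 */
#include <gst/gst.h>

int main (int argc, char *argv[])
{
  /* 4.3.1: must run before gst_init() so GStreamer sees the variable. */
  g_setenv ("GST_DEBUG_DUMP_DOT_DIR", "/tmp", TRUE);
  gst_init (&argc, &argv);

  GstElement *pipeline =
      gst_parse_launch ("videotestsrc num-buffers=100 ! fakesink", NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* Wait until the pipeline has actually reached PLAYING. */
  gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

  /* 4.3.2: writes /tmp/<timestamp>-demo-app-pipeline.dot. */
  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "demo-app-pipeline");

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}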

and with:

4.1 deepstream-app
 run "export GST_DEBUG_DUMP_DOT_DIR=/tmp/" before the deepstream-app command, e.g.
 $ sudo apt-get install graphviz
 $ export GST_DEBUG_DUMP_DOT_DIR=/tmp/
 $ deepstream-app -c deepstream_app_config_yoloV2.txt
 $ cd /tmp/
 $ dot -Tpng 0.03.47.898178403-ds-app-playing.dot >~/0.03.47.898178403-ds-app-playing.png

I get: Error: dot: can't open 0.03.47.898178403-ds-app-playing.dot

Following this comment (Deepstream RTSP reading fails with type=4 but works with multiURI), I set source type=3, but I still get

Bus error (core dumped)

at the line

cv::imencode(…)

The "0.03.47.898178403-ds-app-playing.dot" is just a sample name. Check which dot files were actually generated in your /tmp folder (for example with ls /tmp/*.dot) and use the actual name with the dot command.


According to the graph, the source bin is not connected, so the buffer may be empty.
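If you want to confirm that at runtime rather than only from the graph, a buffer probe on the source bin's output is a simple check. A sketch, assuming the bin's ghost src pad is named "src" as in deepstream_source_bin.c (attach_count_probe is just an illustrative helper name):

/* Counts buffers flowing out of the source bin; if the count never grows,
 * the bin really is producing no data. */
static GstPadProbeReturn
count_buffers_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  guint *count = (guint *) user_data;
  if ((++(*count)) % 30 == 0)
    g_print ("source bin produced %u buffers\n", *count);
  return GST_PAD_PROBE_OK;
}

static guint buf_count = 0;

/* Attach it after the source bin has been created. */
static void
attach_count_probe (NvDsSrcBin * bin)
{
  GstPad *pad = gst_element_get_static_pad (bin->bin, "src");
  if (pad) {
    gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
        count_buffers_probe, &buf_count, NULL);
    gst_object_unref (pad);
  }
}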

So what do I need to do next?

Try gst-launch-1.0 commands to find the correct decoding pipeline for your mjpeg rtsp stream, and then modify deepstream-app according to the successful pipeline.

You may try the following pipeline (mjpeg=1 puts nvv4l2decoder into MJPEG decode mode):
gst-launch-1.0 --gst-debug=v4l2videodec:5 rtspsrc location=rtsp://xxxxx ! rtpjpegdepay ! jpegparse ! nvv4l2decoder mjpeg=1 ! fakesink sync=false


Since deepstream-app uses decodebin for rtsp sources, you can debug in decodebin_child_added() to find out which decoder is chosen for the mjpeg rtsp source.
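For example, a one-line trace at the top of the callback in deepstream_source_bin.c prints every element that decodebin creates, so you can see which jpeg decoder it picks:

static void
decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
  /* Trace every child decodebin adds; with an mjpeg rtsp source this
   * shows the depayloader, parser and the decoder that was selected
   * (e.g. nvv4l2decoder, nvjpegdec or the software jpegdec). */
  g_print ("decodebin child added: %s\n", name);

  /* ... existing callback body unchanged ... */
}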


How can I debug with it?

The source code is open; you can debug it with whatever method you like.

An easier way is to get the graph with the following command (keep GST_DEBUG_DUMP_DOT_DIR exported so the dot files are written):

gst-launch-1.0 --gst-debug=3 rtspsrc location=rtsp://xxxxx ! rtpjpegdepay ! jpegparse ! decodebin ! fakesink sync=false

We don't have an mjpeg rtsp source here; you need to debug and add the new feature yourself.