DeepStream: reading from a folder of images

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU)
Jetson
• DeepStream Version
5.1
• TensorRT Version
7.1.3

Hi,

We are using multifilesrc and jpegparse to process a list of images in a folder.
The issues with this pipeline are:
1- It is very slow compared to h264parse with video:
with h264parse the speed is about 200 fps, while with jpegparse it is only about 30 fps.

2- The pipeline with jpegparse freezes when we set a larger muxer width and height.

Is there a better alternative to jpegparse that we can use? Would nvjpegdec help us?
I saw this topic, but how can I integrate it with our pipeline?
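For reference, here is a minimal standalone sketch of the pipeline we are describing (multifilesrc → jpegparse → nvv4l2decoder → nvstreammux → fakesink). The image path and the img_%04d.jpg filename pattern are placeholders, and the 1920x1080 muxer size mirrors our full application, which is pasted after the sketch; the mjpeg property on nvv4l2decoder is the Jetson setting we use. Treat this as an untested sketch rather than a drop-in test case.

/* Minimal reproduction sketch. Assumes the images have already been renamed
 * to img_0000.jpg, img_0001.jpg, ... under /path/to/images/tmp (placeholder). */
#include <gst/gst.h>

int main(int argc, char *argv[])
{
  gst_init(&argc, &argv);

  GError *err = NULL;
  GstElement *pipeline = gst_parse_launch(
      "nvstreammux name=m batch-size=1 width=1920 height=1080 "
      "batched-push-timeout=40000 ! fakesink sync=false "
      "multifilesrc location=/path/to/images/tmp/img_%04d.jpg "
      "caps=\"image/jpeg, framerate=(fraction)1/1\" "
      "! jpegparse ! nvv4l2decoder mjpeg=1 ! m.sink_0",
      &err);
  if (!pipeline)
  {
    g_printerr("Failed to build pipeline: %s\n", err ? err->message : "unknown");
    return -1;
  }

  /* Run until EOS or an error is posted on the bus. */
  gst_element_set_state(pipeline, GST_STATE_PLAYING);
  GstBus *bus = gst_element_get_bus(pipeline);
  GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
      (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
  if (msg)
    gst_message_unref(msg);
  gst_object_unref(bus);

  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);
  return 0;
}

Our full application source is below.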

/*
 * Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <sys/time.h>
#include <glob.h>
#include "gstnvdsmeta.h"
#include <iostream>
#include <unistd.h>
#include <string>
#include <array>
#include <memory>
#include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/xml_parser.hpp>
#include <boost/property_tree/ini_parser.hpp>
#include <boost/property_tree/json_parser.hpp>
using boost::property_tree::ptree;
using boost::property_tree::write_xml;

#include <vector>
#include <dirent.h>
#include <sys/types.h>
#include <string.h>

#include <list>
#include <iterator>
#include <cuda.h>
#include <cuda_runtime.h>
#include "nvbufsurface.h"
#include "nvbufsurftransform.h"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
using namespace cv;
//#include <gtk/gtk.h>

//#include "gstnvstreammeta.h"
#ifndef PLATFORM_TEGRA
#include "gst-nvmessage.h"
#endif

#define MAX_DISPLAY_LEN 64

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

using namespace std;

#define MAX_TRACKING_ID_LEN 16

/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000
#define CONFIG_FILE_VW "config/config_vw_detection.txt"

#define TILED_OUTPUT_WIDTH 1920
#define TILED_OUTPUT_HEIGHT 1080

/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"

#define FPS_PRINT_INTERVAL 300

/* tiler_sink_pad_buffer_probe  will extract metadata received on OSD sink pad
 * and update params for drawing rectangle, object information etc. */

static clock_t start_time = 0;
static guint probe_counter = 0;

list<int> full_list;
int count_array = 0;
static int count = 0;
gint frame_number = 0;

int counter = 0;

#define CHECK_CUDA_STATUS(cuda_status, error_str)                            \
  do                                                                         \
  {                                                                          \
    if ((cuda_status) != cudaSuccess)                                        \
    {                                                                        \
      g_print("Error: %s in %s at line %d (%s)\n",                           \
              error_str, __FILE__, __LINE__, cudaGetErrorName(cuda_status)); \
    }                                                                        \
  } while (0)

static GstElement *
create_source_bin(guint index, gchar *uri)
{
  GstElement *bin = NULL /*, *uri_decode_bin = NULL*/;
  gchar bin_name[16] = {};
  gboolean multi_file_src = FALSE;

  g_snprintf(bin_name, 15, "source-bin-%02d", index);
  /* Create a source GstBin to abstract this bin's content from the rest of the
   * pipeline */
  bin = gst_bin_new(bin_name);

  GstElement *source, *jpegparser, *decoder;

  if (strstr(uri, "%"))
  {
    std::cout << "multifilesrc"
              << "\n";
    source = gst_element_factory_make("multifilesrc", "source");
    multi_file_src = TRUE;
  }
  else
  {
    //std::cout << uri << "\n";
    source = gst_element_factory_make("filesrc", "source");
  }

  jpegparser = gst_element_factory_make("jpegparse", "jpeg-parser");

  decoder = gst_element_factory_make("nvv4l2decoder", "nvv4l2-decoder");

  if (!source || !jpegparser || !decoder)
  {
    g_printerr("One element could not be created. Exiting.\n");
    return NULL;
  }
  if (strstr(uri, "%"))
  {
    /* Set properties of the multifilesrc element */
    GstCaps *caps =
        gst_caps_new_simple("image/jpeg", "format", G_TYPE_STRING, "RGBA", "framerate", GST_TYPE_FRACTION, 1, 1, NULL);
    //GstCapsFeatures *feature = gst_caps_features_new (GST_CAPS_FEATURES_NVMM, NULL);
    //gst_caps_set_features (caps, 0, feature);

    g_object_set(G_OBJECT(source), "location", uri, "caps", caps, NULL);
  }
  else
  {
    g_object_set(G_OBJECT(source), "location", uri, NULL);
  }
  const char *dot = strrchr(uri, '.');
  if ((dot && ((!strcmp(dot + 1, "mjpeg")) || (!strcmp(dot + 1, "mjpg")))) || (multi_file_src == TRUE))
  {
#ifdef PLATFORM_TEGRA
    g_object_set(G_OBJECT(decoder), "mjpeg", 1, NULL);
#endif
  }

  gst_bin_add_many(GST_BIN(bin), source, jpegparser, decoder, NULL);

  gst_element_link_many(source, jpegparser, decoder, NULL);

  /* We need to create a ghost pad for the source bin which will act as a proxy
   * for the video decoder src pad. The ghost pad will not have a target right
   * now. Once the decode bin creates the video decoder and generates the
   * cb_newpad callback, we will set the ghost pad target to the video decoder
   * src pad. */
  if (!gst_element_add_pad(bin, gst_ghost_pad_new_no_target("src",
                                                            GST_PAD_SRC)))
  {
    g_printerr("Failed to add ghost pad in source bin\n");
    return NULL;
  }

  GstPad *srcpad = gst_element_get_static_pad(decoder, "src");
  if (!srcpad)
  {
    g_printerr("Failed to get src pad of source bin. Exiting.\n");
    return NULL;
  }
  GstPad *bin_ghost_pad = gst_element_get_static_pad(bin, "src");
  if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad),
                                srcpad))
  {
    g_printerr("Failed to link decoder src pad to source bin ghost pad\n");
  }

  return bin;
}

std::string exec(const char *cmd)
{
  std::array<char, 128> buffer;
  std::string result;
  std::unique_ptr<FILE, decltype(&pclose)> pipe(popen(cmd, "r"), pclose);
  if (!pipe)
  {
    throw std::runtime_error("popen() failed!");
  }
  while (fgets(buffer.data(), buffer.size(), pipe.get()) != nullptr)
  {
    result += buffer.data();
  }
  return result;
}

static GstPadProbeReturn
tiler_src_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info,
                           gpointer u_data)
{

  g_print(" end of fraame \n");


  counter++;

  g_print("FRAME  NUMBER ======================== %d  \n", counter);

  /* Note: clock() measures CPU time, so this counter gives an approximate
   * frames-per-second figure rather than strict wall-clock FPS. */
  clock_t end_time = clock();
  if (start_time == 0)
  {
    start_time = end_time;
  }
  if ((((double)(end_time - start_time)) / CLOCKS_PER_SEC) > 1)
  {
    g_print("FPS ======================== %f  \n", (probe_counter + 1) / 1.0);
    g_print("prop counter  ======================== %d  \n", (probe_counter + 1));
    full_list.push_back(probe_counter + 1);
    probe_counter = 0;
    start_time = end_time;
  }
  else
  {
    probe_counter = probe_counter + 1;
  }
#if 0
        display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
        NvOSD_TextParams *txt_params  = &display_meta->text_params;
        txt_params->display_text = g_malloc0 (MAX_DISPLAY_LEN);
        offset = snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "Person = %d ", person_count);
        offset = snprintf(txt_params->display_text + offset , MAX_DISPLAY_LEN, "Vehicle = %d ", vehicle_count);

        /* Now set the offsets where the string should appear */
        txt_params->x_offset = 10;
        txt_params->y_offset = 12;

        /* Font , font-color and font-size */
        txt_params->font_params.font_name = "Serif";
        txt_params->font_params.font_size = 10;
        txt_params->font_params.font_color.red = 1.0;
        txt_params->font_params.font_color.green = 1.0;
        txt_params->font_params.font_color.blue = 1.0;
        txt_params->font_params.font_color.alpha = 1.0;

        /* Text background color */
        txt_params->set_bg_clr = 1;
        txt_params->text_bg_clr.red = 0.0;
        txt_params->text_bg_clr.green = 0.0;
        txt_params->text_bg_clr.blue = 0.0;
        txt_params->text_bg_clr.alpha = 1.0;

        nvds_add_display_meta_to_frame(frame_meta, display_meta);
#endif

  return GST_PAD_PROBE_OK;
}
/* Tracker config parsing */

#define CHECK_ERROR(error)                                               \
  if (error)                                                             \
  {                                                                      \
    g_printerr("Error while parsing config file: %s\n", error->message); \
    goto done;                                                           \
  }

#define CONFIG_GROUP_TRACKER "tracker"
#define CONFIG_GROUP_TRACKER_WIDTH "tracker-width"
#define CONFIG_GROUP_TRACKER_HEIGHT "tracker-height"
#define CONFIG_GROUP_TRACKER_LL_CONFIG_FILE "ll-config-file"
#define CONFIG_GROUP_TRACKER_LL_LIB_FILE "ll-lib-file"
#define CONFIG_GROUP_TRACKER_ENABLE_BATCH_PROCESS "enable-batch-process"
#define CONFIG_GPU_ID "gpu-id"
static gchar *
get_absolute_file_path(gchar *cfg_file_path, gchar *file_path)
{
  gchar abs_cfg_path[PATH_MAX + 1];
  gchar *abs_file_path;
  gchar *delim;

  if (file_path && file_path[0] == '/')
  {
    return file_path;
  }

  if (!realpath(cfg_file_path, abs_cfg_path))
  {
    g_free(file_path);
    return NULL;
  }

  // Return absolute path of config file if file_path is NULL.
  if (!file_path)
  {
    abs_file_path = g_strdup(abs_cfg_path);
    return abs_file_path;
  }

  delim = g_strrstr(abs_cfg_path, "/");
  *(delim + 1) = '\0';

  abs_file_path = g_strconcat(abs_cfg_path, file_path, NULL);
  g_free(file_path);

  return abs_file_path;
}
static gboolean
bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *)data;
  switch (GST_MESSAGE_TYPE(msg))
  {
  case GST_MESSAGE_EOS:
    g_print("End of stream\n");
    g_main_loop_quit(loop);
    break;
  case GST_MESSAGE_WARNING:
  {
    gchar *debug;
    GError *error;
    gst_message_parse_warning(msg, &error, &debug);
    g_printerr("WARNING from element %s: %s\n",
               GST_OBJECT_NAME(msg->src), error->message);
    g_free(debug);
    g_printerr("Warning: %s\n", error->message);
    g_error_free(error);
    break;
  }
  case GST_MESSAGE_ERROR:
  {
    gchar *debug;
    GError *error;
    gst_message_parse_error(msg, &error, &debug);
    g_printerr("ERROR from element %s: %s\n",
               GST_OBJECT_NAME(msg->src), error->message);
    if (debug)
      g_printerr("Error details: %s\n", debug);
    g_free(debug);
    g_error_free(error);
    g_main_loop_quit(loop);
    break;
  }
#ifndef PLATFORM_TEGRA
  case GST_MESSAGE_ELEMENT:
  {
    if (gst_nvmessage_is_stream_eos(msg))
    {
      guint stream_id;
      if (gst_nvmessage_parse_stream_eos(msg, &stream_id))
      {
        g_print("Got EOS from stream %d\n", stream_id);
      }
    }
    break;
  }
#endif
  default:
    break;
  }
  return TRUE;
}

int main(int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *vw_detection = NULL,
             *nvvidconv = NULL, *nvosd = NULL, *tiler = NULL, *mobile_v = NULL, *plate_detection = NULL, *vehicle_type = NULL, *plate_recognition = NULL, *dsexample = NULL, *nvtracker = NULL;
  std::string input_path, input_format;
#ifdef PLATFORM_TEGRA
  GstElement *transform = NULL;
#endif
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *tiler_src_pad = NULL;
  guint i, num_sources;
  guint tiler_rows, tiler_columns;
  guint pgie_batch_size;

  /* Check input arguments */
  // if (argc < 2)
  // {
  //  g_printerr("Usage: %s <elementary JPEG file1> <elementary JPEG file2> ..."
  //           " <elementary JPEG fileN> \n",
  //          argv[0]);
  // return -1;
  // }
  // num_sources = argc - 1;
  int width;
  int height;
  /* Standard GStreamer initialization */
  gst_init(&argc, &argv);
  loop = g_main_loop_new(NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new("P2P-pipeline");

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make("nvstreammux", "stream-muxer");

  if (!pipeline || !streammux)
  {
    g_printerr("One element could not be created. Exiting.\n");
    return -1;
  }
  gst_bin_add(GST_BIN(pipeline), streammux);

  /* Read Settings file */
  boost::property_tree::ptree settings;
  read_ini("Settings/settings.ini", settings);
  input_path = settings.get<std::string>("DEFAULT.INPUT_PATH");

  //Input Preprocessing
  gchar *image_folder = new char[input_path.length() + 1];
  strcpy(image_folder, input_path.c_str());
  chdir(image_folder);
  char s[100];
  printf("%s\n", getcwd(s, 100));
  exec("mkdir tmp");

  exec("curIdx=0; for curFile in  *.jpg; do  outFile=`printf \"./tmp/img_%04d.jpg\" \"$curIdx\"`; echo \'File: \'$curFile \'->\' $outFile; ln $curFile $outFile; curIdx=`expr $curIdx + 1 `; done");
  chdir("/opt/nvidia/deepstream/deepstream-5.1/sources/apps/sample_apps/P2P_Engine_VW/");
  input_path += "tmp/";
  input_format = settings.get<std::string>("DEFAULT.INPUT_FORMAT");
  /*Create full input Path */
  std::string full_path_tmp = input_path + input_format;
  gchar *image_file = new char[full_path_tmp.length() + 1];
  strcpy(image_file, full_path_tmp.c_str());

  GstPad *sinkpad, *srcpad;
  gchar pad_name[16] = {};
  GstElement *source_bin = create_source_bin(0, image_file);

  if (!source_bin)
  {
    g_printerr("Failed to create source bin. Exiting.\n");
    return -1;
  }

  gst_bin_add(GST_BIN(pipeline), source_bin);

  i = 0; /* only one source bin is created, so request sink_0 */
  g_snprintf(pad_name, 15, "sink_%u", i);
  sinkpad = gst_element_get_request_pad(streammux, pad_name);
  if (!sinkpad)
  {
    g_printerr("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  srcpad = gst_element_get_static_pad(source_bin, "src");
  if (!srcpad)
  {
    g_printerr("Failed to get src pad of source bin. Exiting.\n");
    return -1;
  }

  if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
  {
    g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref(srcpad);
  gst_object_unref(sinkpad);

  num_sources = 8;
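  /* Note: only a single source bin (index 0) is created above; this value is
   * used only for the batch-size check and the debug print below. */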

  //nvtracker = gst_element_factory_make("nvtracker", "tracker");
  //dsexample = gst_element_factory_make("dsexample", "example-plugin");

  /* Use nvinfer to infer on batched frame. */
  vw_detection = gst_element_factory_make("nvinfer", "primary-nvinference-engine");
  //vehicle_type = gst_element_factory_make("nvinfer", "secondary2-nvinference-engine");

  //plate_detection = gst_element_factory_make("nvinfer", "secondary1-nvinference-engine");
  // plate_recognition = gst_element_factory_make("nvinfer", "secondary4-nvinference-engine");
  /* Use nvtiler to composite the batched frames into a 2D tiled array based
   * on the source of the frames. */
  //tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");
  /* Finally render the osd output */

  /* Finally render the osd output */
#ifdef PLATFORM_TEGRA
  transform = gst_element_factory_make("queue", "nvegl-transform");
  //transform = gst_element_factory_make("nvegltransform", "nvegl-transform");
#endif
  sink = gst_element_factory_make("fakesink", "nvvideo-renderer");

  if (!vw_detection || !nvvidconv || !sink)
  {
    g_printerr("One element could not be created. Exiting.\n");
    return -1;
  }

#ifdef PLATFORM_TEGRA
  if (!transform)
  {
    g_printerr("One tegra element could not be created. Exiting.\n");
    return -1;
  }
#endif

  g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height",
               MUXER_OUTPUT_HEIGHT, "batch-size", 1,
               "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

  /* Configure the nvinfer element using the nvinfer config file. */
  g_object_set(G_OBJECT(vw_detection), "config-file-path", CONFIG_FILE_VW, NULL);
  //g_object_set(G_OBJECT(plate_detection), "config-file-path", CONFIG_FILE_PLATE_DETECTION, NULL);
  // g_object_set(G_OBJECT(vehicle_type), "config-file-path", CONFIG_FILE_VEHICLE_TYPE, NULL);
  //g_object_set(G_OBJECT(dsexample), "full-frame", TRUE, NULL);
  //g_object_get(G_OBJECT(dsexample), "processing-width", &width, NULL);
  // g_object_get(G_OBJECT(dsexample), "processing-height", &height, NULL);
  // g_object_set(G_OBJECT(plate_recognition), "config-file-path", CONFIG_FILE_PLATE_RECOGNITION, NULL);
  std::cout << "batch size \n ";

  /* Override the batch-size set in the config file with the number of sources. */
  g_object_get(G_OBJECT(vw_detection), "batch-size", &pgie_batch_size, NULL);
  if (pgie_batch_size != num_sources)
  {
    g_printerr("WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n",
               pgie_batch_size, num_sources);
    g_object_set(G_OBJECT(vw_detection), "batch-size", 1, NULL);
  }
  std::cout << pgie_batch_size << "       " << num_sources << endl;

  /* we add a message handler */
  bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
  bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
  gst_object_unref(bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */

#ifdef PLATFORM_TEGRA
  gst_bin_add_many(GST_BIN(pipeline), vw_detection, /* nvvidconv, nvosd, */ transform, sink,
                   NULL);
#else
  gst_bin_add_many(GST_BIN(pipeline), vw_detection, /* nvvidconv, nvosd, */ sink,
                   NULL);
#endif

#ifdef PLATFORM_TEGRA
  if (!gst_element_link_many(streammux, vw_detection, /* nvvidconv, nvosd, */ transform, sink, NULL))
  {
    g_printerr("Elements could not be linked. Exiting.\n");
    return -1;
  }
#else
  if (!gst_element_link_many(streammux, vw_detection, /* nvvidconv, nvosd, */ sink, NULL))
  {
    g_printerr("Elements could not be linked. Exiting.\n");
    return -1;
  }
#endif

  /* Add a probe to get informed of the generated metadata. The probe is added
   * to the src pad of the nvinfer element, since by that time the buffer
   * carries the inference metadata. */
  tiler_src_pad = gst_element_get_static_pad(vw_detection, "src");
  if (!tiler_src_pad)
    g_print("Unable to get src pad\n");
  else
    gst_pad_add_probe(tiler_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
                      tiler_src_pad_buffer_probe, NULL, NULL);

  /* Set the pipeline to "playing" state */

  g_print("\n \n \n \n Now playing: \n \n \n \n ");
  g_print(" %s,", image_file);
  g_print("\n");
  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print("Running...\n");

  const clock_t begin_time = clock();

  g_main_loop_run(loop);

  std::cout << float(begin_time);

  int total = 0;
  int c = 0;
  list<int>::iterator it;
  for (it = full_list.begin(); it != full_list.end(); ++it)
  {
    cout << '\t' << *it;
    total = total + (*it);
    c++;
  }

  g_print("******************* \n ");

  g_print("count_array  =   %d\n", c);
  g_print("sum  =   %d\n", total);

  g_print("avg   =   %d\n", (total / c));

  cout << '\n';

  std::cout << float(clock() - begin_time) / CLOCKS_PER_SEC;

  std::cout << "----------------------------" << endl;
  std::cout << float(clock() - begin_time);

  /* Out of the main loop, clean up nicely */
  g_print("Returned, stopping playback\n");
  gst_element_set_state(pipeline, GST_STATE_NULL);
  g_print("Deleting pipeline\n");
  gst_object_unref(GST_OBJECT(pipeline));
  g_source_remove(bus_watch_id);
  g_main_loop_unref(loop);
  return 0;
}

multifilesrc and jpegparse are both public GStreamer plugins that are developed and maintained by the GStreamer community (multifilesrc: GStreamer Good Plugins 1.0 Plugins Reference Manual; jpegparse: GStreamer Bad Plugins 1.0 Plugins Reference Manual). We cannot guarantee their performance. Please contact the GStreamer community with this concern.

Do you mean you want to switch to an MJPEG stream instead of JPEG pictures?

No, I want to read multiple images without the pipeline freezing, using components supported by NVIDIA.

jpegparse parses the encoded JPEG frames out of a JPEG stream, while nvjpegdec decompresses an encoded JPEG frame into raw data. They serve different functions, so I don’t think nvjpegdec can resolve jpegparse’s problem.
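To make the distinction concrete: in the create_source_bin() code above, a JPEG decoder element could only take the place of nvv4l2decoder, not of jpegparse, which still has to frame each JPEG. A sketch of that swap is below; whether the nvjpegdec element is present on your Jetson image and whether its output negotiates with nvstreammux (it may need an extra nvvideoconvert to reach NVMM memory) is not verified here, so treat it as an assumption to test rather than a recommended fix.

  /* Sketch only: the parser stays in place, only the decoder element changes. */
  GstElement *jpegparser = gst_element_factory_make("jpegparse", "jpeg-parser");

  /* Option A: the V4L2-based hardware decoder used in the code above. */
  GstElement *decoder = gst_element_factory_make("nvv4l2decoder", "nvv4l2-decoder");
#ifdef PLATFORM_TEGRA
  g_object_set(G_OBJECT(decoder), "mjpeg", 1, NULL); /* JPEG/MJPEG input */
#endif

  /* Option B (untested assumption): the NVJPEG-based decoder instead.
   *   GstElement *decoder = gst_element_factory_make("nvjpegdec", "nvjpeg-decoder");
   * An nvvideoconvert may still be needed before nvstreammux to get NVMM buffers. */

  gst_element_link_many(source, jpegparser, decoder, NULL);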