Extract raw image from appsink always empty


• Hardware Platform: Jetson
• DeepStream Version: 6.4
• JetPack Version: 6.0 DP
• TensorRT Version: 8.6.2.3
• Issue Type: Question
I want to feed input images into appsrc and save the inference results along with the raw image.

My pipeline looks like this:
app-source → nvvidconv → caps filter → streammux → pgie → nvvidconv → caps filter → appsink

I try to grab the image buffer in the appsink when the new-sample signal fires. I can get the inference results, but the image is always empty.

Is there anything I’ve set up wrong?

Here is my code:

/*
 * Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <glib.h>
#include <stdio.h>
#include <string.h>
#include <cuda_runtime_api.h>
#include "gstnvdsmeta.h"
#include "nvbufsurface.h"

#include "webcam.h"

#include <opencv2/opencv.hpp>

#include <Poco/LocalDateTime.h>

#define MAX_DISPLAY_LEN 64

#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

#define CUSTOM_PTS 1

#define NVINFER_PLUGIN "nvinfer"
#define NVINFERSERVER_PLUGIN "nvinferserver"
#define PGIE_CONFIG_FILE  "config_infer_primary_yoloV8.txt"

/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 33000

gint frame_number = 0;

/* Structure to contain all our information for appsrc,
 * so we can pass it to callbacks */
typedef struct _AppSrcData
{
  GstElement *app_source;
  long frame_size;
  FILE *file;                   /* Pointer to the raw video file */
  guint fps;                    /* To set the FPS value */
  guint sourceid;               /* To control the GSource */
  GstClockTime timestamp;
} AppSrcData;

/* new_sample is an appsink callback that extracts the metadata and
 * image data from every buffer the appsink receives. */
static GstFlowReturn
new_sample (GstAppSink *sink, gpointer data)
{
  GstSample *sample;
  GstBuffer *buf = NULL;
  guint num_rects = 0;
  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  unsigned long int pts = 0;
  int sample_width = 0;
  int sample_height = 0;
  GstMapInfo map;
  const GstStructure *info = NULL;
  GstCaps *caps = NULL;
  NvBufSurface *surface = NULL; 

  sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
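  /* pull_sample() returns NULL on EOS or when the pipeline is flushing,
   * hence the NULL check below. */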
  if (gst_app_sink_is_eos (GST_APP_SINK (sink))) {
    g_print ("EOS received in Appsink********\n");
  }

  do
  {
    if(sample == NULL)
      break;
    /* Obtain GstBuffer from sample and then extract metadata from it. */
    buf = gst_sample_get_buffer (sample);

    if (!gst_buffer_map(buf, &map, GST_MAP_READ))
    {
      g_print("Error: Failed to map gst buffer\n");
      break;
    }

    caps = gst_sample_get_caps(sample);
    if (caps == NULL)
    {
      g_print("get caps is null\n");
      gst_buffer_unmap (buf, &map);
      break;
    }
    info = gst_caps_get_structure(caps, 0);
    if (info == NULL)
    {
      g_print("get info is null\n");
      gst_buffer_unmap (buf, &map);
      break;
    }
    // -------- Read frame and convert to opencv format --------
    // convert gstreamer data to OpenCV Mat, you could actually
    // resolve height / width from caps...
    gst_structure_get_int(info, "width", &sample_width);
    gst_structure_get_int(info, "height", &sample_height);

    g_print("image width %d height %d\n", sample_width, sample_height);

    if (map.data == NULL)
    {
      g_print("appsink buffer data empty\n");
      gst_buffer_unmap (buf, &map);
      gst_sample_unref (sample);
      return GST_FLOW_OK;
    }
   
    /*    
    for (auto i = 0; i < 32; i++)
    {
      for (auto j = 0; j < 32; j++)
      {
        g_print("%x ", map.data[i * sample_width + j]);
      }
      g_print("\n");
    }
    */
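    /* NOTE: this cast is only meaningful when the negotiated caps carry the
     * memory:NVMM feature. With the plain video/x-raw,format=BGR caps
     * requested on this appsink, map.data is the raw BGR pixel data itself,
     * so reading it as an NvBufSurface yields garbage fields. */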
    surface = (NvBufSurface *)map.data;

    g_print("NvBufSurface mem tpye %d\n", surface->memType);
    g_print("NvBufSurface surfaceList %x\n", surface->surfaceList); // always NULL

    gst_buffer_unmap (buf, &map);

    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);

    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
        l_frame = l_frame->next) {
      NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
      guint  batch_id = frame_meta->batch_id;

      pts = frame_meta->buf_pts;
      for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
          l_obj = l_obj->next) {
        obj_meta = (NvDsObjectMeta *) (l_obj->data);

        int left = obj_meta->rect_params.left;
        int top = obj_meta->rect_params.top;
        int width = obj_meta->rect_params.width;
        int height = obj_meta->rect_params.height;
        auto conf = obj_meta->confidence;
        auto uuid = obj_meta->object_id;
        auto label = obj_meta->class_id;

        g_print("label %d - conf : %f width %d height %d\n", label, conf, width, height);
      }
    }

    /*
    g_print ("Frame Number = %d Number of objects = %d "
        "Vehicle Count = %d Person Count = %d PTS = %" GST_TIME_FORMAT "\n",
        frame_number, num_rects, vehicle_count, person_count,
        GST_TIME_ARGS (pts));
        */
    frame_number++;
    gst_sample_unref (sample);
    return GST_FLOW_OK;
  } while (false);

  if (sample)
    gst_sample_unref (sample);
  return GST_FLOW_ERROR;
}

static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *nvvidconv1 = NULL, *caps_filter = NULL, *caps_filter2 = NULL,
      *streammux = NULL, *sink = NULL, *pgie = NULL, *nvvidconv2 = NULL,*appsink = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  AppSrcData data;
  GstCaps *caps = NULL;
  GstCapsFeatures *feature = NULL;
  gboolean is_nvinfer_server = FALSE;
  const gchar* new_mux_str = g_getenv("USE_NEW_NVSTREAMMUX");
  gboolean use_new_mux = !g_strcmp0(new_mux_str, "yes");
  //cv::namedWindow("debug", cv::WINDOW_NORMAL);
  Webcam test_webcam(0);

  int current_device = -1;
  cudaGetDevice(&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties(&prop, current_device);

  const gchar *format = "BGR"; /* could also be I420 or NV12 */
  gint fps = 10, width = 640, height = 480;

  /* Initialize custom data structure */
  memset (&data, 0, sizeof (data));
  /* BGR: 3 bytes per pixel */
  data.frame_size = width * height * 3;
  data.fps = fps;

  /* Standard GStreamer initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest-appsrc-pipeline");
  if (!pipeline) {
    g_printerr ("Pipeline could not be created. Exiting.\n");
    return -1;
  }

  /* App Source element for reading from raw video file */
  data.app_source = gst_element_factory_make ("appsrc", "app-source");
  if (!data.app_source) {
    g_printerr ("Appsrc element could not be created. Exiting.\n");
    return -1;
  }
  /* Use convertor to convert from software buffer to GPU buffer */
  nvvidconv1 =
      gst_element_factory_make ("nvvideoconvert", "nvvideo-converter1");
  if (!nvvidconv1) {
    g_printerr ("nvvideoconvert1 could not be created. Exiting.\n");
    return -1;
  }
  caps_filter = gst_element_factory_make ("capsfilter", "capsfilter");
  if (!caps_filter) {
    g_printerr ("Caps_filter could not be created. Exiting.\n");
    return -1;
  }

  caps_filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
  if (!caps_filter2) {
    g_printerr ("Caps_filter2 could not be created. Exiting.\n");
    return -1;
  }

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
  if (!streammux) {
    g_printerr ("nvstreammux could not be created. Exiting.\n");
    return -1;
  }

  /* Use nvinfer to run inferencing on streammux's output,
   * behaviour of inferencing is set through config file */
  pgie = gst_element_factory_make (NVINFER_PLUGIN, "primary-nvinference-engine");
  if (!pgie) {
    g_printerr ("Primary nvinfer could not be created. Exiting.\n");
    return -1;
  }

  /* Use convertor to convert from NV12 to RGBA as required by nvdsosd */
  nvvidconv2 =
      gst_element_factory_make ("nvvideoconvert", "nvvideo-converter2");
  if (!nvvidconv2) {
    g_printerr ("nvvideoconvert2 could not be created. Exiting.\n");
    return -1;
  }

  appsink = gst_element_factory_make ("appsink", "app-sink");
  if (!appsink) {
    g_printerr ("Appsink element could not be created. Exiting.\n");
    return -1;
  }

  /* Configure appsrc */
  g_object_set (data.app_source, "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, format,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION, data.fps, 1, NULL), NULL);
#if !CUSTOM_PTS
  g_object_set (G_OBJECT (data.app_source), "do-timestamp", TRUE, NULL);
#endif
  caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING,
      "NV12", NULL);
  feature = gst_caps_features_new ("memory:NVMM", NULL);
  gst_caps_set_features (caps, 0, feature);
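  /* NOTE: gst_caps_set_features() takes ownership of the GstCapsFeatures;
   * a fresh instance must be created for every caps it is attached to and
   * must never be shared between two caps. */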
  g_object_set (G_OBJECT (caps_filter), "caps", caps, NULL);

  g_object_set(G_OBJECT(caps_filter2), "caps", gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR", NULL), NULL);

  g_object_set(G_OBJECT(nvvidconv1), "compute-hw", 1, NULL);
  g_object_set(G_OBJECT(nvvidconv2), "compute-hw", 1, NULL);

  g_object_set(appsink, "caps",
               gst_caps_new_simple("video/x-raw",
                                   // "format", G_TYPE_STRING, "NV12", NULL),
                                   "format", G_TYPE_STRING, "BGR", NULL),
               NULL);

  /* Set streammux properties */
  if (!use_new_mux) {
    g_object_set (G_OBJECT (streammux), "width", width, "height",
      height, "batch-size", 1, "live-source", TRUE,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
  }
  else {
    g_object_set (G_OBJECT (streammux), "batch-size", 1,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
  }
  g_object_set (G_OBJECT (pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);
  //g_object_set (G_OBJECT (pgie), "input-tensor-meta", TRUE , NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      data.app_source, nvvidconv1, caps_filter, streammux, pgie,
      nvvidconv2, caps_filter2, appsink, NULL);

  GstPad *sinkpad, *srcpad;
  gchar pad_name_sink[16] = "sink_0";
  gchar pad_name_src[16] = "src";

  sinkpad = gst_element_request_pad_simple (streammux, pad_name_sink);
  if (!sinkpad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  srcpad = gst_element_get_static_pad (caps_filter, pad_name_src);
  if (!srcpad) {
    g_printerr ("Decoder request src pad failed. Exiting.\n");
    return -1;
  }

  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link caps filter to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);

  /* we link the elements together */
  /* app-source -> nvvidconv -> caps filter ->
   * pgie -> nvvidconv -> caps filter -> appsink */

  g_print ("start link\n");

  if (!gst_element_link_many (data.app_source, nvvidconv1, caps_filter, NULL) ||
      !gst_element_link_many (streammux, pgie, nvvidconv2, caps_filter2, appsink, NULL)) {
    g_printerr ("Elements could not be linked: Exiting.\n");
    return -1;
  }

  g_print ("link success\n");
  
  /* Configure appsink to extract data from DeepStream pipeline */
  g_object_set (appsink, "emit-signals", TRUE, "async", FALSE, NULL);

  /* Callback to access buffer and object info. */
  g_signal_connect (appsink, "new-sample", G_CALLBACK (new_sample), NULL);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing:\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  test_webcam.setReadDataCallback(
    [&](const cv::Mat &frame) {
      GstBuffer *buffer;
      guint size = frame.rows * frame.step;
      GstFlowReturn gstret;

      GstMapInfo map;
      buffer = gst_buffer_new_allocate(NULL, size, NULL);
      gst_buffer_map(buffer, &map, GST_MAP_WRITE);
      memcpy(map.data, frame.data, size);
      gst_buffer_unmap(buffer, &map);

      /* Advance the timestamp by one frame duration (here 1/30 s) */
      GST_BUFFER_PTS (buffer) = data.timestamp;
      GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30);
      data.timestamp += GST_BUFFER_DURATION (buffer);

      Poco::LocalDateTime st;
      g_signal_emit_by_name(data.app_source, "push-buffer", buffer, &gstret);
      gst_buffer_unref(buffer);
      Poco::LocalDateTime et;

      if (gstret != GST_FLOW_OK) {
        g_print("ret not ok!\n");
      } else {
        g_print("ret ok! %ld ms\n", (long) (et - st).totalMilliseconds());
      }

    }
  );

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  
  //g_main_loop_run (loop);
  while(true)
  {
    auto c = cv::waitKey(10);
    if(c =='q')
      break;
  }

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

I used our demo deepstream-appsrc-test to print the pointer, and it works normally. You can compare your code with our demo code. Also, you need to use %p to print a pointer, not %x.

NvBufSurface surfaceList 0x7f8d8c009860
image width 1280 height 720
NvBufSurface mem type 2
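
For reference, if you keep memory:NVMM caps on the appsink, map.data is an NvBufSurface and the pixels need an explicit CPU mapping before you can read them. A rough, untested sketch for the callback above (it assumes batch-size 1 and an RGBA surface; the names follow nvbufsurface.h):

    NvBufSurface *surface = (NvBufSurface *) map.data;
    if (NvBufSurfaceMap (surface, 0, 0, NVBUF_MAP_READ) == 0) {
      NvBufSurfaceSyncForCpu (surface, 0, 0);  /* flush device writes to CPU */
      NvBufSurfaceParams *params = &surface->surfaceList[0];
      /* The pitch can be wider than width * 4, so pass it as the step. */
      cv::Mat frame (params->height, params->width, CV_8UC4,
                     params->mappedAddr.addr[0], params->planeParams.pitch[0]);
      /* clone() before unmapping; channel order may still need cvtColor. */
      cv::imwrite ("frame.png", frame.clone ());
      NvBufSurfaceUnMap (surface, 0, 0);
    }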

I finally realized that the two GstCaps I set up shared the same GstCapsFeatures object, which resulted in an incorrect format conversion and an empty image.
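
For anyone who hits the same thing, a minimal sketch of the mistake and the fix (caps1/caps2 are placeholder names, not the variables from my code above):

    /* Wrong: one GstCapsFeatures object attached to two caps.
     * gst_caps_set_features() takes ownership of the features,
     * so the second call hands over an object caps1 already owns. */
    GstCapsFeatures *feature = gst_caps_features_new ("memory:NVMM", NULL);
    gst_caps_set_features (caps1, 0, feature);
    gst_caps_set_features (caps2, 0, feature);

    /* Right: create a fresh GstCapsFeatures for every caps. */
    gst_caps_set_features (caps1, 0, gst_caps_features_new ("memory:NVMM", NULL));
    gst_caps_set_features (caps2, 0, gst_caps_features_new ("memory:NVMM", NULL));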

Everything’s fine now.
