How to decode h264 buffers with gstreamer and get the video frames

Hi,

I’m trying to decode h264 video and get the frames. I receive each buffer through a callback function like the following:

// Camera SDK callback prototype: invoked once per encoded H.264 buffer.
// buf/bufLen describe the encoded data; pipline is the opaque user pointer
// given at registration time (a gst_app_t* in the full program below).
// NOTE(review): shown here without a return type — the real definition
// appears later in the post.
liveViewCb(uint8_t* buf, int bufLen, void* pipline)
{
// DO something with the buffer
}

I wrote a program that succeeds in decoding the first frame (or more, depending on the frame size). I got the following messages from the gstreamer debug output:
091:gst_clock_get_time:<GstSystemClock> adjusted time 1:27:44.882934836
0:00:02.052908983 32105   0x7f64002520 DEBUG                 appsrc gstappsrc.c:1819:gst_app_src_push_internal:<mysource> queue filled (200226 >= 200000)
0:00:02.052941784 32105   0x7f64002520 DEBUG                 appsrc gstappsrc.c:1819:gst_app_src_push_internal:<mysource> queue filled (200226 >= 200000)
0:00:02.052980761 32105   0x7f64002520 DEBUG                 appsrc gstappsrc.c:1863:gst_app_src_push_internal:<mysource> queueing buffer 0x7f700291e0

The code I wrote for decoding the frames:

// Application state shared between the camera callback, the appsrc feeding
// callbacks and the pipeline setup code.
typedef struct
{
  GstPipeline*             pipeline;   // top-level pipeline (from gst_parse_launch)
  GstAppSrc*               src;        // "mysource" appsrc element
  GstElement*              sink;       // "mysink" appsink element
  GstClockTime             timestamp;  // running PTS for buffers pushed into appsrc
  bool                     need_data;  // true while appsrc is asking for more input
  // Added: start_pipeline() assigns this member, but it was missing from the
  // struct, so the original code did not compile. Requires <chrono>.
  std::chrono::steady_clock::time_point last_time_buffer;
} gst_app_t;

// Build a pipeline from a gst-launch style description.
// Returns the pipeline element, or NULL on a fatal parse error. Unlike the
// original, the GError is captured and printed instead of being silently
// discarded, so a bad description is diagnosable.
GstElement*
get_pipline(const gchar* pipeline_string)
{
  GError*     error    = NULL;
  GstElement* pipeline = gst_parse_launch(pipeline_string, &error);
  if (error != NULL)
  {
    g_printerr("gst_parse_launch: %s\n", error->message);
    g_error_free(error);
  }
  return pipeline;
}

void
start_feed(gst_app_t* pipline)
{
  pipline->need_data = true;
}

void
stop_feed(gst_app_t* pipline)
{
  pipline->need_data = false;
}

void
start_pipeline(const gchar* pipeline_string, gst_app_t* pipline)
{
  printf("gst_init!\n");
  gst_init(NULL, NULL); // Initialize Gstreamer
  main_loop            = g_main_loop_new(NULL, FALSE);
  GstElement* pipeline = get_pipline(pipeline_string);
  pipline->pipeline    = GST_PIPELINE(pipeline);
  pipline->need_data   = true;
  if (pipline->pipeline == NULL)
  {
    g_print("Bad pipeline\n");
    return;
  }
  pipline->src = (GstAppSrc*)gst_bin_get_by_name(GST_BIN(pipeline), "mysource");
  pipline->sink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
  gst_app_src_set_max_bytes(pipline->src, 200000000);
  // g_object_set(pipline->src, "stream-type",
  // GST_APP_STREAM_TYPE_STREAM,"format",GST_FORMAT_TIME,"do-timestamp",TRUE,"is-live",TRUE,"block",TRUE,null)
  printf("start_pipeline!\n");
  GstStateChangeReturn state_ret;
  ProgramData*         data     = NULL;
  GstElement*          testsink = NULL;
  GstBus*              bus      = NULL;
  printf("timestamp!\n");
  pipline->timestamp = 0;

  // Play the pipeline
  printf("Play the pipeline!\n");
  state_ret =
    gst_element_set_state((GstElement*)pipline->pipeline, GST_STATE_PLAYING);
  g_assert(state_ret == GST_STATE_CHANGE_ASYNC);

  printf("sink!\n");
  testsink = pipline->sink;
  printf("emit-signals!\n");
  g_object_set(G_OBJECT(testsink), "emit-signals", TRUE, "sync", FALSE, NULL);
  printf("G_CALLBACK!\n");
  g_signal_connect(
    testsink, "new-sample", G_CALLBACK(on_new_sample_from_sink), NULL);

  g_signal_connect(pipline->src, "need-data", G_CALLBACK(start_feed), NULL);
  g_signal_connect(pipline->src, "enough-data", G_CALLBACK(stop_feed), NULL);

  pipline->last_time_buffer = std::chrono::steady_clock::now();
  printf("complete creating pipeline!\n");
}

// Push one encoded H.264 buffer of |len| bytes into the appsrc.
// Returns true if the pipeline accepted the buffer, false on a flow error
// (at which point the caller should stop feeding data).
bool decode_buffer(uint8_t* pBuf, int len, gst_app_t* pipline)
{
  printf("got new buffer!!!\n");
  g_assert(pBuf != NULL);
  gst_app_t* app = pipline;

  // Copy the caller's bytes into a GstBuffer we own.
  GstBuffer* buffer = gst_buffer_new_and_alloc(len);
  gst_buffer_fill(buffer, 0, pBuf, len);

  // Give each buffer a monotonically increasing timestamp. The original
  // stamped every buffer PTS=DTS=0, so all frames collapsed onto the same
  // presentation time downstream.
  // NOTE(review): duration is hard-coded to 1 s/frame as in the original —
  // replace with 1/framerate once the real rate is known.
  GST_BUFFER_PTS(buffer)      = app->timestamp;
  GST_BUFFER_DTS(buffer)      = app->timestamp;
  GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 1);
  app->timestamp += GST_BUFFER_DURATION(buffer);

  GstFlowReturn ret;
  /* Push the buffer into the appsrc; push-buffer takes its own reference */
  g_signal_emit_by_name(app->src, "push-buffer", buffer, &ret);
  /* Free our reference now that we are done with it */
  gst_buffer_unref(buffer);

  if (ret != GST_FLOW_OK)
  {
    /* We got some error, stop sending data */
    return false;
  }
  return true;
}
bool
on_new_sample_from_sink(GstElement* elt)
{
  GstSample*    sample;
  GstMapInfo    map_info;
  GstBuffer *   app_buffer, *buffer;
  GstElement*   source;
  GstFlowReturn ret;
  /* get the sample from appsink */
  sample = gst_app_sink_pull_sample(GST_APP_SINK(elt));
  buffer = gst_sample_get_buffer(sample);
  if (sample)
    gst_sample_unref(sample);
  if (gst_buffer_map(buffer, &map_info, GST_MAP_READ))
  {

    cv::Mat frame = Mat::zeros(1280, 720, CV_8UC3);
    frame =
      cv::Mat(720, 1280, CV_8UC3, (char*)map_info.data, cv::Mat::AUTO_STEP);
    memcpy(frame.data, map_info.data, map_info.size);
    gst_buffer_unref(buffer);
    if (!frame.empty())
    {
      imwrite("show.jpg", frame);
    }
  }
}
liveViewCb(uint8_t* buf, int bufLen, void* pipline)
{
  std::cout << "we got buffer to decode!!" << endl;
  std::cout << "Buffer length1: " << bufLen << endl;
  if (((gst_app_t*)pipline)->need_data)
    decoder->decode_buffer(buf, bufLen, (gst_app_t*)pipline);
}
int main()
{
decoder* decoder;
static gst_app_t pipeline;
decoder->start_pipeline(
    "appsrc name=mysource is-live=0 ! h264parse ! nvv4l2decoder ! nvvidconv ! "
    "video/x-raw,format=BGRx ! videoconvert ! video/x-raw,format=BGR ! queue ! "
    "appsink name=mysink drop=1",
    pipeline);
startH264StreamSetCBFunction(
          CAMERA_POSITION, liveViewCb, (void*)&pipeline);
}

As you see the pipeline im using is : "appsrc name=mysource is-live=0 ! h264parse ! nvv4l2decoder ! nvvidconv ! "
"video/x-raw,format=BGRx ! videoconvert ! video/x-raw,format=BGR ! queue ! "
“appsink name=mysink drop=1”.

Hi,
Please refer to this sample:
Gstreamer decode live video stream with the delay difference between gst-launch-1.0 command and appsink callback - #6 by DaneLLL

This topic was automatically closed 60 days after the last reply. New replies are no longer allowed.