Errors when using GStreamer 1.0 appsrc (thanks for helping me identify the issues)

Hi,

My application consists of pushing data from a YUV file into a GStreamer pipeline (appsrc -> omxh264enc -> h264parse -> qtmux -> filesink) using the appsrc plugin.

Below is the first code I did, using examples I found :

#include <gst/gst.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>

#define BUFFER_SIZE 1024   /* Amount of bytes we are sending in each buffer */

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {

  GstElement *pipeline, *source, *encoder, *parser, *muxer, *fsink;
  guint sourceid;   /* id of the idle GSource feeding appsrc; 0 while not feeding */
  FILE *file_to_encode;    /* raw YUV input stream read by push_data() */
  //long bytes_to_read; 
  GMainLoop *main_loop;  /* run by main(); callbacks fire on this loop */

} CustomData;

/* This method is called by the idle GSource in the mainloop, to feed BUFFER_SIZE
 * bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start
 * sending data (need-data signal) and is removed when appsrc has enough data
 * (enough-data signal).
 *
 * Returns TRUE to be invoked again, FALSE to stop (EOF or push failure).
 */
static gboolean push_data (CustomData *data) {

    GstBuffer *buffer;
    guint8 *ptr;
    size_t size;
    GstFlowReturn ret;

    ptr = g_malloc(BUFFER_SIZE);
    g_assert(ptr);

    size = fread(ptr, 1, BUFFER_SIZE, data->file_to_encode);

    if(size == 0){
        /* EOF (or read error): nothing was wrapped into a GstBuffer, so we
         * must release the chunk ourselves before signalling end-of-stream.
         * The original code leaked it. */
        g_free(ptr);
        /* gst_app_src_* take a GstAppSrc*, not a GstElement* -- without the
         * cast (and the gstappsrc.h header) the compiler fell back to an
         * implicit declaration, as the build warnings showed. */
        ret = gst_app_src_end_of_stream(GST_APP_SRC(data->source));
        g_debug("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }

    /* Wrap only the bytes actually read (a short read near EOF returns fewer
     * than BUFFER_SIZE).  The buffer takes ownership of ptr and will free it
     * with g_free(). */
    buffer = gst_buffer_new_wrapped(ptr, size);

    ret = gst_app_src_push_buffer(GST_APP_SRC(data->source), buffer);

    if(ret !=  GST_FLOW_OK){
        g_debug("push buffer returned %d for %zu bytes \n", ret, size);
        return FALSE;
    }

    if(size != BUFFER_SIZE){
        /* Short read means the file is exhausted: signal EOS and stop. */
        ret = gst_app_src_end_of_stream(GST_APP_SRC(data->source));
        g_debug("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }

    return TRUE;
}

/* "need-data" signal handler: appsrc wants data, so install an idle
 * handler on the mainloop that keeps pushing chunks into the appsrc.
 * A non-zero sourceid means the feeder is already running. */
static void start_feed (GstElement *source, guint size, CustomData *data) {
  if (data->sourceid != 0)
    return;

  g_print ("Start feeding\n");
  data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
}

/* "enough-data" signal handler: appsrc has buffered enough, so detach
 * the idle handler that was feeding it and mark the feeder as stopped. */
static void stop_feed (GstElement *source, CustomData *data) {
  if (data->sourceid == 0)
    return;

  g_print ("Stop feeding\n");
  g_source_remove (data->sourceid);
  data->sourceid = 0;
}

int main ( int argc, char *argv[]){

  CustomData data;
  GstCaps *video_caps;
  GstPad *parser_pad, *muxer_pad;

  data.file_to_encode = fopen("/media/ubuntu/6634-3132/Basketball.yuv", "rb");
  if(data.file_to_encode == NULL)  
	{
		g_printerr ("YUV file could not be opened.\n");
		return -1;
		exit(0);
	}
  //fseek(data.file_to_encode, 0, SEEK_END);
  //data.bytes_to_read = ftell(data.file_to_encode);
  //rewind (data.file_to_encode);

  /* Initialize cumstom data structure */
  memset (&data, 0, sizeof (data));

 /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.source = gst_element_factory_make ("appsrc", "myapp_source");
  data.encoder = gst_element_factory_make ("omxh264enc", "myapp_encoder");
  data.parser = gst_element_factory_make ("h264parse", "myapp_parser");
  data.muxer = gst_element_factory_make ("qtmux", "myapp_muxer");
  data.fsink = gst_element_factory_make ("filesink", "myapp_sink");

 /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("myapp_pipeline");

  if (!data.pipeline || !data.source || !data.encoder || !data.parser || 
	!data.muxer || !data.fsink)
 {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure appsrc and encoder*/
  video_caps = gst_caps_new_simple ("video/x-raw",
                     "format", G_TYPE_STRING, "I420",
                     "width", G_TYPE_INT, 640,
                     "height", G_TYPE_INT, 360,
                     "framerate", GST_TYPE_FRACTION, 25, 1,
                     NULL);

  g_object_set (G_OBJECT (data.source), "caps", video_caps, "stream-type", 0,
  	"format", GST_FORMAT_TIME, NULL);

  gst_object_unref (video_caps);

  g_object_set (G_OBJECT (data.fsink), "location", "/media/ubuntu/6634-3132/encoded_file.h264", NULL);

  g_signal_connect (data.source, "need-data", G_CALLBACK (start_feed), &data);
  g_signal_connect (data.source, "enough-data", G_CALLBACK (stop_feed), &data);

  /* Link all elements that can be automatically linked*/
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.encoder, 
	data.parser, data.muxer, data.fsink, NULL);

  if ((gst_element_link_many (data.source, data.encoder, data.parser, NULL) != 
	TRUE ) || (gst_element_link (data.muxer, data.fsink) != TRUE)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

 /* Manually link the Qtmux, which has sink "Request" pads */
 muxer_pad = gst_element_get_request_pad (data.muxer, "video_%u");
 g_print ("Obtained request pad %s for muxer sink.\n", 
	gst_pad_get_name(muxer_pad));

 parser_pad = gst_element_get_static_pad (data.parser, "src");

  if (gst_pad_link (parser_pad, muxer_pad) != GST_PAD_LINK_OK ) {
    g_printerr ("Muxer could not be linked\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  gst_object_unref (muxer_pad);
  gst_object_unref (parser_pad);

  /* Start playing the pipeline */
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);

  /* Release the request pads from the Tee, and unref them */
  gst_element_release_request_pad (data.muxer, muxer_pad);
  gst_element_release_request_pad (data.parser, parser_pad);
  gst_object_unref (muxer_pad);
  gst_object_unref (parser_pad);

  /* Free resources */
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;	
}

It compiles with 2 warnings, but when I execute it I obtain a segmentation-fault error. Can someone help me resolve this issue? (The crash occurs at the fread call inside push_data, yet my file is much larger than 1024 bytes — it is about 162 MB.)

ubuntu@tegra-ubuntu:~/Desktop$ gcc -I/usr/include/gstreamer-1.0 -I/usr/lib/aarch64-linux-gnu/gstreamer-1.0/include -I/usr/include/glib-2.0 -I/usr/lib/aarch64-linux-gnu/glib-2.0/include  -o ProduceH264FromAppsrc ProduceH264FromAppsrc.c -pthread -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0 -lgstapp-1.0 -lgstbase-1.0
ProduceH264FromAppsrc.c: In function ‘push_data’:
ProduceH264FromAppsrc.c:38:15: warning: implicit declaration of function ‘gst_app_src_end_of_stream’ [-Wimplicit-function-declaration]
         ret = gst_app_src_end_of_stream(data->source);
               ^
ProduceH264FromAppsrc.c:45:11: warning: implicit declaration of function ‘gst_app_src_push_buffer’ [-Wimplicit-function-declaration]
     ret = gst_app_src_push_buffer(data->source, buffer);
           ^
ubuntu@tegra-ubuntu:~/Desktop$ ./ProduceH264FromAppsrc 

(ProduceH264FromAppsrc:8206): GLib-GObject-CRITICAL **: g_object_unref: assertion 'G_IS_OBJECT (object)' failed
Obtained request pad video_0 for muxer sink.
Inside NvxLiteH264DecoderLowLatencyInitNvxLiteH264DecoderLowLatencyInit set DPB and MjstreamingInside NvxLiteH265DecoderLowLatencyInitNvxLiteH265DecoderLowLatencyInit set DPB and MjstreamingFramerate set to : 25 at NvxVideoEncoderSetParameterNvMMLiteOpen : Block : BlockType = 4 
===== MSENC =====
NvMMLiteBlockCreate : Block : BlockType = 4 
Start feeding
Segmentation fault (core dumped)

I try another code :

#include <stdio.h>
#include <string.h>
#include <fstream>
#include <unistd.h>
#include <cstdlib>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

using namespace std;

int main(int argc, char **argv){

GstPipeline *pipeline;
GstAppSrc  *appsrc;
GstElement   *filter1, *encoder, *filter2, *parser, *qtmux, 
	*sink;
GstStateChangeReturn state_ret;
GstClockTime timestamp = 0;

//Step 1 : Initialize gstreamer
gst_init (&argc, &argv);

//Step 2 : Create pipeline, and pipeline elements
pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
appsrc    = (GstAppSrc*)gst_element_factory_make("appsrc", "mysrc");
filter1 =  gst_element_factory_make ("capsfilter", "myfilter1");
encoder =  gst_element_factory_make ("omxh264enc", "myomx");
filter2 =  gst_element_factory_make ("capsfilter", "myfilter2");
parser =   gst_element_factory_make("h264parse"  , "myparser");
qtmux =    gst_element_factory_make("qtmux"      , "mymux");
sink =     gst_element_factory_make ("filesink"  , NULL);

if( !pipeline || !appsrc || !filter1 || !encoder || !filter2 || 
	!parser || !qtmux || !sink) {

	printf("Error creating pipeline elements!\n");
	exit(2);
}

//Step 3 : Attach elements to pipeline
gst_bin_add_many( GST_BIN(pipeline), (GstElement*)appsrc, filter1, encoder,
	filter2, parser, qtmux, sink, NULL);

//Step 4 : Set pipeline element attributes
g_object_set (appsrc, "format", GST_FORMAT_TIME, NULL);

GstCaps *filtercaps1 = gst_caps_new_simple ("video/x-raw", // ?!!
    		"format", G_TYPE_STRING, "I420",
    		"width", G_TYPE_INT, 640,
    		"height", G_TYPE_INT, 360,
    		"framerate", GST_TYPE_FRACTION, 25, 1,
    		NULL);

g_object_set (G_OBJECT (filter1), "caps", filtercaps1, NULL);

GstCaps *filtercaps2 = gst_caps_new_simple ("video/x-h264",
    		"stream-format", G_TYPE_STRING, "byte-stream",
    		NULL);

g_object_set (G_OBJECT (filter2), "caps", filtercaps2, NULL);

g_object_set (G_OBJECT (sink), "location", "/media/ubuntu/6634-3132/output1902.h264", NULL);

//Step 5 : Link elements together
g_assert( gst_element_link_many((GstElement*)appsrc, filter1, encoder, filter2,
	parser, qtmux, sink, NULL ) );

//Step 6 : Play the pipeline
state_ret = gst_element_set_state((GstElement*)pipeline, GST_STATE_PLAYING);
g_assert(state_ret == GST_STATE_CHANGE_ASYNC);

//Step 7 : Get a pointer to the test input
FILE *testfile = fopen("/media/ubuntu/6634-3132/Basketball.yuv", "rb");	
g_assert(testfile != NULL);

//Step 8 : Push the data from buffer to gstpipeline "1000" times // ??!!
for(int i = 0; i < 1000; i++) {

	char* filebuffer = (char*)malloc (162000); //Allocate memory for framebuffer ????

    	if (filebuffer == NULL) {
		printf("Memory error\n"); 
		exit (2);
	} //Errorcheck

	size_t bytesread = fread(filebuffer, 1 , 162000, testfile); //Read to filebuffer
    	printf("File Read: %zu bytes\n", bytesread);

    	GstBuffer *pushbuffer; //Actual databuffer
    	GstFlowReturn ret; //Return value
    	pushbuffer = gst_buffer_new_wrapped (filebuffer, 162000); //Wrap the data

    	//Set frame timestamp
    	GST_BUFFER_PTS      (pushbuffer) = timestamp;
    	GST_BUFFER_DTS      (pushbuffer) = timestamp;	
    	GST_BUFFER_DURATION (pushbuffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);
    	timestamp += GST_BUFFER_DURATION (pushbuffer);
    	printf("Frame is at %lu\n", timestamp);

    	ret = gst_app_src_push_buffer( appsrc, pushbuffer); //Push data into pipeline

    	g_assert(ret ==  GST_FLOW_OK);
}

//Step 9 : Declare end of stream
gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
printf("End Program.\n");
usleep(100000);
}

This one executes without errors, but I obtain an empty H.264 output file. Can someone help me figure out what the issue is here as well?

Thanks in advance,
Regards.

Here is an example of running appsrc:
https://devtalk.nvidia.com/default/topic/1026106/jetson-tx1/usage-of-nvbuffer-apis/post/5219225/#5219225