GStreamer unable to encode from YUV images to MP4

I need to convert YUV frames to MP4. The following is part of my code:

/* GLib bus-watch callback: reacts to messages posted on the pipeline's bus.
 * 'data' is the GMainLoop created in GstreamerThdFun; the loop is quit on
 * EOS or on an error so the owning thread can tear the pipeline down.
 * Returning TRUE keeps the watch installed. */
gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
	GMainLoop *loop = (GMainLoop *) data;
	switch (GST_MESSAGE_TYPE (msg))
	{

	case GST_MESSAGE_EOS:
		/* End of stream: stop the main loop so resources get released. */
		fprintf(stderr, "End of stream\n");
		g_main_loop_quit(loop);
		break;
	case GST_MESSAGE_ERROR:
	{
		gchar *debug;
		GError *error;
		gst_message_parse_error(msg, &error, &debug);
		g_free(debug);		/* debug string unused; released immediately */
		g_printerr("Error: %s\n", error->message);
		g_error_free(error);
		g_main_loop_quit(loop);
		break;
	}
	default:
		break;
	}
	return TRUE;
}

/* Copies one raw frame from the global rawdata[] into a freshly allocated
 * GstBuffer and pushes it into the appsrc (which takes ownership).
 * NOTE(review): size is FRAMELENYUV = width*height, but the pipeline caps
 * advertise I420, which needs width*height*3/2 bytes per frame — confirm
 * the intended frame size. */
void PushBuffer()
{
	GstFlowReturn ret;
	GstBuffer *buffer;
	int size = FRAMELENYUV;
	GstMapInfo info;

	buffer = gst_buffer_new_allocate(NULL, size, NULL); 		// allocate buffer storage
	gst_buffer_map(buffer, &info, GST_MAP_WRITE);
	unsigned char* buf = info.data;
	memmove(buf, rawdata, size);
	gst_buffer_unmap(buffer, &info);
	ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);	/* buffer ownership transfers here */
	if(ret < 0)
	{
		puts("############################\n");
	}
	else
	{
		;
	}
}

/* Creates every element of the encoding pipeline and configures their
 * properties.  Exits the process if any factory call fails; returns 0.
 * NOTE(review): filter1 and filter2 are both named "mufilter" — element
 * names inside one bin must be unique, or adding the second one fails.
 * NOTE(review): "blocksize" is a guint property, but a string (szTemp) is
 * passed to g_object_set — pass the integer value instead.
 * NOTE(review): videoparse is created and configured here but never added
 * to the pipeline in pipelinemake(). */
int factory_make()
{	

	pipeline = gst_pipeline_new("mypipeline");
	source = gst_element_factory_make("appsrc", "mysource");	
	videoparse = gst_element_factory_make("videoparse" , "myvideoparse");
	enconder = gst_element_factory_make("omxh264enc", "myencoder");
	filter1 = gst_element_factory_make ("capsfilter", "mufilter");
	parser = gst_element_factory_make ("h264parse", "myparser");
	muxer = gst_element_factory_make ("qtmux","mymuxer");	
	sink = 	gst_element_factory_make ("appsink","mysink");
	filter2 = gst_element_factory_make ("capsfilter", "mufilter");


	filter_caps1 = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING,"I420", "width",G_TYPE_INT,FRAMEWIDTH,"height",G_TYPE_INT,FRAMEHEIGHT,NULL);	
	filter_caps2 = gst_caps_new_simple ("video/x-h264", "stream-format", G_TYPE_STRING,"byte-stream", NULL);

	/* NOTE(review): !muxer is tested twice and !sink is never tested. */
	if(!pipeline || !source || !videoparse || !enconder || !filter1 || !filter2 ||!parser || !muxer ||!muxer || !filter_caps2 || !filter_caps1)
	{
		fprintf(stderr, "Could not gst_element_factory_make, terminating\n");
		exit(0);		
	}

	char szTemp[64];
	
	g_object_set (G_OBJECT (filter1), "caps", filter_caps1, NULL);
	g_object_set (G_OBJECT (filter2), "caps", filter_caps2, NULL);
	gst_caps_unref(filter_caps1);
	gst_caps_unref(filter_caps2);


	g_object_set(G_OBJECT (enconder), "insert-sps-pps", true , NULL);
	g_object_set(G_OBJECT (enconder), "profile", "high" , NULL);
	g_object_set(G_OBJECT(enconder) , "iframeinterval" , 15 , NULL);	
	sprintf(szTemp, "%d" , FRAMELENYUV);
	g_object_set(G_OBJECT (source), "blocksize", szTemp,NULL);
	g_object_set(G_OBJECT(videoparse) , "width" , FRAMEWIDTH , "height" , FRAMEHEIGHT , NULL);

	return 0;
}


/* Adds the elements to the pipeline bin and links them in stream order:
 * appsrc -> capsfilter -> omxh264enc -> capsfilter -> h264parse -> qtmux -> appsink.
 * Returns 0 on success, -1 if linking failed. */
int pipelinemake()
{
	gst_bin_add_many(GST_BIN(pipeline), source, filter1, enconder, filter2, parser, muxer, sink, NULL);

	/* gst_element_link_many is variadic and MUST be NULL-terminated;
	 * the missing sentinel previously caused undefined behavior. */
	if (!gst_element_link_many(source, filter1, enconder, filter2, parser, muxer, sink, NULL))
	{
		fprintf(stderr, "Could not link pipeline elements\n");
		return -1;
	}

	return 0;
}

/* Installs bus_call as a watch on the pipeline's message bus so EOS and
 * error messages reach the main loop.  The bus reference obtained here is
 * released immediately after the watch is attached.  Returns 0. */
int watcher_make()
{
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
	gst_object_unref(bus);

	return 0;	
}


/* pthread entry: builds the pipeline, runs the GLib main loop until
 * bus_call quits it (EOS or error), then releases all resources. */
void *GstreamerThdFun(void *)
{
	gst_init(NULL, NULL);

	loop = g_main_loop_new(NULL, FALSE);

	factory_make();
	watcher_make();
	pipelinemake();
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run(loop);			/* blocks until g_main_loop_quit */

	/* release all */
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));
	g_source_remove(bus_watch_id);
	g_main_loop_unref(loop);

	/* Flowing off the end of a value-returning function is undefined
	 * behavior in C++; pthread_join expects a return value. */
	return NULL;
}

void *sendThdFun(void*)
{
       
    unsigned long int max_file = 1024*1024*512;
    int countnum = 0;

    while(1)
    {

	usleep(1000*5);

        GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(source));
        if(sample != NULL)
        {
            GstBuffer* buffer = gst_sample_get_buffer (sample);
            GstMapInfo map;
            gst_buffer_map (buffer, &map, GST_MAP_READ);
			
			//存储文件
			if(storage_num>1024*1024*10)
			{
	    		fwrite(storage, sizeof(unsigned char), storage_num , fd);
				storage_num = 0;
	    	}
			else
			{
				memcpy(storage + storage_num,map.data,map.size);
				storage_num += map.size;
			}
			countnum += map.size;
			if(countnum >= max_file)
			{
				fclose(fd);				
				countnum = 0;
			}
			
	       gst_buffer_unmap (buffer, &map);
           gst_sample_unref (sample);
       }
       else
       {
		printf("******************************\n");
       }
    }
}

when I run this program,it failed and the error message is :

gst_app_sink_pull_sample: assertion 'GST_IS_APP_SINK (appsink)' failed!

Here is a sample of ‘appsrc ! omxh264enc ! qtmux ! filesink’
https://devtalk.nvidia.com/default/topic/1015571/jetson-tx2/what-is-maximum-video-encoding-resolution-in-pixels-/post/5253760/#5253760
FYR

I mean C code not command line.

You're using an “appsrc” element and passing it to a function that expects an “appsink”:

...
gst_app_sink_pull_sample(GST_APP_SINK(source));
...

It would seem you’re using the incorrect API calls for an “appsrc” element.

I have already fixed this bug by pulling from the appsink instead, but it still does not work.
I list all my code here:

#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <pthread.h>
#include <errno.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <time.h>
#include <math.h>
#include <sys/ioctl.h>
#include <linux/fb.h>
#include <iostream>
#include <signal.h>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>


using namespace cv;


/* Frame geometry.  The pipeline caps advertise I420 (4:2:0 planar), whose
 * frame size is width*height*3/2 bytes (full-size Y plane plus quarter-size
 * U and V planes); width*height alone covers only the luma plane and made
 * the appsrc push truncated frames. */
const unsigned int FRAMEWIDTH = 1920;
const unsigned int FRAMEHEIGHT = 1080;
const unsigned int FRAMELENYUV = FRAMEWIDTH * FRAMEHEIGHT * 3 / 2;

static unsigned char rawdata[FRAMELENYUV];	/* one raw input frame */
unsigned char storage[1024*1024*150];		/* staging buffer for encoded output */
unsigned int storage_num = 0;			/* bytes currently staged in storage[] */

/* Pipeline elements, created in factory_make(). */
static GstElement *source;		/* appsrc fed by PushBuffer() */
static GstElement *filter1;		/* capsfilter: raw I420 WxH */
static GstCaps *filter_caps1;
static GstElement *videoparse;		/* NOTE(review): never created in factory_make() below */
static GstElement *enconder;		/* omxh264enc */
static GstElement *filter2;		/* capsfilter: h264 byte-stream */
static GstElement *parser;		/* h264parse */
static GstElement *muxer;		/* qtmux */
static GstElement *sink;		/* appsink drained by sendThdFun() */
static GstCaps *filter_caps2;

GMainLoop *loop;
GstElement *pipeline;
GstBus *bus;
guint bus_watch_id;


gint iFrame = 10;
char filename[200];	/* output file name ("test.mp4") */
FILE *fd;		/* output file handle, opened in main() */


/* Bus watch: quits the main loop when the pipeline reports end-of-stream
 * or an error.  Always returns TRUE so the watch stays installed. */
gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
	GMainLoop *main_loop = (GMainLoop *) data;

	if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS)
	{
		fprintf(stderr, "End of stream\n");
		g_main_loop_quit(main_loop);
	}
	else if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
	{
		gchar *dbg_info;
		GError *err;

		gst_message_parse_error(msg, &err, &dbg_info);
		g_free(dbg_info);	/* debug detail is not used */
		g_printerr("Error: %s\n", err->message);
		g_error_free(err);
		g_main_loop_quit(main_loop);
	}

	return TRUE;
}


void PushBuffer()
{
	GstFlowReturn ret;
	GstBuffer *buffer;
	int size = FRAMELENYUV;
	GstMapInfo info;

	buffer = gst_buffer_new_allocate(NULL, size, NULL); 		
	gst_buffer_map(buffer, &info, GST_MAP_WRITE);
	unsigned char* buf = info.data;
	memmove(buf, rawdata, size);
	gst_buffer_unmap(buffer, &info);
	ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
	if(ret < 0)
	{
		puts("############################\n");
	}
	else
	{
		;
	}
}

/* Creates and configures all pipeline elements:
 * appsrc -> capsfilter(I420 WxH) -> omxh264enc -> capsfilter(byte-stream)
 * -> h264parse -> qtmux -> appsink.
 * Exits the process if any object cannot be created; returns 0. */
int factory_make()
{
	pipeline = gst_pipeline_new("mypipeline");
	source = gst_element_factory_make("appsrc", "mysource");

	enconder = gst_element_factory_make("omxh264enc", "myencoder");
	filter1 = gst_element_factory_make("capsfilter", "myfilter1");
	parser = gst_element_factory_make("h264parse", "myparser");
	muxer = gst_element_factory_make("qtmux", "mymuxer");
	sink = gst_element_factory_make("appsink", "mysink");
	/* Element names inside one bin must be unique; both capsfilters were
	 * previously named "mufilter", which breaks adding them to the bin. */
	filter2 = gst_element_factory_make("capsfilter", "myfilter2");

	filter_caps1 = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, "I420",
			"width", G_TYPE_INT, FRAMEWIDTH,
			"height", G_TYPE_INT, FRAMEHEIGHT,
			NULL);
	filter_caps2 = gst_caps_new_simple("video/x-h264",
			"stream-format", G_TYPE_STRING, "byte-stream",
			NULL);

	/* Check every object exactly once (the old test listed !muxer twice
	 * and never checked sink). */
	if (!pipeline || !source || !enconder || !filter1 || !filter2 ||
	    !parser || !muxer || !sink || !filter_caps1 || !filter_caps2)
	{
		fprintf(stderr, "Could not gst_element_factory_make, terminating\n");
		exit(0);
	}

	g_object_set(G_OBJECT(filter1), "caps", filter_caps1, NULL);
	g_object_set(G_OBJECT(filter2), "caps", filter_caps2, NULL);
	gst_caps_unref(filter_caps1);
	gst_caps_unref(filter_caps2);

	g_object_set(G_OBJECT(enconder), "insert-sps-pps", true, NULL);
	g_object_set(G_OBJECT(enconder), "profile", "high", NULL);
	g_object_set(G_OBJECT(enconder), "iframeinterval", 15, NULL);

	/* "blocksize" is a guint property: pass the integer directly.  The old
	 * code passed a char* string, which g_object_set misread as a guint.
	 * The old g_object_set on 'videoparse' is gone too — that element is
	 * never created in this pipeline, so configuring it was a NULL-object
	 * critical; filter1's caps already convey format/width/height. */
	g_object_set(G_OBJECT(source), "blocksize", FRAMELENYUV, NULL);

	return 0;
}


/* Adds the elements to the pipeline bin and links them in stream order.
 * Returns 0 on success, -1 if linking failed. */
int pipelinemake()
{
	/* The bin takes GstElements: 'filter1' belongs here — the old code
	 * passed the GstCaps 'filter_caps1' by mistake, so the capsfilter was
	 * never in the pipeline. */
	gst_bin_add_many(GST_BIN(pipeline), source, filter1, enconder, filter2, parser, muxer, sink, NULL);

	/* gst_element_link_many is variadic and MUST be NULL-terminated. */
	if (!gst_element_link_many(source, filter1, enconder, filter2, parser, muxer, sink, NULL))
	{
		fprintf(stderr, "Could not link pipeline elements\n");
		return -1;
	}

	return 0;
}

int watcher_make()
{
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
	gst_object_unref(bus);

	return 0;	
}


/* pthread entry: initializes GStreamer, builds the pipeline, runs the
 * GLib main loop until bus_call quits it (EOS or error), then releases
 * every resource it created. */
void *GstreamerThdFun(void *)
{
	gst_init(NULL, NULL);

	loop = g_main_loop_new(NULL, FALSE);

	factory_make();
	watcher_make();
	pipelinemake();
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run(loop);			/* blocks until g_main_loop_quit */

	/* release all */
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));
	g_source_remove(bus_watch_id);
	g_main_loop_unref(loop);

	/* Flowing off the end of a value-returning function is undefined
	 * behavior in C++; pthread_join expects a return value. */
	return NULL;
}

/* pthread entry: pulls H.264 samples from the appsink, stages them in the
 * global storage[] buffer, and flushes to the output file in large chunks
 * until max_file bytes have been collected. */
void *sendThdFun(void *)
{
    unsigned long int max_file = 1024*1024*512;	/* stop writing after 512 MiB */
    int countnum = 0;

    while (1)
    {
        usleep(1000*5);

        GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
        if (sample != NULL)
        {
            GstBuffer *buffer = gst_sample_get_buffer(sample);
            GstMapInfo map;
            if (gst_buffer_map(buffer, &map, GST_MAP_READ))
            {
                /* Flush the staging buffer once it exceeds 10 MiB, then
                 * append the current sample.  (Previously the flush branch
                 * returned without copying, so the sample pulled on a flush
                 * iteration was silently dropped.) */
                if (storage_num > 1024*1024*10 && fd != NULL)
                {
                    fwrite(storage, sizeof(unsigned char), storage_num, fd);
                    storage_num = 0;
                }
                memcpy(storage + storage_num, map.data, map.size);
                storage_num += map.size;

                countnum += map.size;
                if (countnum >= max_file && fd != NULL)
                {
                    /* Mark the stream closed so later iterations never
                     * fwrite through a dangling FILE*. */
                    fclose(fd);
                    fd = NULL;
                    countnum = 0;
                }
                gst_buffer_unmap(buffer, &map);
            }
            gst_sample_unref(sample);
        }
        else
        {
            printf("******************************\n");
        }
    }
}

/* Loads exactly one raw frame (FRAMELENYUV bytes) from the named file
 * into the global rawdata[] buffer; exits the process on any failure. */
void ReadFile(char* name)
{
	FILE *in = fopen(name, "rb");
	if (!in)
	{
		fprintf(stderr, "Could not open input file. Exiting.\n");
		exit(-1);
	}

	size_t got = fread(rawdata, 1, FRAMELENYUV, in);
	if (got != FRAMELENYUV)
	{
		fclose(in);
		fprintf(stderr, "Could not read input file. Exiting.\n");
		exit(-1);
	}

	fclose(in);
}


/* Entry point: opens the output file, starts the GStreamer pipeline
 * thread and the sample-draining thread, loads one raw frame, then
 * pushes that frame into the appsrc forever. */
int main()
{
	sprintf(filename, "test.mp4");
	fd = fopen(filename, "wb");
	if (fd == NULL)
	{
		fprintf(stderr, "Could not open output file %s\n", filename);
		exit(0);
	}

	pthread_t GstreamerThd;
	pthread_t sendFrameThd;
	int ret;

	ret = pthread_create(&GstreamerThd, NULL, GstreamerThdFun, NULL);
	if (ret != 0)
	{
		fprintf(stderr, "GstreamerThd returned error %d\n", ret);
		exit(0);
	}
	ret = pthread_create(&sendFrameThd, NULL, sendThdFun, NULL);
	if (ret != 0)
	{
		fprintf(stderr, "sendFrameThd returned error %d\n", ret);
		exit(0);
	}

	ReadFile("a.yuv");

	/* The GStreamer thread creates 'source' asynchronously; pushing
	 * before it exists is what caused the
	 * "assertion 'GST_IS_APP_SRC (appsrc)' failed" critical. */
	while (source == NULL)
	{
		usleep(1000*5);
	}

	while (1)
	{
		usleep(1000*5);
		PushBuffer();
	}

	/* Unreachable while the loop above runs forever; kept so a future
	 * bounded loop terminates the stream cleanly. */
	gst_app_src_end_of_stream(GST_APP_SRC(source));

	return 0;
}

Build and run it with the following message:
GLib-GObject-WARNING **: invalid unclassed pointer in cast to ‘GstAppSink’
** (videosave:1756): CRITICAL **: gst_app_sink_pull_sample: assertion ‘GST_IS_APP_SINK (appsink)’ failed
CRITICAL **: gst_app_src_push_buffer_full: assertion ‘GST_IS_APP_SRC (appsrc)’ failed

You are the one sourcing the raw image, why then use an appsink?

Sorry, I don't really understand what you mean. Could you give me some more detailed information?

My interpretation was that your application has YUV (raw image data) and you want to encode it to MP4, correct?

If so you may use the appsrc [1] element to insert your own data into a gstreamer encoding pipeline.

[APP code + appsrc] --> [parser] --> [encodebin] --> ***

If you need the h264 data back you may similarly use an appsink element [2]:

[APP code + appsrc] --> [parser] --> [encodebin] --> [something] --> [appsink]

[1] https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-plugins/html/gst-plugins-base-plugins-appsrc.html

[2] https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-appsink.html

You are right: I want to save my raw data as MP4, and the raw data is obtained by reading a local picture repeatedly.

I successfully save a .h264 file by the following pipeline:
[appsrc]->[videoparse]->[omxh264enc]->[capsfilter]->[appsink]

However, when I change the pipeline like this:
[appsrc]->[videoparse]->[omxh264enc]->[capsfilter]->[h264parse]->[qtmux]->[appsink]
I set capsfilter with (“video/x-h264”, “stream-format”, G_TYPE_STRING,“byte-stream”, NULL)

It doesn’t work,is there something wrong?

Looks like ‘qtmux ! appsink’ is not supported in gstreamer:
http://gstreamer-devel.966125.n4.nabble.com/appsink-can-t-get-qtmux-streamheader-td4119740.html
http://gstreamer-devel.966125.n4.nabble.com/mp4mux-amp-appsink-td4676469.html#a4676475

We always run ‘qtmux ! filesink’. Not sure, but it seems you have to implement seeking in appsink for ‘qtmux ! appsink’.

Thus, you mean I need to change my pipeline to:
[appsrc]->[videoparse]->[omxh264enc]->[capsfilter]->[h264parse]->[qtmux]->[filesink]

Can I change to save .avi from YUV?

‘qtmux’ merges streams (audio and video) into QuickTime(.mov) files.

Are you sure you need the ‘qtmux’ element for your specific application?

Yes, If you use the above pipeline with a filesink above you should be able to save it as .mov or .avi and any reasonable videoplayer should be able to playback the H264 encoded content.

I want to implement a screen-recording module, and there is no audio.
Could you show me a code example or give me some instructions on that?

I am puzzled that can I use a pipeline like :
[appsrc]->[videoparse]->[omxh264enc]->[filesink]

appsrc gets its data via PushBuffer.

Should appsrc be used with appsink, and filesrc with filesink?

For screen record, you can refer to
https://devtalk.nvidia.com/default/topic/1001636/

filesrc/filesink/appsrc/appsink are all independent and not required to be used together.

Your questions are purely about gstreamer and we suggest you go to
http://gstreamer-devel.966125.n4.nabble.com