Hi,
I am using the code that I found at https://devtalk.nvidia.com/default/topic/1024356/solved-harware-accelerated-video-encoding-from-c-buffer-with-gstreamer-omxh264enc-and-filesink/ to read a YUV video (640x360) from a file, encode it with omxh264enc, and write the encoded video to a file.
Code used:
// compile with:
// g++ -Wall $(pkg-config --cflags gstreamer-1.0) -o App1 Application1.cpp $(pkg-config --libs gstreamer-1.0) -lgstapp-1.0
// g++ -std=c++11 -I/usr/include/gstreamer-1.0 -I/usr/lib/aarch64-linux-gnu/gstreamer-1.0/include -I/usr/include/glib-2.0 -I/usr/lib/aarch64-linux-gnu/glib-2.0/include -o App1 Application1.cpp -pthread -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0 -lgstapp-1.0 -lgstbase-1.0
#include <stdio.h>
#include <string.h>
#include <fstream>
#include <unistd.h>
#include <cstdlib>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
int main(int argc, char **argv){
    GstPipeline *pipeline;
    GstAppSrc *appsrc;
    GstElement *filter1, *encoder, *filter2, *parser, *qtmux, *sink;
    GstStateChangeReturn state_ret;
    GstClockTime timestamp = 0;

    //Step 1 : Initialize gstreamer
    gst_init (&argc, &argv);

    //Step 2 : Create pipeline, and pipeline elements
    pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
    appsrc  = (GstAppSrc*)gst_element_factory_make("appsrc", "mysrc");
    filter1 = gst_element_factory_make ("capsfilter", "myfilter1");
    encoder = gst_element_factory_make ("omxh264enc", "myomx");
    filter2 = gst_element_factory_make ("capsfilter", "myfilter2");
    parser  = gst_element_factory_make ("h264parse", "myparser");
    qtmux   = gst_element_factory_make ("qtmux", "mymux");
    sink    = gst_element_factory_make ("filesink", NULL);

    if( !pipeline || !appsrc || !filter1 || !encoder || !filter2 ||
        !parser || !qtmux || !sink) {
        printf("Error creating pipeline elements!\n");
        exit(2);
    }
    //Step 3 : Attach elements to pipeline
    gst_bin_add_many( GST_BIN(pipeline), (GstElement*)appsrc, filter1, encoder,
                      filter2, parser, qtmux, sink, NULL);

    //Step 4 : Set pipeline element attributes
    g_object_set (appsrc, "format", GST_FORMAT_TIME, NULL);
    GstCaps *filtercaps1 = gst_caps_new_simple ("video/x-raw", // ?!!
                                                "format", G_TYPE_STRING, "YUV",
                                                "width", G_TYPE_INT, 640,
                                                "height", G_TYPE_INT, 360,
                                                //"framerate", GST_TYPE_FRACTION, 1, 1,
                                                NULL);
    g_object_set (G_OBJECT (filter1), "caps", filtercaps1, NULL);
    GstCaps *filtercaps2 = gst_caps_new_simple ("video/x-h264",
                                                "stream-format", G_TYPE_STRING, "byte-stream",
                                                NULL);
    g_object_set (G_OBJECT (filter2), "caps", filtercaps2, NULL);
    g_object_set (G_OBJECT (sink), "location", "/home/ubuntu/Desktop/WoodyFTP/video/output.h264", NULL);

    //Step 5 : Link elements together
    g_assert( gst_element_link_many((GstElement*)appsrc, filter1, encoder, filter2,
                                    parser, qtmux, sink, NULL ) );
    //Step 6 : Play the pipeline
    state_ret = gst_element_set_state((GstElement*)pipeline, GST_STATE_PLAYING);
    g_assert(state_ret == GST_STATE_CHANGE_ASYNC);

    //Step 7 : Get a pointer to the test input
    FILE *testfile = fopen("/home/ubuntu/Desktop/WoodyFTP/video/Basketball_640*360.yuv", "rb");
    g_assert(testfile != NULL);

    // I should find another method to push data !!!
    //Step 8 : Push the data from buffer to gstpipeline "100" times // ??!!
    for(int i = 0; i < 100; i++) {
        char* filebuffer = (char*)malloc (161395200); //Allocate memory for framebuffer ????
        if (filebuffer == NULL) {
            printf("Memory error\n");
            exit (2);
        } //Errorcheck

        size_t bytesread = fread(filebuffer, 1, (161395200), testfile); //Read to filebuffer
        printf("File Read: %zu bytes\n", bytesread);

        GstBuffer *pushbuffer; //Actual databuffer
        GstFlowReturn ret;     //Return value
        pushbuffer = gst_buffer_new_wrapped (filebuffer, 161395200); //Wrap the data

        //Set frame timestamp
        GST_BUFFER_PTS (pushbuffer) = timestamp;
        GST_BUFFER_DTS (pushbuffer) = timestamp;
        GST_BUFFER_DURATION (pushbuffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);
        timestamp += GST_BUFFER_DURATION (pushbuffer);
        printf("Frame is at %lu\n", timestamp);

        ret = gst_app_src_push_buffer( appsrc, pushbuffer); //Push data into pipeline
        g_assert(ret == GST_FLOW_OK);
    }

    //Step 9 : Declare end of stream
    gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
    printf("End Program.\n");
    usleep(100000);
}
I am not sure that this code really matches my application: I don't understand why @https://devtalk.nvidia.com/member/2219802/ pushes the data in a for loop that runs 100 times, and the buffer size I chose (161395200 bytes) is simply the size of my video file.
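What I had in mind instead is to push the file one frame at a time. Assuming the raw data is I420 (which I have not verified), one 640x360 frame would be 640 * 360 * 3 / 2 = 345600 bytes, so my 161395200-byte file would hold exactly 467 frames. This is only an untested sketch of the push loop I am considering, reusing the appsrc, testfile and timestamp variables from the code above; the 30 fps frame duration is also just a guess:

    // Sketch only: push one I420 frame per iteration (assuming 640x360 I420 at 30 fps)
    const size_t frame_size = 640 * 360 * 3 / 2;              // 345600 bytes per frame (my assumption)
    guint8 *framebuffer = (guint8*)g_malloc (frame_size);     // scratch buffer reused for every read
    size_t bytesread;
    while ((bytesread = fread (framebuffer, 1, frame_size, testfile)) == frame_size) {
        // Copy the frame into a fresh GstBuffer so the scratch buffer can be reused
        GstBuffer *pushbuffer = gst_buffer_new_allocate (NULL, frame_size, NULL);
        gst_buffer_fill (pushbuffer, 0, framebuffer, frame_size);

        // Timestamp each frame at 1/30 s intervals
        GST_BUFFER_PTS (pushbuffer) = timestamp;
        GST_BUFFER_DTS (pushbuffer) = timestamp;
        GST_BUFFER_DURATION (pushbuffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30);
        timestamp += GST_BUFFER_DURATION (pushbuffer);

        GstFlowReturn ret = gst_app_src_push_buffer (appsrc, pushbuffer); // takes ownership of pushbuffer
        g_assert (ret == GST_FLOW_OK);
    }
    g_free (framebuffer);

The idea is that gst_app_src_push_buffer() takes ownership of each buffer, so a new GstBuffer is created per frame while the read buffer is reused, instead of wrapping the whole file at once.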
But when I compile and run the program, I get this error:
**
ERROR:Application1.cpp:68:int main(int, char**): assertion failed: (gst_element_link_many((GstElement*)appsrc, filter1, encoder, filter2, parser, qtmux, sink, NULL ))
Aborted (core dumped)
Can someone help me figure out what is causing this error?
Thanks in advance,