I’m going to share my complete video encoding thread. You can see the mutexes I use to protect the data shared between this thread and the writing thread. If you spot any errors @DaneLLL, please let me know. It’s working very well for me at the moment and running in real time. My only issue is that it seems to be encoding at twice the framerate I’m aiming for. I’m not sure whether that’s a feeding issue, though, since playback does report 30 FPS as it should.
Please ignore anything that’s just not cleaned up yet. This is still a WIP.
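For reference, the shared state this thread touches looks roughly like this. It’s trimmed down to just the fields used below; the struct name, exact types, and buffer count here are only illustrative, and the render-thread hand-off is simplified:

#include <semaphore.h>

/* Rough sketch of the state shared between the render thread and the
 * encoder thread. Field names match the code below; the type name,
 * exact types, and buffer count are only illustrative. */
typedef struct {
    volatile int recording;     /* cleared to stop the encoder loop          */
    sem_t p_mutex;              /* posted by the render thread: frame ready  */
    sem_t r_mutex;              /* posted by the encoder: frame consumed     */
    int buffer_num;             /* index of the buffer that was just filled  */
    void *rgb_out_pixels[2];    /* RGBA frame buffers (double-buffered here) */
    char filename[64];          /* output file name                          */
} vod_state_t;

vod_state_t this_vod;

/* Render-thread side of the hand-off, roughly: fill a buffer, flag it,
 * then wait until the encoder signals it has pushed the frame. */
static void hand_off_frame(int just_filled)
{
    this_vod.buffer_num = just_filled;
    sem_post(&this_vod.p_mutex);    /* frame ready for encoding */
    sem_wait(&this_vod.r_mutex);    /* encoder has taken it     */
}

And here is the encoding thread itself: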
#ifdef MKV_OUT
#define GST_ENC_PIPELINE "appsrc name=srcEncode ! " \
"video/x-raw, width=(int)2880, height=(int)1440, format=(string)RGBA, framerate=(fraction)30/1 ! " \
"nvvidconv ! video/x-raw(memory:NVMM), format=NV12 ! " \
"nvv4l2h264enc bitrate=16000000 profile=4 ! " \
"h264parse ! matroskamux ! filesink location=%s"
#else
#define GST_ENC_PIPELINE "appsrc name=srcEncode ! " \
"video/x-raw, width=(int)2880, height=(int)1440, format=(string)RGBA, framerate=(fraction)30/1 ! " \
"nvvidconv ! video/x-raw(memory:NVMM), format=NV12 ! " \
"nvv4l2h264enc bitrate=16000000 profile=4 ! " \
"h264parse ! qtmux ! filesink location=%s"
#endif
void *video_encoding_thread(void *arg)
{
time_t r_time;
struct tm *l_time = NULL;
char datetime[16];
gchar descr[1024];
GstElement *pipeline = NULL, *srcEncode = NULL;
GError *error = NULL;
GstFlowReturn ret = GST_FLOW_ERROR;
GstBuffer *buffer = NULL;
GstClock *sys_clock = NULL;
GstClockTime current_time = 0;
guint64 count = 0;
struct timespec timeout;
GstBus *bus = NULL;
time(&r_time);
l_time = localtime(&r_time);
strftime(datetime, sizeof(datetime), "%Y%m%d_%H%M%S", l_time);
#ifdef MKV_OUT
snprintf(this_vod.filename, sizeof(this_vod.filename), "vid-%s.mkv", datetime);
#else
snprintf(this_vod.filename, sizeof(this_vod.filename), "vid-%s.mp4", datetime);
#endif
sys_clock = gst_system_clock_obtain();
current_time = gst_clock_get_time(sys_clock);
g_snprintf(descr, 1024, GST_ENC_PIPELINE, this_vod.filename);
pipeline = gst_parse_launch(descr, &error);
if (error != NULL) {
SDL_Log("could not construct pipeline \"%s\": %s\n", descr, error->message);
g_error_free(error);
return NULL;
}
/* get the appsrc so we can push buffers into it */
srcEncode = gst_bin_get_by_name(GST_BIN(pipeline), "srcEncode");
if (srcEncode == NULL) {
SDL_Log("could not find appsrc 'srcEncode' in pipeline \"%s\"\n", descr);
gst_object_unref(pipeline);
return NULL;
}
gst_element_set_state(pipeline, GST_STATE_PLAYING);
while (this_vod.recording) {
/* sem_timedwait() takes an absolute CLOCK_REALTIME deadline, so build "now + ~50 ms" */
clock_gettime(CLOCK_REALTIME, &timeout);
timeout.tv_nsec += 50000000;
if (timeout.tv_nsec >= 1000000000L) {
timeout.tv_sec += 1;
timeout.tv_nsec -= 1000000000L;
}
if (sem_timedwait(&this_vod.p_mutex, &timeout) == 0) {
//printf("Creating new encoding buffer.\n");
/* TODO: gst_buffer_new_wrapped() takes ownership of the memory and g_free()s
it when the buffer is released; switch to gst_buffer_new_wrapped_full() with a
NULL notify if the render thread keeps reusing rgb_out_pixels. */
buffer = gst_buffer_new_wrapped(this_vod.rgb_out_pixels[this_vod.buffer_num],
OUTPUT_WIDTH * 2 * RGB_OUT_SIZE * OUTPUT_HEIGHT);
if (buffer == NULL) {
printf("Failure to allocate new buffer for encoding.\n");
break;
}
//buffer->pts = gst_clock_get_time(sys_clock);
buffer->duration = gst_util_uint64_scale(1, GST_SECOND, 30);
current_time += buffer->duration;
buffer->pts = current_time;
buffer->offset = count++;
//printf("Feeding the buffer (%lu, %lu)...\n", buffer->offset, buffer->pts);
/* push the buffer into the appsrc */
g_signal_emit_by_name(srcEncode, "push-buffer", buffer, &ret);
sem_post(&this_vod.r_mutex);
gst_buffer_unref(buffer);
if (ret != GST_FLOW_OK) {
printf("GST_FLOW error while pushing buffer: %d\n", ret);
break;
}
}
}
/* drain any pending frame flag and release the render thread if it is blocked */
sem_trywait(&this_vod.p_mutex);
sem_post(&this_vod.r_mutex);
/* signal end-of-stream and wait for the pipeline to flush it through */
g_signal_emit_by_name(srcEncode, "end-of-stream", &ret);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(srcEncode);
gst_object_unref(sys_clock);
gst_object_unref(pipeline);
return NULL;
}
Thanks again, @DaneLLL!