Hi,
I’m trying to create a GStreamer pipeline that takes the feed from one camera and splits it into three branches: an appsink, JPEG encoding into a multifilesink, and H264 encoding into a splitmuxsink. A version of the pipeline with only the appsink and H264 branches works perfectly. However, when I add the JPEG encoder branch on top of that, the pipeline blocks without any error message. Can someone check my code for bugs?
Much appreciated!
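For reference, here is the wiring I’m aiming for, written out with the variable names from create_main_pipeline in the code below:
/*
 * nvcamerasrc -> caps(NVMM, 3840x2160@30) -> q0 -> tee t0
 *   t0 -> q0a -> nvconv0 -> caps(I420, system memory) -> appsink
 *   t0 -> q0b -> nvconv1 -> caps(I420) -> clockoverlay -> nvconv2
 *      -> caps(NVMM) -> q1 -> tee t1
 *     t1 -> q1a -> omxh264enc -> h264parse -> splitmuxsink
 *     t1 -> q1b -> nvjpegenc -> multifilesink
 */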
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <signal.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <semaphore.h>
#include <time.h> /* time(), localtime() for timestamped filenames */
#define MAXLINE 1024
#define VIDEODIR "/home/nvidia/video"
#define OVERLAY_PIPE "/dev/shm/overlay.pipe"
/* Globals */
static GstElement *main_pipeline;
static GstElement *overlay;
static volatile sig_atomic_t trigger = 0; /* bumped from the SIGUSR1 handler */
static sem_t *sem_id;
/* Override SIGINT to shut down the pipeline safely. */
static void
sigint_handler(int signum)
{
/* Shut down safely on SIGINT by sending an End-of-Stream (EOS) event to the pipeline. */
gst_element_send_event(main_pipeline, gst_event_new_eos());
remove(OVERLAY_PIPE);
}
/* Takes SIGUSR1 as a trigger (kill -10). */
static void
sigusr1_handler(int signum)
{
trigger++;
}
/* GstBus watch callback function. Handles End-of-Stream (EOS) events and errors. */
static gboolean
bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
/* Watches for messages on the pipeline bus. */
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS: {
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
/* Thread routine: polls "overlay.pipe" to update the overlay text. */
static void *
info_overlay(void *data)
{
GstElement *overlay = (GstElement *) data;
FILE *p;
if (access(OVERLAY_PIPE, F_OK))
mkfifo(OVERLAY_PIPE, 0666);
while (1) {
char buffer[MAXLINE] = "";
p = fopen(OVERLAY_PIPE, "r");
if (p == NULL)
break; /* bail out if the FIFO cannot be opened */
fgets(buffer, MAXLINE, p);
fclose(p);
buffer[strcspn(buffer, "\n")] = '\0'; /* strip the newline without underflowing on an empty read */
g_object_set(overlay, "text", buffer, NULL);
}
return NULL;
}
/* Name H264 chunks by timestamp. */
static gchararray
name_by_timestamp(GstElement* splitmuxsink, guint id, gpointer userdata)
{
/* Names the encoded files by timestamp. */
time_t now = time(NULL);
struct tm curr_tm = *localtime(&now);
char video_dir[64];
char date_dir[16];
gchar *filename;
// Create top-level video directory
strcpy(video_dir, VIDEODIR);
if (access(video_dir, F_OK))
mkdir(video_dir, 0775);
// Create date directory
sprintf(date_dir, "/%d%02d%02d", curr_tm.tm_year + 1900,
curr_tm.tm_mon + 1, curr_tm.tm_mday);
strcat(video_dir, date_dir);
if (access(video_dir, F_OK))
mkdir(video_dir, 0775);
// Build the chunk filename; splitmuxsink frees the returned string with g_free()
filename = g_strdup_printf("%s/%d%02d%02d_%02d%02d%02d%s.h264", video_dir,
curr_tm.tm_year + 1900, curr_tm.tm_mon + 1, curr_tm.tm_mday,
curr_tm.tm_hour, curr_tm.tm_min, curr_tm.tm_sec,
curr_tm.tm_isdst ? "DST" : "");
return filename;
}
/* Handler for grabbing new samples from appsink. */
static GstFlowReturn
on_new_sample_from_sink (GstAppSink *elt, gpointer user_data)
{
GstSample *sample;
GstBuffer *buf;
GstMapInfo map;
/* get the sample from appsink */
sample = gst_app_sink_pull_sample(elt);
buf = gst_sample_get_buffer(sample);
if (gst_buffer_map (buf, &map, GST_MAP_READ)) {
/* map.data, map.size */
/* TODO: Load frame into framebuffer */
fprintf(stdout, "frame\n");
gst_buffer_unmap (buf, &map);
/* TODO: Respond to trigger */
if (trigger > 0) {
trigger--;
}
}
gst_sample_unref (sample);
return GST_FLOW_OK;
}
/* Creates the H264 and frame buffer pipeline. */
static GstElement *
create_main_pipeline(GMainLoop *loop)
{
/* Make a new GStreamer pipeline */
GstElement *pipeline = gst_pipeline_new(NULL);
/* Set up GStreamer Bus Message Handler */
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
/* Create GStreamer elements */
GstElement *src = gst_element_factory_make("nvcamerasrc", NULL);
GstElement *t0 = gst_element_factory_make("tee", NULL);
GstElement *t1 = gst_element_factory_make("tee", NULL);
GstElement *q0 = gst_element_factory_make("queue", NULL);
GstElement *q0a = gst_element_factory_make("queue", NULL);
GstElement *q0b = gst_element_factory_make("queue", NULL);
GstElement *q1 = gst_element_factory_make("queue", NULL);
GstElement *q1a = gst_element_factory_make("queue", NULL);
GstElement *q1b = gst_element_factory_make("queue", NULL);
GstElement *nvconv0 = gst_element_factory_make("nvvidconv", NULL);
GstElement *nvconv1 = gst_element_factory_make("nvvidconv", NULL);
GstElement *nvconv2 = gst_element_factory_make("nvvidconv", NULL);
overlay = gst_element_factory_make("clockoverlay", NULL);
GstElement *h264enc = gst_element_factory_make("omxh264enc", NULL);
GstElement *h264parse = gst_element_factory_make("h264parse", NULL);
GstElement *jpegenc = gst_element_factory_make("nvjpegenc", NULL);
GstElement *appsink = gst_element_factory_make("appsink", NULL);
GstElement *multifilesink = gst_element_factory_make("multifilesink", NULL);
GstElement *splitmuxsink = gst_element_factory_make("splitmuxsink", NULL);
if ( !pipeline || !src || !t0 || !t1 || !q0 || !q0a || !q0b || !q1 || !q1a
|| !q1b || !nvconv0 || !nvconv1 || !nvconv2 || !overlay || !h264enc
|| !h264parse || !jpegenc || !appsink || !multifilesink || !splitmuxsink ) {
g_printerr("One element could not be created.\n");
return NULL;
}
/* Add elements to pipeline */
gst_bin_add_many(GST_BIN(pipeline), src, t0, t1, q0, q0a, q0b, q1, q1a, q1b,
nvconv0, nvconv1, nvconv2, overlay, h264enc, h264parse, jpegenc,
appsink, splitmuxsink, multifilesink, NULL);
/* Configure GStreamer Element properties */
g_object_set(src, "fpsRange", "30.0 30.0", "queue-size", 30, NULL);
g_object_set(jpegenc, "quality", 100, NULL);
/* Configure the appsink signal and attach new sample handler. */
g_object_set(appsink, "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (appsink, "new-sample", G_CALLBACK (on_new_sample_from_sink), NULL);
/* Attach the file-naming callback to splitmuxsink. */
g_object_set(splitmuxsink, "location", "%d.264", "max-size-time", G_GUINT64_CONSTANT(60000000000), NULL); /* 60-second chunks */
g_signal_connect(splitmuxsink, "format-location", G_CALLBACK(name_by_timestamp), NULL);
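/* The format-location handler overrides the static location pattern set above. */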
g_object_set(multifilesink, "location", "/dev/shm/live.jpg", NULL);
g_object_set(overlay, "shaded-background", 1, "shading-value", 255, "ypad", 0, "xpad", 0,
"halignment", 0, "valignment", 1, "auto-resize", 0, "font-desc", "sans 32",
"line-alignment", 0, "time-format", " Date: %D Time: %H:%M:%S", NULL);
g_object_set(h264enc, "control-rate", 2, "bitrate", 8000000, "profile", 8, NULL);
/* Link elements */
GstCaps *caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, 3840,
"height", G_TYPE_INT, 2160,
"framerate", GST_TYPE_FRACTION, 30, 1, NULL);
gst_caps_set_features(caps, 0, gst_caps_features_new("memory:NVMM", NULL));
gst_element_link_filtered(src, q0, caps);
gst_caps_unref(caps);
gst_element_link_many(q0, t0, q0a, nvconv0, NULL);
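/* These caps carry no memory:NVMM feature, so nvconv0 copies frames into system memory for the appsink. */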
caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, 3840,
"height", G_TYPE_INT, 2160,
"framerate", GST_TYPE_FRACTION, 30, 1, NULL);
gst_element_link_filtered(nvconv0, appsink, caps);
gst_caps_unref(caps);
gst_element_link_many(t0, q0b, nvconv1, NULL);
caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, 3840,
"height", G_TYPE_INT, 2160,
"framerate", GST_TYPE_FRACTION, 30, 1, NULL);
gst_element_link_filtered(nvconv1, overlay, caps);
gst_caps_unref(caps);
gst_element_link(overlay, nvconv2);
caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, 3840,
"height", G_TYPE_INT, 2160,
"framerate", GST_TYPE_FRACTION, 30, 1, NULL);
gst_caps_set_features(caps, 0, gst_caps_features_new("memory:NVMM", NULL));
gst_element_link_filtered(nvconv2, q1, caps);
gst_caps_unref(caps);
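/* Fan out the overlaid NVMM stream: t1 feeds both the H264 recording branch and the JPEG snapshot branch. */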
gst_element_link_many(q1, t1, q1a, h264enc, h264parse, splitmuxsink, NULL);
gst_element_link_many(t1, q1b, jpegenc, multifilesink, NULL);
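/* TODO: gst_element_link() and friends return gboolean; checking those results would show whether the JPEG branch ever links at all. */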
return pipeline;
}
int
main (int argc, char *argv[])
{
/* Capture the onboard camera feed and fan it out to the appsink, JPEG, and H264 branches. */
GMainLoop *loop;
pthread_t pid;
/* Initialize GStreamer */
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* Attach custom signal handlers. */
signal(SIGINT, sigint_handler);
signal(SIGUSR1, sigusr1_handler);
/* Create the main GStreamer pipeline. */
main_pipeline = create_main_pipeline(loop);
if (main_pipeline == NULL)
return -1;
/* Create the thread that updates the text overlay on the H264 feed. */
pthread_create(&pid, NULL, info_overlay, overlay);
/* Start Pipeline */
g_print("Starting Pipeline\n");
gst_element_set_state (main_pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
/* Exited GMainLoop. */
g_print("Cleaning up\n");
gst_element_set_state(main_pipeline, GST_STATE_NULL);
gst_object_unref(main_pipeline);
g_main_loop_unref(loop);
return 0;
}