I have 6 raspberry pi camera modules interfaced via mipi. I am able to capture all the cameras simultaneously by running gstreamer pipelines like the below
gst-launch-1.0 nvcamerasrc sensor-id=4 fpsRange="30 30" ! "video/x-raw(memory:NVMM), width=(int)820, height=(int)616,format=(string)I420, framerate=(fraction)30/1" ! nvvidconv ! nveglglessink -v
However when i run the same pipeline as a code it throws the following error
…
Error received from element video-source: GStreamer error: state change failed and some element failed to post a proper error message with the reason for the failure.
Debugging information: gstbasesrc.c(3354): gst_base_src_start (): /GstPipeline:pipeline/GstNvCameraSrc:video-source:
Failed to start
NvCameraSrc: Trying To Set Default Camera Resolution. Selected 820x616 FrameRate = 30.000000 …
Socket read error. Camera Daemon stopped functioning…
gst_nvcamera_open() failed ret=0
Error received from element video-source: GStreamer error: state change failed and some element failed to post a proper error message with the reason for the failure.
Debugging information: gstbasesrc.c(3354): gst_base_src_start (): /GstPipeline:pipeline/GstNvCameraSrc:video-source:
Failed to start
…
for some cameras, randomly. Below is my complete code:
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <bits/stdc++.h>
#include <unistd.h>
#include <sys/time.h>
#include <opencv2/opencv.hpp>
#include <pthread.h>
using namespace cv;
using namespace std;
/* Per-camera state: one GStreamer pipeline plus the most recent frame.
 * One instance is created per sensor and handed to its own thread. */
typedef struct CameraPipeline
{
GstElement *pipeline;            /* top-level GstPipeline */
GstElement *camera_source;       /* nvcamerasrc ("video-source") */
GstElement *input_caps_filter;   /* capsfilter for the NVMM source caps */
GstElement *output_caps_filter;  /* capsfilter for converted output (currently unused in the graph) */
GstElement *conversion_element;  /* nvvidconv */
GstElement *sink;                /* nveglglessink (appsink code is commented out) */
GstCaps *input_caps;             /* "video/x-raw(memory:NVMM), 820x616, I420, 30/1" */
GstCaps *output_caps;            /* desired output caps built from width/height/fps below */
GstBus *bus;                     /* pipeline bus, polled for ERROR messages */
GstMessage *msg;                 /* last message popped from the bus */
int camera_index;                /* nvcamerasrc sensor-id */
int width;                       /* destination frame width */
int height;                      /* destination frame height */
int fps;                         /* destination framerate */
unsigned char *frame;            /* I420 frame buffer, width*height*1.5 bytes */
}CameraPipeline;
/* Appsink "new-sample" callback: pulls the latest sample and copies its
 * pixel data into the per-camera buffer allocated by CameraPipelineInit().
 * `data` is the owning CameraPipeline. */
static void new_sample(GstElement *appsink, void *data)
{
CameraPipeline *ptr = (CameraPipeline *)data;
GstBuffer *buffer;
GstMapInfo info;
GstSample *sample;
/* Retrieve the sample; we own the returned reference.
 * NOTE: the original used typographic quotes around pull-sample, which
 * does not compile. */
g_signal_emit_by_name (appsink, "pull-sample", &sample);
if (sample)
{
/* Print a * to indicate a received buffer. */
g_print("*");
/* The buffer is owned by the sample: do NOT unref it separately —
 * the original gst_buffer_unref(buffer) was a double-free. */
buffer = gst_sample_get_buffer(sample);
if (gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ))
{
/* info.data is only valid while the buffer is mapped, so we must
 * copy it out. The original stored the raw pointer, leaving
 * ptr->frame dangling after unmap (and leaking the buffer that
 * CameraPipelineInit allocated). */
size_t frame_size = (size_t)ptr->width * (size_t)ptr->height * 3 / 2; /* I420 */
size_t n = info.size < frame_size ? info.size : frame_size;
memcpy(ptr->frame, info.data, n);
gst_buffer_unmap(buffer, &info);
}
else
{
printf("GStreamer: unable to map buffer");
}
/* Original leaked the sample on the map-failure path; unref on all paths. */
gst_sample_unref (sample);
}
}
/* Fill in per-camera parameters and allocate the destination frame
 * buffer. An I420 frame is exactly width*height*3/2 bytes; use integer
 * arithmetic instead of the original double-valued `* 1.5`.
 * On allocation failure p->frame is NULL and a diagnostic is printed. */
void CameraPipelineInit(CameraPipeline *p, int cameraIndex, int dstWidth, int dstHeight, int dstFPS)
{
p->camera_index = cameraIndex;
p->width = dstWidth;
p->height = dstHeight;
p->fps = dstFPS;
p->frame = (unsigned char *)malloc((size_t)dstWidth * (size_t)dstHeight * 3 / 2);
if (p->frame == NULL)
{
fprintf(stderr, "CameraPipelineInit: frame allocation failed for camera %d\n", cameraIndex);
}
}
int cnt = 0; /* unused; shadowed by the local `cnt` declared in main() */
void * CameraPipelineRun(void *p)
{
gst_init(NULL, NULL);
CameraPipeline *ptr = (CameraPipeline *)p;
ptr->pipeline = gst_pipeline_new("pipeline");
ptr->camera_source = gst_element_factory_make("nvcamerasrc", "video-source");
ptr->conversion_element = gst_element_factory_make("nvvidconv", "video-convert");
ptr->input_caps_filter = gst_element_factory_make("capsfilter", "input-caps-filter");
ptr->output_caps_filter = gst_element_factory_make("capsfilter", "output-caps-filter");
ptr->sink = gst_element_factory_make("nveglglessink", NULL);
ptr->input_caps = gst_caps_from_string("video/x-raw(memory:NVMM), width=820, height=616, format=I420, framerate=(fraction)30/1");
ptr->output_caps = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, ptr->width,
"height", G_TYPE_INT, ptr->height,
"framerate", GST_TYPE_FRACTION, ptr->fps, 1,
"format", G_TYPE_STRING, "I420",
NULL);
g_object_set(G_OBJECT(ptr->camera_source), "sensor-id", ptr->camera_index, NULL);
g_object_set(G_OBJECT(ptr->input_caps_filter), "caps", ptr->input_caps, NULL);
g_object_set(G_OBJECT(ptr->output_caps_filter), "caps", ptr->output_caps, NULL);
gst_bin_add_many(GST_BIN(ptr->pipeline), ptr->camera_source, ptr->input_caps_filter, ptr->conversion_element, ptr->sink, NULL);
gst_element_link_many(ptr->camera_source, ptr->input_caps_filter, ptr->conversion_element, ptr->sink, NULL);
//gst_app_sink_set_max_buffers (GST_APP_SINK(ptr->sink), 1);
//gst_app_sink_set_drop (GST_APP_SINK(ptr->sink), true);
//g_object_set(G_OBJECT(ptr->sink),"emit-signals",TRUE, NULL);
//g_object_set(G_OBJECT(ptr->sink),"enable-last-sample", FALSE, NULL);
//g_signal_connect (ptr->sink, "new-sample", G_CALLBACK (new_sample), (void *)ptr);
// gst_pipeline_set_clock((GstPipeline *)ptr->pipeline, NULL);
gst_element_set_state(ptr->pipeline, GST_STATE_PLAYING);
ptr->bus = gst_element_get_bus (ptr->pipeline);
ptr->msg = gst_bus_timed_pop_filtered (ptr->bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR);
/* Parse message */
if (ptr->msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (ptr->msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (ptr->msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (ptr->msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (ptr->msg);
}
gst_object_unref (ptr->bus);
gst_element_set_state (ptr->pipeline, GST_STATE_NULL);
gst_object_unref (ptr->pipeline);
}
int main()
{
int num_cameras = 6;
int width = 1920;
int height = 1080;
int fps = 30;
unsigned char **img;
img = (unsigned char **)malloc(num_cameras * sizeof(unsigned char *));
CameraPipeline **cameras;
pthread_t camera_pipeline_thread[num_cameras];
cameras = (CameraPipeline **)malloc(num_cameras * sizeof(CameraPipeline *));
for(int i = 0 ; i < num_cameras ; i++)
{
cameras[i] = (CameraPipeline *)malloc(sizeof(CameraPipeline));
CameraPipelineInit(cameras[i], i, width, height, fps);
img[i] = (unsigned char *)malloc(width * height * 1.5);
}
pthread_attr_t attr_;
cpu_set_t cpus_;
pthread_attr_init(&attr_);
CPU_ZERO(&cpus_);
CPU_SET(1, &cpus_);
pthread_attr_setaffinity_np(&attr_, sizeof(cpu_set_t), &cpus_);
pthread_create(&camera_pipeline_thread[0], NULL, CameraPipelineRun, (void *)cameras[0]);
CPU_ZERO(&cpus_);
CPU_SET(2, &cpus_);
pthread_attr_setaffinity_np(&attr_, sizeof(cpu_set_t), &cpus_);
pthread_create(&camera_pipeline_thread[1], NULL, CameraPipelineRun, (void *)cameras[1]);
CPU_ZERO(&cpus_);
CPU_SET(3, &cpus_);
pthread_attr_setaffinity_np(&attr_, sizeof(cpu_set_t), &cpus_);
pthread_create(&camera_pipeline_thread[2], NULL, CameraPipelineRun, (void *)cameras[2]);
CPU_ZERO(&cpus_);
CPU_SET(1, &cpus_);
pthread_attr_setaffinity_np(&attr_, sizeof(cpu_set_t), &cpus_);
pthread_create(&camera_pipeline_thread[3], NULL, CameraPipelineRun, (void *)cameras[3]);
CPU_ZERO(&cpus_);
CPU_SET(1, &cpus_);
pthread_attr_setaffinity_np(&attr_, sizeof(cpu_set_t), &cpus_);
pthread_create(&camera_pipeline_thread[4], NULL, CameraPipelineRun, (void *)cameras[4]);
CPU_ZERO(&cpus_);
CPU_SET(1, &cpus_);
pthread_attr_setaffinity_np(&attr_, sizeof(cpu_set_t), &cpus_);
pthread_create(&camera_pipeline_thread[5], NULL, CameraPipelineRun, (void *)cameras[5]);
FILE *f;
int cnt = 0 ;
while(1)
{
continue;
/*cnt++;
for(int i = 0 ; i < num_cameras ; i++)
{
ostringstream str;
str << i + 1;
img[i] = cameras[i]->frame;
Mat image(height * 1.5, width, CV_8UC1, img[i]);
Mat imagedst(height, width, CV_8UC3);
cvtColor(image, imagedst, CV_YUV2BGR_I420);
imshow(str.str().c_str(), imagedst);
waitKey(1);
}*/
}
return 0;
}
Any help would be appreciated.