// g++ `pkg-config gstreamer-1.0 gstreamer-app-1.0 --cflags` -IFlyCapture fc2_gst.cpp `pkg-config gstreamer-1.0 gstreamer-app-1.0 --libs` -lflycapture -o fc2_gst

// NOTE: the original angle-bracket header names were lost when this file was
// posted; these are the headers the code below needs (GStreamer/appsrc,
// the C runtime, and FlyCapture2).
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <stdio.h>
#include <string.h>
#include <signal.h>
#include <time.h>
#include "FlyCapture2.h"

#define CLIENT_IP      "130.134.235.179"
#define TARGET_BITRATE 15000000
#define WIDTH          1504
#define HEIGHT         1504

/* FlyCapture2 camera state */
FlyCapture2::Camera pg_camera;
FlyCapture2::Error pg_error;
FlyCapture2::EmbeddedImageInfo pg_embeddedInfo;
FlyCapture2::ImageMetadata pg_metadata;
FlyCapture2::CameraInfo pg_camInfo;
FlyCapture2::Format7ImageSettings fmt7ImageSettings;
FlyCapture2::Format7PacketInfo fmt7PacketInfo;
FlyCapture2::Format7Info fmt7Info;
FlyCapture2::PixelFormat k_fmt7PixFmt;
FlyCapture2::Mode k_fmt7Mode;
FlyCapture2::Image pg_rawImage;
int imageSize;

/* GStreamer pipeline state */
static GMainLoop *loop;
GstElement *pipeline, *source, *bayer_filter, *bayer2rgb_conv, *Nvidia_vid_conv,
           *encoder, *YUV_filter, *Ethernet_filter, *parser, *rtp265, *sink;
GstCaps *source_caps, *bayer_filter_caps, *YUV_filter_caps, *Ethernet_filter_caps;
GstBus *bus;
GstStateChangeReturn ret;
guint bus_watch_id;

/* Restore the default SIGINT handler so a second ctrl+c kills the process. */
static void sigint_restore (void)
{
  struct sigaction action;
  memset (&action, 0, sizeof (action));
  action.sa_handler = SIG_DFL;
  sigaction (SIGINT, &action, NULL);
}

/* Signal handler for ctrl+c */
void intHandler (int dummy)
{
  //! Emit the EOS signal which tells all the elements to shut down properly:
  printf ("Sending EOS signal to shutdown pipeline cleanly\n");
  gst_element_send_event (pipeline, gst_event_new_eos ());
  sigint_restore ();
  return;
}

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

int watcher_make ()
{
  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);
  return 0;
}

bool grabFrame (FlyCapture2::Image *frame)
{
  pg_error = pg_camera.RetrieveBuffer (frame);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Failed to capture image from PG Camera!\n");
    return false;
  }
  return true;
}

/* Monotonic time in milliseconds. */
double ms_time (void)
{
  struct timespec now_timespec;
  clock_gettime (CLOCK_MONOTONIC, &now_timespec);
  return ((double) now_timespec.tv_sec) * 1000.0 + ((double) now_timespec.tv_nsec) * 1.0e-6;
}

bool OpenCamera (void)
{
  //! Create a camera and connect to it (this is FlyCapture code):
  pg_error = pg_camera.Connect (0);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Failed to connect to a PG Camera. Check connection!\n");
    return false;
  }
  printf (" $$ Opened Point Grey camera!\n");

  pg_error = pg_camera.GetEmbeddedImageInfo (&pg_embeddedInfo);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Unable to get flycapture camera info:\n");
    pg_error.PrintErrorTrace ();
    return false;
  }

  //! Turn on time stamp on PG camera if it's off:
  if (pg_embeddedInfo.timestamp.onOff == false) {
    printf ("PG Camera: Timestamps were off. Turning them on!\n");
    pg_embeddedInfo.timestamp.onOff = true;
  }
  if (pg_embeddedInfo.frameCounter.onOff == false) {
    printf ("PG Camera: Enabling frame counter\n");
    pg_embeddedInfo.frameCounter.onOff = true;
  }
  pg_error = pg_camera.SetEmbeddedImageInfo (&pg_embeddedInfo);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Unable to turn on timestamp feature on PG Camera\n");
    pg_error.PrintErrorTrace ();
  }

  //! Get the camera info and print it out:
  pg_error = pg_camera.GetCameraInfo (&pg_camInfo);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Unable to get PG Camera info\n");
    pg_error.PrintErrorTrace ();
  }
  printf ("Fly Capture Camera Information:\n");
  printf ("  Vendor: %s\n", pg_camInfo.vendorName);
  printf ("  Model: %s\n", pg_camInfo.modelName);
  printf ("  Serial Num: %d\n", pg_camInfo.serialNumber);

  /// Need to find a place for these variables
  k_fmt7Mode = FlyCapture2::MODE_0;
  k_fmt7PixFmt = FlyCapture2::PIXEL_FORMAT_RAW8;

  //! Check if mode is supported
  bool supported;
  fmt7Info.mode = k_fmt7Mode;
  pg_error = pg_camera.GetFormat7Info (&fmt7Info, &supported);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Format Mode not supported\n");
    pg_error.PrintErrorTrace ();
  }

  //! Check if pixel format is supported
  if ((k_fmt7PixFmt & fmt7Info.pixelFormatBitField) == 0) {
    printf ("Pixel Format not supported\n");
    pg_error.PrintErrorTrace ();
  }

  //! Create struct with new settings (ROI centred on the sensor)
  fmt7ImageSettings.mode = k_fmt7Mode;
  fmt7ImageSettings.offsetX = (fmt7Info.maxWidth - WIDTH) / 2;
  fmt7ImageSettings.offsetY = (fmt7Info.maxHeight - HEIGHT) / 2;
  fmt7ImageSettings.width = WIDTH;
  fmt7ImageSettings.height = HEIGHT;
  fmt7ImageSettings.pixelFormat = k_fmt7PixFmt;

  //! Validate the new settings to make sure that they are supported
  bool valid;
  pg_error = pg_camera.ValidateFormat7Settings (&fmt7ImageSettings, &valid, &fmt7PacketInfo);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Cannot validate format settings\n");
    pg_error.PrintErrorTrace ();
  }
  if (!valid) {
    printf ("Format settings required are not supported\n");
    pg_error.PrintErrorTrace ();
    return false;
  }

  //! Set the new settings to the camera
  pg_error = pg_camera.SetFormat7Configuration (&fmt7ImageSettings, fmt7PacketInfo.recommendedBytesPerPacket);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Unable to set Format7 camera settings\n");
    pg_error.PrintErrorTrace ();
  }

  //! Start capturing images
  pg_error = pg_camera.StartCapture ();
  if (pg_error == FlyCapture2::PGRERROR_ISOCH_BANDWIDTH_EXCEEDED) {
    printf ("Bandwidth exceeded!\n");
    return false;
  } else if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Error starting capture!\n");
    pg_error.PrintErrorTrace ();
  }

  //! Capture first image to make sure things are working ok:
  pg_error = pg_camera.RetrieveBuffer (&pg_rawImage);
  if (pg_error != FlyCapture2::PGRERROR_OK) {
    printf ("Failed to capture first image from PG Camera!\n");
    return false;
  }
  //! Create dump file if we are saving images:
  imageSize = pg_rawImage.GetDataSize ();
  printf (" IMAGE SIZE: %d\n", imageSize);
  return true;
}

int main (int argc, char *argv[])
{
  OpenCamera ();
  //bool grabFrame(FlyCapture2::Image *frame);
  signal (SIGINT, intHandler);

  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create the elements */
  source = gst_element_factory_make ("appsrc", "source");
  // source = gst_element_factory_make ("videotestsrc", "source");
  bayer_filter = gst_element_factory_make ("capsfilter", "bayer_filter");
  bayer2rgb_conv = gst_element_factory_make ("bayer2rgb", "bayer2rgb_conv");
  Nvidia_vid_conv = gst_element_factory_make ("nvvidconv", "Nvidia_vid_conv");
  YUV_filter = gst_element_factory_make ("capsfilter", "YUV_filter");
  encoder = gst_element_factory_make ("omxh265enc", "encoder");
  Ethernet_filter = gst_element_factory_make ("capsfilter", "Ethernet_filter");
  parser = gst_element_factory_make ("h265parse", "parser");
  rtp265 = gst_element_factory_make ("rtph265pay", "rtp265");
  sink = gst_element_factory_make ("udpsink", "sink");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("pipeline");

  if (!pipeline || !source || !bayer_filter || !bayer2rgb_conv || !Nvidia_vid_conv ||
      !YUV_filter || !encoder || !Ethernet_filter || !parser || !rtp265 || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, bayer_filter, bayer2rgb_conv, Nvidia_vid_conv,
      YUV_filter, encoder, Ethernet_filter, parser, rtp265, sink, NULL);

  /* Link the elements together */
  gst_element_link_many (source, bayer_filter, bayer2rgb_conv, Nvidia_vid_conv, YUV_filter,
      encoder, Ethernet_filter, parser, rtp265, sink, NULL);

  /* Modify the caps properties */
  source_caps = gst_caps_new_simple ("video/x-bayer",
      "format", G_TYPE_STRING, "rggb",
      "width", G_TYPE_INT, WIDTH,
      "height", G_TYPE_INT, HEIGHT,
      "framerate", GST_TYPE_FRACTION, 120, 1, NULL);
  bayer_filter_caps = gst_caps_new_simple ("video/x-bayer",
      "format", G_TYPE_STRING, "rggb",
      "width", G_TYPE_INT, WIDTH,
      "height", G_TYPE_INT, HEIGHT,
      "framerate", GST_TYPE_FRACTION, 120, 1, NULL);
  YUV_filter_caps = gst_caps_from_string ("video/x-raw(memory:NVMM),format=I420");
  Ethernet_filter_caps = gst_caps_new_simple ("video/x-h265",
      "stream-format", G_TYPE_STRING, "byte-stream", NULL);

  g_object_set (G_OBJECT (source), "caps", source_caps, NULL);
  g_object_set (G_OBJECT (bayer_filter), "caps", bayer_filter_caps, NULL);
  g_object_set (G_OBJECT (YUV_filter), "caps", YUV_filter_caps, NULL);
  g_object_set (G_OBJECT (Ethernet_filter), "caps", Ethernet_filter_caps, NULL);
  g_object_set (G_OBJECT (encoder), "bitrate", TARGET_BITRATE, "control-rate", 2, NULL);
  g_object_set (G_OBJECT (rtp265), "pt", 96, "config-interval", 1, NULL);
  g_object_set (G_OBJECT (sink), "host", CLIENT_IP, "port", 5000, "sync", FALSE, "async", FALSE, NULL);

  /* Add function to watch bus */
  if (watcher_make () != 0)
    return -1;

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  g_main_loop_run (loop);

  /* Free resources */
  gst_caps_unref (source_caps);
  gst_caps_unref (bayer_filter_caps);
  gst_caps_unref (YUV_filter_caps);
  gst_caps_unref (Ethernet_filter_caps);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  g_main_loop_unref (loop);
  return 0;
}
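
/*
 * Note: as posted, nothing ever feeds the appsrc -- grabFrame() is defined but
 * never called, so the pipeline reaches PLAYING without any camera data in it.
 * Below is a minimal, hedged sketch of one way to close that gap: a
 * "need-data" callback that grabs a raw Bayer frame from the FlyCapture camera
 * and pushes it into the appsrc.  The name feed_frame and the simple
 * frame-counter timestamping are illustrative assumptions, not part of the
 * original program.
 */
static void feed_frame (GstElement *appsrc, guint unused_size, gpointer user_data)
{
  static guint64 frame_count = 0;

  /* Reuse the grabFrame() helper defined above to fetch the next raw image. */
  if (!grabFrame (&pg_rawImage))
    return;

  /* Copy the raw Bayer data into a buffer owned by the pipeline. */
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, imageSize, NULL);
  gst_buffer_fill (buffer, 0, pg_rawImage.GetData (), imageSize);

  /* Timestamp assuming the 120 fps rate advertised in the source caps. */
  GST_BUFFER_PTS (buffer) = gst_util_uint64_scale (frame_count, GST_SECOND, 120);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (1, GST_SECOND, 120);
  frame_count++;

  /* Hand the buffer to appsrc; the action signal takes its own ref, so drop ours. */
  GstFlowReturn flow;
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &flow);
  gst_buffer_unref (buffer);
}

/*
 * Wiring it up would look roughly like this, placed in main() after the
 * element properties are set (again, an assumption about intent rather than
 * the original author's code):
 *
 *   g_object_set (G_OBJECT (source), "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
 *   g_signal_connect (source, "need-data", G_CALLBACK (feed_frame), NULL);
 */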